    def test_insert_retrieve_versioning(self):

        test_status = 'pyapi.status'
        now = datetime.now()

        series = Series(ENTITY, VERSION_METRIC)
        val = random.randint(0, VALUE - 1)
        series.add_samples(
            Sample(value=val,
                   time=now - timedelta(seconds=2),
                   version={'status': test_status}))

        sf = SeriesFilter(metric=VERSION_METRIC)
        ef = EntityFilter(entity=ENTITY)
        df = DateFilter(start_date=now - timedelta(hours=1), end_date=now)
        vf = VersioningFilter(versioned=True)
        query = SeriesQuery(series_filter=sf,
                            entity_filter=ef,
                            date_filter=df,
                            versioning_filter=vf)

        successful = self.service.insert(series)
        time.sleep(WAIT_TIME)
        series, = self.service.query(query)  # exactly one series is expected back
        # print(series)
        last_sample = series.data[-1]

        self.assertTrue(successful)
        self.assertEqual(last_sample.v, val)
        self.assertIsNotNone(last_sample.version)
        self.assertEqual(last_sample.version['status'], test_status)
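The tests above assume a configured connection and a SeriesService bound to it; ENTITY, VERSION_METRIC, VALUE, and WAIT_TIME are module-level test constants. A minimal setup sketch, with a hypothetical host and credentials, using the atsd_client connect_url helper:

from atsd_client import connect_url
from atsd_client.models import (Sample, Series, SeriesQuery, SeriesFilter,
                                EntityFilter, DateFilter, VersioningFilter)
from atsd_client.services import SeriesService

connection = connect_url('https://atsd_hostname:8443', 'username', 'password')  # hypothetical endpoint
service = SeriesService(connection)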
    def test_fields_match(self):
        sample = Sample(1, datetime.now())
        series = Series(ENTITY, METRIC, tags={TAG: TAG_VALUE})
        series.add_samples(sample)
        self.assertEqual(ENTITY, series.entity)
        self.assertEqual(METRIC, series.metric)
        self.assertEqual({TAG: TAG_VALUE}, series.tags)
        self.assertEqual([sample], series.data)
Example #4
    def setUp(self):
        """
        Insert series.
        """
        series = Series(ENTITY, METRIC)
        series.add_samples(Sample(VALUE, datetime.now()))
        self._series_service = SeriesService(self.connection)
        self._series_service.insert(series)
        time.sleep(self.wait_time)
Example #5
    def setUp(self):
        """
        Insert series to open the alert.
        """
        series = Series(ENTITY, METRIC)
        series.add_samples(Sample(VALUE, datetime.now()))
        self._series_service = SeriesService(self.connection)
        self._series_service.insert(series)
        time.sleep(self.wait_time)
    def test_series_data_field_empty(self):
        series = Series(entity=ENTITY, metric=METRIC)
        series.tags = {TAG: TAG_VALUE}
        # print(series)

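        # inserting a series with no samples must fail client-side with DataParseException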
        with self.assertRaises(DataParseException) as cm:
            self.service.insert(series)

        self.assertEqual(cm.exception.non_parsed_field, 'data')
    @classmethod
    def setUpClass(cls):
        """
        Insert series.
        """
        super().setUpClass()
        series = Series(NAME, METRIC)
        series.add_samples(Sample(1, datetime.now()))
        series_service = SeriesService(cls.connection)
        series_service.insert(series)
        time.sleep(cls.wait_time)
Example #10
    def test_fields_match(self):
        """
        Check fields of Series model were set as expected.
        """
        sample = Sample(1, datetime.now())
        series = Series(ENTITY, METRIC, tags={TAG: TAG_VALUE})
        series.add_samples(sample)
        self.assertEqual(ENTITY, series.entity)
        self.assertEqual(METRIC, series.metric)
        self.assertEqual({TAG: TAG_VALUE}, series.tags)
        self.assertEqual([sample], series.data)
Example #12
    def test_history_query(self):
        # Insert series to close the alert.
        series = Series(ENTITY, METRIC)
        series.add_samples(Sample(-1, datetime.now()))
        self._series_service.insert(series)

        time.sleep(self.wait_time)

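        # ef (EntityFilter) and df (DateFilter) are assumed to be defined at module level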
        query = AlertHistoryQuery(entity_filter=ef, date_filter=df, rule=RULE, metric=METRIC)
        result = self.service.history_query(query)
        # print(result)
        self.assertIsNotNone(result)
        self.assertGreater(len(result), 0)
        a = result[0]
        self.common_checks(a)
    def test_series(self):
        # Insert series.
        series = Series(ENTITY, NAME)
        series.add_samples(Sample(1, datetime.now()))
        series_service = SeriesService(self.connection)
        series_service.insert(series)

        time.sleep(self.wait_time)

        result = self.service.series(metric=NAME, min_insert_date='current_hour')
        # print(result)
        self.assertIsNotNone(result)
        self.assertGreater(len(result), 0)
        series = result[0]
        self.assertIsNotNone(series)
        self.assertIsInstance(series, Series)
        self.assertEqual(NAME, series.metric)
        self.assertEqual(ENTITY, series.entity)
        self.assertEqual({}, series.tags)
def insert_series_sample(data_service, val=None, *vals):
    series = Series(ENTITY, METRIC)
    series.tags = {TAG: TAG_VALUE}
    series.add_samples(Sample(val, datetime.now()))
    if vals:
        # space out the extra samples so each lands in a distinct time slot
        for v in vals:
            time.sleep(WAIT_TIME + 2)
            series.add_samples(Sample(v, datetime.now()))

    # print('insertion =', series)

    return data_service.insert(series)
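For illustration, a hypothetical call against a service connection built as in the sketch above: the first value is inserted immediately, and each extra value after a WAIT_TIME + 2 second pause.

successful = insert_series_sample(SeriesService(connection), 1, 2, 3)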
Example #18
    batches_left = BATCH_COUNT
    total_asset_duration[entity_name] = timedelta()

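    # emit batch data until this entity's remaining batch budget is exhausted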
    while batches_left > 0:
        procedure_name = ''

        if proc == 0:
            stage_2_leaps, metrics = update_metrics_behaviour()
            procedures, td = update_durations(stage_2_leaps)
            total_asset_duration[entity_name] += timedelta(hours=td)
            if SAVE_AS_COMMANDS:
                commands.append(
                    'series e:%s x:%s=%s d:%s' % (entity_name, entity_name + UNIT_BATCH_ID_SUFFIX, batch_id, to_iso(t)))
            else:
                series.append(Series(entity_name, entity_name + UNIT_BATCH_ID_SUFFIX,
                                     data=[Sample(time=t, x=batch_id, value=None)]))
            batch_id += 1
            procedure_name = 'Stage 1 Static Drying'
        elif procedures[proc][0] == 'Inactive':
            if SAVE_AS_COMMANDS:
                commands.append(
                    'series e:%s x:%s=%s d:%s' %
                    (entity_name, entity_name + UNIT_BATCH_ID_SUFFIX, 'Inactive', to_iso(t)))
            else:
                series.append(Series(entity_name, entity_name + UNIT_BATCH_ID_SUFFIX,
                                     data=[Sample(time=t, x='Inactive', value=None)]))
            procedure_name = 'Inactive'
            batches_left -= 1
        elif procedures[proc][0] == 'Stage 2: Enable agitator 0':
            procedure_name = 'Stage 2 Intermittent Agitation'
        elif procedures[proc][0] == 'Stage 3: Product Cooled Down':
Example #19
    if target_series is None:
        logging.warning(err(series.tags, first_dst_time))
        continue

    target_series.entity = dst_entity
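    # batch_size == 0 disables chunking: send the whole series in one request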
    if batch_size == 0:
        insert_or_warning(target_series)
    else:
        size = len(target_series.data)
        start_position = 0
        iteration = 1
        while size > 0:
            batch_len = min(size, batch_size)
            # take the next contiguous slice of samples for this batch
            batch_data = target_series.data[start_position:start_position + batch_len]
            batch = Series(target_series.entity, target_series.metric,
                           tags=target_series.tags, data=batch_data)
            logging.info("Iteration %s: sending %s samples to ATSD" % (iteration, batch_len))
            start_position += batch_len
            iteration += 1
            size -= batch_len
            insert_or_warning(batch)
            logging.info("Pending %s samples to send" % size)

    logging.info("Sent series with '%s' entity, '%s' metric, '%s' tags" % (target_series.entity,
                                                                          target_series.metric, target_series.tags))
    sample_count = len(target_series.data)
    samples_to_log = 5
    logging.info("Sample count: %d" % sample_count)
    for i in range(min(samples_to_log, sample_count)):
        sample = target_series.data[i]
        logging.info("Sample: %s : %s" % (sample.get_date(), sample.v))
Example #20
        tags = dict(tag.split('=') for tag in tags.split(';'))
    else:
        tags = None

    # prepare aggregate types
    if aggregate_types == 'all':
        aggregate_types = [key for key in dir(AggregateType) if not key.startswith('_')]
    else:
        aggregate_types = aggregate_types.split('+')

    # try to retrieve series from the previous query
    expected_series = []
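    # each line in the file is a dict literal of a series saved by a previous query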
    with open(filename) as fp:
        for line in fp:
            series = Series.from_dict(ast.literal_eval(line))
            expected_series.extend(series)

    # prepare series query
    sf = SeriesFilter(metric=metric_name, tags=tags, exact_match=exact_match)
    ef = EntityFilter(entity=entity_name)
    df = DateFilter(start_date=start_date, end_date=end_date)
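    # 7-minute aggregation periods; the min/max bounds are used by threshold-based aggregators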
    aggregate = Aggregate(period={'count': 7, 'unit': TimeUnit.MINUTE}, threshold={'min': 10, 'max': 90},
                          types=aggregate_types, order=1)
    tf = TransformationFilter(aggregate=aggregate)

    # add rate and group to the transformation filter if specified instead of query
    if query != 'query':
        for attr in query.split('+'):
            if attr == 'rate':
Example #21
    def tearDown(self):
        # Insert series to close the alert.
        series = Series(ENTITY, METRIC)
        series.add_samples(Sample(-1, datetime.now()))
        self._series_service.insert(series)
        time.sleep(self.wait_time)
Example #22
dataCommandSplines = []
series = []

for asset in assets:
    proc = 0
    t = next_time(None, MIN_TIME)

    dataSplines = SplineHolder()
    dataCommandSplines.append([asset, dataSplines])

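    # walk the timeline, recording batch ids and procedure names as text samples (x=..., value=None)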
    while t < MAX_TIME:
        iso = t.isoformat()
        if proc == 0:
            series.append(
                Series(asset['id'],
                       'axi.Unit_BatchID',
                       data=[Sample(time=iso, x=batch_id, value=None)]))
            batch_id += 1
        elif procedures[proc][0] == 'Inactive':
            series.append(
                Series(asset['id'],
                       'axi.Unit_BatchID',
                       data=[Sample(time=iso, x='Inactive', value=None)]))
        series.append(
            Series(asset['id'],
                   'axi.Unit_Procedure',
                   data=[Sample(time=iso, x=procedures[proc][0], value=None)]))
        next_t = next_time(procedures[proc], t)
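        # register each metric's spline for the current procedure over [t, next_t)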
        for [metric, splines] in metrics:
            dataSplines.put_spline(t, next_t, metric,
                                   splines[procedures[proc][0]])