Example 1
0
    async def get_application(self):
        """Build the aiohttp test application: register the joule routes,
        create a database with two streams, and attach five annotations
        to each (event-style on stream1, interval-style on stream2)."""
        app = web.Application()
        app.add_routes(joule.controllers.routes)

        db, app["psql"] = create_db([
            "/top/leaf/stream1:float32[x, y, z]",
            "/top/middle/leaf/stream2:int8[val1, val2]"
        ])

        def _stream_by_name(name):
            # look a DataStream record up by its name column
            return db.query(DataStream).filter_by(name=name).one_or_none()

        self.stream1 = _stream_by_name("stream1")
        self.stream2 = _stream_by_name("stream2")

        # stream1 gets event annotations (start only); stream2 gets
        # interval annotations (start and end) -- five of each
        for i in range(5):
            # starts fall at 0, 1000, 2000, 3000, 4000 (microsecond timestamps)
            begin = utilities.timestamp_to_datetime(i * 1000)
            finish = utilities.timestamp_to_datetime(i * 1000 + 200)
            event = Annotation(title="stream1_%d" % i, start=begin)
            span = Annotation(title="stream2_%d" % i, start=begin, end=finish)
            event.stream = self.stream1
            span.stream = self.stream2
            db.add(event)
            db.add(span)
        db.commit()
        app["db"] = db
        self.db = db
        return app
Example 2
0
async def copy(copy_map: 'CopyMap', src_datastore: 'DataStore',
               dest_datastore: 'DataStore', src_db: 'Session',
               dest_db: 'Session'):
    """Copy every data interval listed in *copy_map* from the source data
    store to the destination, carrying the source's annotations along.

    For each interval: the samples are copied by ``copy_interval`` (which
    updates the module-level ``num_rows`` counter), any destination
    annotations that start inside the interval are deleted, and the
    source annotations for that interval are duplicated into the
    destination session.  Progress is shown with a click progress bar
    whose length is the total duration of all intervals.
    """
    from joule.models import annotation, Annotation
    # num_rows is shared with copy_interval, which accumulates into it
    global num_rows
    num_rows = 0
    # compute the duration of data to copy
    duration = 0
    for interval in copy_map.intervals:
        duration += interval[1] - interval[0]

    with click.progressbar(label='[%s] --> [%s]' %
                           (copy_map.source_path, copy_map.dest_path),
                           length=duration) as bar:
        for interval in copy_map.intervals:
            await copy_interval(interval[0], interval[1], bar, copy_map.source,
                                copy_map.dest, src_datastore, dest_datastore)
            # annotation timestamps are stored as datetimes, so convert
            # the interval's microsecond bounds before filtering
            start_dt = utilities.timestamp_to_datetime(interval[0])
            end_dt = utilities.timestamp_to_datetime(interval[1])
            # remove existing annotations (if any)
            dest_db.query(Annotation). \
                filter(Annotation.stream == copy_map.dest). \
                filter(Annotation.start >= start_dt). \
                filter(Annotation.start < end_dt). \
                delete()
            # retrieve source annotations that start in this interval
            items: List[Annotation] = src_db.query(Annotation). \
                filter(Annotation.stream == copy_map.source). \
                filter(Annotation.start >= start_dt). \
                filter(Annotation.start < end_dt)
            # copy them over to the destination
            for item in items:
                item_copy = annotation.from_json(item.to_json())
                # clear the primary key so the destination assigns a new one
                item_copy.id = None
                item_copy.stream = copy_map.dest
                dest_db.add(item_copy)
            dest_db.commit()
    if num_rows == 0:
        print("[%s]\t--nothing to copy--" % copy_map.dest_path)
    else:
        print("[%s]\t copied %d rows" % (copy_map.dest_path, num_rows))
Example 3
0
 async def test_annotation_create_by_stream_id(self):
     """POST /annotation.json with a stream_id creates the annotation."""
     payload = {
         "title": "new_annotation",
         "content": "content",
         "start": 900,
         "end": 1000,
         "stream_id": self.stream1.id
     }
     resp = await self.client.request("POST",
                                      "/annotation.json",
                                      json=payload)
     body = await resp.json()
     # the new record should now exist in the database
     created = self.db.query(Annotation).get(body["id"])
     self.assertEqual(created.title, "new_annotation")
     self.assertEqual(created.stream_id, self.stream1.id)
     self.assertEqual(created.start,
                      utilities.timestamp_to_datetime(900))
     self.assertEqual(created.end,
                      utilities.timestamp_to_datetime(1000))
Example 4
0
    async def consolidate(self, stream: 'DataStream', start: int, end: int,
                          max_gap: int) -> int:
        """Remove interval gaps of duration <= *max_gap* (microseconds)
        from *stream* between *start* and *end*.

        Deletes the interval-boundary rows that open each small gap from
        the ``data.stream<id>_intervals`` table.

        Returns the number of gaps removed (0 when there is no data, no
        interval breaks, or no gap small enough).
        """
        intervals = await self.intervals(stream, start, end)
        if len(intervals) == 0:
            return 0  # no data, nothing to do
        duration = [intervals[0][0], intervals[-1][1]]

        # the gaps are the complement of the data intervals within the
        # overall [first start, last end] span
        gaps = interval_tools.interval_difference([duration], intervals)

        if len(gaps) == 0:
            return 0  # no interval breaks, nothing to do
        small_gaps = [gap for gap in gaps if (gap[1] - gap[0]) <= max_gap]
        if len(small_gaps) == 0:
            # without this guard the query below would be "... IN ()",
            # which is invalid SQL
            return 0
        boundaries = [gap[0] for gap in small_gaps]
        str_datetimes = [
            "'%s'" % str(timestamp_to_datetime(ts)) for ts in boundaries
        ]
        # NOTE: values are interpolated rather than parameterized; they are
        # internally generated (int stream id, formatted datetimes), not
        # user input
        query = "DELETE FROM data.stream%d_intervals WHERE time IN (%s)" % (
            stream.id, ",".join(str_datetimes))
        async with self.pool.acquire() as conn:
            await conn.execute(query)
        return len(small_gaps)