async def remove(request: web.Request):
    """Delete stored data from a DataStream over an optional [start, end) range.

    Query parameters: path|id (stream selector), optional integer start/end.
    """
    db: Session = request.app["db"]
    data_store: DataStore = request.app["data-store"]
    query = request.query
    # locate the stream either by path or by database id
    if 'path' in query:
        target = folder.find_stream_by_path(query['path'], db,
                                            stream_type=DataStream)
    elif 'id' in query:
        target = db.query(DataStream).get(query["id"])
    else:
        return web.Response(text="specify an id or a path", status=400)
    if target is None:
        return web.Response(text="stream does not exist", status=404)
    # optional integer time bounds
    bounds = {'start': None, 'end': None}
    for name in bounds:
        if name in query:
            try:
                bounds[name] = int(query[name])
            except ValueError:
                return web.Response(text="[start] and [end] must be integers",
                                    status=400)
    start_ts, end_ts = bounds['start'], bounds['end']
    # when both bounds are given they must form a valid interval
    if start_ts is not None and end_ts is not None and start_ts >= end_ts:
        return web.Response(text="[start] must be < [end]", status=400)
    await data_store.remove(target, start_ts, end_ts)
    return web.Response(text="ok")
async def create(request):
    """Create an annotation on a data stream from a JSON request body.

    Body must contain stream_id or stream_path plus annotation fields;
    a non-empty title is mandatory.
    """
    db: Session = request.app["db"]
    if request.content_type != 'application/json':
        return web.Response(text='content-type must be application/json',
                            status=400)
    body = await request.json()
    # resolve the target stream by id or path
    if 'stream_id' in body:
        my_stream = db.query(DataStream).get(body["stream_id"])
    elif 'stream_path' in body:
        my_stream = folder.find_stream_by_path(body['stream_path'], db,
                                               stream_type=DataStream)
    else:
        return web.Response(text="specify a stream_id", status=400)
    if my_stream is None:
        return web.Response(text="stream does not exist", status=404)
    try:
        my_annotation = from_json(body)
        if my_annotation.title is None or my_annotation.title == '':
            # BUGFIX: error message was misspelled as "reqiured"
            raise ApiError("title is required")
    except ApiError as e:
        return web.Response(text=str(e), status=400)
    my_annotation.stream = my_stream
    db.add(my_annotation)
    db.commit()
    return web.json_response(my_annotation.to_json())
async def write(request: web.Request):
    """Ingest binary pipe data from the request body into a DataStream.

    Marks the stream as an active destination for the duration of the
    insert and always clears the flag afterwards.
    """
    db: Session = request.app["db"]
    data_store: DataStore = request.app["data-store"]
    query = request.query
    # resolve the destination stream
    if 'path' in query:
        destination = folder.find_stream_by_path(query['path'], db,
                                                 stream_type=DataStream)
    elif 'id' in query:
        destination = db.query(DataStream).get(query["id"])
    else:
        return web.Response(text="specify an id or a path", status=400)
    if destination is None:
        return web.Response(text="stream does not exist", status=404)
    # flag the stream as a destination while the inserter runs
    destination.is_destination = True
    db.commit()
    pipe = pipes.InputPipe(name="inbound", stream=destination,
                           reader=request.content)
    try:
        inserter = await data_store.spawn_inserter(destination, pipe,
                                                   insert_period=0)
        await inserter
    except DataError as e:
        return web.Response(text=str(e), status=400)
    except asyncio.CancelledError as e:
        raise e
    finally:
        # always release the destination flag, even on error or cancel
        destination.is_destination = False
        db.commit()
    return web.Response(text="ok")
async def _subscribe(request: web.Request, json: bool):
    """Stream live data from a producing stream to the client as a chunked
    binary response.

    Subscribes a LocalPipe to the stream via the supervisor and forwards
    every block of data (and interval tokens) until the pipe empties or
    the client disconnects. JSON output is not supported.
    """
    db: Session = request.app["db"]
    supervisor: Supervisor = request.app['supervisor']
    if json:
        return web.Response(text="JSON subscription not implemented",
                            status=400)
    # find the requested stream
    if 'path' in request.query:
        stream = folder.find_stream_by_path(request.query['path'], db,
                                            stream_type=DataStream)
    elif 'id' in request.query:
        stream = db.query(DataStream).get(request.query["id"])
    else:
        return web.Response(text="specify an id or a path", status=400)
    if stream is None:
        return web.Response(text="stream does not exist", status=404)
    pipe = pipes.LocalPipe(stream.layout)
    try:
        # unsubscribe is a callback that detaches the pipe; it must be
        # called on every exit path below to avoid leaking the subscription
        unsubscribe = supervisor.subscribe(stream, pipe)
    except SubscriptionError:
        return web.Response(text="stream is not being produced", status=400)
    resp = web.StreamResponse(status=200,
                              headers={'joule-layout': stream.layout,
                                       'joule-decimation': '1'})
    resp.enable_chunked_encoding()
    try:
        await resp.prepare(request)
    except ConnectionResetError:
        # client went away before the response started
        unsubscribe()
        return
    try:
        while True:
            try:
                data = await pipe.read()
            except pipes.EmptyPipe:
                # producer closed the pipe: normal end of subscription
                unsubscribe()
                return resp
            pipe.consume(len(data))
            if len(data) > 0:
                await resp.write(data.tobytes())
            if pipe.end_of_interval:
                # delimit data intervals with the layout's interval token
                await resp.write(pipes.interval_token(stream.layout).tobytes())
    except asyncio.CancelledError as e:
        unsubscribe()
        # propagate the CancelledError up
        raise e
    except ConnectionResetError:
        # client disconnected mid-stream; drop the subscription silently
        unsubscribe()
async def test_folder_delete_by_path(self):
    """Deleting a folder by path removes it and its streams but keeps parents."""
    db: Session = self.app["db"]
    # BUGFIX: removed unused local `f = folder.find("/top/leaf", db)`
    payload = {'path': "/top/leaf"}
    resp = await self.client.delete("/folder.json", params=payload)
    self.assertEqual(resp.status, 200)
    self.assertIsNone(folder.find("/top/leaf", db))
    # deletes the streams
    self.assertIsNone(folder.find_stream_by_path("/top/leaf/stream1", db))
    # keeps the parent folders
    self.assertIsNotNone(folder.find("/top", db))
async def info(request: web.Request):
    """Return JSON metadata plus data-store statistics for one DataStream."""
    db: Session = request.app["db"]
    data_store: DataStore = request.app["data-store"]
    query = request.query
    # resolve the stream by path or id
    if 'path' in query:
        target = folder.find_stream_by_path(query['path'], db,
                                            stream_type=DataStream)
    elif 'id' in query:
        target = db.query(DataStream).get(query["id"])
    else:
        return web.Response(text="specify an id or a path", status=400)
    if target is None:
        return web.Response(text="stream does not exist", status=404)
    # the data store computes storage info for the requested stream(s)
    stream_info = await data_store.info([target])
    return web.json_response(target.to_json(stream_info))
async def index(request):
    """List annotations for one or more streams, optionally bounded by time.

    Query parameters:
      stream_id / stream_path -- one or more target streams (>= 1 required)
      start, end -- optional microsecond UTC timestamps
    Returns a JSON array of annotation objects.
    """
    db: Session = request.app["db"]
    # parse time bounds
    start = None
    end = None
    try:
        if 'start' in request.query:
            ts = int(request.query['start'])
            start = datetime.datetime.utcfromtimestamp(ts / 1e6)
        if 'end' in request.query:
            ts = int(request.query['end'])
            end = datetime.datetime.utcfromtimestamp(ts / 1e6)
    except ValueError:
        return web.Response(
            text="[start] and [end] must be microsecond utc timestamps",
            status=400)
    if (('stream_id' not in request.query) and
            ('stream_path' not in request.query)):
        return web.Response(
            text="must specify at least one stream_id or stream_path",
            status=400)
    response = []
    stream_ids = []
    if "stream_id" in request.query:
        stream_ids = request.query.getall("stream_id")
    if "stream_path" in request.query:
        for path in request.query.getall("stream_path"):
            stream = folder.find_stream_by_path(path, db,
                                                stream_type=DataStream)
            if stream is None:
                return web.Response(text="stream [%s] does not exist" % path,
                                    status=404)
            stream_ids.append(stream.id)
    for stream_id in stream_ids:
        annotations = db.query(Annotation).filter_by(stream_id=stream_id)
        if start is not None:
            annotations = annotations.filter(Annotation.start >= start)
        if end is not None:
            annotations = annotations.filter(Annotation.start <= end)
        # BUGFIX: an unused `data` list previously duplicated this
        # comprehension, serializing every annotation twice per stream
        response += [a.to_json() for a in annotations]
    return web.json_response(response)
async def remove_events(request):
    """Delete events from an EventStream, optionally bounded and filtered.

    Query parameters: path|id (stream), optional integer start/end,
    optional JSON-encoded filter expression.
    """
    db: Session = request.app["db"]
    event_store: EventStore = request.app["event-store"]
    query = request.query
    # resolve the event stream
    if 'path' in query:
        my_stream = folder.find_stream_by_path(query['path'], db,
                                               stream_type=EventStream)
    elif 'id' in query:
        my_stream = db.query(EventStream).get(query["id"])
    else:
        return web.Response(text="specify an id or a path", status=400)
    if my_stream is None:
        return web.Response(text="stream does not exist", status=404)
    # parse optional integer bounds
    params = {'start': None, 'end': None}
    for key in params:
        if key in query:
            try:
                params[key] = int(query[key])
            except ValueError:
                return web.Response(text="parameter [%s] must be an int" % key,
                                    status=400)
    start, end = params['start'], params['end']
    # when both bounds are present they must form a valid interval
    if start is not None and end is not None and start >= end:
        return web.Response(text="[start] must be < [end]", status=400)
    # optional JSON filter expression (missing/empty means no filter)
    json_filter = None
    raw_filter = query.get('filter')
    if raw_filter:
        try:
            json_filter = json.loads(raw_filter)
            # TODO verify syntax
        except (json.decoder.JSONDecodeError, ValueError):
            return web.Response(text="invalid filter parameter", status=400)
    await event_store.remove(my_stream, start, end, json_filter=json_filter)
    return web.Response(text="ok")
async def delete(request):
    """Destroy an EventStream's stored events and remove its database record."""
    db: Session = request.app["db"]
    data_store: EventStore = request.app["event-store"]
    query = request.query
    # resolve the event stream by path or id
    if 'path' in query:
        target = folder.find_stream_by_path(query['path'], db,
                                            stream_type=EventStream)
    elif 'id' in query:
        target = db.query(EventStream).get(query["id"])
    else:
        return web.Response(text="specify an id or a path", status=400)
    if target is None:
        return web.Response(text="stream does not exist", status=404)
    # destroy the stored events first, then the metadata record
    await data_store.destroy(target)
    db.delete(target)
    db.commit()
    return web.Response(text="ok")
async def delete_all(request):
    """Delete every annotation on a stream, optionally within a time range.

    Query parameters: stream_id|stream_path (required), optional
    start/end microsecond UTC timestamps.
    """
    db: Session = request.app["db"]
    query = request.query
    # resolve the target stream
    if "stream_id" in query:
        my_stream = db.query(DataStream).get(query["stream_id"])
    elif "stream_path" in query:
        my_stream = folder.find_stream_by_path(query["stream_path"], db,
                                               stream_type=DataStream)
    else:
        return web.Response(
            text="must specify either stream_id or stream_path",
            status=400)
    # optional microsecond timestamps converted to naive UTC datetimes
    start = None
    end = None
    try:
        if 'start' in query:
            start = datetime.datetime.utcfromtimestamp(int(query['start']) / 1e6)
        if 'end' in query:
            end = datetime.datetime.utcfromtimestamp(int(query['end']) / 1e6)
    except ValueError:
        return web.Response(
            text="[start] and [end] must be microsecond utc timestamps",
            status=400)
    if my_stream is None:
        return web.Response(text="stream does not exist", status=404)
    matches = db.query(Annotation).filter_by(stream_id=my_stream.id)
    if start is not None:
        matches = matches.filter(Annotation.start >= start)
    if end is not None:
        matches = matches.filter(Annotation.start <= end)
    # delete each matching annotation, then commit once
    for annotation in matches:
        db.delete(annotation)
    db.commit()
    return web.Response(text="ok")
async def move(request: web.Request):
    """Move a data stream into a different folder.

    JSON body: src_path|src_id selects the stream; dest_path|dest_id
    selects the target folder (dest_path is created if missing).
    Fails when the stream is locked or a same-named stream already
    exists in the destination.
    """
    db: Session = request.app["db"]
    if request.content_type != 'application/json':
        return web.Response(text='content-type must be application/json',
                            status=400)
    body = await request.json()
    # find the stream
    if 'src_path' in body:
        my_stream = folder.find_stream_by_path(body['src_path'], db,
                                               stream_type=DataStream)
    elif 'src_id' in body:
        my_stream = db.query(DataStream).get(body["src_id"])
    else:
        return web.Response(text="specify a source id or a path", status=400)
    if my_stream is None:
        return web.Response(text="stream does not exist", status=404)
    if my_stream.locked:
        return web.Response(text="locked streams cannot be moved", status=400)
    # find or create the destination folder
    if 'dest_path' in body:
        try:
            destination = folder.find(body['dest_path'], db, create=True)
        except ConfigurationError as e:
            return web.Response(text="Destination error: %s" % str(e),
                                status=400)
    elif 'dest_id' in body:
        destination = db.query(Folder).get(body["dest_id"])
        # BUGFIX: an unknown dest_id previously left destination=None and
        # crashed below with an AttributeError (HTTP 500)
        if destination is None:
            return web.Response(text="destination folder does not exist",
                                status=404)
    else:
        return web.Response(text="specify a destination", status=400)
    # the name must be unique among both data and event streams
    existing_names = [s.name for s in
                      destination.data_streams + destination.event_streams]
    if my_stream.name in existing_names:
        db.rollback()
        return web.Response(
            text="stream with the same name exists in the destination folder",
            status=400)
    destination.data_streams.append(my_stream)
    db.commit()
    return web.json_response({"stream": my_stream.to_json()})
async def consolidate(request):
    """Merge a stream's adjacent data intervals separated by gaps < max_gap.

    Query parameters: path|id (stream), optional integer start/end,
    required positive integer max_gap (microseconds).
    Returns JSON {"num_consolidated": <count>}.
    """
    db: Session = request.app["db"]
    data_store: DataStore = request.app["data-store"]
    # find the requested stream
    if 'path' in request.query:
        stream = folder.find_stream_by_path(request.query['path'], db,
                                            stream_type=DataStream)
    elif 'id' in request.query:
        stream = db.query(DataStream).get(request.query["id"])
    else:
        return web.Response(text="specify an id or a path", status=400)
    if stream is None:
        return web.Response(text="stream does not exist", status=404)
    # parse time bounds
    start = None
    end = None
    try:
        if 'start' in request.query:
            start = int(request.query['start'])
        if 'end' in request.query:
            end = int(request.query['end'])
    except ValueError:
        return web.Response(text="[start] and [end] must be integers",
                            status=400)
    # make sure bounds make sense
    if start is not None and end is not None and start >= end:
        return web.Response(text="[start] must be < [end]", status=400)
    # parse the required max_gap parameter
    if 'max_gap' not in request.query:
        return web.Response(text="specify max_gap as us integer", status=400)
    try:
        max_gap = int(request.query['max_gap'])
        if max_gap <= 0:
            raise ValueError()
    except ValueError:
        # BUGFIX: corrected typo "postive" in the error message
        return web.Response(text="max_gap must be positive integer",
                            status=400)
    num_removed = await data_store.consolidate(stream, start, end, max_gap)
    return web.json_response(data={"num_consolidated": num_removed})
async def write_events(request):
    """Insert/update events on an EventStream from a JSON request body.

    Body: path|id selects the stream; 'events' is the list to upsert.
    Returns JSON with the stored events and their count.
    """
    db: Session = request.app["db"]
    event_store: EventStore = request.app["event-store"]
    body = await request.json()
    # find the requested stream
    if 'path' in body:
        my_stream = folder.find_stream_by_path(body['path'], db,
                                               stream_type=EventStream)
    elif 'id' in body:
        my_stream = db.query(EventStream).get(body["id"])
    else:
        # CONSISTENCY FIX: every sibling handler uses this exact message;
        # this one read "specify an id or a path!!"
        return web.Response(text="specify an id or a path", status=400)
    if my_stream is None:
        return web.Response(text="stream does not exist", status=404)
    if 'events' not in body:
        return web.Response(text="specify events to add", status=400)
    events = await event_store.upsert(my_stream, body['events'])
    return web.json_response(data={'count': len(events), 'events': events})
async def run(src_db: 'Session', dest_db: 'Session', src_datastore: 'DataStore',
              dest_datastore: 'DataStore', stream_map: Optional[List],
              confirmed: bool, start: Optional[int], end: Optional[int]):
    """Plan and execute a data copy between two Joule databases.

    Builds a CopyMap for each [source_path, dest_path] pair in stream_map
    (defaulting to copying every source stream to the same path), shows
    the plan as a table, asks for confirmation unless `confirmed`, then
    copies only the intervals missing from the destination.
    """
    from joule.models import DataStream, folder, data_stream
    from joule.services import parse_pipe_config
    src_streams = src_db.query(DataStream).all()
    dest_streams = dest_db.query(DataStream).all()
    await src_datastore.initialize(src_streams)
    await dest_datastore.initialize(dest_streams)
    if stream_map is None:
        # no explicit map: copy every source stream to an identical path
        src_streams = src_db.query(DataStream).all()
        src_paths = map(folder.get_stream_path, src_streams)
        stream_map = map(lambda _path: [_path, _path], src_paths)
    # create the copy map array
    copy_maps = []
    for item in stream_map:
        # get the source stream
        source = folder.find_stream_by_path(item[0], src_db)
        if source is None:
            raise errors.ConfigurationError(
                "source stream [%s] does not exist" % item[0])
        src_intervals = await src_datastore.intervals(source, start, end)
        # get or create the destination stream
        dest = folder.find_stream_by_path(item[1], dest_db)
        if dest is None:
            (path, name, _) = parse_pipe_config.parse_pipe_config(item[1])
            dest_folder = folder.find(path, dest_db, create=True)
            # clone the source stream's configuration for the new destination
            dest = data_stream.from_json(source.to_json())
            # set the attributes on the new stream
            dest.name = name
            dest.keep_us = dest.KEEP_ALL
            dest.is_configured = False
            dest.is_source = False
            dest.is_destination = False
            # clear ids so the ORM assigns fresh ones on commit
            dest.id = None
            for e in dest.elements:
                e.id = None
            dest_folder.streams.append(dest)
            dest_intervals = None
        else:
            # make sure the destination is compatible
            if dest.layout != source.layout:
                raise errors.ConfigurationError(
                    "source stream [%s] is not compatible with destination stream [%s]"
                    % (item[0], item[1]))
            dest_intervals = await dest_datastore.intervals(dest, start, end)
        # figure out the time bounds to copy
        if dest_intervals is None:
            copy_intervals = src_intervals
        else:
            # only copy what the destination does not already have
            copy_intervals = utilities.interval_difference(
                src_intervals, dest_intervals)
        copy_maps.append(CopyMap(source, dest,
                                 copy_intervals))
    # display the copy table
    rows = []
    copy_required = False
    for item in copy_maps:
        # NOTE(review): this loop reuses the `start`/`end` parameters as
        # display strings (em-dash when the map is empty) — intentional
        # shadowing, the original bounds are not needed after planning
        if item.start is None:
            start = "\u2014"
            end = "\u2014"
        else:
            start = utilities.timestamp_to_human(item.start)
            end = utilities.timestamp_to_human(item.end)
            copy_required = True
        rows.append([item.source_path, item.dest_path, start, end])
    click.echo(
        tabulate(rows,
                 headers=["Source", "Destination", "From", "To"],
                 tablefmt="fancy_grid"))
    if not copy_required:
        click.echo("No data needs to be copied")
        return
    if not confirmed and not click.confirm("Start data copy?"):
        click.echo("cancelled")
        return
    # persist any destination streams created above before copying
    dest_db.commit()
    # execute the copy
    for item in copy_maps:
        await copy(item, src_datastore, dest_datastore, src_db, dest_db)
async def read_events(request):
    """Read events from an EventStream with optional bounds, filter and limit.

    Query parameters: path|id (stream), optional integer start/end/limit,
    optional JSON 'filter', optional 'return-subset' flag.

    Limit semantics: without 'return-subset' the limit is HARD — if more
    events match, only the count is returned (empty events list). With
    'return-subset' the limit is SOFT — at most `limit` events are returned.
    """
    db: Session = request.app["db"]
    event_store: EventStore = request.app["event-store"]
    # find the requested stream
    if 'path' in request.query:
        my_stream = folder.find_stream_by_path(request.query['path'], db,
                                               stream_type=EventStream)
    elif 'id' in request.query:
        my_stream = db.query(EventStream).get(request.query["id"])
    else:
        return web.Response(text="specify an id or a path", status=400)
    if my_stream is None:
        return web.Response(text="stream does not exist", status=404)
    # parse optional parameters
    params = {'start': None, 'end': None, 'limit': None}
    param = ""  # to appease type checker
    try:
        for param in params:
            if param in request.query:
                params[param] = int(request.query[param])
    except ValueError:
        # `param` still names the offending parameter here
        return web.Response(text="parameter [%s] must be an int" % param,
                            status=400)
    # make sure parameters make sense
    if ((params['start'] is not None and params['end'] is not None) and
            (params['start'] >= params['end'])):
        return web.Response(text="[start] must be < [end]", status=400)
    # handle json filter parameter
    json_filter = None
    if 'filter' in request.query and request.query[
            'filter'] is not None and len(request.query['filter']) > 0:
        try:
            json_filter = json.loads(request.query['filter'])
            # TODO verify syntax
        except (json.decoder.JSONDecodeError, ValueError):
            return web.Response(text="invalid filter parameter", status=400)
    # handle limit parameter, default is HARD, do not return unless count < limit
    if params['limit'] is not None and 'return-subset' not in request.query:
        if params['limit'] <= 0:
            return web.Response(text="[limit] must be > 0", status=400)
        event_count = await event_store.count(my_stream, params['start'],
                                              params['end'],
                                              json_filter=json_filter)
        if event_count > params['limit']:
            # too many events, just send the count parameter
            return web.json_response(data={'count': event_count, 'events': []})
    # if return-subset, limit is SOFT, return just that many events
    limit = None
    if params['limit'] is not None and 'return-subset' in request.query:
        if params['limit'] <= 0:
            return web.Response(text="[limit] must be > 0", status=400)
        limit = params['limit']
    events = await event_store.extract(my_stream, params['start'],
                                       params['end'], limit=limit,
                                       json_filter=json_filter)
    return web.json_response(data={'count': len(events), 'events': events})
async def _read(request: web.Request, json):
    """Extract data from a DataStream as chunked binary or JSON.

    Query parameters: path|id (stream), optional integer start/end,
    max-rows and decimation-level. The data store drives one of two
    callbacks: `stream_data` writes raw binary chunks to the response,
    `retrieve_data` accumulates rows into JSON-serializable blocks
    split at interval boundaries.
    """
    db: Session = request.app["db"]
    data_store: DataStore = request.app["data-store"]
    # find the requested stream
    if 'path' in request.query:
        stream = folder.find_stream_by_path(request.query['path'], db,
                                            stream_type=DataStream)
    elif 'id' in request.query:
        stream = db.query(DataStream).get(request.query["id"])
    else:
        return web.Response(text="specify an id or a path", status=400)
    if stream is None:
        return web.Response(text="stream does not exist", status=404)
    # parse optional parameters
    params = {
        'start': None,
        'end': None,
        'max-rows': None,
        'decimation-level': None
    }
    param = ""  # to appease type checker
    try:
        for param in params:
            if param in request.query:
                params[param] = int(request.query[param])
    except ValueError:
        return web.Response(text="parameter [%s] must be an int" % param,
                            status=400)
    # make sure parameters make sense
    if ((params['start'] is not None and params['end'] is not None) and
            (params['start'] >= params['end'])):
        return web.Response(text="[start] must be < [end]", status=400)
    if params['max-rows'] is not None and params['max-rows'] <= 0:
        return web.Response(text="[max-rows] must be > 0", status=400)
    if params['decimation-level'] is not None and params[
            'decimation-level'] <= 0:
        return web.Response(text="[decimation-level] must be > 0", status=400)
    # --- Binary Streaming Handler ---
    resp = None

    async def stream_data(data: np.ndarray, layout, factor):
        # lazily create the StreamResponse on the first chunk so the
        # headers can carry the actual layout and decimation factor
        nonlocal resp
        if resp is None:
            resp = web.StreamResponse(status=200,
                                      headers={
                                          'joule-layout': layout,
                                          'joule-decimation': str(factor)
                                      })
            resp.enable_chunked_encoding()
            await resp.prepare(request)
        await resp.write(data.tobytes())

    # --- JSON Handler ---
    data_blocks = []  # array of data segments
    data_segment = None
    decimation_factor = 1

    async def retrieve_data(data: np.ndarray, layout, factor):
        nonlocal data_blocks, data_segment, decimation_factor
        decimation_factor = factor
        if np.array_equal(data, pipes.interval_token(layout)):
            # interval break: close out the current segment
            if data_segment is not None:
                data_blocks.append(data_segment.tolist())
                data_segment = None
        else:
            # prepend the timestamp column so each row is [ts, v0, v1, ...]
            data = np.c_[data['timestamp'][:, None], data['data']]
            if data_segment is None:
                data_segment = data
            else:
                data_segment = np.vstack((data_segment, data))

    if json:
        callback = retrieve_data
    else:
        callback = stream_data
    # create an extraction task
    try:
        await data_store.extract(stream, params['start'], params['end'],
                                 callback=callback,
                                 max_rows=params['max-rows'],
                                 decimation_level=params['decimation-level'])
    except InsufficientDecimationError as e:
        return web.Response(text="decimated data is not available: %s" % e,
                            status=400)
    except DataError as e:
        msg = str(e)
        if 'no such stream' in msg.lower() and (
                params['decimation-level'] is not None):  # pragma: no cover
            # clean up error message when user requested a particular decimation level
            msg = "requested decimation level [%d] does not exist" % params[
                'decimation-level']
        return web.Response(text="read error: %s" % msg, status=400)
    if json:
        # put the last data_segment on
        if data_segment is not None:
            data_blocks.append(data_segment.tolist())
        return web.json_response({
            "data": data_blocks,
            "decimation_factor": decimation_factor
        })
    else:
        # NOTE(review): resp is None means the callback never fired;
        # json_response(text=..., status=400) looks like it should be
        # web.Response — verify intended behavior
        if resp is None:
            return web.json_response(text="this stream has no data",
                                     status=400)
        return resp