async def event_stream_move(session: BaseSession,
                            source: Union[EventStream, str, int],
                            destination: Union[Folder, str, int]) -> None:
    """Move an event stream into a different folder on the node.

    *source* may be an EventStream object, a numeric id, or a path string;
    *destination* may be a Folder object, a numeric id, or a path string.
    Raises errors.ApiError for any other argument type.
    """
    # map each argument onto the request parameter the server expects
    if type(source) is EventStream:
        src_param = ("src_id", source.id)
    elif type(source) is int:
        src_param = ("src_id", source)
    elif type(source) is str:
        src_param = ("src_path", source)
    else:
        raise errors.ApiError(
            "Invalid source datatype. Must be EventStream, Path, or ID")
    if type(destination) is Folder:
        dest_param = ("dest_id", destination.id)
    elif type(destination) is int:
        dest_param = ("dest_id", destination)
    elif type(destination) is str:
        dest_param = ("dest_path", destination)
    else:
        raise errors.ApiError(
            "Invalid destination datatype. Must be Folder, Path, or ID")
    await session.put("/event/move.json", dict([src_param, dest_param]))
async def data_subscribe(session: BaseSession,
                         stream: Union[DataStream, str, int]) -> Pipe:
    """Subscribe to the live output of *stream* and return a read Pipe.

    Raises errors.ApiError if a DataStream argument's layout does not match
    the server's copy, or if the stream is not currently being produced.
    """
    # retrieve the server's view of the stream
    src_stream = await data_stream_get(session, stream)
    if type(stream) is DataStream:
        # BUG FIX: compare the caller's configured layout to the server's
        # layout -- the original compared src_stream.layout to itself, so
        # this check could never fail
        if stream.layout != src_stream.layout:
            raise errors.ApiError("Input [%s] configured for [%s] but source is [%s]" % (
                stream, stream.layout, src_stream.layout))
    # make sure the stream is being produced
    if not src_stream.is_destination:
        raise errors.ApiError(
            "DataStream [%s] is not being produced, specify time bounds for historic execution" % src_stream.name)
    # replace the stub stream (from config file) with actual stream
    # do not let the buffer grow beyond 5 server chunks
    pipe = LocalPipe(src_stream.layout, name=src_stream.name,
                     stream=src_stream, write_limit=5)
    pipe.stream = src_stream
    task = asyncio.create_task(_live_reader(session, src_stream, pipe))

    async def close():
        # cancel the background reader when the pipe is closed
        task.cancel()
        try:
            await task
        except asyncio.CancelledError:
            pass

    pipe.close_cb = close
    return pipe
async def event_stream_read(session: BaseSession,
                            stream: Union[EventStream, str, int],
                            start_time: Optional[int] = None,
                            end_time: Optional[int] = None,
                            limit=None,
                            json_filter=None) -> List[Event]:
    """Read events from a stream, optionally bounded by time, count, and filter.

    Raises errors.ApiError for an invalid stream argument or a non-positive limit.
    """
    # identify the stream by object, id, or path
    if type(stream) is EventStream:
        params = {"id": stream.id}
    elif type(stream) is int:
        params = {"id": stream}
    elif type(stream) is str:
        params = {"path": stream}
    else:
        raise errors.ApiError(
            "Invalid stream datatype. Must be EventStream, Path, or ID")
    # attach whichever time bounds were provided
    for key, bound in (('start', start_time), ('end', end_time)):
        if bound is not None:
            params[key] = int(bound)
    if limit is not None:
        if limit <= 0:
            raise errors.ApiError("Limit must be > 0")
        params['limit'] = limit
        params['return-subset'] = 1
    if json_filter is not None:
        params['filter'] = json_filter
    resp = await session.get("/event/data.json", params)
    return [event_from_json(e) for e in resp["events"]]
async def _request(self, method, path, data=None, json=None, params=None, chunked=None):
    """Issue an HTTP request to the node and return the decoded JSON body.

    Returns None for non-JSON responses whose body is "OK"; raises
    errors.ApiError on any non-200 status, invalid body, or connection failure.
    """
    session = await self.get_session()
    try:
        async with session.request(method, self.url + path,
                                   data=data, params=params, json=json) as resp:
            if resp.status != 200:
                raise errors.ApiError(
                    "%s %s [%d]" % (path, await resp.text(), resp.status))
            if resp.content_type != 'application/json':
                # some endpoints reply with a bare "OK" instead of JSON
                body = await resp.text()
                if body.lower() != "ok":
                    raise errors.ApiError(
                        "Invalid node response (not json)")
                else:
                    return None
            try:
                return await resp.json()
            except ValueError:
                raise errors.ApiError("Invalid node response (not json)")
    except aiohttp.ClientError as e:
        # NOTE(review): the request targets self.url but this message reads
        # self.path -- presumably this session also has a path attribute
        # (e.g. a unix-socket session); confirm against the class definition
        raise errors.ApiError("Cannot contact node at [%s]" % self.path) from e
def get_node(name: str = "") -> BaseNode:
    """Return a TcpNode for the named saved configuration.

    An empty *name* selects the default node. Raises errors.ApiError when the
    configuration cannot be read or the node does not exist.
    """
    try:
        configs = _get_node_configs()
        if name == "":
            config = _get_default_node(configs)
        else:
            config = configs[name]
    except ValueError as e:
        # chain the original error for debuggability
        raise errors.ApiError(str(e)) from e
    except KeyError as e:
        # BUG FIX: use the requested name directly -- str(KeyError) wraps the
        # key in quotes, producing messages like "Node ['x'] is not available"
        raise errors.ApiError(
            "Node [%s] is not available, add it with [joule master add]" % name) from e
    return TcpNode(config.name, config.url, config.key, _get_cafile())
async def _run(source_node, dest_node, start, end, new, replace, source, destination):
    """Copy events from *source* on source_node to *destination* on dest_node.

    Time bounds are parsed from human-readable strings; the destination stream
    is created if it does not exist, and optionally cleared when *replace* is set.
    """
    # make sure the time bounds make sense
    if start is not None:
        try:
            start = h2ts(start)
        except ValueError:
            raise errors.ApiError("invalid start time: [%s]" % start)
    if end is not None:
        try:
            end = h2ts(end)
        except ValueError:
            raise errors.ApiError("invalid end time: [%s]" % end)
    if (start is not None) and (end is not None) and ((end - start) <= 0):
        # BUG FIX: removed the stray 'f' that rendered as "end f<time>"
        raise click.ClickException(f"Error: start {ts2h(start)} " +
                                   f"must be before end {ts2h(end)}")
    # create the destination stream if necessary
    name = destination.split('/')[-1]
    path = "/".join(destination.split('/')[:-1])
    try:
        event_stream = joule.api.EventStream(name=name)
        await dest_node.event_stream_create(event_stream, path)
    except joule.errors.ApiError:
        pass  # stream already exists
    if replace:
        await dest_node.event_stream_remove(destination, start, end)
    stream_info = await source_node.event_stream_info(source)
    event_count = stream_info.event_count
    num_copied_events = 0
    with click.progressbar(length=event_count) as bar:
        while True:
            events = await source_node.event_stream_read(source, start=start,
                                                         end=end, limit=1000)
            # remove the event id's so it inserts as a new event
            for event in events:
                event.id = None
            if len(events) == 0:
                break
            await dest_node.event_stream_write(destination, events)
            num_copied_events += len(events)
            bar.update(len(events))
            # resume reading just after the last copied event
            start = events[-1].start_time + 1
        # bring bar up to 100%
        bar.update(event_count - num_copied_events)
async def _retrieve_source(server: Union[BaseNode, str], path: str,
                           is_nilmdb: bool = False) -> DataStream:
    """Fetch the source stream from a Joule node or NilmDB server.

    Raises errors.ApiError when the stream is missing or contains no data.
    """
    if not is_nilmdb:
        src_stream = await server.data_stream_get(path)
        src_info = await server.data_stream_info(path)
    else:
        src_stream, src_info = await _retrieve_nilmdb_stream(server, path)
        if src_stream is None:
            raise errors.ApiError("The stream [%s] is not available on [%s]" % (path, server))
    # a stream with no start/end bounds holds no data
    if src_info.start is None or src_info.end is None:
        raise errors.ApiError("[%s] has no data" % path)
    return src_stream
async def annotation_create(
        session: BaseSession,
        annotation: Annotation,
        stream: Union[int, str, 'DataStream'], ) -> Annotation:
    """Create *annotation* on the given stream and return the server's copy."""
    from .data_stream import DataStream
    # annotation attributes first, then the stream identifier
    payload = dict(title=annotation.title,
                   content=annotation.content,
                   start=annotation.start,
                   end=annotation.end)
    if type(stream) is DataStream:
        payload["stream_id"] = stream.id
    elif type(stream) is int:
        payload["stream_id"] = stream
    elif type(stream) is str:
        payload["stream_path"] = stream
    else:
        raise errors.ApiError(
            "Invalid stream datatype. Must be DataStream, Path, or ID")
    resp = await session.post("/annotation.json", json=payload)
    return from_json(resp)
async def event_stream_get(session: BaseSession,
                           stream: Union[EventStream, str, int],
                           create: bool = False,
                           description: str = "",
                           event_fields=None) -> EventStream:
    """Retrieve an event stream, optionally creating it when given a path.

    Auto-creation only happens when *create* is True and *stream* is a path
    string; otherwise the original ApiError is re-raised.
    """
    data = {}
    if type(stream) is EventStream:
        data["id"] = stream.id
    elif type(stream) is int:
        data["id"] = stream
    elif type(stream) is str:
        data["path"] = stream
    else:
        raise errors.ApiError(
            "Invalid stream datatype. Must be EventStream, Path, or ID")
    try:
        resp = await session.get("/event.json", data)
    except errors.ApiError as e:
        # pass the error if the stream should not or cannot be created
        if not create or type(stream) is not str:
            raise e
        # split the path into the parent folder and the stream name
        *folders, name = stream.split('/')
        new_stream = EventStream(name, description, event_fields)
        return await event_stream_create(session, new_stream, '/'.join(folders))
    return from_json(resp)
async def data_read(session: BaseSession,
                    stream: Union[DataStream, str, int],
                    start: Optional[int] = None,
                    end: Optional[int] = None,
                    max_rows: Optional[int] = None) -> Pipe:
    """Read historic data from *stream* between *start* and *end* via a Pipe."""
    # make sure the input is compatible
    src_stream = await data_stream_get(session, stream)
    if type(stream) is DataStream:
        # BUG FIX: compare the caller's configured layout to the server's
        # layout -- the original compared src_stream.layout to itself, so
        # this check could never fail
        if stream.layout != src_stream.layout:
            raise errors.ApiError("Input [%s] configured for [%s] but source is [%s]" % (
                stream, stream.layout, src_stream.layout))
    # replace the stub stream (from config file) with actual stream
    pipe = LocalPipe(src_stream.layout, name=src_stream.name,
                     stream=src_stream, write_limit=5)
    pipe.stream = src_stream
    task = asyncio.create_task(
        _historic_reader(session, src_stream, pipe, start, end, max_rows))

    async def close():
        # cancel the background reader when the pipe is closed
        task.cancel()
        try:
            await task
        except asyncio.CancelledError:
            pass

    pipe.close_cb = close
    return pipe
async def _get_intervals(server: Union[BaseNode, str],
                         my_stream: DataStream,
                         path: str,
                         start: Optional[int],
                         end: Optional[int],
                         is_nilmdb: bool = False) -> List[Interval]:
    """Return the data intervals for a stream from a Joule or NilmDB server."""
    if not is_nilmdb:
        return await server.data_intervals(my_stream, start, end)
    # NilmDB: query the /stream/intervals endpoint directly
    params = {"path": path}
    if start is not None:
        params['start'] = start
    if end is not None:
        params['end'] = end
    url = "{server}/stream/intervals".format(server=server)
    async with aiohttp.ClientSession() as session:
        async with session.get(url, params=params) as resp:
            if not resp.status == 200:
                raise errors.ApiError(
                    "unable to retrieve intervals for [%s]" % path)
            body = await resp.text()
            if body == '':
                return []
            # one JSON document per line
            return [json.loads(line) for line in body.strip().split("\n")]
async def _send_lumen_key(key: str, identifier: str, local_port: int,
                          local_name: str, local_scheme: str, local_uri: str,
                          cafile: str, lumen_params: Dict) -> str:  # pragma: no cover
    """Register an API key with a Lumen server and return the identifier."""
    # if the identifier is an IP address or a domain name, turn it into a URL
    if not identifier.startswith("http"):
        url = await detect_url(identifier + "/lumen")
        if url is None:
            raise errors.ApiError("cannot connect to [%s] on port 80 or 443" % identifier)
    else:
        url = identifier
    # no key needed to access the lumen /nilms end point
    session = TcpSession(url, "stub_key", cafile)
    params = {'api_key': key,
              'port': local_port,
              'name': local_name,
              'base_uri': local_uri,
              'scheme': local_scheme}
    if lumen_params is not None:
        params = {**params, **lumen_params}
    # TODO enable this for CA checking on Lumen server
    # if cafile != "":
    #     params["name_is_host"] = 1
    try:
        # CLEANUP: removed a redundant "except ApiError as e: raise e" clause;
        # errors still propagate and the session is still closed by finally
        await session.post("/nilms.json", json=params)
    finally:
        await session.close()
    return identifier
async def _send_node_key(key: str, identifier: str, local_port: int,
                         local_name: str, local_scheme: str, local_uri: str,
                         cafile: str) -> str:  # pragma: no cover
    """Post this node's key to a remote node and return the remote's name."""
    # if the identifier is an IP address or a domain name, turn it into a URL
    if identifier.startswith("http"):
        url = identifier
    else:
        url = await detect_url(identifier, 443)
        if url is None:
            raise errors.ApiError("cannot connect to [%s] on port 443" % identifier)
    session = TcpSession(url, key, cafile)
    params = dict(key=key,
                  port=local_port,
                  name=local_name,
                  base_uri=local_uri,
                  scheme=local_scheme)
    if cafile != "":
        # presumably tells the server to treat the name as a certificate
        # hostname -- confirm against the /follower.json handler
        params["name_is_host"] = 1
    resp = await session.post("/follower.json", json=params)
    await session.close()
    return resp['name']
async def annotation_get(session: BaseSession,
                         stream: Union['DataStream', str, int],
                         start: Optional[int],
                         end: Optional[int]) -> List[Annotation]:
    """Fetch the annotations on *stream*, optionally restricted by time bounds."""
    from .data_stream import DataStream
    query = {}
    # attach whichever time bounds were provided
    for key, bound in (("start", start), ("end", end)):
        if bound is not None:
            query[key] = bound
    if type(stream) is DataStream:
        query["stream_id"] = stream.id
    elif type(stream) is int:
        query["stream_id"] = stream
    elif type(stream) is str:
        query["stream_path"] = stream
    else:
        raise errors.ApiError(
            "Invalid stream datatype. Must be DataStream, Path, or ID")
    resp = await session.get("/annotations.json", query)
    return [from_json(item) for item in resp]
async def _request(self, method, path, data=None, json=None, params=None, chunked=None):
    # Abstract transport hook: concrete session subclasses override this to
    # perform the actual HTTP request; the base implementation always raises.
    raise errors.ApiError("Implement in child class")
def _validate_event_fields( self, fields: Optional[Dict[str, str]]) -> Dict[str, str]: # make sure values are either string or numeric if fields is None: return {} for field in fields: if fields[field] not in ['string', 'numeric']: raise errors.ApiError( "invalid event field type, must be numeric or string") return fields
async def data_write(
        session: BaseSession,
        stream: Union[DataStream, str, int],
        start_time: Optional[int] = None,
        end_time: Optional[int] = None,
) -> Pipe:
    """Open a write Pipe to an existing stream.

    The stream must already exist (this does not create streams). When time
    bounds are supplied, that data range is deleted before writing begins.
    """
    # retrieve the destination stream object
    dest_stream = await data_stream_get(session, stream)
    if start_time is not None or end_time is not None:
        await data_delete(session, dest_stream, start_time, end_time)
    if type(stream) is DataStream:
        if dest_stream.layout != stream.layout:
            raise errors.ApiError("DataStream [%s] configured for [%s] but destination is [%s]" % (
                stream.name, stream.layout, dest_stream.layout))
        # raise a warning if the element names do not match
        existing = [e.name for e in dest_stream.elements]
        requested = [e.name for e in stream.elements]
        if existing != requested:  # pragma: no cover
            log.warning("[%s] elements do not match the existing stream" % stream.name)
    # make sure the stream is not currently produced
    if dest_stream.is_destination:
        raise errors.ApiError("DataStream [%s] is already being produced" % dest_stream.name)

    # all checks passed, subscribe to the output;
    # close() waits for the sender task (bound below) to finish
    async def close():
        await task

    pipe = LocalPipe(dest_stream.layout, name=dest_stream.name,
                     stream=dest_stream, close_cb=close,
                     debug=False, write_limit=5)
    task = asyncio.create_task(_send_data(session, dest_stream, pipe))
    return pipe
async def _send_nilmdb_data(url, params, generator, dtype, session):
    # Stream binary chunks from *generator* to a NilmDB server, advancing the
    # start/end interval parameters chunk by chunk.
    bstart = params['start']
    async for data in generator:
        params['start'] = bstart
        # interpret the raw bytes with the stream's numpy dtype so the last
        # timestamp in this chunk can be read
        np_data = np.frombuffer(data, dtype)
        # end bound is one past the final timestamp in the chunk
        bend = int(np_data['timestamp'][-1] + 1)
        params['end'] = bend
        async with session.put(url, params=params, data=data) as dest_response:
            if dest_response.status != 200:
                msg = await dest_response.text()
                raise errors.ApiError("Error writing to destination: %s" % msg)
        # next chunk begins where this one ended
        bstart = bend
async def event_stream_delete(session: BaseSession,
                              stream: Union[EventStream, str, int]) -> None:
    """Delete an event stream identified by object, id, or path."""
    if type(stream) is EventStream:
        payload = {"id": stream.id}
    elif type(stream) is int:
        payload = {"id": stream}
    elif type(stream) is str:
        payload = {"path": stream}
    else:
        raise errors.ApiError(
            "Invalid stream datatype. Must be EventStream, Path, or ID")
    await session.delete("/event.json", payload)
async def data_stream_delete(session: BaseSession,
                             stream: Union[DataStream, str, int]):
    """Delete a data stream identified by object, id, or path."""
    if type(stream) is DataStream:
        payload = {"id": stream.id}
    elif type(stream) is int:
        payload = {"id": stream}
    elif type(stream) is str:
        payload = {"path": stream}
    else:
        raise errors.ApiError(
            "Invalid stream datatype. Must be DataStream, Path, or ID")
    await session.delete("/stream.json", payload)
async def module_logs(session: BaseSession,
                      module: Union[Module, str, int]) -> List[str]:
    """Retrieve the log lines for a module identified by object, name, or id."""
    if type(module) is Module:
        query = {"id": module.id}
    elif type(module) is str:
        query = {"name": module}
    elif type(module) is int:
        query = {"id": module}
    else:
        raise errors.ApiError(
            "Invalid module datatype. Must be Module, Name, or ID")
    return await session.get("/module/logs.json", query)
async def proxy_get(session: BaseSession,
                    proxy: Union[Proxy, str, int]) -> Proxy:
    """Retrieve a proxy record identified by object, name, or id."""
    if type(proxy) is Proxy:
        query = {"id": proxy.id}
    elif type(proxy) is str:
        query = {"name": proxy}
    elif type(proxy) is int:
        query = {"id": proxy}
    else:
        raise errors.ApiError(
            "Invalid proxy datatype. Must be Proxy, Name, or ID")
    resp = await session.get("/proxy.json", query)
    return from_json(resp)
async def folder_get(session: BaseSession,
                     folder: Union[Folder, str, int]) -> Folder:
    """Retrieve a folder identified by object, id, or path."""
    if type(folder) is Folder:
        query = {"id": folder.id}
    elif type(folder) is int:
        query = {"id": folder}
    elif type(folder) is str:
        query = {"path": folder}
    else:
        raise errors.ApiError(
            "Invalid folder datatype. Must be Folder, Path, or ID")
    resp = await session.get("/folder.json", query)
    return from_json(resp)
async def _request(self, method, path, data=None, json=None, params=None, chunked=None):
    """Issue an HTTP(S) request to the node with retry on server errors.

    Retries up to 8 times (2s apart) on status codes above 500. Returns the
    decoded JSON body, or None for non-JSON "OK" responses. Raises
    errors.ApiError for HTTP errors, certificate failures, connection
    failures, or retry exhaustion.
    """
    session = await self.get_session()
    try:
        # logging.warning("requesting: "+self.url+path)
        i = 0
        MAX_RETRY_COUNT = 8
        RETRY_DELAY = 2
        while i < MAX_RETRY_COUNT:
            async with session.request(method, self.url + path,
                                       data=data, params=params,
                                       json=json, chunked=chunked,
                                       ssl=self.ssl_context) as resp:
                if resp.status != 200:
                    msg = await resp.text()
                    # NOTE(review): "> 500" excludes 500 itself from the
                    # retry path -- confirm whether that is intentional
                    if resp.status > 500:
                        print("API Error: [%d], retrying %d/%d" % (resp.status, i, MAX_RETRY_COUNT))
                        i += 1
                        await asyncio.sleep(RETRY_DELAY)
                        continue  # retry
                    raise errors.ApiError("%s [%d]" % (msg, resp.status))
                if resp.content_type != 'application/json':
                    # some endpoints reply with a bare "OK" instead of JSON
                    body = await resp.text()
                    if body.lower() != "ok":
                        raise errors.ApiError("Invalid node response: %s" % body)
                    else:
                        return None
                try:
                    # logging.warning("\trequest is done")
                    return await resp.json()
                except ValueError:
                    raise errors.ApiError(
                        "Invalid node response (not json)")
    except ssl.CertificateError as e:
        raise errors.ApiError(
            "the specified certificate authority did not validate this server"
        )
    except aiohttp.ClientError as e:
        raise errors.ApiError("Cannot contact node at [%s]" % self.url) from e
    # reached only when every retry attempt failed; resp is still bound
    # from the final loop iteration
    raise errors.ApiError("API Error [%d]: max retry count exceeded" % resp.status)
async def event_stream_create(session: BaseSession,
                              stream: EventStream,
                              folder: Union[Folder, str, int]) -> EventStream:
    """Create *stream* inside *folder* and return the server's copy."""
    payload = {"stream": stream.to_json()}
    if type(folder) is Folder:
        payload["dest_id"] = folder.id
    elif type(folder) is int:
        payload["dest_id"] = folder
    elif type(folder) is str:
        payload["dest_path"] = folder
    else:
        raise errors.ApiError(
            "Invalid folder datatype. Must be Folder, Path, or ID")
    resp = await session.post("/event.json", json=payload)
    return from_json(resp)
async def data_stream_get(session: BaseSession,
                          stream: Union[DataStream, str, int]) -> DataStream:
    """Retrieve a data stream identified by object, id, or path."""
    if type(stream) is DataStream:
        query = {"id": stream.id}
    elif type(stream) is int:
        query = {"id": stream}
    elif type(stream) is str:
        query = {"path": stream}
    else:
        raise errors.ApiError(
            "Invalid stream datatype. Must be DataStream, Path, or ID")
    resp = await session.get("/stream.json", query)
    return from_json(resp)
async def event_stream_info(
        session: BaseSession,
        stream: Union[EventStream, str, int]) -> EventStreamInfo:
    """Return the data-info record for an event stream."""
    if type(stream) is EventStream:
        query = {"id": stream.id}
    elif type(stream) is int:
        query = {"id": stream}
    elif type(stream) is str:
        query = {"path": stream}
    else:
        raise errors.ApiError(
            "Invalid stream datatype. Must be EventStream, Path, or ID")
    resp = await session.get("/event.json", query)
    return info_from_json(resp['data_info'])
def delete_node(node: Union[str, BaseNode]) -> None:
    """Remove a node from the saved configuration.

    Accepts either a node name or a node object. If the deleted node was the
    default, a new default is selected (or cleared if none remain).
    """
    configs = _get_node_configs()
    name = node if type(node) is str else node.name
    if name not in configs:
        raise errors.ApiError("Node [%s] does not exist" % name)
    del configs[name]
    _write_node_configs(configs)
    # if this is the default node, pick a new one
    try:
        if _get_default_node(configs).name == name:
            set_default_node("")
    except ValueError:
        # this was the last node, no other nodes available
        pass
async def folder_delete(session: BaseSession,
                        folder: Union[Folder, str, int],
                        recursive: bool = False) -> None:
    """Delete a folder; recursive=True also removes its contents."""
    # the server expects the flag as 0/1
    payload = {"recursive": 1 if recursive else 0}
    if type(folder) is Folder:
        payload["id"] = folder.id
    elif type(folder) is int:
        payload["id"] = folder
    elif type(folder) is str:
        payload["path"] = folder
    else:
        raise errors.ApiError(
            "Invalid folder datatype. Must be Folder, Path, or ID")
    await session.delete("/folder.json", payload)
async def data_delete(session: BaseSession,
                      stream: Union[DataStream, str, int],
                      start: Optional[int] = None,
                      end: Optional[int] = None):
    """Delete data from *stream*, optionally restricted to the given bounds."""
    query = {}
    # attach whichever time bounds were provided
    for key, bound in (('start', start), ('end', end)):
        if bound is not None:
            query[key] = int(bound)
    if type(stream) is DataStream:
        query['id'] = stream.id
    elif type(stream) is str:
        query['path'] = stream
    elif type(stream) is int:
        query['id'] = stream
    else:
        raise errors.ApiError(
            "Invalid stream datatype. Must be DataStream, Path, or ID")
    await session.delete("/data", query)