def __init__(self, entity_id: int, aggregation_length: int, aggregation_type: str, measurement_id: int):
    """Initialise the aggregating handler and resolve its measurement.

    Stores *measurement_id* and eagerly loads the matching SeriesAttribute
    row so later calls do not need to query for it again.
    """
    super().__init__(entity_id, aggregation_length, aggregation_type)
    self._measurement_id = measurement_id
    session = Session()
    self._measurement = get_one(session, SeriesAttribute, id=measurement_id)
def run_assertion(data, entity_type_id, attribute_id=None):
    """Fail if ``data['name']`` collides with an existing attribute name.

    Collects the names of all attributes of the entity type (excluding the
    attribute identified by *attribute_id*, so an update does not collide
    with itself) and raises ValueError on a duplicate.

    NOTE(review): ``cls`` is taken from the enclosing scope, not a
    parameter — confirm this runs inside a classmethod/closure.
    """
    taken = {
        attr.name
        for attr in get_all(Session(), cls, entity_type_id_fk=entity_type_id)
        if attr.id != attribute_id
    }
    if 'name' in data and data['name'] in taken:
        raise ValueError("attribute {} exists for entity type {}".format(
            data['name'], entity_type_id))
def __enter__(self):
    """Open a session, load the entity, and cache measurement names.

    Builds ``self._entity_measurement_names`` mapping each measurement id
    in ``self._measurements`` to its SeriesAttribute name, querying each
    distinct id only once.
    """
    self._session = Session()
    self._entity = get_one(self._session, Entity, id=self._entity_id)
    self._entity_measurement_names = {}
    for mid in self._measurements:
        # skip ids already resolved (duplicates cost no extra query)
        if mid not in self._entity_measurement_names:
            attribute = get_one(self._session, SeriesAttribute, id=mid)
            self._entity_measurement_names[mid] = attribute.name
    return self
def login():
    """Authenticate a user and issue a session token.

    Reads ``username``/``password`` from the JSON body; on a bcrypt match
    a new token is generated and persisted as a Session document.

    Returns:
        ({'token': ...}, 200) on success,
        ({'message': ...}, 401) on bad credentials.
    """
    payload = request.get_json()
    username = payload.get('username')
    password = payload.get('password')
    user = User.objects(username=username).first()
    # guard clause: unknown user or wrong password -> 401
    if not user or not bcrypt.check_password_hash(user.password, password):
        return {'message': 'Invalid username or password'}, 401
    token = get_token()
    Session(username=username, session_id=token).save()
    return {'token': token}, 200
def __init__(self, request_id: int, payload: dict):
    """Initialise the request handler and resolve its database objects.

    Loads the SeriesAttribute and Entity referenced by the payload
    (raising ValueError via ``get_one`` when missing), then runs the
    subclass assertions.
    """
    super().__init__(request_id, payload)
    self._last_data_timestamp = 0
    session = Session()
    self._measurement = get_one(
        session, SeriesAttribute,
        id=self._raw_payload.measurement_id, exception_cls=ValueError)
    self._entity = get_one(
        session, Entity,
        id=self._raw_payload.node_id, exception_cls=ValueError)
    self._run_assertions()
def __init__(self, request_id: int, payload: dict):
    """Initialise the handler and build one sub-handler per requested datum.

    Resolves the Entity from the payload (ValueError when missing) and
    creates a measurement handler for every entry in
    ``requested_data``, then runs the subclass assertions.
    """
    super().__init__(request_id, payload)
    req = self._raw_payload
    self._entity = get_one(Session(), Entity, id=req.node_id,
                           exception_cls=ValueError)
    self._requested_data = [
        create_measurement_handler(req.node_id, req.aggregation_length,
                                   req.aggregation_type, item)
        for item in req.requested_data
    ]
    self._run_assertions()
def check_all_alerts(
        on_state_change: Iterable[Callable[[Alert, str], Any]]) -> None:
    """Evaluate every enabled alert and notify callbacks on state changes.

    For each enabled Alert, runs ``check_alert`` and fires every callback
    in *on_state_change* with ``(alert, str(status))`` when the alert
    transitions into the alerting state, recovers from it, or is checked
    for the first time (``last_check_status is None``) with an OK result.
    The new state is persisted via ``session.commit()``.

    Fixes vs. original: side-effect list comprehensions replaced with
    plain loops; the two sequential ``if``s (which were mutually
    exclusive) made an explicit if/elif on the pre-update status; the
    session is now closed even if a check raises.
    """
    session = Session()
    try:
        for alert in get_all(session, Alert, is_enabled=True):
            status = check_alert(alert)
            previous = alert.last_check_status
            if status and not previous:
                # entered alerting state (or first check found a problem)
                for callback in on_state_change:
                    callback(alert, str(status))
                alert.last_check_status = True
            elif previous is None or (not status and previous):
                # first-ever OK check, or recovered from alerting
                for callback in on_state_change:
                    callback(alert, str(status))
                alert.last_check_status = False
        session.commit()
    finally:
        session.close()
def push_new_measurements(directory, pattern):
    """Import new measurement files for every entity.

    For each Entity, selects files in *directory* whose names match
    ``pattern.format(ID=node.id)`` and whose mtime is at or after the
    node's ``last_data_fetch_ts``, parses them oldest-first, writes the
    generated points to Influx, and advances ``last_data_fetch_ts``
    (committed per file so progress survives a later failure).

    Fixes vs. original: paths built with ``os.path.join`` instead of
    string concatenation; the per-node pattern is formatted once; the
    session is closed via try/finally even on unexpected errors.
    """
    session = Session()
    try:
        files = os.listdir(directory)
        for node in get_all(session, Entity):
            node_pattern = pattern.format(ID=node.id)
            to_read = [
                os.path.join(directory, f)
                for f in files
                if re.match(node_pattern, f)
                and os.path.getmtime(os.path.join(directory, f)) >= node.last_data_fetch_ts
            ]
            # oldest first, so last_data_fetch_ts advances monotonically
            for file_path in sorted(to_read, key=os.path.getmtime):
                try:
                    InfluxWriter().write(
                        PointGenerator(node, FileParser(node.id, file_path))
                        .generate_points())
                    node.last_data_fetch_ts = int(os.path.getmtime(file_path))
                    session.commit()
                except Exception as err:
                    # deliberate best-effort: log and continue with next file
                    logging.error(err)
    finally:
        session.close()
def _assert_objects_were_not_created(data, entity_type_id):
    """Raise ValueError if any tag/meta/series name in *data* already exists.

    Looks up the existing attribute names of the entity type for each of
    the three attribute kinds and checks every corresponding entry in
    ``data`` (``data.get('tags'/'meta'/'series', [])``) against them.

    Fixes vs. original: the three copy-pasted query comprehensions are
    collapsed into one loop over a kind->model table; names are kept in
    sets for O(1) membership; the session is now closed (the original
    leaked it, unlike the other session users in this module).
    """
    attribute_models = {
        'tags': TagAttribute,
        'meta': MetaAttribute,
        'series': SeriesAttribute,
    }
    session = Session()
    try:
        existing_objects = {
            kind: {obj.name for obj in get_all(
                session, model, entity_type_id_fk=entity_type_id)}
            for kind, model in attribute_models.items()
        }
    finally:
        session.close()
    for object_type in ('tags', 'meta', 'series'):
        for obj in data.get(object_type, []):
            if obj in existing_objects[object_type]:
                raise ValueError('{} is in existing {}'.format(
                    obj, object_type))
def __init__(self):
    """Create a database session for this instance to use."""
    # NOTE(review): the session is never closed here; presumably the
    # owner of this object manages its lifetime — confirm with callers.
    self.session = Session()
def __init__(self):
    """Open a database session and build the initial cached tree."""
    self.session = Session()
    self._cached_tree = {}
    self._update_cache()
def __init__(self, entity_id, filename):
    """Resolve the Entity row for *entity_id* and remember *filename*."""
    session = Session()
    self._entity = get_one(session, Entity, id=entity_id)
    self._filename = filename