def set_wpa_mode():
    logger.debug("setting all wlan into wpa mode")
    session = Session()

    # get the info for the wpa_supplicant file
    wifi_defs = session.query(WifiDefinition).filter(WifiDefinition.wifi_mode != 'ap').all()
    networks = []
    for wifi in wifi_defs:
        new_network = {}
        new_network['ssid'] = wifi.wifi_name
        new_network['password'] = wifi.wifi_password
        networks.append(new_network)

    iptables_file(None, None, flush_only=True)
    interface_file()
    wpa_supplicant_file(networks)
    dhcpcd_file()

    install_root = session.query(RoutingDefiniton.route).filter_by(name='install_root').scalar()
    session.close()

    path = install_root + 'farm_device/network/wpa_script.sh'
    command = ['sudo', 'sh', path]
    subprocess.check_call(command)
    return
def scan_wifi(interface=None):
    # if no interface is given, try to find an interface in the database
    # that has its state set to 'dhcp'
    if interface is None:
        session = Session()
        interfaces = session.query(InterfaceDefinition).all()
        for x in interfaces:
            if not x.interface.startswith('eth'):
                if x.state == 'dhcp':
                    interface = x.interface
        session.close()
        # exit if still no interface
        if interface is None:
            logger.warning("No interface available to scan wifi networks")
            return []

    # scan the interface for networks
    command = ['sudo', 'iwlist', interface, 'scan']
    # check_output returns bytes; decode so the string parsing below works
    output = subprocess.check_output(command).decode()
    index = output.find('ESSID:"')
    ssid = []
    while index != -1:
        stop = output.find('"\n', index)
        ssid.append(output[index + 7: stop])
        output = output[stop + 2:]
        index = output.find('ESSID:"')
    return ssid
class SimulatorSource(PointSource):
    def __init__(self, entity_id, measurements):
        self._entity_id = entity_id
        self._measurements = measurements

    def __enter__(self):
        self._session = Session()
        self._entity = get_one(self._session, Entity, id=self._entity_id)
        self._entity_measurement_names = {}
        for measurement_id in self._measurements:
            if measurement_id not in self._entity_measurement_names:
                measurement_name = get_one(self._session, SeriesAttribute, id=measurement_id).name
                self._entity_measurement_names[measurement_id] = measurement_name
        return self

    def __exit__(self, *args):
        self._entity = None
        self._entity_measurement_names = {}
        self._session.close()

    def get_values(self) -> Iterable[dict]:
        result = []
        for measurement_id in self._measurements:
            series_measurements = self._measurements[measurement_id]
            for (point_ts, point_val) in series_measurements:
                result.append({
                    'timestamp': convert_timestamp(create_data_timestamp(point_ts)),
                    'measurements': [(self._entity_measurement_names[measurement_id],
                                      float(point_val))]
                })
        return result

    def get_entity(self) -> Entity:
        return self._entity
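# A minimal usage sketch for SimulatorSource (an assumption, not code from the module):
# `measurements` is taken to map a SeriesAttribute id to an iterable of
# (timestamp, value) pairs, and the entity id 1 / series id 7 are hypothetical values
# that would have to exist in the database.
simulated = {
    7: [(1500000000, 21.5), (1500000600, 21.9)],
}
with SimulatorSource(entity_id=1, measurements=simulated) as source:
    entity = source.get_entity()
    for point in source.get_values():
        print(entity.id, point['timestamp'], point['measurements'])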
def create(self):
    self.timestamp = time()
    session = Session()
    device = session.query(Device).first()

    self.id = device.id
    self.software_version = device.software_version
    self.interior_temp = temperature(device.interior_sensor)
    self.exterior_temp = temperature(device.exterior_sensor)
    self.device_temp = getCPUtemperature()
    self.uptime = get_uptime_seconds()
    self.current_time = datetime.datetime.now()
    self.load_avg = os.getloadavg()

    storage = get_storage()
    self.disk_total = storage['disk_total']
    self.disk_used = storage['disk_used']
    self.disk_free = storage['disk_free']

    session.close()
    return
def on_update(self, did, msg):
    session = Session()
    device = session.query(Device).filter_by(id=did).first()
    if device:
        if device.user_configured:
            update = pickle.loads(msg[0])
            device.interior_temp = update['interior_temp']
            device.exterior_temp = update['exterior_temp']
            session.commit()
            session.close()

            values = {'interior_temp': update['interior_temp'],
                      'exterior_temp': update['exterior_temp']}
            rrd = RRD(did, 'device')
            logger.debug("updating RRD with {0} at {1}".format(values, update['timestamp']))
            rrd.update(values, timestamp=update['timestamp'])
            return "updated"
        else:
            session.close()
            return 'device_unconfigured'
    else:
        session.close()
        return 'no_device'
def backup(self):
    backup_index = create_backup()
    session = Session()
    backup = session.query(Backup).filter_by(index=backup_index).first()
    standalone_configuration = session.query(SystemSetup.standalone_configuration).scalar()

    if self.device_state == 'connected':
        failed_attempts = 0
        # don't send the file if we are a combined configuration
        if standalone_configuration:
            logger.info("Sending device backup file to FarmMonitor")
            file_size = os.path.getsize(backup.filepath)
            bytes_sent = 0
            zip_file = open(backup.filepath, 'rb')

            while bytes_sent < file_size and failed_attempts < 3:
                zip_file.seek(bytes_sent)
                data = zip_file.read(512)
                logger.debug("Sending {0} bytes of backup file".format(len(data)))

                message = FileMessage(self.device_id, 'farm_monitor', 'backup')
                message.set_file(backup.filepath, data)
                reply = self.send([pickle.dumps(message)])

                if reply:
                    # only mark the data received if a reply is received
                    bytes_sent += len(data)
                    failed_attempts = 0
                    reply_message = pickle.loads(reply[1])
                    logger.debug("Update reply: {0}".format(reply_message.reply))
                else:
                    logger.warning("Failed attempt in sending backup file")
                    failed_attempts += 1
            zip_file.close()

        # only send the database object if the file was sent correctly
        if failed_attempts == 0:
            logger.info("Sending device backup database object to FarmMonitor")
            message = FileMessage(self.device_id, 'farm_monitor', 'backup')
            message.set_db_object(pickle.dumps(backup))
            reply = self.send([pickle.dumps(message)])
            if reply:
                reply_message = pickle.loads(reply[1])
                logger.debug("Update response: {0}".format(reply_message.reply))
        else:
            logger.warning("Too many failed attempts. Backup failed to send to Farm Monitor")

    session.close()
    return
def __init__(self, context, endpoint, service):
    """Overridden initializer for MDPWorker.

    Adds the device_timer to manage connected devices
    """
    session = Session()
    session.query(Device).update({Device.connected: False})
    session.commit()
    session.close()

    self.device_timer = None

    super(DeviceServiceManager, self).__init__(context, endpoint, service)
    return
def device_watcher(self):
    # iterate over a copy so entries can be removed while looping
    for device in list(self._connected_devices.values()):
        if not device.is_alive():
            session = Session()
            session.query(Device).filter_by(id=device.id) \
                .update({Device.connected: False})
            session.commit()
            session.close()
            device.shutdown()
            del self._connected_devices[device.id]
    return
def create(self):
    session = Session()
    grainbins = session.query(Grainbin).all()
    for grainbin in grainbins:
        self.grainbins.append(GrainbinModel(grainbin, read_temperature=False))
    session.close()
    return
def on_create(self, did, msg):
    imported_device = pickle.loads(msg[0])

    device = Device(imported_device['id'], version=imported_device['version'])
    device.connected = True
    device.user_configured = False
    device.database_service = imported_device['database_service']
    device.device_service = imported_device['device_service']
    device.grainbin_service = imported_device['grainbin_service']
    device.grainbin_count = imported_device['grainbin_count']

    session = Session()
    session.add(device)

    if device.grainbin_service:
        grainbins = []
        for x in range(device.grainbin_count):
            grainbin_id = device.id + '.' + str(x).zfill(2)
            grainbin = Grainbin(grainbin_id, device.id, x)
            grainbins.append(grainbin)
        session.add_all(grainbins)

    session.commit()
    session.close()
    return "added"
def subscriber_manager(context):
    global subscriber
    global publisher
    global subscriber_topics

    session = Session()

    # Set up forwarder device
    logger.debug("Configuring forwarder device")
    forwarder_subscriber_address = session.query(RoutingDefiniton.route) \
        .filter_by(name="internal_pub") \
        .scalar()
    forwarder_publisher_address = session.query(RoutingDefiniton.route) \
        .filter_by(name="internal_sub") \
        .scalar()

    forwarder = ProcessDevice(zmq.FORWARDER, zmq.SUB, zmq.PUB)
    forwarder.connect_in(forwarder_subscriber_address)
    forwarder.setsockopt_in(zmq.SUBSCRIBE, "")
    logger.debug("forwarder connect in: {0}".format(forwarder_subscriber_address))
    forwarder.bind_out(forwarder_publisher_address)
    logger.debug("forwarder bind out: {0}".format(forwarder_publisher_address))

    # Set up subscriber listening to Farm-Monitor server
    subscriber_address = session.query(RoutingDefiniton.route)\
        .filter_by(name="subscriber_address").scalar()
    subscriber_port = session.query(RoutingDefiniton.route)\
        .filter_by(name="subscriber_port").scalar()
    subscriber_address = "tcp://" + subscriber_address + ":" + subscriber_port
    device_id = session.query(HardwareDefinition.serial_number).scalar()
    subscriber_topics.append(device_id)

    subscriber = Subscriber(context, subscriber_address, subscriber_topics)
    subscriber.subscriber.on_recv(callback=server_subscriber_recv)
    logger.debug("Subscriber listening to: {0} topics: {1}".format(subscriber_address,
                                                                   subscriber_topics))

    # Set up publisher to forward messages from Farm-Monitor to internal
    publisher_address = session.query(RoutingDefiniton.route)\
        .filter_by(name="internal_pub").scalar()
    publisher = Publisher(context, publisher_address)
    logger.debug("Publisher configured to: {0}".format(publisher_address))

    session.close()

    try:
        logger.info("starting forwarder, Subscriber, and Publisher")
        # Start the forwarder
        forwarder.start()
        IOLoop.instance().start()
    except KeyboardInterrupt:
        logger.info("stopping forwarder, Subscriber, and Publisher")
        IOLoop.instance().stop()
        subscriber.shutdown()
        publisher.shutdown()
    return
def add_wifi_network(wifi_name, wifi_password, interface=None):
    session = Session()
    if interface is None:
        interfaces = session.query(InterfaceDefinition).all()
        for x in interfaces:
            if x.interface != 'eth0':
                if x.state == 'dhcp':
                    interface = x.interface
        if interface is None:
            logger.error("No interface available to add new wifi network")
            session.close()
            return None

    # have an interface. now create a WifiDefinition entry
    new_wifi = WifiDefinition()
    new_wifi.wifi_name = wifi_name
    new_wifi.wifi_password = wifi_password
    new_wifi.wifi_mode = 'dhcp'
    new_wifi.interface = interface

    session.add(new_wifi)
    session.commit()
    session.close()
    return new_wifi
def set_software_info(software_version, device_service=None, grainbin_service=None):
    """
    Set the software version info into the SoftwareDefinition table.
    software_version is a string representing what revision of software
    device_service is a Boolean indicating if the device service is enabled
    grainbin_service is a Boolean indicating if the grainbin service is enabled
    """
    logger.debug("setting version: {0} device: {1} grainbin: {2}".format(software_version,
                                                                         device_service,
                                                                         grainbin_service))
    session = Session()
    try:
        sd = session.query(SoftwareDefinition).one()
        sd.software_version = software_version
        if device_service is not None:
            sd.device_service = device_service
        if grainbin_service is not None:
            sd.grainbin_service = grainbin_service
    except NoResultFound:
        sd = SoftwareDefinition()
        sd.software_version = software_version
        if device_service is not None:
            sd.device_service = device_service
        if grainbin_service is not None:
            sd.grainbin_service = grainbin_service
        session.add(sd)

    session.commit()
    session.close()
    return
def _backup_database(filepath):
    table_classes = Base._decl_class_registry.values()
    session = Session()

    for table_class in table_classes:
        if hasattr(table_class, '__tablename__'):
            query = session.query(table_class)
            serialized_data = dumps(query.all())
            new_filepath = filepath + 'database/' + table_class.__tablename__
            with open(new_filepath, 'w') as text_file:
                text_file.write(serialized_data)

    session.close()
    return
def __init__(self, entity_id: int, aggregation_length: int, aggregation_type: str,
             measurement_id: int):
    super().__init__(entity_id, aggregation_length, aggregation_type)
    self._measurement_id = measurement_id
    self._measurement = get_one(Session(), SeriesAttribute, id=measurement_id)
def __init__(self, context, identity):
    self.socket = context.socket(zmq.DEALER)
    self.socket.identity = identity.encode('ascii')
    self.socket.setsockopt(zmq.LINGER, 0)

    session = Session()
    dealer_internal = session.query(RoutingDefiniton.route) \
        .filter_by(name="internal_dealer") \
        .scalar()
    session.close()

    self.socket.connect(dealer_internal)
    self.poll = zmq.Poller()
    self.poll.register(self.socket, zmq.POLLIN)
    return
def __init__(self, context):
    socket = context.socket(zmq.ROUTER)
    socket.setsockopt(zmq.LINGER, 0)
    identity = 'router'
    socket.identity = identity.encode('ascii')

    session = Session()
    router_internal = session.query(RoutingDefiniton.route) \
        .filter_by(name="internal_router") \
        .scalar()
    session.close()

    socket.bind(router_internal)

    ioloop = IOLoop.instance()
    self.internal_router = ZMQStream(socket, ioloop)
    self.internal_router.on_recv(callback=self.router_recv)
    return
def run_assertion(data, entity_type_id, attribute_id=None):
    attributes = [
        m.name
        for m in get_all(Session(), cls, entity_type_id_fk=entity_type_id)
        if m.id != attribute_id
    ]
    if 'name' in data and data['name'] in attributes:
        raise ValueError("attribute {} exists for entity type {}".format(
            data['name'], entity_type_id))
class MetaAttributeHandler(Handler):
    def __init__(self):
        self.session = Session()

    def get(self, entity_type_id, ident=None):
        entity_type = get_one(self.session, EntityType, id=entity_type_id)
        if ident is None:
            return [meta.to_dict()
                    for meta in get_all(self.session, MetaAttribute, entity_type=entity_type)]
        else:
            return get_one(self.session, MetaAttribute, entity_type=entity_type,
                           id=ident).to_dict()

    @requires_validation(assert_attribute_does_not_exist(MetaAttribute), with_route_params=True)
    @requires_validation(Schema({Required('name'): non_empty_string}))
    def post(self, entity_type_id):
        data = self.request.data
        entity_type = get_one(self.session, EntityType, id=entity_type_id)
        meta = MetaAttribute(entity_type=entity_type, name=data['name'])
        self.session.add(meta)
        self.session.commit()
        update_last_data_modification_ts(self.session)
        return {
            'success': True,
            'ID': meta.id
        }

    @requires_validation(assert_attribute_does_not_exist(MetaAttribute), with_route_params=True)
    @requires_validation(Schema({'name': non_empty_string}))
    def put(self, entity_type_id, ident):
        data = self.request.data
        entity_type = get_one(self.session, EntityType, id=entity_type_id)  # check if route is correct
        meta = get_one(self.session, MetaAttribute, entity_type=entity_type, id=ident)
        if 'name' in data:
            meta.name = data['name']
        self.session.commit()
        update_last_data_modification_ts(self.session)
        return {
            'success': True,
            'ID': meta.id,
        }

    def delete(self, entity_type_id, ident):
        entity_type = get_one(self.session, EntityType, id=entity_type_id)  # check if route is correct
        meta = get_one(self.session, MetaAttribute, entity_type=entity_type, id=ident)
        now = time.time()
        meta.delete_ts = now
        for entity_meta in get_all(self.session, EntityMeta, attribute=meta):
            entity_meta.delete_ts = now
        self.session.commit()
        update_last_data_modification_ts(self.session)
        return {'success': True}
def set_sensor_info(interior):
    """
    there should be two 1W sensors connected directly to the device.
    This gets the sensors and sets which one is interior and exterior
    into the HardwareDefinition table.
    interior should be '1' or '2' and specifies which of the two sensors
    is the interior one.
    """
    logger.debug("setting sensor info for device")
    interior = int(interior)

    # get the 1W sensors that are connected directly to the device
    # these are the interior and exterior temp sensors
    sensors = get_connected_sensors()
    int_sensor = "no_sensor_selected"
    ext_sensor = "no_sensor_selected"

    # there should be two sensors
    if len(sensors) == 2:
        if interior == 1:
            int_sensor = sensors[0]
            ext_sensor = sensors[1]
        elif interior == 2:
            int_sensor = sensors[1]
            ext_sensor = sensors[0]
    elif len(sensors) == 1:
        if interior == 1:
            int_sensor = sensors[0]
            ext_sensor = "no_sensor_detected"
        elif interior == 2:
            int_sensor = "no_sensor_detected"
            ext_sensor = sensors[0]

    logger.debug("interior sensor is: {0}".format(int_sensor))
    logger.debug("exterior sensor is: {0}".format(ext_sensor))

    # now set the sensor info into the tables
    session = Session()
    try:
        hd = session.query(HardwareDefinition).one()
        hd.interior_sensor = int_sensor
        hd.exterior_sensor = ext_sensor
    except NoResultFound:
        hd = HardwareDefinition()
        hd.interior_sensor = int_sensor
        hd.exterior_sensor = ext_sensor
        session.add(hd)

    session.commit()
    session.close()
    return
def login():
    body = request.get_json()
    username = body.get('username')
    password = body.get('password')

    profile = User.objects(username=username).first()
    if profile and bcrypt.check_password_hash(profile.password, password):
        token = get_token()
        Session(username=username, session_id=token).save()
        return {'token': token}, 200

    return {'message': 'Invalid username or password'}, 401
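# A hedged client-side sketch for the login endpoint above, assuming it is exposed as
# a JSON POST route at /login; the host, port, and credentials are illustrative only.
import requests

resp = requests.post('http://localhost:5000/login',
                     json={'username': 'alice', 'password': 'secret'})
if resp.status_code == 200:
    token = resp.json()['token']       # value produced by get_token() above
else:
    print(resp.json()['message'])      # 'Invalid username or password'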
def create(self):
    session = Session()
    device = session.query(Device).first()

    self.id = device.id
    self.hardware_version = device.hardware_version
    self.software_version = device.software_version
    self.database_service = device.database_service
    self.device_service = device.device_service
    self.grainbin_service = device.grainbin_service

    if device.grainbin_service:
        self.grainbin_count = device.grainbin_count
        self.grainbin_data = GrainbinInfo(self.source, self.destination)
        self.grainbin_data.create()

    session.close()
    return
def delete_release(repo, name):
    session = Session()
    release = session.query(Update).filter_by(repo=repo, name=name).first()

    if release:
        if os.path.isdir(release.filepath):
            shutil.rmtree(release.filepath)
        session.delete(release)
        session.commit()

    session.close()
    return
def delete_wifi_network(id):
    session = Session()
    session.query(WifiDefinition).filter_by(id=id).delete()
    session.commit()
    session.close()
    return
def wifi_info():
    logger.debug("getting wifi information")
    wlan_interfaces = get_interfaces(only_wlan=True)
    wifi = []
    session = Session()

    for w_interface in wlan_interfaces:
        try:
            info = {}
            interface = session.query(InterfaceDefinition).filter_by(interface=w_interface).one()
            info['interface'] = interface

            if interface.state == 'ap':
                info['clients'] = wifi_ap_clients(interface.interface)
                info['ssid'] = interface.credentials[0].wifi_name
                info['password'] = interface.credentials[0].wifi_password
            else:
                info['state'] = wifi_dhcp_info(interface.interface)
                if info['state'] is False:
                    info['state_boolean'] = False
                else:
                    info['state_boolean'] = True
                if w_interface in netifaces.interfaces():
                    address = netifaces.ifaddresses(w_interface)
                    info['address'] = address[netifaces.AF_INET][0]['addr']
                if interface.credentials:
                    info['ssid'] = interface.credentials[0].wifi_name
                    info['password'] = interface.credentials[0].wifi_password

            wifi.append(info)
        except NoResultFound:
            pass

    session.close()
    return wifi
def __init__(self, request_id: int, payload: dict):
    super().__init__(request_id, payload)
    self._last_data_timestamp = 0
    session = Session()
    self._measurement = get_one(session, SeriesAttribute,
                                id=self._raw_payload.measurement_id,
                                exception_cls=ValueError)
    self._entity = get_one(session, Entity, id=self._raw_payload.node_id,
                           exception_cls=ValueError)
    self._run_assertions()
def create_backup(user_created=True):
    session = Session()
    software_version = session.query(SoftwareDefinition.software_version).scalar()
    serial_number = session.query(HardwareDefinition.serial_number).scalar()

    timestamp = datetime.now().strftime('%Y.%m.%d.%H.%M.%S')
    filepath = base_path + serial_number + '.fd.' + timestamp + '/'
    zip_filepath = filepath[:-1] + '.zip'
    if not os.path.exists(filepath):
        os.makedirs(filepath)
    if not os.path.exists(filepath + 'database/'):
        os.makedirs(filepath + 'database/')

    backup = Backup(serial_number, 'farm_device', zip_filepath)
    backup.software_version = software_version
    backup.user_created = user_created

    _backup_database(filepath)
    backup.database = True
    # _backup_data(filepath)
    # backup.data = True

    # zip the entire backup into one file
    zipf = zipfile.ZipFile(zip_filepath, 'w', zipfile.ZIP_DEFLATED)
    for root, dirs, files in os.walk(filepath):
        for file in files:
            filename = os.path.join(root, file)
            zipf.write(filename, os.path.relpath(filename, filepath))
    zipf.close()

    # store the hash of the file
    sha256 = hashlib.sha256()
    with open(zip_filepath, 'rb') as f:
        for block in iter(lambda: f.read(2048), b''):
            sha256.update(block)
    backup.sha256 = sha256.hexdigest()

    # Remove the folder. Just use the zipfile
    shutil.rmtree(filepath)

    session.add(backup)
    session.commit()
    backup_index = backup.index
    session.close()

    return backup_index
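# A small verification sketch (an addition, not part of the original module): recompute
# the SHA-256 of a backup zip the same way create_backup() does and compare it with the
# hash stored on the Backup row.
import hashlib


def verify_backup(backup):
    sha256 = hashlib.sha256()
    with open(backup.filepath, 'rb') as f:
        for block in iter(lambda: f.read(2048), b''):
            sha256.update(block)
    return sha256.hexdigest() == backup.sha256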
def __init__(self, request_id: int, payload: dict):
    super().__init__(request_id, payload)
    session = Session()
    self._entity = get_one(session, Entity, id=self._raw_payload.node_id,
                           exception_cls=ValueError)
    self._requested_data = [
        create_measurement_handler(self._raw_payload.node_id,
                                   self._raw_payload.aggregation_length,
                                   self._raw_payload.aggregation_type,
                                   data)
        for data in self._raw_payload.requested_data
    ]
    self._run_assertions()
def on_update(self, did, msg):
    session = Session()
    device = session.query(Device).filter_by(id=did).first()
    if device:
        if device.user_configured:
            update = pickle.loads(msg[0])
            process_grainbin_update(session, update)
        else:
            session.close()
            return 'device unconfigured'
    else:
        session.close()
        return 'no device'

    session.close()
    return 'updated'
def set_ap_mode():
    logger.debug("setting wifi into ap mode")
    session = Session()

    # get the interface that should run the access point
    ap_interface = session.query(InterfaceDefinition).filter_by(state='ap').first()
    if ap_interface is None:
        # error. abort
        logger.warning("No interface with state set to 'ap'. Aborting")
        session.close()
        return
    ap_ssid = ap_interface.credentials[0].wifi_name
    ap_password = ap_interface.credentials[0].wifi_password

    # get the wlan0 and wlan1 dhcp states for the interface file
    if ap_interface.interface == 'wlan0':
        wlan0_dhcp = False
        wlan1_dhcp = True
    else:
        wlan0_dhcp = True
        wlan1_dhcp = False

    # get the info for the wpa_supplicant file
    wifi_defs = session.query(WifiDefinition).filter(WifiDefinition.wifi_mode != 'ap').all()
    networks = []
    for wifi in wifi_defs:
        new_network = {}
        new_network['ssid'] = wifi.wifi_name
        new_network['password'] = wifi.wifi_password
        networks.append(new_network)

    # get the information for the iptables_file
    internal_interface = ap_interface.interface
    external_interface = get_external_interface()

    iptables_file(external_interface, internal_interface)
    interface_file(wlan0_dhcp=wlan0_dhcp, wlan1_dhcp=wlan1_dhcp)
    wpa_supplicant_file(networks)
    dhcpcd_file(interface=ap_interface.interface)
    dnsmasq_file(interface=ap_interface.interface)
    hostapd_file(ap_interface.interface, ap_ssid, ap_password)

    install_root = session.query(RoutingDefiniton.route).filter_by(name='install_root').scalar()
    session.close()

    path = install_root + 'farm_device/network/ap_script.sh'
    command = ['sudo', 'sh', path, ap_interface.interface]
    subprocess.check_call(command)
    return
def post_update(update_id):
    session = Session()

    # update the software info
    update = session.query(Update).filter_by(id=update_id).one()
    device = session.query(Device).one()
    device.software_version = update.name

    delete_release(update.repo, update.name)

    session.commit()
    session.close()
    return
def _assert_objects_were_not_created(data, entity_type_id):
    session = Session()
    existing_objects = {
        'tags': [
            tag.name for tag in get_all(
                session, TagAttribute, entity_type_id_fk=entity_type_id)
        ],
        'meta': [
            meta.name for meta in get_all(
                session, MetaAttribute, entity_type_id_fk=entity_type_id)
        ],
        'series': [
            series.name for series in get_all(
                session, SeriesAttribute, entity_type_id_fk=entity_type_id)
        ]
    }
    for object_type in ('tags', 'meta', 'series'):
        for obj in data.get(object_type, []):
            if obj in existing_objects[object_type]:
                raise ValueError('{} is in existing {}'.format(obj, object_type))
def check_all_alerts(
        on_state_change: Iterable[Callable[[Alert, str], Any]]) -> None:
    session = Session()
    for alert in get_all(session, Alert, is_enabled=True):
        status = check_alert(alert)
        if status and not alert.last_check_status:
            # call all callbacks in on_state_change
            for callback in on_state_change:
                callback(alert, str(status))
            alert.last_check_status = True
        if alert.last_check_status is None or (not status and alert.last_check_status):
            for callback in on_state_change:
                callback(alert, str(status))
            alert.last_check_status = False
    session.commit()
    session.close()
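# A hedged example of an on_state_change callback for check_all_alerts(); the logger
# name is illustrative. Each callback receives the Alert row and the new check result
# rendered as a string.
import logging


def log_alert_transition(alert, status):
    logging.getLogger('alerts').info(
        'alert %s (%s) changed state, check result: %s',
        alert.id, alert.alert_predicate_type, status)


check_all_alerts(on_state_change=[log_alert_transition])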
def set_hardware_info(hardware_version, gb_reader_count):
    """
    Set the hardware info into the HardwareDefinition table.
    hardware_version is a string representing what revision of hardware
    gb_reader_count is the number of 1Wire reader chips the FarmDevice has
    """
    logger.debug("setting version: {0} grainbin_reader: {1}".format(hardware_version,
                                                                    gb_reader_count))
    session = Session()
    device_name = get_device_name()
    serial_number = getserial()
    try:
        hd = session.query(HardwareDefinition).one()
        hd.hardware_version = hardware_version
        hd.wifi_chip = None
        hd.device_name = device_name
        hd.serial_number = serial_number
        hd.grainbin_reader_count = int(gb_reader_count)
    except NoResultFound:
        hd = HardwareDefinition()
        hd.hardware_version = hardware_version
        hd.wifi_chip = None
        hd.device_name = device_name
        hd.serial_number = serial_number
        hd.grainbin_reader_count = int(gb_reader_count)
        session.add(hd)

    session.commit()
    session.close()
    return
def delete_backup(backup_index, backup_type, serial_number):
    session = Session()
    backup = session.query(Backup).filter_by(index=backup_index,
                                             backup_type=backup_type,
                                             serial_number=serial_number).first()
    if backup:
        if os.path.isfile(backup.filepath):
            try:
                os.remove(backup.filepath)
            except OSError as ex:
                print(ex)
        session.delete(backup)
        session.commit()

    session.close()
    return
def device_service(context):
    global connection
    global subscriber
    global internal_router

    session = Session()
    client_address = session.query(RoutingDefiniton.route)\
        .filter_by(name="client_address").scalar()
    client_port = session.query(RoutingDefiniton.route)\
        .filter_by(name="client_port").scalar()
    client_address = "tcp://" + client_address + ":" + client_port
    device_id = session.query(HardwareDefinition.serial_number).scalar()
    subscriber_address = session.query(RoutingDefiniton.route)\
        .filter_by(name="internal_sub").scalar()
    session.close()

    subscriber_topics = ["device"]

    connection = DeviceConnection(context, device_id, client_address)
    logger.debug("DeviceConnection connecting to: {0}".format(client_address))
    logger.debug("DeviceConnection device id is: {0}".format(device_id))

    subscriber = DeviceSubscriber(context, subscriber_address, subscriber_topics)
    logger.debug("DeviceSubscriber subscribing to: {0}".format(subscriber_address))
    logger.debug("DeviceSubscriber topics: {0}".format(subscriber_topics))

    internal_router = DeviceRouter(context)

    try:
        logger.info("starting DeviceConnection, DeviceSubscriber, and DeviceRouter")
        IOLoop.instance().start()
    except KeyboardInterrupt:
        logger.info("stopping DeviceConnection, DeviceSubscriber, and DeviceRouter")
        IOLoop.instance().stop()
        internal_router.shutdown()
        subscriber.shutdown()
        connection.shutdown()
    return
class TagAttributeHandler(Handler):
    def __init__(self):
        self.session = Session()

    def get(self, entity_type_id, ident=None):
        entity_type = get_one(self.session, EntityType, id=entity_type_id)
        if ident is None:
            return [
                tag.to_dict() for tag in get_all(
                    self.session, TagAttribute, entity_type=entity_type)
            ]
        else:
            return get_one(self.session, TagAttribute, entity_type=entity_type,
                           id=ident).to_dict()

    @requires_validation(assert_attribute_does_not_exist(TagAttribute), with_route_params=True)
    @requires_validation(Schema({Required('name'): non_empty_string}))
    def post(self, entity_type_id):
        data = self.request.data
        entity_type = get_one(self.session, EntityType, id=entity_type_id)
        tag = TagAttribute(entity_type=entity_type, name=data['name'])
        self.session.add(tag)
        self.session.commit()
        update_last_data_modification_ts(self.session)
        return {'success': True, 'ID': tag.id}

    def delete(self, entity_type_id, ident):
        entity_type = get_one(self.session, EntityType, id=entity_type_id)  # check if route is correct
        tag = get_one(self.session, TagAttribute, entity_type=entity_type, id=ident)
        now = time.time()
        tag.delete_ts = now
        for entity_tag in get_all(self.session, EntityTag, attribute=tag):
            entity_tag.delete_ts = now
        self.session.commit()
        update_last_data_modification_ts(self.session)
        return {'success': True}
def grainbin_service(context):
    global service
    global subscriber
    global internal_dealer

    session = Session()
    client_address = session.query(RoutingDefiniton.route)\
        .filter_by(name="client_address").scalar()
    client_port = session.query(RoutingDefiniton.route)\
        .filter_by(name="client_port").scalar()
    client_address = "tcp://" + client_address + ":" + client_port
    device_id = session.query(HardwareDefinition.serial_number).scalar()
    subscriber_address = session.query(RoutingDefiniton.route)\
        .filter_by(name="internal_sub").scalar()
    session.close()

    subscriber_topics = ["grainbin"]

    service = GrainBinService(context, device_id, client_address)
    logger.debug("GrainbinService connecting to: {0}".format(client_address))

    subscriber = GrainBinSubscriber(context, subscriber_address, subscriber_topics)
    logger.debug("GrainbinSubscriber subscribing to: {0}".format(subscriber_address))
    logger.debug("GrainbinSubscriber topics: {0}".format(subscriber_topics))

    internal_dealer = GrainBinDealer(context, 'grainbin')
    logger.debug("GrainbinDealer has id 'grainbin'")

    try:
        logger.info("starting GrainbinService, GrainbinSubscriber, and GrainbinDealer")
        IOLoop.instance().start()
    except KeyboardInterrupt:
        logger.info("stopping GrainbinService, GrainbinSubscriber, and GrainbinDealer")
        IOLoop.instance().stop()
        internal_dealer.shutdown()
        subscriber.shutdown()
        service.shutdown()
    return
def preform_update(update_id, update_type):
    session = Session()
    try:
        update = session.query(Update).filter_by(id=update_id).one()
    except NoResultFound:
        session.close()
        return

    if update_type == 'farm_device':
        # now stage and perform the update
        file_path = update.filepath + update.filename
        updater_path = session.query(RoutingDefiniton.route).filter_by(name='updater_path').scalar()

        # launch the updater script in a separate process. The bash file just calls another
        # python script in a detached process
        Popen(['sh', updater_path, str(update_id), file_path, "farm_device", "farm_device"],
              preexec_fn=os.setpgrp)

    session.close()
    return
def on_heartbeat(self, did):
    if did in self._connected_devices:
        if self._connected_devices[did].is_alive():
            self._connected_devices[did].on_message_received()
        return self._connected_devices[did].get_state()
    else:
        session = Session()
        device = session.query(Device).filter_by(id=did).first()
        if not device:
            session.close()
            # signals the device to send a create message
            return 'unknown'
        else:
            self._connected_devices[did] = DeviceRep(did, state='connected')
            device.connected = True
            session.commit()
            session.close()
            return 'connected'
def push_new_measurements(directory, pattern):
    session = Session()
    files = os.listdir(directory)
    for node in get_all(session, Entity):
        to_read = [
            directory + '/' + f for f in files
            if re.match(pattern.format(ID=node.id), f)
            and os.path.getmtime(directory + '/' + f) >= node.last_data_fetch_ts
        ]
        # get input files sorted by modification time
        for file in sorted(to_read, key=os.path.getmtime):
            try:
                InfluxWriter().write(
                    PointGenerator(node, FileParser(node.id, file)).generate_points())
                node.last_data_fetch_ts = int(os.path.getmtime(file))
                session.commit()
            except Exception as err:
                logging.error(err)
    session.close()
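# A hedged usage sketch: push_new_measurements() expects `pattern` to contain an {ID}
# placeholder, which is filled with each node's id before the filename is matched with
# re.match(). The directory and pattern below are illustrative, not taken from config.
push_new_measurements('/var/lib/measurements', r'node_{ID}_.*\.csv')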
def __init__(self):
    self.session = Session()
class SeriesAttributeHandler(Handler):
    def __init__(self):
        self.session = Session()

    def get(self, entity_type_id, ident=None):
        entity_type = get_one(self.session, EntityType, id=entity_type_id)
        if ident is None:
            return [
                series.to_dict() for series in get_all(
                    self.session, SeriesAttribute, entity_type=entity_type)
            ]
        else:
            return get_one(self.session, SeriesAttribute, entity_type=entity_type,
                           id=ident).to_dict()

    @requires_validation(assert_attribute_does_not_exist(SeriesAttribute), with_route_params=True)
    @requires_validation(
        Schema({
            Required('name'): non_empty_string,
            'type': Or('real', 'enum'),
            'refresh_time': Or(int, None),
            'is_favourite': bool,
        }))
    def post(self, entity_type_id):
        data = self.request.data
        entity_type = get_one(self.session, EntityType, id=entity_type_id)
        series = SeriesAttribute(entity_type=entity_type,
                                 name=data['name'],
                                 type=data.get('type', 'real'),
                                 refresh_time=data.get('refresh_time'),
                                 is_favourite=data.get('is_favourite', False))
        self.session.add(series)
        self.session.commit()
        update_last_data_modification_ts(self.session)
        return {'success': True, 'ID': series.id}

    @requires_validation(
        Schema({
            'refresh_time': Or(int, None),
            'is_favourite': bool,
        }))
    def put(self, entity_type_id, ident):
        data = self.request.data
        entity_type = get_one(self.session, EntityType, id=entity_type_id)
        series = get_one(self.session, SeriesAttribute, entity_type=entity_type, id=ident)
        if 'refresh_time' in data:
            series.refresh_time = data['refresh_time']
        if 'is_favourite' in data:
            series.is_favourite = data['is_favourite']
        self.session.commit()
        update_last_data_modification_ts(self.session)
        return {'success': True, 'ID': series.id}

    def delete(self, entity_type_id, ident):
        now = time.time()
        entity_type = get_one(self.session, EntityType, id=entity_type_id)  # check if route is correct
        series = get_one(self.session, SeriesAttribute, entity_type=entity_type, id=ident)
        series.delete_ts = now
        for alert in series.alerts:
            alert.delete_ts = now
        self.session.commit()
        update_last_data_modification_ts(self.session)
        return {'success': True}
class EntityTypeHandler(Handler):
    def __init__(self):
        self.session = Session()

    def get(self, ident=None):
        if ident is None:
            return [
                entity_type.to_dict()
                for entity_type in get_all(self.session, EntityType)
            ]
        else:
            return get_one(self.session, EntityType, id=ident).to_dict()

    @requires_validation(
        Schema({
            Required('name'): non_empty_string,
            Required('tags'): And([non_empty_string], Unique()),
            Required('meta'): And([non_empty_string], Unique()),
            Required('series'): And([non_empty_string], Unique()),
        }))
    def post(self):
        data = self.request.data
        entity_type = EntityType(name=data['name'])
        self.session.add(entity_type)

        # add tags, meta and series
        for tag in data['tags']:
            self.session.add(TagAttribute(
                entity_type=entity_type,
                name=tag,
            ))
        for meta in data['meta']:
            self.session.add(
                MetaAttribute(
                    entity_type=entity_type,
                    name=meta,
                ))
        for series in data['series']:
            self.session.add(
                SeriesAttribute(
                    entity_type=entity_type,
                    name=series,
                ))

        self.session.commit()
        update_last_data_modification_ts(self.session)
        return {
            'success': True,
            'ID': entity_type.id,
        }

    @requires_validation(_assert_objects_were_not_created, with_route_params=True)
    @requires_validation(
        Schema({
            'name': non_empty_string,
            'tags': And([non_empty_string], Unique()),
            'meta': And([non_empty_string], Unique()),
            'series': And([non_empty_string], Unique()),
        }))
    def put(self, ident):
        data = self.request.data
        entity_type = get_one(self.session, EntityType, id=ident)
        if 'name' in data:
            entity_type.name = data['name']

        # add tags, meta and series
        if 'tags' in data:
            for tag in data['tags']:
                self.session.add(
                    TagAttribute(
                        entity_type=entity_type,
                        name=tag,
                    ))
        if 'meta' in data:
            for meta in data['meta']:
                self.session.add(
                    MetaAttribute(
                        entity_type=entity_type,
                        name=meta,
                    ))
        if 'series' in data:
            for series in data['series']:
                self.session.add(
                    SeriesAttribute(
                        entity_type=entity_type,
                        name=series,
                    ))

        self.session.commit()
        update_last_data_modification_ts(self.session)
        return {
            'success': True,
            'ID': entity_type.id,
        }

    def delete(self, ident):
        entity_type = get_one(self.session, EntityType, id=ident)
        now = time.time()
        entity_type.delete_ts = now
        for tag in entity_type.tags:
            tag.delete_ts = now
        for series in entity_type.series:
            series.delete_ts = now
            for alert in series.alerts:
                alert.delete_ts = now
        for meta in entity_type.meta:
            meta.delete_ts = now
        for entity in entity_type.nodes:
            entity.delete_ts = now
            for tag in entity.tags:
                tag.delete_ts = now
            for meta in entity.meta:
                meta.delete_ts = now
            for child in entity.children:
                child.parent = entity.parent
        self.session.commit()
        update_last_data_modification_ts(self.session)
        return {'success': True}
class AlertHandler(Handler):
    def __init__(self):
        self.session = Session()

    def get(self, ident=None):
        if ident is None:
            return [alert.to_dict() for alert in get_all(self.session, Alert)]
        else:
            return get_one(self.session, Alert, id=ident).to_dict()

    @requires_validation(
        Schema({
            Required('entity_id'): int,
            Required('series_id'): int,
            Required('alert_predicate_type'): Or('data_delay', 'value_too_low', 'value_too_high'),
            Required('value'): float,
            Required('is_enabled'): bool,
            Required('alert_recipient_email'): Email,
        }))
    def post(self):
        data = self.request.data
        entity = get_one(self.session, Entity, id=data['entity_id'])
        series = get_one(self.session, SeriesAttribute, id=data['series_id'])
        alert = Alert(entity=entity,
                      series=series,
                      alert_predicate_type=data['alert_predicate_type'],
                      value=data['value'],
                      is_enabled=data['is_enabled'],
                      alert_recipient_email=data['alert_recipient_email'])
        self.session.add(alert)
        self.session.commit()
        return {'success': True, 'ID': alert.id}

    @requires_validation(
        Schema({
            'entity_id': int,
            'series_id': int,
            'alert_predicate_type': Or('data_delay', 'value_too_low', 'value_too_high'),
            'value': float,
            'is_enabled': bool,
            'alert_recipient_email': Email,
        }))
    def put(self, ident):
        data = self.request.data
        alert = get_one(self.session, Alert, id=ident)
        if 'entity_id' in data:
            entity = get_one(self.session, Entity, id=data['entity_id'])
            alert.entity_id_fk = entity.id
        if 'series_id' in data:
            series = get_one(self.session, SeriesAttribute, id=data['series_id'])
            alert.series_id_fk = series.id
        if 'alert_predicate_type' in data:
            alert.alert_predicate_type = data['alert_predicate_type']
        if 'value' in data:
            alert.value = data['value']
        if 'is_enabled' in data:
            alert.is_enabled = data['is_enabled']
        if 'alert_recipient_email' in data:
            alert.alert_recipient_email = data['alert_recipient_email']
        # reset last check status
        alert.last_check_status = None
        self.session.commit()
        return {
            'success': True,
            'ID': alert.id,
        }

    def delete(self, ident):
        alert = get_one(self.session, Alert, id=ident)
        now = time.time()
        alert.delete_ts = now
        self.session.commit()
        update_last_data_modification_ts(self.session)
        return {'success': True}
def __init__(self, entity_id, filename):
    self._entity = get_one(Session(), Entity, id=entity_id)
    self._filename = filename
def __init__(self):
    self.session = Session()
    self._cached_tree = dict()
    self._update_cache()
class EntityHandler(Handler):
    def __init__(self):
        self.session = Session()

    def get(self, ident=None):
        if ident is None:
            return [
                entity.to_dict(deep=False)
                for entity in get_all(self.session, Entity)
            ]
        else:
            return get_one(self.session, Entity, id=ident).to_dict(deep=False)

    @staticmethod
    def _assert_got_all_needed_tag_and_meta_ids(entity_type, tag_ids, meta_ids):
        expected_tag_ids = sorted(tag.id for tag in entity_type.tags
                                  if tag.delete_ts is None)
        expected_meta_ids = sorted(meta.id for meta in entity_type.meta
                                   if meta.delete_ts is None)
        if tag_ids != expected_tag_ids:
            raise HTTP_400('Expected tag IDs {}, got {}'.format(
                expected_tag_ids, tag_ids))
        if meta_ids != expected_meta_ids:
            raise HTTP_400('Expected meta IDs {}, got {}'.format(
                expected_meta_ids, meta_ids))

    @requires_validation(
        Schema(
            {
                Required('parent_id'): Or(int, None),
                Required('entity_type_id'): int,
            },
            extra=ALLOW_EXTRA))
    def post(self):
        data = self.request.data
        entity_type = get_one(self.session, EntityType, id=data['entity_type_id'])

        # check if we got all tags and meta
        tag_ids = sorted(
            int(key.split('_')[1]) for key in data if 'tag_' in key)
        meta_ids = sorted(
            int(key.split('_')[1]) for key in data if 'meta_' in key)
        self._assert_got_all_needed_tag_and_meta_ids(entity_type, tag_ids, meta_ids)

        entity = Entity(
            entity_type=entity_type,
            parent=None if data['parent_id'] is None else get_one(
                self.session, Entity, id=data['parent_id']),
        )
        self.session.add(entity)

        # add tags and meta
        for key in data:
            if 'tag_' in key:
                self.session.add(
                    EntityTag(
                        entity=entity,
                        attribute=get_one(self.session, TagAttribute,
                                          id=int(key.split('_')[1])),
                        value=data[key],
                    ))
            elif 'meta_' in key:
                self.session.add(
                    EntityMeta(
                        entity=entity,
                        attribute=get_one(self.session, MetaAttribute,
                                          id=int(key.split('_')[1])),
                        value=data[key],
                    ))

        self.session.commit()
        update_last_data_modification_ts(self.session)
        return {
            'success': True,
            'ID': entity.id,
        }

    def put(self, ident):
        data = self.request.data
        entity = get_one(self.session, Entity, id=ident)  # to ensure that the entity exists
        if 'parent_id' in data:
            get_one(self.session, Entity, id=data['parent_id'])
            entity.parent_id_fk = data['parent_id']

        # add tags and meta
        for key in data:
            if 'tag_' in key:
                tag = get_one(self.session, EntityTag, entity=entity,
                              tag_id_fk=key.split('_')[1])
                tag.value = data[key]
            elif 'meta_' in key:
                meta = get_one(self.session, EntityMeta, entity=entity,
                               meta_id_fk=key.split('_')[1])
                meta.value = data[key]

        self.session.commit()
        update_last_data_modification_ts(self.session)
        return {
            'success': True,
            'ID': entity.id,
        }

    def delete(self, ident):
        entity = get_one(self.session, Entity, id=ident)
        now = time.time()
        entity.delete_ts = now
        for tag in entity.tags:
            tag.delete_ts = now
        for meta in entity.meta:
            meta.delete_ts = now
        for alert in entity.alerts:
            alert.delete_ts = now
        for child in entity.children:
            child.parent = entity.parent
        self.session.commit()
        update_last_data_modification_ts(self.session)
        return {'success': True}