def _test_add_nodes_b1():
    """Smoke test helper: insert two hard-coded controller nodes and commit.

    Relies on the module-level ``session`` and ``models``; prints "OK" when
    the commit succeeds.
    """
    print("sdfsdfs")
    # node1 = Node(created_at=datetime.now(), updated_at=datetime.now(),
    #              deleted_at=None, management_ip="172.16.29.193",
    #              ssh_user="******", ssh_password="******",
    #              status="set_ip", node_display_name="controller_01")
    controllers = [
        ("172.16.29.194", "controller_02"),
        ("172.16.29.195", "controller_03"),
    ]
    for management_ip, display_name in controllers:
        session.add(models.Node(
            created_at=datetime.now(),
            updated_at=datetime.now(),
            deleted_at=None,
            management_ip=management_ip,
            ssh_user="******",
            ssh_password="******",
            status="set_ip",
            node_display_name=display_name,
        ))
    # node_info = Node_info(node_name="tovanlam1", memory_mb=123)
    # session.add(node1)
    session.commit()
    print("OK")
def init_db():
    """Create all mapped tables and seed the node tree.

    Inserts the ALL root node plus one child node per content category
    (materials, events, topics, news), then commits.
    """
    import models

    db_session = get_db_session()
    Base.metadata.create_all(bind=engine)

    root = models.Node(id=models.NodeType.ALL.value)
    children = [
        models.Node(id=node_type.value, parent=root)
        for node_type in (
            models.NodeType.MATERIAL,
            models.NodeType.EVENT,
            models.NodeType.TOPIC,
            models.NodeType.NEWS,
        )
    ]
    db_session.add_all([root] + children)
    db_session.commit()
def node_create(uuid, resource_id, node_resource_uuid, node_ip, username, password):
    """Persist a new Node and its NodeResources link row in one commit."""
    node = models.Node(uuid, resource_id, node_ip, username, password)
    db.session.add(node)
    # The resource row references the node by its uuid.
    resource_link = models.NodeResources(node_resource_uuid, uuid)
    db.session.add(resource_link)
    db.session.commit()
def node_view():
    """Render the travel-application form; on a valid POST, open a yawf
    work flow for it and redirect to the request list.
    """
    # Form class is declared inline so it stays private to this view.
    class _Form(Form):
        destination = TextField('destination', validators=[DataRequired()])
        contact = TextField('contact', validators=[DataRequired()])

    form = _Form()
    if form.validate_on_submit():
        # yawf builds the flow from two factories: the first creates the
        # initial work-flow node (handled by the 'Customers' group), the
        # second creates and commits the Application row whose id becomes
        # the flow's subject.
        work_flow = yawf.new_work_flow(
            'travel application',
            lambda work_flow: models.Node(
                name='Submit Travel Application',
                policy_name="Travel",
                work_flow=work_flow,
                handler_group=models.Group.query.filter(
                    models.Group.name == 'Customers').one()),
            lambda work_flow: do_commit(
                models.Application(username=current_user.username,
                                   destination=form.destination.data,
                                   contact=form.contact.data,
                                   work_flow=work_flow)).id)
        try:
            work_flow.start()
        except yawf.exceptions.WorkFlowDelayed:
            # Delayed means the flow is waiting on approval: notify the
            # user and send them to the list view.
            flash('You have just submitted an travel application')
            return redirect(url_for('node_list_view'))
    # GET, failed validation, or a flow that started without being delayed
    # all fall through to (re-)render the form.
    return render_template('/request.html', form=form)
def trilaterate(p1: models.Node, d1: float, p2: models.Node, d2: float,
                p3: models.Node, d3: float, noise: float) -> models.Node:
    """Estimate a 2-D position from three anchor nodes and their distances.

    Each distance is first perturbed by a random error of up to ``noise``
    percent, then the standard closed-form trilateration solution is applied.

    Args:
        p1, p2, p3: anchor nodes; only their ``.x`` / ``.y`` are read.
        d1, d2, d3: measured distances to the respective anchors.
        noise: maximum simulated measurement error, in percent of each
            distance; 0 disables perturbation.

    Returns:
        A ``models.Node`` built from the predicted (x, y) coordinates.
    """
    i1, j1 = p1.x, p1.y
    i2, j2 = p2.x, p2.y
    i3, j3 = p3.x, p3.y
    # Bug fix: the original used random.randrange(-noise, noise), which
    # raises ValueError for float noise levels and for noise == 0.
    # random.uniform handles both and still yields errors in [-noise, noise].
    if noise:
        d1 += random.uniform(-noise, noise) * d1 / 100
        d2 += random.uniform(-noise, noise) * d2 / 100
        d3 += random.uniform(-noise, noise) * d3 / 100
    # The closed form below divides by (2*j2 - 2*j1); when j1 == j2 swap
    # anchors 1 and 3 to avoid a zero denominator (requires j2 != j3).
    if j1 == j2 and j2 != j3:
        i1, i3 = i3, i1
        j1, j3 = j3, j1
    predicted_x = (((2 * j3 - 2 * j2) * ((d1 * d1 - d2 * d2) + (i2 * i2 - i1 * i1) + (j2 * j2 - j1 * j1)) - (
        2 * j2 - 2 * j1) * ((d2 * d2 - d3 * d3) + (i3 * i3 - i2 * i2) + (j3 * j3 - j2 * j2))) / (
        (2 * i2 - 2 * i3) * (2 * j2 - 2 * j1) - (2 * i1 - 2 * i2) * (2 * j3 - 2 * j2)))
    predicted_y = ((d1 * d1 - d2 * d2) + (i2 * i2 - i1 * i1) + (j2 * j2 - j1 * j1) + (
        predicted_x * (2 * i1 - 2 * i2))) / (2 * j2 - 2 * j1)
    return models.Node(predicted_x, predicted_y)
def add_host():
    """Flask handler: register a new node from the request's JSON payload.

    Aborts with 400 on non-JSON requests, refuses duplicate management IPs,
    otherwise inserts the node and returns its JSON representation with 201.
    """
    if not request.json:
        abort(400)
    data = request.json
    # Duplicate check: at most one node per management IP.
    existing = session.query(models.Node).filter_by(
        management_ip=data.get('management_ip')).all()
    if existing:
        # NOTE(review): replies 200 with a Vietnamese status message
        # ("node already exists"); callers appear to depend on this shape.
        return {"status": "Node da ton tai"}
    node = models.Node(created_at=datetime.now(),
                       updated_at=datetime.now(),
                       deleted_at=None,
                       management_ip=data.get('management_ip', ""),
                       ssh_user=data.get('ssh_user', ""),
                       ssh_password=data.get('ssh_password', ""),
                       status="set_ip",
                       node_display_name=data.get('node_display_name', ''))
    session.add(node)
    session.commit()
    # Re-query so the response reflects DB-generated fields (e.g. the id).
    new_node = session.query(models.Node).filter_by(
        node_display_name=str(data.get('node_display_name', ''))).all()
    return jsonify(models.to_json(new_node, 'Node', True)), 201
def read_swc_file(swc_file_path):
    """Parse an SWC morphology file into a ``models.Neuron``.

    Lines containing ``#`` are treated as comments and skipped. Each data
    line is expected to hold seven space-separated fields:
    id, type, x, y, z, r, parent_id. Malformed lines are printed and
    skipped rather than aborting the whole parse.

    Args:
        swc_file_path: path to the .swc file; its basename becomes the
            neuron's name.

    Returns:
        The populated, initialised ``models.Neuron``.
    """
    neuron = models.Neuron(nodes=[])
    neuron.name = os.path.basename(swc_file_path)
    # 'with' guarantees the handle is closed; the original leaked it.
    with open(swc_file_path, 'r') as swc_file:
        for line in swc_file:
            if '#' in line:  # SWC comment line
                continue
            try:
                fields = line.replace('\n', '').split(' ')
                parent = neuron.get_node_by_id(int(fields[6]))
                node = models.Node(id=int(fields[0]),
                                   type=0,
                                   x=float(fields[2]),
                                   y=float(fields[3]),
                                   z=float(fields[4]),
                                   r=float(fields[5]),
                                   parent=parent)
                if parent:
                    parent.children.append(node)
                neuron.add_node(node)
            except (IndexError, ValueError):
                # Narrowed from a bare except: only field-count / conversion
                # failures are expected here; anything else should propagate.
                print(line)
    neuron.initialize()
    return neuron
def createDatabase(self):
    """Create the Required Database Objects.

    Mirrors the ArchRock source DB into the local schema: ensures the
    "archRock" deployment and this house exist, creates a Node / Room /
    Location per ArchRock node, and builds the node and sensor-type maps
    (``self.nodeMap``, ``self.sensorMap``) used later during transfer.
    """
    session = models.meta.Session()      # local (destination) DB session
    arSession = self.Session()           # ArchRock (source) DB session
    #log.setLevel(logging.WARNING)
    deploymentName = "archRock"
    houseName = self.houseAddress
    theDep = session.query(models.Deployment).filter_by(name=deploymentName).first()
    log.debug("Checking for existing deployment {0}".format(theDep))
    if theDep is None:
        #Create a new deployment
        theDep = models.Deployment(name=deploymentName)
        session.add(theDep)
        # flush so theDep.id is available for the House row below
        session.flush()
        log.debug("Adding Deployment to database {0}".format(theDep))
    #And check for Houses
    theHouse = session.query(models.House).filter_by(address=houseName).first()
    log.debug("Checking for house {0}".format(theHouse))
    if theHouse is None:
        theHouse = models.House(address=houseName, deploymentId=theDep.id)
        session.add(theHouse)
        session.flush()
        log.debug("Adding New House {0}".format(theHouse))
    self.theHouse = theHouse
    # default room type for rooms created below (sic: "Unocupied" is the
    # name stored in the DB — do not "fix" the spelling here)
    emptyRoom = session.query(models.RoomType).filter_by(name="Unocupied").first()
    nodeMap = {}  #Temp storage for <ADDDR> <Node>
    addrMap = {}
    #We want to setup nodes / Rooms / Locations based on the node_dimension table
    log.info("Setting Up Nodes")
    nodeQry = arSession.query(Ar_Node)
    for item in nodeQry:
        #Dont bother with the router
        if item.name == "archrock-router":
            continue
        #Check / Create a node if required
        # node id is the low bytes of the ArchRock address, parsed as hex
        nodeId = int(item.addr[8:], 16)
        log.debug("{0} {1} {2}".format(item, item.addr, nodeId))
        #nodeId = BASE_NODE_ID + item.short_addr
        theNode = session.query(models.Node).filter_by(id=nodeId).first()
        if theNode is None:
            theNode = models.Node(id=nodeId)
            session.add(theNode)
            session.flush()
            log.debug("Creating New Node {0}".format(theNode))
        #Next we create a room / Location
        # NOTE(review): nodes with an empty name get no Room/Location and
        # no nodeMap entry — confirm this matches the intended behaviour.
        roomName = item.name
        if not roomName == "":
            log.debug("Room Name is {0}".format(roomName))
            theRoom = session.query(models.Room).filter_by(name=roomName).first()
            if theRoom is None:
                theRoom = models.Room(name=roomName, roomTypeId=emptyRoom.id)
                log.debug("Creating Room {0}".format(theRoom))
                session.add(theRoom)
                session.flush()
            #And now we can turn this room into a Location
            theLocation = session.query(models.Location).filter_by(houseId=theHouse.id, roomId=theRoom.id).first()
            if theLocation is None:
                theLocation = models.Location(houseId=theHouse.id, roomId=theRoom.id)
                session.add(theLocation)
                log.debug("Creating Location {0}".format(theLocation))
                session.flush()
            #Last thing we create a mapping between the node and the Location
            nodeMap[item.node_key] = [theNode, theLocation]
            addrMap[item.addr] = theNode
    #log.debug(nodeMap)
    self.nodeMap = nodeMap
    #We also need to do mapping for sensor types etc
    #theQry = arSession.query(Source)
    #Map the Types we are expecting to types from the database
    sensorTypeMap = {}
    log.info("Mapping Sensor Types")
    for sType in ALLOWED_TYPES:
        # sType is (archrock name, local SensorType name)
        theType = session.query(models.SensorType).filter_by(name=sType[1]).first()
        log.debug("AR: {0} Local {1}".format(sType, theType))
        sensorTypeMap[sType[0]] = theType
    #log.debug(sensorTypeMap)
    sensorMap = {}
    sQry = arSession.query(Source)
    for item in sQry:
        # only keep sources whose type is in the allowed mapping
        thisItem = sensorTypeMap.get(item.source, None)
        if thisItem:
            sensorMap[item.datasource_key] = sensorTypeMap[item.source]
    self.sensorMap = sensorMap
    log.setLevel(logging.DEBUG)
    log.info("Commtitting Changes")
    session.flush()
    session.commit()
def createNodes(list):
    """Create Node objects from a list of parameter pairs.

    Each ``list[i]`` supplies two constructor arguments which are passed in
    reversed order (``list[i][1]`` then ``list[i][0]``) after the node's
    index ``i``.

    Note: the parameter is named ``list`` (shadowing the builtin) — kept
    unchanged for backward compatibility with keyword callers.

    Returns:
        A numpy object array of ``models.Node`` instances.
    """
    # enumerate() + comprehension replaces the index-based range(len(...)) loop.
    nodes = [models.Node(index, params[1], params[0])
             for index, params in enumerate(list)]
    return np.array(nodes)
def populate_tables(model_auto_update=False):
    """Add below all tables you want to be initialised.

    Ensures the schema for every table in ``table_collection`` (force
    re-creating a subset on every app start), seeds tables that have default
    values declared in ``Constant.db_values_json``, resets the current
    host's Node record, and populates the GpioPin table for each node
    according to its machine type.
    """
    global table_collection
    table_collection = [
        models.Node, models.Parameter, models.Module, models.Area,
        models.Zone, models.ZoneArea, models.ZoneCustomRelay,
        models.TemperatureTarget, models.SchedulePattern, models.HeatSchedule,
        models.ZoneHeatRelay, models.ZoneSensor, models.ZoneAlarm,
        models.ZoneThermostat, models.SystemMonitor, models.SystemDisk,
        models.Sensor, models.DustSensor, models.Ups, models.Rule,
        models.CommandOverrideRelay, models.PlotlyCache, models.Utility,
        models.Presence, models.SensorError, models.State, models.People,
        models.Device, models.PeopleDevice, models.Position,
        models.PowerMonitor, models.Music, models.MusicLoved, models.Pwm
    ]
    # tables that will be cleaned on every app start
    table_force_clean = [
        models.Parameter, models.Zone, models.Presence, models.Module,
        models.Node, models.Rule, models.ZoneHeatRelay
    ]  # , models.Sensor]
    for table in table_collection:
        table_str = utils.get_table_name(table)
        if table in table_force_clean:
            read_drop_table(table, "Forcing table clean",
                            drop_without_user_ask=True)
        else:
            check_table_schema(table, model_auto_update)
        if table_str in Constant.db_values_json:
            default_values = Constant.db_values_json[table_str]
            # Reseed only when the db record count no longer matches config.
            if len(table().query_all()) != len(default_values):
                L.l.info(
                    'Populating {} with default values as config record count != db count'
                    .format(table_str))
                table().delete()
                commit()
                for config_record in default_values:
                    new_record = table()
                    for field in config_record:
                        setattr(new_record, field, config_record[field])
                    db.session.add(new_record)
                commit()
    check_table_schema(models.Node, model_auto_update)
    # reseting execute_command field to avoid running last command before shutdown
    node_obj = models.Node.query.filter_by(name=Constant.HOST_NAME).first()
    if node_obj:
        node_obj.execute_command = ''
    else:
        node_obj = models.Node()
        node_obj.add_record_to_session()
    # let this commented for test purposes (make netbook Windows look like PI)
    if Constant.HOST_NAME != 'netbook':
        # Log.logger.info("Setting current machine type to {}".format(Constant.HOST_MACHINE_TYPE))
        node_obj.machine_type = Constant.HOST_MACHINE_TYPE
    Constant.HOST_PRIORITY = node_obj.priority
    commit()
    node_list = models.Node().query_all()
    check_table_schema(models.GpioPin, model_auto_update)
    # todo: worth implementing beaglebone white?
    # populate all beaglebone black pins in db for all nodes. only used ones
    # are mapped below, extend if more are used. mapping found here:
    # https://insigntech.files.wordpress.com/2013/09/bbb_pinouts.jpg
    bbb_bcm_map = {
        'P9_11': 30, 'P9_12': 60, 'P9_13': 31, 'P9_14': 40, 'P9_15': 48,
        'P9_16': 51, 'P9_24': 15, 'P9_23': 49, 'P9_22': 2, 'P9_21': 3,
        'P8_07': 66, 'P8_08': 67, 'P8_09': 69, 'P8_11': 45, 'P8_12': 44,
        'P8_15': 47, 'P8_16': 46
    }
    for node in node_list:
        if node.machine_type == Constant.MACHINE_TYPE_BEAGLEBONE:
            if len(models.GpioPin.query.filter_by(
                    pin_type=Constant.GPIO_PIN_TYPE_BBB,
                    host_name=node.name).all()
                   ) != 46 * 2:  # P8_ & P9_ rows have 46 pins
                models.GpioPin.query.filter_by(
                    pin_type=Constant.GPIO_PIN_TYPE_BBB,
                    host_name=node.name).delete()
                commit()
                L.l.info('Populating default {} GpioPins on {} '.format(
                    node.machine_type, node.name))
                for rail in range(8, 10):  # covers the P8_ and P9_ rails
                    for pin in range(1, 47):
                        gpio = models.GpioPin()
                        gpio.pin_type = Constant.GPIO_PIN_TYPE_BBB
                        gpio.host_name = node.name
                        pincode = '0' + str(pin)
                        gpio.pin_code = 'P' + str(rail) + '_' + pincode[-2:]
                        # Bug fix: dict.has_key() does not exist on Python 3;
                        # .get preserves the original '' fallback for pins
                        # absent from the BCM map.
                        gpio.pin_index_bcm = bbb_bcm_map.get(gpio.pin_code, '')
                        db.session.add(gpio)
                commit()
        # fixme: check for other PI revisions
        elif node.machine_type in [Constant.MACHINE_TYPE_RASPBERRY,
                                   Constant.MACHINE_TYPE_ODROID]:
            if len(models.GpioPin.query.filter_by(
                    pin_type=Constant.GPIO_PIN_TYPE_PI_STDGPIO,
                    host_name=node.name).all()) != 40:
                models.GpioPin.query.filter_by(
                    pin_type=Constant.GPIO_PIN_TYPE_PI_STDGPIO,
                    host_name=node.name).delete()
                commit()
                L.l.info('Populating standard {} GpioPins on {} '.format(
                    node.machine_type, node.name))
                for pin in range(0, 40):
                    gpio = models.GpioPin()
                    gpio.pin_type = Constant.GPIO_PIN_TYPE_PI_STDGPIO
                    gpio.host_name = node.name
                    gpio.pin_code = str(pin)
                    gpio.pin_index_bcm = pin
                    db.session.add(gpio)
            if len(models.GpioPin.query.filter_by(pin_type=Constant.GPIO_PIN_TYPE_PI_FACE_SPI, host_name=node.name).all()) \
                    != 2 * 8 * 4:  # input/output * 8 pins * max 4 boards
                models.GpioPin.query.filter_by(
                    pin_type=Constant.GPIO_PIN_TYPE_PI_FACE_SPI,
                    host_name=node.name).delete()
                commit()
                L.l.info('Populating piface {} pins on {} '.format(
                    node.machine_type, node.name))
                for board in range(0, 4):
                    for pin_dir in (Constant.GPIO_PIN_DIRECTION_IN,
                                    Constant.GPIO_PIN_DIRECTION_OUT):
                        for pin in range(0, 8):
                            gpio = models.GpioPin()
                            gpio.pin_type = Constant.GPIO_PIN_TYPE_PI_FACE_SPI
                            gpio.host_name = node.name
                            # same as piface.format_pin_code()
                            gpio.pin_code = str(
                                board) + ":" + pin_dir + ":" + str(pin)
                            gpio.pin_index_bcm = pin
                            gpio.board_index = board
                            db.session.add(gpio)
                commit()
        else:
            L.l.warning("Unknown machine type {} for node {}".format(
                node.machine_type, node))
def createDeployment(self):
    """ Create a new deployment and house etc on the Transfer Database.

    Ensures the "archRock" deployment, this house, an "External" room and
    location, and the fixed node 118118 all exist, then caches them (and
    the Power / Power Min / Power Max sensor types) on ``self`` for later
    use before committing.
    """
    deploymentName = "archRock"
    houseName = self.houseName
    session = models.meta.Session()
    #Check for deployment
    theDep = session.query(models.Deployment).filter_by(name=deploymentName).first()
    log.debug("Checking for existing deployment {0}".format(theDep))
    if theDep is None:
        #Create a new deployment
        theDep = models.Deployment(name=deploymentName)
        session.add(theDep)
        # flush so theDep.id is usable for the House row below
        session.flush()
        log.debug("Adding Deployment to database {0}".format(theDep))
    #And check for Houses
    theHouse = session.query(models.House).filter_by(address=houseName).first()
    log.debug("Checking for house {0}".format(theHouse))
    if theHouse is None:
        theHouse = models.House(address=houseName, deploymentId=theDep.id)
        session.add(theHouse)
        session.flush()
        log.debug("Adding New House {0}".format(theHouse))
    self.theHouse = theHouse
    #Create a location for this particular node
    theRoom = session.query(models.Room).filter_by(name="External").first()
    if theRoom is None:
        # roomTypeId=1 — presumably the default room type; verify in schema
        theRoom = models.Room(name="External", roomTypeId=1)
        session.add(theRoom)
        session.flush()
    log.debug("External Room is {0}".format(theRoom))
    #theLocation = models.Location(houseId = theHouse.id,roomId = theRoom.id)
    theLocation = session.query(models.Location).filter_by(houseId=theHouse.id, roomId=theRoom.id).first()
    log.debug("Checking for existing location {0}".format(theLocation))
    if theLocation is None:
        theLocation = models.Location(houseId=theHouse.id, roomId=theRoom.id)
        session.add(theLocation)
        session.flush()
    self.theLocation = theLocation
    #Node / Node Type
    # 118118 is a fixed synthetic node id for this transfer source
    theNode = session.query(models.Node).filter_by(id=118118).first()
    if theNode is None:
        theNode = models.Node(id=118118)
        session.add(theNode)
        session.flush()
    log.debug("Node is {0}".format(theNode))
    self.theNode = theNode
    # Cache the three power sensor types used when transferring readings.
    sensorType = session.query(models.SensorType).filter_by(name="Power").first()
    self.avgType = sensorType
    log.debug("Sensor is {0}".format(sensorType))
    sensorType = session.query(models.SensorType).filter_by(name="Power Min").first()
    self.minType = sensorType
    sensorType = session.query(models.SensorType).filter_by(name="Power Max").first()
    self.maxType = sensorType
    session.commit()