def write_excel_lnbr(features_all, chains, filename):
    """Write one worksheet per chain listing line-branch (LnBR) records.

    features_all -- list of feature dicts (each with '_id').
    chains       -- list of chains; each chain is a list of dicts with
                    'lnbr_idx', 'from_id', 'to_id'.
    filename     -- path of the .xls workbook to save.
    """
    wb = xlwt.Workbook()
    # Column headers are chain-invariant; build them once.
    columns = [
        '_001_LnBR', '_002_Bus_from', '_003_Bus_to', '_004_R',
        '_005_X', '_006_B_1_2', '_007_kVA', '_008_State',
    ]
    for chain in chains:
        # Sheets are named with sequential numbers ("1", "2", ...).
        ws = wb.add_sheet(str(len(wb._Workbook__worksheets) + 1))
        for col_idx, col in enumerate(columns):
            ws.write(0, col_idx, col)
        # enumerate() replaces the original O(n^2) chain.index(i) lookup,
        # which also misbehaves when a chain contains equal records.
        for item_idx, i in enumerate(chain):
            row = item_idx + 1
            ws.write(row, 0, str(i['lnbr_idx']))
            from_obj = _.find(features_all, {'_id': i['from_id']})
            to_obj = _.find(features_all, {'_id': i['to_id']})
            # Bus columns hold the stringified Mongo ids.
            ws.write(row, 1, remove_mongo_id(from_obj['_id']))
            ws.write(row, 2, remove_mongo_id(to_obj['_id']))
    wb.save(filename)
def loop(aConfig, device_serial_no, key, sensor_type, serial_no, wsurl):
    """Main publish loop for one sensor: register it, then repeatedly read
    its data and push it over a shared websocket.

    Runs forever (cooperatively, via gevent.sleep) once the sensor is
    present in gSensorControl; returns immediately when it is unknown.
    """
    global gSensorControl, gWebSocketConnection
    register(aConfig, device_serial_no, sensor_type, serial_no, wsurl)
    sensor = _.find(gSensorControl['sensors'], {'serial_no': serial_no})
    if sensor:
        while True:
            # Re-read the control record each pass so enable/interval
            # changes made elsewhere take effect immediately.
            sensor = _.find(gSensorControl['sensors'], {'serial_no': serial_no})
            interval = sensor['interval']
            # 'enable' may be a real bool or the string 'true'/'True'.
            if sensor and (sensor['enable'] is True or sensor['enable'].lower() == 'true'):
                try:
                    if gWebSocketConnection is None:
                        gWebSocketConnection = pi_create_websocket(wsurl)
                    if gWebSocketConnection and not gWebSocketConnection.connected:
                        # NOTE(review): TLS certificate verification is
                        # deliberately disabled here -- confirm intended.
                        gWebSocketConnection.connect(wsurl, sslopt={"cert_reqs": ssl.CERT_NONE, "check_hostname": False})
                    item = get_sensor_data(aConfig, key, device_serial_no, sensor_type, serial_no)
                    if gWebSocketConnection and item:
                        gWebSocketConnection.send(json.dumps(item, ensure_ascii=True))
                except Empty:
                    # No queued data this cycle -- not an error.
                    pass
                except Exception as e:
                    # NOTE(review): connect/send errors are swallowed;
                    # the finally-probe below drives reconnection.
                    pass
                finally:
                    try:
                        # Probe the connection with an empty frame; close
                        # on failure so the next pass reconnects.
                        gWebSocketConnection.send('')
                    except:
                        gWebSocketConnection.close()
            gevent.sleep(interval)
    else:
        print('cannot get sensor:%s' % serial_no)
def write_excel_lnbr(features_all, chains, filename):
    """Write one worksheet per chain listing line-branch (LnBR) records.

    features_all -- list of feature dicts (each with '_id').
    chains       -- list of chains; each chain is a list of dicts with
                    'lnbr_idx', 'from_id', 'to_id'.
    filename     -- path of the .xls workbook to save.
    """
    wb = xlwt.Workbook()
    # Column headers are chain-invariant; build them once.
    columns = [
        '_001_LnBR', '_002_Bus_from', '_003_Bus_to', '_004_R',
        '_005_X', '_006_B_1_2', '_007_kVA', '_008_State',
    ]
    for chain in chains:
        # Sheets are named with sequential numbers ("1", "2", ...).
        ws = wb.add_sheet(str(len(wb._Workbook__worksheets) + 1))
        for col_idx, col in enumerate(columns):
            ws.write(0, col_idx, col)
        # enumerate() replaces the original O(n^2) chain.index(i) lookup,
        # which also misbehaves when a chain contains equal records.
        for item_idx, i in enumerate(chain):
            row = item_idx + 1
            ws.write(row, 0, str(i['lnbr_idx']))
            from_obj = _.find(features_all, {'_id': i['from_id']})
            to_obj = _.find(features_all, {'_id': i['to_id']})
            # Bus columns hold the stringified Mongo ids.
            ws.write(row, 1, remove_mongo_id(from_obj['_id']))
            ws.write(row, 2, remove_mongo_id(to_obj['_id']))
    wb.save(filename)
def get_template_v(alist, unit, id, key):
    """Look up `key` on the child whose 'id' matches, under the entry
    whose 'unit' matches; return None when missing or falsy."""
    unit_children = _.result(_.find(alist, {'unit': unit}), 'children')
    if not unit_children:
        return None
    value = _.result(_.find(unit_children, {'id': id}), key)
    return value if value else None
def pigpio_loop(key, i2c_addr, serial_no):
    """Poll an I2C sensor via pigpio and publish readings into gSensorData.

    Only key == 'battery' (an INA226 power monitor) is handled.  Loops
    forever with gevent.sleep so other greenlets keep running.
    """
    global gSensorControl, gSensorData, aConfig
    try:
        import pigpio
    except:
        # pigpio is only present on a Raspberry Pi.
        print('pigpio import error')
        return
    if key == 'battery':
        from INA226 import INA226Device
        ina226 = None
        try:
            ina226 = INA226Device(address=i2c_addr, initdata={'max_current_excepted':12})
        except Exception as e:
            print (e)
            return
        if ina226:
            if not 'battery' in gSensorData:
                gSensorData['battery'] = {}
            interval = 1000.0  # milliseconds between reads
            while True:
                # Re-read the control record so enable toggles take effect.
                sensor = _.find(gSensorControl['sensors'], {'serial_no': serial_no})
                # 'enable' may be a bool or the string 'true'/'True'.
                if sensor and (sensor['enable'] is True or sensor['enable'].lower() == 'true'):
                    gSensorData['battery']['bus_voltage'] = ina226.read_bus_voltage()
                    gSensorData['battery']['current'] = ina226.read_current_by_shuntvolt()
                    gSensorData['battery']['power'] = ina226.read_power()
                else:
                    interval = 1000.0
                gevent.sleep(interval / 1000.0)
def process_ticker_response(ticker_response):
    """Place an ask when the BTC last price is above 0.005, a bid when it
    is below 0.001.

    ticker_response -- iterable of ticker dicts; the entry whose
    'counter' is 'BTC' drives the decision.
    """
    btc_dict = py_.find(ticker_response, lambda x: x['counter'] == 'BTC')
    if btc_dict is None:
        # No BTC ticker in the response -- nothing to do.  (The original
        # raised TypeError on the subscript below in this case.)
        return
    # Convert once instead of twice.
    last = float(btc_dict['last'])
    if last > .005:
        ask_request = trade.make_ask_order('VIVA', 'BTC', '100', '0.005')
        print(ask_request.status_code)
    if last < .001:
        bid_request = trade.make_bid_order('VIVA', 'BTC', '100', '0.001')
        print(bid_request.status_code)
def execute(self) -> typing.List[FluentFile]:
    """Compare every ru-RU/en-US file pair and return the changed files."""
    self.changed_files = []
    for pair in self.files_dict:
        relative_files = self.files_dict[pair]
        ru_rel = py_.find(relative_files, {'locale': 'ru-RU'})
        en_rel = py_.find(relative_files, {'locale': 'en-US'})
        # Pairs missing either locale are skipped outright.
        if ru_rel and en_rel:
            self.compare_files(en_rel.file, ru_rel.file)
    return self.changed_files
def process_responses(responses):
    """Fan the six batched API responses out to their handlers.

    Expected order: ticker, recent trades, open BTC trades, my pending
    trades, my trade history, my balance.
    """
    # The current BTC price feeds the pending-trades processing.
    # NOTE(review): assumes a BTC entry exists; py_.find returning None
    # would make the subscript below raise -- confirm upstream guarantees.
    btc_dict = py_.find(responses[0], lambda x: x['counter'] == 'BTC')
    current_price = float(btc_dict['last'])
    process_ticker_response(responses[0])
    process_recent_trades_response(responses[1])
    process_open_btc_trades_response(responses[2])
    process_my_pending_trades_response(responses[3], current_price)
    process_my_trade_history(responses[4])
    process_my_balance_response(responses[5])
def test13():
    """One-off import script (Python 2 source -- note the ur'' literal):
    read FTU installation records from an .xls workbook and attach each
    as a 'devices' entry on its matching feature document in MongoDB.
    """
    import re, datetime
    from pydash import py_ as _
    from pymongo import MongoClient
    from bson.objectid import ObjectId
    XLS_FILE = ur'G:\2014项目\配电网故障定位\普洱FTU导出数据\10kV线路柱上馈线终端FTU安装台账 (2).xls'
    book = xlrd.open_workbook(XLS_FILE)
    startrowidx = 1  # row 0 is the header
    startcolidx = 1
    recs = []
    ids_map = {}
    for sheet in book.sheets():
        # Only the sheet literally named 'sheet3' carries FTU data.
        if sheet.name.lower() == u'sheet3':
            ids_map['pzz'] = []
            for row in range(startrowidx, sheet.nrows):
                # Skip rows whose first cell is blank.
                if sheet.cell_value(row, 0) == '':
                    continue
                rec = {}
                # Column 13 holds the target feature's Mongo _id.
                rec['_id'] = ObjectId(str(sheet.cell_value(row, 13)))
                rec[u'device_no'] = sheet.cell_value(row, 4)
                rec[u'rf_addr'] = sheet.cell_value(row, 5)
                rec[u'phase'] = {}
                rec[u'phase'][u'a'] = sheet.cell_value(row, 6)
                rec[u'phase'][u'b'] = sheet.cell_value(row, 7)
                rec[u'phase'][u'c'] = sheet.cell_value(row, 8)
                rec[u'sim'] = sheet.cell_value(row, 9)
                rec[u'status'] = sheet.cell_value(row, 10)
                rec[u'engineer'] = sheet.cell_value(row, 11)
                tmp = sheet.cell_value(row, 12)
                # Dates are stored as text like '2014/05/01'.
                rec[u'installation_date'] = datetime.datetime.strptime(tmp, '%Y/%m/%d')
                rec[u'switch_alias'] = int(sheet.cell_value(row, 14))
                recs.append(rec)
    ids = _.pluck(recs, '_id')
    client = MongoClient('localhost', 27017)
    kmgd = client['kmgd_pe']
    collection = kmgd['features']
    res = list(collection.find({"_id":{'$in':ids}}))
    for item in res:
        _id = item['_id']
        one = _.find(recs, {'_id':_id})
        if one:
            # Move the record under the feature; its _id duplicates the
            # feature's own, so drop it first.
            del one['_id']
            one[u'type'] = u'ftu'
            item[u'properties'][u'devices'] = [one, ]
            # NOTE(review): collection.save() is removed in modern
            # pymongo; replace_one would be needed there.
            collection.save(item)
def find_ssh_key_id(client, module):
    """Return the SshPublicKeyId whose body matches the module's
    'ssh_key' parameter, or None when the user has no such key."""
    params = module.params
    description = client.describe_user(
        ServerId=params.get('server_id'),
        UserName=params.get('user_name'))
    match = py_.find(
        description["User"]["SshPublicKeys"],
        {"SshPublicKeyBody": params.get('ssh_key')})
    return None if match is None else match['SshPublicKeyId']
def create_offer(self, service, wallet):
    """Create a new lend offer for wallet['currency'].

    Picks the highest bid and lowest ask at the strategy's minimum lend
    period and derives the offer rate from them via determine_rate().
    """
    current.logger.debug("in strategy: {service}".format(service=service))
    bids = service.lend_bids(wallet['currency'])
    asks = service.lend_asks(wallet['currency'])
    # force min period for now
    # TODO: optimize this period
    min_lend_time = lambda l: l.period == self.min_period
    # Current market dynamics usually put a min-period bid on top; fall
    # back to the overall best bid otherwise (not a permanent thing).
    highest_bid = py_.find(bids, min_lend_time) or bids[0]
    # NOTE(review): no fallback here -- lowest_ask is None when no ask
    # has the min period, and .rate/.period below would then raise.
    lowest_ask = py_.find(asks, min_lend_time)
    # make offer 5% under best ask
    offer_rate = self.determine_rate(highest_bid, lowest_ask)
    # place new offer
    current.logger.debug(
        ' highest_bid:{0} lowest_ask:{1} will_offer:{2}'.format(
            highest_bid.rate, lowest_ask.rate, offer_rate))
    return service.new_offer(wallet['currency'], wallet['available'],
                             offer_rate, lowest_ask.period)
def ws_mavlink_offline(websocket):
    """Tear down a mavlink websocket client: close the socket, drop it
    from the client map and kill its listen-loop greenlet."""
    global gWebSocketsClientMap, gListenLoopList
    # Clients are keyed by the websocket object's hash (stringified).
    v = str(websocket.__hash__())
    websocket.close()
    if 'mavlink' in gWebSocketsClientMap and v in gWebSocketsClientMap['mavlink']:
        del gWebSocketsClientMap['mavlink'][v]
    # Stop and unregister the greenlet that listens for this client.
    m = _.find(gListenLoopList, {'ws_hash': v})
    if m:
        m['greenlet'].kill()
        gListenLoopList.remove(m)
def check_has_subunit(alist, line_name, unit):
    """Return the 'unitsub_<id>' keys under `unit` whose occurrence
    probability in `line_name` is positive."""
    children = _.result(_.find(alist, {'unit': unit}), 'children')
    sub_ids = ['unitsub_' + child_id for child_id in _.pluck(children, 'id')]
    return [sub_id for sub_id in sub_ids
            if get_occur_p(line_name, sub_id) > 0]
def find_chain(alist, obj):
    """Walk forward from `obj` via find_next, collecting each visited
    record from `alist` until the chain leaves the list.

    Returns the ordered list of chained records starting with `obj`.
    """
    chainlist = []
    while obj:
        chainlist.append(obj)
        # _id of the successor, or None at the end of the chain.
        # (The original also built an unused `ids` list here.)
        nxt_id = find_next(obj['_id'])
        obj = _.find(alist, {'_id': nxt_id}) if nxt_id else None
    return chainlist
def df_to_repr_jsonld(df, fmu, time_is_relative):
    """Render JSON-LD-representation of DataFrame.

    Each column becomes a sosa:ObservableProperty; each cell becomes a
    sosa:Observation with a qudt:QuantityValue result and a time:Instant
    phenomenon time.  Units come from the FMU's model description and
    are mapped to QUDT units via `unit_map`.

    NOTE(review): `time_is_relative` is accepted but never used here --
    the index is always serialized as xsd:dateTimeStamp; confirm intent.
    """
    logger.debug("df:\n{}".format(df))
    # FMU unit strings -> QUDT unit IRIs; unmapped units raise KeyError.
    unit_map = {
        "W": UNIT.W,
        "kW.h": UNIT["KiloW-HR"],
        "deg": UNIT.DEG,
    }
    # Read model description
    desc = fmpy.read_model_description(fmu)
    # Iterate over columns of dataframe
    graph = graph_bind_prefixes(rdflib.Graph())
    for label, series in df.items():
        # Set unit to '1' if it is undefined
        # https://github.com/CATIA-Systems/FMPy/blob/master/fmpy/model_description.py#L154
        model_variable = py_.find(desc.modelVariables, lambda x: x.name == label)
        if model_variable.unit is not None:
            unit = model_variable.unit
        else:
            unit = "1"
        logger.debug(f"{model_variable.name} / {unit}")
        # Define `sosa:ObservableProperty` for each column
        observable_iri = f"#{label}"
        observable_uriref = rdflib.URIRef(observable_iri)
        graph.add((observable_uriref, RDF.type, SOSA.ObservableProperty))
        for index, value in series.items():
            # Define `sosa:Observation` for each row; random ids keep
            # observations unique across calls.
            observation_id = f"#{nanoid.generate(size=8)}"
            observation_uriref = rdflib.URIRef(observation_id)
            graph.add((observation_uriref, RDF.type, SOSA.Observation))
            graph.add((observation_uriref, SOSA.observedProperty, observable_uriref))
            # Define `qudt:QuantityValue` for each value
            result_uriref = rdflib.URIRef(f"{observation_id}_result")
            graph.add((result_uriref, RDF.type, QUDT.QuantityValue))
            graph.add((result_uriref, QUDT.numericValue, rdflib.Literal(float(value))))
            graph.add((result_uriref, QUDT.unit, unit_map[unit]))
            graph.add((observation_uriref, SOSA.hasResult, result_uriref))
            # Define `time:Instant` for each index
            time_uriref = rdflib.URIRef(f"{observation_id}_time")
            time_literal = rdflib.Literal(index, datatype=XSD.dateTimeStamp)
            graph.add((time_uriref, RDF.type, TIME.Instant))
            graph.add((time_uriref, TIME.inXSDDateTimeStamp, time_literal))
            graph.add((observation_uriref, SOSA.phenomenonTime, time_uriref))
    # Round-trip through the serializer to get plain JSON objects.
    return json.loads(graph.serialize(format="application/ld+json"))
def find_next_by_node(features, collection_edges, alist=None, id=None):
    """Collect the _ids of downstream nodes that carry 'devices',
    recursing through intermediate nodes that do not.

    features         -- list of feature dicts.
    collection_edges -- Mongo collection of edges keyed by
                        'properties.start' / 'properties.end'.
    alist            -- accumulator; a fresh list is created when None.
                        (The original used a mutable default `alist=[]`,
                        which leaks accumulated ids between calls.)
    id               -- start node id (str or ObjectId).
    """
    if alist is None:
        alist = []
    if isinstance(id, str):
        id = add_mongo_id(id)
    ends = _.deep_pluck(
        list(collection_edges.find({'properties.start': id})),
        'properties.end')
    for end_id in ends:
        obj = _.find(features, {'_id': end_id})
        # has_key: this module targets Python 2.
        if obj and obj.has_key('properties'):
            if obj['properties'].has_key('devices'):
                alist.append(obj['_id'])
            else:
                alist = find_next_by_node(features, collection_edges, alist, obj['_id'])
    return alist
def write_excel_bus(features_all, chains, filename):
    """Write one worksheet per chain listing bus records.

    Column 0 is the bus id; column 1 is the bus type: 3 for the first
    bus (the source/slack bus) and 1 for every other bus, including the
    terminal bus appended after the last branch row.
    """
    wb = xlwt.Workbook()
    # Headers are chain-invariant; build them once.
    columns = [
        '_001_No', '_002_Type', '_003_MW', '_004_Mvar',
        '_005_GS', '_006_Bs', '_007_Mag', '_008_Deg',
    ]
    for chain in chains:
        # Sheets are named with sequential numbers ("1", "2", ...).
        ws = wb.add_sheet(str(len(wb._Workbook__worksheets) + 1))
        for col_idx, col in enumerate(columns):
            ws.write(0, col_idx, col)
        # enumerate() replaces the original O(n^2) chain.index(i) lookup,
        # which also misbehaves when a chain contains equal records.
        for item_idx, i in enumerate(chain):
            row = item_idx + 1
            obj = _.find(features_all, {'_id': i['from_id']})
            ws.write(row, 0, remove_mongo_id(obj['_id']))
            # First bus is the source (type 3); the rest are type 1.
            ws.write(row, 1, 3 if row == 1 else 1)
            if row == len(chain):
                # Append the final 'to' bus after the last branch row.
                obj1 = _.find(features_all, {'_id': i['to_id']})
                ws.write(row + 1, 0, remove_mongo_id(obj1['_id']))
                ws.write(row + 1, 1, 1)
    wb.save(filename)
def write_excel_bus(features_all, chains, filename):
    """Write one worksheet per chain listing bus records.

    Column 0 is the bus id; column 1 is the bus type: 3 for the first
    bus (the source/slack bus) and 1 for every other bus, including the
    terminal bus appended after the last branch row.
    """
    wb = xlwt.Workbook()
    # Headers are chain-invariant; build them once.
    columns = [
        '_001_No', '_002_Type', '_003_MW', '_004_Mvar',
        '_005_GS', '_006_Bs', '_007_Mag', '_008_Deg',
    ]
    for chain in chains:
        # Sheets are named with sequential numbers ("1", "2", ...).
        ws = wb.add_sheet(str(len(wb._Workbook__worksheets) + 1))
        for col_idx, col in enumerate(columns):
            ws.write(0, col_idx, col)
        # enumerate() replaces the original O(n^2) chain.index(i) lookup,
        # which also misbehaves when a chain contains equal records.
        for item_idx, i in enumerate(chain):
            row = item_idx + 1
            obj = _.find(features_all, {'_id': i['from_id']})
            ws.write(row, 0, remove_mongo_id(obj['_id']))
            # First bus is the source (type 3); the rest are type 1.
            ws.write(row, 1, 3 if row == 1 else 1)
            if row == len(chain):
                # Append the final 'to' bus after the last branch row.
                obj1 = _.find(features_all, {'_id': i['to_id']})
                ws.write(row + 1, 0, remove_mongo_id(obj1['_id']))
                ws.write(row + 1, 1, 1)
    wb.save(filename)
def channel_name_to_id(self, channel_name):
    """Return the id of the channel named `channel_name`.

    Raises ValueError when no channel with that name exists.
    """
    for channel in self.list_channels():
        if channel["name"] == channel_name:
            return channel["id"]
    raise ValueError(f"Channel {channel_name} not found")
def find_next_by_node(features, collection_edges, alist=None, id=None):
    """Collect the _ids of downstream nodes that carry 'devices',
    recursing through intermediate nodes that do not.

    features         -- list of feature dicts.
    collection_edges -- Mongo collection of edges keyed by
                        'properties.start' / 'properties.end'.
    alist            -- accumulator; a fresh list is created when None.
                        (The original used a mutable default `alist=[]`,
                        which leaks accumulated ids between calls.)
    id               -- start node id (str or ObjectId).
    """
    if alist is None:
        alist = []
    if isinstance(id, str):
        id = add_mongo_id(id)
    ends = _.deep_pluck(
        list(collection_edges.find({'properties.start': id})),
        'properties.end')
    for end_id in ends:
        obj = _.find(features, {'_id': end_id})
        # has_key: this module targets Python 2.
        if obj and obj.has_key('properties'):
            if obj['properties'].has_key('devices'):
                alist.append(obj['_id'])
            else:
                alist = find_next_by_node(features, collection_edges, alist, obj['_id'])
    return alist
def channel_name_to_id(name: str, client: slack.WebClient) -> str:
    """Return the slack id of the conversation named `name`.

    Searches public/private channels, MPIMs and IMs (archived excluded);
    raises ValueError when no match is found.
    """
    conversations = client.users_conversations(
        types="public_channel,private_channel,mpim,im",
        exclude_archived=True)["channels"]
    match = py_.find(conversations, lambda ch: ch["name"] == name)
    if not match:
        raise ValueError(f"Channel {name} not found")
    return match["id"]
def parse_slnsw_collection_website(self, asset_id: str, data: dict) -> dict:
    """Extract IIIF url, title and creation-date fields from a cached
    SLNSW collection-website document.

    Returns a (possibly empty) dict of field updates, or
    {'collection_url_error': ...} when the cached page recorded errors.
    """
    query = {'asset_id': asset_id}
    mongo = self.get_collection(
        collection=SLNSWCollectionWebsiteLoader.collection)
    doc = mongo.find_one(query)
    out = {}
    if doc:
        errors = py_.get(doc, 'props.pageProps.errors', None)
        if errors is not None:
            return {'collection_url_error': errors}
        props = py_.get(doc, 'props.pageProps', {})
        # Zoomify / IIIF URL; the identifier is its last path segment.
        full_iiif_url = py_.get(props, 'file.image.iiifImageUrl', None)
        if full_iiif_url is not None:
            # Guard added: the original called .split('/') on None when
            # the page carried no image URL.
            out['iiif_identifier'] = full_iiif_url.split('/')[-1]
            out['full_iiif_url'] = full_iiif_url
        # Check if we have the full title
        title = py_.get(props, 'title', '')
        if data.get('collection_title', '') != title:
            out['collection_title'] = title
            out['collection_title_expanded'] = True
        notes = py_.get(doc, 'props.pageProps', {}) and py_.get(props, 'recordData.notes', [])
        date_creation = py_.find(notes, {'type': 'dateCreation'})
        if date_creation is not None:
            date_creation = py_.get(date_creation, 'value', None)
            out['date_creation'] = date_creation
            # Extract latest year in the date_creation as option
            # if year is null
            if isinstance(date_creation, str):
                data_creation = date_creation.replace('.', '')
                bits = data_creation.split('-')
                try:
                    year = bits[-1]
                    year = year if year != '' else bits[0]
                    out['year_creation'] = int(year)
                except:
                    # Best-effort parse: unparseable years become None.
                    out['year_creation'] = None
    return out
def log_not_exist_en_files(self, en_file, ru_file_parsed, en_file_parsed):
    """Warn about every ru message that has no en counterpart in the
    parsed en file."""
    for idx, ru_message in enumerate(ru_file_parsed.body):
        # Comment nodes carry no message id -- nothing to match.
        if isinstance(ru_message, ast.ResourceComment) or isinstance(
                ru_message, ast.GroupComment) or isinstance(
                    ru_message, ast.Comment):
            continue
        # Look for an en message with the same id name.
        en_message_analog = py_.find(
            en_file_parsed.body,
            lambda en_message: self.find_duplicate_message_id_name(
                ru_message, en_message))
        if not en_message_analog:
            logging.warning(
                f'Ключ "{FluentAstAbstract.get_id_name(ru_message)}" не имеет английского аналога по пути {en_file.full_path}"'
            )
def from_yaml_element(cls, id, value, attributes, parent_id=None, raw_key=False):
    """Build a serialized Fluent message from a YAML element.

    Ensures every message ends up with 'desc' and 'suffix' attributes:
    missing ones are synthesized (desc inherits from parent_id when
    given, otherwise becomes an empty placeholder).  Returns None when
    id, value and parent_id are all empty.
    """
    if not value and not id and not parent_id:
        return None
    if not attributes:
        attributes = []
    # Synthesize a 'desc' attribute when none was provided.
    if len(list(filter(lambda attr: attr.id == 'desc', attributes))) == 0:
        if parent_id:
            # Inherit the parent's desc by reference.
            attributes.append(
                FluentAstAttribute(
                    'desc',
                    '{ ' + FluentSerializedMessage.get_key(parent_id) + '.desc' + ' }'))
        else:
            attributes.append(FluentAstAttribute('desc', '{ "" }'))
    # Synthesize an empty 'suffix' attribute when none was provided.
    if len(list(filter(lambda attr: attr.id == 'suffix', attributes))) == 0:
        attributes.append(FluentAstAttribute('suffix', '{ "" }'))
    message = f'{cls.get_key(id, raw_key)} = {cls.get_value(value, parent_id)}\n'
    if attributes and len(attributes):
        full_message = message
        for attr in attributes:
            full_message = cls.add_attr(full_message, attr.id, attr.value, raw_key=raw_key)
        # NOTE(review): desc was appended above when absent, so this
        # fallback branch looks unreachable -- confirm before removing.
        desc_attr = py_.find(attributes, lambda a: a.id == 'desc')
        if not desc_attr and parent_id:
            full_message = cls.add_attr(
                full_message, 'desc',
                '{ ' + FluentSerializedMessage.get_key(parent_id) + '.desc' + ' }')
        return full_message
    return cls.to_serialized_message(message)
def write_to_ru_files(self, ru_file, ru_file_parsed, en_file_parsed):
    """Sync the parsed ru file with the en file: copy over attributes
    missing on existing ru messages and insert en messages that have no
    ru analog, saving the ru file whenever something changed."""
    for idx, en_message in enumerate(en_file_parsed.body):
        # Comment nodes carry no message id -- nothing to sync.
        if isinstance(en_message, ast.ResourceComment) or isinstance(
                en_message, ast.GroupComment) or isinstance(
                    en_message, ast.Comment):
            continue
        # Index of the ru message with the same id, or -1.
        ru_message_analog_idx = py_.find_index(
            ru_file_parsed.body,
            lambda ru_message: self.find_duplicate_message_id_name(
                ru_message, en_message))
        have_changes = False
        # Attributes
        if getattr(en_message, 'attributes', None) and ru_message_analog_idx != -1:
            if not ru_file_parsed.body[ru_message_analog_idx].attributes:
                # ru message has no attributes at all: take the en set.
                ru_file_parsed.body[
                    ru_message_analog_idx].attributes = en_message.attributes
                have_changes = True
            else:
                # Copy only the en attributes the ru message lacks.
                for en_attr in en_message.attributes:
                    ru_attr_analog = py_.find(
                        ru_file_parsed.body[ru_message_analog_idx].
                        attributes,
                        lambda ru_attr: ru_attr.id.name == en_attr.id.name)
                    if not ru_attr_analog:
                        ru_file_parsed.body[
                            ru_message_analog_idx].attributes.append(
                                en_attr)
                        have_changes = True
        # New elements
        if ru_message_analog_idx == -1:
            ru_file_body = ru_file_parsed.body
            # Insert at the en position when it exists, else append.
            if (len(ru_file_body) >= idx + 1):
                ru_file_parsed = self.append_message(
                    ru_file_parsed, en_message, idx)
            else:
                ru_file_parsed = self.push_message(ru_file_parsed,
                                                   en_message)
            have_changes = True
        if have_changes:
            serialized = serializer.serialize(ru_file_parsed)
            self.save_and_log_file(ru_file, serialized, en_message)
def find_sftp_server(client, server_name):
    """Return the describe_server() result for the AWS Transfer server
    tagged with `server_name`, or None when no server matches.

    There is no find-by-name API, so list every server id, describe each
    one, and match on the name tag.  Callers check the result for None.
    """
    # Load all of the server IDs in the account
    all_server_ids = py_.map(client.list_servers()['Servers'], 'ServerId')
    # One describe_server call per id -- O(servers) API calls.
    all_servers = py_.map_(
        all_server_ids,
        (lambda server_id: client.describe_server(ServerId=server_id)))
    # Match on the Tags entry {SERVER_NAME_KEY: server_name}.
    host = py_.find(
        all_servers,
        {'Server': {
            'Tags': [{
                'Key': SERVER_NAME_KEY,
                'Value': server_name
            }]
        }})
    return host
def load_users_from_user_group(client: slack.WebClient,
                               user_group: str) -> List[User]:
    """
    List users in a given user group handle.

    Resolves the group by its handle, then fetches each member's info
    (one API call per user, with a tqdm progress bar).
    """
    user_groups = client.usergroups_list(include_users=True)["usergroups"]
    # NOTE(review): if the handle does not exist, `group` is None and
    # the subscript below raises TypeError -- confirm intended.
    group = py_.find(user_groups, lambda it: it["handle"] == user_group)
    user_ids = group["users"]
    users = []
    for i in tqdm(user_ids, desc=f"Collecting user info for group {user_group}"):
        u = client.users_info(user=i)["user"]
        users.append(User(u["id"], u["real_name"]))
    return users
def find_chain(features, collection_edges, alist=None, id=None):
    """Build the ordered branch list from node `id`: one record per hop
    to each downstream device-bearing node, recursing depth-first.

    alist is the accumulator; a fresh list is created when None.  (The
    original used a mutable default `alist=[]`, which leaks accumulated
    records between calls, and computed two unused find_index locals.)
    """
    if alist is None:
        alist = []
    _ids = find_next_by_node(features, collection_edges, [], id)
    for _id in _ids:
        obj = _.find(features, {'_id': _id})
        if obj:
            # has_key: this module targets Python 2.
            if obj.has_key('properties') and obj['properties'].has_key('devices'):
                alist.append({
                    'lnbr_idx': len(alist) + 1,
                    'from_id': add_mongo_id(id),
                    'to_id': obj['_id'],
                })
            alist = find_chain(features, collection_edges, alist, obj['_id'])
    return alist
def sortlist(collection_edges, alist):
    """Order `alist` (dicts with '_id') along the edge graph.

    Walks backwards from the first element to find the chain head, then
    forwards from the head to emit the chain in order.  Returns [] when
    the head's record is not present in `alist`.
    """
    def find_prev(id):
        # _id of the edge-predecessor of `id`, or None.
        ret = None
        one = collection_edges.find_one({'properties.end': id})
        if one:
            ret = one['properties']['start']
        return ret

    def find_next(id):
        # _id of the edge-successor of `id`, or None.
        ret = None
        one = collection_edges.find_one({'properties.start': id})
        if one:
            ret = one['properties']['end']
        return ret

    def find_first(alist):
        # Walk backwards while the predecessor stays inside alist; the
        # last in-list id seen is the head.
        ids = _.pluck(alist, '_id')
        id = alist[0]['_id']
        prev_id = None
        while id and id in ids:
            prev_id = id
            id = find_prev(prev_id)
        return prev_id

    def find_chain(alist, obj):
        # Walk forwards from obj collecting records until the successor
        # leaves alist (or there is none).
        ids = _.pluck(alist, '_id')
        chainlist = []
        while obj:
            chainlist.append(obj)
            nst_id = find_next(obj['_id'])
            if nst_id:
                obj = _.find(alist, {'_id': nst_id})
            else:
                obj = None
        return chainlist

    first_id = find_first(alist)
    first = _.find(alist, {'_id': first_id})
    chainlist = []
    if first:
        chainlist = find_chain(alist, first)
    return chainlist
def rtimulib_loop(serial_no):
    """Poll a 10-DOF IMU + pressure sensor via RTIMULib and publish the
    readings into gSensorData['DOF10'].  Loops forever with gevent.sleep.
    """
    global gSensorControl, gSensorData
    try:
        import RTIMU
    except:
        # RTIMULib is only present on the target device.
        print('RTIMU import error')
        return
    SETTINGS_FILE = "RTIMU_CONFIG"
    print("Using settings file [%s.ini]" % SETTINGS_FILE)
    if not os.path.exists(SETTINGS_FILE + ".ini"):
        # RTIMU.Settings creates the .ini on first use.
        print("Settings file does not exist, will be created")
    setting = RTIMU.Settings(SETTINGS_FILE)
    imu = RTIMU.RTIMU(setting)
    pressure = RTIMU.RTPressure(setting)
    print("IMU Name: " + imu.IMUName())
    print("Pressure Name: " + pressure.pressureName())
    if (not imu.IMUInit()):
        print("IMU Init Failed")
        return
    else:
        print("IMU Init Succeeded");
        imu.setSlerpPower(0.02)
        imu.setGyroEnable(True)
        imu.setAccelEnable(True)
        imu.setCompassEnable(True)
    if (not pressure.pressureInit()):
        print("Pressure sensor Init Failed")
        return None
    # Poll interval in milliseconds, as recommended by the driver.
    interval = float(imu.IMUGetPollInterval())
    if not 'DOF10' in gSensorData:
        gSensorData['DOF10'] = {}
    while True:
        # Re-read the control record so enable toggles take effect.
        sensor = _.find(gSensorControl['sensors'], {'serial_no': serial_no})
        # 'enable' may be a bool or the string 'true'/'True'.
        if sensor and (sensor['enable'] is True or sensor['enable'].lower() == 'true'):
            if imu.IMURead():
                gSensorData['DOF10'] = imu.getIMUData()
                # Append pressure/temperature (and their validity flags)
                # to the IMU data dict.
                (gSensorData['DOF10']['pressureValid'],
                 gSensorData['DOF10']['pressure'],
                 gSensorData['DOF10']['temperatureValid'],
                 gSensorData['DOF10']['temperature']) = pressure.pressureRead()
        else:
            # Disabled: back off to a slow 1s poll.
            interval = 1000.0
        gevent.sleep(interval / 1000.0)
def report(diff) -> str:
    """Render a per-table change report: a plain change count for
    'basic' diffs, a tabulated per-group breakdown for 'grouped' ones."""
    data = []
    for table_diff in diff["data"]:
        # The last element tags the diff style.
        if table_diff[-1] == "basic":
            data.append([table_diff[0], f"Changes: {table_diff[1]}"])
        elif table_diff[-1] == "grouped":
            table_name = table_diff[0]
            # Group-by column names come from the table's config entry.
            # NOTE(review): py_.find returns None when the table is
            # missing from diff["config"]; the subscript would raise.
            headers = py_.find(
                diff["config"], lambda x: x["table"] == table_name)["groupby"]
            data.append([
                table_name,
                tabulate([[*row[0], row[1]] for row in table_diff[1]],
                         headers=[*headers, "changes"])
            ])
    return tpl_number_of_rows.render(data=data)
def find_chain(features, collection_edges, alist=None, id=None):
    """Build the ordered branch list from node `id`: one record per hop
    to each downstream device-bearing node, recursing depth-first.

    alist is the accumulator; a fresh list is created when None.  (The
    original used a mutable default `alist=[]`, which leaks accumulated
    records between calls, and computed two unused find_index locals.)
    """
    if alist is None:
        alist = []
    _ids = find_next_by_node(features, collection_edges, [], id)
    for _id in _ids:
        obj = _.find(features, {'_id': _id})
        if obj:
            # has_key: this module targets Python 2.
            if obj.has_key('properties') and obj['properties'].has_key('devices'):
                alist.append({
                    'lnbr_idx': len(alist) + 1,
                    'from_id': add_mongo_id(id),
                    'to_id': obj['_id'],
                })
            alist = find_chain(features, collection_edges, alist, obj['_id'])
    return alist
def list_users_from_usergroup(client: slack.WebClient,
                              usergroup: str) -> List[SlackUser]:
    """
    List users in a given usergroup handle.

    Resolves the group by handle, then fetches each member's profile
    (one users_info API call per member).
    """
    usergroups = client.usergroups_list(include_users=True)["usergroups"]
    # NOTE(review): if the handle does not exist, `group` is None and
    # the subscript below raises TypeError -- confirm intended.
    group = py_.find(usergroups, lambda it: it["handle"] == usergroup)
    user_ids = group["users"]
    users = []
    for i in user_ids:
        u = client.users_info(user=i)["user"]
        # email may be absent from a profile, hence .get().
        users.append(
            SlackUser(u["id"],
                      u["profile"]["real_name"],
                      email=u["profile"].get("email")))
    return users
def test():
    """One-off conversion script: read scoring rules from the first
    sheet of XLSPATH, group them by unit, and dump the result as JSON
    (utf-8-sig) to d:\\aaa.json.
    """
    ret = []
    book = xlrd.open_workbook(XLSPATH)
    sheet = book.sheet_by_index(0)
    startrowidx = 1  # row 0 is the header
    idx = 1  # running number used for the generated strategy label
    units = set()  # units already added to ret
    for i in range(startrowidx, sheet.nrows):
        id = sheet.cell_value(i, 1).strip().lower()
        unit = sheet.cell_value(i, 2).strip().lower()
        cat = sheet.cell_value(i, 4).strip()
        name = sheet.cell_value(i, 5).strip()
        level = sheet.cell_value(i, 6).strip()
        base_score = int(sheet.cell_value(i, 7))
        weight = int(sheet.cell_value(i, 8))
        total_score = int(sheet.cell_value(i, 9))
        # Occurrence probabilities for severity classes I-IV.
        p0_I = float(sheet.cell_value(i, 10))
        p0_II = float(sheet.cell_value(i, 11))
        p0_III = float(sheet.cell_value(i, 12))
        p0_IV = float(sheet.cell_value(i, 13))
        according = sheet.cell_value(i, 14).strip()
        # First time we see a unit, open a group for it.
        if not unit in units:
            units.add(unit)
            ret.append({'unit':unit, 'children':[]})
        uuu = _.find(ret, {'unit':unit})
        if uuu:
            o = {}
            o['id'] = id
            o['cat'] = cat
            o['name'] = name
            o['level'] = level
            o['base_score'] = base_score
            o['weight'] = weight
            o['total_score'] = total_score
            o['according'] = according
            # Auto-numbered strategy label ("策略N").
            o['strategy'] = u'策略%d' % idx
            o['desc'] = ''
            o['p0'] = {'I':p0_I, 'II':p0_II, 'III':p0_III, 'IV':p0_IV,}
            uuu['children'].append(o)
            idx += 1
    # utf-8-sig so Windows tools detect the encoding via the BOM.
    with codecs.open(ur'd:\aaa.json', 'w', 'utf-8-sig' ) as f:
        f.write(json.dumps(ret, ensure_ascii=False, indent=4))
def sortlist(collection_edges, alist):
    """Order `alist` (dicts with '_id') along the edge graph.

    Walks backwards from the first element to find the chain head, then
    forwards from the head to emit the chain in order.  Returns [] when
    the head's record is not present in `alist`.
    """
    def find_prev(id):
        # _id of the edge-predecessor of `id`, or None.
        ret = None
        one = collection_edges.find_one({'properties.end':id})
        if one:
            ret = one['properties']['start']
        return ret

    def find_next(id):
        # _id of the edge-successor of `id`, or None.
        ret = None
        one = collection_edges.find_one({'properties.start':id})
        if one:
            ret = one['properties']['end']
        return ret

    def find_first(alist):
        # Walk backwards while the predecessor stays inside alist; the
        # last in-list id seen is the head.
        ids = _.pluck(alist, '_id')
        id = alist[0]['_id']
        prev_id = None
        while id and id in ids:
            prev_id = id
            id = find_prev(prev_id)
        return prev_id

    def find_chain(alist, obj):
        # Walk forwards from obj collecting records until the successor
        # leaves alist (or there is none).
        ids = _.pluck(alist, '_id')
        chainlist = []
        while obj:
            chainlist.append(obj)
            nst_id = find_next(obj['_id'])
            if nst_id:
                obj = _.find(alist, {'_id': nst_id})
            else:
                obj = None
        return chainlist

    first_id = find_first(alist)
    first = _.find(alist, {'_id': first_id})
    chainlist = []
    if first:
        chainlist = find_chain(alist, first)
    return chainlist
def df_to_repr_json(df, fmu, time_is_relative):
    """Render JSON-representation of DataFrame.

    One object per column: {'label', 'unit', 'timeseries'}, where the
    unit comes from the FMU's model description ('1' when undeclared).
    When time_is_relative is False, each point also gets an ISO
    'datetime' and a millisecond-epoch 'timestamp'.
    """
    logger.trace("df:\n{}".format(df))
    # Read model description
    desc = fmpy.read_model_description(fmu)
    # Transform columns of dataframe to JSON-object
    data = []
    for cname in df.columns:
        # Find unit of quantity
        model_variable = py_.find(desc.modelVariables, lambda x: x.name == cname)
        if model_variable.unit is not None:
            unit = model_variable.unit
        else:
            unit = "1"
        # Transform dataframe to timeseries-object.  The string
        # replaces rename the 'table'-orient keys: index 'time' ->
        # 'timestamp' and the column name -> 'value'.
        ts_value_objects = json.loads(
            df[cname]
            .to_json(orient="table")
            .replace("time", "timestamp")
            .replace(cname, "value")
        )["data"]
        if time_is_relative is False:
            # Absolute time: add ISO datetime and epoch-ms timestamp.
            for x in ts_value_objects:
                x["datetime"] = pendulum.parse(x["timestamp"]).isoformat()
                x["timestamp"] = int(pendulum.parse(x["timestamp"]).format("x"))
        # Join label, unit and data
        data.append(
            {
                "label": cname,
                "unit": unit,
                "timeseries": ts_value_objects,
            }
        )
    # Return JSON-representation of entire dataframe _without_ additional content
    return data
def report(diff) -> str:
    """Render a per-table removed/added report: a summary sentence for
    'basic' diffs, a tabulated per-group breakdown for 'grouped' ones."""
    data = []
    for table_diff in diff["data"]:
        # The last element tags the diff style.
        if table_diff[-1] == "basic":
            data.append([
                table_diff[0],
                f"{table_diff[1]['removed']} rows removed, {table_diff[1]['added']} added."
            ])
        elif table_diff[-1] == "grouped":
            table_name = table_diff[0]
            # Group-by column names come from the table's config entry.
            # NOTE(review): py_.find returns None when the table is
            # missing from diff["config"]; the subscript would raise.
            headers = py_.find(
                diff["config"], lambda x: x["table"] == table_name)["groupby"]
            data.append([
                table_name,
                tabulate([[*row[0], row[1]["removed"], row[1]["added"]]
                          for row in table_diff[1]],
                         headers=[*headers, "rows removed", "rows added"])
            ])
    return tpl_number_of_rows_hash.render(data=data)
def parse_google_calendar(email_id: str, start_time: datetime.datetime,
                          end_time: datetime.datetime) -> List[CalendarEvent]:
    """
    Parse google calendar and return events. End time is not inclusive.

    For events without attendees (personal events) the response status
    is treated as "accepted"; otherwise it is taken from the attendee
    matching email_id (None when unanswered or the attendee is absent).
    """
    cal = GoogleCalendar(email_id)
    events = []
    for ev in cal[start_time:end_time]:
        name = ev.summary
        # Distinct names for the event's own times: the original rebound
        # the start_time/end_time parameters inside the loop.
        ev_start = ev.start
        ev_end = ev.end
        if not ev.attendees:
            # This is likely a personal event
            response_status: Optional[str] = "accepted"
            attendees = [email_id]
        else:
            attendee = py_.find(ev.attendees, lambda at: at.email == email_id)
            try:
                if attendee.response_status == "needsAction":
                    response_status = None
                else:
                    response_status = attendee.response_status
            except AttributeError:
                # email_id is not among the attendees (attendee is None).
                response_status = None
            attendees = [a.email for a in ev.attendees]
        events.append(CalendarEvent(
            name=name,
            start_time=ev_start,
            end_time=ev_end,
            attendees=attendees,
            response_status=response_status
        ))
    return events
def handle_websocket_data(environ, session, user_id):
    """Serve one mavlink websocket client: register it, ensure a receive
    loop greenlet exists, then keep the connection alive with periodic
    empty frames until the socket closes."""
    global gListenLoopList
    ws = get_websocket(environ)
    ws_mavlink_online(ws, user_id)
    # Clients are keyed by the websocket object's hash (stringified).
    v = str(ws.__hash__())
    m = _.find(gListenLoopList, {'ws_hash': v})
    if m is None:
        # First time we see this socket: spawn its receive loop.
        g = gevent.spawn(create_ws_recv_loop, aConfig, ws)
        gListenLoopList.append({'ws_hash':v, 'greenlet':g})
    app = aConfig['gConfig']['wsgi']['application']
    # Keep-alive period from config; fall back to 10s on any error.
    interval = 10
    try:
        interval = float(aConfig['gConfig']['applications'][app]['websocket']['interval_poll_mavlink'])
    except:
        interval = 10
    while ws and not ws.closed:
        try:
            # Empty binary frame as a liveness probe.
            ws.send('', binary=True)
        except:
            # Send failed: client is gone; unregister and stop.
            ws_mavlink_offline(ws)
            break
        gevent.sleep(interval)
def handle_geo_within(session, aConfig, querydict, user_id):
    """Handle a $geoWithin feature query.

    Expects querydict['geometry'] (GeoJSON object or JSON string) and
    optionally 'limit' (int) and 'webgis_type' (str or list) filters.
    Returns (statuscode, mimetype, body) with the matching features, or
    an error result when the geometry is missing/invalid.
    """
    statuscode, mimetype, body = 200, 'text/json', '{}'
    app = aConfig['gConfig']['wsgi']['application']
    db, collection = get_collection(aConfig, app, 'main', 'collection_features')
    # limit=0 means "no limit" for Mongo's cursor.limit().
    limit = 0
    if 'limit' in querydict and isinstance(querydict['limit'], int):
        limit = querydict['limit']
    if 'geometry' in querydict:
        geojsonobj = querydict['geometry']
        # Geometry may arrive as a JSON string; parse it leniently.
        if isinstance(geojsonobj, str) :
            try:
                geojsonobj = json.loads(geojsonobj)
            except:
                geojsonobj = {}
        if 'type'in geojsonobj:
            cond = {'geometry2d':{'$geoWithin':{'$geometry':geojsonobj}}}
            # Optional filter on the feature's webgis_type.
            if 'webgis_type' in querydict:
                if isinstance(querydict['webgis_type'], str) :
                    cond['properties.webgis_type'] = querydict['webgis_type']
                if isinstance(querydict['webgis_type'], list):
                    cond['properties.webgis_type'] = {'$in': querydict['webgis_type']}
            arr = list(collection.find(cond).limit(limit))
            body = json.dumps(remove_mongo_id(arr), ensure_ascii=True, indent=4)
        else:
            body = json.dumps({'result': 'geometry_geojson_required'}, ensure_ascii=True, indent=4)
    else:
        body = json.dumps({'result': 'geometry_required'}, ensure_ascii=True, indent=4)
    return statuscode, mimetype, body
    # NOTE(review): everything below this return is UNREACHABLE dead
    # code -- it appears to be pasted from a sensor enable/disable
    # handler (it references `enable`, which is undefined in this
    # function) and should be removed or moved to its own handler.
    statuscode, mimetype, body = 200, 'text/json', '{}'
    if check_permission(user_id, 'enable_sensor'):
        device_id = None
        db, collection = get_collection(aConfig, app, 'main', 'collection_device')
        if 'tower_id' in querydict:
            device_id = get_device_from_tower(session, aConfig, querydict['tower_id'])
        if 'device_id' in querydict:
            device_id = querydict['device_id']
        #add for db
        device = None
        if device_id:
            cond = {'_id': device_id}
            device = collection.find_one(add_mongo_id(cond))
            if 'sensor_id' in querydict and len(querydict['sensor_id']):
                sensor_id = querydict['sensor_id']
                gevent.spawn(savedb, collection, querydict, device, device_id, sensor_id, enable)
            else:
                gevent.spawn(savedb, collection, querydict, device, device_id, None, enable)
        #add for gSensorControlList
        device = None
        if device_id:
            device = _.find(gSensorControlList, {'_id': device_id})
            device_index = _.find_index(gSensorControlList, {'_id': device_id})
            if device and 'sensors' in device:
                pass
            else:
                device = {}
                device['_id'] = device_id
                device['sensors'] = []
            if 'sensor_id' in querydict and len(querydict['sensor_id']):
                sensor_id = querydict['sensor_id']
                sensor = _.find(device['sensors'], {'_id': sensor_id})
                if sensor:
                    index = _.find_index(device['sensors'], {'_id': sensor_id})
                    sensor['enable'] = enable
                    if 'interval' in querydict and (isinstance(querydict['interval'], int) or isinstance(querydict['interval'], float)):
                        sensor['interval'] = querydict['interval']
                    device['sensors'][index] = sensor
                else:
                    sensor = {}
                    sensor['_id'] = sensor_id
                    sensor['enable'] = enable
                    if 'interval' in querydict and (isinstance(querydict['interval'], int) or isinstance(querydict['interval'], float)):
                        sensor['interval'] = querydict['interval']
                    device['sensors'].append(sensor)
            else:
                # No sensor_id: apply enable/interval to every sensor.
                for i in range(len(device['sensors'])):
                    device['sensors'][i]['enable'] = enable
                    if 'interval' in querydict and (isinstance(querydict['interval'], int) or isinstance(querydict['interval'], float)):
                        device['sensors'][i]['interval'] = querydict['interval']
            if device_index < 0:
                gSensorControlList.append(device)
            else:
                gSensorControlList[device_index] = device
        else:
            body = json.dumps({'result': 'cannot_find_device_id'}, ensure_ascii=True, indent=4)
    else:
        body = json.dumps({'result':'permission_deny'}, ensure_ascii=True, indent=4)
    return statuscode, mimetype, body
def test_algorithm():
    """Trace electrical feeder chains out of MongoDB and export them to xlwt
    spreadsheets (LnBR branch sheets + Bus sheets) for two feeders.

    Fixes vs. the original: Python-2-only ``dict.has_key()`` replaced with
    ``in`` (has_key was removed in Python 3), mutable default arguments
    removed, O(n^2) ``list.index()`` row/column lookups replaced with
    ``enumerate()``, unused locals dropped, and the duplicated per-feeder
    scripting factored into ``build_chains()``.
    """
    def find_next_by_node(features, collection_edges, alist=None, id=None):
        # Depth-first walk along edges starting at `id`, collecting the _ids
        # of the nearest downstream nodes that carry 'devices'.
        if alist is None:
            alist = []
        if isinstance(id, str):
            id = add_mongo_id(id)
        ends = _.deep_pluck(list(collection_edges.find({'properties.start': id})), 'properties.end')
        for end_id in ends:
            obj = _.find(features, {'_id': end_id})
            if obj and 'properties' in obj:
                if 'devices' in obj['properties']:
                    alist.append(obj['_id'])
                else:
                    # Pass-through node: keep walking downstream.
                    alist = find_next_by_node(features, collection_edges, alist, obj['_id'])
        return alist

    def find_chain(features, collection_edges, alist=None, id=None):
        # Recursively build the list of branch links (from_id -> to_id)
        # starting at node `id`; 'lnbr_idx' numbers the branches from 1.
        if alist is None:
            alist = []
        _ids = find_next_by_node(features, collection_edges, [], id)
        for _id in _ids:
            obj = _.find(features, {'_id': _id})
            if obj:
                if 'properties' in obj and 'devices' in obj['properties']:
                    alist.append({
                        'lnbr_idx': len(alist) + 1,
                        'from_id': add_mongo_id(id),
                        'to_id': obj['_id'],
                    })
                    alist = find_chain(features, collection_edges, alist, obj['_id'])
        return alist

    def find_prev(collection_edges, id):
        # _id of the node immediately upstream of `id`, or None.
        ret = None
        one = collection_edges.find_one({'properties.end': id})
        if one:
            ret = one['properties']['start']
        return ret

    def find_next(collection_edges, id):
        # _id of the node immediately downstream of `id`, or None.
        ret = None
        one = collection_edges.find_one({'properties.start': id})
        if one:
            ret = one['properties']['end']
        return ret

    def find_first(collection_edges, alist):
        # Walk upstream from an arbitrary feature until we leave `alist`;
        # returns the head node's _id (the feeder's first bus).
        ids = _.pluck(alist, '_id')
        id = alist[0]['_id']
        prev_id = None
        while id and id in ids:
            prev_id = id
            id = find_prev(collection_edges, prev_id)
        return prev_id

    def write_excel_lnbr(features_all, chains, filename):
        # One sheet per chain; each row is one branch (LnBR) with its from/to
        # bus ids. The R/X/B/kVA/State columns are intentionally left blank.
        wb = xlwt.Workbook()
        for chain in chains:
            ws = wb.add_sheet(str(len(wb._Workbook__worksheets) + 1))
            columns = [
                '_001_LnBR', '_002_Bus_from', '_003_Bus_to', '_004_R',
                '_005_X', '_006_B_1_2', '_007_kVA', '_008_State',
            ]
            for col_idx, col in enumerate(columns):
                ws.write(0, col_idx, col)
            for row, link in enumerate(chain, start=1):
                ws.write(row, 0, str(link['lnbr_idx']))
                from_obj = _.find(features_all, {'_id': link['from_id']})
                to_obj = _.find(features_all, {'_id': link['to_id']})
                ws.write(row, 1, remove_mongo_id(from_obj['_id']))
                ws.write(row, 2, remove_mongo_id(to_obj['_id']))
        wb.save(filename)

    def write_excel_bus(features_all, chains, filename):
        # One sheet per chain; each row is one bus. Type 3 marks the slack
        # (first) bus, type 1 the rest; the chain's final to-bus gets an
        # extra row of its own.
        wb = xlwt.Workbook()
        for chain in chains:
            ws = wb.add_sheet(str(len(wb._Workbook__worksheets) + 1))
            columns = [
                '_001_No', '_002_Type', '_003_MW', '_004_Mvar',
                '_005_GS', '_006_Bs', '_007_Mag', '_008_Deg',
            ]
            for col_idx, col in enumerate(columns):
                ws.write(0, col_idx, col)
            for row, link in enumerate(chain, start=1):
                obj = _.find(features_all, {'_id': link['from_id']})
                ws.write(row, 0, remove_mongo_id(obj['_id']))
                ws.write(row, 1, 3 if row == 1 else 1)
                if row == len(chain):
                    obj1 = _.find(features_all, {'_id': link['to_id']})
                    ws.write(row + 1, 0, remove_mongo_id(obj1['_id']))
                    ws.write(row + 1, 1, 1)
        wb.save(filename)

    client = MongoClient('localhost', 27017)
    db = client['kmgd_pe']
    collection_network = db['network']
    collection_fea = db['features']
    collection_edges = db['edges']

    def build_chains(prefix):
        # One chain per sub-line whose pinyin code starts with `prefix`
        # (the trunk line named exactly `prefix` is excluded).
        line_ids = _.pluck(list(collection_network.find({'$and': [
            {'properties.py': {'$regex': '^%s.*$' % prefix}},
            {'properties.py': {'$not': re.compile('^%s$' % prefix)}},
        ]})), '_id')
        chains = []
        for line_id in line_ids:
            line = collection_network.find_one({'_id': line_id})
            if line and 'nodes' in line['properties']:
                features = list(collection_fea.find({'_id': {'$in': add_mongo_id(line['properties']['nodes'])}}))
                first_id = find_first(collection_edges, features)
                if first_id:
                    first = _.find(features, {'_id': first_id})
                    if first:
                        chain = find_chain(features, collection_edges, [], first_id)
                        print(first['properties']['name'])
                        print(len(chain))
                        chains.append(chain)
        return chains

    # Pingzhangzhai feeder (坪掌寨线)
    ids0 = collection_network.find_one({'_id': add_mongo_id('570ce0c1ca49c8085832061a')})['properties']['nodes']
    features_all = list(collection_fea.find({'_id': {'$in': ids0}}))
    chains = build_chains('pzzx')
    write_excel_lnbr(features_all, chains, 'data_lnbr_pzz.xls')
    write_excel_bus(features_all, chains, 'data_bus_pzz.xls')

    # Jiufangyakou feeder (酒房丫口线)
    ids0 = collection_network.find_one({'_id': add_mongo_id('570ce0c1ca49c80858320619')})['properties']['nodes']
    features_all = list(collection_fea.find({'_id': {'$in': ids0}}))
    chains = build_chains('jfykx')
    write_excel_lnbr(features_all, chains, 'data_lnbr_jfyk.xls')
    write_excel_bus(features_all, chains, 'data_bus_jfyk.xls')
def process_my_balance_response(my_balance_response): btc_dict = py_.find(my_balance_response, lambda x: x['currency'] == 'BTC') viva_dict = py_.find(my_balance_response, lambda x: x['currency'] == 'VIVA') print("My BTC balance: " + str(btc_dict) + "\nMy VIVA balance: " + str(viva_dict))
def pi_sensor_queue_send_loop(aConfig):
    # Greenlet body: forever pumps items from the external-sensor queue
    # (gQueueSensor) to the remote websocket, registering each new serial_no
    # in the global gSensorControl registry along the way.
    global gQueueSensor, gWebSocketConnection, gSensorControl
    app = aConfig['gConfig']['wsgi']['application']
    # Only run when the app config declares both a sensor queue and a websocket.
    if 'queue' in aConfig['gConfig']['applications'][app] \
            and 'sensor' in aConfig['gConfig']['applications'][app]['queue'] \
            and 'websocket' in aConfig['gConfig']['applications'][app]:
        sensor = aConfig['gConfig']['applications'][app]['queue']['sensor']
        interval = float(sensor['queue_consume_interval'])
        websocketcfg = aConfig['gConfig']['applications'][app]['websocket']
        wsurl = '%s://%s:%s%s' % (websocketcfg['remote_protocol'], websocketcfg['remote_host'],
                                  websocketcfg['remote_port'], websocketcfg['remote_base'])
        device_serial_no = None
        if 'device_info' in aConfig['gConfig']['applications'][app] \
                and 'serial_no' in aConfig['gConfig']['applications'][app]['device_info'] \
                and len(aConfig['gConfig']['applications'][app]['device_info']['serial_no']):
            device_serial_no = aConfig['gConfig']['applications'][app]['device_info']['serial_no']
        while True:
            try:
                # (Re)establish the websocket lazily.
                if gWebSocketConnection is None:
                    gWebSocketConnection = pi_create_websocket(wsurl)
                if gWebSocketConnection and not gWebSocketConnection.connected:
                    # NOTE(review): TLS verification is disabled here —
                    # acceptable only on a trusted network; confirm intended.
                    gWebSocketConnection.connect(wsurl, sslopt={"cert_reqs": ssl.CERT_NONE, "check_hostname": False})
                if gQueueSensor and gWebSocketConnection:
                    item = {}
                    if not gQueueSensor.empty():
                        item = gQueueSensor.get()
                    if 'serial_no' in item:
                        # Lazily create the control registry on first item.
                        if gSensorControl is None:
                            gSensorControl = {}
                            if device_serial_no and not 'device_serial_no' in gSensorControl:
                                gSensorControl['device_serial_no'] = device_serial_no
                            gSensorControl['sensors'] = []
                        sensor = _.find(gSensorControl['sensors'], {'serial_no': item['serial_no']})
                        sensor_index = _.find_index(gSensorControl['sensors'], {'serial_no': item['serial_no']})
                        if sensor is None:
                            # First time we see this serial_no: register defaults.
                            sensor = {}
                            sensor['serial_no'] = item['serial_no']
                            sensor['type'] = item['type']
                            sensor['enable'] = True
                            sensor['is_internal'] = False
                            sensor['interval'] = 1.0
                            gSensorControl['sensors'].append(sensor)
                        else:
                            # Merge any control fields carried by the queued item.
                            if 'enable' in item:
                                sensor['enable'] = item['enable']
                            if 'is_internal' in item:
                                sensor['is_internal'] = item['is_internal']
                            if 'interval' in item:
                                sensor['interval'] = item['interval']
                            if 'type' in item:
                                sensor['type'] = item['type']
                            gSensorControl['sensors'][sensor_index] = sensor
                        # NOTE(review): when 'enable' is False (bool) the
                        # .lower() fallback raises AttributeError, silently
                        # swallowed by the broad except below — confirm
                        # 'enable' is always bool-or-str by design.
                        if sensor['enable'] is True or sensor['enable'].lower() == 'true':
                            item['device_serial_no'] = device_serial_no
                            gWebSocketConnection.send(json.dumps(item, ensure_ascii=True))
            except Empty:
                pass
            except Exception as e:
                pass
            finally:
                # Heartbeat: send an empty frame to probe the connection;
                # on failure close it so the next iteration reconnects.
                try:
                    gWebSocketConnection.send('')
                except:
                    gWebSocketConnection.close()
            gevent.sleep(interval)
def pi_internal_sensor_queue_send_loop(aConfig):
    # Spawns greenlets that sample on-board ("internal") sensors — the 10-DOF
    # IMU (RTIMULib) and the INA226 battery monitor — and stream their latest
    # readings to the remote websocket per the configured interval.
    global gQueueSensor, gWebSocketConnection, gSensorControl, gSensorData

    def computeHeight(pressure):
        # Barometric altitude in metres from pressure in hPa
        # (international barometric formula, 1013.25 hPa reference).
        return 44330.8 * (1 - pow(pressure / 1013.25, 0.190263))

    def get_sensor_data(aConfig, key, device_serial_no, sensor_type, serial_no):
        # Snapshot the latest reading for `key` out of gSensorData into a
        # websocket-ready message dict; None when nothing has been sampled yet.
        global gSensorData
        ret = None
        if key == 'DOF10':
            if key in gSensorData and len(list(gSensorData[key].keys())):
                # print(gSensorData['DOF10'])
                ret = {
                    'type': sensor_type,
                    'device_serial_no': device_serial_no,
                    'serial_no': serial_no,
                    'timestamp': datetime.datetime.now(),
                    'value': {
                        'temp': float('{0:.1f}'.format(gSensorData['DOF10']['temperature'])),
                        # fusionPose is roll/pitch/yaw radians per RTIMULib order.
                        'ypr': {
                            'yaw': float('{0:.5f}'.format(gSensorData['DOF10']['fusionPose'][2])),
                            'pitch': float('{0:.5f}'.format(gSensorData['DOF10']['fusionPose'][1])),
                            'roll': float('{0:.5f}'.format(gSensorData['DOF10']['fusionPose'][0])),
                        },
                        'quat': {
                            'w': float('{0:.5f}'.format(gSensorData['DOF10']['fusionQPose'][0])),
                            'x': float('{0:.5f}'.format(gSensorData['DOF10']['fusionQPose'][1])),
                            'y': float('{0:.5f}'.format(gSensorData['DOF10']['fusionQPose'][2])),
                            'z': float('{0:.5f}'.format(gSensorData['DOF10']['fusionQPose'][3])),
                        },
                        'height': float('{0:.1f}'.format(computeHeight(gSensorData['DOF10']['pressure']))),
                        'pressure': float('{0:.1f}'.format(gSensorData['DOF10']['pressure'])),
                    },
                    'op': OPERATOR['OP_TRANSFER'],
                }
        elif key.lower() == 'battery':
            if key in gSensorData and len(list(gSensorData[key].keys())):
                ret = {
                    'type': sensor_type,
                    'device_serial_no': device_serial_no,
                    'serial_no': serial_no,
                    'timestamp': datetime.datetime.now(),
                    'value': {
                        'bus_voltage': float('{0:.2f}'.format(gSensorData['battery']['bus_voltage'])),
                        'current': float('{0:.2f}'.format(gSensorData['battery']['current'])),
                        'power': float('{0:.2f}'.format(gSensorData['battery']['power'])),
                    },
                    'op': OPERATOR['OP_TRANSFER'],
                }
        return ret

    # def get_sensor_device(aConfig, key):
    #     ret = None
    #     if key == 'DOF10':
    #         try:
    #             import RTIMU
    #         except:
    #             print('RTIMU import error')
    #             return ret
    #         SETTINGS_FILE = "RTIMU_CONFIG"
    #         print("Using settings file [%s.ini]" % SETTINGS_FILE)
    #         if not os.path.exists(SETTINGS_FILE + ".ini"):
    #             print("Settings file does not exist, will be created")
    #         setting = RTIMU.Settings(SETTINGS_FILE)
    #         imu = RTIMU.RTIMU(setting)
    #         pressure = RTIMU.RTPressure(setting)
    #         print("IMU Name: " + imu.IMUName())
    #         print("Pressure Name: " + pressure.pressureName())
    #         if (not imu.IMUInit()):
    #             print("IMU Init Failed")
    #             return None
    #         else:
    #             print("IMU Init Succeeded");
    #         imu.setSlerpPower(0.02)
    #         imu.setGyroEnable(True)
    #         imu.setAccelEnable(True)
    #         imu.setCompassEnable(True)
    #         if (not pressure.pressureInit()):
    #             print("Pressure sensor Init Failed")
    #             return None
    #         else:
    #             print("Pressure sensor Init Succeeded")
    #         ret = {
    #             'imu': imu,
    #             'pressure': pressure,
    #         }
    #     return ret

    def register(aConfig, device_serial_no, sensor_type, serial_no, wsurl):
        # Announce this internal sensor to the server (OP_REGISTER) so the
        # backend knows it exists before readings start flowing.
        global gSensorControl, gWebSocketConnection
        try:
            if gWebSocketConnection is None:
                # print(wsurl)
                gWebSocketConnection = pi_create_websocket(wsurl)
            if gWebSocketConnection and not gWebSocketConnection.connected:
                # NOTE(review): TLS verification disabled — confirm intended.
                gWebSocketConnection.connect(wsurl, sslopt={"cert_reqs": ssl.CERT_NONE, "check_hostname": False})
            if gWebSocketConnection and gWebSocketConnection.connected:
                item = {
                    'device_serial_no': device_serial_no,
                    'serial_no': serial_no,
                    'op': OPERATOR['OP_REGISTER'],
                    'type': sensor_type,
                    'enable': True,
                    'is_internal': True,
                    'interval': 1.0,
                }
                gWebSocketConnection.send(json.dumps(item, ensure_ascii=True))
        except Exception as e:
            print(e)
            print('register failed')

    def rtimulib_loop(serial_no):
        # Sampling greenlet for the 10-DOF IMU: keeps the latest fused pose
        # plus pressure/temperature in gSensorData['DOF10'].
        global gSensorControl, gSensorData
        try:
            import RTIMU
        except:
            print('RTIMU import error')
            return
        SETTINGS_FILE = "RTIMU_CONFIG"
        print("Using settings file [%s.ini]" % SETTINGS_FILE)
        if not os.path.exists(SETTINGS_FILE + ".ini"):
            print("Settings file does not exist, will be created")
        setting = RTIMU.Settings(SETTINGS_FILE)
        imu = RTIMU.RTIMU(setting)
        pressure = RTIMU.RTPressure(setting)
        print("IMU Name: " + imu.IMUName())
        print("Pressure Name: " + pressure.pressureName())
        if (not imu.IMUInit()):
            print("IMU Init Failed")
            return
        else:
            print("IMU Init Succeeded");
        imu.setSlerpPower(0.02)
        imu.setGyroEnable(True)
        imu.setAccelEnable(True)
        imu.setCompassEnable(True)
        if (not pressure.pressureInit()):
            print("Pressure sensor Init Failed")
            return None
        # Poll interval from RTIMULib is in milliseconds.
        interval = float(imu.IMUGetPollInterval())
        if not 'DOF10' in gSensorData:
            gSensorData['DOF10'] = {}
        while True:
            sensor = _.find(gSensorControl['sensors'], {'serial_no': serial_no})
            if sensor and (sensor['enable'] is True or sensor['enable'].lower() == 'true'):
                if imu.IMURead():
                    gSensorData['DOF10'] = imu.getIMUData()
                    (gSensorData['DOF10']['pressureValid'], gSensorData['DOF10']['pressure'],
                     gSensorData['DOF10']['temperatureValid'], gSensorData['DOF10']['temperature']) = pressure.pressureRead()
            else:
                # Disabled: back off to a 1 s poll.
                interval = 1000.0
            gevent.sleep(interval / 1000.0)

    def pigpio_loop(key, i2c_addr, serial_no):
        # Sampling greenlet for I2C sensors; currently only the INA226
        # battery monitor is supported (key == 'battery').
        global gSensorControl, gSensorData, aConfig
        try:
            import pigpio
        except:
            print('pigpio import error')
            return
        if key == 'battery':
            from INA226 import INA226Device
            ina226 = None
            try:
                ina226 = INA226Device(address=i2c_addr, initdata={'max_current_excepted': 12})
            except Exception as e:
                print (e)
                return
            if ina226:
                if not 'battery' in gSensorData:
                    gSensorData['battery'] = {}
                interval = 1000.0  # milliseconds
                while True:
                    sensor = _.find(gSensorControl['sensors'], {'serial_no': serial_no})
                    if sensor and (sensor['enable'] is True or sensor['enable'].lower() == 'true'):
                        gSensorData['battery']['bus_voltage'] = ina226.read_bus_voltage()
                        gSensorData['battery']['current'] = ina226.read_current_by_shuntvolt()
                        gSensorData['battery']['power'] = ina226.read_power()
                    else:
                        interval = 1000.0
                    gevent.sleep(interval / 1000.0)

    def loop(aConfig, device_serial_no, key, sensor_type, serial_no, wsurl):
        # Sender greenlet: registers the sensor once, then forwards its
        # latest sampled reading over the websocket every `interval` seconds.
        global gSensorControl, gWebSocketConnection
        register(aConfig, device_serial_no, sensor_type, serial_no, wsurl)
        sensor = _.find(gSensorControl['sensors'], {'serial_no': serial_no})
        if sensor:
            while True:
                # Re-read the control record each pass so remote enable /
                # interval changes take effect.
                sensor = _.find(gSensorControl['sensors'], {'serial_no': serial_no})
                interval = sensor['interval']
                if sensor and (sensor['enable'] is True or sensor['enable'].lower() == 'true'):
                    try:
                        if gWebSocketConnection is None:
                            gWebSocketConnection = pi_create_websocket(wsurl)
                        if gWebSocketConnection and not gWebSocketConnection.connected:
                            gWebSocketConnection.connect(wsurl, sslopt={"cert_reqs": ssl.CERT_NONE, "check_hostname": False})
                        item = get_sensor_data(aConfig, key, device_serial_no, sensor_type, serial_no)
                        if gWebSocketConnection and item:
                            gWebSocketConnection.send(json.dumps(item, ensure_ascii=True))
                    except Empty:
                        pass
                    except Exception as e:
                        # print('loop error')
                        # print(e)
                        pass
                    finally:
                        # Heartbeat/probe; close on failure so we reconnect.
                        try:
                            gWebSocketConnection.send('')
                        except:
                            gWebSocketConnection.close()
                gevent.sleep(interval)
        else:
            print('cannot get sensor:%s' % serial_no)

    app = aConfig['gConfig']['wsgi']['application']
    if 'queue' in aConfig['gConfig']['applications'][app] \
            and 'internal_sensor' in aConfig['gConfig']['applications'][app]['queue'] \
            and 'websocket' in aConfig['gConfig']['applications'][app]:
        internal_sensor = aConfig['gConfig']['applications'][app]['queue']['internal_sensor']
        device_serial_no = None
        if 'device_info' in aConfig['gConfig']['applications'][app] \
                and 'serial_no' in aConfig['gConfig']['applications'][app]['device_info'] \
                and len(aConfig['gConfig']['applications'][app]['device_info']['serial_no']):
            device_serial_no = aConfig['gConfig']['applications'][app]['device_info']['serial_no']
        websocketcfg = aConfig['gConfig']['applications'][app]['websocket']
        wsurl = '%s://%s:%s%s' % (websocketcfg['remote_protocol'], websocketcfg['remote_host'],
                                  websocketcfg['remote_port'], websocketcfg['remote_base'])
        # One pair of greenlets (sampler + sender) per configured sensor key.
        for key in internal_sensor.keys():
            serial_no = None
            chip = None
            sensor_type = None
            i2c_addr = None
            if 'enable' in internal_sensor[key] and internal_sensor[key]['enable'].lower() == 'true':
                interval = 1.0
                if 'i2c_addr' in internal_sensor[key]:
                    # Config stores the address as a hex string, e.g. "0x40".
                    i2c_addr = int(internal_sensor[key]['i2c_addr'], 16)
                if 'queue_consume_interval' in internal_sensor[key]:
                    interval = float(internal_sensor[key]['queue_consume_interval'])
                if 'type' in internal_sensor[key]:
                    sensor_type = internal_sensor[key]['type']
                    if isinstance(sensor_type, str):
                        sensor_type = int(sensor_type)
                    elif isinstance(sensor_type, list):
                        sensor_type = _.map_(sensor_type, lambda x: int(x))
                if 'serial_no' in internal_sensor[key]:
                    serial_no = internal_sensor[key]['serial_no']
                if gSensorControl is None:
                    gSensorControl = {}
                    if device_serial_no:
                        gSensorControl['device_serial_no'] = device_serial_no
                if not 'sensors' in gSensorControl:
                    gSensorControl['sensors'] = []
                if serial_no and sensor_type:
                    # Upsert the control record for this sensor.
                    sensor = _.find(gSensorControl['sensors'], {'serial_no': serial_no})
                    sensor_index = _.find_index(gSensorControl['sensors'], {'serial_no': serial_no})
                    if sensor:
                        sensor['enable'] = True
                        sensor['is_internal'] = True
                        sensor['interval'] = interval
                        sensor['type'] = sensor_type
                        gSensorControl['sensors'][sensor_index] = sensor
                    else:
                        sensor = {}
                        sensor['serial_no'] = serial_no
                        sensor['enable'] = True
                        sensor['is_internal'] = True
                        sensor['interval'] = interval
                        sensor['type'] = sensor_type
                        gSensorControl['sensors'].append(sensor)
                    # device = get_sensor_device(aConfig, key)
                    if key == 'DOF10':
                        gevent.spawn(rtimulib_loop, serial_no)
                        gevent.spawn(loop, aConfig, device_serial_no, key, sensor_type, serial_no, wsurl)
                    elif key in ['battery']:
                        gevent.spawn(pigpio_loop, key, i2c_addr, serial_no)
                        gevent.spawn(loop, aConfig, device_serial_no, key, sensor_type, serial_no, wsurl)
def create_or_update_sftp(client, module):
    # Ansible-style handler: ensure an AWS Transfer (SFTP) server identified
    # by the SERVER_NAME_KEY tag exists and matches the module parameters.
    # Exits the module via module.exit_json / module.fail_json_aws.
    name = module.params.get("name")
    purge_tags = module.params.get("purge_tags")
    tags = {}
    if module.params.get("tags") is not None:
        tags = module.params.get("tags")
    endpoint_type = module.params.get("endpoint_type")
    vpc_id = module.params.get("vpc_id")
    host_key = module.params.get("host_key")
    identity_provider_type = module.params.get("identity_provider_type")
    identity_provider_role = module.params.get("identity_provider_role")
    identity_provider_url = module.params.get("identity_provider_url")
    logging_role = module.params.get("logging_role")
    changed = False
    result = {}
    sftp_server = None
    needs_creation = False
    # TODO: Eventually, this needs to support all of the endpoint details, including vpc endpoint ids.
    endpoint_details = None
    if endpoint_type != 'PUBLIC' and vpc_id is not None:
        endpoint_details = {
            # "AddressAllocationIds": [],
            # "SubnetIds": [],
            # "VpcEndpointId": "",
            "VpcId": vpc_id
        }
    identity_provider_details = None
    if identity_provider_url is not None and identity_provider_role is not None:
        identity_provider_details = {
            "InvocationRole": identity_provider_role,
            "Url": identity_provider_url
        }
    # The server's logical name lives in a tag, not a native attribute.
    name_tag = {'Key': SERVER_NAME_KEY, 'Value': name}
    assigned_tags = [name_tag]
    try:
        sftp_server = find_sftp_server(client, name)
        needs_creation = sftp_server is None
    except EndpointConnectionError as e:
        module.fail_json_aws(e, msg="Invalid endpoint provided: %s" % to_text(e))
    except (BotoCoreError, ClientError) as e:
        module.fail_json_aws(e, msg="Failed to check Transfer presence")
    if needs_creation:
        result = create_sftp_server(client, endpoint_details, endpoint_type, host_key,
                                    identity_provider_details, identity_provider_type,
                                    logging_role, name_tag)
        sftp_server_id = result['ServerId']
        changed = True
    else:
        sftp_server_id = sftp_server['Server']['ServerId']
        if not purge_tags:
            # Keep the server's existing tags and merge the requested ones on top.
            assigned_tags = sftp_server['Tags']
    # Update SFTP Server Details
    # Update Tags
    for key, value in tags.items():
        item = py_.find(assigned_tags, {'Key': key})
        if item:
            item['Value'] = value
        else:
            item = {'Key': key, 'Value': value}
            assigned_tags.append(item)
    # NOTE(review): assigned_tags is built here but not passed to
    # build_server_kwargs — confirm the merged tags are actually applied.
    update_args = build_server_kwargs(endpoint_details, endpoint_type, host_key,
                                      identity_provider_details, identity_provider_type,
                                      logging_role, name, sftp_server_id, is_update=True)
    # NOTE(review): update_server runs unconditionally, so the module always
    # reports changed=True even when nothing differs — confirm intended.
    result = client.update_server(**update_args)
    changed = True
    module.exit_json(changed=changed, name=name, **result)
def pi_handle_ws_recv_callback(aConfig, data):
    """Handle a control message received from the server websocket.

    `data` is a JSON string. Recognized ops toggle the camera stream or
    enable/disable sensors (optionally adjusting their interval) in the
    global gSensorControl registry; messages for other devices are ignored.
    Re-raises after printing on any processing error.
    """
    global gSensorControl

    def cmd_camera_stream_start_stop(aConfig, start_stop):
        # Drive the uv4l/janus camera stream via uv4l's HTTP control endpoint.
        if start_stop.lower() == 'start':
            start_stop = 'Start'
        if start_stop.lower() == 'stop':
            start_stop = 'Stop'
        app = aConfig['gConfig']['wsgi']['application']
        if 'uv4l' in aConfig['gConfig']['applications'][app] and 'janus' in aConfig['gConfig']['applications'][app]:
            uv4l = aConfig['gConfig']['applications'][app]['uv4l']
            janus = aConfig['gConfig']['applications'][app]['janus']
            url = ''
            if start_stop == 'Start':
                url = '%s://%s:%s/janus?gateway_url=%s://%s:%s&gateway_root=%s&room=%s&room_pin=%s&username=%s&reconnect=%s&action=%s' % \
                      (uv4l['protocol'], uv4l['host'], uv4l['port'],
                       janus['protocol'], janus['host'], janus['port'], janus['base_path'],
                       janus['room'], janus['room_pin'], janus['username'], janus['reconnect'], start_stop)
            elif start_stop == 'Stop':
                url = '%s://%s:%s/janus?action=%s' % \
                      (uv4l['protocol'], uv4l['host'], uv4l['port'], start_stop)
            if url:
                print(url)
                # BUG FIX: '>/dev/null' used to be passed as a literal argv
                # element — shell redirection does not work without shell=True,
                # and curl treated it as an extra URL. check_output already
                # captures stdout, so the redirect is unnecessary.
                check_output(['curl', '-s', url])

    try:
        obj = json.loads(data)
        if isinstance(obj, dict) and 'device_serial_no' in obj and 'op' in obj:
            if gSensorControl and 'device_serial_no' in gSensorControl \
                    and gSensorControl['device_serial_no'] == obj['device_serial_no']:
                if obj['op'] == OPERATOR['OP_ENABLE_CAMERA']:
                    cmd_camera_stream_start_stop(aConfig, 'start')
                elif obj['op'] == OPERATOR['OP_DISABLE_CAMERA']:
                    cmd_camera_stream_start_stop(aConfig, 'stop')
                if 'serial_no' in obj and len(obj['serial_no']):
                    # Target a single sensor by serial number.
                    sensor = _.find(gSensorControl['sensors'], {'serial_no': obj['serial_no']})
                    index = _.find_index(gSensorControl['sensors'], {'serial_no': obj['serial_no']})
                    if sensor:
                        if obj['op'] == OPERATOR['OP_DISABLE_SENSOR']:
                            gSensorControl['sensors'][index]['enable'] = False
                        elif obj['op'] == OPERATOR['OP_ENABLE_SENSOR']:
                            gSensorControl['sensors'][index]['enable'] = True
                        if 'interval' in obj and (isinstance(obj['interval'], int) or isinstance(obj['interval'], float)):
                            gSensorControl['sensors'][index]['interval'] = obj['interval']
                else:
                    # Broadcast: apply the op to every registered sensor.
                    # BUG FIX: these comparisons previously tested the literal
                    # key strings 'OP_DISABLE_SENSOR'/'OP_ENABLE_SENSOR' instead
                    # of the OPERATOR[...] values used everywhere else, so
                    # broadcast enable/disable never matched.
                    for i in range(len(gSensorControl['sensors'])):
                        if obj['op'] == OPERATOR['OP_DISABLE_SENSOR']:
                            gSensorControl['sensors'][i]['enable'] = False
                        elif obj['op'] == OPERATOR['OP_ENABLE_SENSOR']:
                            gSensorControl['sensors'][i]['enable'] = True
                        if 'interval' in obj and (isinstance(obj['interval'], int) or isinstance(obj['interval'], float)):
                            gSensorControl['sensors'][i]['interval'] = obj['interval']
            else:
                print('unknown device_serial_no:[%s]' % obj['device_serial_no'])
    except Exception as e:
        print(e)
        raise
def get_point_id(alist, code): return _.find(alist, lambda x: x['properties'].has_key('function_pos_code') and x['properties']['func_pos_code'] == code)