def put(self):
    """Modify the profile of the current corp.

    Expects a JSON body with optional corp fields; validates the
    contact e-mail length before delegating to update_corp.
    Responds with the standard ret envelope.
    """
    status = ErrorCode.SUCCESS
    try:
        data = DotDict(json_decode(self.request.body))
        logging.info("[UWEB] Corp profile request: %s, uid: %s, tid: %s",
                     data, self.current_user.uid, self.current_user.tid)
    except Exception as e:
        status = ErrorCode.ILLEGAL_DATA_FORMAT
        self.write_ret(status)
        return

    try:
        # FIX: dict.has_key() is deprecated; use the `in` operator.
        if 'c_email' in data and len(data.c_email) > 50:
            status = ErrorCode.ILLEGAL_EMAIL
            self.write_ret(status,
                           message=u'联系人邮箱的最大长度是50个字符!')
            return

        update_corp(data, self.current_user.cid, self.db, self.redis)
        self.write_ret(status)
    except Exception as e:
        logging.exception("[UWEB] Update corp profile failed. cid:%s, tid:%s, Exception: %s",
                          self.current_user.cid, self.current_user.tid, e.args)
        status = ErrorCode.SERVER_BUSY
        self.write_ret(status)
def get(self):
    """Return the profile of the operator that is currently logged in."""
    status = ErrorCode.SUCCESS
    try:
        operator = QueryHelper.get_operator_by_oid(self.current_user.oid,
                                                   self.db)
        if not operator:
            # Session references a vanished operator; force re-login.
            status = ErrorCode.LOGIN_AGAIN
            logging.error("[UWEB] Operator does not exist, redirect to login.html. oid: %s.",
                          self.current_user.oid)
            self.write_ret(status)
            return

        profile = DotDict()
        profile.update(operator)
        # Normalize NULL-ish columns to empty strings for the client.
        for field in profile.keys():
            if not profile[field]:
                profile[field] = ''
        self.write_ret(status, dict_=dict(profile=profile))
    except Exception as e:
        logging.exception("[UWEB] Get corp profile failed. oid:%s, tid:%s, Exception: %s",
                          self.current_user.oid, self.current_user.tid, e.args)
        status = ErrorCode.SERVER_BUSY
        self.write_ret(status)
def determine_threshold_trigger(alert_params, events): """Given a threshold alert's params, and a set of events determine if it should fire and if so, resolve it's summary, event snippets, etc. """ # mostCommon the events by the dotted aggregation key counts = mostCommon(events, alert_params["aggregation_key"]) # determine if these events trigger an alert # according to the parameters logger.debug(counts) for i in counts: if i[1] >= alert_params["threshold"]: alert = alert_params alert["triggered"] = True # set the summary via chevron/mustache template # with the alert plus metadata metadata = {"metadata": {"value": i[0], "count": i[1]}} alert = merge(alert, metadata) # limit events to those matching the aggregation_key value # so the alert only gets events that match the count mostCommon results alert["events"] = [] for event in events: dotted_event = DotDict(event) if i[0] == dotted_event.get(alert_params["aggregation_key"]): alert["events"].append(dotted_event) alert["summary"] = chevron.render(alert["summary"], alert) # walk the alert events for any requested event snippets for event in alert["events"][:alert_params["event_sample_count"]]: alert["summary"] += " " + chevron.render( alert_params["event_snippet"], event) yield alert
def post(self):
    """Create an announcement record and SMS it to the given mobiles."""
    status = ErrorCode.SUCCESS
    try:
        data = DotDict(json_decode(self.request.body))
        content = data.get('content', '')
        mobiles = data.get('mobiles', None)
        logging.info("[UWEB] Announcement request: %s", data)
    except Exception as e:
        status = ErrorCode.ILLEGAL_DATA_FORMAT
        self.write_ret(status)
        return

    try:
        joined_mobiles = u''
        if mobiles is not None:
            joined_mobiles = ','.join(mobiles)
            # Fan the announcement out over SMS, one message per number.
            for mobile in mobiles:
                SMSHelper.send(mobile, content)

        record_announcement(self.db,
                            dict(cid=self.current_user.cid,
                                 content=content,
                                 mobiles=joined_mobiles))
        self.write_ret(status)
    except Exception as e:
        status = ErrorCode.SERVER_BUSY
        logging.exception(
            "[UWEB] record share failed, Exception: %s", e.args)
        self.write_ret(status)
def get_tinyurl(url):
    """Shorten a wap url via the Google URL-shortener API.

    Returns the short url string, or None when the request fails
    or the service returns no 'id' field.
    """
    try:
        http_client = httplib2.Http()
        api_endpoint = 'https://www.googleapis.com/urlshortener/v1/url'
        payload = json_encode({'longUrl': url})
        response, content = http_client.request(
            api_endpoint, "POST", payload,
            headers={'Content-Type': 'application/json'})
        res = DotDict(json_decode(content))
        logging.info("[TINY_URL] response: %s", res)
        return res.get('id', None)
    except Exception as e:
        logging.exception("Get tiny url failed. Exception: %s", e.args)
        return None
def get(self):
    """Return the profile (user record + car record) of the current user."""
    status = ErrorCode.SUCCESS
    try:
        tid = self.get_argument('tid', None)
        # Validate the requested tid and refresh current_user.
        self.check_tid(tid)

        user = QueryHelper.get_user_by_uid(self.current_user.uid, self.db)
        if not user:
            status = ErrorCode.LOGIN_AGAIN
            logging.error("[UWEB] User does not exist, redirect to login.html. uid: %s.",
                          self.current_user.uid)
            self.write_ret(status)
            return

        car = QueryHelper.get_car_by_tid(self.current_user.tid, self.db)
        profile = DotDict()
        profile.update(user)
        profile.update(car)
        self.write_ret(status, dict_=dict(profile=profile))
    except Exception as e:
        logging.exception("[UWEB] Get user profile failed. uid:%s, tid:%s, Exception: %s",
                          self.current_user.uid, self.current_user.tid, e.args)
        status = ErrorCode.SERVER_BUSY
        self.write_ret(status)
def post(self):
    """Turn on bluetooth (kqly) for one or more terminals."""
    status = ErrorCode.SUCCESS
    try:
        data = DotDict(json_decode(self.request.body))
        tid = data.get('tid', None)
        tids = data.get('tids', None)
        self.check_tid(tid)
        logging.info("[BLUETOOTH] kqly request: %s, uid: %s, tids: %s",
                     data, self.current_user.uid, tids)
    except Exception as e:
        status = ErrorCode.ILLEGAL_DATA_FORMAT
        logging.exception("[UWEB] Invalid data format. body:%s, Exception: %s",
                          self.request.body, e.args)
        self.write_ret(status)
        return

    try:
        # Fall back to the current terminal when no explicit list is given.
        target_tids = str_to_list(tids) or [self.current_user.tid, ]
        kqly(self.db, self.redis, [str(t) for t in target_tids])
        self.write_ret(status)
    except Exception as e:
        logging.exception("[BLUETOOTH] Kqly failed. uid: %s, tid: %s, Exception: %s. ",
                          self.current_user.uid, self.current_user.tid, e.args)
        status = ErrorCode.SERVER_BUSY
        self.write_ret(status)
def post(self):
    """Update a terminal's mannual_status, refusing out-of-service terminals."""
    status = WXErrorCode.SUCCESS
    try:
        data = DotDict(json_decode(self.request.body))
        tid = data.get('tid', None)
        mannual_status = data.get('mannual_status', None)
        terminal = self.db.get("SELECT mannual_status, defend_status, service_status FROM T_TERMINAL_INFO "
                               "WHERE tid = %s ", tid)
        if terminal:
            service_status = terminal['service_status']
            if int(service_status) == 0:
                # Out-of-service terminals must not be toggled.
                status = WXErrorCode.OUTSERVICE
                self.write_ret(status=status,
                               message=WXErrorCode.ERROR_MESSAGE[status])
                return
        try:
            # BUG FIX: the original SQL lacked a space before WHERE,
            # producing "SET mannual_status = %sWHERE tid = %s".
            self.db.execute("UPDATE T_TERMINAL_INFO SET mannual_status = %s"
                            " WHERE tid = %s", mannual_status, tid)
        except MySQLdb.Error as e:
            # BUG FIX: the log format had one placeholder but two args.
            logging.exception("[WEIXIN] execute update sql terminal:%s mannual_stauts failed, Exception: %s",
                              tid, e.args)
            status = WXErrorCode.SERVER_BUSY
        self.write_ret(status=status,
                       message=WXErrorCode.ERROR_MESSAGE[status])
    except Exception as e:
        logging.exception("[WEIXIN] update terminal:%s mannual_stauts failed",
                          tid)
        status = WXErrorCode.FAILED
        self.write_ret(status=status,
                       message=WXErrorCode.ERROR_MESSAGE[status])
def retrieve(self, citylist=None): """core for Retrieving and Storing operation.""" #store the complete data of curent month into MongoDB if not citylist: cities = self.mysql_db.query("SELECT DISTINCT region_code AS id" " FROM T_HLR_CITY") citylist = [c.id for c in cities] results = self.retrieve_mixin(citylist) # get the group_id list from mongodb(may include the group_id has been removed) res = self.collection.find({'city_id': { '$in' : citylist }}, {'group_id':1}) ids_mongod = [int(v['group_id']) for v in res] # get the group_id lsit from mysql (the latest) ids_mysql = [int(result['group_id']) for result in results] # get the group_id to be removed and remove them from mongodb ids_move = list(set(ids_mongod) - set(ids_mysql)) self.collection.remove({'group_id': {'$in':ids_move}}) try: for result in results: result = DotDict(result) oldresult = self.collection.find_one({'id': result.id}) if oldresult: result['_id'] = oldresult['_id'] else: result.pop('_id') self.collection.save(result) except: logging.exception('mongodb connected failed') return results
def post(self):
    """Add an alert (event) period for a terminal.

    A terminal may have at most 7 rows in T_ALERT_SETTING.
    """
    status = ErrorCode.SUCCESS
    try:
        data = DotDict(json_decode(self.request.body))
        tid = data.get("tid", None)
        self.check_tid(tid)
        logging.info("[UWEB] terminal request: %s, uid: %s, tid: %s",
                     data, self.current_user.uid, self.current_user.tid)
    except Exception as e:
        status = ErrorCode.ILLEGAL_DATA_FORMAT
        # BUG FIX: was self.write(status), which wrote a bare int
        # instead of the standard JSON ret envelope.
        self.write_ret(status)
        return

    try:
        tid = data["tid"]
        items = self.db.query("SELECT * FROM T_ALERT_SETTING WHERE tid=%s",
                              tid)
        if len(items) >= 7:
            status = ErrorCode.FAILED
            self.write_ret(status)
            logging.error("[UWEB] terminal %s set too many event periods",
                          tid)
        else:
            start_time = data["start_time"]
            end_time = data["end_time"]
            week = data["week"]
            self.db.execute("INSERT INTO T_ALERT_SETTING"
                            " VALUES(NULL, %s, %s, %s, %s)",
                            tid, start_time, end_time, week)
            logging.info("[UWEB] terminal add event period success: %s",
                         data)
            self.write_ret(status)
    except Exception as e:
        logging.exception("[UWEB] tid:%s insert event period. Exception:%s",
                          tid, e.args)
        status = ErrorCode.FAILED
        self.write_ret(status)
def post(self, ecmobile):
    """Modify a business.

    Builds an UPDATE for whichever corp fields were submitted.

    SECURITY FIX: values were previously interpolated into the SQL
    with "... = '%s'" % v, allowing SQL injection; they are now
    passed as query parameters.
    """
    # Request-argument name -> T_CORP column name.
    fields = DotDict(ecname="name",
                     ecmobile="mobile",
                     linkman="linkman",
                     address="address",
                     email="email",
                     bizcode="bizcode",
                     type="type")
    set_clauses = []
    params = []
    for key, column in fields.iteritems():
        v = self.get_argument(key, None)
        if v is not None:
            set_clauses.append("%s = %%s" % column)
            params.append(v)
    # ROBUSTNESS: skip the UPDATE entirely when nothing was submitted
    # (the old code produced "UPDATE T_CORP SET WHERE ..." and crashed).
    if set_clauses:
        sql = "UPDATE T_CORP SET " + ', '.join(set_clauses) + \
              " WHERE mobile = %s"
        params.append(ecmobile)
        self.db.execute(sql, *params)
    self.redirect("/ecbusiness/list/%s" % ecmobile)
def post(self):
    """Clear the cookie and set defend.

    Logout flow: set defend on the user's terminals, unregister the
    Android push device id, then always clear the session cookie.
    """
    try:
        data = DotDict(json_decode(self.request.body))
        devid = data.get("devid", "")
        logging.info("[UWEB] logout request: %s, uid: %s",
                     data, self.current_user.uid)
    except:
        self.write_ret(ErrorCode.ILLEGAL_DATA_FORMAT)
        logging.error("[UWEB] illegal format, body:%s", self.request.body)
    else:
        # 1: if there are tids, set defend
        for tid in data.tids:
            update_mannual_status(self.db, self.redis, tid,
                                  UWEB.DEFEND_STATUS.YES)
        # 2: remove devid from android_push_list
        android_push_list_key = get_android_push_list_key(
            self.current_user.uid)
        android_push_list = self.redis.getvalue(android_push_list_key)
        android_push_list = android_push_list if android_push_list else []
        if devid in android_push_list:
            android_push_list.remove(devid)
            self.redis.set(android_push_list_key, android_push_list)
            logging.info("[UWEB] uid:%s, android_push_lst: %s",
                         self.current_user.uid, android_push_list)
    finally:
        # 3: clear cookie — runs on both paths.
        # NOTE(review): on the illegal-format path write_ret has already
        # been called once above, so this writes a second response body;
        # confirm that is intended.
        self.clear_cookie(self.app_name)
        self.write_ret(ErrorCode.SUCCESS)
def get_realtime(self, uid, sim):
    """Get the location of the current realtime request.

    workflow:
    if there is alive memcached, we can get location from it,
    else get location from db
    return result to user browser
    """
    ret = DotDict(status=ErrorCode.SUCCESS,
                  message='',
                  location=None)
    raw_location = QueryHelper.get_location_info(self.current_user.tid,
                                                 self.db, self.redis)
    location = get_locations_with_clatlon([raw_location, ], self.db)[0]
    if location and location.clatitude and location.clongitude:
        if not location.name:
            location.name = ''
        # The internal row id is of no use to the client.
        if 'id' in location:
            del location['id']
        location['degree'] = float(location.degree)
        location['tid'] = self.current_user.tid
        ret.location = location
    return ret
def post(self):
    """Create business for ec user."""
    field_names = ("ecname", "ecmobile", "password", "linkman",
                   "address", "email", "bizcode", "type")
    fields = DotDict()
    for name in field_names:
        fields[name] = self.get_argument(name, '').strip()
    # The corp record is keyed by the ec mobile number (used as cid).
    self.db.execute("INSERT INTO T_CORP(cid, name, mobile, password,"
                    " linkman, address, email, timestamp, bizcode, type)"
                    " VALUES(%s, %s, %s, password(%s), %s, %s, %s, %s, %s, %s)",
                    fields.ecmobile, fields.ecname, fields.ecmobile,
                    fields.password, fields.linkman, fields.address,
                    fields.email, int(time.time()), fields.bizcode,
                    fields.type)
    self.db.execute("INSERT INTO T_GROUP(corp_id, name, type)"
                    " VALUES(%s, default, default)",
                    fields.ecmobile)
    self.redirect("/ecbusiness/list/%s" % fields.ecmobile)
def retrieve(self, city_list=None, end_time=None):
    """core for Retrieving and Storing operation.

    Fetch per-city statistics up to *end_time* (defaults to today
    00:00:00, expressed in milliseconds since the epoch) and upsert
    them into MongoDB keyed by (city_id, timestamp).

    :return: the result rows fetched from MySQL.
    """
    if not end_time:
        d = datetime.datetime.fromtimestamp(time.time())
        t = datetime.datetime.combine(datetime.date(d.year, d.month, d.day),
                                      datetime.time(0, 0))  # get today 0:00:00
        # Seconds -> milliseconds.
        end_time = int(time.mktime(t.timetuple()) * 1000)
    if not city_list:
        cities = self.mysql_db.query("SELECT DISTINCT region_code AS id FROM T_HLR_CITY")
        city_list = [city.id for city in cities]
    results = self.retrieve_mixin(city_list, end_time)
    try:
        for result in results:
            result = DotDict(result)
            query_term = {'city_id': result.city_id,
                          'timestamp': result.timestamp}
            # Keep the Mongo _id stable on update; otherwise pop '_id'
            # (presumably carried over from the source row — confirm)
            # so save() performs an insert.
            oldresult = self.collection.find_one(query_term)
            if oldresult:
                result['_id'] = oldresult['_id']
            else:
                result.pop('_id')
            self.collection.save(result)
    except Exception as e:
        logging.exception('mongodb saved failed. Error: %s', e.args)
    return results
def handle_acc_status_report(info, address, connection, channel, exchange,
                             gw_binding, db, redis):
    """
    S31 ACC_status_report:
    0: success, then record new terminal's address
    1: invalid SessionID
    """
    try:
        head = info.head
        dev_id = head.dev_id
        args = DotDict(success=GATEWAY.RESPONSE_STATUS.SUCCESS,
                       command=head.command)
        session = QueryHelper.get_terminal_sessionID(dev_id, redis)
        if session != head.sessionID:
            # Stale or forged session: reject the report.
            args.success = GATEWAY.RESPONSE_STATUS.INVALID_SESSIONID
            logging.error("[GW] Invalid sessionID, terminal: %s", head.dev_id)
        else:
            parser = ACCStatusReportParser(info.body, head)
            report = parser.ret
            #NOTE: Just record it in db.
            db.execute("INSERT INTO T_ACC_STATUS_REPORT(tid, category, timestamp)"
                       " VALUES(%s, %s, %s)",
                       report['dev_id'], report['category'],
                       report['timestamp'])
        composer = ACCStatusReportComposer(args)
        request = DotDict(packet=composer.buf,
                          address=address,
                          dev_id=dev_id)
        append_gw_request(request, connection, channel, exchange, gw_binding)
    except:
        logging.exception("[GW] Handle acc status report exception.")
        GWException().notify()
def get(self):
    """Page through the corp's bus lines (T_LINE), with their stations.

    Query args: pagenum, pagecnt (-1 asks for a recount) and an
    optional `name` LIKE filter.
    """
    status = ErrorCode.SUCCESS
    try:
        page_number = int(self.get_argument('pagenum'))
        page_count = int(self.get_argument('pagecnt'))
        #reserved API
        fields = DotDict(name="name LIKE '%%%%%s%%%%'")
        for key in fields.iterkeys():
            v = self.get_argument(key, None)
            if v:
                # Reject suspicious values: the filter is interpolated
                # into the WHERE clause below, not parameterized.
                if not check_sql_injection(v):
                    status = ErrorCode.SELECT_CONDITION_ILLEGAL
                    self.write_ret(status)
                    return
                fields[key] = fields[key] % (v,)
            else:
                fields[key] = None
    except Exception as e:
        status = ErrorCode.ILLEGAL_DATA_FORMAT
        logging.exception("[UWEB] cid: %s get line data format illegal. Exception: %s",
                          self.current_user.cid, e.args)
        self.write_ret(status)
        return
    try:
        where_clause = ' AND '.join([v for v in fields.itervalues()
                                     if v is not None])
        page_size = UWEB.LIMIT.PAGE_SIZE
        if where_clause:
            where_clause = ' AND ' + where_clause
        if page_count == -1:
            # The client asked for a fresh total-page count.
            sql = "SELECT count(id) as count FROM T_LINE" + \
                  " WHERE 1=1 " + where_clause
            sql += " AND cid = %s" % (self.current_user.cid,)
            res = self.db.get(sql)
            count = res.count
            d, m = divmod(count, page_size)
            page_count = (d + 1) if m else d
        sql = "SELECT id AS line_id, name AS line_name FROM T_LINE" +\
              " WHERE 1=1 " + where_clause
        sql += " AND cid = %s LIMIT %s, %s" % (self.current_user.cid,
                                               page_number * page_size,
                                               page_size)
        lines = self.db.query(sql)
        # Attach the ordered station list to every line.
        for line in lines:
            stations = self.db.query("SELECT name, latitude, longitude, seq "
                                     " FROM T_STATION "
                                     " WHERE line_id = %s", line.line_id)
            line["stations"] = stations
        self.write_ret(status,
                       dict_=DotDict(lines=lines, pagecnt=page_count))
    except Exception as e:
        logging.exception("[UWEB] cid: %s get line failed. Exception: %s",
                          self.current_user.cid, e.args)
        status = ErrorCode.SERVER_BUSY
        self.write_ret(status)
def handle_fob_info(info, address, connection, channel, exchange,
                    gw_binding, db, redis):
    """
    S19
    NOTE: deprecated
    fob info packet: add or remove fob
    0: success, then record new terminal's address
    1: invalid SessionID
    """
    try:
        head = info.head
        dev_id = head.dev_id
        args = DotDict(success=GATEWAY.RESPONSE_STATUS.SUCCESS)
        session = QueryHelper.get_terminal_sessionID(dev_id, redis)
        if session != head.sessionID:
            args.success = GATEWAY.RESPONSE_STATUS.INVALID_SESSIONID
        else:
            parser = FobInfoParser(info.body, head)
            # Mark the terminal alive, then persist the fob change.
            update_terminal_status(redis, head.dev_id, address)
            update_fob_info(db, redis, parser.ret)
        composer = FobInfoRespComposer(args)
        request = DotDict(packet=composer.buf,
                          address=address,
                          dev_id=dev_id)
        append_gw_request(request, connection, channel, exchange, gw_binding)
    except:
        logging.exception("[GW] Handle fob info report exception.")
        GWException().notify()
def handle_request(self, data):
    """Answer an AGPS request packet; return the response buffer or None."""
    try:
        packet = T_CLWCheck(data)
        if packet.head.command == GATEWAY.T_MESSAGE_TYPE.AGPS:
            head = packet.head
            args = DotDict(success=GATEWAY.RESPONSE_STATUS.SUCCESS,
                           agps_data=None)
            parser = AgpsParser(packet.body, head)
            expected_sign = self.get_agps_sign(parser.ret,
                                               int(head.timestamp))
            if expected_sign != int(head.agps_sign, 16):
                # Bad signature: no response is composed, the caller
                # simply gets None (falls off the end).
                args.success = GATEWAY.RESPONSE_STATUS.INVALID_SESSIONID
                logging.error("[AGPS] agps_sign invalid.")
            else:
                args.agps_data = self.get_agps_from_redis(parser.ret)
                if args.agps_data:
                    return AgpsComposer(args).buf
                logging.error("[AGPS] there's no invalid agps data.")
                return None
    except:
        logging.exception("[AGPS] Handle agps request exception.")
        return None
def put(self):
    """Modify profile of current operator.

    Validates the e-mail length, then delegates to update_operator.
    """
    status = ErrorCode.SUCCESS
    try:
        data = DotDict(json_decode(self.request.body))
        logging.info("[UWEB] Operator profile request: %s, oid: %s, tid: %s",
                     data, self.current_user.oid, self.current_user.tid)
    except Exception as e:
        status = ErrorCode.ILLEGAL_DATA_FORMAT
        self.write_ret(status)
        return

    try:
        # FIX: dict.has_key() is deprecated; use the `in` operator.
        if 'email' in data and len(data.email) > 50:
            status = ErrorCode.ILLEGAL_EMAIL
            self.write_ret(status,
                           message=u'联系人邮箱的最大长度是50个字符!')
            return

        update_operator(data, self.current_user.oid, self.db, self.redis)
        self.write_ret(status)
    except Exception as e:
        logging.exception("[UWEB] Update operator profile failed. oid:%s, Exception: %s",
                          self.current_user.oid, e.args)
        status = ErrorCode.SERVER_BUSY
        self.write_ret(status)
def put(self):
    """Update the parameters of terminal.

    Validates the session, the optional corp_cnum and the (deprecated)
    white_list before persisting via update_terminal_db and pushing
    an S7 websocket notification.
    """
    status = ErrorCode.SUCCESS
    try:
        data = DotDict(json_decode(self.request.body))
        tid = data.get('tid', None)
        # check tid whether exist in request and update current_user
        self.check_tid(tid)
        logging.info("[UWEB] Terminal request: %s, uid: %s, tid: %s",
                     data, self.current_user.uid, self.current_user.tid)
    except Exception as e:
        status = ErrorCode.ILLEGAL_DATA_FORMAT
        self.write_ret(status)
        return

    try:
        terminal = QueryHelper.get_available_terminal(
            self.current_user.tid, self.db)
        if not terminal:
            status = ErrorCode.LOGIN_AGAIN
            logging.error("[UWEB] The terminal with tid: %s does not exist,"
                          " redirect to login.html", self.current_user.tid)
            self.write_ret(status)
            return

        user = QueryHelper.get_user_by_uid(self.current_user.uid, self.db)
        if not user:
            status = ErrorCode.LOGIN_AGAIN
            logging.error("[UWEB] The user with uid: %s does not exist,"
                          " redirect to login.html", self.current_user.uid)
            self.write_ret(status)
            return

        # sql injection
        # FIX: dict.has_key() is deprecated; use the `in` operator.
        if 'corp_cnum' in data and not check_cnum(data.corp_cnum):
            status = ErrorCode.ILLEGAL_CNUM
            self.write_ret(status)
            return

        # NOTE: deprecated
        if 'white_list' in data:
            white_list = ":".join(data.white_list)
            if not check_sql_injection(white_list):
                status = ErrorCode.ILLEGAL_WHITELIST
                self.write_ret(status)
                return

        self.update_terminal_db(data)

        # NOTE: wspush to client
        if status == ErrorCode.SUCCESS:
            WSPushHelper.pushS7(tid, self.db, self.redis)
        self.write_ret(status)
    except Exception as e:
        logging.exception("[UWEB] uid:%s, tid:%s update terminal info failed. Exception: %s",
                          self.current_user.uid, self.current_user.tid,
                          e.args)
        status = ErrorCode.SERVER_BUSY
        self.write_ret(status)
def update_operator(operator, oid, db, redis):
    """Update operator status.

    :arg operator: dict, e.g. {'address': '', 'email': ''}
    :arg oid: string
    :arg db: database instance
    :arg redis: redis instance (kept for interface compatibility)

    Only whitelisted fields (address, email) are applied.
    BUG FIX: unknown keys in *operator* previously raised KeyError.
    SECURITY FIX: values are now passed as query parameters instead
    of being interpolated into the SQL string.
    """
    allowed_fields = ('address', 'email')
    set_clauses = []
    params = []
    for key, value in operator.iteritems():
        if key in allowed_fields:
            set_clauses.append("%s = %%s" % key)
            params.append(value)
    if set_clauses:
        params.append(oid)
        db.execute("UPDATE T_OPERATOR SET " + ','.join(set_clauses) +
                   " WHERE oid = %s", *params)
def insert_location(location, db, redis):
    """Insert whole-data into T_LOCATION.

    Persists the PVT row, then — unless tracking mode suppresses it —
    caches the newest location (and, for GPS fixes, a GPS-only copy)
    in redis for fast realtime reads.

    :arg location: dict-like PVT data (dev_id, lat, lon, type, ...)
    :return: the id of the inserted T_LOCATION row.
    """
    location = DotDict(location)
    # NOTE: if locate_error is bigger then 500, set it 500
    if int(location.locate_error) > 500:
        location.locate_error = 500
    lid = db.execute("INSERT INTO T_LOCATION(tid, latitude, longitude, altitude,"
                     " clatitude, clongitude, timestamp, name, category, type,"
                     " speed, degree, cellid, locate_error)"
                     " VALUES (%s, %s, %s, %s, %s, %s, %s,"
                     " %s, %s, %s, %s, %s, %s, %s)",
                     location.dev_id, location.lat, location.lon,
                     location.alt, location.cLat, location.cLon,
                     location.gps_time, location.name, location.category,
                     location.type, location.speed, location.degree,
                     location.cellid, location.locate_error)
    if location.lat and location.lon:
        track_key = get_track_key(location.dev_id)
        track = redis.get(track_key)
        # if track is on, just put PVT into redis
        # maybe put cellid into redis later.
        # NOTE: if location's type is gps, put it into redis
        if track and (int(track) == 1) and (location.type != 0):
            return lid
        location_key = get_location_key(location.dev_id)
        last_location = redis.getvalue(location_key)
        if (last_location and (location.gps_time > last_location['timestamp'])) or \
                not last_location:
            logging.info("[PUBLIC] Keep location in redis. tid: %s, location: %s",
                         location.dev_id, location)
            mem_location = {'id': lid,
                            'latitude': location.lat,
                            'longitude': location.lon,
                            'type': location.type,
                            'clatitude': location.cLat,
                            'clongitude': location.cLon,
                            'timestamp': location.gps_time,
                            'name': location.name,
                            'degree': location.degree,
                            'speed': location.speed,
                            'locate_error': location.locate_error}
            # FIX: removed a redundant recomputation of location_key here.
            redis.setvalue(location_key, mem_location,
                           EVENTER.LOCATION_EXPIRY)
            if int(location.type) == 0:  # gps
                # BUG FIX: this log literal was split by a stray line
                # break in the original source; rejoined into one string.
                logging.info("[PUBLIC] Keep gps_location in gps_redis. tid: %s, location: %s",
                             location.dev_id, location)
                gps_location_key = get_gps_location_key(location.dev_id)
                redis.setvalue(
                    gps_location_key, mem_location, EVENTER.LOCATION_EXPIRY)
    return lid
def handle_sleep(info, address, connection, channel, exchange, gw_binding,
                 db, redis):
    """
    S21 sleep status packet: 0-sleep, 1-LQ
    0: success, then record new terminal's address
    1: invalid SessionID

    Validates the session, de-duplicates resent packets, maps the
    reported sleep_status onto a login state and persists it, then
    composes the async response (success code 3 when a T2 ACC-status
    query is still pending).
    """
    try:
        head = info.head
        body = info.body
        dev_id = head.dev_id
        # Resend detection is keyed on (dev_id, timestamp, command).
        resend_key, resend_flag = get_resend_flag(redis, dev_id,
                                                  head.timestamp,
                                                  head.command)
        args = DotDict(success=GATEWAY.RESPONSE_STATUS.SUCCESS,
                       command=head.command)
        sessionID = QueryHelper.get_terminal_sessionID(dev_id, redis)
        is_sleep = False
        if sessionID != head.sessionID:
            args.success = GATEWAY.RESPONSE_STATUS.INVALID_SESSIONID
        else:
            if resend_flag:
                # Duplicate delivery: acknowledge but do not re-apply.
                logging.warn("[GW] Recv resend packet, head: %s, body: %s and drop it!",
                             info.head, info.body)
            else:
                redis.setvalue(resend_key, True, GATEWAY.RESEND_EXPIRY)
                hp = AsyncParser(body, head)
                sleep_info = hp.ret
                if sleep_info['sleep_status'] == '0':
                    sleep_info['login'] = GATEWAY.TERMINAL_LOGIN.SLEEP
                    #self.send_lq_sms(head.dev_id)
                    #logging.info("[GW] Recv sleep packet, LQ it: %s", head.dev_id)
                    is_sleep = True
                elif sleep_info['sleep_status'] == '1':
                    sleep_info['login'] = GATEWAY.TERMINAL_LOGIN.ONLINE
                else:
                    logging.info("[GW] Recv wrong sleep status: %s",
                                 sleep_info)
                # sleep_status is not a T_TERMINAL_INFO column; drop it
                # before the generic update.
                del sleep_info['sleep_status']
                update_terminal_info(db, redis, sleep_info)
                update_terminal_status(redis, dev_id, address, is_sleep)
        if args['success'] == GATEWAY.RESPONSE_STATUS.SUCCESS:
            acc_status_info_key = get_acc_status_info_key(dev_id)
            acc_status_info = redis.getvalue(acc_status_info_key)
            if acc_status_info and (not acc_status_info['t2_status']):
                # T2(query) is need
                args['success'] = 3  # acc_status is changed
                logging.info("[GW] ACC_status is changed, dev_id: %s, acc_status_info: %s",
                             dev_id, acc_status_info)
        hc = AsyncRespComposer(args)
        request = DotDict(packet=hc.buf,
                          address=address,
                          dev_id=dev_id)
        append_gw_request(request, connection, channel, exchange, gw_binding)
    except:
        logging.exception("[GW] Handle sleep status report exception.")
        GWException().notify()
def post(self):
    """Get a GPS location or cellid location.

    workflow:
    if gps: try to get a gps location
    elif cellid: get a latest cellid and get a cellid location

    Asynchronous handler: the actual lookup runs on the db worker
    queue and the response is written from the _on_finish callback.
    """
    status = ErrorCode.SUCCESS
    try:
        data = DotDict(json_decode(self.request.body))
        tid = data.get('tid', None)
        # check tid whether exist in request and update current_user
        self.check_tid(tid, finish=True)
        logging.info("[UWEB] realtime request: %s, uid: %s, tid: %s",
                     data, self.current_user.uid, self.current_user.tid)
    except Exception as e:
        status = ErrorCode.ILLEGAL_DATA_FORMAT
        logging.exception("[UWEB] Realtime failed. Exception: %s", e.args)
        self.write_ret(status)
        self.finish()
        return

    current_query = DotDict()
    current_query.timestamp = int(time())
    terminal = QueryHelper.get_available_terminal(self.current_user.tid,
                                                  self.db)
    if not terminal:
        status = ErrorCode.LOGIN_AGAIN
        logging.error("[UWEB] The terminal with tid: %s does not exist, redirect to login.html",
                      self.current_user.tid)
        self.write_ret(status)
        self.finish()
        return
    current_query.locate_flag = data.locate_flag

    def _on_finish(realtime):
        # Write the realtime result and close the long-held request.
        realtime['cellid_status'] = 1
        self.set_header(*self.JSON_HEADER)
        self.write(json_encode(realtime))
        self.finish()

    def __callback(db):
        # Runs on the worker: rebind the db handle, then query.
        self.db = db
        self.request_realtime(current_query, callback=_on_finish)
        #NOTE: deprecated.
        self.keep_waking(self.current_user.sim, self.current_user.tid)

    # Enqueue with priority 10 for the db worker pool.
    self.queue.put((10, __callback))
def determine_deadman_trigger(alert_params, events):
    """Given a deadman alert's params and a set of events (or lack thereof)
    determine if it should fire and resolve summary/snippets, etc

    Largely the same as a threshold alert, except this accounts
    for a lack of events
    (altogether missing, or below a count) as the trigger

    NOTE(review): `alert = alert_params` aliases (does not copy) the
    incoming params, so setting "triggered" mutates the shared dict.
    """
    counts = mostCommon(events, alert_params["aggregation_key"])
    if not events:
        # deadman alerts are built to notice
        # when expected events are missing
        # but it means we have no events to pass on
        # make a meta event for the fact that events are missing
        events = []
        meta_event = {
            "utctimestamp": utcnow().isoformat(),
            "severity": "INFO",
            "summary": "Expected event not found",
            "category": "deadman",
            "source": "deadman",
            "tags": ["deadman"],
            "plugins": [],
            "details": {},
        }
        events.append(meta_event)
    if not counts:
        # make up a metadata count
        counts = [(alert_params["aggregation_key"], 0)]
    for i in counts:
        # lack of events, or event count below the threshold is a trigger
        if i[1] <= alert_params["threshold"]:
            alert = alert_params
            alert["triggered"] = True
            # set the summary via chevron/mustache template
            # with the alert plus metadata
            metadata = {"metadata": {"value": i[0], "count": i[1]}}
            alert = merge(alert, metadata)
            # limit events to those matching the aggregation_key value
            # so the alert only gets events that match the count mostCommon results
            alert["events"] = []
            for event in events:
                dotted_event = DotDict(event)
                if i[0] == dotted_event.get(alert_params["aggregation_key"]):
                    alert["events"].append(dotted_event)
            alert["summary"] = chevron.render(alert["summary"], alert)
            # walk the alert events for any requested event snippets
            for event in alert["events"][:alert_params["event_sample_count"]]:
                alert["summary"] += " " + chevron.render(
                    alert_params["event_snippet"], event)
            yield alert
def get(self):
    """Page through the corp's passengers (T_PASSENGER).

    Query args: pagenum, pagecnt (-1 asks for a recount) and optional
    name/mobile LIKE filters.
    """
    status = ErrorCode.SUCCESS
    try:
        page_number = int(self.get_argument('pagenum'))
        page_count = int(self.get_argument('pagecnt'))
        #reserved API
        fields = DotDict(name="name LIKE '%%%%%s%%%%'",
                         mobile="mobile LIKE '%%%%%s%%%%'")
        for key in fields.iterkeys():
            v = self.get_argument(key, None)
            if v:
                # SECURITY FIX: the filter value is interpolated into
                # the WHERE clause below; validate it first, exactly as
                # the line-listing handler already does.
                if not check_sql_injection(v):
                    status = ErrorCode.SELECT_CONDITION_ILLEGAL
                    self.write_ret(status)
                    return
                fields[key] = fields[key] % (v,)
            else:
                fields[key] = None
    except Exception as e:
        status = ErrorCode.ILLEGAL_DATA_FORMAT
        logging.exception("[UWEB] cid: %s Send message to ios push server data format illegal. Exception: %s",
                          self.current_user.cid, e.args)
        self.write_ret(status)
        return
    try:
        where_clause = ' AND '.join([v for v in fields.itervalues()
                                     if v is not None])
        page_size = 20
        if where_clause:
            where_clause = ' AND ' + where_clause
        if page_count == -1:
            # The client asked for a fresh total-page count.
            sql = "SELECT count(id) as count FROM T_PASSENGER" + \
                  " WHERE 1=1 " + where_clause
            sql += " AND cid = %s" % (self.current_user.cid,)
            res = self.db.get(sql)
            count = res.count
            d, m = divmod(count, page_size)
            page_count = (d + 1) if m else d
        sql = "SELECT id, pid, name, mobile FROM T_PASSENGER" +\
              " WHERE 1=1 " + where_clause
        sql += " AND cid = %s LIMIT %s, %s" % (self.current_user.cid,
                                               page_number * page_size,
                                               page_size)
        passengers = self.db.query(sql)
        # Normalize NULL columns to empty strings for the client.
        for passenger in passengers:
            for key in passenger.keys():
                passenger[key] = passenger[key] if passenger[key] else ''
        self.write_ret(status,
                       dict_=DotDict(passengers=passengers,
                                     pagecnt=page_count))
    except Exception as e:
        logging.exception("[UWEB] cid: %s get passenger failed. Exception: %s",
                          self.current_user.cid, e.args)
        status = ErrorCode.SERVER_BUSY
        self.write_ret(status)
def write_ret(self, status, message=None, dict_=None):
    """
    write back ret message:
    dict(status=status, message=ErrorCode.ERROR_MESSAGE[status], ...)
    """
    payload = DotDict(status=status)
    # Default to the canonical message for this status code.
    payload.message = (WXErrorCode.ERROR_MESSAGE[status]
                       if message is None else message)
    if isinstance(dict_, dict):
        payload.update(dict_)
    self.set_header(*self.JSON_HEADER)
    self.write(json_encode(payload))
def post(self): """Retrieve the log of delegation. """ # check administrator_id start_time = int(self.get_argument("start_time")) end_time = int(self.get_argument("end_time")) select_clause = ( "SELECT T_ADMINISTRATOR.name as administrator, T_ADMINISTRATOR.login," + " T_DELEGATION_LOG.timestamp, T_TERMINAL_INFO.mobile as tmobile," + " T_USER.name as user_name " ) from_table_clause = " FROM T_DELEGATION_LOG, T_ADMINISTRATOR, T_TERMINAL_INFO, T_USER " where_clause = ( " WHERE T_DELEGATION_LOG.timestamp BETWEEN %s AND %s" + " AND T_DELEGATION_LOG.administrator_id = T_ADMINISTRATOR.id" + " AND T_DELEGATION_LOG.uid = T_USER.uid" + " AND T_DELEGATION_LOG.tid = T_TERMINAL_INFO.tid" ) where_clause = where_clause % (start_time, end_time) fields = DotDict( administrator="T_ADMINISTRATOR.name LIKE '%%%%%s%%%%'", login="******", user_name="T_USER.name LIKE '%%%%%s%%%%'", mobile="T_USER.mobile LIKE '%%%%%s%%%%'", tmobile="T_TERMINAL_INFO.mobile LIKE '%%%%%s%%%%'", ) for key in fields.iterkeys(): v = self.get_argument(key, None) if v: if not check_sql_injection(v): self.get() return fields[key] = fields[key] % (v,) else: fields[key] = None terms = [where_clause] + [v for v in fields.itervalues() if v] where_clause = " AND ".join(terms) sql = select_clause + from_table_clause + where_clause sql += " ORDER BY T_DELEGATION_LOG.timestamp DESC" logs = self.db.query(sql) for i, log in enumerate(logs): log["id"] = i + 1 self.render("delegation/log.html", logs=logs, interval=[start_time, end_time])
def put(self):
    """Modify some settings about mileage notification.

    Accepts any of distance_notification, day_notification and
    assist_mobile; each present field updates its own table.
    """
    status = ErrorCode.SUCCESS
    try:
        data = DotDict(json_decode(self.request.body))
        logging.info("[UWEB] Mileage notification request: %s, uid: %s, tid: %s",
                     data, self.current_user.uid, self.current_user.tid)
        tid = data.tid
    except Exception as e:
        status = ErrorCode.ILLEGAL_DATA_FORMAT
        self.write_ret(status)
        return

    try:
        distance_notification = data.get('distance_notification', None)
        day_notification = data.get('day_notification', None)
        assist_mobile = data.get('assist_mobile', None)
        if distance_notification is not None:
            self.db.execute("UPDATE T_MILEAGE_NOTIFICATION"
                            "  SET distance_notification = %s,"
                            "      notify_count = 0,"
                            "      left_days = 1,"
                            "      set_time = %s"
                            "  WHERE tid = %s",
                            distance_notification, int(time.time()), tid)
        if day_notification is not None:
            self.db.execute("UPDATE T_DAY_NOTIFICATION"
                            "  SET day_notification = %s,"
                            "      notify_count = 0,"
                            "      left_days = 1,"
                            "      set_time = %s"
                            "  WHERE tid = %s",
                            day_notification, int(time.time()), tid)
        if assist_mobile is not None:
            self.db.execute("UPDATE T_TERMINAL_INFO"
                            "  SET assist_mobile = %s"
                            "  WHERE tid = %s",
                            assist_mobile, tid)
        self.write_ret(status)
    except Exception as e:
        # BUG FIX: the original log format had a single placeholder but
        # three args, and referenced cid/oid which this handler never
        # uses; log uid/tid with matching placeholders instead.
        logging.exception("[UWEB] Update mileage notification failed. uid: %s, tid: %s, Exception: %s",
                          self.current_user.uid, tid, e.args)
        status = ErrorCode.SERVER_BUSY
        self.write_ret(status)
def __parse(self, packet):
    """Split a raw '[...]' packet into its head fields and body items.

    Returns (head, body) where head is a DotDict with 'timestamp' and
    'command' keys (timestamp falls back to the current time when empty)
    and body is the list of remaining comma-separated items. On any
    malformed input an error is logged and (empty head, empty body) is
    returned.
    """
    head = DotDict()
    body = []
    if not (packet.startswith('[') and packet.endswith(']')):
        logging.error("[CLWPARSE] Invalid packet: %s", packet)
        return head, body

    items = packet[1:-1].split(',')
    keys = ['timestamp', 'command']
    if len(items) < len(keys):
        logging.error("[CLWPARSE] Not a complete packet: %s", packet)
        return head, body

    for key, value in zip(keys, items):
        head[key] = value
    # empty timestamp field -> stamp with "now"
    head.timestamp = int(head.timestamp) if head.timestamp else int(time.time())
    body = items[len(keys):]
    return head, body
def parse_head(self, packet):
    """Parse the fixed head fields of a '[...]' packet.

    Returns (head, body): head is a DotDict with timestamp, agps_sign,
    dev_type, softversion, dev_id and command; body is the list of
    remaining items. Malformed packets are logged and yield an empty
    head/body.
    """
    head = DotDict()
    body = []
    keys = ['timestamp', 'agps_sign', 'dev_type', 'softversion',
            'dev_id', 'command']
    if packet.startswith('[') and packet.endswith(']'):
        items = packet[1:-1].split(',')
        if len(items) >= len(keys):
            for key, value in zip(keys, items):
                head[key] = value
            # an empty timestamp field falls back to "now"
            if head.timestamp:
                head.timestamp = int(head.timestamp)
            else:
                head.timestamp = int(time.time())
            body = items[len(keys):]
        else:
            logging.error("Not a complete packet: %s", packet)
    else:
        logging.error("Invalid packet: %s", packet)
    return head, body
def generate_metadata(context):
    """Collect lambda runtime details from the invocation context.

    Returns a DotDict with a single 'lambda_details' entry containing the
    function version, ARN, lower-cased name and memory limit.
    """
    lambda_details = {
        "function_version": context.function_version,
        "function_arn": context.invoked_function_arn,
        "function_name": context.function_name.lower(),
        "memory_size": context.memory_limit_in_mb,
    }
    return DotDict({"lambda_details": lambda_details})
def test_dict_match(self):
    """dict_match handles flat keys, and dotted keys on DotDict inputs."""
    nested = {
        "some_key": "some value",
        "sub_key": {
            "some_key": "some other value"
        },
    }
    # flat key match against a plain dict
    assert dict_match({"some_key": "some value"}, nested)

    # dotted-key matches require the DotDict wrapper
    dotted = DotDict(nested)
    assert dict_match({"sub_key.some_key": "some other value"}, dotted)
    assert (dict_match({"sub_key.some_key": "not some other value"},
                       dotted) == False)
def test_lambda_metadata_generation(self):
    """generate_metadata mirrors the lambda context into lambda_details."""
    context = DotDict({
        "function_version": "$LATEST",
        "invoked_function_arn": "arn:aws:lambda:us-west-2:722455710680:function:processor-prod",
        "function_name": "processor-prod",
        "memory_limit_in_mb": "1024",
    })
    result = generate_metadata(context)
    # result wrapper is the same DotDict type as the input
    assert type(result.lambda_details) == type(context)
    # all renamed fields are present
    for field in ("function_version", "function_arn",
                  "function_name", "memory_size"):
        assert field in result.lambda_details
def onMessage(self, message, metadata):
    """Normalize a gsuite admin-reports activity event.

    Tags the message as gsuite, renames details.ipaddress to
    details.sourceipaddress, promotes the event's own timestamp and actor
    email, renders a human summary, and sets success/suspicious flags.
    Non-matching messages are returned untouched. Returns the
    (message, metadata) tuple either way.
    """
    # for convenience, make a dot dict version of the message
    dot_message = DotDict(message)

    # double check that this is our target message
    # NOTE(review): 'id'/'etag' are tested against message.get('details','')
    # — when 'details' is missing the '' default makes these substring
    # tests rather than key tests, but the early return still fires;
    # confirm 'details' is always a dict upstream.
    if 'admin#reports#activity' not in dot_message.get('details.kind','')\
       or 'id' not in message.get('details','') \
       or 'etag' not in message.get('details',''):
        return (message, metadata)

    message["source"] = "gsuite"
    message["tags"].append("gsuite")

    # clean up ipaddress field: normalize to sourceipaddress
    if 'ipaddress' in message['details']:
        message['details']['sourceipaddress'] = message['details'][
            'ipaddress']
        del message['details']['ipaddress']

    # set the actual time from the event's own id.time, when present
    if dot_message.get("details.id.time", None):
        message['utctimestamp'] = toUTC(
            message['details']['id']['time']).isoformat()

    # set the user_name from the actor's email, when present
    if dot_message.get("details.actor.email", None):
        message["details"]["user"] = dot_message.get(
            "details.actor.email", "")

    # set summary via a chevron/mustache template over the message itself
    message["summary"] = chevron.render(
        "{{details.user}} {{details.events.0.name}} from IP {{details.sourceipaddress}}",
        message)

    # set category
    message['category'] = "authentication"

    # success/failure inferred from substrings of the rendered summary
    if 'fail' in message["summary"]:
        message["details"]["success"] = False
    if 'success' in message["summary"]:
        message["details"]["success"] = True

    # suspicious? look for google's is_suspicious flag in any event parameter
    suspicious = {"boolvalue": True, "name": "is_suspicious"}
    for e in dot_message.get("details.events", []):
        for p in e.get("parameters", []):
            if dict_match(suspicious, p):
                message["details"]["suspicious"] = True
    return (message, metadata)
def test_sub_dict(self):
    """sub_dict extracts keys; dotted keys resolve only on DotDict inputs."""
    plain = {
        "some_key": "some value",
        "sub_key": {
            "some_key": "some other value"
        },
    }
    # flat key on a plain dict
    assert sub_dict(plain, ["some_key"], "nothing") == {"some_key": "some value"}
    # dotted key on a plain dict falls back to the default
    assert sub_dict(plain, ["sub_key.some_key"], "nothing") == \
        {"sub_key.some_key": "nothing"}

    dotted = DotDict(plain)
    # dotted key resolves once wrapped
    assert sub_dict(dotted, ["sub_key.some_key"], "nothing") == \
        {"sub_key.some_key": "some other value"}
    expected = {
        "some_key": "some value",
        "sub_key.some_key": "some other value",
    }
    assert sub_dict(dotted, ["some_key", "sub_key.some_key"]) == expected
def lambda_handler(event, context):
    """Build runtime config from STS/environment and run the Athena query."""
    # resolve the current account plus athena settings (with defaults);
    # dict literals evaluate in order, so the STS call still happens first
    settings = {
        "account": boto3.client("sts").get_caller_identity().get("Account"),
        "athena_workgroup": os.environ.get("ATHENA_WORKGROUP", "defenda_data_lake"),
        "athena_database": os.environ.get("ATHENA_DATABASE", "defenda_data_lake"),
        "athena_table": os.environ.get("ATHENA_TABLE", "events"),
    }
    config = DotDict(settings)

    # render the query and execute it in the configured workgroup
    athena_query = get_athena_query(config)
    logger.debug(athena_query)
    cursor = connect(work_group=config.athena_workgroup).cursor()
    cursor.execute(athena_query)
    logger.debug("Query finished: {}".format(cursor.state))
    return
def convert_style(self):
    """Apply the configured styles, then wrap them for dotted attribute access.

    NOTE(review): self.styles is re-read after set_styles — presumably
    set_styles may replace it; confirm before reordering these lines.
    """
    self.set_styles(self.styles)
    self.styles = DotDict(self.styles)
for k, v in test_result.items() ])) if i == 15: break current_scale_indx += 1 if __name__ == '__main__': with open(configFile) as file: try: params = yaml.load(file, Loader=yaml.FullLoader) except yaml.YAMLError as exc: print(exc) sys.exit(0) params = DotDict(params) if not osp.isdir(params.xtra.out_path): os.makedirs(params.xtra.out_path) #SAVING PARAMETERS FOR RESTART....NEEDS WORK #np.save(osp.join(params.ip.out_path, 'params'), params) experiment_id = osp.basename(params.xtra.out_path) print('experiment ID: {}'.format(experiment_id)) #pprint(params) testing(params)
# -*- coding: utf-8 -*- from utils.dotdict import DotDict METHOD = DotDict(DELETE="DELETE", POST="POST", PUT="PUT", GET="GET") ASYNC_REQUEST_TIMEOUT = 45 CONNECT_TIMEOUT = 30
from pygame.draw import * from pygame.display import set_mode, update, set_caption, flip from pygame import event, colordict from pygame import * import pygame as pg from utils.dotdict import DotDict init() SIZE = 600, 600 S = set_mode(SIZE) COLORS = DotDict(colordict.THECOLORS) dragging = False all_lines = [] while True: for e in event.get(): if e.type == QUIT or e.type == KEYDOWN and e.type == K_q: pg.quit() exit() elif e.type == MOUSEMOTION: if all_lines and dragging: all_lines[-1].append(e.pos) elif e.type == MOUSEBUTTONDOWN: all_lines.append([]) dragging = True elif e.type == MOUSEBUTTONUP: dragging = False for line in all_lines: if len(line) > 1:
from pygame.display import *
from pygame_util import get_clock, random_color
from pygameapps.trackers.classes import Position
from utils.dotdict import DotDict

# NOTE(review): names used below (joystick, event, QUIT, KEYDOWN, K_q,
# JOYAXISMOTION, THECOLORS) are not provided by the imports above —
# presumably a `from pygame import *` exists elsewhere; confirm against
# the full file.

init()  # initialize the pygame display subsystem
SIZE = (600, 600)
S = set_mode(SIZE)  # main window surface
CENTER = SIZE[0] // 2, SIZE[1] // 2
CLOCK = get_clock()
DISTANCE = 100
SPEED = 10
colors = DotDict(THECOLORS)  # named colors with dotted access
obj = Position(0, 0)

# bring up the first attached joystick
joystick.init()
joy = joystick.Joystick(0)
joy.init()

# event loop: quit on window close or 'q'; log joystick axis motion
while True:
    for e in event.get():
        if e.type == QUIT:
            exit()
        elif e.type == KEYDOWN:
            if e.key == K_q:
                exit()
        elif e.type == JOYAXISMOTION:
            print(f'JOY {e.joy} AXIS {e.axis} VALUE {e.value}')
# for all sequence alerts in the DB # if slots are all met, create alert and remove inflight record create_sequence_alerts(db) # for all sequence alerts in the DB # expire any un-met inflight alerts that have exceeded their window expire_sequence_alerts(db) sys.exit() if __name__ == "__main__": # config options from alerts.yaml or -c <filename> parser = argparse.ArgumentParser() parser.add_argument("-c", "--config", help="Specify a configuration file") args = parser.parse_args() with open(args.config or "{}".format(sys.argv[0].replace(".py", ".yml"))) as fd: config = DotDict(yaml.safe_load(fd)) logging_config_file_path = Path(__file__).parent.joinpath( config.logging_config) with open(logging_config_file_path, "r") as fd: logging_config = yaml.safe_load(fd) logging.config.dictConfig(logging_config) logger = logging.getLogger() logger.debug("Logging configured") logger.debug(f"Configurated as {config}") main(config)
def observation(self, observation):
    """Convert every entry of the observation mapping to a torch tensor.

    Keys containing '_index' become long tensors; all others become
    float tensors. The mapping is mutated in place and returned wrapped
    in a DotDict for attribute-style access.
    """
    for key in list(observation):
        if '_index' in key:
            observation[key] = torch.tensor(observation[key], dtype=torch.long)
        else:
            observation[key] = torch.tensor(observation[key], dtype=torch.float)
    return DotDict(observation)
def lambda_handler(event, context):
    """
    Called on a PUT to s3
    Make every attempt to read in json records
    from the s3 source

    Handles plain .json files, gzipped files, files containing a
    'Records' list, bare lists/dicts of records, and (as a fallback)
    loosely-delimited json blocks; every recovered record is tagged with
    its source and forwarded to firehose.
    """
    metadata = generate_metadata(context)
    logger.debug("Event is: {}".format(event))
    # make the event easier to traverse
    event = DotDict(event)
    # test harnesses: two sentinel events short-circuit for smoke testing
    if event == {"test": "true"}:
        return {"Hello": "from s3_to_firehose"}
    elif event == {"metadata": "name"}:
        return metadata
    elif "Records" in event:
        # should be triggered by s3 Put/Object created events
        s3 = boto3.client("s3")
        for record in event.Records:
            record = DotDict(record)
            s3_bucket = record.s3.bucket.name
            s3_key = record.s3.object.key
            # a new bucket will fire for folders *and* files, early exit if it's a folder
            if s3_key.endswith("/"):
                continue
            # assume the file is just good ol json
            source = "s3json"
            # if the file name is cloudtrail-ish, tag records accordingly
            if is_cloudtrail(s3_key):
                source = "cloudtrail"
            # up to 5 attempts to get the object ( in case s3 file commit on write is lagging)
            s3_response = None
            for x in range(1, 6):
                try:
                    s3_response = s3.get_object(Bucket=s3_bucket, Key=s3_key)
                    break
                except Exception as e:
                    logger.error(
                        f"Attempt {x}: {e} while attempting to get_object {s3_bucket} {s3_key}"
                    )
                    sleep(1)
                    continue
            if not s3_response:
                logger.error(
                    f"5 attempts to retrieve {s3_bucket} {s3_key} failed, moving on"
                )
                continue
            s3_data = ""
            # gunzip if zipped (keyed off the .gz suffix)
            if s3_key[-3:] == ".gz":
                s3_raw_data = s3_response["Body"].read()
                with gzip.GzipFile(
                        fileobj=BytesIO(s3_raw_data)) as gzip_stream:
                    s3_data += "".join(
                        TextIOWrapper(gzip_stream, encoding="utf-8"))
            else:
                s3_data = s3_response["Body"].read().decode("utf-8")
            # create our list of records to append our findings to
            s3_records = []
            s3_dict = None
            try:
                # load the json we have from either a .json file or a gunziped file
                s3_dict = json.loads(s3_data)
            except JSONDecodeError:
                # file isn't well formed json, see if we can interpret json from it
                for block in emit_json_block(StringIO(s3_data)):
                    if block:
                        record = json.loads(block)
                        record["source"] = source
                        s3_records.append(record)
            # if this is a dict of a single 'Records' list, unroll the list into
            # it's sub records
            if s3_dict and "Records" in s3_dict:
                if type(s3_dict["Records"]) is list:
                    for record in s3_dict["Records"]:
                        record["source"] = source
                        s3_records.append(record)
            # maybe it's just a list already?
            elif s3_dict and type(s3_dict) is list:
                # a list of dicts
                for record in s3_dict:
                    record["source"] = source
                    s3_records.append(record)
            elif s3_dict and type(s3_dict) is dict:
                # a single dict, but lets add it to a list
                # for consistent handling
                s3_dict["source"] = source
                s3_records.append(s3_dict)
            logger.debug("pre-plugins s3_records is: {}".format(s3_records))
            # send off to firehose for further processing
            if s3_records:
                send_to_firehose(s3_records)
    return