def iot_device_data_array(sn=None, vsn=None):
    """Return the current tag values of an IOT device (or one of its
    sub-devices) from the realtime Redis store (db /2).

    :param sn: Gateway serial number; defaults to ``sn`` in the request form.
    :param vsn: Sub-device serial number; defaults to the gateway itself.
    :return: list of dicts with NAME/PV/TM/Q/DESC keys, or "" when the
             sub-device or its configuration cannot be resolved.
    :raises frappe.PermissionError: when the user cannot read the device.
    """
    sn = sn or frappe.form_dict.get('sn')
    vsn = vsn or sn
    doc = frappe.get_doc('IOT Device', sn)
    # Fix: the original called has_permission() but ignored its return
    # value; enforce it like the sibling implementation does.
    if not doc.has_permission("read"):
        raise frappe.PermissionError
    if vsn != sn:
        if vsn not in iot_device_tree(sn):
            return ""
    cfg = iot_device_cfg(sn, vsn)
    if not cfg:
        return ""
    client = redis.Redis.from_url(IOTHDBSettings.get_redis_server() + "/2")
    hs = client.hgetall(vsn)
    data = []

    def _append_tag(tag):
        # Shared body of the two previously copy/pasted loops.
        name = tag.get('name')
        tt = hs.get(name + ".TM")
        timestr = ''
        if tt:
            # .TM is epoch milliseconds; render in the user's timezone.
            timestr = str(
                convert_utc_to_user_timezone(
                    datetime.datetime.utcfromtimestamp(
                        int(int(tt) / 1000))).replace(tzinfo=None))
        data.append({
            "NAME": name,
            "PV": hs.get(name + ".PV"),
            "TM": timestr,
            "Q": hs.get(name + ".Q"),
            "DESC": tag.get("desc"),
        })

    # Fix: dict.has_key() was removed in Python 3; use the `in` operator.
    if "nodes" in cfg:
        for node in cfg.get("nodes"):
            for tag in node.get("tags"):
                _append_tag(tag)
    if "tags" in cfg:
        for tag in cfg.get("tags"):
            _append_tag(tag)
    return data
def device_info(sn):
    """Return a dict describing an IOT Device plus its realtime status
    fields (version, start time, uptime, platform) from Redis db /12.

    :param sn: device serial number.
    :raises frappe.PermissionError: when the session user cannot read it.
    """
    valid_auth_code()
    device = frappe.get_doc('IOT Device', sn)
    if not device.has_permission("read"):
        raise frappe.PermissionError

    device = {
        'sn': device.sn,
        'name': device.dev_name,
        'desc': device.description,
        'company': device.company,
        'location': 'UNKNOWN',  # TODO: Get device location
        'beta': device.use_beta,
        'is_beta': device_is_beta(sn),
        'status': device.device_status,
    }

    client = redis.Redis.from_url(IOTHDBSettings.get_redis_server() + "/12", decode_responses=True)
    if client.exists(sn):
        info = client.hgetall(sn)
        if info:
            device['version'] = info.get("version/value")
            device['skynet_version'] = info.get("skynet_version/value")
            _starttime = info.get("starttime/value")
            device['start_time'] = str(
                convert_utc_to_user_timezone(datetime.datetime.utcfromtimestamp(int(_starttime))).replace(
                    tzinfo=None))
            # Fix: with decode_responses=True the hash values are strings, so
            # the original `info.get("uptime/value") / 1000` raised TypeError;
            # convert to int first (value is milliseconds -> seconds).
            device['uptime'] = int(int(info.get("uptime/value")) / 1000)
            device['platform'] = info.get("platform/value")
    return device
def __init__(self, content):
    """Parse headers, content and attachments out of a raw message.

    :param content: Raw message text."""
    import email, email.utils
    import datetime

    self.raw = content
    self.mail = email.message_from_string(self.raw)

    self.text_content = ''
    self.html_content = ''
    self.attachments = []
    self.cid_map = {}

    self.parse()
    self.set_content_and_type()
    self.set_subject()

    # gmail mailing-list compatibility: gmail sometimes rewrites 'From',
    # so prefer X-Original-From when it is present
    sender = self.mail.get("X-Original-From") or self.mail["From"]
    self.from_email = extract_email_id(sender)
    self.from_real_name = email.utils.parseaddr(sender)[0]

    if not self.mail["Date"]:
        self.date = now()
    else:
        epoch = email.utils.mktime_tz(email.utils.parsedate_tz(self.mail["Date"]))
        self.date = convert_utc_to_user_timezone(
            datetime.datetime.utcfromtimestamp(epoch)).strftime('%Y-%m-%d %H:%M:%S')
def __init__(self, content):
    """Parse headers, content and attachments from the given raw message.

    :param content: Raw message text."""
    import email, email.utils
    import datetime

    self.raw = content
    self.mail = email.message_from_string(self.raw)

    self.text_content = ''
    self.html_content = ''
    self.attachments = []
    self.cid_map = {}

    self.parse()
    self.set_content_and_type()
    self.set_subject()

    self.from_email = extract_email_id(self.mail["From"])
    self.from_real_name = email.utils.parseaddr(self.mail["From"])[0]

    # Localize the Date header, falling back to now() when it is absent.
    if not self.mail["Date"]:
        self.date = now()
    else:
        epoch = email.utils.mktime_tz(email.utils.parsedate_tz(self.mail["Date"]))
        self.date = convert_utc_to_user_timezone(
            datetime.datetime.utcfromtimestamp(epoch)).strftime('%Y-%m-%d %H:%M:%S')
def __init__(self, content):
    """Parse headers, content and attachments from a raw email message.

    :param content: Raw message text."""
    import email, email.utils
    import datetime

    self.raw = content
    self.mail = email.message_from_string(self.raw)

    self.text_content = ''
    self.html_content = ''
    self.attachments = []
    self.cid_map = {}

    self.parse()
    self.set_content_and_type()
    self.set_subject()

    # gmail mailing-list compatibility — gmail may rewrite 'From', so the
    # X-Original-From header wins when available
    original_from = self.mail.get("X-Original-From") or self.mail["From"]
    self.from_email = extract_email_id(original_from)
    self.from_real_name = email.utils.parseaddr(original_from)[0]

    if not self.mail["Date"]:
        self.date = now()
    else:
        stamp = email.utils.mktime_tz(email.utils.parsedate_tz(self.mail["Date"]))
        utc_value = datetime.datetime.utcfromtimestamp(stamp)
        self.date = convert_utc_to_user_timezone(utc_value).strftime('%Y-%m-%d %H:%M:%S')
def __init__(self, content):
    """Parse headers, content and attachments from the given raw message.

    :param content: Raw message, either ``bytes`` or ``str``."""
    if six.PY2:
        self.mail = email.message_from_string(safe_encode(content))
    else:
        if isinstance(content, bytes):
            self.mail = email.message_from_bytes(content)
        else:
            self.mail = email.message_from_string(content)

    self.text_content = ''
    self.html_content = ''
    self.attachments = []
    self.cid_map = {}

    self.parse()
    self.set_content_and_type()
    self.set_subject()
    self.set_from()

    self.message_id = (self.mail.get('Message-ID') or "").strip(" <>")

    if self.mail["Date"]:
        try:
            utc = email.utils.mktime_tz(
                email.utils.parsedate_tz(self.mail["Date"]))
            utc_dt = datetime.datetime.utcfromtimestamp(utc)
            self.date = convert_utc_to_user_timezone(utc_dt).strftime(
                '%Y-%m-%d %H:%M:%S')
        except Exception:
            # Fix: the original bare `except:` also swallowed SystemExit /
            # KeyboardInterrupt; an unparsable Date header only raises
            # ordinary exceptions (e.g. TypeError from parsedate_tz -> None).
            self.date = now()
    else:
        self.date = now()

    # Clamp forged or clock-skewed future dates to the current time.
    if self.date > now():
        self.date = now()
def __init__(self, content):
    """Parse headers, content and attachments from the given raw message.

    :param content: Raw message, either ``bytes`` or ``str``."""
    if six.PY2:
        self.mail = email.message_from_string(safe_encode(content))
    else:
        if isinstance(content, bytes):
            self.mail = email.message_from_bytes(content)
        else:
            self.mail = email.message_from_string(content)

    self.text_content = ''
    self.html_content = ''
    self.attachments = []
    self.cid_map = {}

    self.parse()
    self.set_content_and_type()
    self.set_subject()
    self.set_from()

    self.message_id = (self.mail.get('Message-ID') or "").strip(" <>")

    if self.mail["Date"]:
        try:
            utc = email.utils.mktime_tz(email.utils.parsedate_tz(self.mail["Date"]))
            utc_dt = datetime.datetime.utcfromtimestamp(utc)
            self.date = convert_utc_to_user_timezone(utc_dt).strftime('%Y-%m-%d %H:%M:%S')
        except Exception:
            # Fix: narrowed the bare `except:` — it also caught SystemExit
            # and KeyboardInterrupt; a bad Date header raises only ordinary
            # exceptions.
            self.date = now()
    else:
        self.date = now()

    # Clamp forged or clock-skewed future dates to the current time.
    if self.date > now():
        self.date = now()
def add_device_event(event=None):
    """Record an 'IOT Device Event' document for a known device.

    :param event: event payload dict; when omitted it is read from the
                  POSTed JSON body. Must contain at least 'device' and
                  'time' (UTC, DATETIME_FORMAT).
    :return: the inserted document as a dict."""
    valid_auth_code()
    payload = event or get_post_json_data()

    sn = payload.get("device")
    if not sn:
        throw(_("Request fields not found. fields: device"))
    if not IOTDevice.check_sn_exists(sn):
        throw(_("Device {0} not found.").format(sn))

    gateway = frappe.get_doc("IOT Device", sn)

    # Event time arrives as UTC text; store it localized and naive.
    utc_ts = datetime.datetime.strptime(payload.get("time"), DATETIME_FORMAT)
    event_time = str(convert_utc_to_user_timezone(utc_ts).replace(tzinfo=None))

    event_doc = frappe.get_doc({
        "doctype": "IOT Device Event",
        "device": sn,
        "event_level": int(payload.get("level") or 0),
        "event_type": payload.get("type"),
        "event_info": payload.get("info"),
        "event_data": payload.get("data"),
        "event_time": event_time,
        "event_device": payload.get("device"),
        "event_source": payload.get("source"),
        "owner_type": gateway.owner_type,
        "owner_id": gateway.owner_id,
        "owner_company": gateway.company,
        "wechat_notify": 1,
    })
    return event_doc.insert().as_dict()
def __init__(self, content):
    """Parse headers, content and attachments from a raw message string.

    :param content: Raw message text."""
    import email, email.utils
    import datetime

    self.raw = content
    self.mail = email.message_from_string(self.raw)

    self.text_content = ''
    self.html_content = ''
    self.attachments = []
    self.cid_map = {}

    self.parse()
    self.set_content_and_type()
    self.set_subject()

    sender = self.mail["From"]
    self.from_email = extract_email_id(sender)
    self.from_real_name = email.utils.parseaddr(sender)[0]

    if not self.mail["Date"]:
        self.date = now()
    else:
        stamp = email.utils.mktime_tz(email.utils.parsedate_tz(self.mail["Date"]))
        self.date = convert_utc_to_user_timezone(
            datetime.datetime.utcfromtimestamp(stamp)).strftime('%Y-%m-%d %H:%M:%S')
def data(gateway, name=None):
    """Read the current input values of a gateway (or one of its devices)
    from Redis db /12 and write them into ``frappe.response`` as
    ``{"ok": True, "data": [...]}`` — or ``{"ok": False, "error": ...}``
    when anything goes wrong.

    :param gateway: gateway serial number.
    :param name: optional sub-device name; defaults to the gateway itself.
    """
    try:
        valid_auth_code()
        doc = frappe.get_doc('IOT Device', gateway)
        if not doc.has_permission("read"):
            throw("has_no_permission")

        if not name:
            name = gateway
        if name and name != gateway:
            if name not in gateway_device_list(gateway):
                throw("no_such_device_in_gateway")

        cfg = gateway_device_info(gateway, name)
        if not cfg:
            throw("device_info_empty")

        client = redis.Redis.from_url(IOTHDBSettings.get_redis_server() + "/12", decode_responses=True)
        hs = client.hgetall(name)

        device_data = []
        if "inputs" in cfg:
            # `inp` rather than `input` — avoid shadowing the builtin.
            for inp in cfg.get("inputs"):
                input_name = inp.get('name')
                s = hs.get(input_name + "/value")
                if not s:
                    # No realtime value: emit a placeholder row with bad quality.
                    device_data.append({
                        "name": input_name,
                        "pv": None,
                        "tm": '',
                        "q": -1,
                        "vt": inp.get('vt'),
                        "desc": inp.get("desc"),
                        "unit": inp.get('unit'),
                    })
                else:
                    # Fix: decode the value already fetched above instead of
                    # a redundant second hs.get() call.
                    val = json.loads(s)
                    ts = datetime.datetime.utcfromtimestamp(int(val[0]))
                    time_str = str(
                        convert_utc_to_user_timezone(ts).replace(tzinfo=None))
                    device_data.append({
                        "name": input_name,
                        "pv": val[1],
                        "tm": time_str,
                        "q": val[2],
                        "vt": inp.get('vt'),
                        "desc": inp.get("desc"),
                        "unit": inp.get('unit'),
                    })
        frappe.response.update({"ok": True, "data": device_data})
    except Exception as ex:
        frappe.response.update({"ok": False, "error": str(ex)})
def single_device_event_type_statistics(device):
    """Query InfluxDB for the last 7 days of per-type event counts
    (SYS/DEV/COMM/DATA/APP) of one device, bucketed per day.

    :param device: device serial number.
    :return: list of dicts with localized timestamps and per-category
             counts, the error body text on a non-200 reply, or None when
             there is nothing to report.
    """
    valid_auth_code()
    companies = list_user_companies(frappe.session.user)
    if len(companies) == 0:
        return
    company = companies[0]
    inf_server = IOTHDBSettings.get_influxdb_server()
    if not inf_server:
        frappe.logger(__name__).error("InfluxDB Configuration missing in IOTHDBSettings")
        return
    query = 'SELECT sum("SYS"), sum("DEV"), sum("COMM"), sum("DATA"), sum("APP")'
    query = query + ' FROM "single_device_event_type_statistics" WHERE time > now() - 7d'
    query = query + ' AND "owner"=\'' + company + '\'' + ' AND "iot"=\'' + device + '\' GROUP BY time(1d) FILL(0)'
    domain = frappe.get_value("Cloud Company", company, "domain")
    r = requests.session().get(inf_server + "/query",
                               params={"q": query, "db": domain + '.statistics'},
                               timeout=10)
    if r.status_code != 200:
        # Fix: requests.Response has no `.txt` attribute — the original
        # `return r.txt` raised AttributeError on every non-200 reply.
        return r.text

    ret = r.json()
    if not ret:
        return
    frappe.logger(__name__).error("Got Single Device Event Type Count {0}".format(json.dumps(ret)))
    results = ret['results']
    if not results or len(results) < 1:
        return
    series = results[0].get('series')
    if not series or len(series) < 1:
        return
    res = series[0].get('values')
    if not res:
        return
    taghis = []
    for row in res:
        # Fix: try the fallback timestamp format only when the first fails;
        # the original ran both try blocks unconditionally and could reuse a
        # stale value (or NameError) when neither format matched.
        try:
            utc_time = datetime.datetime.strptime(row[0], UTC_FORMAT1)
        except ValueError:
            utc_time = datetime.datetime.strptime(row[0], UTC_FORMAT2)
        local_time = str(convert_utc_to_user_timezone(utc_time).replace(tzinfo=None))
        hisvalue = {'name': 'single_device_event_type_statistics', 'time': local_time,
                    'owner': company, 'device': device}
        hisvalue['系统'] = row[1] or 0
        hisvalue['设备'] = row[2] or 0
        hisvalue['通讯'] = row[3] or 0
        hisvalue['数据'] = row[4] or 0
        hisvalue['应用'] = row[5] or 0
        taghis.append(hisvalue)
    return taghis
def add_job(j, name):
    """Append a summary dict for RQ job *j* to the enclosing `jobs` list,
    skipping jobs that belong to another site.

    :param j: the RQ job.
    :param name: queue name the job was found on."""
    if j.kwargs.get('site') != frappe.local.site:
        return
    inner_kwargs = j.kwargs.get('kwargs', {})
    entry = {
        'job_name': inner_kwargs.get('playbook_method')
            or str(j.kwargs.get('job_name')),
        'status': j.status,
        'queue': name,
        'creation': format_datetime(convert_utc_to_user_timezone(j.created_at)),
        'color': colors[j.status],
    }
    jobs.append(entry)
    if j.exc_info:
        entry['exc_info'] = j.exc_info
def device_status_statistics():
    """Query InfluxDB for the last 12 hours of online/offline device counts
    of the current user's first company.

    :return: list of dicts with localized timestamps and online/offline
             counts, the error body text on a non-200 reply, or None when
             there is nothing to report.
    """
    valid_auth_code()
    companies = list_user_companies(frappe.session.user)
    if len(companies) == 0:
        return
    company = companies[0]
    inf_server = IOTHDBSettings.get_influxdb_server()
    if not inf_server:
        frappe.logger(__name__).error("InfluxDB Configuration missing in IOTHDBSettings")
        return
    query = 'SELECT "online", "offline" FROM "device_status_statistics" WHERE time > now() - 12h AND "owner"=\'' + company + '\''
    domain = frappe.get_value("Cloud Company", company, "domain")
    r = requests.session().get(inf_server + "/query",
                               params={"q": query, "db": domain + '.statistics'},
                               timeout=10)
    if r.status_code != 200:
        # Fix: requests.Response has no `.txt` attribute — the original
        # `return r.txt` raised AttributeError on every non-200 reply.
        return r.text

    ret = r.json()
    if not ret:
        return
    results = ret['results']
    if not results or len(results) < 1:
        return
    series = results[0].get('series')
    if not series or len(series) < 1:
        return
    res = series[0].get('values')
    if not res:
        return
    taghis = []
    for row in res:
        # Fix: try the fallback timestamp format only when the first fails;
        # the original ran both try blocks unconditionally and could reuse a
        # stale value (or NameError) when neither format matched.
        try:
            utc_time = datetime.datetime.strptime(row[0], UTC_FORMAT1)
        except ValueError:
            utc_time = datetime.datetime.strptime(row[0], UTC_FORMAT2)
        local_time = str(convert_utc_to_user_timezone(utc_time).replace(tzinfo=None))
        taghis.append({'name': 'device_status_statistics', 'online': row[1],
                       'time': local_time, 'offline': row[2], 'owner': company})
    return taghis
def upload_device_conf(conf=None):
    """Persist an uploaded device configuration as an 'IOT Device Conf' doc.

    :param conf: payload dict with sn/timestamp/data/md5; when omitted it is
                 read from the POSTed JSON body.
    :return: True on success."""
    valid_auth_code()
    payload = conf or get_post_json_data()

    # timestamp arrives as epoch seconds (UTC); store it localized and naive.
    utc_ts = datetime.datetime.utcfromtimestamp(int(payload.get("timestamp")))
    local_ts = convert_utc_to_user_timezone(utc_ts).replace(tzinfo=None)

    frappe.get_doc({
        "doctype": "IOT Device Conf",
        "device": payload.get("sn"),
        "timestamp": local_ts,
        "data": payload.get("data"),
        "hashing": payload.get("md5"),
    }).insert(ignore_permissions=True)
    return True
def add_job(job: 'Job', name: str) -> None:
    """Append a summary of *job* to the enclosing `jobs` list when the job
    belongs to the current site.

    :param job: the RQ job to summarize.
    :param name: queue name the job was found on.
    """
    if job.kwargs.get('site') != frappe.local.site:
        return
    # Fix: get_status() was called twice; each call may round-trip to the
    # job store and the status could even change between the two calls,
    # producing a 'status' that disagrees with its 'color'.
    status = job.get_status()
    inner_kwargs = job.kwargs.get('kwargs', {})
    job_info = {
        'job_name': inner_kwargs.get('playbook_method')
        or inner_kwargs.get('job_type')
        or str(job.kwargs.get('job_name')),
        'status': status,
        'queue': name,
        'creation': format_datetime(convert_utc_to_user_timezone(job.created_at)),
        'color': JOB_COLORS[status]
    }
    if job.exc_info:
        job_info['exc_info'] = job.exc_info
    jobs.append(job_info)
def taghisdata(sn=None, vsn=None, fields=None, condition=None):
    """Fetch tag history of a device from InfluxDB.

    :param sn: gateway serial number.
    :param vsn: sub-device serial number (measurement name); defaults to sn.
    :param fields: comma-separated SELECT field list; defaults to '*'.
    :param condition: raw InfluxQL WHERE clause; without it the query is
                      capped at 1000 rows.
    :return: list of history dicts; 500 when InfluxDB is not configured;
             the raw JSON reply when the result shape is unexpected; None
             on a non-200 reply.
    :raises frappe.PermissionError: when the user cannot read the device.
    """
    vsn = vsn or sn
    fields = fields or "*"
    doc = frappe.get_doc('IOT Device', sn)
    # Fix: the original called has_permission() but ignored the result.
    if not doc.has_permission("read"):
        raise frappe.PermissionError
    inf_server = IOTHDBSettings.get_influxdb_server()
    if not inf_server:
        frappe.logger(__name__).error("InfluxDB Configuration missing in IOTHDBSettings")
        return 500
    # NOTE(review): `fields` and `condition` are concatenated into InfluxQL
    # unescaped — callers must never pass untrusted input here.
    query = 'SELECT ' + fields + ' FROM "' + vsn + '"'
    if condition:
        query = query + " WHERE " + condition
    else:
        query = query + " LIMIT 1000"
    domain = frappe.get_value("Cloud Company", doc.company, "domain")
    r = requests.session().get(inf_server + "/query", params={"q": query, "db": domain}, timeout=10)
    if r.status_code != 200:
        return
    try:
        res = r.json()["results"][0]['series'][0]['values']
        taghis = []
        for row in res:
            # Fix: try the fallback timestamp format only when the first
            # fails; the original ran both try blocks unconditionally and
            # could silently reuse the previous row's timestamp.
            try:
                utc_time = datetime.datetime.strptime(row[0], UTC_FORMAT1)
            except ValueError:
                utc_time = datetime.datetime.strptime(row[0], UTC_FORMAT2)
            local_time = str(convert_utc_to_user_timezone(utc_time).replace(tzinfo=None))
            hisvalue = {}
            # Column row[2] encodes the value type: '1' -> value in row[4],
            # '2' -> value in row[3]; other types produce an empty row as
            # before.
            if row[2] == '1':
                hisvalue = {'name': row[1], 'value': row[4], 'time': local_time, 'quality': 0}
            elif row[2] == '2':
                hisvalue = {'name': row[1], 'value': row[3], 'time': local_time, 'quality': 0}
            taghis.append(hisvalue)
        return taghis
    except Exception:
        # Unexpected result shape — hand back the raw reply for debugging.
        return r.json()
def iot_device_data_array(sn=None, vsn=None):
    """Return the current input values of an IOT device (or one of its
    sub-devices) from the realtime Redis store (db /12).

    :param sn: Gateway serial number; defaults to ``sn`` in the request form.
    :param vsn: Sub-device serial number; defaults to the gateway itself.
    :return: list of dicts with name/pv/tm/q/vt/desc keys, or "" when the
             sub-device or its configuration cannot be resolved.
    :raises frappe.PermissionError: when the user cannot read the device.
    """
    sn = sn or frappe.form_dict.get('sn')
    vsn = vsn or sn
    doc = frappe.get_doc('IOT Device', sn)
    if not doc.has_permission("read"):
        raise frappe.PermissionError
    if vsn != sn:
        if vsn not in iot_device_tree(sn):
            return ""
    cfg = iot_device_cfg(sn, vsn)
    if not cfg:
        return ""
    client = redis.Redis.from_url(IOTHDBSettings.get_redis_server() + "/12", decode_responses=True)
    hs = client.hgetall(vsn)
    data = []
    if "inputs" in cfg:
        # `inp` rather than `input` — avoid shadowing the builtin.
        for inp in cfg.get("inputs"):
            input_name = inp.get('name')
            s = hs.get(input_name + "/value")
            if not s:
                continue
            # Fix: decode the value already fetched above instead of a
            # redundant second hs.get() call.
            val = json.loads(s)
            # val is [epoch_seconds, value, quality]
            ts = datetime.datetime.utcfromtimestamp(int(val[0]))
            timestr = str(
                convert_utc_to_user_timezone(ts).replace(tzinfo=None))
            data.append({
                "name": input_name,
                "pv": val[1],
                "tm": timestr,
                "q": val[2],
                "vt": inp.get('vt'),
                "desc": inp.get("desc")
            })
    return data
def add_job(job: "Job", name: str) -> None:
    """Append a summary of *job* to the enclosing `jobs` list when the job
    belongs to the current site.

    :param job: the RQ job to summarize.
    :param name: queue name the job was found on.
    """
    if job.kwargs.get("site") != frappe.local.site:
        return
    # Fix: get_status() was called twice; each call may round-trip to the
    # job store and the status could even change between the two calls,
    # producing a "status" that disagrees with its "color".
    status = job.get_status()
    inner_kwargs = job.kwargs.get("kwargs", {})
    job_info = {
        "job_name": inner_kwargs.get("playbook_method")
        or inner_kwargs.get("job_type")
        or str(job.kwargs.get("job_name")),
        "status": status,
        "queue": name,
        "creation": format_datetime(convert_utc_to_user_timezone(job.created_at)),
        "color": JOB_COLORS[status],
    }
    if job.exc_info:
        job_info["exc_info"] = job.exc_info
    jobs.append(job_info)
def get_timezone_naive_datetime(gcalendar_date_object):
    """Convert a Google Calendar date object into a naive datetime string
    rendered in the user's timezone ('%Y-%m-%d %H:%M:%S').

    :param gcalendar_date_object: dict with a 'dateTime' ISO-8601 entry."""
    parsed = arrow.get(gcalendar_date_object.get("dateTime"))
    utc_naive = parsed.to("UTC").naive
    return convert_utc_to_user_timezone(utc_naive).strftime('%Y-%m-%d %H:%M:%S')
def get_data(from_date, to_date, node, speed, mOnOff):
    """Build per-day shift output and OEE-style metrics for one machine.

    Aggregates the speed signal and the machine on/off signal from
    Elasticsearch between *from_date* and *to_date*, and folds the daily
    buckets into rows of [shift1, shift2, shift3, availability%,
    performance%, oee].

    :param node: parent of the two Signal records to look up.
    :param speed: label of the speed signal (commas stripped).
    :param mOnOff: label of the machine on/off signal (commas stripped).
    :return: (label, data) — `label` stays an empty list here; `data` is a
             date-sorted list of [date_str, *metrics] rows.
    """
    site_name = cstr(frappe.local.site)
    speed = speed.replace(",", "").strip()
    mOnOff = mOnOff.replace(",", "").strip()
    # Resolve the Elasticsearch field names from the Signal records:
    # "<ip with dots as underscores>.<label with spaces as underscores>"
    sg_speed = frappe.get_all("Signal",
                              filters={
                                  "parent": node,
                                  "label": speed
                              },
                              fields=['ip', 'min', 'max'])[0]
    sg_mOnOff = frappe.get_all("Signal",
                               filters={
                                   "parent": node,
                                   "label": mOnOff
                               },
                               fields=['ip'])[0]
    sg_speed_str = sg_speed.ip.replace('.', '_') + '.' + speed.replace(
        " ", "_")
    sg_mOnOff_str = sg_mOnOff.ip.replace('.', '_') + '.' + mOnOff.replace(
        " ", "_")
    es = Elasticsearch([frappe.get_conf().get("elastic_server")],
                       scheme="https",
                       port=443)
    # One date_histogram bucket per day (+07:00 local time), averaging the
    # speed and on/off signals; pm_output = avg_speed * avg_on_fraction.
    query = {
        "size": 0,
        "query": {
            "constant_score": {
                "filter": {
                    "range": {
                        "id": {
                            "gte": from_date,
                            "lte": to_date,
                            "format": "yyyy-MM-dd",
                            "time_zone": "+07:00"
                        }
                    }
                }
            }
        },
        "aggs": {
            "machine_performance": {
                "date_histogram": {
                    "field": "id",
                    "interval": "1d",
                    "format": "yy-MM-dd HH:mm",
                    "time_zone": "+07:00",
                    "offset": "+0h"
                },
                "aggs": {
                    "avg_output1": {
                        "avg": {
                            "field": sg_speed_str
                        }
                    },
                    "avg_pm_on1": {
                        "avg": {
                            "field": sg_mOnOff_str
                        }
                    },
                    "pm_output": {
                        "bucket_script": {
                            "buckets_path": {
                                "tavg_output1": "avg_output1",
                                "tavg_pm_on1": "avg_pm_on1"
                            },
                            "script": "params.tavg_output1 * params.tavg_pm_on1"
                        }
                    }
                }
            }
        }
    }
    res = es.search(index=site_name, body=query)
    res = res['aggregations']['machine_performance']['buckets']
    # Shift start times come from the Work Shift Settings single doc.
    wss = frappe.get_doc("Work Shift Settings")
    t_start_shift1 = datetime.strptime(wss.shift_1_start, "%H:%M:%S").time()
    t_start_shift2 = datetime.strptime(wss.shift_2_start, "%H:%M:%S").time()
    t_start_shift3 = datetime.strptime(wss.shift_3_start, "%H:%M:%S").time()
    label, data, d, avail, perf = [], [], {}, {}, {}
    timespan = 8 * 60  # shift length in minutes, since the speed is per-minute
    for r in res:
        # Skip buckets where the on/off average is missing entirely.
        if r['avg_pm_on1']['value'] == None:
            continue
        # Bucket key is epoch millis; the -1 second shifts an exact bucket
        # boundary back onto the previous day — presumably intentional,
        # TODO confirm.
        utc_dt = datetime.utcfromtimestamp(r['key'] / 1000 - 1)
        t = convert_utc_to_user_timezone(utc_dt)
        date_str = t.strftime('%y-%m-%d')
        if date_str not in d:
            # Row layout: [shift1, shift2, shift3, availability, performance, oee]
            d[date_str] = [0, 0, 0, 0.0, 0.0, 0.0]
            avail[date_str] = 0  # running sum used to average availability
            perf[date_str] = 0  # running sum used to average performance
        # Accumulate and re-derive the averaged percentages on every bucket.
        avail[date_str] += float(r['avg_pm_on1']['value'])
        perf[date_str] += float(r['avg_output1']['value'])
        d[date_str][3] = round(avail[date_str] * 100 / 3,
                               1)  # divide by 3: three shifts per day
        d[date_str][4] = round(perf[date_str] * 100 / sg_speed.max / 3, 1)
        d[date_str][5] = round(
            d[date_str][3] * d[date_str][4] / 100, 2
        )  # OEE without defects; /100 compensates one of the percentages
        # Assign the bucket's output to a shift by comparing its local time
        # with the configured shift start times (get_time_delta is
        # presumably in seconds — the 86280 bound below is 23h58m).
        if get_time_delta(t.time(), t_start_shift1) < 2:
            d[date_str][0] = int(r['pm_output']['value']) * timespan
        elif get_time_delta(t.time(), t_start_shift2) < 2:
            d[date_str][1] = int(r['pm_output']['value']) * timespan
        elif get_time_delta(
                t.time(), t_start_shift3) > 86280 or get_time_delta(
                    t.time(), t_start_shift3) < 2:
            # 86280 = 23 hours*3600 + 58 min* 60
            if date_str not in d:
                d[date_str] = [0, 0, 0]
            d[date_str][2] = int(r['pm_output']['value']) * timespan
    od = collections.OrderedDict(sorted(d.items()))
    data = [[k] + v for k, v in od.items()]
    return label, data
def __init__(self, content):
    """Parse headers, content and attachments from the given raw message
    and populate From/To/CC, message ids and a localized date string.

    :param content: Raw message text."""
    self.raw = content
    self.mail = email.message_from_string(self.raw)
    self.text_content = ''
    self.html_content = ''
    self.attachments = []
    self.cid_map = {}  # content-id map
    self.parse()
    self.set_content_and_type()
    self.set_subject()
    self.set_from()
    self.message_id = self.mail.get('Message-ID')
    self.unique_id = get_unique_id(self.mail)

    # NOTE: a legacy gmail mailing-list block used to live here — it
    # preferred X-Original-From over 'From' and re-decoded the sender's
    # real name; that handling is superseded by set_from() above.

    # Decode each RFC 2047 encoded chunk of the To header, then lowercase.
    self.To = self.mail.get("To")
    if self.To:
        to = u""
        for name, encoding in decode_header(self.To):
            if encoding:
                # NOTE(review): assumes `name` is bytes whenever
                # decode_header reports an encoding — confirm on Python 3,
                # where str has no decode().
                to += name.decode(encoding)
            else:
                to += name
        self.To = to.lower()
    self.CC = self.mail.get("CC")
    if self.CC:
        self.CC = self.CC.lower()

    # Localize the Date header; fall back to now() when it is missing or
    # unparsable.
    if self.mail["Date"]:
        try:
            utc = email.utils.mktime_tz(email.utils.parsedate_tz(self.mail["Date"]))
            utc_dt = datetime.datetime.utcfromtimestamp(utc)
            self.date = convert_utc_to_user_timezone(utc_dt).strftime('%Y-%m-%d %H:%M:%S')
        except:
            self.date = now()
    else:
        self.date = now()
    # Clamp forged/future dates to the current time.
    if self.date > now():
        self.date = now()
def get_data(from_date, to_date, interval, node, signal):
    """Aggregate avg/min/max of one machine signal from Elasticsearch.

    :param from_date: range start ('yyyy-MM-dd').
    :param to_date: range end ('yyyy-MM-dd').
    :param interval: date_histogram bucket size (e.g. "24h", "1h").
    :param node: parent of the Signal record to look up.
    :param signal: label of the signal (commas stripped).
    :return: (label, data) — `label` stays an empty list (kept for caller
             compatibility); `data` is a date-sorted list of
             [date_str, avg, min, max] rows.
    """
    site_name = cstr(frappe.local.site)
    signal = signal.replace(",", "").strip()
    # Resolve the Elasticsearch field name from the Signal record:
    # "<ip with dots as underscores>.<label with spaces as underscores>"
    sg_signal = frappe.get_all("Signal",
                               filters={
                                   "parent": node,
                                   "label": signal
                               },
                               fields=['ip', 'min', 'max'])[0]
    sg_signal_str = sg_signal.ip.replace('.', '_') + '.' + signal.replace(
        " ", "_")
    es = Elasticsearch([frappe.get_conf().get("elastic_server")],
                       scheme="https",
                       port=443)
    # One date_histogram bucket per `interval` (+07:00 local time) with
    # avg/max/min sub-aggregations over the signal field.
    query = {
        "size": 0,
        "query": {
            "constant_score": {
                "filter": {
                    "range": {
                        "id": {
                            "gte": from_date,
                            "lte": to_date,
                            "format": "yyyy-MM-dd",
                            "time_zone": "+07:00"
                        }
                    }
                }
            }
        },
        "aggs": {
            "signal": {
                "date_histogram": {
                    "field": "id",
                    "interval": interval,
                    "format": "yy-MM-dd HH:mm",
                    "time_zone": "+07:00",
                    "offset": "+0h"
                },
                "aggs": {
                    "signal_avg": {
                        "avg": {
                            "field": sg_signal_str
                        }
                    },
                    "signal_max": {
                        "max": {
                            "field": sg_signal_str
                        }
                    },
                    "signal_min": {
                        "min": {
                            "field": sg_signal_str
                        }
                    }
                }
            }
        }
    }
    res = es.search(index=site_name, body=query)
    res = res['aggregations']['signal']['buckets']
    # Fix: dropped the unused `avail`, `perf` and `timespan` locals that
    # were copy/pasted from the OEE variant of this report.
    label, data, d = [], [], {}
    for r in res:
        # Skip empty buckets (no documents -> null average).
        if r['signal_avg']['value'] is None:
            continue
        # Bucket key is epoch milliseconds.
        utc_dt = datetime.utcfromtimestamp(r['key'] / 1000)
        t = convert_utc_to_user_timezone(utc_dt)
        if interval == "24h":
            date_str = t.strftime('%y-%m-%d')
        else:
            date_str = t.strftime('%y-%m-%d %H:%M')
        d[date_str] = [
            round(float(r['signal_avg']['value']), 2),
            round(float(r['signal_min']['value']), 2),
            round(float(r['signal_max']['value']), 2),
        ]
    od = collections.OrderedDict(sorted(d.items()))
    data = [[k] + v for k, v in od.items()]
    return label, data