def create_alert(channel, req_data):
    """Forward an event message to the channel's Abaco actor and persist an alert.

    Builds the actor message from *req_data* plus channel/instrument metadata,
    POSTs it to the actor's nonce-authenticated messages endpoint, and — on a
    200 response — stores an alert document via ``meta.create_alert``.

    NOTE(review): ``req_data`` is mutated in place (channel/project/site ids are
    written into ``req_data['message']`` through aliasing) — confirm callers
    expect that before changing it.

    Returns:
        (result, msg) from ``meta`` after stripping Mongo metadata.
    Raises:
        errors.BaseTapyException: when the POST to the actor fails outright.
        errors.ResourceError: when the actor responds with a non-200 status.
    """
    action = channel['triggers_with_actions'][0]['action']
    actor_id = action['actor_id']
    logger.debug('actor_id:' + actor_id)
    abaco_base_url = action['abaco_base_url']
    abaco_nonce = action['nonces']
    abaco_url = abaco_base_url + '/actors/v2/' + actor_id + '/messages?x-nonce=' + abaco_nonce
    logger.debug('abaco_url: ' + abaco_url)
    # prepare request for abaco
    headers = {'accept': 'application/json'}
    message_data = {'message': req_data}
    message_data['message']['channel_id'] = channel['channel_id']
    result = meta.fetch_instrument_index(
        channel["triggers_with_actions"][0]['inst_ids'][0])
    message_data['message']['project_id'] = result[0]['project_id']
    message_data['message']['site_id'] = result[0]['site_id']
    message_data['message']['inst_id'] = result[0]['instrument_id']
    # condition key is of the form "<inst_id>.<var_id>"
    cond_key = channel['triggers_with_actions'][0]['condition']['key'].split(".")
    message_data['message']['var_id'] = cond_key[1]
    logger.debug('message_data: ' + str(message_data))
    try:
        # NOTE(review): verify=False disables TLS certificate validation —
        # confirm this is intentional for the deployment environment.
        res = requests.post(abaco_url, json=message_data, headers=headers, verify=False)
    except Exception as e:
        msg = f"Got exception trying to post message to Abaco actor: {actor_id}; exception: {e}"
        # BUG FIX: `res` is unbound when requests.post itself raises, so the
        # original `request=res.request` produced a NameError that masked the
        # real failure. Raise with the message only.
        raise errors.BaseTapyException(msg=msg)
    logger.debug('abaco response:' + res.text)
    logger.debug('abaco response status code:' + str(res.status_code))
    if res.status_code == 200:
        abaco_res = json.loads(res.text)
        # TODO create alert summary and alert object with all details
        execution_id = abaco_res['result']['executionId']
        alert = {
            'alert_id': str(uuid.uuid4()),
            'channel_name': channel['channel_name'],
            'channel_id': channel['channel_id'],
            'actor_id': actor_id,
            'execution_id': execution_id,
            'message': req_data['message'],
            'create_time': str(datetime.datetime.utcnow()),
        }
        logger.debug(alert)
        alert_result, msg = meta.create_alert(alert)
        logger.debug(alert_result)
        result = meta.strip_meta(alert_result)
        logger.debug(result)
        return result, msg
    else:
        msg = f"Abaco Actor: {actor_id} unable to perform the execution on the message: {message_data}. Check the Actor Status and the message"
        raise errors.ResourceError(msg=msg)
def post(self):
    """Create a measurement in CHORDS for the instrument named in the request body.

    Looks up the instrument's CHORDS id via the instrument index and forwards
    the payload; falls through (implicit None) when no ``inst_id`` is supplied
    or the instrument is unknown, matching the original behavior.
    """
    payload = request.json
    logger.debug(payload)
    if 'inst_id' not in payload:
        return
    index_rows = meta.fetch_instrument_index(payload['inst_id'])
    logger.debug(index_rows)
    if not index_rows:
        return
    # check SK
    logger.debug("YES")
    logger.debug(index_rows[0]['chords_inst_id'])
    resp = chords.create_measurement(index_rows[0]['chords_inst_id'], payload)
    logger.debug(resp)
    return resp
def convert_conditions_to_vars(req_body):
    """Translate a channel trigger condition into the CHORDS lambda 'vars' dict.

    The condition key has the form ``"<inst_id>.<var_id>"``; the instrument and
    variable ids are resolved to their CHORDS ids and baked into a lambda
    criterion string of the form::

        "value":"(\"var\" == '1') AND (\"value\" > 91.0)"

    NOTE(review): if ``meta.fetch_instrument_index`` returns no rows, the lookup
    in ``inst_var_chords_ids`` below raises KeyError (same as the original) —
    confirm upstream validation guarantees the instrument exists.

    Returns:
        dict with 'crit' (the lambda criterion) and 'channel_id' entries.
    """
    logger.debug("CONVERTING condition to vars ...")
    trigger = req_body['triggers_with_actions'][0]
    inst_chords_id = {}
    inst_var_chords_ids = {}  # keyed by "inst_id.var_id"
    cond_key = trigger['condition']['key'].split(".")
    # fetch chords id for the instrument
    result = meta.fetch_instrument_index(cond_key[0])
    logger.debug(result)
    if len(result) > 0:
        logger.debug(" chords instrument_ id: " + str(result[0]['chords_inst_id']))
        inst_chords_id[cond_key[0]] = result[0]['chords_inst_id']
        # fetch chords id for the variable
        result_var, message = meta.get_variable(result[0]['project_id'],
                                                result[0]['site_id'],
                                                result[0]['instrument_id'],
                                                cond_key[1])
        logger.debug("variable chords id : " + str(result_var['chords_id']))
        inst_var_chords_ids[trigger['condition']['key']] = result_var['chords_id']
    # create the vars dictionary (renamed from `vars` — shadowed the builtin)
    trigger_vars = {}
    trigger_vars['crit'] = {}
    trigger_vars['crit']['type'] = "lambda"
    # value is of the form : "value":"(\"var\" == '1') AND (\"value\" > 91.0)"
    trigger_vars['crit']['value'] = "(\"var\" == '" + str(
        inst_var_chords_ids[trigger['condition']['key']]) + "') AND (\"value\"" + \
        trigger['condition']['operator'] + str(trigger['condition']['val']) + ")"
    # channel id information is added for later processing of the alerts
    trigger_vars["channel_id"] = {"type": "string", "value": req_body['channel_id']}
    return trigger_vars
def post(self):
    """Persist a measurement to InfluxDB for the instrument named in the body.

    Resolves the instrument's site via the index, finds the matching instrument
    document on that site, and writes the payload to Influx. Always acknowledges
    with an OK response, as the original did.
    """
    payload = request.json
    logger.debug(payload)
    matched_instrument = {}
    if 'inst_id' in payload:
        index_rows = meta.fetch_instrument_index(payload['inst_id'])
        logger.debug(index_rows)
        if index_rows:
            logger.debug(index_rows[0]['chords_inst_id'])
            # get instrument document from the owning site
            site_doc, _site_msg = meta.get_site(index_rows[0]['project_id'],
                                                index_rows[0]['site_id'])
            if 'instruments' in site_doc:
                for inst in site_doc['instruments']:
                    if inst['inst_id'] == payload['inst_id']:
                        matched_instrument = inst
            logger.debug(site_doc)
            write_resp = influx.write_measurements(site_doc['chords_id'],
                                                   matched_instrument, payload)
            logger.debug(write_resp)
    return utils.ok(result=[], msg="Measurements Saved")
def get(self, instrument_id):
    """Return measurements for *instrument_id*, pivoted to one column per variable.

    Queries Influx over the optional ``start_date``/``end_date`` request args,
    pivots the (time, var, value) rows into a time-indexed frame, and renames
    the CHORDS variable-id columns back to the project's ``var_id`` names.
    With ``?format=csv`` the frame is streamed back as a CSV attachment;
    otherwise a JSON payload with instrument and site metadata is returned.
    """
    result = []
    msg = ""
    logger.debug("top of GET /measurements")
    inst_index = meta.fetch_instrument_index(instrument_id)
    logger.debug(inst_index)
    # BUG FIX: guard the list itself before subscripting — the original
    # evaluated len(inst_index[0]) first, raising IndexError for unknown
    # instruments instead of falling through.
    if inst_index and len(inst_index[0]) > 0:
        site, msg = meta.get_site(inst_index[0]['project_id'],
                                  inst_index[0]['site_id'])
        logger.debug("in IF")
        js = influx.query_measurments([
            {"inst": str(inst_index[0]['chords_inst_id'])},
            {"start_date": request.args.get('start_date')},
            {"end_date": request.args.get('end_date')},
        ])
        logger.debug(js)
        if len(js) > 1 and len(js['series']) > 0:
            df = pd.DataFrame(js['series'][0]['values'],
                              columns=js['series'][0]['columns'])
            # pivot long (time, var, value) rows to one column per variable,
            # then drop the 'value' level the pivot introduces
            pv = df.pivot(index='time', columns='var', values=['value'])
            df1 = pv
            df1.columns = df1.columns.droplevel(0)
            df1 = df1.reset_index().rename_axis(None, axis=1)
            replace_cols = {}
            # BUG FIX: default to {} so result['instrument'] below cannot hit
            # a NameError when no site instrument matches instrument_id.
            instrument = {}
            logger.debug(site)
            for inst in site['instruments']:
                logger.debug(inst)
                if inst['inst_id'] == instrument_id:
                    instrument = inst
                    logger.debug(inst)
                    # map CHORDS numeric column ids back to var_id names
                    for v in inst['variables']:
                        logger.debug(v)
                        replace_cols[str(v['chords_id'])] = v['var_id']
            logger.debug(replace_cols)
            df1.rename(columns=replace_cols, inplace=True)
            df1.set_index('time', inplace=True)
            if request.args.get('format') == "csv":
                logger.debug("CSV")
                output = make_response(df1.to_csv())
                output.headers["Content-Disposition"] = "attachment; filename=export.csv"
                output.headers["Content-type"] = "text/csv"
                return output
            else:
                result = json.loads(df1.to_json())
                result['measurements_in_file'] = len(df1.index)
                result['instrument'] = instrument
                site.pop('instruments', None)
                result['site'] = meta.strip_meta(site)
                return utils.ok(result=result, msg="Measurements Found")
        else:
            return utils.ok(result=[], msg="No Measurements Founds")