class Delete(Common):
    """API endpoint class for deleting (soft-deleting) vessel images."""

    # INITIALIZE
    def __init__(self):
        """The Constructor for Delete class"""
        # Database gateway used by every query in this class.
        self.postgres = PostgreSQL()
        super(Delete, self).__init__()

    def delete_image(self):
        """
        This API is for Deleting Images
        ---
        tags:
          - Vessel
        produces:
          - application/json
        parameters:
          - name: token
            in: header
            description: Token
            required: true
            type: string
          - name: userid
            in: header
            description: User ID
            required: true
            type: string
          - name: query
            in: body
            description: Vessel IDs
            required: true
            schema:
              id: Delete Vessel IDs images
              properties:
                vessel_ids:
                  types: array
                  example: []
        responses:
          500:
            description: Error
          200:
            description: Delete Vessel Images
        """
        data = {}

        # GET JSON REQUEST
        query_json = request.get_json(force=True)

        # GET HEADER
        token = request.headers.get('token')
        userid = request.headers.get('userid')

        # GET QUERY
        vessel_ids = query_json["vessel_ids"]

        # CHECK TOKEN
        token_validation = self.validate_token(token, userid)

        if not token_validation:
            data["alert"] = "Invalid Token"
            data['status'] = 'Failed'

            # RETURN ALERT
            return self.return_data(data)

        if not self.delete_vessel_images(vessel_ids):
            data["alert"] = "Please check your query! | Failed to Delete Image"
            data['status'] = 'Failed'

            # RETURN ALERT
            return self.return_data(data)

        data['message'] = "Images successfully Deleted!"
        data['status'] = "ok"
        return self.return_data(data)

    def delete_vessel_images(self, vessel_ids):
        """Soft-delete the active image of every vessel in *vessel_ids*.

        Returns 1 when every id had an active image row and every update
        succeeded; 0 otherwise (including an empty id list).

        BUGFIX: the previous version returned from inside the first loop
        iteration, so only the first vessel id was ever processed and the
        rest of the list was silently ignored.
        """
        if not vessel_ids:
            return 0

        for vessel_id in vessel_ids:
            # VERIFY IF IMAGE EXISTS FOR VESSEL
            # NOTE(review): vessel_id is interpolated straight into SQL;
            # assumes ids are internal/trusted — parameterize if the query
            # layer supports it.
            sql_str = "SELECT * FROM vessel_image"
            sql_str += " WHERE vessel_id='{0}'".format(vessel_id)
            sql_str += " AND status = 'active'"
            vessel = self.postgres.query_fetch_one(sql_str)

            if not vessel:
                # No active image for this vessel: treat as failure,
                # matching the original's behavior for the first id.
                return 0

            # CONDITION FOR QUERY
            conditions = [{
                "col": "vessel_image_id",
                "con": "=",
                "val": vessel['vessel_image_id']
            }]

            # Soft delete: mark the row inactive instead of removing it.
            update_column = {}
            update_column['update_on'] = time.time()
            update_column['status'] = "inactive"

            if not self.postgres.update('vessel_image', update_column, conditions):
                return 0

        return 1
class DeviceList(Common):
    """API endpoint class returning a paginated vessel/device listing."""

    # INITIALIZE
    def __init__(self):
        """The Constructor for DeviceList class"""
        self.postgres = PostgreSQL()
        self.couch_query = Queries()
        self.aws3 = AwsS3()
        super(DeviceList, self).__init__()

    def get_list(self):
        """
        This API is for Getting All Vessel Device List
        ---
        tags:
          - Devices
        produces:
          - application/json
        parameters:
          - name: token
            in: header
            description: Token
            required: true
            type: string
          - name: userid
            in: header
            description: User ID
            required: true
            type: string
          - name: limit
            in: query
            description: Limit
            required: true
            type: integer
          - name: page
            in: query
            description: Page
            required: true
            type: integer
        responses:
          500:
            description: Error
          200:
            description: Vessel Device List
        """
        data = {}

        # GET DATA
        token = request.headers.get('token')
        userid = request.headers.get('userid')
        limit = int(request.args.get('limit'))
        page = int(request.args.get('page'))

        # CHECK TOKEN
        token_validation = self.validate_token(token, userid)

        if not token_validation:
            data['alert'] = "Invalid Token"
            data['status'] = 'Failed'

            # RETURN ALERT
            return self.return_data(data)

        # COUNT
        sql_str = "SELECT COUNT(*) FROM vessel"
        count = self.postgres.query_fetch_one(sql_str)
        total_rows = count['count']

        offset = int((page - 1) * limit)

        # DATA
        sql_str = "SELECT * FROM vessel LIMIT {0} OFFSET {1} ".format(
            limit, offset)
        vessels = self.postgres.query_fetch_all(sql_str)

        rows = []
        if vessels:
            vessel_ids = [x['vessel_id'] for x in vessels]
            for vessel_id in vessel_ids:
                vessel_name = self.get_vessel_name(vessel_id)
                devices = self.get_vessel_devices(vessel_id)
                rows.append({
                    "vessel_id": vessel_id,
                    "vessel_name": vessel_name,
                    "devices": devices
                })

        # BUGFIX: the old formula `int(math.ceil(int(total_rows - 1) / limit)) + 1`
        # was the Python-2 integer-division idiom `(n - 1) // limit + 1`; under
        # Python 3's true division it over-counted by one page (e.g. 10 rows,
        # limit 5 -> 3 pages). Plain ceiling division gives the intended count.
        total_page = int(math.ceil(total_rows / limit))

        data['data'] = rows
        data['total_page'] = total_page
        data['limit'] = int(limit)
        data['page'] = int(page)
        data['total_rows'] = total_rows
        data['status'] = 'ok'

        return self.return_data(data)

    def get_vessel_name(self, vessel_id):
        """Return the vessel name, preferring CouchDB PARAMETERS over SQL."""
        assert vessel_id, "Vessel ID is required."

        values = self.couch_query.get_complete_values(vessel_id, "PARAMETERS")

        if values:
            vessel_name = values['PARAMETERS']['INFO']['VESSELNAME']
        else:
            # Fallback: name stored in the relational vessel table.
            sql_str = "SELECT vessel_name FROM vessel WHERE vessel_id='{0}'".format(
                vessel_id)
            vname = self.postgres.query_fetch_one(sql_str)

            vessel_name = ""
            if vname:
                vessel_name = vname['vessel_name']

        return vessel_name

    def get_vessel_devices(self, vessel_id):
        """Return the vessel's devices (minus system pseudo-devices),
        each decorated with its S3 image URL."""
        assert vessel_id, "Vessel ID is required."

        sql_str = "SELECT device_id, device FROM device"
        sql_str += " WHERE vessel_id = '{0}'".format(vessel_id)
        sql_str += " AND device NOT IN ('PARAMETERS', 'COREVALUES',"
        sql_str += " 'FAILOVER', 'NTWCONF', 'NTWPERF1')"
        devices = self.postgres.query_fetch_all(sql_str)

        if devices:
            for device in devices:
                device['image_url'] = self.aws3.get_device_image(
                    vessel_id, device['device_id'])

        return devices
def __init__(self):
    """The Constructor for CalculateOperatorResult class"""
    # Relational database gateway.
    self.postgres = PostgreSQL()
    # CouchDB query helper.
    self.couch_query = Queries()
    super(CalculateOperatorResult, self).__init__()
class UpdateCompany(Common):
    """API endpoint class for updating a company and its vessel links."""

    # INITIALIZE
    def __init__(self):
        """The Constructor for UpdateCompany class"""
        self.postgres = PostgreSQL()
        self._couch_db = CouchDatabase()
        self.couch_query = Queries()
        self.log = Log()
        super(UpdateCompany, self).__init__()

    def update_company(self):
        """
        This API is for Updating Company
        ---
        tags:
          - Company
        produces:
          - application/json
        parameters:
          - name: token
            in: header
            description: Token
            required: true
            type: string
          - name: userid
            in: header
            description: User ID
            required: true
            type: string
          - name: query
            in: body
            description: Updating Company
            required: true
            schema:
              id: Updating Company
              properties:
                company_id:
                  type: string
                company_name:
                  type: string
                vessel_ids:
                  types: array
                  example: []
        responses:
          500:
            description: Error
          200:
            description: Updating Company
        """
        data = {}

        # GET JSON REQUEST
        query_json = request.get_json(force=True)

        # GET HEADER
        token = request.headers.get('token')
        userid = request.headers.get('userid')

        # GET COMPANY_ID
        company_id = query_json.get('company_id')

        # BUGFIX: the previous `del query_json['vessel_ids']` raised KeyError
        # (an unhandled 500) whenever the caller omitted vessel_ids even
        # though `.get()` had tolerated it; pop() removes the key when
        # present and defaults safely when it is not.
        vessel_ids = query_json.pop('vessel_ids', None) or []

        # CHECK TOKEN
        token_validation = self.validate_token(token, userid)

        if not token_validation:
            data["alert"] = "Invalid Token"
            data['status'] = 'Failed'

            # RETURN ALERT
            return self.return_data(data)

        # Keep only vessel ids that actually exist in CouchDB.
        vessel_ids = self.check_vessel_existence(vessel_ids)

        if not self.update_companies(query_json):
            data["alert"] = "Please check your query! , Company update FAILED."
            data['status'] = 'Failed'

            # RETURN ALERT
            return self.return_data(data)

        # Flag every account of this company for VPN re-provisioning.
        self.get_users(company_id)

        # DELETE CURRENT VESSELS (they are re-inserted below)
        if not self.delete_current_vessels(company_id):
            data["alert"] = "Problem in updating the vessels."
            data['status'] = 'Failed'

            # RETURN ALERT
            return self.return_data(data)

        if len(vessel_ids) > 0:
            # INSERT NEW VESSELS
            if not self.add_vessels(company_id, vessel_ids):
                data["alert"] = "Problem in updating the vessels."
                data['status'] = "Failed"

                # RETURN ALERT
                return self.return_data(data)

        data['message'] = "Company successfully updated!"
        data['status'] = "ok"
        return self.return_data(data)

    def update_companies(self, query_json):
        """Update the company row; return 1 on success, 0 on failure."""
        query_json['update_on'] = time.time()

        conditions = []
        conditions.append({
            "col": "company_id",
            "con": "=",
            "val": query_json['company_id']
        })

        # company_id is the WHERE key, not an updatable column.
        data = self.remove_key(query_json, "company_id")

        if self.postgres.update('company', data, conditions):
            return 1
        return 0

    def check_vessel_existence(self, vessel_ids):
        """Return the subset of *vessel_ids* that exist in CouchDB."""
        ids = []
        for _id in vessel_ids:
            res = self.couch_query.get_by_id(_id)
            # NOTE(review): assumes get_by_id returns a dict even for a
            # missing document — confirm it never returns None.
            if res.get('_id', False):
                ids.append(_id)
        return ids

    def delete_current_vessels(self, company_id):
        """Delete all company_vessels rows for *company_id*; return bool."""
        status = False
        try:
            conditions = []
            conditions.append({
                "col": "company_id",
                "con": "=",
                "val": company_id
            })
            if self.postgres.delete('company_vessels', conditions):
                status = True
        except Exception as error:
            self.log.critical(error)
            status = False
        return status

    def add_vessels(self, company_id, vessel_ids):
        """Link every vessel in *vessel_ids* to *company_id*.

        Returns True only when all inserts succeed; False on any failure
        or when the list is empty.

        BUGFIX: the previous version returned the *string* 'Failed' on
        error, which is truthy, so the caller's `if not add_vessels(...)`
        never detected the failure. Booleans restore the intended check.
        """
        inserted = False
        for vessel_id in vessel_ids:
            data = {}
            data['company_id'] = company_id
            data['vessel_id'] = vessel_id
            try:
                self.postgres.insert('company_vessels', data, 'company_id')
                inserted = True
            except Exception as err:
                self.log.critical(err)
                return False
        return inserted

    def get_users(self, company_id):
        """Mark all accounts of *company_id* as VPN-pending and return
        their account ids (empty list when the company has no accounts)."""
        sql_str = "SELECT * FROM account_company WHERE company_id={0}".format(company_id)
        rows = self.postgres.query_fetch_all(sql_str)

        if rows:
            for row in rows:
                # INIT CONDITION
                conditions = []

                # CONDITION FOR QUERY
                conditions.append({
                    "col": "id",
                    "con": "=",
                    "val": row['account_id']
                })

                updates = {}
                updates['vessel_vpn_state'] = 'pending'

                # UPDATE VESSEL VPN STATE
                self.postgres.update('account', updates, conditions)

            return [x['account_id'] for x in rows]
        return []
def __init__(self):
    """The Constructor for Permission class"""
    # Relational database gateway (note: named postgresql_query here,
    # unlike the `postgres` attribute used by sibling classes).
    self.postgresql_query = PostgreSQL()
    super(Permission, self).__init__()
class EmailVesselInfo(Common):
    """Class for EmailVesselInfo

    Cron-style worker: finds email schedules that are due (and not yet
    sent today), renders a vessel status report and emails it, then logs
    the send in email_log.
    """

    # INITIALIZE
    def __init__(self):
        """The Constructor for EmailVesselInfo class"""
        self.postgres = PostgreSQL()
        self.couch_query = Queries()
        # Earliest epoch used elsewhere as a query lower bound.
        self.epoch_default = 26763
        super(EmailVesselInfo, self).__init__()

    def email_vessel_info(self):
        """Email Vessel Information — send every due, not-yet-sent report."""
        # DONE EMAIL ID's (already sent today)
        email_ids = self.get_done_email_ids()

        # GET ALL ID's TO EMAIL
        email_schedules = self.get_email_schedule_ids(email_ids)

        for email_schedule in email_schedules or []:
            # SEND EMAIL
            self.email_sender(email_schedule)

        return 1

    def get_done_email_ids(self):
        """Return email_schedule_ids already logged for today (UTC)."""
        # GET CURRENT UTC DATE (YYYYMMDD, matches email_log.date_today)
        utc_date = datetime.utcnow().strftime("%Y%m%d")

        # INIT SQL QUERY
        sql_str = "SELECT email_schedule_id FROM email_log"
        sql_str += " WHERE date_today=" + str(utc_date)

        # FETCH ALL
        res = self.postgres.query_fetch_all(sql_str)

        # RETURN
        return res

    def get_email_schedule_ids(self, email_ids):
        """Return schedules whose utc_time has passed and which are not
        in *email_ids* (rows already sent today)."""
        # SET TIME FORMAT
        time_format = '%H:%M:%S'

        # SET GET UTC DATE
        utc_date = datetime.now(tz=pytz.utc)

        # GET UTC TIME
        new_time = utc_date.strftime(time_format)

        # SPLIT TIME
        new_time = new_time.split(":")

        # SET DELTA TIME (seconds elapsed since UTC midnight)
        tdel = timedelta(hours=int(new_time[0]),
                         minutes=int(new_time[1]),
                         seconds=int(new_time[2]))

        # GET TOTAL SECONDS
        td_seconds = tdel.total_seconds()

        # INIT SQL QUERY
        cols = 'email_vessel_id, email_schedule_id, schedule'
        sql_str = "SELECT " + cols + " FROM email_schedule"
        sql_str += " WHERE utc_time<= " + str(td_seconds)

        # CHECK IF ANY OLD SEND EMAIL ID's
        if email_ids:
            email_ids = [x['email_schedule_id'] for x in email_ids]

            # CHECK LENGTH OF OLD SEND EMAIL ID's
            # (a one-element tuple would render as "(1,)" which is not
            # valid SQL, hence the special case)
            if len(email_ids) == 1:
                # ADD CONDITION FOR SQL QUERY
                sql_str += " AND email_schedule_id != " + str(email_ids[0])
            else:
                # ADD CONDITION FOR SQL QUERY
                sql_str += " AND email_schedule_id NOT IN " + str(
                    tuple(email_ids))

        # FETCH ALL
        res = self.postgres.query_fetch_all(sql_str)

        # RETURN
        return res

    def email_sender(self, email_schedule):
        """Email Sender

        Render the vessel's report template (from report_temp, or a
        built-in default), resolve each *DEVICE.MODULE.FIELD* placeholder
        against CouchDB, send the email, and log the send in email_log.
        Returns 1 on send, 0 when the vessel has mail disabled/missing.
        """
        # INIT SQL QUERY
        cols = 'vessel_id, email'
        sql_str = "SELECT " + cols + " FROM email_vessel"
        sql_str += " WHERE mail_enable=true AND email_vessel_id="
        sql_str += str(email_schedule['email_vessel_id'])

        # FETCH ONE
        res = self.postgres.query_fetch_one(sql_str)

        if res:
            vessel_id = res['vessel_id']

            sql_str = "SELECT * FROM report_temp WHERE vessel_id = '" + str(
                vessel_id) + "'"
            report_temp_res = self.postgres.query_fetch_one(sql_str)

            if report_temp_res:
                report_data = report_temp_res['report_data']
                report_data = report_data.splitlines()
            else:
                # Default template when no custom report is stored.
                report_data = "General Info: \n"
                report_data += """VESSEL: *["PARAMETERS"]["INFO"]["VESSELNAME"]*\n"""
                report_data += "TIME: *TIMESTAMP*\n"
                # NOTE(review): this splits on the two-character literal
                # backslash-n ("\\n"), not on real newlines — so the
                # default template stays one "line"; confirm intended.
                report_data = report_data.split("\\n")

            # Placeholders are wrapped in asterisks, e.g. *DEV.MOD.FIELD*.
            pattern = r"\*(.*)\*"
            last_update = self.get_last_update_with_option(vessel_id)
            epoch_time = int(time.time())
            final_data = []

            for line in report_data:
                match = None
                temp_data = {}
                str_line = line

                for match in re.finditer(pattern, line):
                    start_match = match.start()
                    end_match = match.end()

                    # Tokenize the placeholder into path components.
                    pat = r"[a-zA-Z0-9_ ]+"
                    device = re.findall(pat, line[start_match:end_match])

                    if device[0].upper() in ['TIME', 'TIMESTAMP']:
                        # Timestamp placeholders are handled via last_date.
                        pass
                    else:
                        temp_data['label'] = str_line.split(":")[0]

                        values = self.couch_query.get_complete_values(
                            vessel_id, device[0])

                        if values:
                            try:
                                temp_data['value'] = values[device[0]][
                                    device[1]][device[2]]
                            # NOTE(review): bare except — any malformed
                            # placeholder path silently becomes "".
                            except:
                                temp_data['value'] = ""
                            final_data.append(temp_data)
                        else:
                            temp_data['value'] = ""
                            final_data.append(temp_data)

                if str_line.split(":")[0].upper() == 'STATUS':
                    # Online/Offline derived from data freshness.
                    status = "Offline"
                    if self.check_time_lapse(epoch_time, last_update) == "green":
                        status = "Online"
                    temp_data['label'] = 'STATUS'
                    temp_data['value'] = status
                    final_data.append(temp_data)
                elif match is None:
                    # Line had no placeholder at all: label-only row.
                    temp_data['label'] = str_line.split(":")[0]
                    temp_data['value'] = ""
                    final_data.append(temp_data)

            # Snapshot before the template renderer mutates final_data.
            temp_message = copy.deepcopy(final_data)

            utc = datetime.utcfromtimestamp(last_update).strftime(
                '%A, %d %B %Y %I:%M:%S %p')
            last_date = utc + " UTC"

            values = self.couch_query.get_complete_values(
                vessel_id, "PARAMETERS")

            email = res['email']
            email_temp = VesselReport()
            emailer = Email()
            message = email_temp.vessel_report_temp(final_data, last_date)
            subject = "Scheduled Reporting Event - " + values['PARAMETERS'][
                'INFO']['VESSELNAME']
            emailer.send_email(email, message, subject)

            date_today = datetime.utcnow().strftime("%Y%m%d")

            # INIT NEW VESSEL EMAIL
            temp = {}
            temp['email_schedule_id'] = email_schedule['email_schedule_id']
            temp['email_vessel_id'] = email_schedule['email_vessel_id']
            temp['message'] = json.dumps(temp_message)
            temp['data_date'] = last_date
            temp['date_today'] = date_today
            temp['update_on'] = time.time()
            temp['created_on'] = time.time()

            # INSERT NEW VESSEL EMAIL (dedupe key for get_done_email_ids)
            self.postgres.insert('email_log', temp)
        else:
            return 0

        return 1

    # LATE UPDATE WITH OPTION
    def get_last_update_with_option(self, vessel_id):
        """Return the epoch of the vessel's latest COREVALUES doc, or 0."""
        values = self.couch_query.get_complete_values(vessel_id,
                                                      "COREVALUES",
                                                      flag='one_doc')
        if values:
            return values['timestamp']
        return 0

    def get_device(self, devices, pattern):
        """Return docs whose device name matches *pattern* followed by a
        digit (e.g. pattern 'NMEA' matches NMEA1, NMEA2...), sorted by name."""
        data = []
        for device in devices:
            if re.findall(r'' + pattern + r'\d', device['doc']['device']):
                data.append(device['doc'])

        data = sorted(data, key=lambda i: i['device'])
        return data
class Upload(Common):
    """API endpoint class for uploading vessel files to S3."""

    # INITIALIZE
    def __init__(self):
        """The Constructor for Vessel Upload class"""
        self.couch_query = Queries()
        self.postgres = PostgreSQL()
        self.aws3 = AwsS3()
        super(Upload, self).__init__()

    # GET VESSEL FUNCTION
    def file_upload(self):
        """
        This API is for Uploading Vessel File
        ---
        tags:
          - Vessel
        produces:
          - application/json
        parameters:
          - name: token
            in: header
            description: Token
            required: true
            type: string
          - name: userid
            in: header
            description: User ID
            required: true
            type: string
          - name: vessel_id
            in: query
            description: Vessel ID
            required: true
            type: string
        responses:
          500:
            description: Error
          200:
            description: Vessel File Upload
        """
        # INIT DATA
        data = {}

        # VESSEL ID
        vessel_id = request.args.get('vessel_id')

        # GET DATA
        token = request.headers.get('token')
        userid = request.headers.get('userid')

        # CHECK TOKEN
        token_validation = self.validate_token(token, userid)

        if not token_validation:
            data["alert"] = "Invalid Token"
            data['status'] = 'Failed'

            # RETURN ALERT
            return self.return_data(data)

        # RH_<VesselIMO>_<ImageID>
        parameters = self.couch_query.get_complete_values(
            vessel_id,
            "PARAMETERS"
        )

        # VESSEL IMO
        vessel_imo = parameters['PARAMETERS']['INFO']['IMO']

        file_upload = []
        filenames = request.files.getlist('upfile')
        for filename in filenames:
            try:
                file_name = filename.filename
            # BUGFIX: this guard previously caught ImportError, which an
            # attribute access can never raise — the "No image!" alert was
            # dead code and malformed uploads surfaced as unhandled 500s.
            except AttributeError:
                data["alert"] = "No image!"
                data['status'] = 'Failed'

                # RETURN ALERT
                return self.return_data(data)

            # De-duplicate against files already stored for this vessel.
            file_name = self.rename_file(vessel_id, file_name)

            vimg_data = {}
            vimg_data['vessel_id'] = vessel_id
            vimg_data['vessel_imo'] = vessel_imo
            vimg_data['file_name'] = file_name
            vimg_data['status'] = "active"
            vimg_data['created_on'] = time.time()

            # ADD FILE TO VESSEL FILE TABLE
            self.postgres.insert('vessel_file', vimg_data, 'vessel_file_id')

            # S3 key layout: VesselFiles/<IMO>/<file name>
            upload_file = 'VesselFiles/' + vessel_imo + "/" + file_name
            # NOTE(review): request.files['upfile'] always returns the FIRST
            # uploaded part, so with multiple files every S3 object would get
            # the first file's content — `filename` (the loop variable) was
            # probably intended here. Left unchanged pending confirmation.
            body = request.files['upfile']

            # SAVE TO S3
            url = ""
            if self.aws3.save_file(upload_file, body):
                url = self.aws3.get_url(upload_file)

            file_upload.append({
                "filename": file_name,
                "url": url
            })

        data["status"] = "ok"
        data["data"] = file_upload

        # RETURN
        return self.return_data(data)

    def allowed_file_type(self, filename):
        """Return True when *filename* has a whitelisted extension."""
        allowed_extensions = set(['txt', 'pdf'])
        return '.' in filename and filename.rsplit('.', 1)[1].lower() in allowed_extensions

    def rename_file(self, vessel_id, filename):
        """Return a file name unique for *vessel_id*, renaming via
        self.file_replace (and re-checking recursively) on collision."""
        sql_str = "SELECT * FROM vessel_file"
        sql_str += " WHERE vessel_id='{0}'".format(vessel_id)
        sql_str += " AND file_name='{0}'".format(filename)
        vessel_file = self.postgres.query_fetch_one(sql_str)

        if vessel_file:
            new_name = self.file_replace(vessel_file['file_name'])
            return self.rename_file(vessel_id, new_name)
        return filename
class AwsS3(Common):
    """Class for AwsS3

    Thin wrapper over boto3 for generating presigned GET URLs and
    uploading objects; metadata (image/file names, IMO) comes from
    PostgreSQL.
    """

    # INITIALIZE
    def __init__(self):
        """The Constructor for S3 class"""
        self.postgres = PostgreSQL()

        # INIT CONFIG
        self.config = ConfigParser()

        # CONFIG FILE (AWS credentials, bucket, expiry)
        self.config.read("config/config.cfg")

        super(AwsS3, self).__init__()

    def get_vessel_image(self, vessel_id):
        """ Return Vessel Image URL ("" when no active image exists)."""
        assert vessel_id, "Vessel ID is required."

        # DATA
        sql_str = "SELECT * FROM vessel_image"
        sql_str += " WHERE vessel_id='{0}'".format(vessel_id)
        sql_str += " AND status = 'active'"
        vessel = self.postgres.query_fetch_one(sql_str)

        image_url = ""
        if vessel:
            filename = vessel['image_name']
            ext = filename.split(".")[-1]

            # IMAGE FILE NAME — key layout: Vessel/RH_<IMO>_<image_id>.<ext>
            image_name = str(vessel['vessel_image_id']) + "." + ext
            key_file = 'Vessel/' + "RH_" + vessel[
                'vessel_imo'] + "_" + image_name
            image_url = self.get_url(key_file)

        return image_url

    def get_vessel_file(self, vessel_id, file_name):
        """ Return Vessel File URL ("" when no active row matches)."""
        assert vessel_id, "Vessel ID is required."

        # DATA
        sql_str = "SELECT * FROM vessel_file"
        sql_str += " WHERE vessel_id='{0}'".format(vessel_id)
        sql_str += " AND file_name ='{0}'".format(file_name)
        sql_str += " AND status = 'active'"
        vessel = self.postgres.query_fetch_one(sql_str)

        url = ""
        if vessel:
            filename = vessel['file_name']
            # ext = filename.split(".")[-1]

            # IMAGE FILE NAME
            # fname = str(vessel['vessel_file_id']) + "." + ext
            # key_file = 'VesselFiles/' + "RH_" + vessel['vessel_imo'] + "_" + fname
            # Key layout matches Upload.file_upload: VesselFiles/<IMO>/<name>
            key_file = 'VesselFiles/' + vessel['vessel_imo'] + "/" + filename
            url = self.get_url(key_file)

        return url

    def get_url(self, key):
        """ Return a presigned S3 GET URL for *key*. """
        assert key, "Key is required."

        # AWS ACCESS
        aws_access_key_id = config_section_parser(self.config,
                                                 "AWS")['aws_access_key_id']
        aws_secret_access_key = config_section_parser(
            self.config, "AWS")['aws_secret_access_key']
        region_name = config_section_parser(self.config, "AWS")['region_name']

        # CONNECT TO S3
        s3_client = boto3.client('s3',
                                 aws_access_key_id=aws_access_key_id,
                                 aws_secret_access_key=aws_secret_access_key,
                                 region_name=region_name)

        s3_params = {
            'Bucket': config_section_parser(self.config, "AWS")['bucket'],
            'Key': key
        }

        # NOTE(review): ConfigParser values are strings; boto3's ExpiresIn
        # expects an int number of seconds — confirm this works with the
        # deployed botocore version or cast to int.
        expiration = config_section_parser(self.config, "AWS")['image_expires']
        url = s3_client.generate_presigned_url('get_object',
                                               Params=s3_params,
                                               ExpiresIn=expiration,
                                               HttpMethod='GET')
        return url

    def save_file(self, key_file, body_request):
        """ Save File to S3 Bucket; returns 1 on success, 0 otherwise. """
        # AWS ACCESS
        aws_access_key_id = config_section_parser(self.config,
                                                  "AWS")['aws_access_key_id']
        aws_secret_access_key = config_section_parser(
            self.config, "AWS")['aws_secret_access_key']
        region_name = config_section_parser(self.config, "AWS")['region_name']

        # CONNECT TO S3
        s3_resource = boto3.resource(
            's3',
            aws_access_key_id=aws_access_key_id,
            aws_secret_access_key=aws_secret_access_key,
            region_name=region_name)

        # SAVE TO S3
        # NOTE(review): bucket name is hard-coded here but read from config
        # in get_url — confirm both refer to the same bucket.
        save_to_bucket = s3_resource.Bucket('rh.fileserver').put_object(
            Key=key_file, Body=body_request)

        if save_to_bucket:
            return 1
        return 0

    def get_device_image(self, vessel_id, device_id):
        """ Return Device Image URL ("" when no active image exists)."""
        assert vessel_id, "Vessel ID is required."
        assert device_id, "Device ID is required."

        # DATA
        sql_str = "SELECT * FROM device_image"
        sql_str += " WHERE vessel_id='{0}'".format(vessel_id)
        sql_str += " AND device_id='{0}'".format(device_id)
        sql_str += " AND status = 'active'"
        device = self.postgres.query_fetch_one(sql_str)

        image_url = ""
        if device:
            filename = device['image_name']
            ext = filename.split(".")[-1]

            # IMAGE FILE NAME
            # NOTE(review): unlike get_vessel_image there is no separator
            # between device_id and image_name in this key — confirm the
            # upload side builds keys the same way.
            image_name = str(device['device_image_id']) + "." + ext
            key_file = 'Device/' + "RH_" + device['vessel_imo'] + "_" + str(
                device_id) + image_name
            image_url = self.get_url(key_file)

        return image_url
def __init__(self):
    """The Constructor for NoonReport class"""
    # Relational database gateway (named postgresql_query in this class).
    self.postgresql_query = PostgreSQL()
    super(NoonReport, self).__init__()
class Graph(Common):
    """API endpoint class producing time-series graph data for a device."""

    # INITIALIZE
    def __init__(self):
        """The Constructor for Graph class"""
        self.postgres = PostgreSQL()
        self._couch_db = CouchDatabase()
        self.couch_query = Queries()
        super(Graph, self).__init__()

    def graph(self):
        """
        This API is for Getting Graph
        ---
        tags:
          - Graph
        produces:
          - application/json
        parameters:
          - name: token
            in: header
            description: Token
            required: true
            type: string
          - name: userid
            in: header
            description: User ID
            required: true
            type: string
          - name: vessel_id
            in: query
            description: Vessel ID
            required: true
            type: string
          - name: device_id
            in: query
            description: Device ID
            required: true
            type: string
          - name: hours
            in: query
            description: Hours
            required: false
            type: integer
          - name: keys
            in: query
            description: Keys
            required: true
            type: string
          - name: combine
            in: query
            description: combine
            required: true
            type: boolean
          - name: start_time
            in: query
            description: Epoch start
            required: false
            type: string
          - name: end_time
            in: query
            description: Epoch end
            required: false
            type: string
        responses:
          500:
            description: Error
          200:
            description: Vessel Device Info
        """
        data = {}

        # VESSEL ID
        vessel_id = request.args.get('vessel_id')
        device_id = request.args.get('device_id')
        keys = request.args.get('keys')
        combine = request.args.get('combine')
        hours = request.args.get('hours')
        start_time = request.args.get('start_time')
        end_time = request.args.get('end_time')

        # GET DATA
        token = request.headers.get('token')
        userid = request.headers.get('userid')

        # CHECK TOKEN
        token_validation = self.validate_token(token, userid)

        if not token_validation:
            data["alert"] = "Invalid Token"
            data['status'] = 'Failed'

            # RETURN ALERT
            return self.return_data(data)

        if not vessel_id:
            data["alert"] = "Please complete parameters!"
            data['status'] = 'Failed'

            # RETURN ALERT
            return self.return_data(data)

        # System pseudo-devices are looked up by name, real devices by id.
        # NOTE(review): if no matching pseudo-device is found, `device`
        # stays unbound and the code below raises — confirm upstream
        # guarantees these devices exist.
        if device_id in ['COREVALUES', 'FAILOVER', 'NTWPERF1']:
            all_devices = self.couch_query.get_all_devices(vessel_id)
            for dev in all_devices:
                if dev['doc']['device'] == device_id:
                    device = dev['doc']
                    break
        else:
            device = self.couch_query.get_by_id(device_id)

        # Default window: the last `hours` hours ending now.
        if not start_time and not end_time:
            ctime = datetime.datetime.now()
            hours_ago = ctime - datetime.timedelta(hours=int(hours))
            start_time = int(hours_ago.timestamp())
            end_time = int(ctime.timestamp())

        values = self.couch_query.get_complete_values(
            vessel_id,
            device['device'],
            str(start_time),
            str(end_time),
            'all'
        )

        opt = []

        parameters = self.couch_query.get_complete_values(
            vessel_id,
            "PARAMETERS"
        )

        if device['device'] == "COREVALUES":
            device_type = "COREVALUES"
        elif device['device'] == "NTWPERF1":
            device_type = "NTWPERF1"
        elif device['device'] == 'FAILOVER':
            device_type = 'FAILOVER'
        else:
            device_type = parameters['PARAMETERS'][device['device']]['TYPE']

        if combine.upper() == 'TRUE':
            # One merged series: each sample holds all requested keys.
            final_data = []
            for value in values:
                timestamp = float(value['timestamp'])
                dev_value = value['value'][device['device']]

                if re.findall(r'NMEA\d', device['device']):
                    # NMEA payloads are nested one level deeper; flatten.
                    nmea_data = self.get_nmea_data(dev_value)
                    opt = list(set(self.get_graph_options(nmea_data)))
                    options = self.get_device_options(nmea_data)
                    if keys:
                        final_data.append(self.set_values(options, keys, timestamp))
                else:
                    opt = list(set(self.get_graph_options(dev_value)))
                    options = self.get_device_options(dev_value)
                    if keys:
                        final_data.append(self.set_values(options, keys, timestamp))

            final_data = sorted(final_data, key=lambda i: i['name'])

            # Render epoch 'name' as a display timestamp.
            for fdata in final_data:
                fdata['name'] = time.strftime('%d/%m/%Y %H:%M:%S',
                                              time.localtime(float(fdata['name'])))

            data['data'] = final_data
            data['statistics'] = self.get_stat(final_data, flag=True)
        else:
            # One series per requested key.
            fnl_data = {}

            # SET DYNAMIC VARIABLE
            if keys:
                for key in keys.split(","):
                    fnl_data[key] = []

            for value in values:
                timestamp = float(value['timestamp'])
                dev_value = value['value'][device['device']]

                if re.findall(r'NMEA\d', device['device']):
                    nmea_data = self.get_nmea_data(dev_value)
                    opt = list(set(self.get_graph_options(nmea_data)))
                    options = self.get_device_options(nmea_data)
                    for key in keys.split(","):
                        fnl_data[key].append(self.set_values(options, key, timestamp))
                else:
                    opt = list(set(self.get_graph_options(dev_value)))
                    options = self.get_device_options(dev_value)
                    for key in keys.split(","):
                        fnl_data[key].append(self.set_values(options, key, timestamp))

            for key in keys.split(","):
                fnl_data[key] = sorted(fnl_data[key], key=lambda i: i['name'])
                for fdata in fnl_data[key]:
                    fdata['name'] = time.strftime('%d/%m/%Y %H:%M:%S',
                                                  time.localtime(float(fdata['name'])))

            data['data'] = fnl_data
            data['statistics'] = self.get_stat(fnl_data, flag=False)

        # SET SQL QUERY — default (non-combine) label selection
        sql_str = "SELECT * FROM selected_label"
        sql_str += " WHERE device_type='{0}' and select_type != 'combine'".format(device_type)
        default_selected = self.postgres.query_fetch_all(sql_str)

        # SET SQL QUERY — combine label selection
        sql_str = "SELECT * FROM selected_label"
        sql_str += " WHERE device_type='{0}' and select_type = 'combine'".format(device_type)
        default_selected_combine = self.postgres.query_fetch_all(sql_str)

        data['default_selected_combine'] = self.set_select_values(default_selected_combine)
        data['default_selected'] = self.set_select_values(default_selected)
        data['available_options'] = self.format_filter(opt)
        data['status'] = 'ok'

        return self.return_data(data)

    def set_values(self, options, keys, timestamp):
        """Build one sample dict for *keys* at *timestamp*.

        Status-like string fields and the 'error' flag are mapped to 0/1
        so they can be plotted; missing keys become None.
        """
        temp_data = {}
        for key in keys.split(","):
            if key in options.keys():
                if key in ['AntennaStatus', 'TxMode']:
                    if options[key].upper() in ['TRACKING', 'ON']:
                        temp_data[key] = 1
                    else:
                        temp_data[key] = 0
                elif key == 'error':
                    if options[key] is True:
                        temp_data[key] = 1
                    else:
                        temp_data[key] = 0
                else:
                    temp_data[key] = options[key]
            else:
                temp_data[key] = None

        temp_data['name'] = timestamp
        return temp_data

    def get_available_options(self, options, temp_available_options, str_available_options):
        """Return plottable options not yet in the accumulator lists.

        Mutates *temp_available_options* (adds every accepted key).
        """
        available_options = []
        for opt in options.keys():
            if opt not in temp_available_options and opt not in str_available_options:
                if not options[opt]:
                    continue
                # BUGFIX: was `type(options[opt]) == 'str'`, comparing a
                # type object to the *string* 'str' — always False, so the
                # digit-string normalization below was dead code.
                if isinstance(options[opt], str):
                    if options[opt].isdigit():
                        options[opt] = float(options[opt])
                if self.isfloat(options[opt]) or self.isint(options[opt]) or opt in [
                        'AntennaStatus', 'TxMode']:
                    temp_available_options.append(opt)
                    temp = {}
                    temp['value'] = opt
                    temp['label'] = opt
                    available_options.append(temp)
        return available_options

    def get_nmea_data(self, datas):
        """ Flatten one level of an NMEA payload into a single dict. """
        temp_data = {}
        for key in list(datas.keys()):
            temp_data.update(datas[key])
        return temp_data

    def get_graph_options(self, datas):
        """Return the plottable option names across all modules."""
        options = []
        for mod in list(datas):
            options += self.get_opt_available(datas[mod])
        return options

    def get_opt_available(self, options):
        """Return the keys of *options* holding numeric (or status-like)
        values — i.e. the ones that can be graphed."""
        available_options = []
        for opt in options.keys():
            if not options[opt]:
                continue
            # BUGFIX: was `type(options[opt]) == 'str'` (always False);
            # isinstance restores the intended digit-string handling.
            if isinstance(options[opt], str):
                if options[opt].isdigit():
                    options[opt] = float(options[opt])
            if self.isfloat(options[opt]) or self.isint(options[opt]) or opt in [
                    'AntennaStatus', 'TxMode']:
                available_options.append(opt)
        return available_options

    def get_device_options(self, datas):
        """ Merge every module's fields into one flat dict. """
        tmp = []
        temp = {}
        for mod in list(datas):
            tmp.append(datas[mod])
        for dta in tmp:
            temp.update(dta)
        return temp

    def set_select_values(self, datas):
        """Map selected_label rows to {value,label} option dicts."""
        final_data = []
        for data in datas:
            temp = {}
            temp['value'] = data['label']
            temp['label'] = data['label']
            final_data.append(temp)
        return final_data

    def get_stat(self, datas, flag):
        """ Return Min and Max per series.

        flag=True: *datas* is the combined list of samples -> one
        'combine' range over all keys. flag=False: *datas* is a dict of
        per-key sample lists -> one range per key.
        """
        temp = {}
        tmp = []
        if datas:
            if flag is True:
                temp_data = {}
                for data in datas[0].keys():
                    if data != "name":
                        temp_data[data] = tuple(dta[data] for dta in datas)

                for dta in temp_data.values():
                    tmp += dta

                # Drop missing samples before ranging; guard against an
                # all-None series (previously an IndexError).
                tmp = [dta for dta in tmp if dta is not None]
                if tmp:
                    tmp = sorted(tmp, key=float)
                    data = self.get_stat_range(tmp)
                    temp.update({
                        "combine": data
                    })
            else:
                for key in list(datas.keys()):
                    result = [data[key] for data in datas[key] if data[key] is not None]
                    result = sorted(result, key=float)
                    if result:
                        data = self.get_stat_range(result)
                        temp.update({
                            key: data
                        })
        return temp

    def get_stat_range(self, data):
        """ Return {'min', 'max'} of an already-sorted sequence. """
        tmp = {}
        tmp['min'] = data[0]
        tmp['max'] = data[-1]
        return tmp
def __init__(self):
    """The Constructor for Graph class"""
    # Relational database gateway.
    self.postgres = PostgreSQL()
    # Raw CouchDB handle (private) and higher-level query helper.
    self._couch_db = CouchDatabase()
    self.couch_query = Queries()
    super(Graph, self).__init__()
class BlockageData(Common):
    """Class for BlockageData.

    Cron-style job: pulls raw device readings from CouchDB, derives
    antenna-blockage statistics per day, and persists them into the
    ``blockage_data`` PostgreSQL table.
    """

    def __init__(self):
        """The Constructor for BlockageData class"""
        self.blocks = Blockages()
        self.postgres = PostgreSQL()
        self.couch_query = Queries()
        # Fallback epoch lower bound used when a device has no history yet.
        self.epoch_default = 26763
        super(BlockageData, self).__init__()

    def run(self):
        """Aggregate blockage data for every supported device up to yesterday."""
        current_date = self.epoch_day(time.time())
        epoch_time = self.days_update(current_date)
        # Exclusive upper bound: one second before the start of today.
        epoch_time -= 1
        datas = self.get_device_type()
        for data in datas:
            self.add_blockage_data(data['vessel_id'],
                                   data['device_id'],
                                   data['device'],
                                   data['device_type'],
                                   epoch_time)

    def get_device_type(self):
        """ Return Device Type rows for the supported antenna models. """
        sql_str = "SELECT DISTINCT vessel_id, device_id, device, device_type"
        sql_str += " FROM device WHERE device_type IN"
        sql_str += " ('Intellian_V100_E2S', 'Intellian_V110_E2S',"
        sql_str += " 'Intellian_V80_IARM', 'Intellian_V100_IARM',"
        sql_str += " 'Intellian_V100', 'Intellian_V80_E2S',"
        sql_str += " 'Sailor_900', 'Cobham_500')"
        device_type = self.postgres.query_fetch_all(sql_str)
        return device_type

    def get_device_data(self, vessel_id, device, start, end):
        """ Return Device Data from CouchDB for [start, end], or 0 if none. """
        values = self.couch_query.get_complete_values(
            vessel_id,
            device,
            start=str(start),
            end=str(end),
            flag='all')
        if values:
            return values
        return 0

    def add_blockage_data(self, vessel_id, device_id, device, dtype, epoch_time):
        """ Insert to Blockage Data.

        Walks forward one day at a time from the last stored epoch_date (or
        the earliest CouchDB document when no rows exist) up to *epoch_time*.
        """
        sql_str = "SELECT epoch_date FROM blockage_data WHERE "
        sql_str += "vessel_id='{0}' AND ".format(vessel_id)
        sql_str += "device_id='{0}' ".format(device_id)
        sql_str += "ORDER BY epoch_date DESC LIMIT 1"
        epoch_date = self.postgres.query_fetch_one(sql_str)

        timestamp = 0
        if epoch_date:
            timestamp = epoch_date['epoch_date']
        else:
            self.log.low("NEW BLOCKAGE DATA!")
            # No rows yet: resume from the very first document in CouchDB.
            values = self.couch_query.get_complete_values(
                vessel_id,
                device,
                start=str(9999999999),
                end=str(self.epoch_default),
                flag='one_doc',
                descending=False
            )
            timestamp = values['timestamp']

        late_et = self.days_update(timestamp, 1, True)
        late_st = self.days_update(late_et, 1)
        new_et = late_et
        while int(new_et) <= int(epoch_time):
            late_et = self.days_update(late_et, 1, True)
            late_st = self.days_update(late_et, 1)
            if late_st > epoch_time:
                break
            new_et = late_et - 1
            datas = self.get_device_data(vessel_id, device, late_st, late_et)
            # Narrowed from a bare `except:` so Ctrl-C / SystemExit still
            # propagate; a day with no usable data is simply skipped.
            try:
                blockage = self.blocks.get_blockage_data(device, datas)
                block_zone = self.blocks.get_blockage_zones(device, dtype, datas)
                # GET ANTENNA STATUS
                antenna = set(data['antenna_status'] for data in blockage)
                coordinates = self.blocks.get_xydata(blockage, antenna, 0, 0, remarks='cron')
                antenna_status = ["{}".format(tmp.capitalize()) for tmp in antenna]
                # INSERT TO BLOCKAGE DATA
                data = {}
                data["vessel_id"] = vessel_id
                data["device_id"] = device_id
                data["antenna_status"] = json.dumps(antenna_status)
                data["coordinates"] = json.dumps(coordinates)
                data["blockzones"] = json.dumps(block_zone)
                data["epoch_date"] = int(late_st)
                data["created_on"] = time.time()
                data["update_on"] = time.time()
                self.postgres.insert('blockage_data', data)
                late_et += 1
            except Exception:
                pass
        return 1

    def get_blockzones(self, data, zone_number):
        """ Return Block Zones.

        Per-zone values come from the ``General`` section first and, when
        missing there, from the per-zone ``blockzoneN`` documents.

        NOTE(review): the *_val accumulators are deliberately NOT reset
        between zones (matching the original code) — a zone with missing
        keys inherits the previous zone's values; confirm this is intended.
        """
        assert data, "Data is required."
        blockzone = []
        blocks = {}
        if zone_number:
            try:
                az_start_val = None
                el_start_val = None
                az_end_val = None
                el_end_val = None
                bz_type_val = None
                bz_val = None
                for num in range(1, zone_number + 1):
                    bzstatus = "bz{0}Status".format(num)
                    az_start = "bz{0}AzStart".format(num)
                    el_start = "bz{0}ElStart".format(num)
                    az_end = "bz{0}AzEnd".format(num)
                    el_end = "bz{0}ElEnd".format(num)
                    bz_type = "bz{0}Type".format(num)
                    blockzones = "Blockzone{0}".format(num)
                    if bzstatus in data['General']:
                        bz_val = data['General'][bzstatus]
                    if az_start in data['General']:
                        az_start_val = data['General'][az_start]
                    if el_start in data['General']:
                        el_start_val = data['General'][el_start]
                    if az_end in data['General']:
                        az_end_val = data['General'][az_end]
                    if el_end in data['General']:
                        el_end_val = data['General'][el_end]
                    if bz_type in data['General']:
                        bz_type_val = data['General'][bz_type]
                    if blockzones.lower() in data:
                        if not bz_val:
                            bz_val = data[blockzones.lower()]['Status']
                        if not az_start_val:
                            az_start_val = data[blockzones.lower()]['AzStart']
                        # BUG FIX: the fallback below used to assign the
                        # document's 'AzEnd' into az_end_val when
                        # *el_start_val* was missing, leaving el_start_val
                        # permanently unset and az_end_val gated on the
                        # wrong condition.
                        if not el_start_val:
                            el_start_val = data[blockzones.lower()]['ElStart']
                        if not az_end_val:
                            az_end_val = data[blockzones.lower()]['AzEnd']
                        if not el_end_val:
                            el_end_val = data[blockzones.lower()]['ElEnd']
                    bz_item = {bzstatus: bz_val,
                               az_start: az_start_val,
                               az_end: az_end_val,
                               el_start: el_start_val,
                               el_end: el_end_val,
                               bz_type: bz_type_val}
                    blocks[blockzones] = bz_item
                # Single wrapper around the accumulated zones.
                blockzone.append({"blockage": blocks})
            except ValueError:
                print("No Timestamp found")
        return blockzone
def vpn_terminal_receiver():
    '''
    usage: python script_vpn_real_vessel_manual_creation.py
           -vessel_number <vessel-number> -vessel_name <vessel_name>
           -vessel_os <vessel-os>
    eg. [terminal command]: python script_vpn_real_vessel_manual_creation.py
           -vessel_number 1 -vessel_name LABO1 -vessel_os LINUX
    help: python script_vpn_real_vessel_manual_creation.py -h

    Also requires -token, -callback_url and -job_id (see the parser below),
    even though the usage examples above omit them.
    '''
    print('\n***[START] SCRIPT_VPN_REAL_VESSEL_MANUAL_CREATION. \n\n')

    # Command-line contract for the manual real-vessel VPN creation script.
    parser = argparse.ArgumentParser()
    parser.add_argument('-token', action='store', dest='token', help='Token')
    parser.add_argument('-callback_url', action='store', dest='callback_url',
                        help='Callback - must be URL')
    parser.add_argument('-job_id', action='store', dest='job_id',
                        help='Job ID - must be unique')
    parser.add_argument('-vessel_number', action='store', dest='vessel_number',
                        help='Vessel Number - must be unique')
    parser.add_argument('-vessel_name', action='store', dest='vessel_name',
                        help="Vessel Name")
    parser.add_argument('-vessel_os', action='store', dest="vessel_os",
                        help='Account OS - LINUX/WINDOWS')
    results = parser.parse_args()

    token = results.token
    callback_url = results.callback_url
    job_id = results.job_id
    vessel_number = int(results.vessel_number)
    vessel_name = results.vessel_name
    vessel_os = results.vessel_os

    # NOTE(review): asserts are stripped under `python -O` and
    # `assert vessel_number` also rejects the legitimate value 0 —
    # consider explicit `parser.error(...)` validation instead.
    assert token
    assert callback_url
    assert job_id
    assert vessel_number
    assert vessel_name
    assert vessel_os

    # GET ID OF DEFAULT TABLE-ID OF MANUAL-INSERTED REAL-VESSEL BASED ON JOB-ID'S -3
    postgresql_query = PostgreSQL()
    manual_realvessel_tbl_id = None
    postgresql_query.connection()
    sql_str = "select * from vpn_access_requests where job_id = -3"
    result = postgresql_query.query_fetch_one(sql_str)
    postgresql_query.close_connection()
    # hasattr(result, 'get') doubles as a "row was found" check
    # (query_fetch_one presumably returns a mapping or None — TODO confirm).
    if hasattr(result, 'get'):
        manual_realvessel_tbl_id = result['id']

    ##### FINAL DATA
    id_vpn_access_requests = manual_realvessel_tbl_id  # DEFAULT FOR MANUAL CREATION - [DO NOT CHANGE THIS]
    vpn_type = 'VESSEL'  # DEFAULT - [DO NOT CHANGE THIS]
    account_id = vessel_number
    account_name = vessel_name
    account_os = vessel_os

    vpn_access_create = Vpn_Access_Create(id_vpn_access_requests, account_id,
                                          account_name, vpn_type, account_os)
    vpn_access_create.create_static_ip()

    # current_ip_1 being set signals a successful static-IP allocation.
    if vpn_access_create.current_ip_1:
        print('\n\n')
        print('-' * 50)
        print('Data: {}'.format(vpn_access_create))
        print('ZIP FILE PATH: {}'.format(vpn_access_create.zip_file_path))
        print('-' * 50)
        vessel_ip_address = vpn_access_create.current_ip_1
        directory = vpn_access_create.zip_file_path
        # Report success back to the caller's webhook.
        callback('active', 'ok', directory, callback_url, token, job_id, vessel_ip_address)
        print('\n\n***[DONE] SCRIPT_VPN_REAL_VESSEL_MANUAL_CREATION.\n')
    else:
        # Failure path: dump the inputs for manual debugging (no callback).
        print("Invalid Data!!!")
        print('-' * 50)
        print(token)
        print(callback_url)
        print(job_id)
        print(vessel_number)
        print(vessel_name)
        print(vessel_os)
        print('-' * 50)
class NoonReportCSV(Common):
    """Class for NoonReportCSV.

    Exports selected email_log rows (noon reports) into a CSV file under
    the local ``csv/`` directory and returns its path.
    """

    # INITIALIZE
    def __init__(self):
        """The Constructor for NoonReportCSV class"""
        self.postgresql_query = PostgreSQL()
        self.unit_conversion = UnitConversion()
        super(NoonReportCSV, self).__init__()

    def noon_report_csv(self):
        """
        This API is to get noon report
        ---
        tags:
          - Email
        produces:
          - application/json
        parameters:
          - name: token
            in: header
            description: Token
            required: true
            type: string
          - name: userid
            in: header
            description: User ID
            required: true
            type: string
          - name: mail_log_ids
            in: query
            description: Mail log ID's
            required: true
            type: string
        responses:
          500:
            description: Error
          200:
            description: Permission
        """
        data = {}

        # GET DATA
        token = request.headers.get('token')
        userid = request.headers.get('userid')
        mail_log_ids = request.args.get('mail_log_ids')

        # CHECK TOKEN
        token_validation = self.validate_token(token, userid)
        if not token_validation:
            data["alert"] = "Invalid Token"
            data['status'] = 'Failed'
            # RETURN ALERT
            return self.return_data(data)

        # Unique-ish output name based on the current epoch second.
        outfile = "csv/" + str(int(time.time())) + "_noon_report.csv"
        self.generate_csv(mail_log_ids, outfile)

        datas = {}
        datas['location'] = outfile
        datas['status'] = 'ok'
        return self.return_data(datas)

    def generate_csv(self, mail_log_ids, outfile):
        """Generate CSV from the comma-separated *mail_log_ids* string."""
        # MAIL LOG IDs — build a SQL "IN (...)" literal; a lone id cannot go
        # through tuple() or it would render as "('5',)".
        if len(mail_log_ids.split(",")) == 1:
            mlis = "(" + str(mail_log_ids) + ")"
        else:
            mlis = tuple(mail_log_ids.split(","))
        # NOTE(review): ids are interpolated directly into SQL — injection
        # risk if mail_log_ids is attacker-controlled; verify upstream.
        sql_str = "SELECT el.mail_log_id,el.message,el.data_date,el.created_on,ev.email FROM"
        sql_str += " email_log el INNER JOIN email_vessel ev ON"
        sql_str += " el.email_vessel_id=ev.email_vessel_id WHERE"
        sql_str += " mail_log_id in " + str(mlis)
        rows = self.postgresql_query.query_fetch_all(sql_str)
        headers = ["DATE SENT"]
        headers = headers + self.get_headers(rows)
        self.set_values(headers, rows, outfile)
        return 0

    def set_values(self, headers, rows, outfile):
        """ Set Values — write one CSV line per email_log row. """
        with open(outfile, 'w', newline='') as file:
            writer = csv.DictWriter(file, fieldnames=headers)
            writer.writeheader()
            for row in rows:
                data = {}
                # message is a list of {label, value} dicts; the three section
                # headings are layout-only and excluded from the CSV.
                for mssg in row['message']:
                    defaults = ["General Info", "VSAT Info", "Failover Info"]
                    if mssg['label'] and mssg['label'] not in defaults:
                        data[mssg['label']] = mssg['value']
                # created_on is an epoch; rendered as naive UTC.
                utc = datetime.utcfromtimestamp(
                    row['created_on']).strftime('%d %B %Y %I:%M:%S %p')
                date_send = utc + " UTC"
                data['DATE SENT'] = str(date_send)
                data['RECIPIENT'] = row['email']
                # data_date looks like "<something>,<as-of date>" — TODO confirm.
                if row['data_date']:
                    data['AS of'] = row['data_date'].split(",")[1]
                writer.writerow(data)

    def get_headers(self, rows):
        """ Get Headers — union of all message labels plus fixed columns. """
        headers = set()
        headers.add("AS of")
        headers.add("RECIPIENT")
        for row in rows:
            headers.update({x['label'] for x in row['message']})
        # Section headings never become CSV columns.
        headers.discard("General Info")
        headers.discard("VSAT Info")
        headers.discard("Failover Info")
        return list(filter(None, headers))
def __init__(self):
    """Initialize the NoonReportPDF endpoint's collaborators."""
    # Measurement-unit helper for rendering report values.
    self.unit_conversion = UnitConversion()
    # PostgreSQL client used by the report queries.
    self.postgresql_query = PostgreSQL()
    super(NoonReportPDF, self).__init__()
class NoonReport(Common):
    """Class for NoonReport — paginated listing of noon-report emails."""

    # INITIALIZE
    def __init__(self):
        """The Constructor for NoonReport class"""
        self.postgresql_query = PostgreSQL()
        super(NoonReport, self).__init__()

    def noon_report(self):
        """
        This API is to get noon report
        ---
        tags:
          - Email
        produces:
          - application/json
        parameters:
          - name: token
            in: header
            description: Token
            required: true
            type: string
          - name: userid
            in: header
            description: User ID
            required: true
            type: string
          - name: vessel_id
            in: query
            description: Vessel ID
            required: true
            type: string
          - name: limit
            in: query
            description: Limit
            required: true
            type: integer
          - name: page
            in: query
            description: Page
            required: true
            type: integer
        responses:
          500:
            description: Error
          200:
            description: Permission
        """
        data = {}

        # GET DATA
        token = request.headers.get('token')
        userid = request.headers.get('userid')
        page = int(request.args.get('page'))
        limit = int(request.args.get('limit'))
        vessel_id = request.args.get('vessel_id')

        # CHECK TOKEN
        token_validation = self.validate_token(token, userid)
        if not token_validation:
            data["alert"] = "Invalid Token"
            data['status'] = 'Failed'
            # RETURN ALERT
            return self.return_data(data)

        # COMPANY DATA
        datas = self.get_noon_report(vessel_id, page, limit)
        datas['status'] = 'ok'
        return self.return_data(datas)

    def get_noon_report(self, vessel_id, page, limit):
        """Return one page of noon-report rows plus paging metadata.

        NOTE(review): vessel_id is interpolated directly into SQL —
        injection risk if it is attacker-controlled; parameterize when the
        query helper supports it.
        """
        offset = int((page - 1) * limit)

        # COUNT
        sql_str = "SELECT COUNT(*) "
        sql_str += " FROM email_log el INNER JOIN email_vessel ev ON"
        sql_str += " el.email_vessel_id=ev.email_vessel_id WHERE"
        sql_str += " ev.vessel_id='" + vessel_id + "'"
        count = self.postgresql_query.query_fetch_one(sql_str)
        total_rows = count['count']

        # DATA
        sql_str = "SELECT el.mail_log_id, el.created_on, ev.email"
        sql_str += " FROM email_log el INNER JOIN email_vessel ev ON"
        sql_str += " el.email_vessel_id=ev.email_vessel_id WHERE"
        sql_str += " ev.vessel_id='{0}' LIMIT {1} OFFSET {2} ".format(vessel_id, limit, offset)
        rows = self.postgresql_query.query_fetch_all(sql_str)

        total_page = int(math.ceil(int(total_rows - 1) / limit))

        data = {}
        data['rows'] = rows
        data['total_rows'] = total_rows
        data['total_page'] = total_page
        data['limit'] = limit
        data['page'] = page
        return data

    # GET VESSELS OF COMPANY
    def get_company_vessels(self, company_id):  #, user=None):
        """Return Company Vessels"""
        assert company_id, "CompanyID is required."
        # DATA
        vessels = []
        sql_str = "SELECT * FROM company_vessels WHERE company_id={0}".format(company_id)
        # BUG FIX: previously queried via self.postgres, which this class
        # never initializes (it only appears as a side effect of
        # Common.validate_token); use the client created in __init__.
        vessels = self.postgresql_query.query_fetch_all(sql_str)
        data = {}
        data['rows'] = vessels
        return data
class Reinvite(Common, ShaSecurity):
    """Class for Reinvite — regenerates a password and re-sends invitations."""
    # pylint: disable=too-many-instance-attributes

    # INITIALIZE
    def __init__(self):
        """The Constructor for Reinvite class.

        FIX: the CouchDB/Queries/PostgreSQL clients and epoch_default were
        previously initialized twice back-to-back; the duplicates are removed.
        """
        self._couch_db = CouchDatabase()
        self.couch_query = Queries()
        self.postgres = PostgreSQL()
        self.epoch_default = 26763

        # INIT CONFIG
        self.config = ConfigParser()
        # CONFIG FILE
        self.config.read("config/config.cfg")

        self.vpn_db_build = config_section_parser(self.config, "VPNDB")['build']
        super(Reinvite, self).__init__()

        if self.vpn_db_build.upper() == 'TRUE':
            self.my_ip = config_section_parser(self.config, "IPS")['my']
            self.my_protocol = config_section_parser(self.config, "IPS")['my_protocol']
            self.user_vpn = config_section_parser(self.config, "IPS")['user_vpn']
            self.user_protocol = config_section_parser(self.config, "IPS")['user_protocol']
            self.vessel_vpn = config_section_parser(self.config, "IPS")['vessel_vpn']
            self.vessel_protocol = config_section_parser(
                self.config, "IPS")['vessel_protocol']
            # NOTE(review): set only when VPNDB build is enabled (matching the
            # original layout) — confirm no code path reads vpn_token otherwise.
            self.vpn_token = '269c2c3706886d94aeefd6e7f7130ab08346590533d4c5b24ccaea9baa5211ec'

    # GET VESSEL FUNCTION
    def reinvite(self):
        """
        This API is for Sending reinvite
        ---
        tags:
          - User
        produces:
          - application/json
        parameters:
          - name: token
            in: header
            description: Token
            required: true
            type: string
          - name: userid
            in: header
            description: User ID
            required: true
            type: string
          - name: query
            in: body
            description: Reinvite
            required: true
            schema:
              id: Reinvite
              properties:
                user_ids:
                  types: array
                  example: []
        responses:
          500:
            description: Error
          200:
            description: Sending reinvitaion
        """
        # INIT DATA
        data = {}

        # GET DATA
        token = request.headers.get('token')
        userid = request.headers.get('userid')

        # GET JSON REQUEST
        query_json = request.get_json(force=True)

        # GET REQUEST PARAMS
        user_ids = query_json["user_ids"]

        # CHECK TOKEN
        token_validation = self.validate_token(token, userid)
        if not token_validation:
            data["alert"] = "Invalid Token"
            data['status'] = 'Failed'
            # RETURN ALERT
            return self.return_data(data)

        for user_id in user_ids:
            # CREATE SQL QUERY
            sql_str = "SELECT * FROM account WHERE id='" + str(user_id) + "'"
            res = self.postgres.query_fetch_one(sql_str)

            # Fresh plain-text password; only its hash is stored.
            password = self.generate_password()
            items = {}
            items['password'] = self.string_to_sha_plus(password)

            # INSERT INVITATION
            self.update_invitation(user_id, items)

            # SEND INVITATION
            self.send_invitation(res['email'], password, res['url'])

            sql_str = "SELECT * FROM job WHERE job_id in ("
            sql_str += "SELECT job_id FROM account_vpn WHERE "
            sql_str += "account_id={0} and status=true)".format(user_id)
            jobs = self.postgres.query_fetch_all(sql_str)

            if self.vpn_db_build.upper() == 'TRUE':
                # Re-send every active VPN bundle the account owns.
                for job in jobs:
                    email = res['email']
                    filename = job['directory'].split("/")[-1]
                    vpn_dir = '/home/admin/all_vpn/' + filename
                    emailer = Email()
                    email_temp = Message()
                    message = email_temp.message_temp(
                        "Please see attached VPN.")
                    subject = "Web VPN"
                    if job['vpn_type'] in ['VCLIENT', 'VRH']:
                        subject = "Vessel VPN"
                    instruction = './Instructions_OPENVPN.pdf'
                    emailer.send_email(email, message, subject, [vpn_dir, instruction])

        data = {}
        data['message'] = "Re: Invitation successfully sent!"
        data['status'] = "ok"
        return self.return_data(data)

    def update_invitation(self, account_id, query_json):
        """Update Invitation — write the new password hash onto the account."""
        # INIT CONDITION
        conditions = []
        # CONDITION FOR QUERY
        conditions.append({"col": "id", "con": "=", "val": account_id})
        self.postgres.update('account', query_json, conditions)
        return 1

    def send_invitation(self, email, password, url):
        """Send Invitation email containing the new password."""
        email_temp = Invitation()
        emailer = Email()
        message = email_temp.invitation_temp(password, url)
        subject = "Re-Invitation"
        emailer.send_email(email, message, subject, image="")
        return 1

    def generate_password(self):
        """Generate an 8-char password over [A-Za-z0-9]."""
        char = string.ascii_uppercase
        char += string.ascii_lowercase
        char += string.digits
        return self.random_str_generator(8, char)
#!/usr/bin/env python3 # coding: utf-8 # pylint: disable=bare-except """Delete Vessel""" import sys import time import json import requests from library.couch_database import CouchDatabase from library.postgresql_queries import PostgreSQL COUCHDB = CouchDatabase() POSTGRES = PostgreSQL() # couch_query = COUCHDB.couch_db_link() def single_delete(doc_id, rev): """Single Delete""" url = COUCHDB.couch_db_link() url += '/' + doc_id + '?' + 'rev=' + rev headers = {"Content-Type" : "application/json"} response = requests.delete(url, headers=headers) response = response.json() return response def bulk_delete(query): """Bulk Delete""" count = 1
class Upload(Common):
    """Class for Vessels — uploads a vessel image to S3 and records it."""

    # INITIALIZE
    def __init__(self):
        """The Constructor for Vessels class"""
        self.couch_query = Queries()
        self.postgres = PostgreSQL()
        self.aws3 = AwsS3()
        super(Upload, self).__init__()

    # GET VESSEL FUNCTION
    def image_upload(self):
        """
        This API is for Uploading Vessel Image
        ---
        tags:
          - Vessel
        produces:
          - application/json
        parameters:
          - name: token
            in: header
            description: Token
            required: true
            type: string
          - name: userid
            in: header
            description: User ID
            required: true
            type: string
          - name: vessel_id
            in: query
            description: Vessel ID
            required: true
            type: string
          - name: upfile
            in: formData
            description: Vessel image
            required: true
            type: file
        consumes:
          - multipart/form-data
        responses:
          500:
            description: Error
          200:
            description: Vessel Information
        """
        # INIT DATA
        data = {}

        # VESSEL ID
        vessel_id = request.args.get('vessel_id')

        # # GET DATA
        token = request.headers.get('token')
        userid = request.headers.get('userid')

        # CHECK TOKEN
        token_validation = self.validate_token(token, userid)
        if not token_validation:
            data["alert"] = "Invalid Token"
            data['status'] = 'Failed'
            # RETURN ALERT
            return self.return_data(data)

        # RH_<VesselIMO>_<ImageID>
        parameters = self.couch_query.get_complete_values(
            vessel_id, "PARAMETERS")

        # VESSEL IMO — fall back to the vessel id when no parameters exist.
        vessel_imo = vessel_id
        if parameters:
            vessel_imo = parameters['PARAMETERS']['INFO']['IMO']

        try:
            filename = request.files['upfile'].filename
            ext = filename.split(".")[-1]
            if not self.allowed_image_type(filename):
                data["alert"] = "File Type Not Allowed!"
                data['status'] = 'Failed'
                return self.return_data(data)
        # BUG FIX: a missing 'upfile' part raises werkzeug's
        # BadRequestKeyError (a KeyError subclass) — the previous
        # `except ImportError` could never catch it, so the request
        # crashed with a 400/500 instead of returning the alert below.
        except KeyError:
            data["alert"] = "No image!"
            data['status'] = 'Failed'
            # RETURN ALERT
            return self.return_data(data)

        filename = self.rename_image(vessel_id, filename)

        vimg_data = {}
        vimg_data['vessel_id'] = vessel_id
        vimg_data['vessel_imo'] = vessel_imo
        vimg_data['image_name'] = filename
        vimg_data['status'] = "active"
        vimg_data['created_on'] = time.time()

        # VERIFY IF IMAGE EXISTS FOR VESSEL
        sql_str = "SELECT * FROM vessel_image"
        sql_str += " WHERE vessel_id='{0}'".format(vessel_id)
        sql_str += " AND status = 'active'"
        vessel = self.postgres.query_fetch_one(sql_str)
        if vessel:
            # INIT CONDITION
            conditions = []
            # CONDITION FOR QUERY
            conditions.append({
                "col": "vessel_image_id",
                "con": "=",
                "val": vessel['vessel_image_id']
            })
            update_column = {}
            # Stamp update_on on deactivation, matching
            # Delete.delete_vessel_images.
            update_column['update_on'] = time.time()
            update_column['status'] = "inactive"
            self.postgres.update('vessel_image', update_column, conditions)

        vessel_image_id = self.postgres.insert('vessel_image', vimg_data, 'vessel_image_id')

        # IMAGE FILE NAME
        img_file = 'Vessel/' + "RH_" + vessel_imo + "_" + str(
            vessel_image_id) + "." + ext
        body = request.files['upfile']

        # SAVE TO S3
        image_url = ""
        if self.aws3.save_file(img_file, body):
            image_url = self.aws3.get_url(img_file)

        data["status"] = "ok"
        data["image_url"] = image_url

        # RETURN
        return self.return_data(data)

    def allowed_image_type(self, filename):
        """ Check Allowed File Extension (png/jpg/jpeg/gif, case-insensitive). """
        allowed_extensions = set(['png', 'jpg', 'jpeg', 'gif'])
        return '.' in filename and filename.rsplit(
            '.', 1)[1].lower() in allowed_extensions

    def rename_image(self, vessel_id, filename):
        """Rename Image — recurse until the name is unique for the vessel."""
        sql_str = "SELECT * FROM vessel_image"
        sql_str += " WHERE vessel_id='{0}'".format(vessel_id)
        sql_str += " AND image_name='{0}'".format(filename)
        vessel_image = self.postgres.query_fetch_one(sql_str)
        if vessel_image:
            new_name = self.file_replace(vessel_image['image_name'])
            return self.rename_image(vessel_id, new_name)
        return filename
class UpdateINIFiles(Common):
    """Class for Update INI Files — persists edited INI contents per vessel."""

    # INITIALIZE
    def __init__(self):
        """The Constructor for Update INI Files class"""
        self.postgres = PostgreSQL()
        super(UpdateINIFiles, self).__init__()

    def update_inifiles(self):
        """
        This API is for Update INI Files
        ---
        tags:
          - INI Files
        produces:
          - application/json
        parameters:
          - name: token
            in: header
            description: Token
            required: true
            type: string
          - name: userid
            in: header
            description: User ID
            required: true
            type: string
          - name: query
            in: body
            description: Update INI Files
            required: true
            schema:
              id: Update INI Files
              properties:
                datas:
                  type: json
                  example: {}
                vessel_id:
                  type: string
        responses:
          500:
            description: Error
          200:
            description: Update INI Files
        """
        data = {}

        # GET JSON REQUEST
        query_json = request.get_json(force=True)
        vessel_id = query_json['vessel_id']
        inidatas = query_json['datas']

        # GET HEADER
        token = request.headers.get('token')
        userid = request.headers.get('userid')

        # CHECK TOKEN
        token_validation = self.validate_token(token, userid)
        if not token_validation:
            data["alert"] = "Invalid Token"
            data['status'] = 'Failed'
            # RETURN ALERT
            return self.return_data(data)

        if not self.update_ini_datas(vessel_id, inidatas):
            data["alert"] = "Please check your query!"
            data['status'] = 'Failed'
            # RETURN ALERT
            return self.return_data(data)

        data['message'] = "INI Files successfully updated!"
        data['status'] = "ok"
        return self.return_data(data)

    def update_ini_datas(self, vessel_id, inidatas):
        """Update INI Files.

        *inidatas* maps a file path ("dir") to the raw INI text; the text is
        stored line-split as JSON. Returns 0 if any row update fails
        (earlier successful updates are NOT rolled back).
        """
        update_time = time.time()
        updates = True
        for vdata in inidatas.keys():
            conditions = []
            # CONDITION FOR QUERY
            conditions.append({
                "col": "vessel_id",
                "con": "=",
                "val": vessel_id
            })
            conditions.append({"col": "dir", "con": "=", "val": vdata})
            new_data = {}
            # Content is stored as a JSON array of lines.
            new_data['content'] = json.dumps(inidatas[vdata].split("\n"))
            new_data['lastupdate'] = update_time
            new_data['vessel_lastupdate'] = update_time
            # UPDATE INI FILES
            if not self.postgres.update('ini_files', new_data, conditions):
                updates = False
        if not updates:
            # RETURN
            return 0
        # RETURN
        return 1
class DeviceImages(Common):
    """Class for DeviceImages"""

    # INITIALIZE
    def __init__(self):
        """The Constructor for DeviceImages class"""
        # Storage clients: S3 for image URLs, Couch + Postgres for metadata.
        self.aws3 = AwsS3()
        self.couch_query = Queries()
        self.postgres = PostgreSQL()
        super(DeviceImages, self).__init__()

    def get_images(self):
        """
        This API is for Getting All Vessel Device Images
        ---
        tags:
          - Devices
        produces:
          - application/json
        parameters:
          - name: token
            in: header
            description: Token
            required: true
            type: string
          - name: userid
            in: header
            description: User ID
            required: true
            type: string
          - name: vessel_id
            in: query
            description: Vessel ID
            required: true
            type: string
        responses:
          500:
            description: Error
          200:
            description: Device Images
        """
        response = {}

        # Request context.
        token = request.headers.get('token')
        userid = request.headers.get('userid')
        vessel_id = request.args.get('vessel_id')

        # Token gate — reject early on invalid credentials.
        if not self.validate_token(token, userid):
            response['alert'] = "Invalid Token"
            response['status'] = 'Failed'
            return self.return_data(response)

        rows = self.get_device_images(vessel_id)
        # Attach the S3 image URL to every row in place.
        for row in rows:
            row['image_url'] = self.aws3.get_device_image(
                row['vessel_id'], row['device_id'])

        response['device_images'] = rows
        response['status'] = 'ok'
        return self.return_data(response)

    def get_device_images(self, vessel_id):
        """Fetch the active device_image rows for one vessel."""
        assert vessel_id, "Vessel ID is required."
        sql_str = (
            "SELECT * FROM device_image"
            f" WHERE vessel_id = '{vessel_id}'"
            " AND status = 'active'"
        )
        return self.postgres.query_fetch_all(sql_str)
class UpdateSubCategory(Common):
    """Class for UpdateSubCategory — renames a subcategory and relinks options."""

    # INITIALIZE
    def __init__(self):
        """The Constructor for UpdateSubCategory class"""
        self.postgresql_query = PostgreSQL()
        self._couch_db = CouchDatabase()
        self.couch_query = Queries()
        super(UpdateSubCategory, self).__init__()

    def update_subcategory(self):
        """
        This API is for Updating Sub Category
        ---
        tags:
          - Category
        produces:
          - application/json
        parameters:
          - name: token
            in: header
            description: Token
            required: true
            type: string
          - name: userid
            in: header
            description: User ID
            required: true
            type: string
          - name: query
            in: body
            description: SubCategory Update
            required: true
            schema:
              id: SubCategory Update
              properties:
                subcategory_id:
                  type: integer
                subcategory_name:
                  type: string
                options:
                  types: array
                  example: []
        responses:
          500:
            description: Error
          200:
            description: Update Sub Category
        """
        data = {}

        # GET JSON REQUEST
        query_json = request.get_json(force=True)

        # GET HEADER
        token = request.headers.get('token')
        userid = request.headers.get('userid')

        # CHECK TOKEN
        token_validation = self.validate_token(token, userid)
        if not token_validation:
            data["alert"] = "Invalid Token"
            data['status'] = 'Failed'
            # RETURN ALERT
            return self.return_data(data)

        # UPDATE SUBCATEGORY
        if not self.update(query_json):
            data[
                "alert"] = "Please check your query! | Sub Category Update Failed."
            data['status'] = 'Failed'
            # RETURN ALERT
            return self.return_data(data)

        if query_json['options']:
            if not self.update_subcategory_options(query_json):
                data[
                    "alert"] = "Please check your query! | Failed to link options subcategory."
                data['status'] = 'Failed'
                return self.return_data(data)

        data['message'] = "Sub Category successfully updated!"
        data['status'] = "ok"
        return self.return_data(data)

    def update(self, query_json):
        """Update SubCategory — rename the row identified by subcategory_id."""
        # NOTE(review): update_on is computed here but never written to the
        # table (only subcategory_name is) — confirm whether it belongs in
        # the update payload below.
        query_json['update_on'] = time.time()
        conditions = []
        conditions.append({
            "col": "subcategory_id",
            "con": "=",
            "val": query_json['subcategory_id']
        })
        data = {}
        data['subcategory_name'] = query_json['subcategory_name']
        # BUG FIX: previously called self.postgres.update, but this class
        # only creates self.postgresql_query in __init__ (self.postgres
        # exists merely as a side effect of Common.validate_token).
        if self.postgresql_query.update('subcategory', data, conditions):
            return 1
        return 0

    def update_subcategory_options(self, query_json):
        """Update SubCategory Options — replace all option links atomically-ish.

        Deletes the existing links, then re-inserts one row per option.
        Returns 0 on the first failed insert.
        """
        subcategory_id = query_json['subcategory_id']
        conditions = []
        conditions.append({
            "col": "subcategory_id",
            "con": "=",
            "val": subcategory_id
        })
        self.postgresql_query.delete('subcategory_options', conditions)
        data = {}
        data['subcategory_id'] = subcategory_id
        for opt in query_json['options']:
            data['option'] = opt
            subcategory = self.postgresql_query.insert(
                'subcategory_options', data, str(query_json['subcategory_id']))
            if not subcategory:
                return 0
        return subcategory
class Common():
    """Shared helper mix-in for the API endpoint classes.

    Provides the JSON response wrapper, token validation, and assorted
    small data-munging utilities used by the endpoint subclasses.
    """

    # RETURN DATA
    def return_data(self, data):
        """Serialize *data* as a Flask JSON response."""
        # RETURN
        return jsonify(data)

    # SQL QUERY RETURN CONVERT TO JSON
    def convert_to_json(self, data):
        """Zip raw SQL result rows with column headers into a JSON string.

        NOTE(review): ``row_headers`` is not defined in this method or
        anywhere visible in this module — calling this raises NameError.
        Verify the intended source of the column headers.
        """
        # INITIALIZE
        json_data = []
        # LOOP DATA
        for result in data:
            # APPEND JSON DATA
            json_data.append(dict(zip(row_headers, result)))
        # RETURN
        return json.dumps(json_data)

    # REMOVE KEY
    def remove_key(self, data, item):
        """Delete key *item* from dict *data* (if present) and return the dict."""
        # CHECK DATA
        if item in data:
            # REMOVE DATA
            del data[item]
        # RETURN
        return data

    # GET INFO
    def get_info(self, columns, table):
        """Fetch ONE row of *columns* from *table* via MySQL; 0 if no columns."""
        # CHECK IF COLUMN EXIST,RETURN 0 IF NOT
        if not columns:
            return 0
        # INITIALIZE
        cols = ''
        count = 1
        # LOOP COLUMNS — join names with ", " (no comma after the last).
        for data in columns:
            # CHECK IF COUNT EQUAL COLUMN LENGHT
            if len(columns) == count:
                # ADD DATA
                cols += data
            else:
                # ADD DATA
                cols += data + ", "
            # INCREASE COUNT
            count += 1
        # CREATE SQL QUERY
        sql_str = "SELECT " + cols + " FROM " + table
        # INITIALIZE DATABASE INFO
        self.my_db = MySQL_DATABASE()
        # CONNECT TO DATABASE
        self.my_db.connection_to_db(self.my_db.database)
        # CALL FUNCTION QUERY ONE
        ret = self.my_db.query_fetch_one(sql_str)
        # CLOSE CONNECTION
        self.my_db.close_connection()
        # RETURN
        return ret

    # GET INFOS
    def get_infos(self, columns, table):
        """Fetch ALL rows of *columns* from *table* via MySQL; 0 if no columns."""
        # CHECK IF COLUMN EXIST,RETURN 0 IF NOT
        if not columns:
            return 0
        # INITIALIZE
        cols = ''
        count = 1
        # LOOP COLUMNS — same ", " join as get_info.
        for data in columns:
            # CHECK IF COUNT EQUAL COLUMN LENGHT
            if len(columns) == count:
                # ADD DATA
                cols += data
            else:
                # ADD DATA
                cols += data + ", "
            # INCREASE COUNT
            count += 1
        # CREATE SQL QUERY
        sql_str = "SELECT " + cols + " FROM " + table
        # INITIALIZE DATABASE INFO
        self.my_db = MySQL_DATABASE()
        # CONNECT TO DATABASE
        self.my_db.connection_to_db(self.my_db.database)
        # CALL FUNCTION QUERY ONE
        ret = self.my_db.query_fetch_all(sql_str)
        # CLOSE CONNECTION
        self.my_db.close_connection()
        # RETURN
        return ret

    # GET USER INFO
    def get_user_info(self, columns, table, user_id, token):
        """Fetch one row matching *token* AND *user_id* via PostgreSQL.

        NOTE(review): token/user_id are interpolated directly into the SQL
        string — injection risk if these can be attacker-controlled.
        """
        # CHECK IF COLUMN EXIST,RETURN 0 IF NOT
        if not columns:
            return 0
        # INITIALIZE
        cols = ''
        count = 1
        # LOOP COLUMNS
        for data in columns:
            # CHECK IF COUNT EQUAL COLUMN LENGHT
            if len(columns) == count:
                # ADD DATA
                cols += data
            else:
                # ADD DATA
                cols += data + ", "
                # INCREASE COUNT
            count += 1
        # CREATE SQL QUERY
        sql_str = "SELECT " + cols + " FROM " + table + " WHERE "
        sql_str += " token = '" + token + "'"
        sql_str += " AND id = '" + user_id + "'"
        # INITIALIZE DATABASE INFO
        # self.my_db = MySQL_DATABASE()
        self.postgres = PostgreSQL()
        # CONNECT TO DATABASE
        self.postgres.connection()
        # CALL FUNCTION QUERY ONE
        ret = self.postgres.query_fetch_one(sql_str)
        # CLOSE CONNECTION
        self.postgres.close_connection()
        # RETURN
        return ret

    # VALIDATE TOKEN
    def validate_token(self, token, user_id):
        """Return 1 when *token* belongs to *user_id* and was active recently.

        Side effect: refreshes account.update_on to "now" on every call,
        effectively implementing a sliding idle timeout (rejects when the
        previous activity is an hour or more old, or > 30 minutes).
        """
        import datetime
        import dateutil.relativedelta
        # CHECK IF COLUMN EXIST,RETURN 0 IF NOT
        if not token:
            return 0
        # SET COLUMN FOR RETURN
        columns = ['username', 'update_on']
        # CHECK IF TOKEN EXISTS
        user_data = self.get_user_info(columns, "account", user_id, token)
        data = {}
        data['update_on'] = time.time()
        condition = []
        temp_con = {}
        temp_con['col'] = 'id'
        temp_con['val'] = user_id
        temp_con['con'] = "="
        condition.append(temp_con)
        self.postgres = PostgreSQL()
        self.postgres.update('account', data, condition)
        # CHECK IF COLUMN EXIST,RETURN 0 IF NOT
        if user_data:
            dt1 = datetime.datetime.fromtimestamp(user_data['update_on'])
            dt2 = datetime.datetime.fromtimestamp(time.time())
            rd = dateutil.relativedelta.relativedelta(dt2, dt1)
            # Any gap of an hour or more (in any larger unit) is a reject.
            if rd.years or rd.months or rd.days or rd.hours:
                return 0
            if rd.minutes > 30:
                return 0
        else:
            return 0
        # RETURN
        return 1

    def device_complete_name(self, name, number=''):
        """Map a device code (e.g. 'VSAT') to a human-readable label.

        Raises KeyError for unknown codes.
        """
        # SET READABLE DEVICE NAMES
        humanize_array = {}
        humanize_array['NTWCONF'] = 'Network Configuration'
        humanize_array['NTWPERF'] = 'Network Performance ' + str(number)
        humanize_array['COREVALUES'] = 'Core Values'
        humanize_array['IOP'] = 'Irridium OpenPort ' + str(number)
        humanize_array['VDR'] = 'VDR ' + str(number)
        humanize_array['VSAT'] = 'V-SAT ' + str(number)
        humanize_array['MODEM'] = 'MODEM ' + str(number)
        humanize_array['FBB'] = 'FleetBroadBand ' + str(number)
        humanize_array['VHF'] = 'VHF ' + str(number)
        humanize_array['SATC'] = 'SAT-C ' + str(number)
        # RETURN
        return humanize_array[name]

    # COUNT DATA
    def count_data(self, datas, column, item):
        """Count rows in *datas* whose *column* equals *item*."""
        # INITIALIZE
        count = 0
        # LOOP DATAS
        for data in datas:
            # CHECK OF DATA
            if data[column] == item:
                # INCREASE COUNT
                count += 1
        # RETURN
        return count

    # REMOVE KEY
    def remove_data(self, datas, remove):
        """Return rows whose 'device' value is NOT in *remove*."""
        ret_data = []
        # CHECK DATA
        for data in datas:
            if not data['device'] in remove:
                ret_data.append(data)
        # RETURN
        return ret_data

    def set_return(self, datas):
        """Collect each row's 'value' into {'data': [...]}."""
        ret_data = {}
        ret_data['data'] = []
        for data in datas:
            ret_data['data'].append(data['value'])
        return ret_data

    def check_time_lapse(self, current, timestamp):
        """Classify the gap between two epochs as 'green'/'orange'/'red'.

        Same-day gaps under 10 minutes are green, under 20 orange,
        everything else (including any parse failure) red.
        """
        from datetime import datetime
        struct_now = time.localtime(current)
        new_time = time.strftime("%m/%d/%Y %H:%M:%S %Z", struct_now)
        vessel_time = time.localtime(timestamp)
        vessel_time = time.strftime("%m/%d/%Y %H:%M:%S %Z", vessel_time)
        vessel_time = vessel_time.split(' ')
        v_time = vessel_time[1]
        v_date = vessel_time[0]
        new_time = new_time.split(' ')
        n_time = new_time[1]
        n_date = new_time[0]
        start_date = datetime.strptime(v_date, "%m/%d/%Y")
        end_date = datetime.strptime(n_date, "%m/%d/%Y")
        # Only compare clock times when both stamps fall on the same day.
        if not abs((start_date - end_date).days):
            FMT = '%H:%M:%S'
            tdelta = datetime.strptime(str(n_time), FMT) - datetime.strptime(
                str(v_time), FMT)
            tdelta = str(tdelta).split(":")
            # NOTE(review): bare except — any unexpected tdelta shape
            # silently degrades to 'red'.
            try:
                if int(tdelta[0]):
                    return 'red'
                if int(tdelta[1]) < 10:
                    return 'green'
                if int(tdelta[1]) < 20:
                    return 'orange'
            except:
                return 'red'
        return 'red'

    def get_ids(self, key, datas):
        """Collect the 'module' field of every row in *datas*.

        NOTE(review): the *key* parameter is ignored — the field name is
        hard-coded to 'module'; confirm whether `data[key]` was intended.
        """
        module_ids = []
        for data in datas or []:
            module_ids.append(data['module'])
        return module_ids

    def check_request_json(self, query_json, important_keys):
        """Return 1 when every important key exists with the expected type.

        NOTE(review): `if type(query_json.get(imp_key)):` is always truthy
        (type objects, including NoneType, are truthy), so the missing-key
        `else: return 0` branch is unreachable; a missing key instead fails
        the inner type comparison because type(None) != the expected type.
        """
        # Round-trip through JSON to normalize container/scalar types.
        query_json = simplejson.loads(simplejson.dumps(query_json))
        for imp_key in important_keys.keys():
            if type(query_json.get(imp_key)):
                if not type(query_json[imp_key]) == type(
                        important_keys[imp_key]):
                    return 0
            else:
                return 0
        return 1

    def milli_to_sec(self, millis):
        """Convert milliseconds to whole seconds (truncated)."""
        # SET TO INT
        millis = int(millis)
        # CONVERT
        seconds = (millis / 1000)
        # RETURN
        return int(seconds)
def __init__(self):
    """The Constructor for UpdateRole class"""
    # database helper used by the role update queries
    self.postgres = PostgreSQL()
    super(UpdateRole, self).__init__()
class Permission(Common):
    """Class for Permission

    Serves paginated rows of the permission table through a token
    protected endpoint.
    """

    # INITIALIZE
    def __init__(self):
        """The Constructor for Permission class"""
        self.postgresql_query = PostgreSQL()
        super(Permission, self).__init__()

    def permission(self):
        """
        This API is for Getting Permission
        ---
        tags:
          - Permission
        produces:
          - application/json
        parameters:
          - name: token
            in: header
            description: Token
            required: true
            type: string
          - name: userid
            in: header
            description: User ID
            required: true
            type: string
          - name: limit
            in: query
            description: Limit
            required: true
            type: integer
          - name: page
            in: query
            description: Page
            required: true
            type: integer
        responses:
          500:
            description: Error
          200:
            description: Permission
        """
        response = {}

        # REQUEST HEADERS AND PAGINATION ARGUMENTS
        token = request.headers.get('token')
        userid = request.headers.get('userid')
        page = int(request.args.get('page'))
        limit = int(request.args.get('limit'))

        # TOKEN VALIDATION
        if not self.validate_token(token, userid):
            response["alert"] = "Invalid Token"
            response['status'] = 'Failed'

            # RETURN ALERT
            return self.return_data(response)

        result = self.get_permissions(page, limit)
        result['status'] = 'ok'

        return self.return_data(result)

    def get_permissions(self, page, limit):
        """Return Permissions

        Fetches one page of the permission table plus pagination
        metadata (total rows / total pages).
        """
        offset = int((page - 1) * limit)

        # TOTAL ROW COUNT
        count_query = "SELECT COUNT(*) FROM permission"
        total_rows = self.postgresql_query.query_fetch_one(count_query)['count']

        # REQUESTED PAGE OF ROWS
        rows_query = "SELECT * FROM permission LIMIT {0} OFFSET {1} ".format(
            limit, offset)
        rows = self.postgresql_query.query_fetch_all(rows_query)

        # ceiling division expressed via math.ceil, as elsewhere in the file
        total_page = int(math.ceil(int(total_rows - 1) / limit)) + 1

        return {
            'rows': rows,
            'total_rows': total_rows,
            'total_page': total_page,
            'limit': limit,
            'page': page,
        }
class UpdateRole(Common):
    """Class for UpdateRole

    Updates a role row and replaces its role_permission mappings.
    """

    # INITIALIZE
    def __init__(self):
        """The Constructor for UpdateRole class"""
        self.postgres = PostgreSQL()
        super(UpdateRole, self).__init__()

    def update_role(self):
        """
        This API is for Updating Role
        ---
        tags:
          - Role
        produces:
          - application/json
        parameters:
          - name: token
            in: header
            description: Token
            required: true
            type: string
          - name: userid
            in: header
            description: User ID
            required: true
            type: string
          - name: query
            in: body
            description: Updating Role
            required: true
            schema:
              id: Updating Role
              properties:
                role_id:
                  type: string
                role_name:
                  type: string
                role_details:
                  type: string
                permission_ids:
                  types: array
                  example: []
        responses:
          500:
            description: Error
          200:
            description: Updating Role
        """
        response = {}

        # REQUEST PAYLOAD AND HEADERS
        query_json = request.get_json(force=True)
        token = request.headers.get('token')
        userid = request.headers.get('userid')

        # TOKEN VALIDATION
        if not self.validate_token(token, userid):
            response["alert"] = "Invalid Token"
            response['status'] = 'Failed'

            # RETURN ALERT
            return self.return_data(response)

        # APPLY THE UPDATE
        if not self.update_roles(query_json):
            response["alert"] = "Please check your query!"
            response['status'] = 'Failed'

            # RETURN ALERT
            return self.return_data(response)

        response['message'] = "Role successfully updated!"
        response['status'] = "ok"

        return self.return_data(response)

    def update_roles(self, query_json):
        """Update Roles

        Updates the role row, then replaces all of its role_permission
        rows with the supplied permission_ids. Returns 1 on success,
        0 when the role update fails.
        """
        # stamp the modification time
        query_json['update_on'] = time.time()

        role_id = query_json['role_id']
        permission_ids = query_json['permission_ids']

        # role_id / permission_ids are not columns of the role table
        query_json = self.remove_key(query_json, "role_id")
        query_json = self.remove_key(query_json, "permission_ids")

        # UPDATE ROLE
        update_condition = [{
            "col": "role_id",
            "con": "=",
            "val": role_id
        }]

        if not self.postgres.update('role', query_json, update_condition):
            return 0

        # DELETE OLD PERMISSION mappings, then insert the new set
        delete_condition = [{
            "col": "role_id",
            "con": "=",
            "val": role_id
        }]
        self.postgres.delete('role_permission', delete_condition)

        for permission_id in permission_ids:
            # INSERT NEW PERMISSION OF ROLE
            self.postgres.insert('role_permission', {
                'role_id': role_id,
                'permission_id': permission_id
            })

        return 1
def __init__(self):
    """The Constructor for DeviceImages class"""
    # NOTE(review): docstring says DeviceImages but super() is bound to
    # DeviceList — confirm the enclosing class name; the mismatch raises
    # TypeError at construction unless the class subclasses DeviceList.
    self.postgres = PostgreSQL()
    self.couch_query = Queries()
    self.aws3 = AwsS3()
    super(DeviceList, self).__init__()
class NoonReportPDF(Common):
    """Class for NoonReportPDF

    Builds a landscape "Noon report" PDF from email_log / email_vessel
    rows and exposes it through a token protected endpoint.
    """

    # INITIALIZE
    def __init__(self):
        """The Constructor for NoonReportPDF class"""
        self.postgresql_query = PostgreSQL()
        self.unit_conversion = UnitConversion()
        super(NoonReportPDF, self).__init__()

    def noon_report_pdf(self):
        """
        This API is to get noon report
        ---
        tags:
          - Email
        produces:
          - application/json
        parameters:
          - name: token
            in: header
            description: Token
            required: true
            type: string
          - name: userid
            in: header
            description: User ID
            required: true
            type: string
          - name: mail_log_ids
            in: query
            description: Mail log ID's
            required: true
            type: string
        responses:
          500:
            description: Error
          200:
            description: Permission
        """
        data = {}

        # GET DATA
        token = request.headers.get('token')
        userid = request.headers.get('userid')
        mail_log_ids = request.args.get('mail_log_ids')

        # CHECK TOKEN
        token_validation = self.validate_token(token, userid)

        if not token_validation:
            data["alert"] = "Invalid Token"
            data['status'] = 'Failed'

            # RETURN ALERT
            return self.return_data(data)

        # NOTE(review): assumes the "pdf" directory already exists — confirm.
        output_file = "pdf/" + str(int(time.time())) + "_noon_report.pdf"

        pdf = self.generate_pdf(mail_log_ids)
        pdf.output(output_file, "F")

        datas = {}
        datas['location'] = output_file
        datas['status'] = 'ok'

        return self.return_data(datas)

    def generate_pdf(self, mail_log_ids):
        """Generate PDF

        :param mail_log_ids: comma separated mail log IDs (raw request
            string); raises ValueError when any ID is not an integer.
        :return: the populated FPDF object, one page per fetched row.
        """
        # PDF SIZE Y, X
        pdf = FPDF('L', 'mm', (279.4, 215.9))

        # MAIL LOG IDs
        # Validate every ID as an integer before splicing it into the SQL
        # string: mail_log_ids comes straight from the query string and the
        # previous str()-concatenation was injectable. Building the list
        # uniformly also removes the inconsistent single-ID special case.
        id_list = [str(int(mid)) for mid in mail_log_ids.split(",")]
        mlis = "({0})".format(", ".join(id_list))

        sql_str = "SELECT el.mail_log_id,el.message,el.data_date,el.created_on,ev.email FROM"
        sql_str += " email_log el INNER JOIN email_vessel ev ON"
        sql_str += " el.email_vessel_id=ev.email_vessel_id WHERE"
        sql_str += " mail_log_id in " + mlis

        rows = self.postgresql_query.query_fetch_all(sql_str)

        header = "Noon report"
        for row in rows:
            # one report page per mail log row
            pdf.add_page()
            self.render_header_data(pdf, header)
            self.render_details_data(pdf, row)
            self.render_footer_data(pdf)

        return pdf

    def render_details_data(self, pdf, details):
        """Render Details Data

        Writes the report body: the "As of" line, every message entry,
        then the sent date and recipient. `details` is one row from the
        generate_pdf query; details['message'] is an iterable of
        {'label': ..., 'value': ...} dicts.
        """
        pdf.set_font('Arial', '', 10)
        start_y = 25

        data = "As of " + details['data_date']
        pdf.text(10, start_y, str(data))
        start_y += 10

        for info in details['message']:
            if info['label'] == "":
                # an empty label acts as a vertical spacer
                start_y += 5
            else:
                try:
                    if info['label'] in ["LATITUDE", "LONGITUDE"]:
                        info['value'] = self.unit_conversion.to_degrees(
                            info['label'], info['value'])
                    elif info['label'] == "HEADING":
                        info['value'] = str(int(float(info['value']))) + "°"
                except Exception:
                    # unparsable value: render the label with an empty value
                    info['value'] = ""

                data = info['label'] + ": " + info['value']
                pdf.text(10, start_y, str(data))
                start_y += 5

        start_y += 5

        # TIME AND DATE EMAIL SEND
        utc = datetime.utcfromtimestamp(
            details['created_on']).strftime('%A, %d %B %Y %I:%M:%S %p')
        date_send = utc + " UTC"
        data = "DATE SENT: " + str(date_send)
        pdf.text(10, start_y, str(data))
        start_y += 5

        # EMAIL RECIPIENT
        data = "RECIPIENT: " + details['email']
        pdf.text(10, start_y, str(data))
        start_y += 5

    def render_header_data(self, pdf, header):
        """Render Header Data (roughly centered page title)."""
        # TITLE
        pdf.set_font('Courier', 'B', 16.5)
        top_width = pdf.get_string_width(header)
        pdf.text((top_width / 2) + 65, 12, header)

    def render_footer_data(self, pdf):
        """Render Footer Data (sets the footer font only)."""
        pdf.set_font('Arial', '', 8)
class DeviceOverview(Common):
    """Class for DeviceOverview

    Summarises alarm trigger activity per device of a vessel, optionally
    adding historical counts from alarm_data for week/month/quarter/annual
    formats.
    """

    # INITIALIZE
    def __init__(self):
        """The Constructor for DeviceOverview class"""
        self.postgres = PostgreSQL()
        self.couch_query = Queries()
        self.unit_conversion = UnitConversion()
        self.calc = calculate_alarm_trigger.CalculateAlarmTrigger()
        super(DeviceOverview, self).__init__()

    def device_overview(self):
        """
        This API is for Getting OBU Summary per Vessel
        ---
        tags:
          - Alarm OBU Summary
        produces:
          - application/json
        parameters:
          - name: token
            in: header
            description: Token
            required: true
            type: string
          - name: userid
            in: header
            description: User ID
            required: true
            type: string
          - name: vessel_id
            in: query
            description: Vessel ID
            required: true
            type: string
          - name: format
            in: query
            description: Epoch Start Format
            required: false
            type: string
        responses:
          500:
            description: Error
          200:
            description: Alarm Device Overview
        """
        data = {}

        # GET DATA
        token = request.headers.get('token')
        userid = request.headers.get('userid')
        vessel_id = request.args.get('vessel_id')
        epoch_format = request.args.get('format')

        # CHECK TOKEN
        if not self.validate_token(token, userid):
            data['alert'] = "Invalid Token"
            data['status'] = 'Failed'
            return self.return_data(data)

        alarm_types = self.get_alarm_types()
        ats = self.get_alarm_trigger()
        devices = self.couch_query.get_all_devices(vessel_id)

        # window for "today's" trigger evaluation: start of day .. now
        standard_time = self.epoch_day(time.time())
        epoch_time = time.time()

        temp_data = []
        start_date = self.get_start_date(epoch_format)

        # get_start_date returns 0 for unknown formats; "day"/"hours" are
        # accepted without a historical start date
        if not start_date and epoch_format not in ["day", "hours"]:
            data['alert'] = "Invalid format!"
            data['status'] = 'Failed'
            return self.return_data(data)

        for device in devices:
            # skip pseudo devices that carry no alarm data
            if device['doc']['device'] in [
                    'PARAMETERS', 'NTWCONF', 'NTWPERF1'
            ]:
                continue

            # one summary row per device, counters keyed by alarm type name
            row = {}
            row['device'] = device['doc']['device']
            row['name'] = device['doc']['device']
            row['Alert'] = 0
            row['Critical'] = 0
            row['Warning'] = 0
            row['Info'] = 0
            row['Debug'] = 0

            for atrigger in ats:
                trigger_type = self.get_alarm_type_name(
                    alarm_types, atrigger['alarm_type_id'])
                at_id = atrigger['alarm_trigger_id']
                device_id = device['id']

                datas = self.calc.calculate_trigger([at_id],
                                                    standard_time,
                                                    epoch_time,
                                                    vessel_id=vessel_id,
                                                    device_id=device_id)

                # calculate_trigger returns a string sentinel on failure
                if not datas == "No Alarm Trigger found.":
                    datas_index_0 = datas[0]
                    len_datas = datas_index_0['results']

                    if len_datas:
                        # trigger fired within today's window: flag it
                        row[trigger_type] = 1

                if epoch_format in ['week', 'month', "quarter", 'annual']:
                    # add the stored historical count for the window
                    # NOTE(review): this query is not filtered by
                    # alarm_trigger_id, so every trigger of the same type
                    # adds the device's full count again — confirm intent.
                    sql_str = "SELECT COUNT(alarm_trigger_id) FROM alarm_data "
                    sql_str += "WHERE device_id='{0}' ".format(device_id)
                    sql_str += "AND epoch_date > {0} ".format(start_date)
                    sql_str += "AND epoch_date < {0}".format(epoch_time)

                    res = self.postgres.query_fetch_one(sql_str)
                    row[trigger_type] = row[trigger_type] + res['count']

            temp_data.append(row)

        final_data = {}
        final_data['data'] = temp_data
        final_data['status'] = 'ok'

        return self.return_data(final_data)

    def get_alarm_trigger(self):
        """Return Alarm Trigger rows where alarm_enabled is true."""
        sql_str = "SELECT * FROM alarm_trigger WHERE alarm_enabled=true"
        res = self.postgres.query_fetch_all(sql_str)

        return res

    def get_alarm_types(self):
        """Return Alarm Types wrapped as {'rows': [...]}."""
        # DATA
        sql_str = "SELECT * FROM alarm_type"
        datas = self.postgres.query_fetch_all(sql_str)

        data = {}
        data['rows'] = datas

        return data

    def get_alarm_type_name(self, a_types, alarm_type_id):
        """Return Alarm Type Name, or 0 when the id is unknown."""
        for a_type in a_types['rows']:
            if a_type['alarm_type_id'] == alarm_type_id:
                return a_type['alarm_type']

        return 0

    def get_start_date(self, type_format):
        """Return Start Date (epoch) for the given format, 0 if unknown.

        NOTE(review): the membership test below is case-sensitive while
        the comparisons use .lower(), so e.g. "Week" returns 0 early —
        confirm whether mixed case should be accepted.
        """
        start_date = 0

        if type_format not in ["annual", "quarter", "week", "month"]:
            return 0

        if type_format.lower() == "week":
            start_date = self.days_update(time.time(), 7)
        elif type_format.lower() == "month":
            start_date = self.datedelta(time.time(), days=1, months=1)
        elif type_format.lower() in ["annual", "quarter"]:
            start_date = self.datedelta(time.time(), days=1, years=1)

        return start_date
def __init__(self):
    """The Constructor for Delete class"""
    # PostgreSQL helper used by the delete queries
    self.postgres = PostgreSQL()
    super(Delete, self).__init__()