def publish(self, data):
    # Publish a message to the configured exchange/routing key.
    try:
        self.channel.basic_publish(exchange=self.exchange,
                                   routing_key=self.routing_key,
                                   body=data)
    except KeyboardInterrupt as err:
        logger.error('%s %s', err, err.args)
def connect(self):
    # Open a blocking connection to RabbitMQ and create a channel.
    try:
        self.connection = pika.BlockingConnection(
            pika.ConnectionParameters(host=self.host))
        self.channel = self.connection.channel()
    except Exception as err:
        logger.error('%s %s', err, err.args)
def api_check(self):
    try:
        logger.info('sending request to {}'.format(self.name))
        resource = '/'
        if not resource.startswith('/'):
            resource = '/' + resource
        conn = httplib.HTTPConnection(self.address, self.port)
        try:
            logger.info('HTTP connection created successfully')
            conn.request('GET', resource)
            logger.info('sent request for %s successfully' % resource)
            response = conn.getresponse()
            logger.info('response status: %s' % response.status)
        except socket.error as e:
            logger.error('HTTP connection failed: %s' % e)
            return False
        finally:
            conn.close()
            logger.info('HTTP connection closed successfully')
        if response.status in [200, 301]:
            logger.info('got response from {} successfully'.format(self.name))
            return True
        else:
            return False
    except Exception as e:
        logger.error(traceback.format_exc())
def insert_one(self, data):
    logger.debug(data)  # was a bare print(); log the document instead
    try:
        self.collection.insert_one(data)
    except Exception as err:
        logger.error('%s %s', err, err.args)
        sys.exit()
def consume(self):
    try:
        # Note: the positional order (callback, queue) and no_ack match the pre-1.0 pika API.
        self.channel.basic_consume(self.callback, self.queue, no_ack=True)
        self.channel.start_consuming()
    except (Exception, KeyboardInterrupt) as err:
        self.channel.stop_consuming()
        self.close()
        logger.error('%s %s', err, err.args)
def connect(self):
    # Connect to MongoDB and bind the target database and collection.
    try:
        self.client = pymongo.MongoClient(self.host, self.port)
        self.db = self.client[self.dbName]
        self.collection = self.db[self.collectionName]
    except Exception as err:
        logger.error('%s %s', err, err.args)
        sys.exit()
def validate_properties(properties):
    try:
        main_schema(properties)
        logger.info('Properties JSON schema is valid')
        return True
    except MultipleInvalid as ex:
        logger.error(ex)
        return False
def decompressedFile(self):
    # Buffer the gzip-compressed response in memory and wrap it for streaming reads.
    try:
        compressedFile = io.BytesIO()
        compressedFile.write(self.sourceUrl.read())
        compressedFile.seek(0)
        self.uncompressedFile = gzip.GzipFile(fileobj=compressedFile, mode='rb')
    except BlockingIOError as err:
        logger.error('%s %s', err, err.args)
def validate_tag(tag):
    if not tag:
        return tag
    if tag.keys() == {'name', 'clazz', 'child_tag'}:
        return tag
    msg = 'Not a valid value for dictionary value'
    logger.error(msg)
    raise ValueError(msg)
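# Hedged usage sketch (illustrative values only): validate_tag passes a falsy
# tag through unchanged, accepts a dict with exactly the keys
# {'name', 'clazz', 'child_tag'}, and raises ValueError for anything else.
valid_tag = {'name': 'span', 'clazz': 'highlight', 'child_tag': None}
assert validate_tag(valid_tag) is valid_tag
assert validate_tag({}) == {}        # falsy input is returned as-is
try:
    validate_tag({'name': 'span'})   # missing keys -> ValueError
except ValueError:
    pass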
def prossessing(self):
    line = str(self.uncompressedFile.readline(), 'utf-8')
    while line:
        jsonline = json.loads(line)
        if jsonline['repo']['name'] in repo_name:
            try:
                self.broker.publish(line)
            except Exception as err:
                logger.error(err)
        line = str(self.uncompressedFile.readline(), 'utf-8')
def memory_check():
    try:
        mem = Memory()
        percent_metric = float(config.get("memory", "percent"))
        if mem.percent > percent_metric:
            logger.warning(memory_usage_warning +
                           'memory usage: {:.2f} %\n'.format(mem.percent))
        logger.info(mem)
    except Exception as e:
        logger.error(traceback.format_exc())
def work(self):
    for elt in self.callURL:
        try:
            self.uploadFile(elt["url"])
            self.decompressedFile()
            self.prossessing()
        except Exception as err:
            self.broker.close()
            logger.error(err)
    self.broker.close()
def import_nodes_from_csv_files(csv_node_path, separator=','):
    file_names = read_file_names_from_directory_filtered_by_suffix(
        csv_node_path, ".csv")
    for file_name in file_names:
        url = "file://" + csv_node_path + file_name
        query = build_query_for_create_or_update(file_name, url, separator)
        try:
            graph.cypher.execute(query)
        except py2neo.cypher.error.schema.ConstraintViolation as constraintViolation:
            logger.error(constraintViolation)
    return file_names
def get_cpu_percent(self):
    try:
        result = subprocess.Popen("mpstat 1 1 | tail -1", shell=True,
                                  stdout=subprocess.PIPE,
                                  stderr=subprocess.STDOUT)
        idle = float(result.stdout.readline().strip('\n').split()[-1])
        self.percent = round(100 - idle, 2)
        assert type(self.percent) == float
        return self.percent
    except Exception as e:
        logger.error(traceback.format_exc())
def get_dataset_df(self, text_processor=None):
    """
    Returns pandas dataframe, label to id mapping

    INDEX   TEXT                  LABELS
    0       "Example sentence"    [-1, -2, 2, 0, 1]

    -2 -> label not present in the text
    -1 -> negative sentiment
     0 -> neutral sentiment
     1 -> positive sentiment
     2 -> ambiguous sentiment

    Label to ID mapping maps index of label list to the label name
    """
    if text_processor is None:
        text_processor = self.text_processor
    dataset_name = self.dataset_info["name"]
    if dataset_name == "Twitter":
        return parsing._parse_twitter(self.dataset_path, text_processor)
    elif dataset_name in ("SemEval16", "FourSquared"):
        return parsing._parse_sem_eval_16_type(self.dataset_path, text_processor)
    elif dataset_name == "MAMS_ACSA":
        return parsing._parse_sem_eval_14_type(self.dataset_path, text_processor,
                                               label_type="category")
    elif dataset_name in ("SemEval14", "MAMS_ATSA", "SamsungGalaxy"):
        return parsing._parse_sem_eval_14_type(self.dataset_path, text_processor)
    elif dataset_name in ("20NG", "MR", "Ohsumed", "R8", "R52"):
        return parsing._parse_text_gcn_type(self.dataset_path)
    else:
        logger.error("{} dataset not yet supported".format(dataset_name))
        return NotImplemented
def get_upload(self):
    try:
        if not os.path.isfile(self.loadavg_path):
            raise Exception('{} does not exist.'.format(self.loadavg_path))
        with open(self.loadavg_path, 'r') as f:
            result = f.readline()
        upload = result.split()
        self.upload["1m"] = float(upload[0])
        self.upload["5m"] = float(upload[1])
        self.upload["15m"] = float(upload[2])
        assert type(self.upload) == dict
        return self.upload
    except Exception as e:
        logger.error(traceback.format_exc())
def validate_time(time_str):
    try:
        splitted_time_str = time_str.split(":")
        hours = int(splitted_time_str[0])
        minutes = int(splitted_time_str[1])
    except (ValueError, IndexError):
        msg = "Please use the right format. The time should be given as 'hours:minutes'."
        logger.error(msg)
        raise ValueError(msg)
    if 0 <= hours < 24 and 0 <= minutes < 60:
        return time_str
    else:
        msg = ("Hours should be between 0 and 23 inclusive. "
               "Minutes should be between 0 and 59 inclusive.")
        logger.error(msg)
        raise ValueError(msg)
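# Hedged usage sketch for validate_time (example strings are illustrative):
# a well-formed 'hours:minutes' value is returned unchanged, while malformed
# or out-of-range values raise ValueError.
assert validate_time("09:30") == "09:30"
for bad_value in ("24:00", "12:60", "noon"):
    try:
        validate_time(bad_value)
    except ValueError:
        pass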
def port_request_check(self):
    try:
        s = socket.socket()
        # logger.info('Attempting to connect to %s on port %s' % (self.name, self.port))
        try:
            s.connect((self.address, self.port))
            logger.info("Connected to %s on port %s" % (self.name, self.port))
            return True
        except socket.error as e:
            logger.error("Connection to %s on port %s failed: %s" %
                         (self.name, self.port, e))
            return False
    except Exception as e:
        logger.error(traceback.format_exc())
def get_disk(self):
    try:
        main_dir = config.get('disk', 'main_dir')
        cmd = 'df {} |tail -1'.format(main_dir)
        result = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE,
                                  stderr=subprocess.STDOUT)
        line = result.stdout.readline().strip('\n')
        free = line.split()
        self.size = int(free[1])
        self.used = int(free[2])
        self.avail = int(free[3])
        self.percent = int(free[4].strip('%'))
    except Exception as e:
        logger.error(traceback.format_exc())
def get_upload_by_uptime(self):
    try:
        result = subprocess.Popen("uptime", shell=True, stdout=subprocess.PIPE,
                                  stderr=subprocess.STDOUT)
        upload = result.stdout.readline().strip("\n").split(":")[-1].split(",")
        assert len(upload) == 3
        self.upload["1m"] = float(upload[0])
        self.upload["5m"] = float(upload[1])
        self.upload["15m"] = float(upload[2])
        assert type(self.upload) == dict
        return self.upload
    except Exception as e:
        logger.error(traceback.format_exc())
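# Illustrative note (assumes typical `uptime` output; the sample line below is
# hypothetical): the last ':' precedes the load averages, so splitting on ':'
# and then ',' yields the three values stored by get_upload_by_uptime above.
sample = " 10:01:34 up 5 days,  2:10,  3 users,  load average: 0.08, 0.03, 0.05"
print(sample.split(":")[-1].split(","))   # [' 0.08', ' 0.03', ' 0.05']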
def api_check(self):
    try:
        logger.info('sending request to {}'.format(self.name))
        cmd = '`which zkServer.sh` status'
        result = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE,
                                  stderr=subprocess.STDOUT)
        f = result.stdout.read()
        if 'Mode' in f:
            logger.info('got response from {} successfully'.format(self.name))
            return True
        else:
            return False
    except Exception as e:
        logger.error(traceback.format_exc())
def port_listen_check(self):
    try:
        result = subprocess.Popen(self.port_check_cmd, shell=True,
                                  stdout=subprocess.PIPE,
                                  stderr=subprocess.STDOUT)
        line = result.stdout.readline()
        if line and 'LISTEN' in line:
            self.port = int(line.split()[4].split(':')[-1])
            logger.info('{} is listening at {}'.format(self.name, self.port))
            return True
        else:
            logger.warning('{} port is not in LISTEN state'.format(self.name))
            return False
    except Exception as e:
        logger.error(traceback.format_exc())
def process_check(self):
    try:
        cmd = 'ps -ef | grep {} | grep -v grep '.format(self.name)
        result = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE,
                                  stderr=subprocess.STDOUT)
        line = result.stdout.readline()
        if len(line) > 0:
            self.pid = line.split()[1]
            logger.info('{} is running with pid {}'.format(self.name, self.pid))
            return True
        else:
            logger.error('{} is not running'.format(self.name))
            return False
    except Exception as e:
        logger.error(traceback.format_exc())
def get_port(self):
    try:
        cmd = "`which zkServer.sh` status | grep port"
        result = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE,
                                  stderr=subprocess.STDOUT)
        lines = result.stdout.readlines()
        pattern = r"port"
        # Use a separate variable for the regex match so the Popen handle in
        # `result` is not shadowed and the not-found check works when no line matches.
        match = None
        for line in lines:
            match = re.search(pattern, line)
            if match:
                self.port = int(line.split('.')[0].split(':')[-1])
                break
        if match is None:
            raise Exception('port not found in zkServer.sh status')
        return self.port
    except Exception as e:
        logger.error(traceback.format_exc())
def api_check(self):
    try:
        logger.info('sending request to {}'.format(self.name))
        redis_login = config.redis_auth()
        cmd = "`which redis-cli` -p {} -a {} info".format(
            self.port, redis_login['password'])
        result = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE,
                                  stderr=subprocess.STDOUT)
        f = result.stdout.read()
        if 'Server' in f:
            logger.info('got response from {} successfully'.format(self.name))
            return True
        else:
            return False
    except Exception as e:
        logger.error(traceback.format_exc())
def api_check(self):
    try:
        logger.info('sending request to {}'.format(self.name))
        mysql_login = config.mysql_auth()
        cmd = "`which mysql` -P {} -u{} -p{} -e 'select version()'".format(
            self.port, mysql_login['user'], mysql_login['password'])
        result = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE,
                                  stderr=subprocess.STDOUT)
        f = result.stdout.read()
        if 'version' in f:
            logger.info('got response from {} successfully'.format(self.name))
            return True
        else:
            return False
    except Exception as e:
        logger.error(traceback.format_exc())
def cpu_check():
    try:
        cpu = Processor()
        cpu_metric = (cpu.upload['1m'] + cpu.upload['5m']) / 2.00
        average = cpu_metric / cpu.cpu_count
        assert type(average) == float
        thread_per_process = float(config.get("cpu", "thread_per_cpu"))
        percent_metric = float(config.get("cpu", "percent"))
        if average > thread_per_process:
            logger.warning(cpu_overload_warning +
                           'upload: {}\n'.format(average))
        if cpu.percent > percent_metric:
            logger.warning(cpu_usage_warning +
                           'cpu usage: {:.2f} %\n'.format(cpu.percent))
        logger.info(cpu)
        # print('cpu_metric is : ' + str(cpu_metric))
    except Exception as e:
        logger.error(traceback.format_exc())
def disk_check():
    try:
        disk = Disk()
        logger.info(disk)
        percent_metric = float(config.get("disk", "percent"))
        available_metric = float(config.get("disk", "available_G"))
        assert type(disk.percent) == int
        assert type(disk.avail) == int
        assert type(available_metric) == float
        if disk.percent > percent_metric:
            logger.warning(disk_percent_warning +
                           'disk usage: {}%\n'.format(disk.percent))
        if disk.avail < available_metric * 1024 * 1024:
            unit, index = get_power_index()
            logger.warning(disk_available_warning +
                           'available disk: {} {}b\n'.format(
                               disk.avail / (1024 ** index), unit))
        return disk
    except Exception as e:
        logger.error(traceback.format_exc())
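# Illustrative note (assumes `df` reports sizes in 1 KiB blocks, as parsed by
# get_disk above): `available_metric * 1024 * 1024` converts the configured
# "available_G" threshold from GiB to KiB before comparing it with disk.avail.
available_metric = 5.0                        # hypothetical config value, GiB
print(available_metric * 1024 * 1024)         # 5242880.0 KiB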
def get_io_info(self):
    try:
        iostat = list()
        cmd = 'iostat -xy 1 1'
        result = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE,
                                  stderr=subprocess.STDOUT)
        iostats = result.stdout.readlines()
        for line in iostats:
            if self.device in line:
                iostat = line.split()
                break
        if len(iostat) != 0:
            # Note: 'await' is a reserved keyword in Python 3.7+; this attribute
            # name only works on the Python 2 interpreter this module targets.
            self.await = float(iostat[9])
            self.util = float(iostat[13])
        else:
            raise Exception("device {} not found.".format(self.device))
    except Exception as e:
        logger.error(traceback.format_exc())
def get_mem_info(self):
    if not os.path.isfile(self.mem_info_path):
        raise Exception('{} does not exist.'.format(self.mem_info_path))
    try:
        with open(self.mem_info_path, 'r') as f:
            lines = f.readlines()
        for line in lines:
            if 'MemTotal' in line:
                self.total = int(line.split()[1])
            if 'MemFree' in line:
                self.free = int(line.split()[1])
            if 'MemAvailable' in line:
                self.avail = int(line.split()[1])
            if 'Buffers' in line:
                self.buffer = int(line.split()[1])
            if 'Cached' in line:
                self.cached = int(line.split()[1])
        self.percent = 100.00 - (round(self.avail, 2) / self.total) * 100
        assert type(self.percent) == float
    except Exception as e:
        logger.error(traceback.format_exc())
def service_connection(key_s, mask_s, sock_c, pre_calibrated, angle_threshold,
                       ground_height, gsd):
    sock_s = key_s.fileobj
    data_s = key_s.data
    if mask_s & selectors.EVENT_READ:
        try:
            taskID, frameID, latitude, longitude, altitude, roll, pitch, yaw, camera, img = receive(sock_s)
            if taskID is None:
                logger.debug("No data received.")
                return
            start_time = time.time()

            # 1. Set IO
            my_drone = drones.Drones(make=camera)

            # 2. System calibration & CCS converting
            init_eo = np.array([longitude, latitude, altitude, roll, pitch, yaw])
            init_eo[:2] = geographic2plane(init_eo, 3857)
            if pre_calibrated:
                init_eo[3:] *= np.pi / 180
                adjusted_eo = init_eo
            else:
                my_georeferencer = georeferencers.DirectGeoreferencer()
                adjusted_eo = my_georeferencer.georeference(my_drone, init_eo)

            # 3. Inference
            timecheck1 = time.time()
            result = inference_detector(model, img)
            infer_time = time.time() - timecheck1
            logger.info("infer time: %.2f" % (round(infer_time, 3)))
            # contour (segmentation)
            object_coords = server_det_masks(result, class_names=CLASSES,
                                             score_thr=score_thr)
            # bounding box
            # object_coords = server_det_bboxes(result, score_thr=0)
            if object_coords:
                logger.debug(object_coords)

                # 4. Geo-referencing
                img_rows = img.shape[0]
                img_cols = img.shape[1]
                pixel_size = my_drone.sensor_width / img_cols  # mm/px
                R_CG = Rot3D(adjusted_eo).T
                inference_metadata = []
                for inference_px in object_coords:
                    inference_world = georef_inference(inference_px[:-1], img_rows,
                                                       img_cols, pixel_size,
                                                       my_drone.focal_length,
                                                       adjusted_eo, R_CG,
                                                       ground_height)
                    inference_metadata.append(
                        # create_inference_metadata(inference_px[-1], str(inference_px), inference_world))
                        create_inference_metadata(inference_px[-1],
                                                  inference_px[:-1],
                                                  inference_world))
            else:
                logger.debug(object_coords)
                inference_metadata = []

            send(frameID, taskID, frameID, 0, "", inference_metadata, "", sock_c)
            logger.info("Sending completed! Elapsed time: %.2f" %
                        (time.time() - start_time))
        except Exception as e:
            import traceback
            logger.error(traceback.format_exc())
            logger.warning("closing connection to %s" % (json.dumps(data_s.addr)))
            sock_c.close()
            global client_connection
            client_connection = 1
            sel_server.unregister(sock_s)
            sock_s.close()