class ParserView(QTableWidget):
    """Table widget that displays log entries emitted by a background Parser.

    The Parser runs on its own QThread and pushes rows to the view through
    its ``data_ready`` signal.
    """

    def __init__(self, parent, log_file):
        super().__init__(parent)
        self.log_file = log_file
        self.columns = 2
        self.column_names = ['timestamp', 'text']
        self.parser = Parser(self.log_file)
        self.thread = QThread()
        self.create_parser()
        self.create_gui()

    def create_parser(self):
        """Move the parser onto the worker thread and start it."""
        self.parser.data_ready.connect(self.add_row)
        self.parser.moveToThread(self.thread)
        self.thread.started.connect(self.parser.run)
        self.thread.start()

    def create_gui(self):
        """Configure the table's columns, headers and edit policy, then show it."""
        self.setColumnCount(self.columns)
        self.horizontalHeader().setSectionResizeMode(QHeaderView.ResizeToContents)
        self.setHorizontalHeaderLabels(self.column_names)
        self.setEditTriggers(QTableWidget.NoEditTriggers)
        self.show()

    # TODO: split date and time
    def add_row(self, data):
        """Append one table row from a parsed-entry mapping (slot for data_ready)."""
        row = self.rowCount()
        self.insertRow(row)
        cell_values = list(data.values())
        for col in range(self.columnCount()):
            item = QTableWidgetItem(str(cell_values[col]))
            self.setItem(row, col, item)
def __init__(self, directory): this = path.relpath(__file__) # this files path root = path.split(this)[0] # removing <filename>.py root = path.join(root, directory) if not path.isdir(root): print(f"# '{root}' is not found. Creating...") os.mkdir(root) self.root = root self.parser = Parser()
def __init__(self, parent, log_file):
    """Build the view: wire a Parser to a worker QThread and set up the table GUI."""
    super().__init__(parent)
    self.log_file = log_file  # path of the log file being displayed
    self.columns = 2
    self.column_names = ['timestamp', 'text']
    self.parser = Parser(self.log_file)
    self.thread = QThread()  # worker thread that will host the parser
    self.create_parser()
    self.create_gui()
def load_class_from_dir(path, exclude='__init__.py'):
    """Load classes from files in *path* named in snake_case for CamelCase classes.

    Every ``*.py`` file (except *exclude*) is imported, and the class whose
    name is the CamelCase form of the file name is collected.

    Returns a dict mapping class name -> class object.
    """
    relpath = os.path.relpath(path)
    result = {}
    for filename in os.listdir(relpath):
        if not (filename.endswith('.py') and filename != exclude):
            continue
        module_name = filename[:-3]  # drop the '.py' suffix (was shadowing `filename`)
        filepath = os.sep.join((relpath, module_name))
        namespace = Parser.parse_slash_to_dot(filepath)
        classname = Parser.parse_dash_to_dump(module_name)
        # fromlist must be a non-empty sequence so __import__ returns the leaf
        # module instead of the top-level package (True only worked by accident).
        module = __import__(namespace, fromlist=[classname])
        result[classname] = getattr(module, classname)
    return result
def __send_file(self, line, client, handler):
    """Send the requested file to *client*.

    Protocol: send a "<bucket> <file> <size>" header until the client
    acknowledges it, then stream the raw bytes in BUFFER_SIZE chunks and wait
    for a final confirmation. Returns a human-readable status string (None if
    the transfer was aborted mid-stream).
    """
    response = ""
    bucket_name, file_name = Parser().args(line)
    data = handler.down(bucket_name, file_name)
    if data:
        size = len(data)
        header = f"{bucket_name} {file_name} {size}"
        sent = False
        while not sent:  # resending until get confirmation
            client.send(header.encode())
            sent = client.recv(self.BUFFER_SIZE)
        print(f"# Sending {file_name}")
        with BytesIO(data) as buffer:
            while True:
                bytes_read = buffer.read(self.BUFFER_SIZE)
                if not bytes_read:
                    break
                try:
                    client.sendall(bytes_read)
                except OSError:
                    # Narrowed from a bare `except:` which also swallowed
                    # KeyboardInterrupt/SystemExit; socket failures are OSError.
                    print("# Error while sending the file. Please, retry.")
                    return
        print("# Sent")
        client.recv(self.BUFFER_SIZE)
        response = "Success!"
    else:
        client.send("not found".encode())
        response = f"Not found: {bucket_name}/{file_name}"
    return response
def __response(self, line, client):
    """Validate an incoming command line and dispatch it to its handler.

    Returns the textual response to send back to the client.
    """
    parser = Parser()
    if not parser.parse(line):
        return f"Command corrupted: {line}"
    handler = CommandHandler(self.__root_directory)
    instruction = parser.instruction(line)
    if instruction == "up":
        return self.__receive_file(client, handler)
    if instruction == "down":
        return self.__send_file(line, client, handler)
    return handler.execute(line)
class ParserMapper(mr.Mapper):
    """Mapper that rewrites each labeled input line with parser-normalized tokens."""

    def __init__(self, rank, file_list, output_dir, params):
        super(ParserMapper, self).__init__(rank, file_list, output_dir, params)
        self.parser = Parser()

    def process(self, line):
        """Parse one "<label> <text...>" line and write "<label> <words...>"."""
        tokens = line.strip().split()
        label = tokens[0]
        text = ' '.join(tokens[1:])
        words = self.parser.parse(text)
        self.output.write(f"{label} {' '.join(words)}\n")

    def done(self):
        """Release the mapper's output file."""
        self.output.close()
def weighted_mean_squared_error(y_true, y_pred):
    """Mean squared error with per-output weights.

    The first four outputs are weighted 20x relative to the last three.
    NOTE(review): this loss is defined but the model below is compiled with
    plain 'mean_squared_error' — confirm which one is intended.
    """
    difference = y_pred - y_true
    weights = array([20, 20, 20, 20, 1, 1, 1])
    return K.mean(K.square(difference * weights), axis=-1)


# Simple 21 -> 11 -> 7 fully-connected regression network.
inputLayer = Input(shape=(21, ))
hiddenLayer1 = Dense(11)(inputLayer)
outputLayer = Dense(7)(hiddenLayer1)
model = Model(inputs=inputLayer, outputs=outputLayer)
model.compile(optimizer='adam', loss='mean_squared_error', metrics=['accuracy'])

p = Parser()
dataFileTrain = sys.argv[1]  # training data file
dataFileTest = sys.argv[2]   # test data file — not used below, TODO confirm
inputDataTrain = array(p.Parse(dataFileTrain))
print(inputDataTrain.shape)
outputDataTrain = array(p.ParseSpine(dataFileTrain))
print(outputDataTrain.shape)

# Positional args: batch_size=32, epochs=2000.
history = model.fit(inputDataTrain, outputDataTrain, 32, 2000)
print(model.outputs)
# Plain loop: the original list comprehension was used only for its print
# side effect and allocated a throwaway list.
for n in K.get_session().graph.as_graph_def().node:
    print(n.name)

# summarize history for loss
plt.plot(history.history['loss'])
plt.title('model loss')
[('--lr',), {'type': float, 'default': 0.001, 'help': 'Learning rate'}], [('--lrs',), {'type': int, 'default': [30, 60, 90], 'nargs': '+', 'help': 'Learning rate schedule'}], [('--lrd',), {'type': float, 'default': 0.1, 'help': 'Learning rate decay'}], [('--l2',), {'type': float, 'default': 0.0, 'help': 'L2 regularization'}], [('-d',), {'type': float, 'default': 0.0, 'help': 'Dropout probability'}], [('--dataset',), {'type': str, 'default': 'mnist', 'help': 'Dataset to use'}], [('--root',), {'type': str, 'default': '/mnt/DATA/TorchData', 'help': 'Location of the dataset'}], [('--save_path', '-s'), {'type': str, 'default': '/mnt/DATA/ProjectsResults/contestai', 'help': 'Results path'}], [('--batch_size', '-bs'), {'type': int, 'default': 64, 'help': 'Batch size'}], [('--epochs', '-e'), {'type': int, 'default': 120, 'help': 'Number of epochs'}], [('--log_period', '-lp'), {'type': int, 'default': 20, 'help': 'Logging period in number of epochs'}], #[('--optimizer', '-opt'), {'type': str, 'default': 'adam', 'help': 'Optimizer to use'}], [('--pretrained', '-pr'), {'type': int, 'default': True, 'help': 'Use the pretrained model?'}] ] argparser = Parser("Beantech challenge") argparser.add_arguments(args) opt = argparser.get_dictionary() dirname = build_dirname(opt, ('lr', 'batch_size')) savepath = make_save_directory(opt, dirname) vis = Visdom(port=8098) vm = VisualManager(vis, 'contestai') W = 1280 H = 180 path_training_ok = '/mnt/DATA/beantech_contestAI/Dataset2/campioni OK' path_training_ko = '/mnt/DATA/beantech_contestAI/Dataset2/campioni KO' path_validation_ok = '/mnt/DATA/beantech_contestAI/Dataset1/campioni OK'
class CommandHandler:
    """Implements the action behind every command sent to the Server socket."""

    def __init__(self, directory):
        """Resolve the storage root next to this source file, creating it if missing."""
        this = path.relpath(__file__)  # path of this source file
        root = path.split(this)[0]  # removing <filename>.py
        root = path.join(root, directory)
        if not path.isdir(root):
            print(f"# '{root}' is not found. Creating...")
            os.mkdir(root)
        self.root = root
        self.parser = Parser()

    def execute(self, line):
        """Map the incoming command to its proper instruction and execute it.

        Returns the response text; an empty string for unknown commands.
        """
        command = self.parser.instruction(line)
        args = self.parser.args(line)
        response = ""
        if command == "content":
            response = self.__content(args[0])
        elif command == "create":
            response = self.__create(args[0])
        elif command == "drop":
            response = self.__drop(args[0])
        elif command == "delete":
            response = self.__delete(args[0], args[1])
        elif command == "list":
            response = self.__list()
        return response

    def __content(self, bucket_name):
        """Lists the files stored inside the given bucket."""
        # (docstring fixed: it was a copy-paste of __list's bucket description)
        bucket_path = path.join(self.root, bucket_name)
        if path.isdir(bucket_path):
            files = os.listdir(bucket_path)
            ios = StringIO()
            ios.write(f"Existing files into '{bucket_name}':\n")
            # Plain loop: a list comprehension used only for its print side
            # effect allocated a throwaway list.
            for name in files:
                print(f" -> {name}", file=ios)
            return ios.getvalue()
        return f"Not found: {bucket_name}"

    def __create(self, bucket_name):
        """Creates a new bucket into the root directory."""
        bucket_path = path.join(self.root, bucket_name)
        if not path.isdir(bucket_path):
            os.mkdir(bucket_path)
            return "Success!"
        return "Already exists. Dismissing..."

    def __delete(self, bucket_name, file_name):
        """Deletes a file from a bucket if it exists."""
        bucket_path = path.join(self.root, bucket_name)
        if path.isdir(bucket_path):
            file_name = path.basename(file_name)  # avoids going to any other dirs
            file_path = path.join(bucket_path, file_name)
            if path.isfile(file_path):
                os.remove(file_path)
                return "Success!"
            return f"Not found: {file_name}"
        return f"Not found: {bucket_name}"

    def __drop(self, bucket_name):
        """Removes a bucket and its content."""
        bucket_path = path.join(self.root, bucket_name)
        if path.isdir(bucket_path):
            shutil.rmtree(bucket_path)
            return "Success!"
        return f"Not found: {bucket_name}"

    def __list(self):
        """Lists the different available buckets."""
        buckets = os.listdir(self.root)
        ios = StringIO()
        ios.write("Existing buckets:\n")
        for name in buckets:  # plain loop instead of a side-effect comprehension
            print(f" -> {name}", file=ios)
        return ios.getvalue()

    def down(self, bucket_name, file_name):
        """Looks for a file and returns its raw bytes, or False when missing."""
        bucket_path = path.join(self.root, bucket_name)
        if path.isdir(bucket_path):
            file_name = path.basename(file_name)  # avoids going to any other dirs
            file_path = path.join(bucket_path, file_name)
            if path.isfile(file_path):
                with open(file_path, 'rb') as f:
                    return f.read()
            return False
        return False

    def up(self, bucket_name, file_name, incoming_bytes):
        """Saves the given data (a BytesIO-like object) under bucket_name/file_name."""
        bucket_path = path.join(self.root, bucket_name)
        if path.isdir(bucket_path):
            name = path.join(bucket_path, file_name)
            with open(name, 'wb') as f:
                f.write(incoming_bytes.getvalue())
            return "Success!"
        return f"Not found: {bucket_name}"
def __init__(self, slug, *args, **kwargs):
    """Store the article slug and load its metadata.

    Raises ArticleError (chained to the underlying ParserError) when the
    metadata cannot be fetched.
    """
    self.slug = slug
    try:
        self.meta = ObjDict(Parser.get_meta(slug))
    except ParserError as e:
        # BUGFIX: `except ParserError, e:` is Python 2 syntax and a
        # SyntaxError under Python 3; chain the cause for easier debugging.
        raise ArticleError("Cannot get data for article") from e
def __init__(self): self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) #http_bind_address self.BUFFER_SIZE = 1024 self.parser = Parser()
class Client:
    """Interactive TCP client for the bucket-storage server.

    Reads commands from stdin, validates them with Parser, and speaks the
    server's header + chunked-bytes file-transfer protocol.
    """

    def __init__(self):
        self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)  # IPv4 TCP socket
        self.BUFFER_SIZE = 1024  # bytes per send/recv chunk
        self.parser = Parser()

    def run(self, address=None, port=9000):
        """
        Tries to connect to the server deployed on the given access point
        (address:port) and start a session with it.
        """
        # set default address if it's not given
        address = socket.gethostname() if address is None else address
        self.__connect(address, port)
        self.__session()

    def __connect(self, address, port):
        """Connect the socket, exiting the process on failure."""
        print(f"# Connecting to {address}:{port}")
        try:
            self.socket.connect((address, port))
            print("# Connected!")
            # BUGFIX: __session() was also started here, duplicating the call
            # made by run() right after __connect() returns; removed.
        except ConnectionRefusedError:
            print(
                "# Connection refused, please make sure there's a server running."
            )
            sys.exit(1)
        except InterruptedError as e:
            print(f"# Connection interrupted: {e}")
            sys.exit(1)

    def __session(self):
        """Main REPL: read a command, validate it, execute it, until disconnect."""
        while True:
            try:
                msg = input("> ").strip()
                if msg and self.parser.parse(msg):
                    self.__execute(msg)
                else:
                    print(
                        f"# Invalid command: '{msg}', type 'help' if you need a hand."
                    )
            except (KeyboardInterrupt, EOFError):
                print("# Disconnected!")
                self.socket.close()
                sys.exit()
            except (InterruptedError, ConnectionError):
                print("# Disconnected due interrupted connection. :(")
                self.socket.close()
                sys.exit(1)

    def __execute(self, msg):
        """Dispatch a validated command line to the proper handler."""
        instruction = self.parser.instruction(msg)
        if msg == "quit":
            raise KeyboardInterrupt  # handled in __session as a clean exit
        elif msg == "help":
            print(self.__help())
        elif instruction == "up":
            self.__send_file(msg)
        elif instruction == "down":
            self.__receive_file(msg)
        else:
            self.socket.send(msg.encode())
            self.__listen(msg)

    def __send_file(self, msg):
        """
        Sends the file which is in the given path.
        """
        self.socket.send(msg.encode())  # advertisement
        bucket_name, file_path = self.parser.args(msg)
        if path.isfile(file_path):
            file_name = path.basename(file_path)
            size = path.getsize(file_path)
            header = f"{bucket_name} {file_name} {size}"
            sent = False
            while not sent:  # resending until get confirmation
                self.socket.send(header.encode())
                sent = self.socket.recv(self.BUFFER_SIZE)
            with open(file_path, 'rb') as f:
                print(f"# Sending {file_name}")
                while True:
                    bytes_read = f.read(self.BUFFER_SIZE)
                    if not bytes_read:
                        break
                    try:
                        self.socket.sendall(bytes_read)
                    except OSError:
                        # Narrowed from a bare `except:` which also swallowed
                        # KeyboardInterrupt/SystemExit.
                        print("# Error while sending the file. Please, retry.")
                        return
            print("# Sent")
        else:
            print(f"# Not found file: {file_path}")

    def __receive_file(self, msg):
        """
        Receives the file which is in the given path.
        """
        self.socket.send(msg.encode())
        header = self.socket.recv(self.BUFFER_SIZE)
        if header:
            line = header.decode('UTF-8').replace('\n', '')
            if line == "not found":  # omit transmission if bucket or file doesn't exist
                return
            self.socket.send("OK".encode())  # sent confirmation of header
            args = line.split()
            if len(args) == 3:
                bucket_name, file_name, file_size = args
                file_size = int(file_size)
                print(f"# Receiving: {file_name}")
                with BytesIO() as incoming_bytes:
                    total_received = 0
                    while total_received < file_size:
                        bytes_read = self.socket.recv(self.BUFFER_SIZE)
                        total_received += incoming_bytes.write(bytes_read)
                        if not bytes_read:
                            # Peer closed before the advertised size arrived.
                            print("# Data corrupted. Dismissing...")
                            return
                    print("# Received")
                    self.socket.send("OK".encode())  # sent confirmation of data
                    # Store the download under ./downloads/<bucket> next to
                    # this source file.
                    this = path.relpath(__file__)  # this file's path
                    this = path.split(this)[0]  # removing <filename>.py
                    down_path = path.join(this, "downloads", bucket_name)
                    if not path.isdir(down_path):
                        print(f"# '{down_path}' not found. Creating...")
                        os.makedirs(down_path)
                    file_path = path.join(down_path, file_name)
                    with open(file_path, 'wb') as f:
                        f.write(incoming_bytes.getvalue())
            else:
                print("# Header corrupted.")
        else:
            print("# Header not found.")

    def __listen(self, msg):
        """Print a single response line received from the server."""
        data = self.socket.recv(self.BUFFER_SIZE)
        if data:
            line = data.decode('UTF-8')  # convert to string (Python 3 only)
            print("< " + line)
        else:
            raise InterruptedError

    def __help(self):
        """Return the user guideline text."""
        # BUGFIX: a missing comma after the CASE-SENSITIVE entry silently
        # concatenated two remarks; typos "<FILE_NAME" and "aphanumeric" fixed.
        lines = [
            "- content <BUCKET_NAME>: Lists the different files inside the <BUCKET_NAME> bucket.\n",
            "- create <BUCKET_NAME>: Creates a new bucket empty and ready to access. If the bucket is already created, the server will return a reject message.\n",
            "- delete <BUCKET_NAME> <FILE_NAME>: Deletes a file stored inside a bucket.\n",
            "- down <BUCKET_NAME> <FILE_NAME>: Downloads a file from a bucket to the client entity.\n",
            "- drop <BUCKET_NAME>: Deletes an existing bucket.\n",
            "- help: Shows this guideline.\n",
            "- list: Lists the different existing buckets.\n",
            "- quit: Breaks the connection to the server.\n",
            "- up <BUCKET_NAME> <FILE_NAME>: Uploads a file to an existing bucket.\n\n",
            "* REMARKS: \n",
            " - The available commands are all CASE-SENSITIVE\n",
            " - <BUCKET_NAME> only accepts alphanumeric, numeric and '_' chars.\n",
            " - <FILE_NAME> accepts the same as <BUCKET_NAME> + an optional alphanumeric extension.\n"
        ]
        ios = StringIO()
        ios.writelines(lines)
        return ios.getvalue()
def post(self):
    """Summarize a conference log: per-topic summaries plus weighted keywords.

    Reads the room's log from Mongo, runs Summary/Keyword extraction per
    topic, and returns a payload whose JSON `data` holds `contents`
    (per-topic summaries) and `keywords` (keyword/value pairs rescaled for
    display). On any failure returns a 500-style payload with the message.
    """
    try:
        # Parse HTTP Body
        parser = Parser(['roomId'])
        args = parser.get_args()

        # Read whole log data
        room_id = args['roomId']
        reader = MongoReader(room_id)
        topics, p_contents = reader.read_topic_n_content()
        print(topics)
        print(p_contents)

        # Per-topic summaries and keywords that go into the response payload.
        contents = []
        keywords = []

        # Abstract summary, keywords
        summary = {}
        keyword = {}

        # (word, POS-tag) pairs ignored during extraction. The summary and
        # keyword extractors used two identical hand-copied literals; the
        # list is now defined once and turned into two independent sets.
        stop_pairs = [('있', 'VV'), ('웃', 'VV'), ('와우', 'IC'),
                      ('시작', 'NNG'), ('협조', 'NNG'), ('하', 'VV'),
                      ('되', 'VV'), ('이', 'VCP'), ('것', 'NNB'),
                      ('들', 'XSN'), ('그', 'MM'), ('수', 'NNB'),
                      ('이', 'NP'), ('보', 'VX'), ('않', 'VX'),
                      ('없', 'VA'), ('나', 'NP'), ('주', 'VV'),
                      ('아니', 'VCN'), ('등', 'NNB'), ('같', 'VA'),
                      ('우리', 'NP'), ('때', 'NNG'), ('년', 'NNB'),
                      ('가', 'VV'), ('한', 'MM'), ('지', 'VX'),
                      ('대하', 'VV'), ('오', 'VV'), ('그렇', 'VA'),
                      ('위하', 'VV'), ('그것', 'NP'), ('두', 'VV'),
                      ('그러나', 'MAJ'), ('못하', 'VX'), ('그런', 'MM'),
                      ('또', 'MAG'), ('더', 'MAG'), ('그리고', 'MAJ'),
                      ('중', 'NNB'), ('씨', 'NNB'), ('지금', 'NNG'),
                      ('그러', 'VV'), ('속', 'NNG'), ('데', 'NNB'),
                      ('안', 'MAG'), ('어떤', 'MM'), ('내', 'NP'),
                      ('다시', 'MAG'), ('이런', 'MM'), ('번', 'NNB'),
                      ('나', 'VX'), ('어떻', 'VA'), ('개', 'NNB'),
                      ('이렇', 'VA'), ('점', 'NNG'), ('좀', 'MAG'),
                      ('잘', 'MAG'), ('이빨', 'NNG')]
        summary_stop_word = set(stop_pairs)
        keyword_stop_word = set(stop_pairs)

        # Extract the summary and keywords for every topic in the log.
        for topic in topics:
            if topic not in p_contents:
                continue
            text = p_contents[topic]
            print(' -*-*- WHOLE-TEXT -*-*- ')
            print(text)
            summary_temp = Summary(text, summary_stop_word).run()
            keyword_temp = Keyword(text, keyword_stop_word).run()
            print(' -*-*- SEMI-RESULT -*-*- ')
            print(f"summary_temp => {summary_temp}")
            print(f"keyword_temp => {keyword_temp}")
            summary[topic] = summary_temp
            keyword[topic] = keyword_temp

            # Per-topic summary entry for the response.
            contents.append({"topic": topic, "content": summary_temp})

            # Per-topic keywords: each raw score becomes an integer
            # percentage of the topic's total score.
            total = sum(keyword_temp.values())
            for key, val in keyword_temp.items():
                keywords.append({
                    "keyword": f"{key[0][0]}",
                    "value": int((val / total) * 100)
                })

        # Rescale all keyword values into a narrow display range.
        total_to_convert = sum(word_val["value"] for word_val in keywords)
        for word_val in keywords:
            word_val['value'] = int(word_val['value'] * (7 / total_to_convert) + 8)

        print('==== Conference Log Summary ====')
        print(contents)
        print(keywords)
        return {
            'status': 200,
            'data': json.dumps({
                "keywords": keywords,
                "contents": contents
            })
        }
    except Exception as e:
        print(e)
        print(traceback.format_exc())
        # NOTE(review): 'status' is the string '500' here but the int 200 on
        # success — confirm what clients expect before unifying.
        return {'status': '500', 'message': f'{e}'}
def __init__(self, rank, file_list, output_dir, params):
    """Initialize the base mapper and attach a Parser used during processing."""
    super(ParserMapper, self).__init__(rank, file_list, output_dir, params)
    self.parser = Parser()