def _modify_node_operation(self, ctx, current_entities):
    """Persist a modification to a node operation and mirror it in memory.

    Writes the rebuilt operation (plus the node's plugins) through the
    storage manager, then applies the same modification to the in-memory
    ``current_entities`` view so both stay consistent.

    :param ctx: deployment-update entity context (ids, breadcrumbs, value)
    :param current_entities: in-memory mapping of node id -> node dict
    :return: the modified entity's id
    """
    rebuilt_operation = utils.create_dict(ctx.modification_breadcrumbs,
                                          ctx.raw_entity_value)
    self.sm.update_node(
        deployment_id=ctx.deployment_id,
        node_id=ctx.raw_node_id,
        **{
            ctx.OPERATIONS: {ctx.operation_id: rebuilt_operation},
            ctx.PLUGINS: ctx.raw_node[ctx.PLUGINS],
        })

    node = current_entities[ctx.raw_node_id]
    crumbs = ctx.modification_breadcrumbs
    if crumbs:
        # Walk to the parent of the leaf key, then overwrite just the leaf.
        parent = utils.traverse_object(
            node[ctx.OPERATIONS][ctx.operation_id], crumbs[:-1])
        parent[crumbs[-1]] = ctx.raw_entity_value
    else:
        # No breadcrumbs: the whole operation entry is replaced.
        node[ctx.OPERATIONS][ctx.operation_id] = ctx.raw_entity_value
    node[ctx.PLUGINS] = ctx.raw_node[ctx.PLUGINS]
    return ctx.entity_id
def modify(self, ctx, current_entities):
    """Persist a modification to a node property and mirror it in memory.

    :param ctx: deployment-update entity context (ids, breadcrumbs, value)
    :param current_entities: in-memory mapping of node id -> node dict
    :return: the modified entity's id
    """
    self.sm.update_node(
        deployment_id=ctx.deployment_id,
        node_id=ctx.raw_node_id,
        **{
            ctx.PROPERTIES: {
                ctx.property_id: utils.create_dict(
                    ctx.modification_breadcrumbs, ctx.raw_entity_value)
            }
        })

    props = current_entities[ctx.raw_node_id][ctx.PROPERTIES]
    crumbs = ctx.modification_breadcrumbs
    if crumbs:
        # Descend to the parent container and overwrite only the leaf key.
        parent = utils.traverse_object(props[ctx.property_id], crumbs[:-1])
        parent[crumbs[-1]] = ctx.raw_entity_value
    else:
        props[ctx.property_id] = ctx.raw_entity_value
    return ctx.entity_id
def _add_node_operation(self, ctx, current_nodes):
    """Persist a newly added node operation and mirror it in memory.

    Stores the new operation (and the node's plugins) via the storage
    manager's ``changes=`` interface, then updates ``current_nodes``.

    :param ctx: deployment-update entity context (ids, breadcrumbs, value)
    :param current_nodes: in-memory mapping of node id -> node dict
    :return: the added entity's id
    """
    added_operation = utils.create_dict(ctx.modification_breadcrumbs,
                                        ctx.raw_entity_value)
    self.sm.update_node(
        deployment_id=ctx.deployment_id,
        node_id=ctx.raw_node_id,
        changes={
            ctx.OPERATIONS: {ctx.operation_id: added_operation},
            ctx.PLUGINS: ctx.raw_node[ctx.PLUGINS],
        })

    node = current_nodes[ctx.raw_node_id]
    crumbs = ctx.modification_breadcrumbs
    if crumbs:
        # Only a nested field changed: walk to its parent and set the leaf.
        parent = utils.traverse_object(
            node[ctx.OPERATIONS][ctx.operation_id], crumbs[:-1])
        parent[crumbs[-1]] = ctx.raw_entity_value
    else:
        node[ctx.OPERATIONS][ctx.operation_id] = ctx.raw_entity_value
    node[ctx.PLUGINS] = ctx.raw_node[ctx.PLUGINS]
    return ctx.entity_id
def _add_property(self, ctx, current_nodes):
    """Persist a newly added node property and mirror it in memory.

    :param ctx: deployment-update entity context (ids, breadcrumbs, value)
    :param current_nodes: in-memory mapping of node id -> node dict
    :return: the added entity's id
    """
    self.sm.update_node(
        deployment_id=ctx.deployment_id,
        node_id=ctx.raw_node_id,
        changes={
            ctx.PROPERTIES: {
                ctx.property_id: utils.create_dict(
                    ctx.modification_breadcrumbs, ctx.raw_entity_value)
            }
        })

    node = current_nodes[ctx.raw_node_id]
    props = node[ctx.PROPERTIES]
    crumbs = ctx.modification_breadcrumbs
    if crumbs:
        # Nested addition: walk to the parent container, set only the leaf.
        parent = utils.traverse_object(props[ctx.property_id], crumbs[:-1])
        parent[crumbs[-1]] = ctx.raw_entity_value
    else:
        props[ctx.property_id] = ctx.raw_entity_value
    return ctx.entity_id
def add(self, ctx, current_entities):
    """Add a workflow to the stored deployment and to the in-memory view.

    :param ctx: deployment-update entity context (ids, breadcrumbs, value)
    :param current_entities: in-memory deployment plan dict
    """
    workflow = deployment_update_utils.create_dict(
        ctx.modification_breadcrumbs, ctx.raw_entity_value)
    deployment = self.sm.get(models.Deployment, ctx.deployment_id)
    # Reassign a fresh copy (rather than mutating in place) so the storage
    # layer registers the attribute change.
    updated_workflows = deployment.workflows.copy()
    updated_workflows[ctx.workflow_id] = workflow
    deployment.workflows = updated_workflows
    self.sm.update(deployment)
    current_entities[ctx.WORKFLOWS][ctx.workflow_id] = workflow
def add(self, ctx, current_entities):
    """Add a workflow via the storage manager and mirror it in memory.

    :param ctx: deployment-update entity context (ids, breadcrumbs, value)
    :param current_entities: in-memory deployment plan dict (keyed by 'id')
    """
    workflow = utils.create_dict(ctx.modification_breadcrumbs,
                                 ctx.raw_entity_value)
    self.sm.update_deployment(
        _data_template(Deployment,
                       current_entities['id'],
                       'id',
                       **{ctx.WORKFLOWS: {ctx.workflow_id: workflow}}))
    current_entities[ctx.WORKFLOWS][ctx.workflow_id] = workflow
def add(self, ctx, current_entities):
    """Add an output to the stored deployment and to the in-memory view.

    :param ctx: deployment-update entity context (ids, breadcrumbs, value)
    :param current_entities: in-memory deployment plan dict
    :return: the added entity's id
    """
    output = deployment_update_utils.create_dict(
        ctx.modification_breadcrumbs, ctx.raw_entity_value)
    deployment = self.sm.get(models.Deployment, ctx.deployment_id)
    # Reassign a fresh copy so the storage layer registers the change.
    updated_outputs = deployment.outputs.copy()
    updated_outputs[ctx.output_id] = output
    deployment.outputs = updated_outputs
    self.sm.update(deployment)
    # NOTE(review): the in-memory view stores the raw value here, not the
    # breadcrumb-wrapped ``output`` written to storage — kept as-is.
    current_entities[ctx.OUTPUTS][ctx.output_id] = ctx.raw_entity_value
    return ctx.entity_id
def add(self, ctx, current_entities):
    """Add an output via the storage manager and mirror it in memory.

    :param ctx: deployment-update entity context (ids, breadcrumbs, value)
    :param current_entities: in-memory deployment plan dict (keyed by 'id')
    :return: the added entity's id
    """
    output = utils.create_dict(ctx.modification_breadcrumbs,
                               ctx.raw_entity_value)
    self.sm.update_deployment(
        _data_template(Deployment,
                       current_entities['id'],
                       'id',
                       **{ctx.OUTPUTS: {ctx.output_id: output}}))
    # NOTE(review): the in-memory view stores the raw value here, not the
    # breadcrumb-wrapped ``output`` written to storage — kept as-is.
    current_entities[ctx.OUTPUTS][ctx.output_id] = ctx.raw_entity_value
    return ctx.entity_id
def modify(self, ctx, current_entities):
    """Persist a modified node property and mirror it in memory.

    :param ctx: deployment-update entity context (ids, breadcrumbs, value)
    :param current_entities: in-memory mapping of node id -> node dict
    :return: the modified entity's id
    """
    node = get_node(ctx.deployment_id, ctx.raw_node_id)
    # Deep-copy before editing so reassignment presents a new object to
    # the storage layer.
    stored_props = deepcopy(node.properties)
    stored_props[ctx.property_id] = deployment_update_utils.create_dict(
        ctx.modification_breadcrumbs, ctx.raw_entity_value)
    node.properties = stored_props
    self.sm.update(node)

    props = current_entities[ctx.raw_node_id][ctx.PROPERTIES]
    crumbs = ctx.modification_breadcrumbs
    if crumbs:
        parent = deployment_update_utils.traverse_object(
            props[ctx.property_id], crumbs[:-1])
        parent[crumbs[-1]] = ctx.raw_entity_value
    else:
        props[ctx.property_id] = ctx.raw_entity_value
    return ctx.entity_id
def modify(self, ctx, current_entities):
    """Persist a modified node property and update ``current_entities``.

    :param ctx: deployment-update entity context (ids, breadcrumbs, value)
    :param current_entities: in-memory mapping of node id -> node dict
    :return: the modified entity's id
    """
    node = get_node(ctx.deployment_id, ctx.raw_node_id)
    # Edit a deep copy, then reassign, so the change is picked up on update.
    new_properties = deepcopy(node.properties)
    new_properties[ctx.property_id] = deployment_update_utils.create_dict(
        ctx.modification_breadcrumbs, ctx.raw_entity_value)
    node.properties = new_properties
    self.sm.update(node)

    mem_props = current_entities[ctx.raw_node_id][ctx.PROPERTIES]
    breadcrumbs = ctx.modification_breadcrumbs
    if not breadcrumbs:
        mem_props[ctx.property_id] = ctx.raw_entity_value
    else:
        container = deployment_update_utils.traverse_object(
            mem_props[ctx.property_id], breadcrumbs[:-1])
        container[breadcrumbs[-1]] = ctx.raw_entity_value
    return ctx.entity_id
def _modify_node_operation(self, ctx, current_entities):
    """Persist a modified node operation (and plugins) and update memory.

    :param ctx: deployment-update entity context (ids, breadcrumbs, value)
    :param current_entities: in-memory mapping of node id -> node dict
    :return: the modified entity's id
    """
    rebuilt = deployment_update_utils.create_dict(
        ctx.modification_breadcrumbs, ctx.raw_entity_value)
    node = get_node(ctx.deployment_id, ctx.raw_node_id)
    # Edit a deep copy, then reassign, so the storage layer sees the change.
    stored_ops = deepcopy(node.operations)
    stored_ops[ctx.operation_id] = rebuilt
    node.operations = stored_ops
    node.plugins = ctx.raw_node[ctx.PLUGINS]
    self.sm.update(node)

    mem_node = current_entities[ctx.raw_node_id]
    crumbs = ctx.modification_breadcrumbs
    if crumbs:
        parent = deployment_update_utils.traverse_object(
            mem_node[ctx.OPERATIONS][ctx.operation_id], crumbs[:-1])
        parent[crumbs[-1]] = ctx.raw_entity_value
    else:
        mem_node[ctx.OPERATIONS][ctx.operation_id] = ctx.raw_entity_value
    mem_node[ctx.PLUGINS] = ctx.raw_node[ctx.PLUGINS]
    return ctx.entity_id
def _modify_node_operation(self, ctx, current_entities):
    """Write a modified node operation to storage and to the memory view.

    :param ctx: deployment-update entity context (ids, breadcrumbs, value)
    :param current_entities: in-memory mapping of node id -> node dict
    :return: the modified entity's id
    """
    node = get_node(ctx.deployment_id, ctx.raw_node_id)
    updated_ops = deepcopy(node.operations)
    updated_ops[ctx.operation_id] = deployment_update_utils.create_dict(
        ctx.modification_breadcrumbs, ctx.raw_entity_value)
    # Reassign copies so the update is registered by the storage layer.
    node.operations = updated_ops
    node.plugins = ctx.raw_node[ctx.PLUGINS]
    self.sm.update(node)

    in_memory_node = current_entities[ctx.raw_node_id]
    breadcrumbs = ctx.modification_breadcrumbs
    if not breadcrumbs:
        in_memory_node[ctx.OPERATIONS][ctx.operation_id] = \
            ctx.raw_entity_value
    else:
        container = deployment_update_utils.traverse_object(
            in_memory_node[ctx.OPERATIONS][ctx.operation_id],
            breadcrumbs[:-1])
        container[breadcrumbs[-1]] = ctx.raw_entity_value
    in_memory_node[ctx.PLUGINS] = ctx.raw_node[ctx.PLUGINS]
    return ctx.entity_id
def execute_operation( self ):
    # Dispatch to the matching utils function based on the radio-button
    # selection, and populate the result dictionaries used later when
    # generating the report.  (User-facing strings are Swedish by design.)
    folder = self.katalog_entry.get()
    ext = self.ext_entry.get()
    keyword = self.keyword_entry.get()
    date = self.date_entry.get()
    # Mode 1: list every file under the folder.
    if self.radiovar.get() == 1:
        if folder:
            if os.path.isdir(folder):
                list_tmp = utils.find_all_files(folder)
                # Accumulate hash matches across runs.
                self.match_hashset += utils.verify_files(list_tmp)
                utils.create_dict(folder, self.allfiles, list_tmp)
                self.display_results(list_tmp)
            else:
                tkinter.messagebox.showerror(
                    'Error', 'Detta är inte en äkta katalog!')
        else:
            tkinter.messagebox.showerror('Error',
                                         'Du måste ange en katalog!')
    # Mode 2: list files with a specific extension.
    elif self.radiovar.get() == 2:
        if folder and ext:
            if os.path.isdir(folder):
                list_tmp = utils.find_specific_files(folder, ext)
                self.match_hashset += utils.verify_files(list_tmp)
                utils.create_dict(ext, self.specificfiles, list_tmp)
                self.display_results(list_tmp)
            else:
                tkinter.messagebox.showerror(
                    'Error', 'Detta är inte en äkta katalog!')
        else:
            tkinter.messagebox.showerror(
                'Error', 'Du måste ange både katalog och filändelse!')
    # Mode 3: full-text keyword search within files of a given extension.
    elif self.radiovar.get() == 3:
        if folder and ext and keyword:
            if os.path.isdir(folder):
                list_tmp = utils.search_files(folder, ext, keyword)
                self.match_hashset += utils.verify_files(list_tmp)
                utils.create_dict(keyword, self.infofiles, list_tmp)
                self.display_results(list_tmp)
            else:
                tkinter.messagebox.showerror(
                    'Error', 'Detta är inte en äkta katalog!')
        else:
            tkinter.messagebox.showerror(
                'Error!', 'Du måste ange katalog, filändelse och sökord!')
    # Mode 4: files modified on a given date.
    elif self.radiovar.get() == 4:
        if folder and date:
            if os.path.isdir(folder):
                list_tmp = utils.find_modified_files(folder, date)
                self.match_hashset += utils.verify_files(list_tmp)
                utils.create_dict(date, self.datefiles, list_tmp)
                self.display_results(list_tmp)
            else:
                tkinter.messagebox.showerror(
                    'Error', 'Detta är inte en äkta katalog!')
        else:
            tkinter.messagebox.showerror(
                'Error!', 'Du måste ange katalog och datum!')
# -*- coding: utf-8 -*- import os import utils from config import Config from model import BiRNN os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2' conf = Config() wav_files, text_labels = utils.get_wavs_lables() words_size, words, word_num_map = utils.create_dict(text_labels) bi_rnn = BiRNN(wav_files, text_labels, words_size, words, word_num_map) bi_rnn.build_train()
# Version Setting # Set evaluation version as the prefix folder version_folder = 'E/' # Set run version as prefix and uiuc_run_folder p_f_run = 'E1' # E5 uiuc_run_folder = 'RPI_TA1_E1/' # Set the number of multiple processes processes_num = 32 # Input: LDC2019E42 unpacked data, CU visual grounding and all detection results, UIUC text mention results, USC grounding results # Input Paths # Source corpus data paths print('Check Point: LDC raw data change', corpus_path) parent_child_tab = corpus_path + 'docs/parent_children.sorted.tab' parent_dict, child_dict = utils.create_dict(parent_child_tab) # CU visual grounding feature paths out_path_jpg = working_path + 'cu_grounding_matching_features/' + 'semantic_features_jpg.lmdb' out_path_kfrm = working_path + 'cu_grounding_matching_features/' + 'semantic_features_keyframe.lmdb' # CU instance matching feature paths out_path_jpg = working_path + 'cu_grounding_matching_features/' + 'instance_features_jpg.lmdb' out_path_kfrm = working_path + 'cu_grounding_matching_features/' + 'instance_features_keyframe.lmdb' # CU visual grounding result path grounding_dict_path = working_path + 'cu_grounding_results/' + version_folder + 'grounding_dict_' + p_f_run + '.pickle' print('Check Point: version change', grounding_dict_path) grounding_dict = pickle.load(open(grounding_dict_path, 'rb')) # def top_dict(kv_dict, num = 2): # k_list = list(kv_dict.keys())[:num]
def insert_data_mongo(client, zno_table, tracker, filenames, batch_size=1000):
    """Bulk-insert rows from semicolon-delimited CSV files into MongoDB.

    Rows are read in batches of ``batch_size`` and inserted with
    ``insert_many``.  The ``tracker`` collection records a resume index so
    rows already inserted on a previous (interrupted) run are skipped.
    The exam year is extracted from each filename and appended to every row.

    :param client: MongoDB client (returned unchanged to the caller)
    :param zno_table: destination collection
    :param tracker: single-document collection holding ``batch_index``
    :param filenames: iterable of CSV file paths (cp1251-encoded)
    :param batch_size: rows per ``insert_many`` call
    :return: ``(client, True)`` on success, ``(client, False)`` on a
        failed batch insert
    """
    idx = 0
    helper_row = 0
    # Read the tracker once: it holds the resume point from a prior run.
    state = tracker.find_one()
    if state is None:
        tracker.insert_one({"batch_index": 0})
    else:
        helper_row = state.get("batch_index")
    start_time = time()
    for filename in filenames:
        # First number in the filename is the exam year.
        exam_year = re.findall(r'\d+', filename)[0]
        with open(filename, 'r', encoding="cp1251") as csv_data:
            # Header line becomes the document keys (lowercased).
            csv_line = csv_data.readline()
            col_name = csv_line.strip().replace('"', '').replace(
                ',', '.').split(";")
            col_name.append("examyear")
            col_name = [elem.lower() for elem in col_name]
            while csv_line != "":
                batch_dict_arr = []
                for _ in range(batch_size):
                    csv_line = csv_data.readline()
                    # readline() returns "" at EOF (never None).
                    if csv_line == "":
                        logging.debug("End of file reached")
                        break
                    # Skip rows already inserted on a previous run.
                    if helper_row > idx:
                        idx += 1
                        continue
                    line_to_insert = csv_line.strip().replace(
                        '"', '').replace(',', '.').split(";")
                    line_to_insert.append(exam_year)
                    line_to_insert = [
                        None if elem == "null" else float_int_check(elem)
                        for elem in line_to_insert
                    ]
                    batch_dict_arr.append(
                        create_dict(col_name, line_to_insert))
                if not batch_dict_arr:
                    # Empty batch (EOF, header-only file, or all rows
                    # skipped while resuming): insert_many([]) would raise,
                    # so skip the write instead of failing spuriously.
                    continue
                try:
                    zno_table.insert_many(batch_dict_arr)
                    logging.debug("Batch inserted")
                    # Advance the resume point past this batch.
                    tracker.update_one({"batch_index": idx},
                                       {"$set": {
                                           "batch_index": idx + 1
                                       }})
                    idx += 1
                except Exception:
                    # Narrow from a bare except: log the real error and
                    # report failure without swallowing SystemExit etc.
                    logging.exception("Batch insert failed")
                    return client, False
    end_time = time()
    print(f"INSERTION TIME: {end_time - start_time}")
    logging.info(f"INSERTION TIME: {end_time - start_time}")
    # All files inserted: the resume tracker is no longer needed.
    tracker.drop()
    return client, True
def main():
    # Interactive console menu for the file-forensics tool: search files,
    # encrypt/decrypt, diff two files, show system info, generate a report.
    # Result dictionaries, keyed by the search term used, feed the report.
    allfiles = dict()        # folder -> all files found
    specificfiles = dict()   # extension -> matching files
    infofiles = dict()       # keyword -> files containing it
    datefiles = dict()       # date -> files modified that day
    match_hashset = list()   # accumulated hash-verification matches
    while True:
        print("\n")
        print("################################################")
        print("# [1]Search [2]Encryption [3]File Difference #")
        print("# [4]System Info [5]Generate report #")
        print('# q or "exit" to exit #')
        print("################################################")
        ch = input("$ ")
        # Search in files
        if ch == "1":
            while True:
                print("\n")
                print("##########################################")
                print("# [1] Find all files [2] File Extension #")
                print("# [3] By date [4] Search in files #")
                print('# q or "back" to go back #')
                print("##########################################")
                ch2 = input("$ ")
                if ch2 == "1":
                    path = input("$ Path to folder: ")
                    if path == "q" or path == "back":
                        break
                    list_tmp = utils.find_all_files(path)
                    utils.create_dict(path, allfiles, list_tmp)
                    match_hashset += utils.verify_files(list_tmp)
                    print_results(list_tmp)
                if ch2 == "2":
                    ext = input("$ Extension: ")
                    if ext == "q" or ext == "back":
                        break
                    folder = input("$ Path to folder: ")
                    if folder == "q" or folder == "back":
                        break
                    list_tmp = utils.find_specific_files(folder, ext)
                    utils.create_dict(ext, specificfiles, list_tmp)
                    match_hashset += utils.verify_files(list_tmp)
                    print_results(list_tmp)
                if ch2 == "3":
                    folder = input("$ Path to folder: ")
                    if folder == "q" or folder == "back":
                        break
                    date = input("$ Date (Ex format: 2020-03-03): ")
                    if date == "q" or date == "back":
                        break
                    list_tmp = utils.find_modified_files(folder, date)
                    utils.create_dict(date, datefiles, list_tmp)
                    # NOTE(review): plain assignment here (and in option 4)
                    # discards earlier matches, unlike the += used in
                    # options 1 and 2 — confirm which is intended.
                    match_hashset = utils.verify_files(list_tmp)
                    print_results(list_tmp)
                if ch2 == "4":
                    folder = input("$ Path to folder: ")
                    if folder == "q" or folder == "back":
                        break
                    ext = input("$ Extension: ")
                    if ext == "q" or ext == "back":
                        break
                    keyword = input("$ Keyword: ")
                    if keyword == "q" or keyword == "back":
                        break
                    list_tmp = utils.search_files(folder, ext, keyword)
                    utils.create_dict(keyword, infofiles, list_tmp)
                    match_hashset = utils.verify_files(list_tmp)
                    print_results(list_tmp)
                if ch2 == "q" or ch2 == "back":
                    break
        # Encryption
        if ch == "2":
            while True:
                print("\n")
                print("###########################")
                print("# [1] Encrypt [2] Decrypt #")
                print('# q or "back" to go back #')
                print("###########################")
                ch2 = input("$ ")
                if ch2 == "1":
                    filename = input("$ Path to file: ")
                    if filename == "q" or filename == "back":
                        break
                    utils.encrypt_file(filename)
                    print(filename + " has been encrypted.")
                if ch2 == "2":
                    filename = input("$ Path to file: ")
                    if filename == "q" or filename == "back":
                        break
                    utils.decrypt_file(filename)
                    # NOTE(review): missing space before "has" in this
                    # runtime message — left untouched in this doc pass.
                    print(filename + "has been decrypted.")
                if ch2 == "q" or ch2 == "back":
                    break
        # File Difference
        if ch == "3":
            while True:
                print("\n")
                print(' q or "back" to go back')
                file1 = input("$ File 1: ")
                if file1 == "q" or file1 == "back":
                    break
                file2 = input("$ File 2: ")
                if file2 == "q" or file2 == "back":
                    break
                file1_diff, file2_diff = utils.word_difference(file1, file2)
                print()
                print("Words in file 1, but not in file 2:")
                print_results(file1_diff)
                print("Words in file 2, but not in file 1:")
                print_results(file2_diff)
        # System info
        if ch == "4":
            print_results(utils.system_information())
        # Generate report from everything collected so far.
        if ch == "5":
            dictionary = dict()
            dictionary['sys'] = utils.system_information()
            dictionary['hashset'] = match_hashset
            dictionary['allfiles'] = allfiles
            dictionary['extfiles'] = specificfiles
            dictionary['infofiles'] = infofiles
            dictionary['datefiles'] = datefiles
            utils.gen_report(dictionary)
            print("The report has been generated!")
        if ch == "q" or ch == "exit":
            print("\n")
            print(" Cya! ")
            print("\n")
            break
# -*- coding: utf-8 -*- import os import utils from config import Config from model import BiRNN os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2' conf = Config() wav_files, text_labels = utils.get_wavs_lables() words_size, words, word_num_map = utils.create_dict( conf.get("FILE_DATA").vocab_path) bi_rnn = BiRNN(wav_files, text_labels, words_size, words, word_num_map) bi_rnn.build_train()