def saveModules(where):
    """Persist every module in ActiveModules as JSON files under *where*.

    Layout: where/<url(module)>/<url(resource)>, one JSON file per resource.
    Also deletes on-disk entries whose module/resource no longer exists in
    memory, then writes a '__COMPLETE__' marker file so a later loader can
    tell the dump finished without interruption.
    """
    with modulesLock:
        for module in ActiveModules:
            # Iterate over all of the resources in a module and save them as
            # json files under the URL-quoted module name for the filename.
            for resource in ActiveModules[module]:
                # Make sure there is a directory at where/module/
                util.ensure_dir(os.path.join(where, url(module), url(resource)))
                # Open a file at /where/module/resource
                with open(os.path.join(where, url(module), url(resource)), "w") as f:
                    # Make a json file there and prettyprint it
                    json.dump(ActiveModules[module][resource], f,
                              sort_keys=True, indent=4, separators=(',', ': '))
            # Delete on-disk resource entries that were removed from this
            # module in memory.  BUGFIX: the original reused `i` here,
            # shadowing the module loop variable, and tested resource names
            # against ActiveModules (the module table) instead of this
            # module's own resources — which would have deleted almost every
            # saved resource.
            for entry in util.get_immediate_subdirectories(
                    os.path.join(where, url(module))):
                if unurl(entry) not in ActiveModules[module]:
                    # NOTE(review): entries come from a subdirectory listing
                    # but are removed with os.remove — confirm resources are
                    # stored as files, not directories.
                    os.remove(os.path.join(where, url(module), entry))
        for i in util.get_immediate_subdirectories(where):
            # Look in the modules directory; if the module folder is not in
            # ActiveModules we assume the user deleted the module, so we
            # delete its save tree.  Module folder names are URL-quoted.
            if unurl(i) not in ActiveModules:
                shutil.rmtree(os.path.join(where, i))
        # Marker proving the dump completed; loaders can treat a directory
        # without it as an interrupted, inconsistent save.
        with open(os.path.join(where, '__COMPLETE__'), 'w') as f:
            f.write(
                "By this string of contents quite arbitrary, I hereby mark this dump as consistant!!!"
            )
def saveModules(where):
    """Save all modules in ActiveModules to disk under *where* as JSON.

    One file per resource at where/<url(module)>/<url(resource)>; prunes
    on-disk entries deleted from memory and finally writes a
    '__COMPLETE__' consistency marker.
    """
    with modulesLock:
        for module in ActiveModules:
            # Save each resource of this module as a pretty-printed JSON
            # file under the URL-quoted module directory.
            for resource in ActiveModules[module]:
                # Make sure there is a directory at where/module/
                util.ensure_dir(os.path.join(where, url(module), url(resource)))
                with open(os.path.join(where, url(module), url(resource)), "w") as f:
                    json.dump(ActiveModules[module][resource], f,
                              sort_keys=True, indent=4, separators=(',', ': '))
            # Remove saved resources that no longer exist in this module.
            # BUGFIX: original shadowed the outer loop variable `i` and
            # compared resource names against ActiveModules (module names)
            # rather than ActiveModules[module] (this module's resources),
            # which would delete nearly all saved resources.
            for entry in util.get_immediate_subdirectories(
                    os.path.join(where, url(module))):
                if unurl(entry) not in ActiveModules[module]:
                    # NOTE(review): listing returns subdirectories but
                    # removal uses os.remove — verify resources are files.
                    os.remove(os.path.join(where, url(module), entry))
        for i in util.get_immediate_subdirectories(where):
            # A module folder with no in-memory counterpart means the user
            # deleted the module; remove its whole save tree.
            if unurl(i) not in ActiveModules:
                shutil.rmtree(os.path.join(where, i))
        # Consistency marker: presence means the dump ran to completion.
        with open(os.path.join(where, '__COMPLETE__'), 'w') as f:
            f.write("By this string of contents quite arbitrary, I hereby mark this dump as consistant!!!")
def train_log_id(self):
    """Parse every case directory under self.log_path_upper and merge the
    resulting log-id dictionaries.

    Returns:
        A 5-tuple of merged dicts: (exact, ignore-content,
        ignore-line-number, content-approximate,
        ignore-line-number-and-content-approximate).  On key collisions the
        later case's entry wins, matching the original merge order.
    """
    total_log_id_dict = {}
    total_log_id_ignore_content_dict = {}
    total_log_id_ignore_line_number_dict = {}
    # for approximate matching
    total_log_id_content_approximate_dict = {}
    total_log_id_ignore_line_num_content_approximate_dict = {}
    case_num = 0
    for sub_dir in util.get_immediate_subdirectories(self.log_path_upper):
        case_num += 1
        # Progress banner between cases.
        print(os.linesep * 2)
        print('*' * 20)
        print(case_num)
        log_parser_obj = log_parser.log_parser(
            os.path.join(self.log_path_upper, sub_dir),
            log_type.is_event_log(self.log_type))
        (log_id_dict,
         log_id_ignore_content_dict,
         log_id_ignore_line_number_dict,
         log_id_content_approximate,
         log_id_ignore_line_num_content_approximate) = log_parser_obj.parse_log_id()
        # BUGFIX: the original merged with dict(a.items() + b.items()),
        # which is Python-2-only (dict views cannot be concatenated in
        # Python 3).  dict.update() has identical semantics: later entries
        # overwrite earlier ones.
        total_log_id_dict.update(log_id_dict)
        total_log_id_ignore_content_dict.update(log_id_ignore_content_dict)
        total_log_id_ignore_line_number_dict.update(log_id_ignore_line_number_dict)
        # for approximate matching
        total_log_id_content_approximate_dict.update(log_id_content_approximate)
        total_log_id_ignore_line_num_content_approximate_dict.update(
            log_id_ignore_line_num_content_approximate)
        log_parser_obj.clean_up()
    print('len of total_log_id_dict is ', len(total_log_id_dict))
    print('len of total_log_id_ignore_content_dict is ',
          len(total_log_id_ignore_content_dict))
    print('len of total_log_id_ignore_line_number_dict is ',
          len(total_log_id_ignore_line_number_dict))
    return (total_log_id_dict,
            total_log_id_ignore_content_dict,
            total_log_id_ignore_line_number_dict,
            total_log_id_content_approximate_dict,
            total_log_id_ignore_line_num_content_approximate_dict)
def train_log_id_install(self):
    """Parse the 'TiInst' install log of every case directory under
    self.log_path_upper and merge the per-log-id dictionaries.

    Returns:
        A nested dict: outer key -> {log_id -> merged entries}.  On inner
        key collisions the later case's entries win, matching the original
        merge order.
    """
    total_log_id_dict = {}
    case_num = 0
    for sub_dir in util.get_immediate_subdirectories(self.log_path_upper):
        case_num += 1
        # Progress banner between cases.
        print(os.linesep * 2)
        print('*' * 20)
        print(case_num)
        log_parser_obj = log_parser_install.log_parser(
            os.path.join(self.log_path_upper, sub_dir, 'TiInst'))
        log_id_dict = log_parser_obj.parse_log_id()
        for k_o in log_id_dict:
            print(k_o)
            # Fresh template for an unseen outer key: every known log id
            # maps to an empty dict.
            daemon_log_id_dict = {k: {} for k in log_id_def.get_log_id_list()}
            inner_dict = log_id_dict[k_o]
            for k_i in inner_dict:
                print(k_i, len(inner_dict[k_i]))
                # First sighting of k_o installs the empty template.
                total_log_id_dict[k_o] = total_log_id_dict.get(k_o, daemon_log_id_dict)
                # BUGFIX: the original merged with dict(a.items() +
                # b.items()), which is Python-2-only (dict views cannot be
                # concatenated in Python 3).  Build the merged dict the
                # same way: existing entries first, new case's entries win.
                merged = dict(total_log_id_dict[k_o][k_i])
                merged.update(inner_dict[k_i])
                total_log_id_dict[k_o][k_i] = merged
        log_parser_obj.clean_up()
    return total_log_id_dict
def train_log_id_install_msi(self):
    """Parse the 'TiInst' MSI install log of every case directory under
    self.log_path_upper and union the content sets.

    Returns:
        A set containing the union of the content sets returned by the MSI
        log parser for every case.
    """
    case_num = 0
    total_content_set = set()
    for sub_dir in util.get_immediate_subdirectories(self.log_path_upper):
        case_num += 1
        # BUGFIX: the original used Python 2 `print` statements; print()
        # calls behave identically and also run under Python 3.
        print(os.linesep * 2)
        print('*' * 20)
        print(case_num)
        # os.path.join is variadic, so the nested join was redundant.
        log_parser_obj = log_parser_msi.log_parser(
            os.path.join(self.log_path_upper, sub_dir, 'TiInst'))
        content_set, _ = log_parser_obj.parse_log_id()
        total_content_set = total_content_set.union(content_set)
    return total_content_set
def do(self):
    """Synchronise Program entities with the program directories on disk.

    Requires a 'god' user.  Directories with no matching entity become new
    Programs; entities with no matching directory are soft-deleted.  Note
    that soft-deleted entities remain in the returned 'current_programs'
    list, as fetched before the sync.
    """
    user = self.get_current_user()
    if user.user_type != 'god':
        raise core.PermissionDenied()

    # Program directories present on disk.
    programs_path = os.path.join(os.getcwd(), 'programs')
    new_abbreviations = util.get_immediate_subdirectories(programs_path)

    # Existing non-deleted entities; fetch one extra so overflow is detectable.
    current_programs = Program.all().filter('deleted =', False).fetch(11)
    if len(current_programs) > 10:
        raise Exception("Too many programs. Limit is 10.")

    # Reconcile: entities with a directory stay; the rest are soft-deleted.
    current_abbreviations = []
    deleted_programs = []
    for program in current_programs:
        if program.abbreviation in new_abbreviations:
            current_abbreviations.append(program.abbreviation)
            new_abbreviations.remove(program.abbreviation)
        else:
            program.deleted = True
            deleted_programs.append(program)
    db.put(deleted_programs)

    # Whatever abbreviations remain have no entity yet: create them.
    new_programs = []
    for abbreviation in new_abbreviations:
        program_config = Program.get_app_configuration(abbreviation)
        program = Program.create(abbreviation=abbreviation,
                                 name=program_config.name)
        current_programs.append(program)
        new_programs.append(program)
    db.put(new_programs)

    return {
        'success': True,
        'data': {
            'deleted_programs': [p.to_dict() for p in deleted_programs],
            'new_programs': [p.to_dict() for p in new_programs],
            'current_programs': [p.to_dict() for p in current_programs],
        },
    }
def loadModules(modulesdir):
    """Load every module directory found directly under *modulesdir*,
    then rebuild the event list from the loaded modules."""
    for moduledir in util.get_immediate_subdirectories(modulesdir):
        loadModule(moduledir, modulesdir)
    newevt.getEventsFromModules()
# if 'label' in data_config[f] and data_config[f]['label']: # if 'type' in data_config[f] and data_config[f]['type'] == 'range': # idx = data_config[f]['conll_idx'] # j = i + idx[1] if idx[1] != -1 else -1 # label_idx_map[f] = (i, j) # else: # label_idx_map[f] = (i, i+1) # create transition parameters if training or decoding with crf/viterbi # need to load these here for ensembling (they're also loaded by the model) transition_params = util.load_transition_params(layer_task_config, vocab) if args.ensemble: predict_fns = [ predictor.from_saved_model("%s/%s" % (args.save_dir, subdir)) for subdir in util.get_immediate_subdirectories(args.save_dir) ] else: predict_fns = [predictor.from_saved_model(args.save_dir)] def dev_input_fn(): return train_utils.get_input_fn(vocab, data_config, dev_filenames, hparams.batch_size, num_epochs=1, shuffle=False, embedding_files=embedding_files)
def loadModules(modulesdir):
    """Load each module located directly under *modulesdir*."""
    for name in util.get_immediate_subdirectories(modulesdir):
        loadModule(name, modulesdir)
def lsdirs(path):
    """Return the immediate subdirectories of *path*."""
    subdirs = util.get_immediate_subdirectories(path)
    return subdirs
def loadModules(modulesdir):
    """Scan *modulesdir* and load every module directory found in it."""
    subdirectories = util.get_immediate_subdirectories(modulesdir)
    for subdir in subdirectories:
        loadModule(subdir, modulesdir)