def comparison(input_folder, result_csv, output_folder):
    """Compare T5 perfect-prediction flags against cloning result files.

    Builds a lookup keyed by "<method_id>_<masked_method_id>" from the
    result CSV, then for each *.txt file in input_folder counts how many
    of its records are flagged "TRUE" and prints per-file and overall
    percentages.

    NOTE(review): output_folder is accepted but never used — confirm
    whether results were meant to be written to disk as well.
    """
    f = FileManager(result_csv)
    # BUG FIX: the original list was missing a comma, fusing
    # "prediction_size" and "is_perfect_t5" into a single field name.
    fields = [
        "key", "method_id", "masked_method_id",
        "prediction_size", "is_perfect_t5", "Dataset"
    ]
    separator = "!--*__!"
    f.open_file_csv("r", fields)
    data = f.read_csv_list(separator)
    dict_t5 = dict()
    print("Read {} records".format(len(data)))
    for row in data:
        parts = row.split(separator)
        # parts[1]=method_id, parts[2]=masked_method_id, parts[4]=is_perfect_t5
        key = parts[1] + "_" + parts[2]
        dict_t5[key] = parts[4]
    # Renamed the filter variable: the original reused `f`, shadowing the
    # FileManager above.
    files = [name for name in os.listdir(input_folder) if ".txt" in name]
    print(files)
    overall_perfect_predictions = 0
    overall_records = 0
    for ff in files:
        reader = FileManager(os.path.join(input_folder, ff))
        data_cloning = reader.read_file_txt()
        num_records = len(data_cloning)
        print("Read {} records".format(num_records))
        num_perfect_prediction = 0
        for d in data_cloning:
            parts = d.split("|_|")
            method_id = parts[0]
            masked_method_id = parts[1]
            key = "{}_{}".format(method_id, masked_method_id)
            if dict_t5[key] == "TRUE":
                num_perfect_prediction += 1
        overall_perfect_predictions += num_perfect_prediction
        overall_records += num_records
        # Guard against an empty input file (ZeroDivisionError).
        pct = round(100 * num_perfect_prediction / num_records, 2) if num_records else 0.0
        print("{}: {} perfect prediction out of {} ({}%)".format(
            ff.replace("_raw.txt", "").replace("_", " "),
            num_perfect_prediction, num_records, pct))
    overall_pct = (round(100 * overall_perfect_predictions / overall_records, 2)
                   if overall_records else 0.0)
    print("Overall: {} perfect prediction out of {} ({}%)".format(
        overall_perfect_predictions, overall_records, overall_pct))
def __init__(self, url, depth=-1, basic_auth="", allowed_urls=None):
    """Create the spider wrapper.

    Args:
        url: root URL to crawl.
        depth: maximum crawl depth; -1 means unlimited (passed through).
        basic_auth: credentials forwarded to the Spider.
        allowed_urls: optional list restricting which URLs are followed.
    """
    # BUG FIX: the original used a mutable default (allowed_urls=[]),
    # which is shared across every instance created without the argument.
    if allowed_urls is None:
        allowed_urls = []
    self.spider = Spider(url, depth=depth, basic_auth=basic_auth,
                         allowed_urls=allowed_urls)
    self.filemanager = FileManager()
    self.shown_urls = []
def infer(model, directory):
    """ infer a set of images for classification

    Loads the saved logistic-regression model (if present), classifies
    every image in `directory`, echoing one line per file, then shows
    each image with its predicted class.

    NOTE(review): the `model` parameter is never used — the model path
    always comes from lr.LOCAL_SAVED_MODEL_LR; confirm with callers.
    """
    click.clear()
    click.echo()
    click.echo(lr.APP_TITLE)
    fm = FileManager()
    directory = fm.adjustDirectoryPath(directory)
    logTrainer = LogisticImageTrainer()
    if os.path.exists(lr.LOCAL_SAVED_MODEL_LR):
        logTrainer.loadModel(lr.LOCAL_SAVED_MODEL_LR)
        print("== model", lr.LOCAL_SAVED_MODEL_LR, "loaded")
        it = ImageTools()
        imageList = fm.folderFileListOfImages(directory)
        if len(imageList) > 0:
            # PERF FIX: run inference once per file and reuse the result;
            # the original re-ran the model for every image in the second
            # loop. Output order (all echoes, then all images) is kept.
            results = []
            for filename in imageList:
                code,descr = logTrainer.infer(directory,filename)
                results.append((filename, code, descr))
                out = "== FILE {0:s} Class {1:s}->{2:s}".format(filename, code ,descr)
                click.echo(out)
            for filename, code, descr in results:
                out = "== {0:s} -> {1:s}".format( code ,descr)
                it.showImage(directory+filename,out)
        else:
            click.echo(':: No files in \'%s\' directory' % directory)
    else:
        print("== model", lr.LOCAL_SAVED_MODEL_LR, "required for predictions do not exist!")
def __init__(self,root,canvas,scrollbar):
    """Initialise the file-browser controller.

    Wires the Tk widgets, builds per-OS shortcuts to the user's main
    folders, and renders the current directory into the canvas.
    """
    # Start the file-handling controller.
    self.FileManager=FileManager()
    # Keep the externally supplied widgets as attributes of this class.
    self.root = root
    self.canvas = canvas
    self.scrollbar = scrollbar
    # Initialise the icon set.
    self.init_icons(root)
    # Build a dictionary of shortcuts to the user's main folders,
    # depending on the operating system.
    home = expanduser("~")
    print("Carpetas del usuario"+home)
    # Windows branch (os.name == "nt").
    # NOTE(review): "\D", "\P" etc. are invalid escape sequences
    # (DeprecationWarning); raw strings or os.path.join would be safer.
    if (name == "nt"):
        self.listadir={"Documentos":"\Documents","Escritorio":"\Desktop","Descargas":"\Downloads","Imagenes":"\Pictures"}
        for key,value in self.listadir.items():
            self.listadir[key]= home+value
        #print(self.listadir)
    else:
        # Linux branch: fixed folders under ~/Files.
        self.dirHome = home + "/Files"
        self.dirImagenes = home + "/Files/Imagenes"
        self.dirTexto = home + "/Files/Documentos"
    # The current directory is wherever the application was started from.
    self.directorio_actual =getcwd()
    # Render the canvas with the icons of the current directory.
    self.actualizar_canvas()
def datatoexcel(tipo="padrao"): fm = FileManager("arquivos/filtrados/") # use creds to create a client to interact with the google drive api scope = [ 'https://spreadsheets.google.com/feeds', 'https://www.googleapis.com/auth/drive' ] creds = ServiceAccountCredentials.from_json_keyfile_name( "arquivos/config/credentials.json", scope) client = gspread.authorize(creds) # enviar dados consolidado if tipo == 1: sheet_dados_consolidado = client.open( "Teste Data Studio Black").worksheet("Prog Display - Consolidado") arquivo_consolidado = fm.verificar_arquivo("dados_consolidado.json") with open(arquivo_consolidado, 'r') as arq: dados_consolidado = dict(json.load(arq)) for e, value in enumerate(dados_consolidado.values()): sheet_dados_consolidado.update_cell(2, e + 1, value) # ------------------------------------------------------- # enviar dados dia if tipo == 2: sheet_dados_dia = client.open("Teste Data Studio Black").worksheet( "Prog Display - Dia") arquivo_dia = fm.verificar_arquivo("dados_dia.json") with open(arquivo_dia, 'r') as arq: dados_dia = dict(json.load(arq)) days = dados_dia["dia"] for d, day in enumerate(days, 2): for v, value in enumerate(day.values(), 1): sheet_dados_dia.update_cell(d, v, value)
def __init__(self, url, number_of_threads=20, allowed_urls=None,
             blocked_urls=None, basic_auth=(), depth=-1):
    """Initialise the crawler state.

    Args:
        url: start URL; seeds the unvisited set.
        number_of_threads: worker count used by the crawl loop.
        allowed_urls: optional whitelist of URLs to follow.
        blocked_urls: optional blacklist (currently unused — see the
            commented assignment below).
        basic_auth: (user, password) tuple for HTTP basic auth.
        depth: maximum crawl depth; -1 means unlimited.
    """
    # BUG FIX: both list parameters used mutable defaults ([]), shared
    # across every instance constructed without them.
    if allowed_urls is None:
        allowed_urls = []
    if blocked_urls is None:
        blocked_urls = []
    self.url = url
    self.number_of_threads = number_of_threads
    self.allowed_urls = allowed_urls
    # self.blocked_urls = blocked_urls
    self.lost_url = set()
    self.basic_auth = basic_auth
    self.depth = depth
    self.crawl = True
    self.visited = {}
    self.general_visited = set()
    self.unvisited = set()
    self.general_unvisited = {self.url}
    self.fetched_url_record = dict()
    # Column layout of the crawl report CSV.
    self.csv_table = CsvFormat([
        "url", "status code", "title", "keyword", "description", "h1", "h2",
        "h3", "h4", "h5", "h6", "index", "open tags", "external links",
        "h_tag_format"
    ])
    self.downloaded_pages = {}
    self.record = []
    self.url_parser = UrlParser(url)
    self.parser = HtmlParser()
    self.filemanager = FileManager()
class User:
    """Client credentials loaded from CLIENT_FILE and re-persisted on
    every instantiation; missing fields fall back to module defaults."""

    file_manager = FileManager()

    def __init__(self):
        try:
            stored = self.file_manager.read_json(CLIENT_FILE)
        except FileNotFoundError:
            # First run: no credential file yet, use defaults throughout.
            stored = {}
        self.client_id = stored.get('client_id', CLIENT_ID)
        self.secret = stored.get('client_secret', CLIENT_SECRET)
        self.username = stored.get('username', 'Unknown')
        self.grand_type = stored.get('grand_type', GRAND_TYPE_PASSWORD)
        self.access_token = stored.get('access_token', '')
        self.refresh_token = stored.get('refresh_token', '')
        self.step_type = stored.get('step_type', 'code')
        # Write back immediately so defaults are materialised on disk.
        self.save()

    def save(self):
        """Persist the current credential state to CLIENT_FILE."""
        self.file_manager.write_json(CLIENT_FILE, {
            'client_id': self.client_id,
            'client_secret': self.secret,
            'username': self.username,
            'grand_type': self.grand_type,
            'access_token': self.access_token,
            'refresh_token': self.refresh_token,
            'step_type': self.step_type,
        })
def test_all():
    """For every input fixture, scrape words and sentences and compare
    each result field-by-field against the stored expected output."""
    scraper = Scraper()
    filemanager = FileManager()
    for input_filename in listdir(path='./testing/inputs'):
        input_data = filemanager.read_input_file(
            'testing/inputs/' + input_filename)
        # input_data[0]: words, input_data[1]: (sentence, ...) tuples.
        scraper_data = [scraper.scrape_word(w) for w in input_data[0]]
        scraper_data += [scraper.scrape_sentence(pair[0])
                         for pair in input_data[1]]
        output_filename = 'output' + input_filename[5:]
        output_data = filemanager.read_output_file(
            'testing/outputs/' + output_filename)
        # Replace the audio placeholder column with real audio bytes.
        for data in output_data:
            if '#####' in data[1]:
                data[1] = b''
            else:
                data[1] = filemanager.read_audio_file(
                    'testing/audio/' + data[2] + '.mp3')
        for i, got in enumerate(scraper_data):
            for j in range(4):
                if got[j] != output_data[i][j]:
                    # Print the offending entry before the assert fires.
                    print()
                    print(got[2])
                    print()
                assert got[j] == output_data[i][j]
class Bundle:
    """Generates payment bundles from a template, filtered by sender IBAN."""

    fm = FileManager()

    def __init__(self):
        pass

    def get_template(self) -> str:
        """Read the bundle template from disk."""
        return self.fm.read_data('template.pli')

    def filter_data(self, data: Dict, send_iban: str) -> Generator[Dict[str, Any], None, None]:
        """Returns a generator filtered by the sender IBAN."""
        for entry in data:
            if entry['send_iban'] == send_iban:
                yield entry

    def generate(self, data, send_iban: str) -> str:
        """Generates a bundle."""
        template = Template(self.get_template())
        sender = settings.SENDER
        # One substituted template per matching entry, concatenated.
        pieces = [template.substitute(sender=sender, **item)
                  for item in self.filter_data(data, send_iban)]
        return ''.join(pieces)

    def save_to_disk(self, send_iban: str, content: str) -> None:
        """Saves a bundle to disk."""
        self.fm.write_data(send_iban, content)
def run():
    """Send one file to every receiver host given on the command line,
    one thread per host, and report the elapsed time."""
    import time

    # CLI: <hosts,comma,separated> <port> <file-path>
    hosts = sys.argv[1].split(',')
    port = int(sys.argv[2])
    file_manager = FileManager(sys.argv[3])

    threads = []
    for host in hosts:
        threads.append(threading.Thread(
            target=send_threaded,
            args=(file_manager, host, port,)))

    for worker in threads:
        worker.start()
    start_time = time.perf_counter()
    for worker in threads:
        worker.join()
    end_time = time.perf_counter()

    print("All targets receive file successfully!!")
    print(
        f'Program finished sending file for {(end_time-start_time)*1000:.2f}ms'
    )
def StartReconstruction(datastore):
    """Run the reconstruction pipeline over one or more project folders.

    In recursive mode every project below datastore.root_path is
    processed via FileManager iteration; otherwise only the root itself.
    For each project the configured steps are executed in order, with the
    matcher step (if present) given the requested matching strategy.
    """
    reconstructor = Reconstructor()
    project_ini = reconstructor.load_project_ini(
        path.realpath(datastore.ini_path))
    reconstructor.project_ini = project_ini
    rp = path.realpath(datastore.root_path)
    # FIX: idiomatic truthiness instead of "== True" (datastore.recursive
    # is presumably an argparse bool — confirm).
    if datastore.recursive:
        filemanager = FileManager(rp)
    else:
        filemanager = [rp]
    for cnt, rel_path in enumerate(filemanager, start=1):
        current_project_path = path.join(
            datastore.root_path,
            rel_path) if datastore.root_path != "./" else "./"
        reconstructor.root_dir = current_project_path
        # FIX: replaced manual "+" / __str__() string building; output is
        # identical ("#<n># Working on <path>").
        print("#{}# Working on {}".format(cnt, current_project_path))
        print(datastore.steps)
        steps = Steps.fromBinaryString(datastore.steps)
        if Steps.matcher in steps:
            matcher_index = steps.index(Steps.matcher)
            steps[matcher_index].matching_strategy = MatchingStrategy[
                datastore.matching_strategy]
        # FIX: the original used a throwaway list comprehension purely for
        # side effects; a plain loop expresses the intent.
        for step in steps:
            reconstructor.execute_step(step)
def __init__(self, axis_width, interaction_matrix, type_array, shift_data,
             distance_matrix):
    """Initialise the molecular-system state.

    Args:
        axis_width: base resolution; x_axis spans 10 * axis_width points.
        interaction_matrix: square matrix of pairwise interaction codes;
            entries equal to 1 are rewritten in place to
            InteractionValues.DEFAULT.
        type_array: per-atom type data passed to initialise_atoms —
            assumed one entry per matrix row; TODO confirm.
        shift_data: per-atom shift data consumed by initialise_atoms —
            shape not visible here; TODO confirm.
        distance_matrix: pairwise distance matrix, stored as-is.
    """
    self.x_axis = range(10 * axis_width)
    self.interaction_map = {}
    self.interaction_matrix = interaction_matrix
    # Random 3-D starting coordinates, one point per matrix row.
    initial_coordinates = np.random.rand(len(interaction_matrix), 3)
    self.coordinate_manager = CoordinateManager(initial_coordinates)
    # Normalise plain "1" entries to the default interaction code
    # (mutates the caller's matrix in place).
    self.interaction_matrix[self.interaction_matrix == 1] = InteractionValues.DEFAULT
    # NOTE(review): this snapshot is taken AFTER the normalisation above
    # (same object), so "_original" already contains DEFAULT entries —
    # confirm that is the intent.
    self.interaction_matrix_original = np.copy(interaction_matrix)
    self.file_manager = FileManager()
    self.bonds = []
    self.atoms = []
    self.initialise_atoms(type_array, shift_data)
    self.system_states = []
    self.distance_matrix = distance_matrix
def test_filemanager_populate(self):
    """populate_file_list should register exactly 12 atm files per
    simulated year, named <exp>.cam.h0.YYYY-MM.nc."""
    simstart, simend = 51, 60
    experiment = '20171011.beta2_FCT2-icedeep_branch.A_WCYCL1850S.ne30_oECv3_ICG.edison'
    filemanager = FileManager(
        mutex=self.mutex,
        sta=False,
        types=['atm', 'ice', 'ocn', 'rest', 'streams.cice', 'streams.ocean'],
        database='test.db',
        remote_endpoint=self.remote_endpoint,
        remote_path=self.remote_path,
        local_endpoint=self.local_endpoint,
        local_path=self.local_path)
    filemanager.populate_file_list(
        simstart=simstart, simend=simend, experiment=experiment)
    simlength = simend - simstart + 1
    atm_file_names = [
        df.name for df in DataFile.select().where(DataFile.datatype == 'atm')
    ]
    # One history file per month of the simulated span.
    self.assertTrue(len(atm_file_names) == (simlength * 12))
    for year in range(simstart, simend + 1):
        for month in range(1, 13):
            expected = '{exp}.cam.h0.{year:04d}-{month:02d}.nc'.format(
                exp=experiment, year=year, month=month)
            self.assertTrue(expected in atm_file_names)
def test_filemanager_update_local(self):
    """update_local_status should mark a file that exists on disk as
    locally present (local_status == 0) with a non-zero size."""
    sta = False
    types = ['atm', 'ice', 'ocn', 'rest', 'streams.cice', 'streams.ocean']
    database = 'test.db'
    simstart = 51
    simend = 60
    experiment = '20171011.beta2_FCT2-icedeep_branch.A_WCYCL1850S.ne30_oECv3_ICG.edison'
    filemanager = FileManager(mutex=self.mutex,
                              sta=sta,
                              types=types,
                              database=database,
                              remote_endpoint=self.remote_endpoint,
                              remote_path=self.remote_path,
                              local_endpoint=self.local_endpoint,
                              local_path=self.local_path)
    filemanager.populate_file_list(simstart=simstart,
                                   simend=simend,
                                   experiment=experiment)
    # The DataFile table is shared state: hold the mutex while writing
    # the fixture file for the first registered entry.
    self.mutex.acquire()
    df = DataFile.select().limit(1)
    name = df[0].name
    head, tail = os.path.split(df[0].local_path)
    if not os.path.exists(head):
        os.makedirs(head)
    # Materialise the file on disk so the manager can detect it.
    with open(df[0].local_path, 'w') as fp:
        fp.write('this is a test file')
    if self.mutex.locked():
        self.mutex.release()
    filemanager.update_local_status()
    # Re-acquire before reading back the updated record.
    self.mutex.acquire()
    df = DataFile.select().where(DataFile.name == name)[0]
    self.assertEqual(df.local_status, 0)
    self.assertTrue(df.local_size > 0)
    # NOTE(review): the mutex is left acquired on exit — presumably
    # released in tearDown; confirm.
def test_filemanager_update_remote_no_sta(self):
    """With short-term archiving disabled, update_remote_status should
    mark every populated file as present on the remote endpoint."""
    sta = False
    types = ['atm', 'ice', 'ocn', 'rest', 'streams.cice', 'streams.ocean']
    database = 'test.db'
    simstart = 51
    simend = 60
    experiment = '20171011.beta2_FCT2-icedeep_branch.A_WCYCL1850S.ne30_oECv3_ICG.edison'
    filemanager = FileManager(mutex=self.mutex,
                              sta=sta,
                              types=types,
                              database=database,
                              remote_endpoint=self.remote_endpoint,
                              remote_path=self.remote_path,
                              local_endpoint=self.local_endpoint,
                              local_path=self.local_path)
    filemanager.populate_file_list(simstart=simstart,
                                   simend=simend,
                                   experiment=experiment)
    client = get_client()
    filemanager.update_remote_status(client)
    # Hold the mutex while iterating the shared DataFile table.
    self.mutex.acquire()
    for datafile in DataFile.select():
        # remote_status 0 means the file was found on the remote side.
        self.assertEqual(datafile.remote_status, 0)
    if self.mutex.locked():
        self.mutex.release()
    self.assertTrue(filemanager.all_data_remote())
def test_filemanager_update_remote_yes_sta(self):
    """With short-term archiving enabled, update_remote_status should
    still mark every populated file as present on the remote endpoint."""
    sta = True
    types = ['atm', 'ice', 'ocn', 'streams.cice', 'streams.ocean']
    database = 'test.db'
    simstart = 51
    source_path = '/global/cscratch1/sd/golaz/ACME_simulations/20170915.beta2.A_WCYCL1850S.ne30_oECv3_ICG.edison'
    simend = 60
    experiment = '20170915.beta2.A_WCYCL1850S.ne30_oECv3_ICG.edison'
    filemanager = FileManager(mutex=self.mutex,
                              sta=sta,
                              types=types,
                              database=database,
                              remote_endpoint=self.remote_endpoint,
                              remote_path=source_path,
                              local_endpoint=self.local_endpoint,
                              local_path=self.local_path)
    filemanager.populate_file_list(simstart=simstart,
                                   simend=simend,
                                   experiment=experiment)
    client = get_client()
    filemanager.update_remote_status(client)
    # Hold the mutex while iterating the shared DataFile table.
    self.mutex.acquire()
    for datafile in DataFile.select():
        if datafile.remote_status != 0:
            # BUG FIX: this was a Python 2 print *statement*, which is a
            # SyntaxError under Python 3 (the file uses f-strings elsewhere).
            print(datafile.name, datafile.remote_path)
        self.assertEqual(datafile.remote_status, 0)
    if self.mutex.locked():
        self.mutex.release()
    self.assertTrue(filemanager.all_data_remote())
def add_row_to_table(self, name_of_row):
    """Move a pending row into the table and reset the file manager.

    Returns False when the named row is missing (or falsy); otherwise
    appends it to self.table and removes it from self.row.
    """
    pending = self.row.get(name_of_row)
    if not pending:
        return False
    del self.row[name_of_row]
    self.table.append(pending)
    # Fresh manager after the table changes.
    self.filemanager = FileManager()
def get_user_input():
    """Interactively build and run a file rule (delete/copy/move),
    pausing for confirmation after each processed file."""
    print("Choose process: (delete, copy, move)")
    process_name = input()
    print("Enter regex for input files:")
    regex = input()
    print("Enter origin directory:")
    origin = FilePath.from_string(input())
    file_manager = FileManager()
    origin_files = file_manager.get_folder_contents(origin)
    # Only copy/move need a destination; delete works in place.
    destination = None
    if process_name in ["copy", "move"]:
        print("Enter destination directory:")
        destination = FilePath.from_string(input())
    file_rule = create_file_rule(regex)
    process = create_process(file_manager, process_name)
    rule = Rule(file_rule, process, destination)
    matching = rule.apply_files_rule(origin_files)
    for _ in rule.apply_process(matching):
        input("Enter to continue...")
def save_results_for_KM(root_dir, res_dict, method_name, dat_name):
    """Save clustering-quality results for K-means and its variants.

    Writes one CSV row per entry of res_dict (each a dict keyed by the
    field names below) to results/<method>/<dataset>/cls_quality.csv.
    """
    res_dir = os.path.join(
        root_dir, 'results', method_name,
        dat_name)  # get the result directory where the result is stored
    f_manager = FileManager(res_dir)
    f_path = os.path.join(res_dir, 'cls_quality.csv')
    f_manager.add_file(f_path)
    # BUG FIX: "print f_path" was a Python 2 print statement — a
    # SyntaxError on Python 3.
    print(f_path)
    # CSV layout:
    # "seed" "time" "Purity" "ARI" "ACC" "NMI" "d_W" — one row per seed.
    field_names = ['seed', 'time', 'Purity', 'ARI', 'ACC', 'NMI', 'd_W']
    # BUG FIX: csv.writer on Python 3 needs text mode; 'wb' raises
    # TypeError. newline='' is the documented way to avoid blank rows.
    with open(f_path, mode='w', newline='') as csv_file:
        writer = csv.DictWriter(
            csv_file, fieldnames=field_names
        )  # maps the result dictionaries onto output rows in CSV
        writer.writeheader()
        # Iterate values directly instead of keys + lookup (PERF102).
        for row in res_dict.values():
            writer.writerow(row)
def test_runmanager_write(self):
    """setup_job_sets followed by write_job_sets should produce the
    job_state.txt file under the project's output directory."""
    filemanager = FileManager(
        database=os.path.join(self.project_path, 'test.db'),
        types=['atm'],
        sta=False,
        mutex=self.mutex,
        remote_endpoint=self.remote_endpoint,
        remote_path=self.remote_path,
        local_endpoint=self.local_endpoint)
    runmanager = RunManager(
        event_list=Event_list(),
        output_path=self.output_path,
        caseID=self.config['global']['experiment'],
        scripts_path=self.run_scripts_path,
        thread_list=[],
        event=threading.Event())
    global_cfg = self.config['global']
    runmanager.setup_job_sets(
        set_frequency=[5, 10],
        sim_start_year=int(global_cfg['simulation_start_year']),
        sim_end_year=int(global_cfg['simulation_end_year']),
        config=self.config,
        filemanager=filemanager)
    state_path = os.path.join(self.project_path, 'output', 'job_state.txt')
    runmanager.write_job_sets(state_path)
    self.assertTrue(os.path.exists(state_path))
def setUp(self):
    """Build a crawler over a clean database before each test."""
    file_manager = FileManager("products.csv", "urls.db")
    # Start every test from an empty URL database.
    file_manager.cleanup_database()
    self.crawler = Crawler(
        "https://www.epocacosmeticos.com.br",
        file_manager,
        Log(verbose=False))
def removeFavorite(self, server):
    """ Removes a server from the favorites list. """
    message = ('[GuiController] remove Favorite called for server with '
               'adress ' + server.getaddress())
    Log.log.debug(message)
    # Persistence is delegated to the file manager.
    FileManager().removeFavorite(server)
def __init__(self):
    """Assemble the MIDI generation chain bottom-up."""
    deck_options = DeckOptions()
    self.myConfig = Configuration(deck_options.pathToConfig())
    # The note translator feeds both makers; the file manager sits on top.
    self.t = NoteTranslator()
    self.sm = MidiStringMaker(self.t)
    self.tm = MidiTrackMaker(self.t, self.sm)
    self.fm = FileManager(self.myConfig, self.tm)
def __init__(self, interaction_manager):
    """Store the interaction manager and initialise search bookkeeping."""
    self.interaction_manager = interaction_manager
    # Render all interactions up front.
    self.interaction_manager.plot_all_interactions()
    self.file_manager = FileManager()
    # Seed the search from the manager's starting geometry.
    initial = self.interaction_manager.get_initial_coordinates()
    self.best_atom_coordinates = initial
    self.best_response_value = 0
    self.iterations = 0
    self.date = self.get_now()
def __init__(self):
    """Wire together the crawl/clean/search pipeline components."""
    self.__crawler = Crawler()
    self.__cleaner = Cleaner()
    self.__file_manager = FileManager()
    # The search engine is configured entirely from the config module.
    self.__search_engine = GoogleSearch(
        config.SEARCH_TOPIC,
        config.MAX_ITEM,
        config.NUMBER_OF_RESULTS_PER_PAGE,
        config.PAUSE_BTW_REQUEST)
    self.__csf_manager = CSFManager()
def __init__(self):
    """Build the structure-solver state: load coordinates, derive the
    interaction manager from the 2-D signal data, and prepare fragments."""
    self.file_manager = FileManager()
    # read_coordinates returns the molecular system plus its coordinates.
    system, self.atom_coordinates = self.read_coordinates()
    # self.interaction_manager = get_interaction_manager(get_twod_signal_manager().get_interaction_matrix(), system.type_array.tolist(), np.zeros(shape=system.type_array.shape))
    self.interaction_manager = get_interaction_manager(*get_twod_signal_manager().get_interaction_data())
    self.viewer = QtViewer()
    # Large sentinel so the first evaluated response always improves on it.
    self.best_response_value = 1000000.0
    self.best_atom_coordinates = np.copy(self.atom_coordinates)
    self.fragments = self.generate_fragments(system)
    # NOTE(review): the hydrogen bond-length fix-up runs AFTER the "best"
    # snapshot above, so best_atom_coordinates holds the unfixed geometry
    # — confirm that ordering is intentional.
    self.atom_coordinates = self.fix_hydrogen_bond_lengths(system, self.atom_coordinates)
def addFavorite(self, server):
    """ Adds a favorite server. The favorites are persisted in a CSV
    file. """
    message = ('[GuiController] addFavorite called for server with '
               'adress ' + server.getaddress())
    Log.log.debug(message)
    # Persistence is delegated to the file manager.
    FileManager().addFavorite(server)
def _get_header(self):
    """Extract column names from the file's key/value lines.

    Structural lines are skipped; parsing stops at the first '},' and
    returns whatever headers were collected up to that point.
    """
    skip_tokens = ['{\n', '}\n', '[\n']
    headers = []
    with FileManager(self.file, 'r') as f:
        for line in f.readlines():
            if any(token in line for token in skip_tokens):
                continue
            if '},' in line:
                # End of the first record: headers are complete.
                return headers
            headers.append(line.split(':')[0].strip('\t"'))
    return headers
def main():
    """
    Submitter 0.3 Tools for submitting solutions to stepik.org
    """
    # The application folder must exist before anything else can run.
    try:
        FileManager().create_dir(APP_FOLDER)
    except OSError:
        exit_util("Can't do anything. Not enough rights to edit folders.")
def _get_values(self):
    """Extract the value column from the file's key/value lines.

    Returns a list of single-element lists (one per accepted line),
    skipping structural lines and bare ',' separators.
    """
    _not_valid = ['{\n', '},\n,', '}', '[', ']']
    # FIX: the original computed len(self._get_header()) into an unused
    # local, re-reading the entire file for nothing.
    _values = []
    with FileManager(self.file, 'r') as f:
        for line in f.readlines():
            if not any(item in line for item in _not_valid) and not line == ',\n':
                _values.append([line.split(':')[-1].strip('\t\n, ')])
    return _values