def main():
    """Annotate the parsed voter CSV with inferred gender.

    Reads 'parsedCsvAll.csv', loads the pre-computed conditional/name
    probabilities, and re-evaluates the gender of every record currently
    marked 'M' via is_female(); writes the result to 'parsedCsv_gender.csv'.
    """
    flt = FileUtils()
    data = flt.load_csv('parsedCsvAll.csv')
    prob_fn = 'probabilities.pcl'
    # To regenerate the probability cache: save_probs(data[1:], prob_fn)
    condProbs, nameProbs = load_probs(prob_fn)
    # Prior gender distribution used by the classifier.
    gendProbs = {'F': 0.52, 'M': 0.48}
    # `with` guarantees the output file is flushed and closed
    # (the original leaked the handle).
    with open("parsedCsv_gender.csv", 'w', encoding='utf-8',
              newline='\n') as newCsv:
        csvWriter = csv.writer(newCsv, delimiter=',')
        csvWriter.writerow(data[0])  # header row
        for row in data[1:]:
            if len(row) < 10:
                # Malformed/short record: skip rather than crash on row[9].
                continue
            if row[9] == 'M':
                # Only rows defaulted to 'M' are re-classified; 'F' rows
                # were already decided upstream.
                row[9] = is_female([row[0], row[9]], condProbs, nameProbs,
                                   gendProbs)
            csvWriter.writerow(row)
def process_stamping_files():
    """Client for the file-based attendance (stamping) system.

    Stamping files may live on a local folder, on FTP or on SFTP.
    Processes the files found in the data directory and ships the
    stampings via HTTP/REST to the attendance system.
    """
    logging.info("@@ Invio timbrature @@")
    protocol = STAMPINGS_SERVER_PROTOCOL
    if STAMPINGS_ON_LOCAL_FOLDER or protocol == "local":
        LocalFolderManager().check_new_stamping_files()
    elif protocol == "sftp":
        sftp_manager = SFTPDownloader()
        sftp_manager.check_new_stamping_files()
        sftp_manager.close()
    elif protocol == "ftp":
        FTPDownloader().check_new_stamping_files()
    elif protocol == "smartclock":
        newest, newest_date = SmartClockManager.downloadstampings()
        # Persist the most recent stamping so the next run resumes from it.
        if newest is not None:
            FileUtils.save_last_request(newest, newest_date)
        SmartClockManager.process_stamping_files()
def _retrieve_and_process_file(self, file_name, from_line=None):
    """Download *file_name* via SFTP and import it into Epas.

    Records (through fileInfoManager) the file size and the number of the
    last processed line, so later runs can resume from that point.
    """
    logging.info("Process il file %s", file_name)
    self._retrieve_file(file_name)
    local_path = "%s/%s" % (STAMPINGS_DIR, file_name)
    raw_lines, last_line = self._raw_stampings(local_path, from_line)
    rejected, errors = StampingImporter().sendStampingsOnEpas(raw_lines)
    if rejected:
        # Drop possible duplicates before persisting the failed stampings.
        FileUtils.storestamping(self.bad_stampings_path, set(rejected))
    if errors:
        FileUtils.storestamping(self.parsing_errors_path, errors)
    self.fileInfoManager.save(file_name, os.path.getsize(local_path),
                              last_line)
def send_bad_stampings():
    """Retry the stampings that previously failed to reach Epas.

    Entries older than MAX_BAD_STAMPING_DAYS are dropped; whatever still
    fails after the retry is written back to the bad-stampings /
    parsing-errors files.
    """
    logging.info("@@ Invio timbrature con problemi @@")
    if os.path.exists(bad_stampings_path):
        bad_stampings = []
        parsing_errors = []
        now = datetime.now()
        still_good_stampings = []
        # Midnight of the oldest day whose stampings are still worth retrying.
        oldest_day_allowed = datetime.combine(
            now - timedelta(MAX_BAD_STAMPING_DAYS), now.time().min)
        logging.info(
            "Bad Stampings: verranno mantenute solo le timbrature più nuove di %s",
            oldest_day_allowed)
        with open(bad_stampings_path, 'r') as f:
            # splitlines() also removes the trailing newlines.
            lines = f.read().splitlines()
        lines = set(lines)  # drop duplicate stampings
        # Throw away the stampings older than x days.
        for line in lines:
            stamp = StampingImporter._parseLine(line)
            stamping_date = datetime(stamp.anno, stamp.mese, stamp.giorno,
                                     stamp.ora, stamp.minuti)
            if stamping_date >= oldest_day_allowed:
                still_good_stampings.append(line)
        removed_lines = len(lines) - len(still_good_stampings)
        if removed_lines > 0:
            logging.info(
                'Rimosse %d timbrature dal file %s perché più vecchie del %s',
                removed_lines, BAD_STAMPINGS_FILE, oldest_day_allowed)
        # The file is removed here; survivors that fail again are re-written
        # below by FileUtils.storestamping.
        os.remove(bad_stampings_path)
        logging.info("Rimosso il file %s", BAD_STAMPINGS_FILE)
        if still_good_stampings:
            bad, errors = StampingImporter.sendStampingsOnEpas(
                still_good_stampings)
            bad_stampings += bad
            parsing_errors += errors
        if len(bad_stampings) > 0:
            # Remove possible duplicates.
            bad_stampings = set(bad_stampings)
            FileUtils.storestamping(bad_stampings_path, bad_stampings)
        if len(parsing_errors) > 0:
            FileUtils.storestamping(parsing_errors_path, parsing_errors)
    else:
        logging.info("File %s non presente.", BAD_STAMPINGS_FILE)
def __init__(self):
    # Browser session used by the scraping operations of this class.
    self.driver = SeleniumUtils.getChromedriver(__file__)
    # Display limit for charts — presumably caps how many chart rows are
    # shown; confirm at the call sites.
    self.CHART_DISP_LIMIT = 5
    # The first entry of "sbis_login_info" is the main login user.
    settings = FileUtils.open_file(__file__, "/config.json")
    self.user = settings["sbis_login_info"][0]
def __init__(self):
    # Browser session for the IPO application workflow.
    self.driver = SeleniumUtils.getChromedriver(__file__)
    # Number of applications submitted so far in this run.
    self.applyCount = 0
    # CSS selector for the IPO request button image.
    self.IPO_REQ_BUTTON_SELECTOR = "a[name] + table .fl01 a img[alt='申込']"
    # Login credentials for every member account.
    cfg = FileUtils.open_file(__file__, "/config.json")
    self.login_info_list = cfg["sbis_login_info"]
def process_stamping_files():
    """Import every stamping file found in STAMPINGS_DIR and send its
    stampings to Epas.

    Each file is deleted once processed; stampings rejected by Epas and
    lines that failed parsing are stored for a later retry.
    """
    bad_path = os.path.join(DATA_DIR, BAD_STAMPINGS_FILE)
    errors_path = os.path.join(DATA_DIR, PARSING_ERROR_FILE)
    rejected = []
    unparsable = []
    for name in os.listdir(STAMPINGS_DIR):
        logging.info("Processo il file %s per estrarne le timbrature", name)
        path = os.path.join(STAMPINGS_DIR, name)
        with open(path, 'r') as f:
            # splitlines() strips the trailing newlines; set() removes
            # duplicate stampings.
            stampings = set(f.read().splitlines())
        # Only contact Epas when there is at least one stamping.
        if stampings:
            bad, errors = StampingImporter.sendStampingsOnEpas(stampings)
            rejected += bad
            unparsable += errors
        os.remove(path)
        logging.info("Rimosso il file %s", path)
    if rejected:
        # Drop possible duplicates before persisting.
        FileUtils.storestamping(bad_path, set(rejected))
    if unparsable:
        FileUtils.storestamping(errors_path, unparsable)
def __init__(self):
    """Set up the browser driver, resolve the target month and load the
    SBI-bank login / money-move configuration.

    Usage: ``python3 sbi_monthly.py [month]`` — when no month argument is
    given, next month is used (wrapping December to January).
    """
    self.driver = SeleniumUtils.getChromedriver(__file__)
    self.verificationErrors = []
    # --- target month setting -------------------------------------------
    if len(sys.argv) > 2:
        warn_mes = "Sample:\n python3 sbi_monthly.py [write his memo month]"
        print(warn_mes)
        sys.exit()
    param_han_month = sys.argv[1] if len(sys.argv) == 2 else None
    # Default to next month. The original `month + 1` produced the invalid
    # value "13" when run in December, so wrap with modulo instead.
    han_month = str(datetime.datetime.today().month % 12 + 1)
    self.month = mojimoji.han_to_zen(
        han_month if param_han_month is None else param_han_month)
    # --- all member login info ------------------------------------------
    config = FileUtils.open_file(__file__, "/config.json")
    # Shape: "sbib_login_info": {"uid": ..., "upa": ..., "uspa": ...}
    sbib = config["sbib"]
    self.login_info = sbib["sbib_login_info"]
    self.move_money_info = sbib["move_money_info"]
import os
from RSAtools import RSAtools
from fileUtils import FileUtils

# This script generates:
#   - RSA public and private keys for the bank, the merchant and the client
#   - the cipher of the client's public key by the bank's SK, to guarantee
#     that the client belongs to this bank

if __name__ == '__main__':
    rsa = RSAtools()
    fileUtil = FileUtils()

    # Generate the RSA keys; each key is the tuple (n, e, d).
    rsaClient = rsa.generateRSAkey(1024)
    rsaCommercant = rsa.generateRSAkey(1024)
    rsaBanque = rsa.generateRSAkey(1024)

    # Write each key to its own file: "<n> <e>" for a public key,
    # "<n> <d>" for a private key.
    key_files = (
        ('clientPk', rsaClient[0], rsaClient[1]),
        ('clientSk', rsaClient[0], rsaClient[2]),
        ('commercantPk', rsaCommercant[0], rsaCommercant[1]),
        ('commercantSk', rsaCommercant[0], rsaCommercant[2]),
        ('banquePk', rsaBanque[0], rsaBanque[1]),
    )
    for path, modulus, exponent in key_files:
        with open(path, 'w') as key_file:
            key_file.write(str(modulus) + ' ' + str(exponent))
if y[i][rank - 1] == 1: pos_num += 1 else: neg_num += 1 print('ベースラインの正解率') print(pos_num / data_num) ######################################## # 0.訓練データ・テストデータの読込 ######################################## # ファイル読み込み file_name = 'C:\\Users\\okumura.tomoki\\PycharmProjects\\Mahjong\\resource\\data\\data.txt' lines = fu.read_file(file_name) data_x = [] data_y = [] for i, line in enumerate(lines): # 末尾の改行を除去してカンマで分割 data_str = line.strip('\r\n').split(',') # 数値型に変換 data = [int(i) for i in data_str] # 特徴とラベルに分割 data_x.append(data[1:5]) data_y.append(data[5:]) # numpy配列に変換
from fileUtils import FileUtils as fu
from haifuReader import HaifuReader
from haifuAnalyzer import HaifuAnalyzer

# Load the game-record (haifu) file.
haifu_path = 'C:\\Users\\okumura.tomoki\\PycharmProjects\\Mahjong\\resource\\haifu\\totsugekitohoku.txt'
shiai_list = HaifuReader().read(haifu_path)

# Extract the features (per the analyzer method: score differences and
# resulting ranks at the start of each hand).
analyzer = HaifuAnalyzer(shiai_list)
result = analyzer.get_diff_score_and_result_rank_per_kyokukaishi()

# Persist the extracted data for the training step.
data_path = 'C:\\Users\\okumura.tomoki\\PycharmProjects\\Mahjong\\resource\\data\\data.txt'
fu.write_2d_list(data_path, result)
def outputToFile(self, outputPath):
    # Serialize the parsed JSON dictionary to the given path.
    FileUtils().writeToFile(outputPath, self.jsonDict)
def __read_init_conf():
    # Build the initial configuration dict from config.json.
    return {"config": FileUtils.open_file(__file__, "/config.json")}
from facture import Facture
from fileUtils import FileUtils
from RSAtools import RSAtools

# Merchant-side verification script; expects on the command line:
#   argv[1] = invoice file, argv[2] = signed check, argv[3] = client's Pk.
if( len(sys.argv) < 4 ):
    print("You need to put all of the arguments (invoice file, signed check and client's Pk)")
    sys.exit()

# file opening — probes that both files are readable before proceeding.
# NOTE(review): these handles are not used nor closed in this chunk; if the
# rest of the script reads only through recupKey/readKey, they should be
# closed (or opened with `with`) — confirm against the continuation.
try:
    invoice_file = open(sys.argv[1], 'r')
    signedcheck_file = open(sys.argv[2], 'r')
except (OSError, IOError) as error:
    print("Error reading file : ", error)
    sys.exit()

fileutils = FileUtils()
rsatools = RSAtools()

# Getting the infos from the files
clepub_client = fileutils.recupKey(sys.argv[3])  # client's public key
facture = Facture(sys.argv[1])                   # the invoice
clepub_merchant_original = fileutils.recupKey("commercantPk")
bankPk = fileutils.recupKey("banquePk")

# Entries 0-1 of the signed-check file — presumably the merchant key
# ciphered with the client's key; verify against the check format.
merchantkeyclientciphered = []
clientkeybanqueciphered = []
merchantkeyclientciphered.append( fileutils.readKey(sys.argv[2],0))
merchantkeyclientciphered.append( fileutils.readKey(sys.argv[2],1))
def create_json(self):
    # Process the file at the path selected in the UI.
    # NOTE(review): the return value of read_file is discarded and "success"
    # is shown unconditionally — presumably the call has side effects
    # (producing the JSON); confirm in FileUtils.
    FileUtils.read_file(self.select_path.get())
    self.show("success")
elif STAMPINGS_SERVER_PROTOCOL == "smartclock": last_stamping, last_stampingdate = SmartClockManager.downloadstampings( ) if last_stamping is not None: FileUtils.save_last_request(last_stamping, last_stampingdate) SmartClockManager.process_stamping_files() if __name__ == "__main__": from config import LOGGING import timeit import sys import logging.config start = timeit.default_timer() FileUtils.checkdirs() logging.config.dictConfig(LOGGING) logging.getLogger("requests").setLevel(logging.WARNING) lock.lock() LOG_START = '##################################### AVVIO CLIENT TIMBRATURE ######################################' logging.info(LOG_START) if BAD_STAMPINGS_COMMAND in sys.argv: EpasClient.send_bad_stampings() else: process_stamping_files()
def __init__(self):
    # Browser session for the GMO workflow.
    self.driver = SeleniumUtils.getChromedriver(__file__)
    cfg = FileUtils.open_file(__file__, "/config.json")
    # GMO account settings from config.json.
    self.user = cfg["gmo"]
    # Google credentials for spreadsheet access.
    self.gc = GSpSheetUtils.getGoogleCred(__file__, 'mypro_sec.json')
#### How does a bank check if a check has been cashed #### #### or not ? #### #### - Save a file with the 40 first number of each #### #### merchant's key it encounters #### #### -> inside the file, puts on each line, the unique #### #### number the merchant has produced and the customer's #### #### key, separated by a space #### #### If it's fine, the bank with cash the check and add #### #### the line to the right file #### ################################################################### from fileUtils import FileUtils from RSAtools import RSAtools import os.path import sys fileutils = FileUtils() rsatools = RSAtools() #Making sure the arguments and the files are there clientkey_file = open(sys.argv[2], 'r') merchantkey_file = open(sys.argv[3], 'r') #Getting client's key and merchant's original key clientkey_original = fileutils.recupKey(sys.argv[2]) merchant_key_original = fileutils.recupKey(sys.argv[3]) #Getting client's key ciphered by the bank and bank's private key clientkeybanqueciphered = [] clientkeybanqueciphered.append(fileutils.readKey(sys.argv[1],2)) clientkeybanqueciphered.append(fileutils.readKey(sys.argv[1],3)) bank_publickey = fileutils.recupKey('banquePk')
def parse_fetch_details_write_csv():
    """Join the scraped voter records with the fetched per-elector details
    and write the combined rows to 'parsedCsvAll.csv'.

    Records whose elector ID is missing from the fetched details are
    collected in `rem_details` (currently unused — TODO: persist or
    re-fetch them).
    """
    fts = FileUtils()
    # To rebuild the details cache:
    #   fts.concat_files("backup/voterDetails*.txt", 'concatVoterDetails.txt')
    records = load_records('pickle', 'voterRecords.pcl')
    details = fts.load_json('concatVoterDetails.txt')
    # Compiled once, hoisted out of the loop; the original used the
    # invalid escape '\/' — '/' needs no escaping.
    elid_pattern = re.compile(r'>([a-zA-Z0-9/]+)<')
    rem_details = []
    # `with` guarantees the CSV file is flushed and closed (the original
    # leaked the handle).
    with open("parsedCsvAll.csv", 'w', encoding='utf-8',
              newline='\n') as csvFile:
        csvWriter = csv.writer(csvFile, delimiter=',')
        csvWriter.writerow(['First Name', 'Middle Name', 'Relation Name',
                            'House Name', 'Serial No', 'LAC No', 'PS No',
                            'Status', 'Age', 'Gender', 'ELID No'])
        for rec in records:
            # rec: Name, RelationName, HouseName, SerialNo, LACNo, PSNo, ...
            # Split "First Middle..." on the first space.
            if ' ' in rec[0]:
                splt = rec[0].split(' ')
                firstN = splt[0]
                midN = ' '.join(splt[1:])
            else:
                firstN = rec[0]
                midN = ''
            csvRow = [firstN, midN]
            csvRow.extend(rec[1:6])
            # Status
            csvRow.append(rec[7])
            # The elector ID is embedded in an HTML fragment at rec[6].
            elid = elid_pattern.search(rec[6])
            if elid:
                elid = elid.group(1)
            if elid in details:
                parsedDetails = details[elid]
            else:
                # Details not fetched yet: remember the record and skip it.
                rem_details.append(rec)
                continue
            # Age
            csvRow.append(parsedDetails[2][1])
            # Gender: a "Husband" relation implies a female elector.
            if re.search("Husband", parsedDetails[3][0]):
                csvRow.append('F')
            else:
                csvRow.append('M')
            csvRow.append(elid)
            # Write row to CSV
            csvWriter.writerow(csvRow)
def file_modify(self):
    # Apply the modification and surface the resulting message in the UI.
    self.show(FileUtils.file_modify(self.select_path.get()))
def __init__(self):
    # Browser session; the second argument toggles an alternate driver
    # mode — confirm its meaning in SeleniumUtils.
    self.driver = SeleniumUtils.getChromedriver(__file__, True)
    # Capture-target list, loaded from CSV once at construction.
    self.capTgtList = FileUtils.readCsv("cap_tgt_list.csv")
#!/usr/bin/env python
from parser import JsonParser
from fileUtils import FileUtils
from exceptionHandler import jsonParserException

# Driver: read a JSON file, parse it, print the resulting dict and write
# it back out.
if __name__ == '__main__':
    mParser = JsonParser()
    mFileUtils = FileUtils()
    try:
        jsonStr = mFileUtils.readFromFile('JsonFiles/json2.txt')
        mParser.parseJson(jsonStr)
        mParser.printJsonDict()
        mParser.outputToFile('OutputFiles/output2.txt')
    except jsonParserException as e:
        # Parser-specific failure (malformed JSON).
        # The original `print e` is Python-2-only syntax; the parenthesized
        # form behaves identically on Python 2 and works on Python 3.
        print(e)
    except Exception as e:
        # Any other failure (I/O errors, etc.).
        print(e)
def downloadstampings():
    """Download the stampings from the badge reader via FTP.

    :return: the most recent stamping downloaded and its date, if present;
             (None, None) otherwise.
    """
    last_stamping, from_date = FileUtils.load_last_request()
    now = datetime.now()
    # Remove any files left over from the previous run.
    command_file = os.path.join(BASE_DIR, COMMAND_FILE)
    if os.path.exists(command_file):
        os.remove(command_file)
    log_file = os.path.join(BASE_DIR, LOG_FILE)
    if os.path.exists(log_file):
        os.remove(log_file)
    stamping_file = os.path.join(BASE_DIR, STAMPINGS_FILE)
    if os.path.exists(stamping_file):
        os.remove(stamping_file)
    if from_date is not None:
        formatted_date = from_date.__format__(F4_COMMAND_DATA_FORMAT)
    else:
        # No previous request: ask the reader for the last n days of
        # stampings.
        first_request_date = datetime.combine(
            now - timedelta(DAYS_TO_DOWNLOAD), now.time().min)
        formatted_date = first_request_date.__format__(
            F4_COMMAND_DATA_FORMAT)
    # Create the LTCOM.COM file containing the F4 command.
    filter_command = "%s%s" % (STAMPING_FILTER_COMMAND, formatted_date)
    with open(command_file, 'w') as new_command_file:
        new_command_file.writelines("%s\r\n" % filter_command)
    logging.info(
        'Tentativo di connessione al lettore %s tramite protocollo FTP',
        BADGE_READER_IP)
    ftp = None
    try:
        ftp = FTP()
        logging.debug(
            f"Tenativo di connessione al lettore {BADGE_READER_IP}:{BADGE_READER_PORT}"
            + f"(connection timeout = {FTP_CONNECTION_TIMEOUT}) in corso...")
        ftp.connect(BADGE_READER_IP, int(BADGE_READER_PORT),
                    FTP_CONNECTION_TIMEOUT)
        ftp.login(BADGE_READER_USER, BADGE_READER_PSW)
        logging.info("Connessione al lettore effettuata: %s" % ftp.welcome)
        clear_log = ftp.delete(LOG_FILE)
        logging.info('Ripuliti messaggi di log dal file %s: %s', LOG_FILE,
                     clear_log)
        # Send the F4 command to the reader through the LTCOM.COM file.
        send_com = ftp.storbinary('STOR %s' % COMMAND_FILE,
                                  open(command_file, 'rb'))
        logging.info("Inviato comando %s al lettore: %s", filter_command,
                     send_com)
        # Wait a few seconds to be sure the reader has processed the command.
        time.sleep(WAIT_SECONDS)
        log_size = ftp.size(LOG_FILE)
        # A non-zero size means the reader has logged something for the
        # command; check below that no error occurred.
        if log_size == 0:
            logging.warning(
                'Nessun messaggio di risposta trovato nel file %s. Nessuna timbratura scaricata',
                LOG_FILE)
            return None, None
        # Download the LTCOM.LOG log file.
        ftp.retrbinary('RETR ' + LOG_FILE, open(log_file, 'wb+').write)
        # Read the first line, which contains the answer to the sent command.
        if os.path.exists(log_file):
            if CHECK_SUCCESS_MSG:
                with open(log_file, 'r') as f:
                    first_line = f.readline().strip()
                response = re.search("Rx\d{4}", first_line).group()
                # Verify that the reader's message is the success one.
                if response != SUCCESS_MSG:
                    logging.error(
                        "Risposta del lettore inattesa, nessuna timbratura scaricata: %s",
                        response)
                    return None, None
                logging.info("Risposta del lettore al comando %s: %s",
                             STAMPING_FILTER_COMMAND, response)
            else:
                logging.debug(
                    "Ignorata verifica del SUCCESS_MSG del lettore, parametro CHECK_SUCCESS_MSG != True"
                )
        # Download the LTCOM.TRN file containing the stampings.
        get_stamp = ftp.retrbinary('RETR ' + STAMPINGS_FILE,
                                   open(stamping_file, 'wb+').write)
        logging.info("File %s correttamente scaricato dal lettore: %s",
                     STAMPINGS_FILE, get_stamp)
        with open(stamping_file, 'r') as f:
            stampings_received = f.read().splitlines()
        # Remove the last stamping, already received in a previous run.
        if last_stamping in stampings_received:
            stampings_received.remove(last_stamping)
        if len(stampings_received) > 0:
            filename = now.__format__(STAMPINGS_FILE_FORMAT)
            new_stampings_file = os.path.join(STAMPINGS_DIR, filename)
            with open(new_stampings_file, 'w') as new_file:
                for stamping in stampings_received:
                    new_file.write("%s\n" % (stamping, ))
            logging.info('Salvate %s timbrature nel file %s',
                         len(stampings_received), filename)
        else:
            logging.info('Nessuna nuova timbratura ricevuta.')
            return None, None
        return Archive.archive_and_check_stampings(stampings_received)
    except Exception as e:
        logging.error("Errore durante il download delle timbrature: %s", e)
    finally:
        # Close the FTP connection.
        if ftp is not None and ftp.sock is not None:
            ftp.quit()
    return None, None