def handler(self, request, db):
    """Create a directory for a user.

    Expects ``request.json`` to contain 'user_id', 'parent_id' and
    'dir_name'.  A parent_id of '0' means "create under the top-level
    directory"; otherwise the parent directory must already exist.

    Returns the ``str()`` of a result dict:
    ``{'result': 'success', 'content': {'dir_id': ..., 'last_time': ...}}``
    on success, or ``{'result': 'failed'}`` on any database failure.

    Raises:
        Exception: when the request JSON is missing or lacks a required key.
    """
    if not request.json or \
            'user_id' not in request.json or \
            'parent_id' not in request.json or \
            'dir_name' not in request.json:
        # BUG FIX: the original concatenated request.json (a dict or None)
        # directly onto a str, which raised TypeError instead of the
        # intended Exception with a readable message.
        raise Exception('request json error:' + str(request.json))
    user_id = request.json['user_id']
    parent_id = request.json['parent_id']
    dir_name = request.json['dir_name']
    dir_id = self.get_uuid()
    last_time = int(time.time() * 1000)  # epoch milliseconds
    ret = True
    if parent_id == '0':
        # Create under the top-level directory.
        if not Db_executer.create_dir(db, user_id, '0', dir_id,
                                      dir_name, last_time):
            print('Create_dir_handler:Db_executer failed')
            ret = False
    else:
        # Check that the parent directory exists before creating under it.
        if not Db_executer.dir_id_exists(db, parent_id):
            print('Create_dir_handler:parent_id:%s not exists' % (parent_id))
            ret = False
        else:
            # Create the directory under the given parent.
            if not Db_executer.create_dir(db, user_id, parent_id, dir_id,
                                          dir_name, last_time):
                print('Create_dir_handler:Db_executer failed')
                ret = False
    ret_json = {}
    if ret:
        ret_content = {}
        ret_content['dir_id'] = dir_id
        ret_content['last_time'] = last_time
        ret_json['result'] = 'success'
        ret_json['content'] = ret_content
    else:
        ret_json['result'] = 'failed'
    return str(ret_json)
def insert_to_db(self, user_id, parent_id, file_id, file_name, format):
    """Insert a file record under ``parent_id`` ('0' = top level).

    Returns True on success, False when the parent directory does not
    exist or the database call fails.

    NOTE(review): ``db`` is not defined in this method and is read as a
    global, exactly as in the original code — confirm where it is bound.
    """
    last_time = int(time.time() * 1000)  # epoch milliseconds
    ret = True
    if parent_id == '0':
        # Create under the top-level directory.
        if not Db_executer.create_file(db, user_id, '0', file_id,
                                       file_name, format, last_time):
            print('Create_file_handler:Db_executer failed')
            ret = False
    else:
        # Check that the parent directory exists before creating under it.
        if not Db_executer.dir_id_exists(db, parent_id):
            print('Create_file_handler:parent_id:%s not exists' % (parent_id))
            ret = False
        else:
            # BUG FIX: the original called create_dir(db, user_id,
            # parent_id, dir_id, dir_name, last_time) here — dir_id and
            # dir_name are undefined in this function (NameError at
            # runtime). This branch must create a FILE, mirroring the
            # parent_id == '0' branch above.
            if not Db_executer.create_file(db, user_id, parent_id, file_id,
                                           file_name, format, last_time):
                print('Create_file_handler:Db_executer failed')
                ret = False
    return ret
# Watches ``.\files`` for newly added .csv files whose name contains
# 'coronavirus', after running a doses analysis on the coronavirus DB.
import os
import time
from db_executer import Db_executer
from my_logger import logger

path_to_watch = r'.\files'
# Snapshot of the directory contents at startup (names -> None).
before = dict([(f, None) for f in os.listdir(path_to_watch)])
db_name = r'E:\Moje\Python szkolenie\Projekt-zaliczenie\coronavirus_italy.db'
db = Db_executer(db_name)
db.doses_analysis()
# NOTE(review): busy-wait loop — ``time`` is imported but never used;
# presumably a time.sleep(...) was intended here. Confirm.
# NOTE(review): ``before`` is never refreshed inside the loop, so the
# same added files are re-detected on every iteration — confirm intended.
while 1:
    after = dict([(f, None) for f in os.listdir(path_to_watch)])
    added = []
    for name in after:
        if not name in before:
            # Only .csv files count as "added".
            ext = os.path.splitext(name)[-1].lower()
            if ext == ".csv":
                added.append(name)
    removed = []
    for name in before:
        if not name in after:
            removed.append(name)
    if added:
        for added_file in added:  # for each file in the added list:
            if 'coronavirus' in added_file:
                # Joining the characters of a filename reproduces the
                # filename itself; this just prints added_file.
                print("".join([str(name) for name in added_file]))
# Prints all patients from the clinic DB, then demonstrates operator.add.
from operator import add  # FIX: import the public name, not the dunder __add__
from db_executer import Db_executer

db_name = r"C:\Users\cp24\Desktop\Akademia Kodu\Project2\clinic.db"
db = Db_executer(db_name)
# FIX: the original left a TODO comment ("to w try except" — "put this in
# try/except"): guarantee the connection is closed even if the query fails.
try:
    print(db.select_all_tasks("patients"))
finally:
    db.close_conn()
print(__name__)
print(add(1, 2))
# Watches the current directory for newly added files whose name contains
# 'patients' and reads their lines (processing continues past this chunk).
import os
import time
import db_handler
from db_executer import Db_executer
from CustomLogger import logger

path_to_watch = '.'
# Snapshot of the directory contents at startup (names -> None).
before = dict([(f, None) for f in os.listdir(path_to_watch)])
# NOTE(review): a new Db_executer is constructed on every loop iteration
# and the loop never sleeps — confirm intended.
while 1:
    db_name = r'E:\Moje\Python szkolenie\Python zaawansowany\Project\clinic.db'
    db = Db_executer(db_name)
    after = dict([(f, None) for f in os.listdir(path_to_watch)])
    added = []
    for name in after:  # check whether a new file appeared
        if not name in before:  # comparison against the startup snapshot
            added.append(name)  # name absent from 'before' => newly added
    removed = []
    for name in before:
        if not name in after:
            removed.append(name)
    if added:  # enter only when the added list is non-empty
        logger.info(f"Added : {added}")
        for added_file in added:  # for each file in the added list:
            if 'patients' in added_file:  # only files named like patient data
                with open(added_file, 'r') as file:  # open the new file
                    rows = file.readlines()  # read it line by line
# Loads Antarctica analysis rows from a semicolon-delimited CSV, matches
# them to clinic patients by PESEL, and inserts them into the database.
from db_executer import Db_executer
import csv
from datetime import datetime
from send_sms import send_sms

db_name = r"C:\Users\cp24\Desktop\Akademia Kodu\Project\clinic.db"
db = Db_executer(db_name)
# Insertion works only once: if rows with these ids already exist in the
# database, a UNIQUE constraint error is raised.
csv_file = r'C:\Users\cp24\Desktop\Akademia Kodu\Project\Antarctica.csv'
all_db_patients = db.select_all_tasks("patients")
print(all_db_patients)


def antarctica_analysis(csv_file):
    # Parse each CSV row and insert matching analyses.
    # NOTE(review): assumes columns are roughly
    # [id, probe_number, pesel, analysis_id, timestamp, flag] based on the
    # indexing below — confirm against the CSV file.
    with open(csv_file, encoding="utf-8-sig") as an_file:
        all_rows = csv.reader(an_file, delimiter=';')
        for one_row in all_rows:
            #print(one_row)
            for one_patient in all_db_patients:
                #print(type(one_row[2]), one_row[2], type(one_patient[3]), one_patient[3])
                if one_row[0] != 'id':  # skip the header row
                    if one_row[2] == str(one_patient[3]):  # compare PESELs to obtain the patient id
                        # Normalize the timestamp to ISO format.
                        one_row[4] = str(datetime.strptime(one_row[4], "%d.%m.%Y %H:%M"))
                        # Map the 'T' flag to the strings 'True' / 'False'.
                        if one_row[5] == 'T':
                            one_row[5] = 'True'
                        else:
                            one_row[5] = 'False'
                        db.insert_analysis(
                            probe_number=one_row[1],
                            analysis_id=one_row[3],
# Watches the current directory for newly added files and logs them
# (per-file processing continues past this chunk).
import os
import time
from db_executer import Db_executer
from CustomLogger import logger
import datetime

logger.info(f"Start {datetime.datetime.now()}")
path_to_watch = '.'
# Snapshot of the directory contents at startup (names -> None).
before = dict([(f, None) for f in os.listdir(path_to_watch)])
# NOTE(review): a new Db_executer is constructed on every loop iteration
# and the loop never sleeps — confirm intended.
while 1:
    db_name = r'C:\Users\Kasia\PycharmProjects\W1_cwiczenia\Project\clinic.db'
    db = Db_executer(db_name)
    after = dict([(f, None) for f in os.listdir(path_to_watch)])
    added = []
    for name in after:  # check whether a new file appeared
        if not name in before:  # comparison against the startup snapshot
            added.append(name)  # name absent from 'before' => newly added
    removed = []
    for name in before:
        if not name in after:
            removed.append(name)
    if added:  # enter only when the added list is non-empty
        logger.info(f'Added:{added}')
        for added_file in added:  # for each added file