def __init__(self):
    super().__init__()  # constructor of the super class
    if not Exists('Songs'):
        MakeDirs('Songs')
    if not Exists('MappedSongs'):
        MakeDirs('MappedSongs')
    try:
        self.iconphoto(True, Photo(file='Res/Logo.png'))  # setting the icon of the app
    except Exception:
        pass
    if IsAdmin():  # checking if the user has admin rights
        HEIGHT = int(self.winfo_screenheight() * 0.3)  # window height is 30% of the screen height
        WIDTH = int(HEIGHT * 1.5)  # window width is 1.5 times the window height
        Sizes = str(WIDTH) + 'x' + str(HEIGHT)  # converting the sizes to a geometry string
        X = int((self.winfo_screenwidth() - WIDTH) / 2)  # x offset that centers the window horizontally
        Y = int((self.winfo_screenheight() - HEIGHT) / 2)  # y offset that centers the window vertically
        Pos = '+' + str(X) + '+' + str(Y)  # formatting the position
        self.geometry(Sizes + Pos)  # setting size and position of the window
        self.title('Genshin Impact Lyre Autoplay')  # setting the title of the window
        GenshinLyrePlayer.Screens[Home.Name] = Home(self)  # initializing the home screen
        GenshinLyrePlayer.Screens[CompilationScreen.Name] = CompilationScreen(self)  # initializing the compilation screen
        GenshinLyrePlayer.Raise(Home.Name)
        for Screen in GenshinLyrePlayer.Screens:
            GenshinLyrePlayer.Screens[Screen].place(anchor='n', relx=0.5, rely=0, relheight=1, relwidth=1)
    else:  # if the user has no admin rights
        self.AlternativeStart()  # show a warning to restart with admin rights
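# The constructor above leans on thin helper aliases (Exists, MakeDirs, Photo,
# IsAdmin) whose definitions are not shown. A minimal sketch of plausible
# bindings, assuming the stock library modules and a Windows-only admin check;
# the real project may define them differently:
from os.path import exists as Exists
from os import makedirs as MakeDirs
from tkinter import PhotoImage as Photo
import ctypes

def IsAdmin():
    # hypothetical helper: True when the current process has admin rights
    try:
        return ctypes.windll.shell32.IsUserAnAdmin() != 0
    except AttributeError:
        # ctypes.windll only exists on Windows; elsewhere assume no elevation
        return False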
def __init__(self, filename, backend=JSONBackend, read=True):
    self.filename = filename
    if Exists(filename) and read:
        # read the whole file and close the handle immediately
        with open(filename) as config_file:
            data = config_file.read()
    else:
        data = None
    NodeManager.__init__(self, data, backend)
def walkInDir(dir_path):
    files_export = []
    if Exists(dir_path) and Isdir(dir_path):
        for root, dirs, files in Walk(dir_path, topdown=False):
            for name in files:
                # append the file path to the list
                files_export.append(Join(root, name))
    return files_export
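# walkInDir expects the capitalised aliases Exists, Isdir, Walk and Join.
# A minimal sketch assuming they simply wrap os / os.path, plus a usage
# example with a hypothetical 'Songs' folder:
from os import walk as Walk
from os.path import exists as Exists, isdir as Isdir, join as Join

song_files = walkInDir('Songs')  # returns [] when the folder is missing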
def CopyDir(Dir_, ToDir_, ReWrite_=False):
    """
    Copy the folder Dir_ into the folder ToDir_ together with all nested subdirectories and files.
    @param Dir_: Folder/directory being copied.
    @param ToDir_: Folder/directory into which Dir_ is copied.
    @param ReWrite_: Overwrite the destination directory if it already exists.
    @return: The function returns the result of the operation, True/False.
    """
    try:
        to_dir = ToDir_ + '/' + BaseName(Dir_)
        if Exists(to_dir) and ReWrite_:
            shutil.rmtree(to_dir, 1)
        shutil.copytree(Dir_, to_dir)
        return True
    except Exception:
        log.fatal(u'Error copying folder <%s> into folder <%s>' % (Dir_, ToDir_))
        return False
def CloneDir(Dir_, NewDir_, ReWrite_=False):
    """
    Copy the entire contents of the folder Dir_ into a folder with the new name NewDir_.
    @param Dir_: Folder/directory being copied.
    @param NewDir_: New name of the folder/directory.
    @param ReWrite_: Overwrite the destination directory if it already exists.
    @return: The function returns the result of the operation, True/False.
    """
    try:
        if Exists(NewDir_) and ReWrite_:
            shutil.rmtree(NewDir_, 1)
        MakeDirs(NewDir_)
        for sub_dir in GetSubDirs(Dir_):
            # copytree needs a non-existing destination, so each subdirectory
            # is copied into a matching subdirectory of NewDir_
            shutil.copytree(sub_dir, NewDir_ + '/' + BaseName(sub_dir))
        for file_name in GetFiles(Dir_):
            icCopyFile(file_name, NewDir_ + '/' + BaseName(file_name))
        return True
    except Exception:
        log.fatal(u'Error copying the contents of folder <%s> into folder <%s>' % (Dir_, NewDir_))
        return False
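# CopyDir and CloneDir assume module-level helpers (BaseName, GetSubDirs,
# GetFiles, icCopyFile, log) and an imported shutil; only the standard-library
# pieces are sketched here, and the directory names below are hypothetical:
import shutil
from os import makedirs as MakeDirs
from os.path import basename as BaseName, exists as Exists

CopyDir('Songs', 'Backup', ReWrite_=True)      # -> Backup/Songs, overwriting any old copy
CloneDir('Songs', 'Songs_old', ReWrite_=True)  # duplicate the contents under a new name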
def main():
    module = AnsibleModule(argument_spec=dict(
        # arguments here
        save_name=dict(required=True, type='str'),
        action=dict(required=True, type='str'),
        path_to_save=dict(required=False, type='str'),
        block_size=dict(required=False, type='int'),
        restore_date=dict(required=False, type='str'),
        mysql_host=dict(required=True, type='str'),
        mysql_user=dict(required=True, type='str'),
        mysql_passwd=dict(required=True, type='str'),
        mysql_db=dict(required=True, type='str'),
        ftp_host=dict(required=True, type='str'),
        ftp_user=dict(required=True, type='str'),
        ftp_passwd=dict(required=True, type='str')))

    # get params
    save_name = module.params.get("save_name")
    action = module.params.get("action")
    mysql_host = module.params.get("mysql_host")
    mysql_user = module.params.get("mysql_user")
    mysql_passwd = module.params.get("mysql_passwd")
    mysql_db = module.params.get("mysql_db")
    ftp_host = module.params.get("ftp_host")
    ftp_user = module.params.get("ftp_user")
    ftp_passwd = module.params.get("ftp_passwd")

    # variables for module.exit_json
    output = ""
    changed = False

    # instantiate db
    db = DB(mysql_host, mysql_user, mysql_passwd, mysql_db,  # database connection
            ftp_host, ftp_user, ftp_passwd)  # ftp connection

    if action == "save":
        if not module.params["path_to_save"]:
            module.exit_json(changed=False, ansible_module_results="path_to_save is missing.", failed=True)
        path_to_save = module.params.get("path_to_save")
        # get param (or use default value)
        if not module.params["block_size"]:
            blockSize = 4096  # default for ext4
        else:
            blockSize = module.params.get("block_size")

        # check if the path exists and if it's a file or a directory
        if Exists(path_to_save):
            if Isfile(path_to_save):
                # path is a single file
                files = [path_to_save]
            else:
                # path is a directory: get its files
                files = walkInDir(path_to_save)

            lastSaveId = db.get_last_saveid_by_savename(save_name)[0]["max(id)"]
            # mockup
            # lastSaveId = 1
            if db.create_save(save_name, str(Datetime.now())):
                db_files = db.get_files_of_save(lastSaveId)
                for file in files:
                    # compute the actual md5 of the file
                    hashfile = md5()
                    with open(file, 'rb') as fopen:
                        sliced_content = fopen.read(blockSize)
                        while sliced_content:
                            hashfile.update(sliced_content)
                            sliced_content = fopen.read(blockSize)
                    # get both name and directory of the file
                    file_dir, file_name = SplitFile(file)
                    compute_blocks_flag = True
                    db_file_id = -1
                    # check hashes of known files
                    for db_file in db_files:
                        if db_file["NAME"] == file_name:
                            # check hashes
                            if db_file["HASH"] == hashfile.hexdigest():
                                # build a list of all known locations of the file
                                file_locations = []
                                for loc in db.get_locations_by_fileid(db_file["ID"]):
                                    file_locations.append(loc["location"])
                                # check if the location is already known
                                if file_dir in file_locations:
                                    # no changes for this file:
                                    # just insert references for the current save and continue
                                    db.create_file_references(db_file["ID"], db_file["location"])
                                    compute_blocks_flag = False
                                else:
                                    # file already exists but has been moved or copied:
                                    # update the location
                                    db.create_file_references(db_file["ID"], file_dir)
                                    compute_blocks_flag = False
                            # else, the file has been modified: we have to create the file and compute blocks
                            db_file_id = db_file["ID"]
                            break
                    # no changes for the current file, continue to the next file to save
                    if not compute_blocks_flag:
                        continue
                    else:
                        fileid = db.create_file(file_name, GetSizeOfThis(file), hashfile.hexdigest(), file_dir)
                        # get the stored hash blocks
                        db_hashes = {}
                        for db_hash in db.get_hashblocks_of_file(db_file_id):
                            db_hashes[db_hash["BLOCKNUMBER"]] = db_hash["HASH"]
                        # compute the hash of each block
                        with open(file, 'rb') as fopen:
                            # read the file and slice it in blockSize chunks
                            block = fopen.read(blockSize)
                            block_number = 0
                            while block:
                                hash_block = md5(block)
                                if (not len(db_hashes)
                                        or len(db_hashes) < block_number + 1
                                        or hash_block.hexdigest() != db_hashes[block_number]):
                                    # hashes are different, we have to re-upload the block
                                    db.create_block(block_number, block, hash_block.hexdigest(), fileid)
                                else:
                                    db.create_block_references(hash_block.hexdigest(), fileid)
                                block_number += 1
                                block = fopen.read(blockSize)
                output = 'saved 100 per 100 ok'
                changed = True
            else:
                output = "Can't create save object in database"
        else:
            output = "The given path doesn't exist on the host."

    elif action == "restore":
        # initialize variable
        restore_date = None
        if module.params["restore_date"]:
            restore_date = module.params.get("restore_date")
        # if no specific date is set, get the last save id for this save_name
        if restore_date is None:
            lastSaveId = db.get_last_saveid_by_savename(save_name)[0]["max(id)"]
        else:
            lastSaveId = db.get_saveid_by_savedate(restore_date)[0]["id"]

        for restore_file in db.get_files_of_save(lastSaveId):
            # if the folder doesn't exist, create it
            if not (Exists(restore_file["location"]) and Isdir(restore_file["location"])):
                makedirs(restore_file["location"])
            # erase / create the file
            restored_file = open(Join(restore_file["location"] + '/', restore_file["NAME"]), 'wb')
            restored_file.close()
            # store the blocks in the file
            with open(Join(restore_file["location"] + '/', restore_file["NAME"]), 'ab+') as restored_file:
                for db_hash in db.get_hashblocks_of_file(restore_file["ID"]):
                    block = db.get_block(db_hash["HASH"])
                    restored_file.write(block)
        output = "restoration 100 per 100 ok"
        changed = True
    else:
        output = "Unknown action \"" + action + "\". Available: 'save' or 'restore'"

    # export something to the ansible output
    module.exit_json(changed=changed, ansible_module_results=output)
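# The 'save' branch above de-duplicates at block level: the file is read in
# blockSize chunks, each chunk is hashed with md5, and a block is only
# re-uploaded when its hash differs from the one stored for that position.
# A self-contained sketch of that comparison, with a plain dict standing in
# for the DB lookups (function and parameter names here are hypothetical):
from hashlib import md5

def changed_blocks(path, stored_hashes, block_size=4096):
    """Yield (block_number, block) for every chunk whose md5 digest differs
    from stored_hashes.get(block_number)."""
    with open(path, 'rb') as fh:
        block_number = 0
        block = fh.read(block_size)
        while block:
            if stored_hashes.get(block_number) != md5(block).hexdigest():
                yield block_number, block
            block_number += 1
            block = fh.read(block_size)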