def AUTO_READ_ZIP_TRAKT(url):
    dp = xbmcgui.DialogProgress()  # progress dialog passed to unzip()
    _out = xbmc.translatePath(os.path.join('special://', 'home/tmp_trakt'))
    _in = url
    dp.create(AddonTitle, "Restoring File:", _in, 'Please Wait...')
    unzip(_in, _out, dp)
    link = Common.OPEN_URL(TRAKTURL)
    plugins = re.compile('<plugin>(.+?)</plugin>').findall(link)
    for match in plugins:
        ADDONPATH = xbmc.translatePath(os.path.join(ADDON_DATA, match))
        ADDONSETTINGS = xbmc.translatePath(os.path.join(ADDONPATH, 'settings.xml'))
        EXCLUDEMOVE = xbmc.translatePath(os.path.join(_out, match + '_settings.xml'))
        if os.path.exists(EXCLUDEMOVE):
            if not os.path.exists(ADDONPATH):
                os.makedirs(ADDONPATH)
            if os.path.isfile(ADDONSETTINGS):
                os.remove(ADDONSETTINGS)
            os.rename(EXCLUDEMOVE, ADDONSETTINGS)
    # shutil has no rmdir; rmtree removes the temporary folder recursively
    shutil.rmtree(_out, ignore_errors=True)
    dialog = xbmcgui.Dialog()
    dialog.ok(AddonTitle, 'Your Real Debrid & Trakt settings have been restored!', '', '')
def Restore_RD_TRAKT():
    link = Common.OPEN_URL(BASEURL)
    plugins = re.compile('<plugin>(.+?)</plugin>').findall(link)
    for match in plugins:
        ADDONPATH = xbmc.translatePath(os.path.join(ADDON_DATA, match))
        ADDONSETTINGS = xbmc.translatePath(os.path.join(ADDONPATH, 'settings.xml'))
        EXCLUDEMOVE = xbmc.translatePath(os.path.join(EXCLUDES_FOLDER, match + '_settings.xml'))
        if os.path.exists(EXCLUDEMOVE):
            if not os.path.exists(ADDONPATH):
                os.makedirs(ADDONPATH)
            if os.path.isfile(ADDONSETTINGS):
                os.remove(ADDONSETTINGS)
            os.rename(EXCLUDEMOVE, ADDONSETTINGS)
    for _ in range(8):
        Common.REMOVE_EMPTY_FOLDERS()
def write_sounds_to_files(sounds, output_path, definition_filename):
    """Write the audio segments to file and export the definition JSON file."""
    if os.path.isdir(output_path):
        input(">>> The folder '%s' already exists.\n>>> Press enter to overwrite it. Otherwise CTRL+C" % output_path)
        rmdir(output_path)
    os.mkdir(output_path)
    for sound in sounds:
        new_filename = '%s_%s_%s.wav' % (sound['instrument'], sound['octave'], sound['note'])
        sound['audio_segment'].export(os.path.join(output_path, new_filename), format='wav')
        sound['filename'] = new_filename
        del sound['audio_segment']
    with open(os.path.join(output_path, definition_filename), 'w') as f:
        json.dump(sounds, f, indent=2)
    print("Elementary sounds successfully written in '%s'" % output_path)
def __init__(self):
    """Fill the multiprocessing queue with the samples that still need SNV calling."""
    self.mp_queue = Queue()
    output_star = listdir(PATH_OUTPUT + "star/")
    if LIMIT:
        output_star = output_star[:LIMIT]
    for fil in output_star:
        if not isfile(PATH_OUTPUT + "star/" + fil + "/Aligned.sortedByCoord.out.bam"):
            print('no star bam file for {0} skipping'.format(fil))
            if isdir(PATH_OUTPUT + "star/" + fil) and CLEANING_MODE:
                rmdir(PATH_OUTPUT + "star/" + fil)
            continue
        if isfile("{0}/data/{1}/snv_filtered.vcf".format(OUTPUT_PATH_SNV, fil)):
            print('VCF file output already exists for {0} skipping...'.format(fil))
            continue
        print("file to be processed:", fil)
        self.mp_queue.put(fil)
    print("\n #### now launching multiprocessing analysis #### \n")
    self.processes = [TrSNVMultiprocessing(self.mp_queue, id=i)
                      for i in range(NB_PROCESS)]
def AUTO_READ_ZIP_TRAKT(url):
    dp = xbmcgui.DialogProgress()  # progress dialog passed to unzip()
    _out = xbmc.translatePath(os.path.join('special://', 'home/tmp_trakt'))
    _in = url
    dp.create(AddonTitle, "Restoring File:", _in, '')
    unzip(_in, _out, dp)
    link = Common.OPEN_URL('http://echocoder.com/other/rd_trakt.xml')
    plugins = re.compile('<plugin>(.+?)</plugin>').findall(link)
    for match in plugins:
        try:
            ADDONPATH = xbmc.translatePath(os.path.join(ADDON_DATA, match))
            ADDONSETTINGS = xbmc.translatePath(os.path.join(ADDONPATH, 'settings.xml'))
            EXCLUDEMOVE = xbmc.translatePath(os.path.join(_out, match + '_settings.xml'))
            if os.path.isfile(EXCLUDEMOVE):
                if not os.path.exists(ADDONPATH):
                    os.makedirs(ADDONPATH)
                if os.path.isfile(ADDONSETTINGS):
                    os.remove(ADDONSETTINGS)
                os.rename(EXCLUDEMOVE, ADDONSETTINGS)
        except OSError:
            pass
    # shutil has no rmdir; rmtree removes the temporary folder recursively
    shutil.rmtree(_out, ignore_errors=True)
def checkAndCreate(self, task, preferences, taskList):
    '''Check that the output directory exists and create it if it doesn't.
    Check for path collisions/previous renderings and resolve the matter.'''
    # check main output path
    if not os.path.exists(self.path):
        log.write('\033[31mOutput path doesn\'t exist!\033[0m')
        return False
    if not os.path.isdir(self.path):
        log.write('\033[31mOutput path is not a directory!\033[0m')
        return False
    if not os.access(self.path, os.W_OK):
        log.write('\033[31mYou don\'t have the right to write in the output path!\033[0m')
        return False
    # get necessary naming info
    fileName = task.path.split('/').pop()
    ext = fileName.rfind('.blend')
    if ext != -1:
        fileName = fileName[0:ext]
    scene = task.scene
    preset = task.preset
    if preset == '[default]':
        preset = preferences.presets.default
    if type(preferences.presets.getPreset(preset)) is Metapreset:
        groups = list(preferences.presets.getPreset(preset).groups.keys())
        groups = task.getUsefullGroup(groups, preferences)
    else:
        groups = []
    # generate the path dedicated to the blender file/scene and the preset used by the task
    mainPath = self.getMainPath(fileName, scene, preset)
    # check whether the path with the preset file name and scene name exists
    if os.path.exists(mainPath):
        # if the path exists, check for old renders and move them to a backup directory or erase them
        if self.overwrite:
            content = os.listdir(mainPath)
            for f in content:
                if os.path.isfile(mainPath + f):
                    os.remove(mainPath + f)
                else:
                    rmdir(mainPath + f)
        else:
            self.backup(mainPath, taskList)
    else:
        # if the path doesn't exist, make it
        os.makedirs(mainPath)
        if self.pattern.count('/L/') > 0:
            for g in groups:
                os.mkdir(mainPath + g)
    # create a file recording the state and settings of the task
    self.outputTaskInfo(task, groups, preferences, mainPath)
def BACKUP_RD_TRAKT():
    if not os.path.exists(USB):
        os.makedirs(USB)
    vq = _get_keyboard(heading="Enter a name for this backup")
    if not vq:
        return False, 0
    title = urllib.quote_plus(vq)
    backup_zip = xbmc.translatePath(os.path.join(USB, title + 'RD_Trakt_Settings.zip'))
    if not os.path.exists(EXCLUDES_FOLDER):
        os.makedirs(EXCLUDES_FOLDER)
    link = Common.OPEN_URL('http://pastebin.com/raw/CU2PSGze')
    plugins = re.compile('<plugin>(.+?)</plugin>').findall(link)
    dialog = xbmcgui.Dialog()
    for match in plugins:
        ADDONPATH = xbmc.translatePath(os.path.join(ADDON_DATA, match))
        ADDONSETTINGS = xbmc.translatePath(os.path.join(ADDONPATH, 'settings.xml'))
        EXCLUDEMOVE = xbmc.translatePath(os.path.join(EXCLUDES_FOLDER, match + '_settings.xml'))
        try:
            if os.path.exists(ADDONSETTINGS):
                copyfile(ADDONSETTINGS, EXCLUDEMOVE)
        except IOError:
            pass
    exclude_dirs = [' ']
    exclude_files = [" "]
    message_header = "Creating full backup..."
    message1 = "Archiving..."
    message2 = ""
    message3 = ""
    try:
        ARCHIVE_CB(EXCLUDES_FOLDER, backup_zip, message_header, message1,
                   message2, message3, exclude_dirs, exclude_files)
    except Exception:
        pass
    time.sleep(1)
    # remove the staging folder; shutil has no rmdir
    shutil.rmtree(EXCLUDES_FOLDER, ignore_errors=True)
    for _ in range(8):
        Common.REMOVE_EMPTY_FOLDERS()
    dialog.ok("[COLOR green][B]SUCCESS![/B][/COLOR]",
              'The backup was completed successfully!',
              "Backup Location: ",
              '[COLOR=yellow]' + backup_zip + '[/COLOR]')
def cleanup(self, images=True, videos=True):
    """Removes the local directories for the videos and images.

    :param images: remove the images dir
    :param videos: remove the videos dir
    """
    if images:
        rmdir(self.images_dir)
    if videos:
        rmdir(self.videos_dir)
def rm(path):
    """Remove path whether it is a directory or a file, ignoring errors."""
    try:
        rmdir(path)
    except OSError:
        pass
    try:
        remove(path)
    except OSError:
        pass
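# A pattern running through these snippets is confusion between the three
# standard-library removal calls; note that shutil.rmdir does not exist at all
# (calling it raises AttributeError). A minimal sketch of a helper that
# dispatches on path type -- the name remove_path is illustrative, not taken
# from any snippet above:
import os
import shutil

def remove_path(path):
    """Delete path, dispatching on what it is:
    os.remove      - a single file
    os.rmdir       - an *empty* directory only
    shutil.rmtree  - a directory tree, recursively
    """
    if os.path.isdir(path):
        shutil.rmtree(path, ignore_errors=True)
    elif os.path.exists(path):
        os.remove(path)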
def _bcolz_remove(self, name):
    '''Remove a bcolz array from disk'''
    path = os.path.join(self._get_dbpath('bcz'), name)
    if not os.path.exists(path):
        raise ValueError(f'{name} does not exist')
    rmdir(path)
def _remove_tree(path):
    # Clear compiled mods: walk bottom-up so each directory is emptied
    # before we try to remove it
    for root, dirs, files in os.walk(path, topdown=False):
        for file in files:
            try:
                os.remove(os.path.join(root, file))
            except PermissionError:
                print("Couldn't remove", file)
        for dir in dirs:
            try:
                rmdir(os.path.join(root, dir))
            except OSError:
                pass
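# A minimal usage sketch for the helper above, assuming its bare rmdir is
# os.rmdir (the demo paths are hypothetical): cleared bottom-up, the tree is
# emptied first, so only the top-level directory is left for the caller.
import os
import tempfile

base = tempfile.mkdtemp()
os.makedirs(os.path.join(base, "a", "b"))
open(os.path.join(base, "a", "b", "mod.pyc"), "w").close()

_remove_tree(base)  # removes mod.pyc, then b, then a (deepest entries first)
os.rmdir(base)      # base itself is empty now, so os.rmdir suffices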
def Backup_Login():
    check_path()
    if not os.path.exists(USB):
        os.makedirs(USB)
    vq = _get_keyboard(heading=cr1 + "Please enter a name for this backup" + cr2)
    if not vq:
        return False, 0
    title = urllib.quote_plus(vq)
    backup_zip = xbmc.translatePath(os.path.join(USB, title + '_Login_Data.zip'))
    if not os.path.exists(EXCLUDES_FOLDER):
        os.makedirs(EXCLUDES_FOLDER)
    link = base_info.OPEN_URL(BASEURL + 'login.xml')
    plugins = re.compile('<plugin>(.+?)</plugin>').findall(link)
    DIALOG = xbmcgui.Dialog()
    for match in plugins:
        ADDONPATH = xbmc.translatePath(os.path.join(ADDON_DATAPATH, match))
        ADDONSETTINGS = xbmc.translatePath(os.path.join(ADDONPATH, 'settings.xml'))
        EXCLUDEMOVE = xbmc.translatePath(os.path.join(EXCLUDES_FOLDER, match + '_settings.xml'))
        if os.path.exists(ADDONSETTINGS):
            copyfile(ADDONSETTINGS, EXCLUDEMOVE)
    exclude_dirs = [' ']
    exclude_files = [" "]
    message_header = cr1 + "Creating full backup..." + cr2
    message1 = cr1 + "Archiving..." + cr2
    message2 = ""
    message3 = ""
    ARCHIVE_CB(EXCLUDES_FOLDER, backup_zip, message_header, message1,
               message2, message3, exclude_dirs, exclude_files)
    time.sleep(1)
    # remove the staging folder; shutil has no rmdir
    shutil.rmtree(EXCLUDES_FOLDER, ignore_errors=True)
    for _ in range(8):
        base_info.REMOVE_EMPTY_FOLDERS()
    DIALOG.ok(cr + gn + " Wizard" + cr2,
              cr1 + 'The backup completed successfully!' + cr2,
              cr + "Backup Location: " + cr2,
              cr1 + backup_zip + cr2)
def AUTO_BACKUP_RD_TRAKT():
    TMP_TRAKT = xbmc.translatePath(os.path.join(HOME, 'tmp_trakt'))
    if not os.path.exists(TMP_TRAKT):
        os.makedirs(TMP_TRAKT)
    backup_zip = xbmc.translatePath(os.path.join(TMP_TRAKT, 'Restore_RD_Trakt_Settings.zip'))
    if not os.path.exists(EXCLUDES_FOLDER):
        os.makedirs(EXCLUDES_FOLDER)
    link = Common.OPEN_URL('http://pastebin.com/raw/CU2PSGze')
    plugins = re.compile('<plugin>(.+?)</plugin>').findall(link)
    found = 0
    for match in plugins:
        try:
            ADDONPATH = xbmc.translatePath(os.path.join(ADDON_DATA, match))
            ADDONSETTINGS = xbmc.translatePath(os.path.join(ADDONPATH, 'settings.xml'))
            EXCLUDEMOVE = xbmc.translatePath(os.path.join(EXCLUDES_FOLDER, match + '_settings.xml'))
            if os.path.exists(ADDONSETTINGS):
                copyfile(ADDONSETTINGS, EXCLUDEMOVE)
                found = 2
        except IOError:
            pass
    if found == 2:
        exclude_dirs = [' ']
        exclude_files = [" "]
        message_header = "Creating full backup..."
        message1 = "Archiving..."
        message2 = ""
        message3 = ""
        ARCHIVE_CB(EXCLUDES_FOLDER, backup_zip, message_header, message1,
                   message2, message3, exclude_dirs, exclude_files)
        time.sleep(1)
        shutil.rmtree(EXCLUDES_FOLDER, ignore_errors=True)
        MARKER_TRAKT = xbmc.translatePath(os.path.join(TMP_TRAKT, 'marker.xml'))
        open(MARKER_TRAKT, 'w').close()
def BACKUP_RD_TRAKT():
    if not os.path.exists(USB):
        os.makedirs(USB)
    vq = _get_keyboard(heading="Enter a name for this backup")
    if not vq:
        return False, 0
    title = urllib.quote_plus(vq)
    backup_zip = xbmc.translatePath(os.path.join(USB, title + 'RD_Trakt_Settings.zip'))
    if not os.path.exists(EXCLUDES_FOLDER):
        os.makedirs(EXCLUDES_FOLDER)
    link = Common.OPEN_URL(TRAKTURL)
    plugins = re.compile('<plugin>(.+?)</plugin>').findall(link)
    dialog = xbmcgui.Dialog()
    for match in plugins:
        ADDONPATH = xbmc.translatePath(os.path.join(ADDON_DATA, match))
        ADDONSETTINGS = xbmc.translatePath(os.path.join(ADDONPATH, 'settings.xml'))
        EXCLUDEMOVE = xbmc.translatePath(os.path.join(EXCLUDES_FOLDER, match + '_settings.xml'))
        if os.path.exists(ADDONSETTINGS):
            copyfile(ADDONSETTINGS, EXCLUDEMOVE)
    exclude_dirs = [' ']
    exclude_files = [" "]
    message_header = "Creating full backup..."
    message1 = "Archiving..."
    message2 = ""
    message3 = "Please Wait"
    ARCHIVE_CB(EXCLUDES_FOLDER, backup_zip, message_header, message1,
               message2, message3, exclude_dirs, exclude_files)
    time.sleep(1)
    # remove the staging folder; shutil has no rmdir
    shutil.rmtree(EXCLUDES_FOLDER, ignore_errors=True)
    for _ in range(8):
        Common.REMOVE_EMPTY_FOLDERS()
    dialog.ok("[COLOR green][B]SUCCESS![/B][/COLOR]",
              'The backup was completed successfully!',
              "Backup Location: ",
              '[COLOR=yellow]' + backup_zip + '[/COLOR]')
def clone_axom():
    """Creates a fresh clone of axom"""
    cwd = os.getcwd()
    tmp_dir = os.path.abspath("_tmp_clone_%s" % (get_timestamp()))
    if os.path.isdir(tmp_dir):
        shutil.rmtree(tmp_dir)
    os.mkdir(tmp_dir)
    os.chdir(tmp_dir)
    print("[cloning axom into {0}]".format(pjoin(tmp_dir, "axom")))
    res = sexe("git clone https://github.com/LLNL/axom.git", echo=True)
    if res != 0:
        print("[ERROR: clone of axom repo failed]")
        sys.exit(res)
    os.chdir(cwd)
    return tmp_dir
def clone_axom():
    """Creates a fresh clone of axom"""
    cwd = os.getcwd()
    tmp_dir = os.path.abspath("_tmp_clone_%s" % (get_timestamp()))
    if os.path.isdir(tmp_dir):
        shutil.rmtree(tmp_dir)
    os.mkdir(tmp_dir)
    os.chdir(tmp_dir)
    print "[cloning axom into %s]" % pjoin(tmp_dir, "axom")
    res = sexe("git clone ssh://[email protected]:7999/atk/axom.git", echo=True)
    if res != 0:
        print "[ERROR: clone of axom repo failed]"
        sys.exit(res)
    os.chdir(cwd)
    return tmp_dir
def remove(f):
    try:
        if exists(f):
            if isdir(f):
                return rmdir(f, ignore_errors=True)
            else:
                return rmfile(f)
    except Exception as err:
        log('remove', f, err)
    return False
def backup(self, path, taskList):
    '''make backup moving operation'''
    # level up the backup
    self.upBackup(path, 1, taskList)
    # list the content of path and split backup elements into a dedicated list
    content = os.listdir(path)
    backup = []
    backupRegex = re.compile(r'^previous rendering \d+$')
    for c in content[:]:
        if backupRegex.match(c) is not None:
            backup.append(c)
            content.remove(c)
    # move all remaining files into a new first-level backup directory
    if len(content) > 0:
        if os.path.exists(path + 'task.setting') and os.path.isfile(path + 'task.setting')\
                and os.access(path + 'task.setting', os.R_OK):
            with open(path + 'task.setting', 'r') as taskFile:
                uid = xmlMod.fromstring(taskFile.read()).get('uid')
            taskList.upBackup(uid)
        os.mkdir(path + 'previous rendering 1')
        for c in content:
            os.rename(path + c, path + 'previous rendering 1/' + c)
    # apply the backup limit by erasing backups above the greatest allowed level
    if self.backupLimit > 0:
        for b in backup:
            level = int(b[19:])
            if level > self.backupLimit:
                settingPath = path + b + '/task.setting'
                if os.path.exists(settingPath) and os.path.isfile(settingPath)\
                        and os.access(settingPath, os.R_OK):
                    with open(settingPath, 'r') as taskFile:
                        uid = xmlMod.fromstring(taskFile.read()).get('uid')
                    taskList.eraseBackup(uid)
                rmdir(path + b)
def write_audio_segments(audio_segments, output_folder_path, original_folder_path, elementary_sounds_definition_filename):
    print("Writing new elementary sounds audio")
    if os.path.isdir(output_folder_path):
        input(">>> The folder '%s' already exists.\n>>> Press enter to overwrite it. Otherwise CTRL+C" % output_folder_path)
        rmdir(output_folder_path)
    os.mkdir(output_folder_path)
    # Write all the audio segments
    for sound in audio_segments:
        filepath = os.path.join(output_folder_path, sound['filename'])
        sound['audio_segment'].export(filepath, format='wav')
    # Copy the definition file to the output folder
    old_definition_filepath = os.path.join(original_folder_path, elementary_sounds_definition_filename)
    new_definition_filepath = os.path.join(output_folder_path, elementary_sounds_definition_filename)
    copyfile(old_definition_filepath, new_definition_filepath)
def AUTO_BACKUP_RD_TRAKT():
    TMP_TRAKT = xbmc.translatePath(os.path.join(HOME, 'tmp_trakt'))
    if not os.path.exists(TMP_TRAKT):
        os.makedirs(TMP_TRAKT)
    backup_zip = xbmc.translatePath(os.path.join(TMP_TRAKT, 'Restore_RD_Trakt_Settings.zip'))
    if not os.path.exists(EXCLUDES_FOLDER):
        os.makedirs(EXCLUDES_FOLDER)
    link = Common.OPEN_URL(TRAKTURL)
    plugins = re.compile('<plugin>(.+?)</plugin>').findall(link)
    for match in plugins:
        ADDONPATH = xbmc.translatePath(os.path.join(ADDON_DATA, match))
        ADDONSETTINGS = xbmc.translatePath(os.path.join(ADDONPATH, 'settings.xml'))
        EXCLUDEMOVE = xbmc.translatePath(os.path.join(EXCLUDES_FOLDER, match + '_settings.xml'))
        if os.path.exists(ADDONSETTINGS):
            copyfile(ADDONSETTINGS, EXCLUDEMOVE)
    exclude_dirs = [' ']
    exclude_files = [" "]
    message_header = "Creating full backup..."
    message1 = "Archiving..."
    message2 = ""
    message3 = "Please Wait"
    ARCHIVE_CB(EXCLUDES_FOLDER, backup_zip, message_header, message1,
               message2, message3, exclude_dirs, exclude_files)
    time.sleep(1)
    # remove the staging folder; shutil has no rmdir
    shutil.rmtree(EXCLUDES_FOLDER, ignore_errors=True)
def makemovie(plotfn, args_fixed, args, name='movie'):
    """Makes a movie out of plotfn, called with a series of args (a dictionary of lists)."""
    moviefile = '%s.gif' % name
    if os.path.exists(moviefile):
        os.remove(moviefile)
    if os.path.exists(name):
        shutil.rmtree(name)
    os.mkdir(name)
    # convert the dictionary of lists into a list of dictionaries
    sizes = {}
    maxsize = 0
    for arg, val in args.items():
        sizes[arg] = np.size(val)
        if sizes[arg] > maxsize:
            maxsize = sizes[arg]
    N = maxsize
    arglist = []
    for i in range(N):
        d = {}
        for k in args.keys():
            d[k] = args[k][i]
        for k in args_fixed.keys():
            d[k] = args_fixed[k]
        arglist.append(d)
    plt.ioff()
    for i in range(N):
        plt.figure()
        plotfn(**arglist[i])
        plt.savefig('%s/frame%i.png' % (name, i))
        plt.close()
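# Note that makemovie() saves the frames but never assembles moviefile itself.
# A hedged guess at the missing finishing step (not present in the source):
# ImageMagick's convert can glob the saved frames and write the gif.
import subprocess

name, moviefile = 'movie', 'movie.gif'  # as computed inside makemovie()
subprocess.call(['convert', '-delay', '10', '-loop', '0',
                 '%s/frame*.png' % name, moviefile])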
async def _manager_job(self, event, task_id):
    rule = self._get_rule_by_event(event)
    if not rule:
        return
    logger = SchedulerLogger(self._log, {"id": task_id})
    try:
        path = event.pathname
        if rule.action in ["copy", "move"]:
            dst = rule.src_re.sub(rule.dst_re, path)
            if not dst:
                raise RuntimeError(f"unable to {rule.action} '{path}', "
                                   f"resulting destination path is empty")
            if os.path.exists(dst) and not rule.overwrite:
                raise RuntimeError(f"unable to {rule.action} file from '{path}' "
                                   f"to '{dst}', path already exists")
            dst_dir = os.path.dirname(dst)
            if not os.path.isdir(dst_dir) and rule.auto_create:
                logger.info(f"create directory '{dst_dir}'")
                # find the first missing ancestor so its mode/owner can be set
                first_subdir = dst_dir
                while not os.path.isdir(first_subdir):
                    parent = os.path.dirname(first_subdir)
                    if not os.path.isdir(parent):
                        first_subdir = parent
                    else:
                        break
                try:
                    os.makedirs(dst_dir)
                    await self._set_mode_and_owner(first_subdir, rule, logger)
                except Exception as e:
                    raise RuntimeError(e)
            logger.info(f"{rule.action} '{path}' to '{dst}'")
            try:
                if rule.action == "copy":
                    if os.path.isdir(path):
                        shutil.copytree(path, dst)
                    else:
                        shutil.copy2(path, dst)
                else:
                    os.rename(path, dst)
                await self._set_mode_and_owner(dst, rule, logger)
            except Exception as e:
                raise RuntimeError(e)
        elif rule.action == "delete":
            logger.info(f"{rule.action} '{path}'")
            try:
                if os.path.isdir(path):
                    if rule.rec:
                        shutil.rmtree(path)
                    else:
                        os.rmdir(path)  # non-recursive: only removes an empty directory
                else:
                    os.remove(path)
            except Exception as e:
                raise RuntimeError(e)
    except RuntimeError as e:
        logger.error(e)
    except Exception as e:
        logger.exception(e)
def Restore_RD_TRAKT():
    link = Common.OPEN_URL(BASEURL + base64.b64decode(b'b3RoZXIvcmRfdHJha3QueG1s'))
    plugins = re.compile('<plugin>(.+?)</plugin>').findall(link)
    for match in plugins:
        ADDONPATH = xbmc.translatePath(os.path.join(ADDON_DATA, match))
        ADDONSETTINGS = xbmc.translatePath(os.path.join(ADDONPATH, 'settings.xml'))
        EXCLUDEMOVE = xbmc.translatePath(os.path.join(EXCLUDES_FOLDER, match + '_settings.xml'))
        if os.path.exists(EXCLUDEMOVE):
            if not os.path.exists(ADDONPATH):
                os.makedirs(ADDONPATH)
            if os.path.isfile(ADDONSETTINGS):
                os.remove(ADDONSETTINGS)
            os.rename(EXCLUDEMOVE, ADDONSETTINGS)
    for _ in range(8):
        Common.REMOVE_EMPTY_FOLDERS()
def train(file_name, out_dir, no_warning=False):
    global nlu_config
    # Separate lines with entities from lines without entities, for keyword training
    try:
        file_content = '\n'.join([i for i in open(file_name, 'r').read().split('\n')
                                  if (i != '' and ('(' in i or "#" in i))])
    except FileNotFoundError:
        raise FileNotFoundError("Kindly pass the correct name of the md file")
    try:
        if not isdir(out_dir):
            mkdir(out_dir)
        elif not no_warning:
            choice = input("The desired directory name for training already exists.\n"
                           "Proceeding may erase its contents. Should we proceed (y/n): ").lower()
            if 'n' in choice:
                raise UserInterruptError('Program execution cancelled')
            rmdir(out_dir)
            mkdir(out_dir)
        else:
            rmdir(out_dir)
            mkdir(out_dir)
        mkdir(out_dir + '/training_data')
        file_nlu = open(out_dir + '/training_data/nlu.md', 'w')
        file_nlu.write('## intent:debug\n')
    except FileNotFoundError:
        raise FileNotFoundError('Invalid output directory name passed!!')
    except PermissionError:
        raise PermissionError("File permissions not satisfied for the operation")
    entity_intents = {}  # holds data about which entity belongs to which intent
    intents = []
    entities_parsed = []
    intents_split = [i for i in file_content.split('##') if i != '']  # removing unnecessary blank lines
    for i in range(len(intents_split)):
        intent_name = intents_split[i].split('\n')[0].split(':')[1].strip()
        entities_detected = extract_entities(intents_split[i])
        intents.append(intent_name)
        for j in range(len(entities_detected)):
            if entities_detected[j] in entities_parsed:
                choice = entities_detected[j] + '"'
                while choice in entities_parsed:
                    choice = choice + '"'
                intents_split[i] = intents_split[i].replace('(' + entities_detected[j] + ')',
                                                            '(' + choice + ')')
                entities_detected[j] = choice
                print("Common entity detected, fixing...")
            entity_intents[entities_detected[j]] = i  # store the entity names under the intent
            entities_parsed.append(entities_detected[j])
        print('%d out of %d' % (i, len(intents_split)))  # track the intents parsed so far
    intents_split = [''] + intents_split
    file_nlu.write('##'.join(intents_split))
    rasa_data = (intents, entity_intents)
    file_nlu.close()
    open(out_dir + '/nlu_config.yml', 'w').write(nlu_config)
    feather_file = open(out_dir + '/feather.data', 'wb')
    pickle.dump(rasa_data, feather_file)
    # Separate lines without entities from lines with entities for fuzzy-wuzzy learning
    file_lines = [i for i in open(file_name, 'r').read().split('\n')
                  if (i != '' and ("#" in i or '(' not in i))]
    fuzzy_wuzzy = {}
    intents = []
    count = -1
    for i in file_lines:
        if "#" in i:
            count += 1
            intents.append(i.split(':')[1].strip())
        else:
            fuzzy_wuzzy[i[i.find('-') + 1:]] = count
    fuzzy_data = (intents, fuzzy_wuzzy)
    pickle.dump(fuzzy_data, feather_file)
    feather_file.close()
    system("python3 -m rasa_nlu.train -c %s/nlu_config.yml --data %s/training_data/nlu.md "
           "-o %s/models --fixed_model_name nlu --project current --verbose "
           % (out_dir, out_dir, out_dir))
#!/usr/bin/env python
"""
Author: Brandon Kallaher ([email protected])
Description: Uses parse_run.py to parse a full directory of runs
"""

from parse_run import parse_run
from os import walk
from os import environ
from fnmatch import filter as ffilter
from shutil import rmtree as rmdir  # needed to delete the tmp dir without deleting tmp files
import json

scores = {}

for root, dirs, files in walk(environ['HOME'] + "/runs"):
    for dir in dirs:
        for run_root, run_dirs, run_files in walk(environ['HOME'] + "/runs/" + dir):
            print(run_root)
            for item in ffilter(run_files, "*.csv"):
                scores[item[4:-4] + "-" + run_root[-4:]] = parse_run(run_root + '/', item,
                                                                     environ['HOME'] + "/runs/base.csv")
            rmdir(run_root + "/tmp", ignore_errors=True)
        print("")

f = open("/home/bkallaher/runs/scores.json", 'w+')
json.dump(scores, f)
f.close()
def _cleanup():
    try:
        shutil.rmtree(venv)
    except (OSError, IOError):
        pass
# -*- coding: utf-8 -*-
from functions import *
from github import Github

import os
import shutil

import config
import auth

gh = Github(auth.oauth)

if not os.path.exists('./cache'):
    os.makedirs('./cache')

for location in config.locations:
    if not os.path.exists('./cache/' + location + '.md'):
        usernames = getUsernames(config.locations[location], gh)
        top = getTop(usernames, config.result_size, gh)
        updateTop(top, location, config.locations[location], config.db_path, './cache')

# shutil.copy cannot copy a directory, so copy the cached files one by one,
# then remove the cache tree (shutil has no rmdir)
for name in os.listdir('./cache'):
    shutil.copy(os.path.join('./cache', name), config.output_path)
shutil.rmtree('./cache')
def closeEvent(self, QCloseEvent):
    rmdir('cache/')
    mkdir('cache/')
if "version=" in line: newsetup.write(line.replace(old_version, version)) else: newsetup.write(line) shutil.move("../setup.py.tmp", "../setup.py") print("Updated setup.py") if os.path.isfile("../swood.egg-info/PKG-INFO"): with open("../swood.egg-info/PKG-INFO") as old_pkginfo, open("../swood.egg-info/PKG-INFO.tmp", "w") as new_pkginfo: for line in old_pkginfo: if line.startswith("Version:"): new_pkginfo.write(line.replace(old_version, version)) else: new_pkginfo.write(line) shutil.move("../swood.egg-info/PKG-INFO.tmp", "../swood.egg-info/PKG-INFO") print("Updated PKG-INFO") while True: inp = input("Publish to PyPI? (Y/n): ").lower() if inp in ("yes", "y"): os.chdir("..") subprocess.run([sys.executable, "setup.py", "sdist", "--formats=gztar,bztar,zip"], check=True) os.chdir("dist") twine.cli.dispatch(["upload", *os.listdir()]) os.chdir("..") shutil.rmdir("dist") break elif inp in ("no", "n", ""): break
def get_files_for_diff(args):
    '''
    Fetch both the base and head versions of every file in the PR.
    The fetched files are turned into a patch with diff on the server,
    and the patch is then applied to SC. Because the AIX versions of
    diff and patch are too old, the files must be patched on the server.
    '''
    base_restore_directory = os.environ.get('PR_NUMBER')
    need_apply_files = 'apply_files.txt'
    assert need_apply_files, "Error: Restore applied file name can not be empty."
    base_sha = args.base_sha
    assert base_sha, "Error: Base sha can not be empty."
    head_sha = args.head_sha
    assert head_sha, "Error: Head sha can not be empty."
    github_token = args.github_token
    assert github_token, "Error: Github token can not be empty."
    pr_number = args.pr_number
    assert pr_number, "Error: PR number is empty."
    mango_work_tree = args.mango_work_tree
    assert mango_work_tree, "Error: Mango work tree can not be empty."
    git_cmd = 'git --git-dir={0:s}/.git --work-tree={0:s}'.format(mango_work_tree)
    url = 'https://api.github.com/repos/sugareps/Mango/compare/{0:s}...{1:s}'.format(
        base_sha, head_sha
    )
    try:
        response = fetch_api(url=url, token=github_token)
        data = jsloads(response)
    except urllib2.HTTPError as e:
        errmsg = {
            401: '{0:s}: Please check if your token is correct.'.format(e.reason),
            404: '{0:s}: Please check if your commit sha is correct.'.format(e.reason),
        }.get(e.code)
        if errmsg:
            print errmsg
        sys.exit(e.code)
    except ValueError as e:
        print("Wrong response value: {0:s}".format(e.message))
        sys.exit(2)

    # Create a fresh directory and generate all the files inside it
    rmdir(base_restore_directory, ignore_errors=True)
    os.makedirs(base_restore_directory)
    os.chdir(base_restore_directory)
    file_list = open(need_apply_files, 'w+')

    # Generate the files in the PR: both the base and head versions
    for pr_file in data['files']:
        # Only files under the sugarcrm directory may be updated.
        # An empty sha means the file content did not change (e.g. only permissions changed).
        if not pr_file['sha'] or not pr_file['filename'].startswith('sugarcrm/'):
            continue
        dirname = os.path.split(pr_file['filename'])[0]
        status = pr_file['status']
        if not os.path.exists(dirname):
            os.makedirs(dirname)
        base_file_name = "{0:s}.base".format(pr_file['filename'])
        head_file_name = "{0:s}.head".format(pr_file['filename'])
        base_cmd = "{0:s} show {1:s}:{2:s}".format(git_cmd, base_sha, pr_file['filename'])
        head_cmd = "{0:s} show {1:s}:{2:s}".format(git_cmd, head_sha, pr_file['filename'])
        # status: added | modified | removed
        # added: head only; removed: base only; modified: both base and head
        if status == 'added':
            write_context_to_file(head_file_name, head_cmd)
        elif status == 'modified':
            write_context_to_file(base_file_name, base_cmd)
            write_context_to_file(head_file_name, head_cmd)
        elif status == 'removed':
            write_context_to_file(base_file_name, base_cmd)
        # record the updated file
        file_list.write(pr_file['filename'])
        file_list.write('\n')

    # close the file once the loop is done
    file_list.close()
with Experiment(filename) as exp:  # open read-only; the file is closed at the end of the block
    # Load all features and masks in memory.
    # WARNING: this might consume too much RAM ==> need to do it by chunks.
    fm = exp.channel_groups[channel_group].spikes.features_masks[:]
    # fm is a Nspikes x Nfeatures x 2 array (features AND masks)
    fet = fm[:, :, 0]
    fmask = fm[:, :, 1]
    # Convert to .fet and .fmask.
    # These functions are in (old) spikedetekt.files
    write_fet(fet, filename_fet)
    write_mask(fmask, filename_fmask, fmt="%f")

# Sort out the KK parameters.
opt = ' '.join(['-{k}={v}'.format(k=k, v=v) for k, v in params.iteritems()])

# Call KK.
os.system("klustakwik {fn} {opt}".format(fn=basename, opt=opt))

# Read the .clu file.
clu = read_clu(filename_clu)

# Add the clusters to the KWIK file.
with Experiment(filename, mode='a') as exp:
    exp.channel_groups[channel_group].spikes.clusters.original[:] = clu

# Delete the temporary folder.
shutil.rmtree(tmpdir)

# Get back to the original folder.
os.chdir(curdir)
def main():
    model_names = sorted(name for name in vgg.__dict__
                         if name.islower() and not name.startswith("__")
                         and name.startswith("vgg") and callable(vgg.__dict__[name]))

    parser = argparse.ArgumentParser(description='PyTorch ImageNet Training')
    parser.add_argument('--arch', '-a', metavar='ARCH', default='vgg19', choices=model_names,
                        help='model architecture: ' + ' | '.join(model_names) + ' (default: vgg19)')
    parser.add_argument('-j', '--workers', default=0, type=int, metavar='N',
                        help='number of data loading workers (default: 0)')
    parser.add_argument('--epochs', default=100, type=int, metavar='N',
                        help='number of total epochs to run')
    parser.add_argument('--start-epoch', default=0, type=int, metavar='N',
                        help='manual epoch number (useful on restarts)')
    parser.add_argument('-b', '--batch-size', default=128, type=int, metavar='N',
                        help='mini-batch size (default: 128)')
    parser.add_argument('--lr', '--learning-rate', default=0.05, type=float, metavar='LR',
                        help='initial learning rate')
    parser.add_argument('--momentum', default=0.9, type=float, metavar='M', help='momentum')
    parser.add_argument('--weight-decay', '--wd', default=5e-4, type=float, metavar='W',
                        help='weight decay (default: 5e-4)')
    parser.add_argument('--resume', default='', type=str, metavar='PATH',
                        help='path to latest checkpoint (default: none)')
    parser.add_argument('-e', '--evaluate', dest='evaluate', action='store_true',
                        help='evaluate model on validation set')
    parser.add_argument('--pretrained', dest='pretrained', action='store_true',
                        help='use pre-trained model')
    parser.add_argument('--half', dest='half', action='store_true',
                        help='use half-precision (16-bit)')
    parser.add_argument('--cpu', dest='cpu', action='store_true', help='use cpu')
    parser.add_argument('--augment', action='store_true')
    parser.add_argument('--instagram', action='store_true')
    parser.add_argument('--p_cifar', default=1.0, type=float,
                        help='proportion of cifar dataset to use')
    parser.add_argument('--p_thresholded', default=0.0, type=float,
                        help='proportion of synthetic (gan generated) dataset to use')
    parser.add_argument('--p_monet', default=0.0, type=float,
                        help='proportion of monet stylized dataset to use')
    parser.add_argument('--p_udnie', default=0.0, type=float,
                        help='proportion of udnie stylized dataset to use')
    parser.add_argument('--threshold', default=None, type=float,
                        help='realness of synthetic dataset')
    parser.add_argument('--save-freq', default=20, type=int, metavar='N',
                        help='save frequency (default: 20)')

    global args, best_prec1
    best_prec1 = 0
    args = parser.parse_args()
    args.save_dir = os.path.join(
        'run_logs',
        datetime.now().strftime("%m_%d_%H_%M_%S") + "__"
        + "__".join([arg.replace("--", "") for arg in sys.argv[1:]]))

    # Check whether the save_dir exists or not
    if not os.path.exists(args.save_dir):
        os.makedirs(args.save_dir)
    print_log(pformat(vars(args)), args.save_dir)

    model = vgg.__dict__[args.arch]()
    model.features = torch.nn.DataParallel(model.features)
    if args.cpu:
        model.cpu()
    else:
        model.cuda()

    # optionally resume from a checkpoint
    if args.resume:
        if os.path.isfile(args.resume):
            print("=> loading checkpoint '{}'".format(args.resume))
            checkpoint = torch.load(args.resume)
            args.start_epoch = checkpoint['epoch']
            best_prec1 = checkpoint['best_prec1']
            model.load_state_dict(checkpoint['state_dict'])
            print("=> loaded checkpoint '{}' (epoch {})".format(args.resume, checkpoint['epoch']))
        else:
            print("=> no checkpoint found at '{}'".format(args.resume))

    cudnn.benchmark = True
    device = torch.device("cuda" if torch.cuda.is_available() else "cpu")

    # create a train dataloader without normalization to find the mean/std for normalization
    train_dataloader_without_normalization = torch.utils.data.DataLoader(
        CombinedDataset(
            p_cifar=args.p_cifar,
            p_thresholded=args.p_thresholded,
            threshold=args.threshold,
            p_monet=args.p_monet,
            p_udnie=args.p_udnie,
            device=device,
            train=True,
        ),
        batch_size=args.batch_size,
        num_workers=args.workers,
    )
    mean, std = get_mean_std(train_dataloader_without_normalization)
    print(f"mean: {mean}, std: {std}")
    normalize = transforms.Normalize(mean=mean, std=std)

    if args.augment and args.instagram:
        print("AUGMENTING and FILTERING")
        transformations = [
            lambda x: x.cpu(),
            transforms.ToPILImage(),
            transforms.ColorJitter(),
            # transforms.RandomGrayscale(p=0.05),
            transforms.RandomHorizontalFlip(),
            transforms.RandomCrop(32, 4),
            transforms.ToTensor(),
            lambda x: x.to(device),
            normalize,
        ]
    elif args.augment and not args.instagram:
        print("AUGMENTING ONLY")
        transformations = [
            lambda x: x.cpu(),
            transforms.ToPILImage(),
            transforms.RandomHorizontalFlip(),
            transforms.RandomCrop(32, 4),
            transforms.ToTensor(),
            lambda x: x.to(device),
            normalize,
        ]
    elif not args.augment and args.instagram:
        print("FILTERING ONLY")
        transformations = [
            lambda x: x.cpu(),
            transforms.ToPILImage(),
            transforms.ColorJitter(),
            # transforms.RandomGrayscale(p=0.05),
            transforms.ToTensor(),
            lambda x: x.to(device),
            normalize,
        ]
    else:
        print("NOT AUGMENTING OR FILTERING")
        transformations = [normalize]

    print("Using combined dataset!")
    train_loader = torch.utils.data.DataLoader(
        CombinedDataset(
            p_cifar=args.p_cifar,
            p_thresholded=args.p_thresholded,
            threshold=args.threshold,
            p_monet=args.p_monet,
            p_udnie=args.p_udnie,
            device=device,
            transform=transforms.Compose(transformations),
            train=True,
        ),
        batch_size=args.batch_size,
        shuffle=True,
        num_workers=args.workers,
    )

    val_loader = torch.utils.data.DataLoader(
        datasets.CIFAR10(
            root='./data',
            train=False,
            transform=transforms.Compose([transforms.ToTensor(), normalize]),
            download=True,
        ),
        batch_size=args.batch_size,
        shuffle=False,
        num_workers=args.workers,
    )

    # define the loss function (criterion) and optimizer
    criterion = nn.CrossEntropyLoss()
    if args.cpu:
        criterion = criterion.cpu()
    else:
        criterion = criterion.cuda()

    if args.half:
        model.half()
        criterion.half()

    optimizer = torch.optim.SGD(model.parameters(), args.lr,
                                momentum=args.momentum,
                                weight_decay=args.weight_decay)

    if args.evaluate:
        validate(val_loader, model, criterion)
        return

    global pbar
    with trange(args.start_epoch, args.epochs) as pbar:
        for epoch in pbar:
            try:
                adjust_learning_rate(optimizer, epoch)
                # train for one epoch
                train(train_loader, model, criterion, optimizer, epoch, args.epochs)
                # evaluate on validation set
                prec1 = validate(val_loader, model, criterion)
                # remember best prec@1 and save checkpoint
                if prec1 > best_prec1:
                    best_prec1 = prec1
                    save_checkpoint(
                        {
                            'epoch': epoch + 1,
                            'state_dict': model.state_dict(),
                            'best_prec1': best_prec1,
                        },
                        filename=os.path.join(args.save_dir, 'best_model.pth'))
            except KeyboardInterrupt:
                # drop the run logs for runs aborted before 90% completion
                if epoch < .9 * args.epochs:
                    shutil.rmtree(args.save_dir)
                raise
    fig.savefig(basedir + "tmp/out.png", transparent=True, bbox_inches='tight')

    # Load the new image for image processing
    img = cv2.imread(basedir + "tmp/out.png")
    hsv = cv2.cvtColor(img, cv2.COLOR_BGR2HSV)
    low_b = np.array([110, 50, 50])
    high_b = np.array([130, 255, 255])
    mask = cv2.inRange(hsv, low_b, high_b)
    res = cv2.bitwise_and(img, img, mask=mask)
    # cv2.imshow('image', img)
    # cv2.imshow('HSV', hsv)
    # cv2.imshow('mask', mask)
    # cv2.imshow('res', res)
    # cv2.destroyAllWindows()
    b, g, r = cv2.split(res)
    return cv2.countNonZero(b)  # get the count of all pixels with at least 1 blue value

if __name__ == '__main__':
    print(parse_run(dirbase, fname, dirbase + baseline_file))
    rmdir(dirbase + "tmp", ignore_errors=True)
else:
    scan()
    return

if not op.isdir("data"):
    os.mkdir("data")
with utils.dirBongdi("data"):
    MY_PATH = os.getcwd()  # op.dirname(__file__)
    TARGET_DIR = op.join(MY_PATH, REPO_NAME)
    CONFIG_FILE = op.join(MY_PATH, "configuration.do.not.edit")
    if op.isfile(CONFIG_FILE):
        pass  # so that configFile can be made global
    else:
        print("Configuration file not found! Are you trying to setup?")
        if utils.replyIsYes():
            utils.setup(TARGET_DIR, CONFIG_FILE)
        else:
            raise utils.UserCausedError("User refused to continue with setup")
shutil.rmtree("data")
configFile = ConfigFile(CONFIG_FILE, REPO_NAME)
main()
def clear_cache():
    global cache_path
    if cache_path is not None:
        shutil.rmtree(cache_path)
        cache_path = None
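# Several snippets above hand-roll temporary cache directories and tear them
# down manually. A sketch of an alternative (the names below are illustrative):
# tempfile.TemporaryDirectory ties the cleanup to a context manager, so
# shutil.rmtree runs for you even when an exception escapes the block.
import os
import tempfile

with tempfile.TemporaryDirectory(prefix="cache_") as cache_path:
    with open(os.path.join(cache_path, "entry.txt"), "w") as f:
        f.write("cached value")
    # ... use the cache ...
# on exit the directory and everything in it has been removed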
def clean(directory):
    try:
        shutil.rmtree(directory)
    except OSError:
        error('Could not remove object file directory!')
TRAKT_MARKER = xbmc.translatePath(os.path.join(TMP_TRAKT, 'marker.xml'))
backup_zip = xbmc.translatePath(os.path.join(TMP_TRAKT, 'Restore_RD_Trakt_Settings.zip'))
if os.path.isfile(TRAKT_MARKER):
    choice = xbmcgui.Dialog().yesno(AddonTitle,
        '[COLOR lime][B]A backup of your Real Debrid & Trakt settings has been found.[/B][/COLOR]',
        '[COLOR red][B]SELECTING NO WILL LOSE ALL SETTINGS[/B][/COLOR]',
        '[COLOR yellow]Do you want to restore those settings now?[/COLOR]',
        nolabel='[B][COLOR red]NO[/COLOR][/B]',
        yeslabel='[B][COLOR lime]YES[/COLOR][/B]')
    if choice == 1:
        backuprestore.AUTO_READ_ZIP_TRAKT(backup_zip)
    else:
        choice2 = xbmcgui.Dialog().yesno(AddonTitle,
            '[COLOR red][B]YOU HAVE CHOSEN NOT TO RESTORE YOUR SETTINGS.[/B][/COLOR]',
            '[COLOR red][B]YOU WILL NOT HAVE THIS OPTION AGAIN[/B][/COLOR]',
            '[COLOR red][B]ARE YOU SURE YOU WANT TO COMPLETE THIS ACTION?[/B][/COLOR]',
            yeslabel='[B][COLOR lime]YES[/COLOR][/B]',
            nolabel='[B][COLOR red]NO[/COLOR][/B]')
        if choice2 == 0:
            backuprestore.AUTO_READ_ZIP_TRAKT(backup_zip)
        else:
            _out = xbmc.translatePath(os.path.join('special://', 'home/tmp_trakt'))
            shutil.rmtree(_out, ignore_errors=True)

runner.check()

# Check Internet Connection
try:
    response = Common.OPEN_URL_NORMAL(GoogleOne)
except Exception:
    try:
        response = Common.OPEN_URL_NORMAL(GoogleTwo)
    except Exception:
        dialog.ok(AddonTitle,
                  'Sorry, we are unable to check for updates!',
                  'The device is not connected to the internet.',
                  'Please check your connection settings.')
        nointernet = 1