def alpm_pkg_name_version_from_path(self, path):
    """Derive (package name, version) from an ALPM package file path.

    Expects paths shaped like '<dir>/<name>-<ver>-<rel>.pkg.tar.<comp>';
    the returned version string includes the release ('<ver>-<rel>').
    Raises ValueError when the path does not contain the expected
    '/', '-' and '.' markers.
    """
    slash = path.rindex('/')
    rel_dash = path.rindex('-')
    ver_dash = path[:rel_dash].rindex('-')
    # Third '.' from the right marks the start of '.pkg.tar.<comp>'.
    ext_start = path[:path[:path.rindex('.')].rindex('.')].rindex('.')
    return path[slash + 1:ver_dash], path[ver_dash + 1:ext_start]
def process_function_types(units, warning=False):
    # type: list[AsmFile.Unit] -> dict[str, str]
    """Map each assembly FUNCTION unit to the declaration line of the
    matching C function found in the corresponding docs/decomp/<file>.c.

    For every function label extracted from a unit's content, the C file is
    scanned for '<name>(' occurrences whose following line opens a brace
    (i.e. a definition, not a call); '<name>@' is tried as a fallback.
    Functions that cannot be matched map to '' (and optionally print a
    warning).
    """
    def find_nth(string, substring, n):
        # 1-based nth occurrence of substring; recursive scan from the
        # previous match. Returns -1-derived garbage if fewer than n
        # occurrences exist, but callers bound n by str.count().
        if (n == 1):
            return string.find(substring)
        else:
            return string.find(substring, find_nth(string, substring, n - 1) + 1)
    out = {}  # type: dict(str, str)
    # Sort so iteration order over units (and thus file grouping) is stable.
    units.sort(key=lambda x: x.file_path, reverse=True)
    file_functions = {}  # type: dict(str, list[AsmFile.Unit])
    # Group FUNCTION units by the source file they came from.
    for unit in units:
        if unit.id == AsmFile.UNITS.FUNCTION:
            if unit.file_path in file_functions:
                file_functions[unit.file_path].append(unit)
            else:
                file_functions[unit.file_path] = [unit]
    for path in file_functions.keys():
        # The decompiled C counterpart lives in docs/decomp/<basename>.c.
        with open(
                'docs/decomp/' + path[path.rindex('/') + 1:path.rindex('.')] + '.c',
                'r') as c_file:
            c_lines = c_file.readlines()
            c_content = ''.join(c_lines)
            for unit in file_functions[path]:
                content = filter_source(unit.content)
                # The function label is the last line before the first ':'.
                function_name = content[:content.index(':')]
                function_name = function_name[function_name.rindex('\n') + 1:].strip()
                # Try each '<name>(' occurrence; accept the one whose next
                # line opens the function body.
                for i in range(c_content.count(function_name + '(')):
                    line_number = c_content[:find_nth(c_content, function_name + '(', i + 1)].count('\n')
                    if '{' in c_lines[line_number + 1]:
                        out[function_name] = c_lines[line_number].strip()
                        break
                # Fallback: mangled/annotated names referenced as '<name>@'.
                if function_name not in out and function_name + '@' in c_content:
                    line_number = c_content[:c_content.index(function_name + '@')].count('\n')
                    if '{' in c_lines[line_number + 1]:
                        out[function_name] = c_lines[line_number].strip()
                if function_name not in out:
                    if warning:
                        print('WARNING: Type unidentified for %s' % function_name)
                    out[function_name] = ''
    return out
def parseMap(fileName):
    # Python 2 code (uses `print` statements).
    """Parse a memory-map file into a list of MapEntry objects.

    Lines are fixed-width: start address at [0:8], end address at [9:17],
    executable flag at column 20 ('x'), and the mapped path from column 49.
    Non-executable lines are skipped; pathless lines are dropped by the
    ValueError handler (no '/' in path).
    """
    mapEnties = []
    with open(fileName, 'r') as f:
        while True:
            line = f.readline()
            if not line:
                break
            try:
                # Only keep executable mappings.
                if line[20] != 'x':
                    continue
            except Exception as e:
                # A line too short to index is treated as fatal input.
                print e
                print line
                sys.exit(1)
            r1 = int(line[0:8], 16)   # region start
            r2 = int(line[9:17], 16)  # region end
            path = line[49:].rstrip()
            try:
                lastSlash = path.rindex('/')
                # MapEntry(start, end, basename, directory)
                mapEnties.append(
                    MapEntry(r1, r2, path[lastSlash + 1:], path[0:lastSlash]))
            except ValueError:
                # No '/' in path (e.g. anonymous mapping) -> skip entry.
                pass
    return mapEnties
def add_cert_to_db(path, validities, pprinted_certchain, fprints):
    # Python 2 code (uses `print` statements).
    # validities -- output of openssl verify on each cert in this chain
    # pprinted_certchain -- output of openssl x509 -text on each crt in chain
    """Insert one row per certificate of this chain into the MySQL table
    named by the module-level `tablename`, via the module-level `gdbc`
    cursor. Field values are escaped with gdb.escape_string.
    """
    assert len(validities) == len(pprinted_certchain), (validities, pprinted_certchain)
    assert len(pprinted_certchain) == len(fprints)
    for i, pprinted_cert in enumerate(pprinted_certchain):
        print "parsing", path
        try:
            # start a dictionary of database fields/columns...
            fields = hacky_parse(pprinted_cert)
        except:
            # Log which input failed, then re-raise (bare except is OK here
            # because the exception is propagated, not swallowed).
            print "Error parsing", path + "\n" + pprinted_cert
            raise
        fields['path'] = path
        moz_valid, ms_valid = validities[i]
        # NOTE(review): `in` on the (moz, ms) tuple means exact-element
        # match, so `valid` is 1 iff either verdict is exactly "Yes".
        if ("Yes" in validities[i]):
            valid = 1
        else:
            valid = 0
        fields['moz_valid'] = moz_valid
        fields['ms_valid'] = ms_valid
        if len(fprints[i]):
            fields['fingerprint'] = fprints[i]
        # Filename is '<ip>' plus an 8-char suffix being trimmed here.
        fields['ip'] = path[path.rindex('/') + 1:-8]  # linux pathsep dependency
        q = "INSERT INTO %s SET " % gdb.escape_string(tablename)
        q += "`fetchtime`=%.0f ,\n" % os.path.getmtime(path)
        q += "valid=%r ,\n" % valid
        for (f, v) in fields.items():
            q += "`%s`='%s' ,\n" % (gdb.escape_string(f), gdb.escape_string(v))
        q = q[:-2]  # drop the trailing ",\n"
        gdbc.execute(q)
def __getitem__(self, index):
    """Corrupt sample *index* with `self.method` at `self.severity` and
    save it under ./<method>/<severity>/<class name>/.

    Always returns 0 — the dataset is iterated purely for the files it
    writes.
    """
    sample_path, target = self.imgs[index]
    image = self.loader(sample_path)
    if self.transform is not None:
        image = self.transform(image)
    image = self.method(image, self.severity)
    if self.target_transform is not None:
        target = self.target_transform(target)
    # Output directory: ./<corruption>/<severity>/<class>
    out_dir = '/'.join(
        ['.', self.method.__name__, str(self.severity), self.idx_to_class[target]])
    try:
        if not os.path.exists(out_dir):
            os.makedirs(out_dir)
    except Exception as err:
        # Directory creation races are tolerated; just report them.
        print(err)
    out_file = out_dir + sample_path[sample_path.rindex('/'):]
    Image.fromarray(np.uint8(image)).save(out_file, quality=85, optimize=True)
    return 0  # we do not care about returning the data
def dirlist(path):
    """Recursively concatenate every *.cql file under *path* into a single
    '<path>/<dirname>_all_table.cql' aggregate file.

    Previously generated aggregates (files starting with 'all' or ending
    with 'all_table.cql') are skipped so a rerun never folds its own
    output back in. Each source file's content is followed by ';' and a
    blank line. Subdirectories are processed recursively, each producing
    its own aggregate.
    """
    filelist = os.listdir(path)
    print(path)
    pathDirName = path[path.rindex(os.sep) + 1:len(path)]
    final_path = path + os.sep + pathDirName + "_all_table.cql"
    # Remove a stale aggregate before rebuilding it.
    if os.path.exists(final_path):
        os.remove(final_path)
    allContent = []
    for filename in filelist:
        filepath = os.path.join(path, filename)
        if os.path.isdir(filepath):
            dirlist(filepath)
        else:
            if filepath.endswith(".cql"):
                sqlFileName = filepath[filepath.rindex(os.sep) + 1:len(filepath)]
                # FIX: use boolean `or` instead of bitwise `|`; the old form
                # happened to work on bools but eagerly evaluated both sides
                # and read as a bitwise operation.
                if sqlFileName.startswith("all") or sqlFileName.endswith("all_table.cql"):
                    continue
                print(sqlFileName)
                allContent.append(readFile(filepath))
                allContent.append(";\n")
                allContent.append("\n")
            else:
                print("")
    if len(allContent) > 0:
        writeFile(final_path, allContent)
def add_cert_to_db(path, validities, pprinted_certchain, fprints):
    # Python 2 code (uses `print` statements).
    # validities -- output of openssl verify on each cert in this chain
    # pprinted_certchain -- output of openssl x509 -text on each crt in chain
    """Build and execute one INSERT per certificate in the chain, writing
    into the module-level `tablename` through the module-level `gdbc`
    cursor; values escaped with gdb.escape_string.
    """
    assert len(validities) == len(pprinted_certchain), (validities, pprinted_certchain)
    assert len(pprinted_certchain) == len(fprints)
    for i, pprinted_cert in enumerate(pprinted_certchain):
        print "parsing", path
        try:
            # start a dictionary of database fields/columns...
            fields = hacky_parse(pprinted_cert)
        except:
            # Bare except is acceptable here: the error is logged and
            # re-raised, not swallowed.
            print "Error parsing", path + "\n" + pprinted_cert
            raise
        fields['path'] = path
        moz_valid, ms_valid = validities[i]
        # `in` over the (moz, ms) tuple: valid iff either verdict == "Yes".
        if ("Yes" in validities[i]):
            valid = 1
        else:
            valid = 0
        fields['moz_valid'] = moz_valid
        fields['ms_valid'] = ms_valid
        if len(fprints[i]):
            fields['fingerprint'] = fprints[i]
        # Basename minus its last 8 characters is treated as the IP.
        fields['ip'] = path[path.rindex('/') +1:-8]  # linux pathsep dependency
        q = "INSERT INTO %s SET " % gdb.escape_string(tablename)
        q += "`fetchtime`=%.0f ,\n" % os.path.getmtime(path)
        q += "valid=%r ,\n" % valid
        for (f,v) in fields.items():
            q += "`%s`='%s' ,\n" % (gdb.escape_string(f), gdb.escape_string(v))
        q = q[:-2]  # drop the trailing ",\n"
        gdbc.execute(q)
def __init__(self, json=None, path=None, verify=True):
    '''
    Creates a new attachment class, optionally from existing JSON.

    Keyword Arguments:
            json -- json to create the class from. this is mostly used by
            the class internally when an attachment is downloaded from the
            cloud. If you want to create a new attachment, leave this
            empty. (default = None)
            path -- a string giving the path to a file. it is cross
            platform as long as you break windows convention and use '/'
            instead of '\\'. Passing this argument will tend to the rest of
            the process of making an attachment. Note that passing in json
            as well will cause this argument to be ignored.
            verify -- whether to verify SSL certificates on requests made
            for this attachment. (default = True)
    '''
    if json:
        self.json = json
        self.isPDF = '.pdf' in self.json['Name'].lower()
    elif path:
        with open(path, 'rb') as val:
            self.json = {
                '@odata.type': '#Microsoft.OutlookServices.FileAttachment'}
            self.isPDF = '.pdf' in path.lower()
            self.setByteString(val.read())
            # FIX: only ValueError (no '/' in path) is expected here; the
            # old bare `except:` also hid unrelated failures in setName.
            try:
                self.setName(path[path.rindex('/') + 1:])
            except ValueError:
                # Path has no directory component; use it whole as the name.
                self.setName(path)
    else:
        self.json = {
            '@odata.type': '#Microsoft.OutlookServices.FileAttachment'}
    self.verify = verify
def check_filepath(self, path):
    """Ensure the directory portion of *path* exists (creating it
    recursively via make_dir_r). Returns False — after printing a
    diagnostic — when the path contains no '/' separator; True otherwise.
    """
    try:
        make_dir_r(path[:path.rindex("/")])
    except ValueError:
        # rindex found no '/': nothing that looks like a directory part.
        print("Seems strange path to me: %s" % path)
        return False
    else:
        return True
def __getitem__(self, index):
    """Apply the corruption `self.method` at `self.severity` to image
    *index*, resize to (RESIZE_W, RESIZE_H) and save it under
    PLATFORM_ROOT/Data/udacityA_nvidiaB/valB_IMGC_<method>_<severity>/.

    Returns 0 — callers iterate this dataset only for the files it writes.
    """
    # path, target = self.imgs[index]
    path = self.imgs[index]  # imgs holds bare paths in this variant (no labels)
    img = self.loader(path)
    if self.transform is not None:
        img = self.transform(img)
    img = self.method(img, self.severity)
    if self.target_transform is not None:
        # NOTE(review): `target` is never assigned in this variant (the
        # tuple unpack above is commented out), so this branch would raise
        # UnboundLocalError. Presumably target_transform is always None
        # here — confirm against the dataset construction.
        target = self.target_transform(target)
    # save_path = '/share/data/vision-greg/DistortedImageNet/JPEG/' + self.method.__name__ + \
    #             '/' + str(self.severity) + '/' + self.idx_to_class[target]
    save_path = os.path.join(
        PLATFORM_ROOT, 'Data/udacityA_nvidiaB', '_'.join(
            ['valB', 'IMGC', self.method.__name__,
             str(self.severity)]))
    if os.path.exists(save_path) == False:
        os.makedirs(save_path, exist_ok=True)
    save_path += path[path.rindex('/'):]  # append '/<basename>' (POSIX paths only)
    Image.fromarray(np.uint8(img)).resize(
        (RESIZE_W, RESIZE_H)).save(save_path, quality=85, optimize=True)
    return 0  # we do not care about returning the data
def filename_from_url(url):
    """Extract the last path segment of *url* for use as a filename.

    Falls back to returning *url* unchanged when the URL path has no '/'
    (or when urlparse itself rejects the input).
    """
    try:
        parsed_path = urllib.parse.urlparse(url).path
        return parsed_path[parsed_path.rindex("/") + 1:]
    except ValueError:
        return url
def filename_from_url(url):
    """Return everything after the final '/' of *url*'s path component.

    If the path contains no '/' at all (or parsing fails with ValueError),
    the original *url* is returned untouched.
    """
    try:
        p = urllib.parse.urlparse(url).path
        cut = p.rindex("/")
    except ValueError:
        return url
    return p[cut + 1:]
def recursive_mkdir(path):
    """Create directory *path*, including any missing parents.

    Behaves like ``mkdir -p``: already-existing directories are left
    untouched. The original hand-rolled loop sliced at a hardcoded '/'
    (breaking on Windows separators) and reimplemented what
    os.makedirs(..., exist_ok=True) already does.
    """
    os.makedirs(os.path.abspath(path), exist_ok=True)
def __find_template_file(cls, path, prefix, suffix=""):
    """Locate a sibling of *path* whose name is the template's name with
    'template_' replaced by *prefix*, matching via glob and filtered by
    *suffix*; returns the first hit or None.
    """
    directory = os.path.dirname(path)  # NOTE(review): unused
    path = os.path.join(os.path.dirname(path), os.path.basename(path).replace("template_", prefix))
    # NOTE(review): the '- 1' also drops the character *before* the last
    # '.' — confirm this off-by-one is intentional for the glob pattern.
    path = "%s*" % path[: path.rindex(".") - 1]
    try:
        possible_paths = glob.glob(path)
        # NOTE(review): subscripting filter() only works on Python 2; on
        # Python 3 this raises TypeError, which the bare except turns into
        # a silent None for every call.
        return filter(lambda p: p.endswith(suffix), possible_paths)[0]
    except:
        # No match (IndexError) or any other failure -> None.
        return None
def do_resource(self, resource_urls, http_request):
    """Download each resource URL (derived from *http_request*) unless a
    file with the same basename already exists in the working directory.

    Each download is handed to an ImageSaver-backed downloader produced by
    HttpFetchProcess.newDownloader.
    """
    for resource_url in resource_urls:
        path = resource_url
        # Basename of the URL is used as the local file name.
        file_name = path[path.rindex('/') + 1:]
        if os.path.isfile(file_name):
            # Already fetched previously; skip.
            continue
        image_saver = ImageSaver.ImageSaver('./', self.http_request_)
        http_request_new = self.construct_request(resource_url, http_request)
        printDebug('PageDelegate::do_img: handling img %s, new request: (%s)' % (resource_url, http_request_new))
        HttpFetchProcess.newDownloader(image_saver).download(http_request_new)
def create_CV_datasets(path, fold):
    """Split the C4.5 dataset at *path* into *fold* cross-validation
    (training, gold) pairs, writing each pair into its own numbered
    sibling directory ('<parent>/<k>/<name>').
    """
    training = format.C45_FORMAT.get_training_instances(path)
    datasets = training.cross_validation_datasets(fold)
    sep_at = path.rindex(os.path.sep)
    parent_path, name = path[:sep_at], path[sep_at + 1:]
    for idx, ds in enumerate(datasets, start=1):
        fold_dir = parent_path + os.path.sep + str(idx)
        os.makedirs(fold_dir)
        out_path = fold_dir + os.path.sep + name
        format.C45_FORMAT.write_training_to_file(ds[0], out_path)
        format.C45_FORMAT.write_gold_to_file(ds[1], out_path)
def save(self, http_request, data):
    # Python 2 code (uses `print >>` redirection).
    """Persist *data* under the basename of the request path, skipping
    payloads under 50 KiB and files that already exist; logs each save to
    the module-level `log_file`.
    """
    # Ignore small responses (error pages, stubs, etc.).
    if len(data) < 1024 * 50:
        return
    path = http_request.path
    file_name = path[path.rindex('/') + 1:]
    if os.path.isfile(file_name):
        # Already downloaded; do not overwrite.
        return
    with self._ensureOpen(file_name) as f:
        f.write(data)
    # write log here
    print >>log_file, "topic url:%s, file name:%s" % (str(self.http_request_), file_name)
def _get_parent_directory(self, path: str):
    """Return the parent directory of *path*.

    The separator is inferred: '/' when present anywhere in the string,
    otherwise '\\'. Trailing separators are stripped first, and the bare
    separator is returned for root-level paths. Raises ValueError when no
    separator remains after stripping.
    """
    sep = "/" if "/" in path else "\\"
    trimmed = path.rstrip(sep)
    parent = trimmed[:trimmed.rindex(sep)]
    return parent if parent else sep
def universal_dirname(path: str) -> str:
    """Dirname that handles both '/'- and '\\'-separated paths.

    Picks '/' as the separator if it appears anywhere in *path*, else
    '\\'; strips trailing separators; returns the separator itself for
    root-level inputs. Raises ValueError when no separator remains.
    """
    sep = "/" if "/" in path else "\\"
    stripped = path.rstrip(sep)
    parent = stripped[:stripped.rindex(sep)]
    return parent or sep
def create_CV_datasets(path, fold):
    """Write *fold* cross-validation splits of the C4.5 dataset at *path*.

    Split k (1-based) lands in '<parent>/<k>/<name>' as a training file
    plus a gold file, both produced by format.C45_FORMAT writers.
    """
    instances = format.C45_FORMAT.get_training_instances(path)
    folds = instances.cross_validation_datasets(fold)
    last_sep = path.rindex(os.path.sep)
    base_dir = path[:last_sep]
    file_name = path[last_sep + 1:]
    for k in range(len(folds)):
        fold_dir = base_dir + os.path.sep + str(k + 1)
        os.makedirs(fold_dir)
        out_path = fold_dir + os.path.sep + file_name
        format.C45_FORMAT.write_training_to_file(folds[k][0], out_path)
        format.C45_FORMAT.write_gold_to_file(folds[k][1], out_path)
def getPattern(self):
    """Pick a random source (under the lock) and derive a search pattern
    from the basename of its Windows-style path: every non-alphanumeric
    character becomes a space, and the result is stripped.
    """
    self.lock.acquire()
    source = self._sources[random.randint(0, len(self._sources) - 1)]
    self.lock.release()
    base = source.getPath()
    base = base[base.rindex('\\') + 1:]
    pattern = ''.join(ch if ch.isalnum() else ' ' for ch in base)
    return pattern.strip()
def run_dialog(self, fo):
    # Python 2 / PyGTK code (uses the `file()` builtin and gtk module).
    """Show a Save-As dialog for file object *fo* and, on accept, copy its
    contents to the chosen location via pump(). The dialog is always
    destroyed, even if the copy raises.
    """
    # Path of fo relative to its filesystem root.
    path = fo.fullname()[len(fo.fs_top().fullname()):]
    i = path.rindex('/')
    dirname, filename = path[:i], path[i+1:]
    fc = gtk.FileChooserDialog(**self.fcd)
    fc.set_do_overwrite_confirmation(True)
    fc.set_current_folder(dirname)
    fc.set_name(filename)
    try:
        if fc.run() == gtk.RESPONSE_ACCEPT:
            if fo.size > self.progress_limit:
                pass  # fixme: make progress dlg
            pump(fo.open(), file(fc.get_filename(), 'w'), fo.size)
    finally:
        fc.destroy()
def saveToFile(self):
    """Write the parent editor's text to the path typed in self.path,
    remembering the directory and file name (split at the last '\\') on
    the parent, then close this dialog. Returns -1 when the target
    directory does not exist.
    """
    print('save To file')
    text = self.parent.textEdit.toPlainText()
    path = self.path.text()
    try:
        fileHandle = open ( path, 'w' )
    except IOError:
        # Russian: "The directory you are trying to save the file into
        # does not exist!" (runtime string left untranslated on purpose).
        print('Директория, в которую пытаетесь сохранить файл, не существует!')
        return -1
    # Windows-style separator: split into directory and file name.
    sep = path.rindex('\\')
    # print("dir = %s, file = %s"%(path[:sep],path[sep+1:]))
    self.parent.dir = path[:sep]
    self.parent.file_name = path[sep+1:]
    fileHandle.write(text)
    fileHandle.close()
    self.close()
def load_object(path):
    # Python 2 code (uses `raise Exc, msg` syntax).
    """Load an object given it's absolute object path, and return it.

    The object can be a class, function, variable or instance.

    :param path: absolute object path (i.e. 'ocd_backend.extractor.BaseExtractor')
    :type path: str.
    """
    try:
        dot = path.rindex('.')
    except ValueError:
        raise ValueError, "Error loading object '%s': not a full path" % path
    module, name = path[:dot], path[dot + 1:]
    try:
        mod = __import__(module, {}, {}, [''])
    except ImportError, e:
        raise ImportError, "Error loading object '%s': %s" % (path, e)
    # NOTE(review): the function appears truncated in this chunk — the
    # getattr(mod, name) lookup and return are not visible here.
def _set_dylib_executable_path(self):
    """From self.object_file.path, derive the bundle's executable
    directory ('<bundle>/Contents/MacOS') and executable file name.

    Supports '.appex/' (app extensions) with '.app/' as a fallback;
    returns silently when the path is not inside either bundle type.
    Names listed in self.strange_exe_files override the bundle-derived
    executable name.
    """
    path = self.object_file.path
    app_ext_start = path.rfind('.appex/')
    if app_ext_start == -1:
        app_ext_start = path.rfind('.app/')
        if app_ext_start == -1:
            # Not inside an .app/.appex bundle; nothing to set.
            return
    # Index of the '/' that terminates '.app'/'.appex'.
    app_ext_end = path.find('/', app_ext_start)
    self.executable_path = path[0:app_ext_end] + '/Contents/MacOS'
    # Bundle name sits between the preceding '/' and the extension.
    app_name_start = path.rindex('/', 0, app_ext_start)
    app_name = path[app_name_start + 1:app_ext_start]
    if app_name in self.strange_exe_files:
        # Some bundles ship an executable whose name differs from the
        # bundle name; map those explicitly.
        app_name = self.strange_exe_files[app_name]
    self.executable_file = self.executable_path + '/' + app_name
def parseMap(fileName):
    # Python 2 code (uses `print` statements).
    """Parse a memory-map file into MapEntry objects, merging adjacent
    regions that belong to the same mapped file.

    Fixed-width fields: start address [0:8], end address [9:17], path from
    column 49. Pathless/anonymous lines extend the previous entry when
    contiguous; entries are only created for lines carrying a '/'-path.
    """
    mapEnties = []
    with open(fileName,'r') as f:
        while True:
            line = f.readline()
            if not line:
                break
            # Executable-flag filtering disabled in this variant:
            # try:
            # if line[20] != 'x':
            # continue
            # except Exception as e:
            # print e
            # print line
            # sys.exit(1)
            r1 = int(line[0:8],16)   # region start
            r2 = int(line[9:17],16)  # region end
            path = line[49:].rstrip()
            try:
                lastSlash = path.rindex('/')
            except ValueError:
                # No '/' -> anonymous/non-file mapping.
                lastSlash = -1
                pass
            if lastSlash != -1:
                directory = path[0:lastSlash]
                soName = path[lastSlash + 1:]
            if len(mapEnties) > 0:
                lastEntry = mapEnties[-1]
                if lastEntry.r2 == r1:
                    # Contiguous with the previous entry: merge when it is
                    # the same file (or both are anonymous).
                    if (lastSlash == -1 and not path) or (lastSlash != -1 and directory == lastEntry.path and soName == lastEntry.soName):
                        lastEntry.r2 = r2
                        continue
                if path and lastSlash != -1:
                    # Sanity check: new region should not overlap the last.
                    if r1 < lastEntry.r2 or r2 < lastEntry.r2:
                        print "error intercept: r1: {0:08x}, r2: {1:08x}, lastEntry.r1: {2:08x}, lastEntry.r2: {3:08x}, lastEntry.soName: {4}".format(r1, r2, lastEntry.r1, lastEntry.r2, lastEntry.soName);
                    mapEnties.append(MapEntry(r1, r2, soName, directory))
            else:
                if path and lastSlash != -1:
                    mapEnties.append(MapEntry(r1, r2, soName, directory))
    # print ' '.join(str(m) for m in mapEnties)
    return mapEnties
def __getitem__(self, index):
    """Corrupt sample *index* with `self.method` at `self.severity` and
    save it under /SSD/euntae/data/DistortedVisDA/JPEG/<method>/<severity>/<target>/.

    Always returns 0 — only the written file matters.
    """
    sample_path, target = self.imgs[index]
    image = self.loader(sample_path)
    if self.transform is not None:
        image = self.transform(image)
    image = self.method(image, self.severity)
    if self.target_transform is not None:
        target = self.target_transform(target)
    out_dir = '/'.join(['/SSD/euntae/data/DistortedVisDA/JPEG',
                        self.method.__name__, str(self.severity), str(target)])
    if not os.path.exists(out_dir):
        os.makedirs(out_dir)
    out_file = out_dir + sample_path[sample_path.rindex('/'):]
    Image.fromarray(np.uint8(image)).save(out_file, quality=85, optimize=True)
    return 0  # we do not care about returning the data
def __getitem__(self, index):
    """Corrupt sample *index* and save it as PNG under
    /share/data/vision-greg/DistortedImageNet/<method>/<severity>/<class>/.

    Always returns 0 — the on-disk output is the real product.
    """
    sample_path, target = self.imgs[index]
    image = self.loader(sample_path)
    if self.transform is not None:
        image = self.transform(image)
    image = self.method(image, self.severity)
    if self.target_transform is not None:
        target = self.target_transform(target)
    out_dir = '/'.join(['/share/data/vision-greg/DistortedImageNet',
                        self.method.__name__, str(self.severity),
                        self.idx_to_class[target]])
    if not os.path.exists(out_dir):
        os.makedirs(out_dir)
    # The [:-4] slice drops the 'JPEG' of a '.JPEG' name (keeping the
    # dot); appending 'png' yields a '.png' file name.
    out_file = out_dir + sample_path[sample_path.rindex('/'):-4] + 'png'
    Image.fromarray(np.uint8(image)).save(out_file, optimize=True)
    return 0  # we do not care about returning the data
def __getitem__(self, index):
    """Corrupt sample *index* with `self.method` at `self.severity` and
    write it to <args.output_dir>/<method>/<severity>/<target>/.

    Always returns 0; callers iterate only for the side effect.
    """
    sample_path, target = self.imgs[index]
    image = self.loader(sample_path)
    # Default transformation is the imagenet preprocessing pipeline.
    if self.transform == 'imagenet':
        image = resize_and_center_crop(image,
                                       resize_size=args.RESIZE_SIZE,
                                       crop_size=args.CROP_SIZE)
    image = self.method(image, self.severity)
    out_dir = os.path.join(args.output_dir, self.method.__name__,
                           str(self.severity), target)
    if not os.path.exists(out_dir):
        os.makedirs(out_dir)
    out_file = out_dir + sample_path[sample_path.rindex('/'):]
    Image.fromarray(np.uint8(image)).save(out_file, quality=85, optimize=True)
    return 0  # we do not care about returning the data
def add_cert_to_db(path, validities, x509_parsed_certchain, fprints):
    # Python 2 code (uses `print` statements).
    """Insert one DB row per parsed certificate in the chain into the
    module-level `tablename` through the module-level `gdbc` cursor.
    Values are escaped with gdb.escape_string before being spliced into
    the INSERT statement.
    """
    for i, parsed_cert in enumerate(x509_parsed_certchain):
        print "parsing", path
        try:
            fields = hacky_parse(parsed_cert)
        except:
            # Log which input failed, then re-raise (not swallowed).
            print "Error parsing", path + "\n" + parsed_cert
            raise
        fields['path'] = path
        moz_valid, ms_valid = validities[i]
        # `in` over the (moz, ms) tuple: valid iff either verdict == "Yes".
        if ("Yes" in validities[i]):
            valid = 1
        else:
            valid = 0
        fields['moz_valid'] = moz_valid
        fields['ms_valid'] = ms_valid
        if len(fprints[i]):
            fields['fingerprint'] = fprints[i]
        # Basename minus its 8-char suffix is taken as the IP.
        fields['ip'] = path[path.rindex('/') +1:-8]  # linux pathsep dependency
        q = "INSERT INTO %s SET " % gdb.escape_string(tablename)
        q += "`fetchtime`=%.0f ,\n" % os.path.getmtime(path)
        q += "valid=%r ,\n" % valid
        for (f,v) in fields.items():
            q += "`%s`='%s' ,\n" % (gdb.escape_string(f), gdb.escape_string(v))
        q = q[:-2]  # drop the trailing ",\n"
        gdbc.execute(q)
def __getitem__(self, index):
    """Corrupt sample *index* and save it under the local
    DistortedImageNet JPEG tree
    ('/Users/yifanli/Yale/2021FML/AdvRobustness/...').

    Always returns 0 — only the written file is used.
    """
    sample_path, target = self.imgs[index]
    image = self.loader(sample_path)
    if self.transform is not None:
        image = self.transform(image)
    image = self.method(image, self.severity)
    if self.target_transform is not None:
        target = self.target_transform(target)
    out_dir = '/'.join(
        ['/Users/yifanli/Yale/2021FML/AdvRobustness/DistortedImageNet/JPEG',
         self.method.__name__, str(self.severity), self.idx_to_class[target]])
    if not os.path.exists(out_dir):
        os.makedirs(out_dir)
    out_file = out_dir + sample_path[sample_path.rindex('/'):]
    Image.fromarray(np.uint8(image)).save(out_file, quality=85, optimize=True)
    return 0  # we do not care about returning the data
def parseMap(fileName):
    # Python 2 code (uses `print` statements).
    """Parse a memory-map file into MapEntry objects, keeping only
    executable ('x' at column 20) mappings that carry a '/'-path.

    Fixed-width fields: start address [0:8], end address [9:17], path
    from column 49. MapEntry receives (start, end, basename, directory).
    """
    mapEnties = []
    with open(fileName,'r') as f:
        while True:
            line = f.readline()
            if not line:
                break
            try:
                # Only keep executable mappings.
                if line[20] != 'x':
                    continue
            except Exception as e:
                # A line too short to index is treated as fatal input.
                print e
                print line
                sys.exit(1)
            r1 = int(line[0:8],16)   # region start
            r2 = int(line[9:17],16)  # region end
            path = line[49:].rstrip()
            try:
                lastSlash = path.rindex('/')
                mapEnties.append(MapEntry(r1,r2,path[lastSlash + 1:],path[0:lastSlash ] ))
            except ValueError:
                # No '/' in path (anonymous mapping) -> skip.
                pass
    return mapEnties
def add_cert_to_db(path, validities, x509_parsed_certchain, fprints):
    # Python 2 code (uses `print` statements).
    """Build and execute one INSERT per parsed certificate in the chain,
    writing into the module-level `tablename` via the `gdbc` cursor;
    values escaped with gdb.escape_string.
    """
    for i, parsed_cert in enumerate(x509_parsed_certchain):
        print "parsing", path
        try:
            fields = hacky_parse(parsed_cert)
        except:
            # Bare except is acceptable: the failure is logged, then
            # re-raised.
            print "Error parsing", path + "\n" + parsed_cert
            raise
        fields['path'] = path
        moz_valid, ms_valid = validities[i]
        # `in` over the (moz, ms) tuple: valid iff either verdict == "Yes".
        if ("Yes" in validities[i]):
            valid = 1
        else:
            valid = 0
        fields['moz_valid'] = moz_valid
        fields['ms_valid'] = ms_valid
        if len(fprints[i]):
            fields['fingerprint'] = fprints[i]
        # Basename minus its last 8 characters is treated as the IP.
        fields['ip'] = path[path.rindex('/') + 1:-8]  # linux pathsep dependency
        q = "INSERT INTO %s SET " % gdb.escape_string(tablename)
        q += "`fetchtime`=%.0f ,\n" % os.path.getmtime(path)
        q += "valid=%r ,\n" % valid
        for (f, v) in fields.items():
            q += "`%s`='%s' ,\n" % (gdb.escape_string(f), gdb.escape_string(v))
        q = q[:-2]  # drop the trailing ",\n"
        gdbc.execute(q)
def fileext(path):
    """Return the lowercased extension of *path* (text after the final
    '.', dot excluded). Raises ValueError when *path* has no dot.
    """
    last_dot = path.rindex(".")
    return path[last_dot + 1:].lower()
def get_mod_obj(path):
    """Split a dotted path into (module, object) at the last '.'.

    A path with no dot yields (path, '').
    """
    if "." not in path:
        return path, ""
    module_part, _, obj_part = path.rpartition(".")
    return module_part, obj_part
letters = english + russian + digits # generate all possible combinations of path roots paths = [os.path.join(path, letter) for letter in letters for path in pathroots] # leave only existing directories paths = filter(lambda name: os.path.exists(name), paths) for path in paths: letter = os.path.basename(path) # generate the path of a destination root: # - extract only letter if the path does not contain any nested path # for example, for 'D:\A' the root is 'A' # - keep the letter and the nested path, otherwise # for example, for 'D:\Foo\A' the root is 'Foo\A' if path.index(os.sep) == path.rindex(os.sep): root = letter else: root = path[path.index(os.sep) + 1:] # iterate through all performers for performer in os.listdir(path): performerpath = os.path.join(path, performer) # iterate through all files in performer's Picture folder imagepath = os.path.join(performerpath, 'Picture') if os.path.exists(imagepath): for imagefile in os.listdir(imagepath): srcfile = os.path.join(imagepath, imagefile) if not os.path.isfile(srcfile): continue # if the file contains only digits (album ID) # or starts with 'photo' or 'foto' (performer photo)
# Python 2 script section: unpack any rar/zip archives found in temp\,
# then move the extracted files next to `base` renamed after `path`.
rararchives = glob.glob('temp\\*.r*')
if rararchives:
    #extract from '\temp\*.r*' and output to temp2
    process = subprocess.Popen('unrar.exe x "temp/*.r*" *.* temp/temp2', bufsize=1024, stdout=None)
    process.communicate()
    del process
# NOTE(review): leading '/' in this glob looks wrong ('/temp\*.zip');
# presumably 'temp\\*.zip' was intended — confirm before relying on it.
ziparchives = glob.glob('/temp\\*.zip')
if ziparchives:
    #extract from '\temp\*.zip' and output to temp2
    process = subprocess.Popen('unzip.exe -qo "temp/*.zip"' + ' -d temp/temp2', bufsize=1024, stdout=None)
    process.communicate()
    del process
    pass
#rename contents of temp\temp2 to basedir + basename + extension
basefilename = path[path.rindex('\\'):]
for filename in glob.glob('temp/temp2/*.*'):
    extension = filename[filename.rindex('.'):]
    shutil.move(filename, base + basefilename + extension)
    print "Moving", filename, 'to', base + basefilename + extension
    print
# Clean up the working area once everything is moved.
shutil.rmtree('temp')
def replace_extension(path):
    """Swap whatever follows the final '.' in *path* for 'html'.

    Raises ValueError when *path* contains no dot. Note the split happens
    at the last '.' anywhere in the string, not only in the basename.
    """
    stem = path[:path.rindex('.')]
    return stem + '.html'
def get_mod_obj(path):
    """Partition a dotted path at its last '.' into (module, object);
    (path, '') when there is no dot at all.
    """
    head, sep, tail = path.rpartition('.')
    if not sep:
        return path, ''
    return head, tail
runtime_json["local"]["party_id"] = route_json[cluster_type][ "party_id"] runtime_json["role"] = inner_dict json_path = file_utils.get_project_base_directory( ) + "/contrib/fate_script/conf/" + list( site_set)[i] + "_runtime_conf.json" with open(json_path, "wt") as fout: fout.write(json.dumps(runtime_json, indent=4)) if __name__ == '__main__': script_path = sys.argv[1] #path = file_utils.get_project_base_directory() + "/contrib/fate_script/script/HeteroLR.fml" #path = file_utils.get_project_base_directory() + script_path path = sys.argv[1] algo_start_idx = 0 if '/' not in path else path.rindex('/') + 1 algo_name = path[algo_start_idx:path.rindex('.')] inputStream = FileStream(path, encoding="utf8") lexer = FmlLexer(inputStream) #create a lexer for inputStream stream = CommonTokenStream(lexer) #create a token buffer for token storage parser = FmlParser( stream) #create a parser to process the token in buffer storage tree = parser.file_input( ) #start parsing the token stream based on the rule "file_input" # 打印语法信息 printer = CompilerPrinter() lexer.symbolicNames = parser.symbolicNames #进行树状结构的转换 analy = FmlVisitorAnalytic()
def getModuleNameFromOutputPath(self, path):
    """Extract a module name from an output *path*: the text between the
    final path separator (self.slash) and the final underscore.

    Raises ValueError if either marker is missing from *path*.
    """
    sep_idx = path.rindex(self.slash)
    underscore_idx = path.rindex("_")
    return path[sep_idx + 1:underscore_idx]
def get_uncompressed_path(path):
    """Strip the final extension (e.g. the '.gz' of 'x.tar.gz') from
    *path*. Raises ValueError when *path* has no dot.
    """
    cut = path.rindex(".")
    return path[:cut]
def get_compression_extension(path):
    """Return the text after the final '.' in *path* (e.g. 'gz' for
    'x.tar.gz'). Raises ValueError when *path* has no dot.
    """
    dot = path.rindex(".")
    return path[dot + 1:]
# generate all possible combinations of path roots paths = [ os.path.join(path, letter) for letter in letters for path in pathroots ] # leave only existing directories paths = filter(lambda name: os.path.exists(name), paths) for path in paths: letter = os.path.basename(path) # generate the path of a destination root: # - extract only letter if the path does not contain any nested path # for example, for 'D:\A' the root is 'A' # - keep the letter and the nested path, otherwise # for example, for 'D:\Foo\A' the root is 'Foo\A' if path.index(os.sep) == path.rindex(os.sep): root = letter else: root = path[path.index(os.sep) + 1:] # iterate through all performers for performer in os.listdir(path): performerpath = os.path.join(path, performer) # iterate through all files in performer's Picture folder imagepath = os.path.join(performerpath, 'Picture') if os.path.exists(imagepath): for imagefile in os.listdir(imagepath): srcfile = os.path.join(imagepath, imagefile) if not os.path.isfile(srcfile): continue # if the file contains only digits (album ID) # or starts with 'photo' or 'foto' (performer photo)
def upDirectory(self, path):
    """Return *path* with its last component removed (its parent
    directory), using self.slash as the separator.

    A single trailing separator is ignored, so 'a/b/' and 'a/b' both
    yield 'a'. Raises ValueError when no separator remains.
    """
    slashIdx = path.rindex(self.slash)
    if path[-1] == self.slash:
        # BUG FIX: str.rindex takes positional (sub, start, end) only; the
        # original `path.rindex(self.slash, end=...)` raised TypeError for
        # every path ending in the separator.
        slashIdx = path.rindex(self.slash, 0, len(path) - 1)
    return path[:slashIdx]