def _pick_filepath(formats, name):
    '''
    Starting with the most specific and preferred filename format, check if
    there is an existing database that matches the name we're looking for,
    and return that path. If none of them exist, then use the most preferred
    filepath.
    '''
    # `fmt` instead of `format` so we don't shadow the builtin.
    paths = [pathclass.Path(fmt.format(name=name)) for fmt in formats]
    for path in paths:
        if path.is_file:
            return path
    # No existing database matched any format; fall back to the last one.
    return paths[-1]
def add_directory(directory):
    '''
    Add a directory to the gitcheckup.txt file, creating that file if it
    does not exist.
    '''
    path = pathclass.Path(directory)
    try:
        known = set(read_directories_file())
    except NoConfigFile:
        known = set()
    known.add(path)
    write_directories_file(known)
def empty_directories_argparse(args):
    '''
    Print the absolute path of every empty directory, taken either from the
    given glob patterns or from the current working directory's listing.
    '''
    if args.patterns:
        patterns = pipeable.input_many(args.patterns, skip_blank=True, strip=True)
        candidates = (
            pathclass.Path(result)
            for pattern in patterns
            for result in winglob.glob(pattern)
        )
    else:
        candidates = pathclass.cwd().listdir()

    for candidate in candidates:
        if not candidate.is_dir:
            continue
        if not candidate.listdir():
            pipeable.stdout(candidate.absolute_path)
def bitwise_or_argparse(args):
    '''
    Bitwise-OR the bytes of two or more input files together and write the
    result to args.output. The output is as long as the longest input.
    Returns 1 on argument errors or when the user declines to overwrite.
    '''
    patterns = pipeable.input_many(args.files, skip_blank=True, strip=True)
    files = [file for pattern in patterns for file in winglob.glob(pattern)]
    files = [pathclass.Path(file) for file in files]
    if len(files) < 2:
        log.fatal('Need at least two input files.')
        return 1

    output = pathclass.Path(args.output)
    if output.is_dir:
        log.fatal('Output path "%s" is a directory.', args.output)
        return 1

    if not output.exists:
        pass
    elif args.overwrite:
        pass
    elif not interactive.getpermission(f'Overwrite "{output.absolute_path}"?'):
        return 1

    # Open inputs only after all checks pass, and always close them.
    # The original code leaked every handle (inputs and output).
    handles = [file.open('rb') for file in files]
    all_handles = list(handles)
    try:
        with output.open('wb') as output_handle:
            while True:
                chunk = 0
                # length starts at 1 so chunk.to_bytes always has room for
                # at least one byte.
                length = 1
                # Iterate over a copy because exhausted handles are removed.
                for handle in handles[:]:
                    read = handle.read(CHUNK_SIZE)
                    length = max(length, len(read))
                    if not read:
                        handles.remove(handle)
                    # NOTE(review): big-endian from_bytes right-aligns a
                    # short read against longer ones -- presumably intended,
                    # verify if inputs of different sizes matter.
                    chunk |= int.from_bytes(read, 'big')
                if not handles:
                    break
                output_handle.write(chunk.to_bytes(length, 'big'))
    finally:
        for handle in all_handles:
            handle.close()

    pipeable.stdout(output.absolute_path)
def _for_object_helper(cls, name, path_formats, do_create=True, fix_name=False):
    '''
    Resolve `name` to a database filepath and construct the database.
    A name with directory components is used as a path directly; a bare
    name is resolved through the known path formats.
    '''
    if name == os.path.basename(name):
        filepath = cls._pick_filepath(formats=path_formats, name=name)
    else:
        filepath = pathclass.Path(name)

    database = cls(filepath=filepath, do_create=do_create)

    if fix_name:
        return (database, name_from_path(name))
    return database
def init_db():
    '''
    Connect to sb.db next to this file, running the first-time setup script
    when the database file does not already exist.
    Returns (connection, cursor).
    '''
    log.debug('Initializing database.')
    db_path = pathclass.Path(__file__).parent.with_child('sb.db')
    if db_path.is_link and not db_path.is_file:
        raise RuntimeError(f'{db_path.absolute_path} is a broken link.')

    needs_setup = not db_path.is_file
    sql = sqlite3.connect(db_path.absolute_path)
    cur = sql.cursor()

    if needs_setup:
        log.debug('Running first-time database setup.')
        sqlhelpers.executescript(conn=sql, script=DB_INIT)

    return (sql, cur)
def add_channel(
    self,
    channel_id,
    *,
    commit=True,
    download_directory=None,
    queuefile_extension=None,
    get_videos=False,
    name=None,
):
    '''
    Add a channel to the database and return it. If the channel already
    exists, the existing channel is returned unchanged.

    commit: commit the transaction after inserting.
    download_directory: optional per-channel download location; normalized
        to an absolute path before storing.
    queuefile_extension: optional per-channel queuefile extension.
    get_videos: if True, fetch the channel's videos immediately.
    name: display name; looked up from the YouTube API when not given.
    '''
    # Idempotency: return the existing channel rather than re-inserting.
    try:
        return self.get_channel(channel_id)
    except exceptions.NoSuchChannel:
        pass

    if name is None:
        name = self.youtube.get_user_name(channel_id)

    if download_directory is not None:
        download_directory = pathclass.Path(download_directory).absolute_path

    self.log.info('Adding channel %s %s', channel_id, name)

    data = {
        'id': channel_id,
        'name': name,
        'uploads_playlist': self.youtube.get_user_uploads_playlist_id(channel_id),
        'download_directory': download_directory,
        'queuefile_extension': queuefile_extension,
        # New channels start in the 'pending' automark state.
        'automark': "pending",
    }

    self.sql_insert(table='channels', data=data)

    channel = self.get_cached_instance('channel', data)

    if get_videos:
        # Refresh without committing; the single commit below covers it.
        channel.refresh(commit=False)

    if commit:
        self.commit()

    return channel
def init_db():
    '''
    Connect to hnarchive.db in the current directory, binding the
    module-level `sql` and `cur`, and run first-time setup if the
    database file is new.
    '''
    global sql
    global cur
    log.debug('Initializing database.')
    db_path = pathclass.Path('hnarchive.db')
    if db_path.is_link and not db_path.is_file:
        raise RuntimeError(f'{db_path.absolute_path} is a broken link.')

    fresh = not db_path.is_file
    sql = sqlite3.connect(db_path.absolute_path)
    cur = sql.cursor()

    if fresh:
        log.debug('Running first-time database setup.')
        sqlhelpers.executescript(conn=sql, script=DB_INIT)
def grayscale(filename, *, inplace=False):
    '''
    Convert an image to grayscale, saving the result as <name>_gray.<ext>,
    or over the original file when inplace=True. Files whose name already
    ends in _gray are skipped.
    '''
    path = pathclass.Path(filename)
    stem = path.replace_extension('').basename
    if stem.endswith('_gray'):
        return

    if inplace:
        target = path
    else:
        target = path.parent.with_child(stem + '_gray').add_extension(path.extension)

    image = PIL.Image.open(path.absolute_path).convert('LA')
    print(f'{target.relative_path}')
    image.save(target.absolute_path)
def __init__(self, filepath, do_create=True):
    '''
    Open (or create) the database at `filepath`, prepare the sibling output
    directories, initialize the schema, and load the config table.

    Raises exceptions.DBNotFound when the file does not exist and
    do_create is False. Raises ValueError when the on-disk schema version
    differs from DATABASE_VERSION.
    '''
    self.filepath = pathclass.Path(filepath)
    if not self.filepath.is_file:
        if not do_create:
            raise exceptions.DBNotFound(self.filepath)
        print('New database', self.filepath.relative_path)
        os.makedirs(self.filepath.parent.absolute_path, exist_ok=True)

    # Sibling directories used for the various output artifacts.
    self.breakdown_dir = self.filepath.parent.with_child('breakdown')
    self.offline_reading_dir = self.filepath.parent.with_child('offline_reading')
    self.redmash_dir = self.filepath.parent.with_child('redmash')
    self.styles_dir = self.filepath.parent.with_child('styles')
    self.wiki_dir = self.filepath.parent.with_child('wiki')

    existing_database = self.filepath.exists
    self.sql = sqlite3.connect(self.filepath.absolute_path)
    self.cur = self.sql.cursor()

    if existing_database:
        # Refuse to open databases stamped with a different schema version.
        # user_version 0 means "never stamped" and is allowed through.
        self.cur.execute('PRAGMA user_version')
        existing_version = self.cur.fetchone()[0]
        if existing_version > 0 and existing_version != DATABASE_VERSION:
            message = ERROR_DATABASE_OUTOFDATE
            message = message.format(current=existing_version, new=DATABASE_VERSION)
            raise ValueError(message)

    # DB_INIT runs on every open, including existing databases --
    # presumably its statements are idempotent (TODO confirm).
    statements = DB_INIT.split(';')
    for statement in statements:
        self.cur.execute(statement)
    self.sql.commit()

    # Load config rows, inserting defaults for any missing keys.
    self.config = {}
    for (key, default_value) in DEFAULT_CONFIG.items():
        self.cur.execute('SELECT value FROM config WHERE key == ?', [key])
        existing_value = self.cur.fetchone()
        if existing_value is None:
            self.cur.execute('INSERT INTO config VALUES(?, ?)', [key, default_value])
            self.config[key] = default_value
        else:
            existing_value = existing_value[0]
            # Coerce stored text back to int when the default is an int.
            if isinstance(default_value, int):
                existing_value = int(existing_value)
            self.config[key] = existing_value
def __init__(
    self,
    data_directory=None,
):
    '''
    Initialize the database: prepare the data directory, open or create the
    sqlite file, load the config, and prepare the image directory.

    data_directory: root directory for all stored data; defaults to
        constants.DEFAULT_DATADIR.
    '''
    super().__init__()

    if data_directory is None:
        data_directory = constants.DEFAULT_DATADIR

    # DATA DIR PREP
    data_directory = helpers.remove_path_badchars(data_directory, allowed=':/\\')
    self.data_directory = pathclass.Path(data_directory)
    os.makedirs(self.data_directory.absolute_path, exist_ok=True)

    self.log = logging.getLogger('recipedb:%s' % self.data_directory.absolute_path)
    self.log.setLevel(logging.DEBUG)

    # DATABASE
    self.database_filepath = self.data_directory.with_child(constants.DEFAULT_DBNAME)
    existing_database = self.database_filepath.exists
    self.sql = sqlite3.connect(self.database_filepath.absolute_path)

    if not existing_database:
        self._first_time_setup()

    if existing_database:
        self._check_version()

    # CONFIG
    self.config_filepath = self.data_directory.with_child(constants.DEFAULT_CONFIGNAME)
    self.config = self._load_config()
    # Replace the startup DEBUG level with the configured one.
    self.log.setLevel(self.config['log_level'])

    # IMAGE DIRECTORY
    self.image_directory = self.data_directory.with_child(constants.DEFAULT_IMAGEDIR)
    os.makedirs(self.image_directory.absolute_path, exist_ok=True)

    # Presumably callables to be run at commit time -- verify against the
    # commit() implementation.
    self.on_commit_queue = []
def fpk_argparse(args):
    '''
    Download the latest apk for each named package into args.destination.
    With args.folders, each package gets its own subdirectory.
    Returns 0 when every package succeeded, 1 when any failed.
    '''
    destination = pathclass.Path(args.destination)
    destination.assert_is_directory()

    return_status = 0
    for package in args.packages:
        package = normalize_package_name(package)

        try:
            # The lambda captures this iteration's `package`, but it is
            # called immediately inside _retry_request, so late binding is
            # not a problem here.
            apk_url = _retry_request(lambda: get_apk_url(package))
        except Exception:
            log.error('%s was unable to get apk url.', package)
            return_status = 1
            continue

        apk_basename = downloady.basename_from_url(apk_url)
        if args.folders:
            this_dest = destination.with_child(package)
            this_dest.makedirs(exist_ok=True)
        else:
            this_dest = destination
        this_dest = this_dest.with_child(apk_basename)

        # Skip files that are already downloaded.
        if this_dest.exists:
            log.info('%s exists.', this_dest.absolute_path)
            continue

        log.info('Downloading %s.', this_dest.absolute_path)
        try:
            _retry_request(lambda: downloady.download_file(
                apk_url,
                this_dest,
                callback_progress=downloady.Progress2,
                timeout=30,
            ))
        except Exception:
            log.error('%s was unable to download apk.', package)
            return_status = 1
            continue

    return return_status
def prune_dirs(starting):
    '''
    Delete every empty directory under `starting`, then re-check parents
    that may have become empty as a result of a deletion.
    '''
    starting = pathclass.Path(starting)
    recheck = set()

    def try_prune(candidate):
        # Never remove the root itself or anything outside of it.
        if candidate == starting or candidate not in starting:
            return
        if candidate.listdir():
            return
        print(candidate.absolute_path)
        os.rmdir(candidate.absolute_path)
        recheck.add(candidate.parent)

    walker = spinal.walk(starting, yield_directories=True, yield_files=False)
    for candidate in walker:
        try_prune(candidate)

    while recheck:
        try_prune(recheck.pop())
def sole_lift_argparse(args):
    '''
    For every directory under args.starting whose only child is itself a
    directory, lift that child's contents up one level and remove the child.
    Parents are re-queued because a lift can turn them into sole-child
    directories too.
    '''
    starting = pathclass.Path(args.starting)
    queue = collections.deque()
    queue.extend(spinal.walk(starting, yield_files=False, yield_directories=True))
    while len(queue) > 0:
        directory = queue.popleft()

        # An earlier lift may have already removed this directory.
        if not directory.exists:
            log.debug('%s no longer exists.', directory)
            continue

        if directory not in starting:
            log.debug('%s is outside of starting.', directory)
            continue

        children = directory.listdir()
        child_count = len(children)
        if child_count != 1:
            log.debug('%s has %d children.', directory, child_count)
            continue

        child = children[0]
        if not child.is_dir:
            log.debug('%s contains a file, not a dir.', directory)
            continue

        log.info('Lifting contents of %s.', child.absolute_path)

        # child is renamed to random hex so that the grandchildren we are about
        # to lift don't have name conflicts with the child dir itself.
        # Consider .\abc\abc where the grandchild can't be moved.
        temp_dir = directory.with_child(passwordy.urandom_hex(32))
        os.rename(child.absolute_path, temp_dir.absolute_path)
        for grandchild in temp_dir.listdir():
            shutil.move(grandchild.absolute_path, directory.absolute_path)

        # If anything remains, a move failed; abort rather than rmdir.
        if temp_dir.listdir():
            raise Exception()
        os.rmdir(temp_dir.absolute_path)

        # This directory's parent may have become a sole-child dir now.
        queue.append(directory.parent)
def __init__(self, filepath, do_create=True):
    '''
    Open the database at `filepath`, creating it (and its parent directory)
    when allowed, prepare sibling output directories, and execute the
    DB_INIT statements.
    '''
    self.filepath = pathclass.Path(filepath)
    if not self.filepath.is_file:
        if not do_create:
            raise exceptions.DBNotFound(self.filepath)
        print('New database', self.filepath.relative_path)
        os.makedirs(self.filepath.parent.absolute_path, exist_ok=True)

    parent = self.filepath.parent
    self.breakdown_dir = parent.with_child('breakdown')
    self.offline_reading_dir = parent.with_child('offline_reading')
    self.redmash_dir = parent.with_child('redmash')
    self.styles_dir = parent.with_child('styles')
    self.wiki_dir = parent.with_child('wiki')

    self.sql = sqlite3.connect(self.filepath.absolute_path)
    self.cur = self.sql.cursor()

    for statement in DB_INIT.split(';'):
        self.cur.execute(statement)
    self.sql.commit()
def __init__(self, filepath, *, do_create=True, skip_version_check=False):
    '''
    Open (or create) the database at `filepath`.

    do_create: when False, raise exceptions.DatabaseNotFound instead of
        creating a missing database file.
    skip_version_check: when True, do not validate the on-disk schema
        version against the expected one.
    '''
    self.filepath = pathclass.Path(filepath)
    if not self.filepath.is_file:
        if not do_create:
            raise exceptions.DatabaseNotFound(self.filepath)
        print('New database', self.filepath.relative_path)
        self.filepath.parent.makedirs(exist_ok=True)

    # Sibling directories used for the various output artifacts.
    self.breakdown_dir = self.filepath.parent.with_child('breakdown')
    self.offline_reading_dir = self.filepath.parent.with_child('offline_reading')
    self.index_dir = self.filepath.parent.with_child('index')
    self.styles_dir = self.filepath.parent.with_child('styles')
    self.wiki_dir = self.filepath.parent.with_child('wiki')

    existing_database = self.filepath.exists
    self.sql = sqlite3.connect(self.filepath.absolute_path)
    self.cur = self.sql.cursor()

    if existing_database:
        if not skip_version_check:
            self._check_version()
        self._load_pragmas()
    else:
        self._first_time_setup()

    # Load config rows, inserting defaults for any missing keys.
    self.config = {}
    for (key, default_value) in DEFAULT_CONFIG.items():
        self.cur.execute('SELECT value FROM config WHERE key == ?', [key])
        existing_value = self.cur.fetchone()
        if existing_value is None:
            self.cur.execute('INSERT INTO config VALUES(?, ?)', [key, default_value])
            self.config[key] = default_value
        else:
            existing_value = existing_value[0]
            # Coerce stored text back to int when the default is an int.
            if isinstance(default_value, int):
                existing_value = int(existing_value)
            self.config[key] = existing_value
def adbinstall_argparse(args):
    '''
    Install one or more apks via `adb install`. Arguments are glob
    patterns; for a directory, the natural-sort-last *.apk inside it is
    chosen. Returns 1 if the user declines the confirmation prompt.
    '''
    patterns = pipeable.input_many(args.apks, skip_blank=True, strip=True)
    apks = [file for pattern in patterns for file in winglob.glob(pattern)]

    installs = []
    # Bug fix: this previously iterated args.apks, so the glob-expanded
    # `apks` list was computed and then ignored.
    for apk in apks:
        apk = pathclass.Path(apk)
        if apk.is_dir:
            files = apk.glob('*.apk')
            files.sort(key=lambda x: natural_sorter(x.basename.lower()))
            # Natural-sort-last file is presumably the newest version.
            apk = files[-1]
        installs.append(apk)

    if not args.autoyes:
        for apk in installs:
            print(apk.absolute_path)
        if not interactive.getpermission('Is that okay?', must_pick=True):
            return 1

    for apk in installs:
        command = f'adb install "{apk.absolute_path}"'
        log.info(command)
        os.system(command)
def download_video(self, video, commit=True, force=False):
    '''
    Create the queuefile within the channel's associated directory, or the
    default directory from the config file.

    video: a ytapi.Video instance or a bare video id string.
    force: create the queuefile even when the video is not 'pending'.
    '''
    # Accept either a Video object or a bare id; look up the object when
    # only the id was given.
    if isinstance(video, ytapi.Video):
        video_id = video.id
    else:
        video_id = video
        video = self.get_video(video_id)

    if video.state != 'pending' and not force:
        self.log.debug('%s does not need to be downloaded.', video_id)
        return

    try:
        channel = self.get_channel(video.author_id)
        # Channel-specific settings take precedence over the global config.
        download_directory = channel.download_directory or self.config['download_directory']
        extension = channel.queuefile_extension or self.config['queuefile_extension']
    except exceptions.NoSuchChannel:
        download_directory = self.config['download_directory']
        extension = self.config['queuefile_extension']

    self.log.info('Creating queuefile for %s.', video_id)
    download_directory = pathclass.Path(download_directory)
    download_directory.makedirs(exist_ok=True)
    queuefile = download_directory.with_child(video_id).replace_extension(extension)
    queuefile.touch()

    video.mark_state('downloaded', commit=False)
    if commit:
        self.commit()
def loop_once(extension, regex=None):
    '''
    Check the clipboard for a single-token http(s) URL and, if one is
    found (and matches `regex` when given), write it into a new
    randomly-named file with the given extension, then clear the clipboard.
    '''
    try:
        text = pyperclip.paste()
    except Exception:
        # Clipboard access can fail transiently; just wait for next loop.
        return

    text = text.strip()
    # Only accept a single token -- a bare URL, not prose containing one.
    if len(text.split(sep=None, maxsplit=1)) > 1:
        return
    if 'http://' not in text and 'https://' not in text:
        return
    if regex and not re.search(regex, text):
        return

    path = pathclass.Path(passwordy.urandom_hex(12)).add_extension(extension)
    pyperclip.copy('')
    print(path.basename, text)
    # Context manager so the handle is closed even if the write raises
    # (the original leaked the handle on error).
    with path.open('w', encoding='utf-8') as handle:
        handle.write(text)
def assert_enough_space(pathsize, workdir, moveto, rec, rev, par):
    '''
    Raise NotEnoughSpace when the relevant drive(s) lack room for the
    archive plus recovery data plus the reserved headroom.
    '''
    overhead = (rec + rev + par) / 100
    needed = pathsize * (1 + overhead)
    reserve = RESERVE_SPACE_ON_DRIVE + needed

    workdir_drive = os.path.splitdrive(workdir.absolute_path)[0] + os.sep
    free_space = shutil.disk_usage(workdir_drive).free

    # When moving to another drive, the tighter of the two limits applies.
    if moveto is not None:
        moveto_drive = pathclass.Path(os.path.splitdrive(moveto.absolute_path)[0])
        moveto_free = shutil.disk_usage(moveto_drive.absolute_path).free
        free_space = min(free_space, moveto_free)

    message = ' '.join([
        f'For {bytestring.bytestring(pathsize)},',
        f'reserve {bytestring.bytestring(reserve)}',
        f'out of {bytestring.bytestring(free_space)}.',
    ])
    log.debug(message)
    if reserve > free_space:
        raise NotEnoughSpace(message)
import datetime import flask from flask import request import json import mimetypes import os import traceback import bot import ycdl from voussoirkit import pathclass from . import jinja_filters root_dir = pathclass.Path(__file__).parent.parent TEMPLATE_DIR = root_dir.with_child('templates') STATIC_DIR = root_dir.with_child('static') FAVICON_PATH = STATIC_DIR.with_child('favicon.png') youtube_core = ycdl.ytapi.Youtube(bot.YOUTUBE_KEY) youtube = ycdl.YCDL(youtube_core) site = flask.Flask( __name__, template_folder=TEMPLATE_DIR.absolute_path, static_folder=STATIC_DIR.absolute_path, ) site.config.update( SEND_FILE_MAX_AGE_DEFAULT=180,
import recipedb from voussoirkit import pathclass image_dir = pathclass.Path(__file__).parent.with_child('sample_images') rdb = recipedb.RecipeDB() angela = rdb.new_user(username="******", display_name="Angela", password="******", bio_text="Hello! This is a sample biography for Angela.", profile_image=rdb.new_image( image_dir.with_child('angel_cake.jpg'))) bob = rdb.new_user(username="******", display_name="Bob", password="******", bio_text="Hello! This is a sample biography for Bob.", profile_image=rdb.new_image( image_dir.with_child('homemade_pizza.jpg'))) caitlyn = rdb.new_user( username="******", display_name="Caitlyn", password="******", bio_text="Hello! This is a sample biography for Caitlyn.", profile_image=rdb.new_image(image_dir.with_child('meringue.jpg'))) anonymous = rdb.new_user( username='******', display_name='Rainbowman',
def url_to_path(path):
    '''
    Convert a URL path component into a local pathclass.Path.

    The query string is discarded BEFORE percent-decoding; the original
    code decoded first, so a filename containing an encoded '?' (%3F)
    would be truncated at that character.
    '''
    path = path.split('?')[0]
    path = urllib.parse.unquote(path)
    path = path.strip('/')
    return pathclass.Path(path)
def rarpar(
    path,
    *,
    basename=None,
    compression=None,
    dictionary_size=None,
    dry=False,
    moveto=None,
    par=None,
    password=None,
    rar_profile=None,
    recycle_original=False,
    rec=None,
    rev=None,
    solid=False,
    volume=None,
    workdir='.',
):
    '''
    Build and run a script that rars (and optionally pars) `path` inside
    `workdir`, optionally moves the outputs to `moveto`, and optionally
    recycles the original. Returns the status from run_script.

    dry: build the script but let run_script skip execution.
    rec, rev, par: recovery record / recovery volume / par2 percentages.
    Raises RarExists when output files with this basename already exist.
    '''
    path = pathclass.Path(path)

    # Validation
    ###########################################################################

    path.assert_exists()
    path.correct_case()

    workdir = pathclass.Path(workdir)
    workdir.assert_is_directory()

    if moveto is not None:
        moveto = pathclass.Path(moveto)
        moveto.assert_is_directory()

    if compression not in [None, 0, 1, 2, 3, 4, 5]:
        raise ValueError(f'compression must be 0-5 or None, not {compression}.')

    dictionary_size = normalize_dictionary_size(dictionary_size)

    # type() check rather than isinstance, so bool subtypes of int like
    # 0/1 are rejected.
    if type(solid) is not bool:
        raise TypeError(f'solid must be True or False, not {solid}.')

    password = normalize_password(password)

    pathsize = path.size
    volume = normalize_volume(volume, pathsize)
    rec = normalize_percentage(rec)
    rev = normalize_percentage(rev)
    par = normalize_percentage(par)

    if RESERVE_SPACE_ON_DRIVE:
        assert_enough_space(
            pathsize,
            workdir=workdir,
            moveto=moveto,
            rec=rec or 0,
            rev=rev or 0,
            par=par or 0,
        )

    date = time.strftime('%Y-%m-%d')
    timestamp = time.strftime('%Y-%m-%d_%H-%M-%S')
    if not basename:
        basename = f'{path.basename} ({timestamp})'
    else:
        # User-provided basename may use {basename}/{date}/{timestamp}.
        basename = basename.format(basename=path.basename, date=date, timestamp=timestamp)

    # Refuse to run if outputs with this basename already exist anywhere.
    existing = None
    if workdir:
        existing = existing or workdir.glob(f'{basename}*.rar')
    if moveto:
        existing = existing or moveto.glob(f'{basename}*.rar')
    if existing:
        raise RarExists(f'{existing[0].absolute_path} already exists.')

    # Script building
    ###########################################################################

    script = []

    rarcommand = RARCOMMAND(
        path=path,
        basename=basename,
        compression=compression,
        dictionary_size=dictionary_size,
        password=password,
        profile=rar_profile,
        rec=rec,
        rev=rev,
        solid=solid,
        volume=volume,
        workdir=workdir,
    )
    script.append(rarcommand)

    if par:
        parcommand = PARCOMMAND(
            basename=basename,
            par=par,
            workdir=workdir,
        )
        script.append(parcommand)

    def move_rars():
        move(f'{workdir.absolute_path}\\{basename}*.rar', f'{moveto.absolute_path}')

    def move_revs():
        move(f'{workdir.absolute_path}\\{basename}*.rev', f'{moveto.absolute_path}')

    def move_pars():
        move(f'{workdir.absolute_path}\\{basename}*.par2', f'{moveto.absolute_path}')

    if moveto:
        # `if True` mirrors the rev/par conditionals below: rars are
        # always produced, so they are always moved.
        if True:
            script.append(move_rars)
        if rev:
            script.append(move_revs)
        if par:
            script.append(move_pars)

    def recycle():
        send2trash.send2trash(path.absolute_path)

    if recycle_original:
        script.append(recycle)

    # Execution
    ###########################################################################

    status = run_script(script, dry)

    return status
def search(
    *,
    yes_all=None,
    yes_any=None,
    not_all=None,
    not_any=None,
    case_sensitive=False,
    content_args=None,
    do_expression=False,
    do_glob=False,
    do_regex=False,
    do_strip=False,
    line_numbers=False,
    local_only=False,
    only_dirs=False,
    only_files=False,
    root_path='.',
    text=None,
):
    '''
    Generator yielding matching result lines.

    yes_all / yes_any / not_all / not_any: search terms; each may be a
        single string or a list of strings.
    content_args: when given, matching filepaths have their contents
        searched via the search_contents_* helpers instead of being
        yielded directly.
    text: None = walk the filesystem from root_path; list/tuple = search
        those objects; str = search its lines.

    Raises NoTerms when no terms and no content_args are supplied.
    '''
    terms = {
        'yes_all': yes_all,
        'yes_any': yes_any,
        'not_all': not_all,
        'not_any': not_any
    }
    # Normalize each term set to a list: bare string -> [string],
    # None -> [].
    terms = {k: ([v] if isinstance(v, str) else v or []) for (k, v) in terms.items()}

    # Plain substring matching applies only when neither glob nor regex
    # mode was requested.
    do_plain = not (do_glob or do_regex)

    if all(v == [] for v in terms.values()) and not content_args:
        raise NoTerms('No terms supplied')

    def term_matches(line, term):
        if not case_sensitive:
            line = line.lower()

        if do_expression:
            return term.evaluate(line)

        return (
            (do_plain and term in line)
            or (do_regex and re.search(term, line))
            or (do_glob and winglob.fnmatch(line, term))
        )

    if do_expression:
        # The value still needs to be a list so the upcoming any() / all()
        # receives an iterable as it expects. It just happens to be 1 tree.
        trees = {}
        for (term_type, term_expression) in terms.items():
            if term_expression == []:
                trees[term_type] = []
                continue
            tree = ' '.join(term_expression)
            tree = expressionmatch.ExpressionTree.parse(tree)
            if not case_sensitive:
                tree.map(str.lower)
            trees[term_type] = [tree]
        terms = trees
    elif not case_sensitive:
        terms = {k: [x.lower() for x in v] for (k, v) in terms.items()}

    if text is None:
        search_objects = spinal.walk(
            root_path,
            recurse=not local_only,
            yield_directories=True,
        )
    elif isinstance(text, (list, tuple)):
        search_objects = text
    else:
        search_objects = text.splitlines()

    for (index, search_object) in enumerate(search_objects):
        # Paths are matched by basename but reported by absolute path.
        if isinstance(search_object, pathclass.Path):
            if only_files and not search_object.is_file:
                continue
            if only_dirs and not search_object.is_dir:
                continue
            search_text = search_object.basename
            result_text = search_object.absolute_path
        elif isinstance(search_object, HeaderedText):
            search_text = search_object.text
            result_text = search_object.with_header
        else:
            search_text = search_object
            result_text = search_object

        if not all_terms_match(search_text, terms, term_matches):
            continue

        if do_strip:
            result_text = result_text.strip()
        if line_numbers:
            result_text = f'{index+1:>4} | {result_text}'

        if not content_args:
            yield result_text
            continue

        # Content search: only real files can be opened.
        filepath = pathclass.Path(search_object)
        if not filepath.is_file:
            continue

        if filepath.extension == 'lnk' and winshell:
            yield from search_contents_windows_lnk(filepath, content_args)
        else:
            yield from search_contents_generic(filepath, content_args)
def search(
    *,
    yes_all=None,
    yes_any=None,
    not_all=None,
    not_any=None,
    case_sensitive=False,
    content_args=None,
    do_expression=False,
    do_glob=False,
    do_regex=False,
    line_numbers=False,
    local_only=False,
    text=None,
):
    '''
    Generator yielding matching result lines.

    yes_all / yes_any / not_all / not_any: search terms; each may be a
        single string or a list of strings.
    content_args: when given, matching filepaths have their contents
        searched recursively via this same function.
    text: None = walk the filesystem from the cwd; otherwise search the
        string's lines.

    Raises ValueError when no terms and no content_args are supplied.
    '''
    if text is None:
        print('starting search')
    terms = {
        'yes_all': yes_all,
        'yes_any': yes_any,
        'not_all': not_all,
        'not_any': not_any
    }
    # Normalize each term set to a list: bare string -> [string],
    # None -> [].
    terms = {k: ([v] if isinstance(v, str) else v or []) for (k, v) in terms.items()}

    if all(v == [] for v in terms.values()) and not content_args:
        raise ValueError('No terms supplied')

    def term_matches(line, term):
        if not case_sensitive:
            line = line.lower()

        if do_expression:
            return term.evaluate(line)

        return (
            (term in line)
            or (do_regex and re.search(term, line))
            or (do_glob and fnmatch.fnmatch(line, term))
        )

    if do_expression:
        # The value still needs to be a list so the upcoming any() / all()
        # receives an iterable as it expects. It just happens to be 1 tree.
        trees = {}
        for (key, value) in terms.items():
            if value == []:
                trees[key] = []
                continue
            tree = ' '.join(value)
            tree = expressionmatch.ExpressionTree.parse(tree)
            if not case_sensitive:
                tree.map(str.lower)
            trees[key] = [tree]
        terms = trees
    elif not case_sensitive:
        terms = {k: [x.lower() for x in v] for (k, v) in terms.items()}

    if text is None:
        search_objects = spinal.walk_generator(
            depth_first=False,
            recurse=not local_only,
            yield_directories=True,
        )
    else:
        search_objects = text.splitlines()

    for (index, search_object) in enumerate(search_objects):
        # Disabled progress indicator.
        if index % 10 == 0:
            pass

        # Paths are matched by basename but reported by absolute path.
        if isinstance(search_object, pathclass.Path):
            search_text = search_object.basename
            result_text = search_object.absolute_path
        else:
            search_text = search_object
            result_text = search_object

        if line_numbers:
            result_text = '%4d | %s' % (index+1, result_text)

        if all_terms_match(search_text, terms, term_matches):
            if not content_args:
                yield result_text
            else:
                filepath = pathclass.Path(search_object)
                if not filepath.is_file:
                    continue
                # Try the platform default encoding first, then utf-8;
                # binary files that fail both are skipped silently.
                try:
                    with open(filepath.absolute_path, 'r') as handle:
                        text = handle.read()
                except UnicodeDecodeError:
                    try:
                        with open(filepath.absolute_path, 'r', encoding='utf-8') as handle:
                            text = handle.read()
                    except UnicodeDecodeError:
                        continue
                except Exception:
                    safeprint.safeprint(filepath.absolute_path)
                    traceback.print_exc()
                    continue

                # Recurse into this function over the file's contents.
                content_args['text'] = text
                content_args['line_numbers'] = True
                results = search(**content_args)
                results = list(results)
                if not results:
                    continue
                yield filepath.absolute_path
                yield from results
                yield ''
import gevent.pywsgi import sys from voussoirkit import pathclass from voussoirkit import vlogging import bot import ycdl import backend #################################################################################################### site = backend.site HTTPS_DIR = pathclass.Path(__file__).parent.with_child('https') LOG_LEVEL = vlogging.NOTSET def ycdl_flask_launch( *, create, localhost_only, port, refresh_rate, use_https, ): if use_https is None: use_https = port == 443 if use_https:
<html> <body> <meta charset="UTF-8"> <meta name="viewport" content="width=device-width, initial-scale=1.0"/> <style type="text/css">Body {{font-family:Consolas}}</style> <form action="/password" method="post"> <input type="text" autofocus autocapitalize="off" name="password" placeholder="password" autocomplete="off"/> <input type="hidden" name="goto" value="{goto}"/> <input type="submit" value="Submit"/> </form> </body> </html> ''' ROOT_DIRECTORY = pathclass.Path(os.getcwd()) HIDDEN_FILENAMES = {'thumbs.db', 'desktop.ini', '$recycle.bin', 'system volume information'} TOKEN_COOKIE_NAME = 'simpleserver_token' # SERVER ########################################################################################### class RequestHandler(http.server.BaseHTTPRequestHandler): def __init__(self, request, client_info, server, individual_ratelimit): self.individual_ratelimit = ratelimiter.Ratelimiter(individual_ratelimit) super().__init__(request, client_info, server) @property def auth_cookie(self): cookie = self.headers.get('Cookie') if not cookie: return None
'''
Drag a file on top of this .py file, and it will have its filename
scrambled into a combination of 12 digits.
'''
import os
import random
import string
import sys

from voussoirkit import pathclass
from voussoirkit import winglob

argv = sys.argv[1:]

for pattern in argv:
    for result in winglob.glob(pattern):
        path = pathclass.Path(result)
        # Twelve random digits, keeping the original extension.
        scrambled = ''.join(random.choice(string.digits) for _ in range(12))
        new_path = path.parent.with_child(scrambled + path.dot_extension)
        os.rename(path.absolute_path, new_path.absolute_path)
        print('%s -> %s' % (path.absolute_path, new_path.basename))
def extension_registry_argparse(args):
    '''
    Argparse wrapper: register the file extension using the given ico file
    and human-readable name.
    '''
    ico_path = pathclass.Path(args.ico_file)
    return extension_registry(human_name=args.name, ico_file=ico_path)