def double_dump_test():
    """
    Perform a "double dump test" on every demo database.

    TODO: convert this to a Lino management command.
    """
    # NOTE: deliberately disabled -- everything below the raise is dead
    # code, kept for reference until the TODO conversion happens.
    raise Exception("Not yet converted after 20150129")
    if len(env.demo_databases) == 0:
        return
    # Two scratch dirs under the temp dir: `a` holds the first dump,
    # `b` the dump taken after restoring `a`.
    a = Path(env.temp_dir, 'a')
    b = Path(env.temp_dir, 'b')
    rmtree_after_confirm(a)
    rmtree_after_confirm(b)
    #~ if not confirm("This will possibly break the demo databases. Are you sure?"):
    #~     return
    #~ a.mkdir()
    with lcd(env.temp_dir):
        for db in env.demo_databases:
            if a.exists():
                a.rmtree()
            if b.exists():
                b.rmtree()
            # dump -> restore -> dump again; a faithful round-trip means
            # the final diff prints nothing.
            local("django-admin.py dump2py --settings=%s --traceback a" % db)
            local(
                "django-admin.py run --settings=%s --traceback a/restore.py" % db)
            local("django-admin.py dump2py --settings=%s --traceback b" % db)
            local("diff a b")
def check_avatar(self):
    """Verify that both avatar files exist on disk; if either is missing,
    wipe the avatar fields and save the object.

    NOTE(review): media root is hard-coded to /var/www/loka3 -- assumes
    the production layout; confirm before reusing elsewhere.
    """
    avatar_path = Path("/var/www/loka3/static/media/" + str(self.avatar))
    avatar_sm_path = Path("/var/www/loka3/static/media/" + str(self.avatar_sm))
    # Modernized: Python 2 print statements -> print() calls so this
    # also runs under Python 3 (output unchanged).
    print("{} exists: {}".format(avatar_path, avatar_path.exists()))
    if not avatar_path.exists() or not avatar_sm_path.exists():
        print("Wiping avatars for", self.name)
        self.avatar = None
        self.avatar_sm = None
        self.save()
def handle(self, *args, **options):
    """Load the directional, state and street-type fixtures from
    BASE_DIR, skipping any data file that is not present."""
    loaders = (
        ('directionals.txt', Directional.load_directionals),
        ('states.txt', State.load_states),
        ('street_types.txt', StreetType.load_street_types),
    )
    for filename, loader in loaders:
        data_path = Path(self.BASE_DIR, filename)
        if data_path.exists():
            loader(data_path)
def py_clean(ctx, batch=False):
    """
    Delete :xfile:`.pyc` files, :xfile:`.eggs` and :xfile:`__cache__`
    directories under the project's root direcotory.

    :param ctx: project context providing ``root_dir`` and
        ``cleanable_files`` (glob patterns).
    :param batch: when True, skip all interactive confirmations.
    """
    # Pass 1: collect __pycache__ dirs, confirm once for the whole batch.
    paths = []
    for root, dirs, files in os.walk(ctx.root_dir):
        p = Path(root).child('__pycache__')
        if p.exists():
            paths.append(p)
    if len(paths):
        if batch or confirm("Remove {0} __pycache__ directories".format(
                len(paths))):
            for p in paths:
                rmtree_after_confirm(p, True)

    # Pass 2: stray .pyc files, confirmed one by one unless batch.
    for root, dirs, files in os.walk(ctx.root_dir):
        for fn in files:
            if fn.endswith(".pyc"):
                full_path = os.path.join(root, fn)
                if batch or confirm("Remove file %s:" % full_path):
                    os.remove(full_path)

    # cleanup_pyc(ctx.root_dir, batch)
    # if atelier.current_project.main_package is not None:
    #     try:
    #         p = Path(atelier.current_project.main_package.__file__).parent
    #         cleanup_pyc(atelier.current_project.root_dir, batch)
    #     except AttributeError:
    #         # happened 20170310 in namespace package:
    #         # $ pywhich commondata
    #         # Traceback (most recent call last):
    #         #   File "<string>", line 1, in <module>
    #         # AttributeError: 'module' object has no attribute '__file__'
    #         pass

    # Pass 3: the .eggs directory, if any.
    p = ctx.root_dir.child('.eggs')
    if p.exists():
        rmtree_after_confirm(p, batch)

    # Pass 4: project-specific cleanable file patterns.
    files = []
    for pat in ctx.cleanable_files:
        for p in glob.glob(os.path.join(ctx.root_dir, pat)):
            files.append(p)
    if len(files):
        if batch or confirm("Remove {0} cleanable files".format(len(files))):
            for p in files:
                os.remove(p)
def download_frames(service_frames, output_dir, pdfs_only):
    """Download video frames (or detectron PDFs) for a service into
    ``output_dir`` using a process pool.

    :param service_frames: key into FRAMES_BY_SERVICE naming the ranges.
    :param output_dir: destination dir (created if missing, together with
        the y1/y2 camera subdirectories).
    :param pdfs_only: when truthy, fetch only the detectron PDFs.
    """
    output_dir = Path(output_dir)
    if not output_dir.exists():
        os.makedirs(output_dir)
    y1, y2 = output_dir.child('y1'), output_dir.child('y2')
    if not y1.exists():
        os.makedirs(y1)
    if not y2.exists():
        os.makedirs(y2)
    args = []
    if service_frames in FRAMES_BY_SERVICE:
        frames_ranges = FRAMES_BY_SERVICE[service_frames]
    else:
        # Unknown service: list the valid options and bail out.
        print('Service "{}" does not have frames ranges. Options are:'.format(
            service_frames))
        for k in FRAMES_BY_SERVICE:
            print('\t- {}'.format(k))
        return
    if pdfs_only:
        for pdf in get_detectron_pdfs_frames_paths(*frames_ranges):
            args.append((pdf, output_dir))
    else:
        # Skip frames that were already downloaded.
        for frame in get_video_frames_path(frames_ranges[0], []):
            if not check_if_not_downlowaded(frame, y1):
                args.append((frame, y1))
        for frame in get_video_frames_path([], frames_ranges[1]):
            if not check_if_not_downlowaded(frame, y2):
                args.append((frame, y2))
    # Fixed typo in the progress message ("Donwloading" -> "Downloading").
    print('Downloading {} frames...'.format(len(args)))
    with Pool(NUM_OF_PROCESSES) as pool:
        pool.starmap(download_s3_file, args)
def superuser(pubkey=None, username=None):
    """
    fab env superuser
    """
    # NOTE(review): this function was mangled by a secret scrubber -- the
    # '******' placeholders replaced literal values AND swallowed parts
    # of the prompt/crypt statements, so the body below is not valid
    # Python as-is.  Restore from version control before using.
    env.user = '******'
    keyfile = Path(pubkey or Path('~', '.ssh', 'id_rsa.pub')).expand()
    if not keyfile.exists():
        abort('Public key file does not exist: %s' % keyfile)
    username = username or prompt('Username: '******'Password: '******'perl -e \'print crypt(\"%s\", \"password\")\'' % (password), capture=True)
    with open(keyfile, 'r') as f:
        pubkey = f.read(65535)
    # Commands are templated with locals(); {password} is the crypted hash.
    commands = (
        'useradd -m -s /bin/bash -p {password} {username}',
        'mkdir ~{username}/.ssh -m 700',
        'echo "{pubkey}" >> ~{username}/.ssh/authorized_keys',
        'chmod 644 ~{username}/.ssh/authorized_keys',
        'chown -R {username}:{username} ~{username}/.ssh',
        'usermod -a -G sudo {username}',
    )
    for command in commands:
        run(command.format(**locals()))
def handle(self, *args, **options):
    """Create a MediaFile and a Transcript for the given media URL.

    Exactly one positional argument is required; when it names an
    existing local file it is converted to a file:// URL first.
    """
    from ....media.models import MediaFile
    from ...models import Transcript

    if len(args) != 1:
        raise CommandError('Provide media URL.')
    url = args[0]
    local_path = Path(url)
    if local_path.exists():
        url = 'file://{}'.format(local_path.absolute())
    verbose = options['verbosity']
    media_file = MediaFile.objects.create(
        data_url=url,
    )
    if verbose:
        self.stdout.write('Created media file: {}'.format(media_file))
    transcript = Transcript.objects.create(
        name=url,
    )
    if verbose:
        self.stdout.write('Created transcript: {}'.format(transcript))
def test_parameters(image_path, test_parameter, out_dir):
    """Sweep a single NightmareConfig parameter over its value range,
    calling deep_dream once per value.  Unknown parameter names are
    reported and ignored."""
    image_path = Path(image_path)
    out_dir = Path(out_dir)
    if not out_dir.exists():
        os.mkdir(out_dir)
    config = NightmareConfig(out_dir=out_dir)
    # parameter -> (exclusive upper bound, transform applied to the index)
    sweeps = {
        'layers': (21, lambda i: i),
        'rounds': (21, lambda i: i),
        'iters': (31, lambda i: i),
        'range': (11, lambda i: i),
        'octaves': (21, lambda i: i),
        'rate': (11, lambda i: i / 10),
    }
    if test_parameter not in sweeps:
        print('Parameter "{}" is invalid.'.format(test_parameter))
        return
    stop, transform = sweeps[test_parameter]
    for i in range(1, stop):
        setattr(config, test_parameter, transform(i))
        deep_dream(image_path, config)
def migrar_dados(interativo=True):
    """Run the full legacy-data migration: restore the MySQL dump, apply
    pre-migration fixes, wipe the target database, migrate every model
    and persist any occurrences to a YAML file in the repo.

    :param interativo: when True, ask for confirmation before wiping the
        target database; returns 0 if the user cancels.
    """
    # restore the mysql dump
    arq_dump = Path(
        DIR_DADOS_MIGRACAO.child('dumps_mysql',
                                 '{}.sql'.format(NOME_BANCO_LEGADO)))
    assert arq_dump.exists(), 'Dump do mysql faltando: {}'.format(arq_dump)
    info('Restaurando dump mysql de [{}]'.format(arq_dump))
    normaliza_dump_mysql(arq_dump)
    roda_comando_shell('mysql -uroot < {}'.format(arq_dump))
    # run pre-migration adjustments, if any exist for this "casa"
    arq_ajustes_pre_migracao = DIR_DADOS_MIGRACAO.child(
        'ajustes_pre_migracao', '{}.sql'.format(sigla_casa))
    if arq_ajustes_pre_migracao.exists():
        exec_legado(arq_ajustes_pre_migracao.read_file())
    uniformiza_banco()
    # wipe the old target database (destructive -- hence the prompt)
    if interativo:
        info('Todos os dados do banco serão excluidos. '
             'Recomendamos que faça backup do banco sapl '
             'antes de continuar.')
        info('Deseja continuar? [s/n]')
        resposta = input()
        if resposta.lower() in ['s', 'sim', 'y', 'yes']:
            pass
        else:
            info('Migração cancelada.')
            return 0
    info('Excluindo entradas antigas do banco destino.')
    call([
        PROJECT_DIR.child('manage.py'), 'flush', '--database=default',
        '--no-input'
    ], stdout=PIPE)
    # delete the default author types (created by the flush above)
    TipoAutor.objects.all().delete()
    fill_vinculo_norma_juridica()
    fill_dados_basicos()
    info('Começando migração: ...')
    try:
        ocorrencias.clear()
        migrar_todos_os_models()
    except Exception as e:
        # record the traceback in the occurrences before re-raising
        ocorrencias['traceback'] = str(traceback.format_exc())
        raise e
    finally:
        # always persist occurrences, even when the migration fails
        arq_ocorrencias = Path(REPO.working_dir, 'ocorrencias.yaml')
        with open(arq_ocorrencias, 'w') as arq:
            pyaml.dump(ocorrencias, arq, vspacing=1)
        REPO.git.add([arq_ocorrencias.name])
        info('Ocorrências salvas em\n {}'.format(arq_ocorrencias))
    # recreate default author types that were not created by the migration
    cria_models_tipo_autor()
def read_look_dream(out_dir):
    """For every phrase on FIRST_PAGE, download a random matching image,
    run deep_dream on it with a random config, and print each produced
    dream file."""
    out_dir = Path(out_dir)
    download_dir = out_dir.child('download')
    for directory in (out_dir, download_dir):
        if not directory.exists():
            os.mkdir(directory)
    for index, phrase in enumerate(FIRST_PAGE):
        print('Reading "{}"...'.format(phrase))
        source_image = search_random_image(phrase, download_dir)
        print('Dreaming "{}"...'.format(phrase))
        subdir_name = '000{} {}'.format(index, phrase).replace(' ', '_')
        config = NightmareConfig(out_dir.child(subdir_name))
        os.mkdir(config.out_dir)
        config.force_all_random()
        deep_dream(source_image, config)
        for dream_path in config.list_output_for_image(source_image):
            print(" New dream: {}".format(dream_path))
        sleep(10)
        print()
def content(request=None):
    """Resolve `request` against the package repository root.

    Returns a Dirs listing for directories, the Path itself for files,
    or aborts with the appropriate HTTP error.
    """
    base = Path(current_app.config.get('INUPYPI_REPO', Path('.', 'packages')))
    if request:
        repo = Path(base, request)
    else:
        repo = base
    try:
        repo = repo.absolute()
        base = base.absolute()
        if not repo.exists():
            if base == repo:
                raise InuPyPIMissingRepoPath
            # sets the request to lowercase and compares it with
            # the existing items in the repository in lowercase
            repo = search_path(repo, base)
            if not repo:
                raise InuPyPI404Exception
        if repo.isdir():
            return Dirs(repo)
        if repo.isfile():
            return repo
    except InuPyPIMissingRepoPath:
        abort(500, 'Missing repository or package path!')
    except InuPyPI404Exception:
        abort(404, 'Path or File could not be found!')
    except Exception:
        # Was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt; narrowed to Exception.
        abort(500, 'Internal Server Error!')
    return repo
def boostrap_nltk_data():
    """Ensure the punkt tokenizer data is available under ./data/,
    downloading it when missing."""
    nltk.data.path.append('./data/')
    # renamed local: it holds a Path, not a boolean
    punkt_pickle = Path('./data/tokenizers/punkt/english.pickle')
    if not punkt_pickle.exists():
        logging.info("Downloading NLTK Data")
        nltk.download('punkt', './data')
def run(ctx, experiment):
    """Run an experiment in "experiments/" or from a config file.

    Run an experiment by passing in a path to a config file. Experiments
    will be searched for in the experiments directory. To list available
    experiments, pass 'list' as the first argument. Experiments in this
    directory can be run by the stem name of the file. To run all the
    available experiments, pass 'all'.

        $ inv run path/to/my/exp-1.yaml  # run a single experiment
        $ inv run list                   # list experiments in "experiments/"
        $ inv run exp-1                  # run "experiments/exp-1.yaml"
        $ inv run all                    # run all available experiments
    """
    if experiment == 'list':
        print('Experiments:')
        for experiment in EXPERIMENTS.listdir('*.yaml'):
            print(' - ' + experiment.stem)
        return
    elif experiment == 'all':
        experiments = EXPERIMENTS.listdir('*.yaml')
    elif Path(experiment).exists():
        # an explicit path to a config file was given
        experiments = [Path(experiment)]
    else:
        # otherwise treat the argument as a stem inside EXPERIMENTS
        experiment = Path(EXPERIMENTS, experiment + '.yaml')
        assert experiment.exists(), 'experiment %s not found' % experiment
        experiments = [experiment]
    for experiment in experiments:
        # results are written to the R package's data-raw dir, named
        # after the experiment's stem
        output = Path(R_PKG, 'data-raw', experiment.stem + '.csv')
        print('Running experiment { %s }' % experiment.stem)
        peaks.run_experiment(experiment, output=output)
def setup_babel_userdocs(babelcmd):
    """Create userdocs .po files if necessary.

    Runs the given Babel setup.py command (init_catalog, update_catalog,
    compile_catalog, ...) for every translation domain found in
    userdocs/translations and for every non-default project language.
    """
    userdocs = env.root_dir.child('userdocs')
    if not userdocs.isdir():
        return
    locale_dir = userdocs.child('translations')
    for domain in locale_dir.listdir('*.pot', names_only=True):
        domain = domain[:-4]  # strip the ".pot" extension
        for loc in env.languages:
            if loc != env.languages[0]:  # skip the source language
                po_file = Path(locale_dir, loc, 'LC_MESSAGES',
                               '%s.po' % domain)
                mo_file = Path(locale_dir, loc, 'LC_MESSAGES',
                               '%s.mo' % domain)
                pot_file = Path(locale_dir, '%s.pot' % domain)
                if babelcmd == 'init_catalog' and po_file.exists():
                    # init_catalog must not overwrite an existing catalog
                    print("Skip %s because file exists." % po_file)
                #~ elif babelcmd == 'compile_catalog' and not mo_file.needs_update(po_file):
                #~     print "Skip %s because newer than .po" % mo_file
                else:
                    args = ["python", "setup.py"]
                    args += [babelcmd]
                    args += ["-l", loc]
                    args += ["--domain", domain]
                    args += ["-d", locale_dir]
                    #~ args += ["-o", po_file]
                    #~ if babelcmd == 'init_catalog':
                    # compile_catalog reads the .po; every other command
                    # reads the .pot template
                    if babelcmd == 'compile_catalog':
                        args += ["-i", po_file]
                    else:
                        args += ["-i", pot_file]
                    cmd = ' '.join(args)
                    #~ must_confirm(cmd)
                    local(cmd)
def transcrypt_sketch(sketch_name, sketch_dir, pyp5js):
    """
    Command to generate the P5.js code for a python sketch

    Params:
    - sketch_name: name of the sketch (will create a {sketch_name}.py)

    Optionals:
    - sketch_dir: sketch's directory (defaults to ./{sketch_name})
    - pyp5js: path to the pyp5js main file (defaults to local install)
    """
    SKETCH_DIR = Path(sketch_dir or f'./{sketch_name}')
    if not SKETCH_DIR.exists():
        cprint.warn(f"Couldn't find the sketch.")
        cprint.err(f"The directory {SKETCH_DIR} doesn't exist.",
                   interrupt=True)
    sketch = SKETCH_DIR.child(f"{sketch_name}.py")
    pyp5js = Path(pyp5js or PYP5_DIR)
    command_parts = ['transcrypt', '-xp', pyp5js, '-b', '-m', '-n', sketch]
    command = ' '.join([str(part) for part in command_parts])
    cprint.info(f"Command:\n\t {command}")
    process = subprocess.Popen(shlex.split(command))
    process.wait()
def create_file(self, name, contents):
    """
    Creates a gzip file

    :param name: (str) name of the file to be created
    :param contents: (str or bytes) contents to be written in the file
    :return: (str or False) path of the created file
    """
    # write a tmp file
    tmp = mkstemp()[1]
    if isinstance(contents, str):
        # gzip in binary mode needs bytes; accept str for convenience
        # (fixes a TypeError on Python 3 for str contents)
        contents = contents.encode('utf-8')
    with gzip.open(tmp, 'wb') as handler:
        handler.write(contents)
    # send it to the FTP server
    if self.ftp:
        # context manager so the temp file handle is always closed
        # (it was previously opened inline and leaked)
        with open(tmp, 'rb') as tmp_handler:
            self.ftp.storbinary('STOR {}'.format(name), tmp_handler)
        return '{}{}'.format(self.path, name)
    # or save it locally
    else:
        new_path = Path(self.path).child(name)
        tmp_path = Path(tmp)
        tmp_path.copy(new_path)
        if new_path.exists():
            return new_path.absolute()
        return False
def descargarArchivosUrl(urlDescargar=None, urlGuardar=None, accion=0):
    """Download a file from the web by URL and save it to disk.

    Returns True when the file was downloaded and saved correctly,
    otherwise returns False.

    :param urlDescargar: URL of the file to download.
    :param urlGuardar: path where the file should be saved (including
        the file name).
    :param accion: 0 = replace the file if it exists; 1 = save under
        another name without replacing ("name (n)", n=1,2,3...);
        2 = neither replace nor save when the file already exists.
    :raises ValueError: if urlDescargar or urlGuardar is None.
    """
    # Idiom fixes: `== None` -> `is None`, `== True` -> plain truthiness.
    if urlDescargar is None or urlGuardar is None:
        raise ValueError('Error en las rutas')
    destino = Path(urlGuardar)
    existe = destino.exists()
    if accion == 2 and existe:
        return False
    if accion == 0 and existe:
        os.remove(urlGuardar)
    # NOTE(review): for accion == 1 this relies on wget.download's own
    # duplicate-name handling -- confirm that is the intent.
    wget.download(urlDescargar, urlGuardar)
    return True
def run(self, username=None, pubkey=None, as_root=False, with_password=True):
    # NOTE(review): mangled by a secret scrubber -- the '******'
    # placeholders replaced literal values AND swallowed parts of the
    # prompt/crypt statements (including the `if` that the dangling
    # `else:` below belonged to), so this body is not valid Python
    # as-is.  Restore from version control before using.
    if as_root:
        remote_user = '******'
        execute = run
    else:
        remote_user = env.local_user
        execute = sudo
    with settings(user=remote_user):
        keyfile = Path(pubkey or Path('~', '.ssh', 'id_rsa.pub')).expand()
        if not keyfile.exists():
            abort('Public key file does not exist: %s' % keyfile)
        pubkey = keyfile.read_file().strip()
        username = username or prompt('Username: '******'s password: "******"%s\", \"password\")\'' % (password), capture=True)
        else: password = '******'  # empty means disabled
        for command in self.commands:
            execute(command.format(**locals()))
def enviar_email_con_cupon(modeladmin, request, queryset):
    """Django admin action: e-mail a discount coupon to every eligible
    lead in the queryset.

    A lead is eligible when it was already exported to CSV, has not yet
    received a coupon, and its collective was validated.  The coupon PDF
    is looked up in COUPONS_ROOT by the "<lead id>_<code>.pdf" scheme.
    """
    leads_incorrectos = 0  # leads that did not meet the conditions
    leads_correctos = 0    # coupons successfully sent
    for lead in queryset:
        if lead.enviado_en_csv is True and lead.enviado_cupon is False and lead.colectivo_validado is True:
            for fichero in os.listdir(settings.COUPONS_ROOT):
                # match files named "<lead id>_*.pdf"
                if fnmatch.fnmatch(fichero, str(lead.id)+'_*.pdf'):
                    cupon_fichero = Path(settings.COUPONS_ROOT, fichero)
                    if cupon_fichero.exists():
                        # coupon code is between the "_" and ".pdf"
                        codigo = fichero.split("_")[1].split(".")[0]
                        url_cupon = settings.BASE_URL+'/static/coupons/'+fichero
                        mail = EmailMultiAlternatives(
                            subject="Mi cupón de 10€ de Juguetes Blancos",
                            body='Descarga tu cupon aqui: '+url_cupon+' </p>',
                            from_email="Rocio, JueguetesBlancos <*****@*****.**>",
                            to=[lead.email]
                        )
                        mail.attach_alternative(render_to_string('leads/email_cupon.html', {'lead': lead, 'url_cupon': url_cupon}), "text/html")
                        mail.send()
                        lead.enviado_cupon = True
                        lead.codigo_cupon = codigo
                        lead.save()
                        leads_correctos = leads_correctos+1
        # NOTE(review): this else is taken to belong to the eligibility
        # check above (counting ineligible leads) -- confirm against the
        # original indentation.
        else:
            leads_incorrectos = leads_incorrectos+1
    messages.success(request, str(leads_correctos)+' Email/s enviado Correctamente')
    messages.error(request, str(leads_incorrectos)+' Leads no cumplian las condiciones.')
def append(self, key):
    """Add a public SSH key for this user to the gitolite keydir and
    commit the change.

    Accepts either the key text itself or a path to a key file.  No-op
    when the key is already present.
    """
    key_path = Path(str(key))
    if key_path.isfile():
        # a path was given: read the actual key text from the file
        with open(str(key_path)) as f:
            key = f.read()
    if not isinstance(key, bytes):
        key = key.encode('utf-8')
    if key in self:
        return
    # keys live at keydir/<user>/<md5 of key body>/<user>.pub;
    # split()[1] is the base64 body of "type body [comment]"
    directory = Path(self.user.path, 'keydir', self.user.name,
                     hashlib.md5(key.strip().split()[1]).hexdigest())
    directory.mkdir(parents=True)
    key_file = Path(directory, "%s.pub" % self.user.name)
    if key_file.exists() and key_file.read_file() == key:
        # identical key already stored: nothing to commit
        return
    key_file.write_file(key, mode='wb')
    self.user.git.commit(['keydir'],
                         'Added new key for user %s' % self.user.name)
    super(ListKeys, self).append(key)
def run(self, username=None, pubkey=None, as_root=False):
    # NOTE(review): mangled by a secret scrubber -- the '******'
    # placeholders replaced literal values AND swallowed parts of the
    # prompt/crypt statements, so this body is not valid Python as-is.
    # Restore from version control before using.
    if as_root:
        remote_user = '******'
        execute = run
    else:
        remote_user = env.local_user
        execute = sudo
    with settings(user=remote_user):
        keyfile = Path(pubkey or Path('~', '.ssh', 'id_rsa.pub')).expand()
        if not keyfile.exists():
            abort('Public key file does not exist: %s' % keyfile)
        with open(keyfile, 'r') as f:
            pubkey = f.read(65535)
        username = username or prompt('Username: '******'s password: "******"%s\", \"password\")\'' % (password), capture=True)
        for command in self.commands:
            execute(command.format(**locals()))
def delete(self, name): """ Given a name, delete the group. Idempotent. :param name: the group name to delete. :type name: str :returns: The deletion status. :rtype: bool """ #1. Remove the conf file path = Path( os.path.join(self.path, 'conf', 'groups', '{}.conf'.format(name))) if not path.exists(): #Already exist return False path.remove() #2. Remove it from the repos file. for repo in Path(self.path, 'conf', 'repos').walk(): if repo.isdir(): continue with open(str(repo)) as f: if name in f.read(): Repository.get( os.path.splitext(os.path.basename(repo))[0], self.path, self.git).replace(r'.*= *@%s\n' % name, '') #3. Commit self.git.commit([str(path)], 'Deleted group {}.'.format(name)) return True
class PickleStorage(QueueStorage):
    """Queue storage backend that persists queue items on disk via pickle."""

    def __init__(self):
        super(PickleStorage, self).__init__()
        self._prepareStorage()

    def _prepareStorage(self):
        """Resolve and validate the storage directory and file name.

        Raises:
            Exception: when the configured storage path does not exist.
        """
        # NOTE(review): the second component starts with a slash; verify
        # the path library in use still joins this under base_path.
        default = Path(ingo.active_project.base_path, '/queue')
        self._storage_path = Path(self.config.get('path', default))
        self._storage_name = 'items.pckl'
        if not self._storage_path.exists():
            raise Exception("PickleStorage storage path (%s) doesn't exist!"
                            % self._storage_path)

    def load(self):
        """Return the unpickled items from the storage file."""
        # Context manager closes the file even if pickle.load raises
        # (the handles were previously closed manually).
        with open(Path(self._storage_path, self._storage_name), 'rb') as f:
            return pickle.load(f)

    def store(self, items):
        """Pickle `items` to the storage file."""
        with open(Path(self._storage_path, self._storage_name), 'wb') as f:
            return pickle.dump(items, f)

    def clear(self):
        """Replace the stored items with an empty list."""
        self.store([])
def setup_from_tasks(globals_dict, main_package=None,
                     settings_module_name=None, **kwargs):
    """
    This is the function you must call from your :xfile:`tasks.py` file
    in order to activate the tasks defined by atelier.

    Arguments:

    - `globals_dict` must be the `globals()` of the calling script.

    - Optional `main_package` is the name of the main Python package
      provided by this project.

    - Optional `settings_module_name` will be stored in the
      :envvar:`DJANGO_SETTINGS_MODULE`, and certain project
      configuration options will get their default value from that
      module.

    - All remaining keyword arguments are project configuration
      parameters and stored to the
      :ref:`project configuration options <atelier.prjconf>`.
    """
    if '__file__' not in globals_dict:
        raise Exception(
            "No '__file__' in %r. "
            "First argument to setup_from_tasks() must be `globals()`."
            % globals_dict)
    tasks_file = Path(globals_dict['__file__'])
    if not tasks_file.exists():
        raise Exception("No such file: %s" % tasks_file)
    # print("20180428 setup_from_tasks() : {}".format(root_dir))
    from atelier.invlib import tasks
    from atelier.projects import get_project_from_tasks
    prj = get_project_from_tasks(tasks_file.parent)
    # the first project to call setup_from_tasks becomes the current one
    if atelier.current_project is None:
        atelier.current_project = prj
    if kwargs:
        prj.config.update(kwargs)
    if settings_module_name is not None:
        os.environ['DJANGO_SETTINGS_MODULE'] = settings_module_name
        from django.conf import settings
        # derive the project's languages from the Django site object
        prj.config.update(
            languages=[lng.name for lng in settings.SITE.languages])
    if isinstance(main_package, str):
        main_package = import_module(main_package)
    if main_package:
        prj.set_main_package(main_package)
    self = Collection.from_module(tasks)
    prj.set_namespace(self)
    return self
def test_pdf_to_png(self):
    # GhostScript smoke test: convert a known PDF to PNG and verify the
    # output file appears.  Depends on a Windows fixture directory.
    testdir = Path(r"C:\tmp\pdfprocessing\test")
    # NOTE(review): chdir changes the process CWD for any later tests
    testdir.chdir()
    input_file = "testpdf.pdf"
    output_file = Path(r"C:\tmp\pdfprocessing\test\test_gs_pdf_to_png.png")
    gs = GhostScript()
    gs.pdf_to_png(input_file,output_file)
    self.assertTrue(output_file.exists(),"File")
def read_json_data(json_filename):
    """Load and return DATA_DIR/<json_filename>.json as a Python object.

    Reports a missing file through cprint.err; the second argument
    presumably makes cprint.err interrupt execution -- otherwise the
    subsequent open() raises anyway.  TODO confirm.
    """
    path = Path(DATA_DIR.child(f'{json_filename}.json'))
    if not path.exists():
        # Fixed grammar in the user-facing message ("does not exists").
        cprint.err(f"JSON file {path} does not exist.", True)
    with open(path) as fd:
        return json.load(fd)
def delete(self, lookup_repo_name):
    """Delete the conf file for the given repo and commit the removal;
    silently does nothing for unknown repositories."""
    repo = Repository(lookup_repo_name, self.path, self.git)
    # NOTE(review): this guard only triggers if Repository instances are
    # falsy for unknown repos -- confirm, otherwise it is dead code.
    if not repo:
        return
    dest = Path(self.path, 'conf/repos/%s.conf' % lookup_repo_name)
    if dest.exists():
        dest.remove()
    # the commit references the conf path even when it did not exist
    self.git.commit([str(dest)], 'Deleted repo %s.' % lookup_repo_name)
class ReleaseUnpackerRarFile(object):
    """Release unpacker RAR file."""

    def __init__(self, rar_file_path):
        """Initialize and validate rar file path."""
        self.rar_file_path = Path(rar_file_path)
        # must be an existing regular file with a .rar extension
        if (not self.rar_file_path.exists() or
                not self.rar_file_path.isfile() or
                not self.rar_file_path.ext == ".rar"):
            raise ReleaseUnpackerRarFileError("Invalid RAR file {}".format(
                self.rar_file_path))
        self.rar_file_path_abs = self.rar_file_path.absolute()
        self.rar_file = rarfile.RarFile(self.rar_file_path)

    def __repr__(self):
        """Return object string representation."""
        return "<ReleaseUnpackerRarFile: {}>".format(self.rar_file_path_abs)

    @lazy
    def name(self):
        """Return name of release folder."""
        # RARs inside a Subs folder belong to the release one level up
        if self.subs_dir:
            name = self.rar_file_path.parent.parent.name
        else:
            name = self.rar_file_path.parent.name
        return str(name)

    @lazy
    def subs_dir(self):
        """Return True if RAR file is located in a Subs folder."""
        if self.rar_file_path.parent.name.lower() in ("subs", "sub"):
            return True
        else:
            return False

    @lazy
    def file_list(self):
        """Return file list of RAR file.

        Each entry is a dict: {"name": Path, "size": int}.
        """
        files = []
        for file in self.rar_file.infolist():
            files.append({"name": Path(file.filename), "size": file.file_size})
        return files

    def extract_file(self, file_name, unpack_dir):
        """Extract file_name and return extracted file path."""
        self.rar_file.extract(file_name, path=unpack_dir)
        self.extracted_file_path = Path(unpack_dir, file_name)
        # Set the mtime to current time
        self.set_mtime()
        return self.extracted_file_path

    def set_mtime(self):
        """Set mtime of extracted file path to current time."""
        os.utime(self.extracted_file_path, None)
def delete(self, lookup_repo_name):
    """Delete the conf file for `lookup_repo_name`, commit the removal
    and return the Repository handle.

    Raises ValueError when the repo conf file does not exist.
    """
    repo = Repository(lookup_repo_name, self.path, self.git)
    conf_path = Path(self.path, 'conf/repos/%s.conf' % lookup_repo_name)
    if not conf_path.exists():
        raise ValueError('Repository %s not existing.' % lookup_repo_name)
    conf_path.remove()
    self.git.commit([str(conf_path)], 'Deleted repo %s.' % lookup_repo_name)
    return repo
def get(self, id):
    """Serve the section image for `id`, falling back to the default
    image when no uploaded file exists."""
    upload_directory = os.path.join(
        current_app.config.get("UPLOAD_FOLDER", "uploads"), "secciones")
    image_path = os.path.join(upload_directory, "%s.jpg" % str(id))
    # Idiom fix: `== False` comparison replaced with `not`; the path is
    # also built only once now.
    if not Path(image_path).exists():
        return send_file('uploads/secciones/default.jpg')
    return send_file(image_path)
def delete(self, name):
    """Delete the user's keydir and commit the removal; return the User.

    Raises ValueError when the user has no keydir.
    """
    user = User(self.path, self.git, name)
    dest = Path(self.path, 'keydir/%s' % name)
    if not dest.exists():
        # Fixed copy-paste error: the message previously said
        # "Repository" although this method deletes a user.
        raise ValueError('User %s not existing.' % name)
    dest.rmtree()
    self.git.commit([str(dest)], 'Deleted user %s.' % name)
    return user
def test_resolve(self):
    # Resolving a path that goes through a symlinked directory should
    # yield the real location, while both paths keep pointing at the
    # same underlying file.
    link_path = Path(self.link_to_images_dir, "image3.png")
    real_path = link_path.resolve()
    assert link_path.components()[-2:] == ["link_to_images_dir", "image3.png"]
    assert real_path.components()[-2:] == ["images", "image3.png"]
    assert link_path.exists()
    assert real_path.exists()
    assert link_path.same_file(real_path)
    assert real_path.same_file(link_path)
def boostrap_crawled_files():
    """Crawl the Exeter Book from en.wikisource.org unless the pages are
    already present under ./data/."""
    first_page = Path('./data/1.html')
    if first_page.exists():
        logging.info("Crawled files in place")
    else:
        logging.info("Crawling Exeter Book from en.wikisource.org")
        run_spider()
def convert_wav_to_mp3(src_dir=None, dst_dir=None):
    """Convert wav sounds to mp3.

    `src_dir` defaults to the seeds directory; `dst_dir` defaults to the
    source directory."""
    if src_dir is None:
        src_dir = seeds_dir
    if dst_dir is None:
        dst_dir = src_dir
    source = Path(src_dir)
    assert source.exists()
    target = Path(dst_dir)
    if not target.exists():
        target.mkdir(True)
    # names_only=True yields bare file names, not full paths
    for wav_name in Path(source).listdir('*.wav', names_only=True):
        mp3_name = Path(wav_name).stem + '.mp3'
        cmd = 'ffmpeg -i {} -codec:a libmp3lame -qscale:a 2 {}'
        local_run(cmd.format(Path(source, wav_name), Path(target, mp3_name)))
def get_backups_folder(self) -> Path:
    """Return the local backup folder as a Path, creating the directory
    on first use.

    Returns:
        Path: the backup folder from ``settings.BACKUP['folder']``.
    """
    folder = Path(settings.BACKUP['folder'])
    if not folder.exists():
        folder.mkdir()
    return folder
def remove(self, key):
    """Remove the stored .pub file for `key` (and its md5-named parent
    directory) when present, committing the change."""
    fingerprint = hashlib.md5(key.strip().split()[1]).hexdigest()
    key_dir = Path(self.user.path, 'keydir', self.user.name, fingerprint)
    pub_file = Path(key_dir, "%s.pub" % self.user.name)
    if pub_file.exists():
        pub_file.remove()
        pub_file.parent.rmdir()
        self.user.git.commit(['keydir'],
                             'Removed key for user %s' % self.user.name)
def files_iter(path):
    """Yield the consecutive ".partN" pieces belonging to `path`,
    starting at part 1 and stopping at the first missing piece."""
    base_name = path.name.replace('.part1', '')
    part_template = '{file_name}.part%d'.format(file_name=base_name)
    part_number = 1
    while True:
        candidate = Path(part_template % part_number)
        if not candidate.exists():
            break
        yield candidate
        part_number += 1
def get_faces(output_dir):
    """Run face extraction over every image of every Bienal collection,
    writing results into `output_dir` (created when missing)."""
    target_dir = Path(output_dir)
    if not target_dir.exists():
        os.makedirs(target_dir)
    client = BienalClient()
    for collection in client.get_all_collections():
        print('Processando imagens da coleção "{}"'.format(collection['title']))
        for image in tqdm(collection.images):
            extract_faces(image, target_dir)
        print()
def get(self, id):
    """Serve the category thumbnail for `id`, falling back to the
    default image when no uploaded file exists."""
    upload_directory = os.path.join(
        current_app.config.get("UPLOAD_FOLDER", "uploads"), "categorias")
    image_path = os.path.join(upload_directory, "%s_thumbnail.jpg" % str(id))
    # Idiom fix: `== False` comparison replaced with `not`; the path is
    # also built only once now.
    if not Path(image_path).exists():
        return send_file(os.path.join(upload_directory, 'default.jpg'))
    return send_file(image_path)
def get(self, id):
    """Serve the student thumbnail for `id`, falling back to the
    default image when no uploaded file exists."""
    import os.path  # kept local, as in the original
    upload_directory = os.path.join(
        current_app.config.get("UPLOAD_FOLDER", "uploads"), "alumnos")
    image_path = os.path.join(upload_directory, "%s_thumbnail.jpg" % str(id))
    # Idiom fix: `== False` comparison replaced with `not`; the path is
    # also built only once now.
    if not Path(image_path).exists():
        return send_file('uploads/alumnos/default_thumbnail.jpg')
    return send_file(image_path)
def sketch_dir(self):
    """Return the sketch's directory as a Path.

    Falls back to ./<sketch_name> when no explicit directory was set;
    an explicitly configured directory must exist when
    `check_sketch_dir` is enabled.
    """
    sketch_dir = Path(self._sketch_dir)
    # An empty _sketch_dir gives a falsy Path -> default location.
    # NOTE(review): the existence check below is skipped for the
    # default -- confirm that is intentional.
    if not sketch_dir:
        return sketch_dir.child(f'{self.sketch_name}')
    if self.check_sketch_dir and not sketch_dir.exists():
        cprint.err(f"The directory {sketch_dir} does not exists.",
                   interrupt=True)
    return sketch_dir
def find_labels():
    """Map each label directory under tmp/segments to its .ppm files.

    Returns:
        dict: {label name (str): list of '*.ppm' paths in that dir}

    Raises:
        Exception: when tmp/segments does not exist yet.
    """
    work_dir = Path("tmp/segments")
    if not work_dir.exists():
        raise Exception("You must first create the labels")
    # Removed a redundant duplicate `files = {}` initialisation that
    # appeared both before and after the existence check.
    files = {}
    labels = work_dir.listdir(filter=DIRS_NO_LINKS)
    for label in labels:
        files[str(label.name)] = label.listdir(pattern='*.ppm')
    return files
def __check_directory(self):
    """
    Check if the entered directory exists

    :return: (unipath.Path or False) the path to the existing directory
    """
    target = Path(self.arguments['<directory>'])
    if target.exists() and target.isdir():
        return target
    msg = '{} is not a valid directory'.format(target.absolute())
    self.__output(msg, error=True)
    return False
def addkey(pub_file):
    '''
    fab env setup.addkey:id_rsa.pub

    Append the given public key file to the remote authorized_keys.
    '''
    f = Path(pub_file)
    if not f.exists():
        # Bug fix: the message referenced an undefined name `keyfile`,
        # which would have raised NameError instead of aborting cleanly.
        abort('Public key file not found: %s' % f)
    pub_key = f.read_file().strip()
    append('~/.ssh/authorized_keys', pub_key)
def dict2dir(dir, dic, mode="w"):
    """Materialize a nested dict as a directory tree.

    Each key is a file or subdirectory name; string values become file
    contents, dict values become subdirectories (recursively).

    :param dir: target directory (created when missing).
    :param dic: mapping of names to contents (str) or sub-dicts.
    :param mode: file-open mode for created files ("w" or "a").
    """
    dir = Path(dir)
    if not dir.exists():
        dir.mkdir()
    for filename, content in dic.items():
        p = Path(dir, filename)
        if isinstance(content, dict):
            # Bug fix: propagate `mode` into the recursion -- it was
            # silently dropped for nested directories.
            dict2dir(p, content, mode)
            continue
        # context manager so the handle is closed on error too
        with open(p, mode) as f:
            f.write(content)
def remove(self, key):
    """Remove the stored public key file for `key` and its md5-named
    parent directory, then commit.

    Raises ValueError when the key is not stored."""
    fingerprint = hashlib.md5(key.strip().split()[1]).hexdigest()
    key_dir = Path(self.user.path, 'keydir', self.user.name, fingerprint)
    pub_file = Path(key_dir, "%s.pub" % self.user.name)
    if not pub_file.exists():
        raise ValueError("Invalid key")
    pub_file.remove()
    pub_file.parent.rmdir()
    self.user.git.commit(['keydir'],
                         'Removed key for user %s' % self.user.name)
def cleanup(self):
    """Remove subtitle files left over where the media file is removed"""
    log.debug('Running subtitle cleanup on dir {}'.format(self.search_dir))
    subtitle_extensions = ('.srt', '.sub', '.idx')
    # bottom-up walk so files are checked before their directories
    for file_path in self.search_dir.walk(filter=FILES, top_down=False):
        if not file_path.ext in subtitle_extensions:
            continue
        # Remove the subtitle file if no media file exists in the same dir.
        # Only .mkv and .avi siblings count as media files here.
        media_file_path_mkv = Path(file_path.parent, '{}.mkv'.format(
            file_path.stem))
        media_file_path_avi = Path(file_path.parent, '{}.avi'.format(
            file_path.stem))
        if (not media_file_path_mkv.exists()
                and not media_file_path_avi.exists()):
            log.info('Removing leftover subtitle file {}'.format(
                self.relative_path(file_path, self.search_dir)))
            file_path.remove()
class ReleaseUnpackerRarFile(object):
    """Wrapper around a release RAR file: validates the path, exposes the
    release name / Subs detection and extracts members."""

    def __init__(self, rar_file_path):
        """Validate `rar_file_path` (existing regular .rar file) and open it."""
        self.rar_file_path = Path(rar_file_path)
        if (not self.rar_file_path.exists() or
                not self.rar_file_path.isfile() or
                not self.rar_file_path.ext == '.rar'):
            raise ReleaseUnpackerRarFileError('Invalid RAR file {}'.format(
                self.rar_file_path))
        self.rar_file_path_abs = self.rar_file_path.absolute()
        self.rar_file = rarfile.RarFile(self.rar_file_path)

    def __repr__(self):
        return '<ReleaseUnpackerRarFile: {}>'.format(self.rar_file_path_abs)

    @lazy
    def name(self):
        """Name of the release folder (grandparent dir for Subs RARs)."""
        if self.subs_dir:
            name = self.rar_file_path.parent.parent.name
        else:
            name = self.rar_file_path.parent.name
        return str(name)

    @lazy
    def subs_dir(self):
        """True if the RAR file is located in a Subs/Sub folder."""
        if self.rar_file_path.parent.name.lower() in ('subs', 'sub'):
            return True
        else:
            return False

    @lazy
    def file_list(self):
        """List of {'name': Path, 'size': int} entries inside the RAR."""
        files = []
        for file in self.rar_file.infolist():
            files.append({'name': Path(file.filename), 'size': file.file_size})
        return files

    def set_mtime(self):
        """Touch the extracted file so its mtime is the current time."""
        # Bug fix: the Python 2 builtin `file()` was used here, which is
        # a NameError on Python 3; open() is the portable equivalent.
        with open(self.extracted_file_path, 'a'):
            os.utime(self.extracted_file_path, None)

    def extract_file(self, file_name, unpack_dir):
        """Extract `file_name` into `unpack_dir` and return its path."""
        self.rar_file.extract(file_name, path=unpack_dir)
        self.extracted_file_path = Path(unpack_dir, file_name)
        # Set the mtime to current time
        self.set_mtime()
        return self.extracted_file_path
def download_file(downloadable_url):
    """
    File is only downloaded if it doesn't exist in DOWNLOADS_DIR

    This function does not rename the file. It only downloads the file
    if the expected name is not present in the DOWNLOADS_DIR.

    Returns a django.core.files.File object that can be stored in a
    FileField.
    """
    download_dir = Path(settings.DOWNLOADS_DIR)
    if not download_dir.exists():
        download_dir.mkdir()
    # expected local name is the final path segment of the URL
    name_in_url = Path(downloadable_url).name
    expected_loc = Path(download_dir, name_in_url)
    # only download if necessary
    if not expected_loc.exists():
        # stream=True fetches the body in chunks instead of loading it
        # all into memory
        response = requests.get(downloadable_url, stream = True)
        with open(expected_loc, 'wb') as expected_loc_handle:
            for chunk in response.iter_content(chunk_size = 1024):
                expected_loc_handle.write(chunk)
    # NOTE(review): no HTTP status check is done before writing, and the
    # returned handle is intentionally left open for the File wrapper.
    return File(open(expected_loc, 'rb'))
def download_survey_responses(survey_name):
    """Download the survey data.

    Args:
        survey_name: 'sound_similarity_6' or 'sound_similarity_4'
    """
    client = Qualtrics(**get_creds())
    responses = client.get_survey_responses(survey_name)
    survey_dir = Path(exp_dir, survey_name)
    if not survey_dir.exists():
        survey_dir.mkdir()
    csv_path = Path(survey_dir, survey_name + '.csv')
    responses.to_csv(csv_path, index=False)
def create(self, entity):
    """Create the conf file for a new repository, commit it and return a
    Repository handle.

    Raises ValueError when the repository already exists."""
    repo_file = Path(self.path, 'conf/repos/%s.conf' % entity)
    if repo_file.exists():
        raise ValueError('Repository %s already exists' % entity)
    # Repo names may contain slashes (e.g. "username/reponame"); create
    # any missing parent directories so write_file does not IOError.
    if repo_file.parent != Path(""):
        repo_file.parent.mkdir(parents=True)
    repo_file.write_file("repo %s\n" % entity)
    self.git.commit([str(repo_file)], 'Created repo %s' % entity)
    return Repository(entity, self.path, self.git)
class SheerSite(object):
    """Serves a directory of static .html templates as Django views, one
    URL pattern per page plus permanent redirects for index pages."""

    def __init__(self, slug):
        self.slug = slug
        # Only slugs registered in settings.SHEER_SITES have a backing
        # directory; other slugs produce no URL patterns.
        if slug in settings.SHEER_SITES:
            self.path = Path(settings.SHEER_SITES[slug])
        else:
            self.path = None

    @property
    def urls(self):
        # patterns for the whole site (prefix '.')
        return self.urls_for_prefix()

    def urls_for_prefix(self, prefix='.'):
        """Build url() patterns for every .html file under `prefix`,
        relative to the site path."""
        url_patterns = []
        if self.path is None or not self.path.exists():
            return url_patterns
        prefixed_path = Path(self.path, prefix)
        for html_path in prefixed_path.walk():
            # skip files that don't end in .html
            if not html_path.endswith('.html'):
                continue
            rel_path = self.path.rel_path_to(html_path)
            prefix_rel_path = prefixed_path.rel_path_to(html_path)
            # skip files in underscore directories
            if rel_path.startswith('_'):
                continue
            view = SheerTemplateView.as_view(
                template_engine=self.slug,
                template_name=str(rel_path))
            regex_template = r'^%s$'
            index_template = r'^%s/$'
            if rel_path.name == 'index.html':
                # index pages answer at the directory URL (with slash)
                if prefix_rel_path.parent:
                    slash_regex = index_template % prefix_rel_path.parent
                else:
                    slash_regex = r'^$'
                pattern = url(slash_regex, view)
                # and ".../index.html" permanently redirects to "./"
                redirect_regex = regex_template % prefix_rel_path
                index_redirect = RedirectView.as_view(url='./',
                                                      permanent=True)
                redirect_pattern = url(redirect_regex, index_redirect)
                url_patterns += [pattern, redirect_pattern]
            else:
                regex = regex_template % prefix_rel_path
                pattern = url(regex, view)
                url_patterns.append(pattern)
        return url_patterns
def create_file_name(path):
    """Derive an output file name from a ".part1" file, appending a
    " (n)" counter until the name no longer collides with an existing
    file.

    NOTE(review): the first collision produces "name (0).ext", and the
    replace-based renaming assumes the base name does not itself contain
    " (n)<ext>" substrings -- confirm this is intended.
    """
    output_file_path = Path(path.name.replace('.part1', ''))
    # extensionless names get a .dat suffix for the counter logic
    output_file_ext = output_file_path.ext or '.dat'
    c = 0
    while output_file_path.exists():
        # undo the previous " (c-1)" suffix, then apply " (c)"
        aux = output_file_path.name.replace(
            ' (%d)%s' % ((c - 1), output_file_ext),
            output_file_ext
        ).replace(
            output_file_ext,
            " (%d)%s" % (c, output_file_ext)
        )
        output_file_path = Path(aux)
        c += 1
    return output_file_path