# NOTE(review): this block appears corrupted — secret literals have been
# redacted to '******' and several statements are fused together (e.g. the
# prompt/password expression), so it is NOT valid Python as written.
# Left byte-identical; recover the original from version control.
# Intent (as far as the surviving tokens show — TODO confirm): pick the
# executing account (fabric run vs sudo), read a local public key, collect
# a username/password, then run each templated command in self.commands
# with the collected values interpolated via format(**locals()).
def run(self, username=None, pubkey=None, as_root=False, with_password=True): if as_root: remote_user = '******' execute = run else: remote_user = env.local_user execute = sudo with settings(user=remote_user): keyfile = Path(pubkey or Path('~', '.ssh', 'id_rsa.pub')).expand() if not keyfile.exists(): abort('Public key file does not exist: %s' % keyfile) pubkey = keyfile.read_file().strip() username = username or prompt('Username: '******'s password: "******"%s\", \"password\")\'' % (password), capture=True) else: password = '******' # empty means disabled for command in self.commands: execute(command.format(**locals()))
def append(self, key):
    """Register *key* (a key string, bytes, or a path to a .pub file)
    for the user and commit it to the gitolite keydir.

    No-ops when the key is already registered or already on disk.
    """
    path = Path(str(key))
    if path.isfile():
        with open(str(path)) as fh:
            key = fh.read()
    # normalise to bytes before comparing/hashing/writing
    if not isinstance(key, bytes):
        key = key.encode('utf-8')
    if key in self:
        return  # already registered
    digest = hashlib.md5(key.strip().split()[1]).hexdigest()
    directory = Path(self.user.path, 'keydir', self.user.name, digest)
    directory.mkdir(parents=True)
    key_file = Path(directory, "%s.pub" % self.user.name)
    if key_file.exists() and key_file.read_file() == key:
        return  # identical key already on disk
    key_file.write_file(key, mode='wb')
    self.user.git.commit(['keydir'],
                         'Added new key for user %s' % self.user.name)
    super(ListKeys, self).append(key)
def normaliza_dump_mysql(nome_arquivo):
    """Normalize a MySQL dump file in place: everything before the first
    "Table structure" marker is replaced with the standard header."""
    dump = Path(nome_arquivo).expand()
    db_name = dump.stem
    text = dump.read_file()
    # locate the first table-structure marker; like the original, this
    # raises StopIteration when the marker is absent
    marker = next(
        re.finditer('--\n-- Table structure for table .*\n--\n', text))
    dump.write_file(cabecalho.format(banco=db_name) + text[marker.start():])
def addkey(pub_file):
    '''
    fab env setup.addkey:id_rsa.pub

    Append the given local public-key file to the remote
    ~/.ssh/authorized_keys.
    '''
    f = Path(pub_file)
    if not f.exists():
        # FIX: previously referenced undefined name `keyfile`, which raised
        # NameError instead of aborting with the intended message
        abort('Public key file not found: %s' % f)
    pub_key = f.read_file().strip()
    append('~/.ssh/authorized_keys', pub_key)
def get_thesaurus_text():
    """Return the thesaurus text.

    Fetches it from the internet on first use and caches it (UTF-8) in
    ./thesaurus.txt; subsequent calls read the cached file.
    """
    p = Path('./thesaurus.txt')
    if not p.exists():
        logging.info("File doesn't exist, parsing from internet")
        text = _retrieve_thesaurus_text()
        p.write_file(text.encode('utf-8'))
    else:
        logging.info("File exists, reading in")
        text = p.read_file().decode('utf-8')
    # FIX: lazy %-args instead of eager string formatting in the log call;
    # also dropped the dead `text = None` initialization — both branches bind it
    logging.info("Read text length: %s", len(text))
    return text
def dump_sapl(sigla):
    """Export a legacy SAPL Zope database to a git-annex repository.

    Skips the export entirely when the TAG_ZOPE tag already exists; on
    success (or partial failure) the work done so far is committed, and the
    tag is (re)set only when the export finished completely.
    """
    sigla = sigla[-3:]  # ignore prefix (e.g. 'sapl_cm_')
    data_fs_path, documentos_fs_path = [
        DIR_DADOS_MIGRACAO.child('datafs', '{}_cm_{}.fs'.format(prefixo, sigla))
        for prefixo in ('Data', 'DocumentosSapl')]
    assert exists(data_fs_path), 'Origem não existe: {}'.format(data_fs_path)
    if not exists(documentos_fs_path):
        # fall back to the main Data.fs when there is no separate documents fs
        documentos_fs_path = data_fs_path

    nome_banco_legado = 'sapl_cm_{}'.format(sigla)
    destino = DIR_DADOS_MIGRACAO.child('repos', nome_banco_legado)
    destino.mkdir(parents=True)
    repo = git.Repo.init(destino)
    if TAG_ZOPE in repo.tags:
        print(
            '{}: A exportação de documentos já está feita -- abortando'.format(
                sigla))
        return
    repo_execute(repo, 'git annex init')
    repo_execute(repo, 'git config annex.thin true')
    salvar = build_salvar(repo)

    # FIX: bind arq_mtimes/mtimes BEFORE the try block — they are used in
    # the finally clause, and a failure while binding them inside the try
    # would raise NameError there, masking the original exception.
    arq_mtimes = Path(repo.working_dir, 'mtimes.yaml')
    # FIX: safe_load instead of load — the file only holds plain data
    # (written below with safe_dump) and yaml.load without an explicit
    # Loader is unsafe and deprecated.
    mtimes = yaml.safe_load(
        arq_mtimes.read_file()) if arq_mtimes.exists() else {}
    try:
        finalizado = False
        _dump_sapl(data_fs_path, documentos_fs_path, destino, salvar, mtimes)
        finalizado = True
    finally:
        # persist changes even on partial failure
        repo_execute(repo, 'git annex add sapl_documentos')
        arq_mtimes.write_file(yaml.safe_dump(mtimes, allow_unicode=True))
        repo.git.add(A=True)
        # commit only if something actually changed
        if 'master' not in repo.heads or repo.index.diff('HEAD'):
            status = 'completa' if finalizado else 'parcial'
            repo.index.commit(u'Exportação do zope {}'.format(status))
            if finalizado:
                repo.git.execute('git tag -f'.split() + [TAG_ZOPE])
def id3(audio, episode, title, image):
    """Update the ID3 (v2.3) tags of *audio* with podcast episode info.

    audio   -- path to the audio file to tag
    episode -- episode number; when falsy it is detected from the filename
    title   -- track title to set
    image   -- optional path to a cover image (presumably wired as CLI
               options by a click decorator outside this block — confirm)
    """
    if not episode:
        # Try to detect episode from audio filename (pattern like "e042")
        match = re.search(r'e(\d{2,})', audio)
        if not match:
            raise click.UsageError('Could not be detected episode. Use --episode option.')
        episode = int(match.group(1))
    id3 = eyed3.load(audio)
    # re-initialise the tag so stale frames are dropped
    id3.initTag(version=ID3_V2_3)
    id3.tag.title = title
    id3.tag.artist = 'Henrique Bastos'
    id3.tag.album = 'Curto Circuito Podcast'
    id3.tag.track_num = episode
    id3.tag.genre = 'Podcast'
    if image:
        image = Path(image)
        data = image.read_file('rb')
        # derive the mime subtype from the file extension (".png" -> "png")
        mime = 'image/' + image.ext[1:]
        # HACK to make APIC header readable on iTunes
        # EYED3 sets v2.3 encoding to UTF-16, but iTunes only reads LATIN-1
        from eyed3.id3 import frames

        def force_latin1(self):
            self.encoding = eyed3.id3.LATIN1_ENCODING
        # monkeypatch the frame class so every image frame uses LATIN-1
        setattr(frames.ImageFrame, '_initEncoding', force_latin1)
        # force mime as str because eyeD3 uses it to compose the binary header
        # PUBLISHER_LOGO == 0x14
        id3.tag.images.set(0x14, data, str(mime))
    id3.tag.save(version=ID3_V2_3)
    # Print the resulting tag via the eyeD3 CLI for a visual check
    shell(['eyeD3', audio])
def unused_run_sphinx_doctest():
    """
    Run Sphinx doctest tests.
    Not maintained because i cannot prevent it from also trying to test
    the documents in `django_doctests` which must be tested separately.
    """
    onlythis = None  # e.g. 'docs/tutorials/human/index.rst' to test one doc
    args = ['sphinx-build', '-b', 'doctest']
    args += ['-a']  # all files, not only outdated
    args += ['-Q']  # no output
    if not onlythis:
        args += ['-W']  # consider warnings as errors
    build_dir = env.ROOTDIR.child('docs', env.build_dir_name)
    args += [env.ROOTDIR.child('docs'), build_dir]
    if onlythis:
        # restrict the run to a single document
        args += [onlythis]
    exitcode = sphinx.main(args)
    if exitcode == 0:
        return
    # on failure, show sphinx's own report from the build directory
    output = Path(build_dir, 'output.txt')
    abort("""
=======================================
Sphinx doctest failed with exit code %s
=======================================
%s""" % (exitcode, output.read_file()))
def move_to_venv(self, which_one):
    """
    Move a generated config file into the venv's bin folder so it can be
    executed: copy the temporary file's content into the (new or existing)
    target file, then delete the temporary file.
    """
    target = Path(self.venv_folder, self.project_name, 'bin', which_one)
    source = Path(self.install_path, which_one)
    logger.info('target: %s, move_orig: %s' % (target, source))
    if not source.exists():
        return
    logger.info('Moving %s into place ...' % which_one)
    payload = source.read_file()
    # make sure the destination directory exists
    destination_dir = target.parent
    if not destination_dir.exists():
        destination_dir.mkdir(parents=True)
    target.write_file(payload, 'w+')
    source.remove()
    logger.info('...done')
def append(self, key):
    """Register *key* (a key string or a path to a .pub file) for the
    user and commit it to the gitolite keydir.

    No-ops when the key is already registered or already on disk.
    """
    candidate = Path(key)
    if candidate.isfile():
        with open(str(candidate)) as fh:
            key = fh.read()
    if key in self:
        return  # already registered
    fingerprint = hashlib.md5(key.strip().split()[1]).hexdigest()
    directory = Path(self.user.path, 'keydir', self.user.name, fingerprint)
    directory.mkdir(parents=True)
    key_file = Path(directory, "%s.pub" % self.user.name)
    if key_file.exists() and key_file.read_file() == key:
        return  # identical key already on disk
    key_file.write_file(key)
    self.user.git.commit(['keydir'],
                         'Added new key for user %s' % self.user.name)
    super(ListKeys, self).append(key)
def objects():
    """Yield a "Lino" Project followed by one Ticket per non-empty line of
    TICKETS; title/state come from the line, timestamps and description
    from the matching docs/tickets/<num>.rst file."""
    # FIX: hoisted out of the loop — both the import and the docs/tickets
    # path are loop-invariant and were recomputed on every iteration
    import lino
    tickets_dir = Path(lino.__file__).parent.parent.child('docs', 'tickets')

    Project = rt.modules.tickets.Project
    Ticket = rt.modules.tickets.Ticket
    TicketStates = rt.modules.tickets.TicketStates
    prj = Project(name="Lino")
    yield prj
    settings.SITE.loading_from_dump = True
    for ln in TICKETS.splitlines():
        ln = ln.strip()
        if not ln:
            continue
        a = ln.split(':')
        state = TicketStates.accepted
        a2 = []
        for i in a:
            if '[closed]' in i:
                state = TicketStates.closed
                i = i.replace('[closed]', '')
            a2.append(i.strip())
        num = a2[0][1:]  # drop the leading marker char (presumably '#')
        title = a2[1]
        fn = tickets_dir.child(num + '.rst')
        kw = dict()
        kw.update(created=datetime.datetime.fromtimestamp(fn.ctime()))
        kw.update(modified=datetime.datetime.fromtimestamp(fn.mtime()))
        kw.update(id=int(num), summary=title, project=prj, state=state)
        logger.info("%s %s", fn, kw['modified'])
        kw.update(description=fn.read_file())
        yield Ticket(**kw)
def objects():
    """Yield a "Lino" Project followed by one Ticket per non-empty line of
    TICKETS; title/state come from the line, timestamps and description
    from the matching docs/tickets/<num>.rst file."""
    # FIX: hoisted out of the loop — both the import and the docs/tickets
    # path are loop-invariant and were recomputed on every iteration
    import lino
    tickets_dir = Path(lino.__file__).parent.parent.child('docs', 'tickets')

    Project = rt.models.tickets.Project
    Ticket = rt.models.tickets.Ticket
    TicketStates = rt.models.tickets.TicketStates
    prj = Project(name="Lino")
    yield prj
    settings.SITE.loading_from_dump = True
    for ln in TICKETS.splitlines():
        ln = ln.strip()
        if not ln:
            continue
        a = ln.split(':')
        state = TicketStates.accepted
        a2 = []
        for i in a:
            if '[closed]' in i:
                state = TicketStates.closed
                i = i.replace('[closed]', '')
            a2.append(i.strip())
        num = a2[0][1:]  # drop the leading marker char (presumably '#')
        title = a2[1]
        fn = tickets_dir.child(num + '.rst')
        kw = dict()
        kw.update(created=datetime.datetime.fromtimestamp(fn.ctime()))
        kw.update(modified=datetime.datetime.fromtimestamp(fn.mtime()))
        kw.update(id=int(num), summary=title, project=prj, state=state)
        logger.info("%s %s", fn, kw['modified'])
        kw.update(description=fn.read_file())
        yield Ticket(**kw)
def index(notebook):
    """Render, create, or update a notebook.

    - no slug: list all notebooks.
    - GET with slug: render the notebook; ``?m=<anything>`` switches to the
      edit view, which shows the raw markdown.
    - POST with slug: create or update the notebook from the submitted form,
      then redirect to its page.
    """
    if notebook is None:
        return render_template('index.html', notebooks=db.all())

    if request.method == 'POST':
        # Check if the notebook already exists
        selected_notebook = db.search(where('slug') == notebook)
        notebook_data = {}
        if selected_notebook:
            notebook_data['title'] = selected_notebook[0].get('title')
            # FIX: was .get('slig') — the typo made the slug always None,
            # since records are inserted below with the key 'slug'
            notebook_data['slug'] = selected_notebook[0].get('slug')
            notebook_data['desc'] = selected_notebook[0].get('desc')
            new_md_file = Path(notebooks_dir, notebook + '.md')
            new_md_file.write_file(
                '\n'.join(request.form['content'].split('\r\n')))
            db.update({'desc': request.form['desc'],
                       'title': request.form['title']},
                      where('slug') == notebook)
        else:
            notebook_data['title'] = request.form['title']
            notebook_data['slug'] = slugify(request.form['title'])
            notebook_data['desc'] = request.form['desc']
            # Create markdown file under notebooks dir
            new_md_file = Path(notebooks_dir, notebook_data['slug'] + '.md')
            new_md_file.write_file(
                '\n'.join(request.form['content'].split('\r\n')))
            db.insert(notebook_data)
            notebook = notebook_data['slug']
        return redirect(notebook)

    # GET with a slug: view or edit mode
    notebook_data = {}
    mode = request.args.get('m')
    selected_notebook = db.search(where('slug') == notebook)
    notebook_path = Path(notebooks_dir, notebook + '.md')
    if selected_notebook:
        notebook_data['title'] = selected_notebook[0].get('title')
        notebook_data['desc'] = selected_notebook[0].get('desc')
    else:
        notebook_data['title'] = ''
        notebook_data['desc'] = ''
    notebook_data['content'] = ''
    if mode is None:
        # view mode: render the markdown to HTML
        if selected_notebook and notebook_path.exists():
            notebook_data['content'] = md.markdown(
                notebook_path.read_file(),
                extras=["code-friendly", "fenced-code-blocks", "tables",
                        "metadata", "cuddled-lists", "link-patterns"],
                link_patterns=link_patterns)
        return render_template('notebook.html', notebook=notebook_data)
    else:
        # edit mode: show the raw markdown source
        if selected_notebook and notebook_path.exists():
            notebook_data['content'] = notebook_path.read_file()
        return render_template('notebook-edit.html', notebook=notebook_data)
def test_chef(self):
    """The deeply nested fixture file must contain exactly "bork!"."""
    nested = Path(self.d, "swedish", "chef", "bork", "bork")
    content = nested.read_file()
    assert content == "bork!"
if TAG_ZOPE in repo.tags: print('{}: A exportação de documentos já está feita -- abortando'.format(sigla)) return repo_execute(repo, 'git annex init') repo_execute(repo, 'git config annex.thin true') salvar = build_salvar(repo) try: finalizado = False <<<<<<< HEAD _dump_sapl(data_fs_path, documentos_fs_path, destino, salvar) ======= arq_mtimes = Path(repo.working_dir, 'mtimes.yaml') mtimes = yaml.load( arq_mtimes.read_file()) if arq_mtimes.exists() else {} _dump_sapl(data_fs_path, documentos_fs_path, destino, salvar, mtimes) >>>>>>> 3.1.x finalizado = True finally: # grava mundaças repo_execute(repo, 'git annex add sapl_documentos') arq_mtimes.write_file(yaml.safe_dump(mtimes, allow_unicode=True)) repo.git.add(A=True) # atualiza repo if 'master' not in repo.heads or repo.index.diff('HEAD'): # se de fato existe mudança status = 'completa' if finalizado else 'parcial' repo.index.commit(u'Exportação do zope {}'.format(status)) if finalizado: repo.git.execute('git tag -f'.split() + [TAG_ZOPE])