Example #1
def write_to_file(file, content):
    import os
    from unipath import Path

    # Resolve the module's name to an absolute path and write the decoded
    # content into the adjacent "news" directory.
    p = Path(os.path.realpath(__name__))
    u = Path(p.parent, "news", file)

    u.write_file(html_decode(content))
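
Examples #1 and #21 call an html_decode helper that is not shown in the listing. A minimal sketch of what such a helper might look like, assuming it simply unescapes HTML entities before the content is written (the name comes from the examples; the body is an assumption):

import html

def html_decode(content):
    # Hypothetical helper: turn HTML entities (&amp;, &lt;, ...) back into characters.
    return html.unescape(content)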
Example #2
    def append(self, key):
        key_path = Path(str(key))

        if key_path.isfile():
            with open(str(key_path)) as f:
                key = f.read()

        if not isinstance(key, bytes):
            key = key.encode('utf-8')

        if key in self:
            return

        directory = Path(self.user.path, 'keydir', self.user.name,
                         hashlib.md5(key.strip().split()[1]).hexdigest())
        directory.mkdir(parents=True)

        key_file = Path(directory, "%s.pub" % self.user.name)
        if key_file.exists() and key_file.read_file() == key:
            return

        key_file.write_file(key, mode='wb')

        self.user.git.commit(['keydir'],
                             'Added new key for user %s' % self.user.name)

        super(ListKeys, self).append(key)
Example #3
def add_deployment(directory, name, templates_dir='templates', deployment_dir='deployment', mode=0o777):
    """ Add a new deployment if it does not already exist.
    """
    context = {
        'datetime': datetime.datetime.now(),
        'name': name,
        'project_name': get_project_name(directory)
    }

    dd, df = get_deployment_info(directory, name)

    if df.exists():
        raise ExistingDeploymentError()

    # create deployments directory
    df.parent.mkdir(parents=True, mode=mode)

    # write deployment file
    df.write_file(
        get_rendered_template('deployment.py', context)
    )
    top_td = Path(__file__).parent.child(templates_dir)
    td = top_td.child(deployment_dir)
    for tf in td.walk():
        if tf.isdir():
            continue
        partitioned = tf.partition(td)
        target = Path(dd, Path(partitioned[2][1:]))
        target_dir = target.parent
        if not target_dir.exists():
            target_dir.mkdir(parents=True, mode=mode)
        tmp = tf.partition(top_td)[2][1:]
        rendered = get_rendered_template(tmp, context)
        target.write_file(rendered)
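
Example #3 relies on get_deployment_info and get_rendered_template, which are defined elsewhere in the project. A minimal sketch of what get_deployment_info might return, assuming it yields the deployment directory and the deployment file path for the given name (both the layout and the file name are assumptions, not the original implementation):

def get_deployment_info(directory, name):
    # Hypothetical helper: deployment directory plus a "<name>.py" file inside it.
    dd = Path(directory, 'deployment')
    df = Path(dd, '%s.py' % name)
    return dd, df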
Example #4
def normaliza_dump_mysql(nome_arquivo):
    # Normalize a MySQL dump: keep everything from the first
    # "Table structure for table" block onward and prepend the standard
    # header (cabecalho), formatted with the database name from the file stem.
    arquivo = Path(nome_arquivo).expand()
    banco = arquivo.stem
    conteudo = arquivo.read_file()
    inicio = re.finditer('--\n-- Table structure for table .*\n--\n', conteudo)
    inicio = next(inicio).start()
    conteudo = cabecalho.format(banco=banco) + conteudo[inicio:]
    arquivo.write_file(conteudo)
Example #5
    def test_load_directionals_no_space_end_of_line(self):
        temp_dir = Path(tempfile.mkdtemp())
        temp_file = Path(temp_dir, 'test.txt')
        temp_file.write_file('Northeast NE\n')

        Directional.load_directionals(temp_file.absolute())

        temp_dir.rmtree()

        self.assertEqual('Northeast', Directional.objects.first().direction)
        self.assertEqual('NE', Directional.objects.first().abbreviation)
Example #6
    def test_load_street_types_no_space_end_of_line(self):
        temp_dir = Path(tempfile.mkdtemp())
        temp_file = Path(temp_dir, 'test.txt')
        temp_file.write_file('VILLAGE VILL VLG\n')

        StreetType.load_street_types(temp_file.absolute())

        temp_dir.rmtree()

        self.assertEqual('VILLAGE VILL', StreetType.objects.first().name)
        self.assertEqual('VLG', StreetType.objects.first().abbreviation)
Example #7
    def test_load_states_space_end_of_line(self):
        temp_dir = Path(tempfile.mkdtemp())
        temp_file = Path(temp_dir, 'test.txt')
        temp_file.write_file('Florida FL \n')

        State.load_states(temp_file.absolute())

        temp_dir.rmtree()

        self.assertEqual('Florida', State.objects.first().name)
        self.assertEqual('FL', State.objects.first().abbreviation)
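
Examples #5-#7 follow the same pattern: create a temporary directory, write a fixture with Path.write_file, load it, and clean up with Path.rmtree. A minimal, self-contained round trip of that pattern outside a test case (file name and contents are illustrative only):

import tempfile
from unipath import Path

temp_dir = Path(tempfile.mkdtemp())
temp_file = Path(temp_dir, 'fixture.txt')
temp_file.write_file('Northeast NE\n')   # write text content

print(temp_file.read_file())             # read it back: 'Northeast NE\n'

temp_dir.rmtree()                        # remove the directory and everything in it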
Example #8
def process(appname):
    appdir = Path(appname)
    if not appdir.isdir():
        print("Error: there is no app called {0}.".format(appdir))
        sys.exit(1)
    # else
    static = Path(appname, 'static', appname)
    static.mkdir(True)
    templates = Path(appname, 'templates', appname)
    templates.mkdir(True)
    urls = Path(appname, 'urls.py')
    if not urls.isfile():
        urls.write_file(urls_py)
Example #9
    def create(self, entity):
        repo_file = Path(self.path, 'conf/repos/%s.conf' % entity)
        if repo_file.exists():
            raise ValueError('Repository %s already exists' % entity)
        # If there are missing parent paths in the repo path, create them so we don't get IOErrors
        # In the case of a repo having names with slashes (e.g. "username/reponame")
        elif repo_file.parent != Path(""):
            repo_file.parent.mkdir(parents=True)

        repo_file.write_file("repo %s\n" % entity)

        self.git.commit([str(repo_file)], 'Created repo %s' % entity)

        return Repository(entity, self.path, self.git)
Example #10
    def create(self, lookup_repo):
        repo_file = Path(self.path, 'conf/repos/%s.conf' % lookup_repo)
        if repo_file.exists():
            raise ValueError('Repository %s already exists' % lookup_repo)
        # If there are missing parent paths in the repo path, create them so we don't get IOErrors
        # In the case of a repo having names with slashes (e.g. "username/reponame")
        elif repo_file.parent != Path(""):
            repo_file.parent.mkdir(parents=True)

        repo_file.write_file("repo %s\n" % lookup_repo)

        self.git.commit([str(repo_file)], 'Created repo %s' % lookup_repo)

        return Repository(lookup_repo, self.path, self.git)
Example #11
def join_file(path, output_file_name=None, chunk_size='1K'):
    path = Path(path)
    chunk_size = get_real_size(chunk_size)
    if output_file_name is None:
        output_file_path = create_file_name(path)
    else:
        output_file_path = Path(output_file_name)

    output_file_path.write_file(b'', 'wb')

    with open(output_file_path, 'wb') as output_file:
        for cur_file_name in files_iter(path):
            with open(cur_file_name, 'rb') as cur_file:
                for piece in read_in_chunks(cur_file, chunk_size):
                    output_file.write(piece)
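
Example #11 depends on several helpers (get_real_size, create_file_name, files_iter, read_in_chunks) that are defined elsewhere. A minimal sketch of what read_in_chunks might look like, assuming it is a generator that yields fixed-size pieces of an open binary file (the name and call signature come from the example; the body is an assumption):

def read_in_chunks(file_object, chunk_size):
    # Hypothetical helper: yield chunk_size-byte pieces until EOF.
    while True:
        piece = file_object.read(chunk_size)
        if not piece:
            break
        yield piece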
Example #12
    def create_file(self, which_one):
        self.render_config_for_file_template(which_one=which_one)

        logger.info('Creating config files in parent dir: %s'
                    % self.install_path)

        # gets self.postdeactivate if which_one=postdeactivate
        contents = getattr(self, which_one)

        logger.info('%s: Writing contents to file ...' % which_one)

        p = Path(self.install_path, which_one)
        # write configuration and append it to the file
        p.write_file(contents, 'a+')
        logger.info('...done')
Example #13
def get_thesaurus_text():
    p = Path('./thesaurus.txt')

    text = None

    if not p.exists():
        logging.info("File doesn't exist, parsing from internet")
        text = _retrieve_thesaurus_text()
        p.write_file(text.encode('utf-8'))
    else:
        logging.info("File exists, reading in")
        text = p.read_file().decode('utf-8')

    logging.info("Read text length: %s" % len(text))

    return text
Example #14
def generate_thumbnail(obj, image_field='logo'):
    source = getattr(obj, image_field)

    source_img = Path(source.path)

    parent_dir = source_img.parent

    target_img = Path(
        parent_dir, source_img.stem + THUMBNAIL_SUFFIX + source_img.ext
    )

    # Open the source image in binary mode and write the generated
    # thumbnail bytes out to the target path.
    source_img_open = open(source_img, 'rb')

    image_generator = Thumbnail(source=source_img_open)
    resized_image = image_generator.generate()

    target_img.write_file(resized_image.read(), mode='wb')
Example #15
def dump_sapl(sigla):
    sigla = sigla[-3:]  # drop the prefix (e.g. 'sapl_cm_')
    data_fs_path, documentos_fs_path = [
        DIR_DADOS_MIGRACAO.child('datafs',
                                 '{}_cm_{}.fs'.format(prefixo, sigla))
        for prefixo in ('Data', 'DocumentosSapl')
    ]

    assert exists(data_fs_path), 'Origem não existe: {}'.format(data_fs_path)
    if not exists(documentos_fs_path):
        documentos_fs_path = data_fs_path

    nome_banco_legado = 'sapl_cm_{}'.format(sigla)
    destino = DIR_DADOS_MIGRACAO.child('repos', nome_banco_legado)
    destino.mkdir(parents=True)
    repo = git.Repo.init(destino)
    if TAG_ZOPE in repo.tags:
        print(
            '{}: A exportação de documentos já está feita -- abortando'.format(
                sigla))
        return

    repo_execute(repo, 'git annex init')
    repo_execute(repo, 'git config annex.thin true')

    salvar = build_salvar(repo)
    try:
        finalizado = False
        arq_mtimes = Path(repo.working_dir, 'mtimes.yaml')
        mtimes = yaml.safe_load(
            arq_mtimes.read_file()) if arq_mtimes.exists() else {}
        _dump_sapl(data_fs_path, documentos_fs_path, destino, salvar, mtimes)
        finalizado = True
    finally:
        # record the changes
        repo_execute(repo, 'git annex add sapl_documentos')
        arq_mtimes.write_file(yaml.safe_dump(mtimes, allow_unicode=True))
        repo.git.add(A=True)
        # update the repo
        if 'master' not in repo.heads or repo.index.diff('HEAD'):
            # only if there is an actual change
            status = 'completa' if finalizado else 'parcial'
            repo.index.commit(u'Exportação do zope {}'.format(status))
        if finalizado:
            repo.git.execute('git tag -f'.split() + [TAG_ZOPE])
Example #16
def unused_write_release_notes():
    """
    Generate docs/releases/x.y.z.rst file from setup_info.
    """
    v = env.SETUP_INFO['version']
    if v.endswith('+'):
        return
    notes = Path(env.ROOTDIR, 'docs', 'releases', '%s.rst' % v)
    if notes.exists():
        return
    must_confirm("Create %s" % notes.absolute())
    #~ context = dict(date=get_current_date().strftime(env.long_date_format))
    context = dict(date=get_current_date().strftime('%Y%m%d'))
    context.update(env.SETUP_INFO)
    txt = """\
==========================
Version %(version)s
==========================

Release process started :blogref:`%(date)s`


List of changes
===============

New features
------------

Optimizations
-------------

Bugfixes
--------

Manual tasks after upgrade
--------------------------


""" % context
    notes.write_file(txt)
    notes.parent.child('index.rst').set_times()
    args = [os.environ['EDITOR']]
    args += [notes.absolute()]
    local(' '.join(args))
Example #17
    def create(self, name):
        """
        Given a name, create the group. Idempotent.
        :param name: the group name to create.
        :type name: str
        :returns: The group created.
        :rtype: pyolite.models.Group
        """
        path = Path(
            os.path.join(self.path, 'conf', 'groups', '{}.conf'.format(name)))
        if path.exists():  # Already exists
            return self.get(name)
        # If there are missing parent paths in the group path, create them so we don't get IOErrors
        # In the case of a repo having names with slashes (e.g. "username/reponame")
        elif path.parent != Path(""):
            path.parent.mkdir(parents=True)

        path.write_file("")
        self.git.commit([str(path)], 'Created group %s' % name)
        return Group(name, self.path, self.git)
Example #18
    def move_to_venv(self, which_one):
        """
        Moves the created config_files into the bin folder to be executed.
        Does this by first pasting all the contents of the temporary file
        into the new or existing target file and then deleting the temp file.
        """
        target = Path(self.venv_folder, self.project_name, 'bin', which_one)
        source = Path(self.install_path, which_one)
        logger.info('target: %s, move_orig: %s' % (target, source))

        if source.exists():
            logger.info('Moving %s into place ...' % which_one)
            content = source.read_file()

            # make sure the directory exists
            if not target.parent.exists():
                target.parent.mkdir(parents=True)
            target.write_file(content, 'w+')

            source.remove()

        logger.info('...done')
Example #19
    def append(self, key):
        key_path = Path(key)

        if key_path.isfile():
            with open(str(key_path)) as f:
                key = f.read()

        if key in self:
            return

        directory = Path(self.user.path, 'keydir', self.user.name,
                         hashlib.md5(key.strip().split()[1]).hexdigest())
        directory.mkdir(parents=True)

        key_file = Path(directory, "%s.pub" % self.user.name)
        if key_file.exists() and key_file.read_file() == key:
            return

        key_file.write_file(key)

        self.user.git.commit(['keydir'],
                             'Added new key for user %s' % self.user.name)

        super(ListKeys, self).append(key)
Example #20
import arrow, json, math
from functools import wraps
from tinydb import TinyDB, Query
import uuid
import os, logging
from logging.handlers import RotatingFileHandler
from unipath import Path

logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
# create a file handler

log_file = Path(
    os.path.realpath(__file__)).parent.parent.child('data').child('serial.log')
if not log_file.exists():
    log_file.write_file("")
handler = RotatingFileHandler(log_file,
                              mode='a',
                              maxBytes=5 * 1024 * 1024,
                              backupCount=2,
                              encoding=None,
                              delay=0)
handler.setLevel(logging.INFO)
# create a logging format
formatter = logging.Formatter(
    '%(asctime)s - %(name)s - %(levelname)s - %(message)s')
handler.setFormatter(formatter)
# add the handlers to the logger
logger.addHandler(handler)
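
Once this module-level setup has run, any code that imports the module can log through logger and the messages land in data/serial.log via the rotating handler. A short usage sketch, assuming the module above is importable as serial_log (that module name is an assumption):

# elsewhere in the project
from serial_log import logger

logger.info('Serial reader started')           # appended to data/serial.log
logger.warning('Timeout while reading port')   # file rotates at ~5 MB, keeping 2 backups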

Example #21
def write_to_file_path(filename, content, path):
    from unipath import Path

    p = Path(path, filename)
    p.write_file(html_decode(content))
Example #22
    def save_file(self, response):
        p = Path("data/" + response.url.split("/")[-1] + '.html')
        p.write_file(response.body)
Example #23
def index(notebook):
    # return '<h1>Hello World!</h1>'
    if notebook is None:
        notebooks_list = db.all()
        return render_template('index.html', notebooks=notebooks_list)
    else:
        if request.method == 'POST':
            # Check if the notebook already exists
            selected_notebook = db.search(where('slug') == notebook)
            notebook_data = {}

            if selected_notebook:
                notebook_data['title'] = selected_notebook[0].get('title')
                notebook_data['slug'] = selected_notebook[0].get('slug')
                notebook_data['desc'] = selected_notebook[0].get('desc')

                new_md_file = Path(notebooks_dir, notebook + '.md')
                new_md_file.write_file('\n'.join(request.form['content'].split('\r\n')))

                db.update({'desc': request.form['desc'], 'title': request.form['title']}, where('slug') == notebook)
            else:
                notebook_data['title'] = request.form['title']
                notebook_data['slug'] = slugify(request.form['title'])
                notebook_data['desc'] = request.form['desc']
                # notebook_data['content'] = request.form['content'].split('\r\n')
                # Create markdown file under notebooks dir
                new_md_file = Path(notebooks_dir, notebook_data['slug'] + '.md')
                new_md_file.write_file('\n'.join(request.form['content'].split('\r\n')))

                db.insert(notebook_data)
                notebook = notebook_data['slug']

            # return render_template('notebook.html', notebook=notebook_data)
            return redirect(notebook)
        else:
            notebook_data = {}
            notebook_html = ''

            mode = request.args.get('m')
            selected_notebook = db.search(where('slug') == notebook)
            notebook_path = Path(notebooks_dir, notebook + '.md')

            if selected_notebook:
                notebook_data['title'] = selected_notebook[0].get('title')
                notebook_data['desc'] = selected_notebook[0].get('desc')
            else:
                notebook_data['title'] = ''
                notebook_data['desc'] = ''
                notebook_data['content'] = ''

            if mode is None:
                if selected_notebook:
                    if notebook_path.exists():
                        notebook_html = md.markdown(notebook_path.read_file(), extras=["code-friendly", "fenced-code-blocks", "tables", "metadata", "cuddled-lists", "link-patterns"], link_patterns=link_patterns)
                        notebook_data['content'] = notebook_html
                    # notebook_data['content'] = '\n'.join(selected_notebook[0].get('content'))

                return render_template('notebook.html', notebook=notebook_data)
            else:
                if selected_notebook:
                    if notebook_path.exists():
                        notebook_html = notebook_path.read_file()
                        notebook_data['content'] = notebook_html

                return render_template('notebook-edit.html', notebook=notebook_data)
Example #24
    salvar = build_salvar(repo)
    try:
        finalizado = False
        arq_mtimes = Path(repo.working_dir, 'mtimes.yaml')
        mtimes = yaml.safe_load(
            arq_mtimes.read_file()) if arq_mtimes.exists() else {}
        _dump_sapl(data_fs_path, documentos_fs_path, destino, salvar, mtimes)
        finalizado = True
    finally:
        # record the changes
        repo_execute(repo, 'git annex add sapl_documentos')
        arq_mtimes.write_file(yaml.safe_dump(mtimes, allow_unicode=True))
        repo.git.add(A=True)
        # update the repo
        if 'master' not in repo.heads or repo.index.diff('HEAD'):
            # only if there is an actual change
            status = 'completa' if finalizado else 'parcial'
            repo.index.commit(u'Exportação do zope {}'.format(status))
        if finalizado:
            repo.git.execute('git tag -f'.split() + [TAG_ZOPE])


if __name__ == "__main__":
    if len(sys.argv) == 2:
        sigla = sys.argv[1]
        dump_sapl(sigla)
    else:
Example #25
def write_raw_content(report_dir, host, filename, content):
    f = Path(report_dir, [host, filename])
    f.parent.mkdir(parents=True)
    f.write_file(content, mode="w")