Code example #1
def test_SavePersistentEmpty(storage_persistent):
    # Saving an empty persistent storage writes only the file header
    # followed by a blank line.
    storage = storage_persistent

    fp = StringIO()
    storage.savePersistent(fp=fp, periodic=False)

    fp.seek(0)
    assert "[NetworkTables Storage 3.0]\n\n" == fp.read()
Code example #2
File: rules.py  Project: bigbigx/mhn
# Stdlib/requests imports this excerpt needs; app, RuleSource, Rule,
# from_buffer and render_rules come from the surrounding mhn module.
import os
import tarfile
from datetime import datetime
from StringIO import StringIO  # Python 2; on Python 3 use io.BytesIO

import requests


def fetch_sources():
    app.logger.info('Fetching rules from {} sources.'.format(
        RuleSource.query.count()))
    rules = []
    for src in RuleSource.query:
        # Download rules from every source.
        app.logger.info('Downloading from "{}".'.format(src.uri))
        # If the source looks like a gzipped tarball, stream the download
        # into an in-memory buffer and extract it under a temp directory.
        stream = src.uri.endswith('gz')
        resp = requests.get(src.uri, stream=stream)
        if resp.status_code == 200:
            if stream:
                tmpdir = '/tmp/{}-{}/'.format(src.name,
                                              datetime.utcnow().isoformat())
                os.mkdir(tmpdir)
                # Buffer the whole archive in memory; iter_content()
                # yields bytes, hence the BytesIO note above.
                ziprules = StringIO()
                for chunk in resp.iter_content(chunk_size=8192):
                    ziprules.write(chunk)
                ziprules.seek(0)
                try:
                    zrules = tarfile.open(fileobj=ziprules, mode='r:gz')
                except tarfile.TarError as terr:
                    app.logger.warning(
                        'Error in rule file: {}\n{}'.format(src.uri, str(terr)))
                else:
                    ruleslist = []
                    for member in zrules.getmembers():
                        if member.name.endswith('.rules') and member.isfile():
                            # Keep track of extracted filenames.
                            ruleslist.append(member.name)
                            zrules.extract(member, path=tmpdir)
                    # All rule files found are now extracted into tmpdir.
                    for rname in ruleslist:
                        try:
                            rulepath = os.path.join(tmpdir, rname)
                            with open(rulepath, 'rb') as rfile:
                                rules.extend(from_buffer(rfile.read()))
                            os.remove(rulepath)
                        except Exception as e:
                            app.logger.exception(
                                'Unhandled exception: {}. Continuing.'.format(e))
                            continue

                    # Extraction creates a rules/ subdirectory; remove it
                    # first, then the (now empty) tmpdir.
                    os.rmdir(os.path.join(tmpdir, 'rules'))
                    os.rmdir(tmpdir)
            else:
                # rules will contain all parsed rules.
                rules.extend(from_buffer(resp.text))
        else:
            # Non-200 responses are skipped silently.
            pass
    app.logger.info('Bulk importing {} rules.'.format(len(rules)))
    Rule.bulk_import(rules)
    render_rules()
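
The StringIO buffer above is Python 2 only; on Python 3, requests' iter_content() yields bytes and the buffer must be io.BytesIO. A minimal sketch of the same stream-buffer-extract pattern on Python 3 (fetch_rules_py3 and its parameters are hypothetical names, not part of the mhn project):

import io
import tarfile

import requests


def fetch_rules_py3(uri, destdir):
    # Stream the archive into an in-memory bytes buffer.
    resp = requests.get(uri, stream=True)
    resp.raise_for_status()
    buf = io.BytesIO()
    for chunk in resp.iter_content(chunk_size=8192):
        buf.write(chunk)
    buf.seek(0)

    # Open the buffer as a gzipped tarball and extract only *.rules files.
    extracted = []
    with tarfile.open(fileobj=buf, mode='r:gz') as tar:
        for member in tar.getmembers():
            if member.isfile() and member.name.endswith('.rules'):
                tar.extract(member, path=destdir)
                extracted.append(member.name)
    return extracted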
Code example #3
File: test_sql_copy.py  Project: CartoDB/carto-python
def in_memory_csv(request):
    # InMemIO is an in-memory binary buffer defined elsewhere in the
    # test module (the rows below are written as bytes).
    file_obj = InMemIO()

    def fin():
        file_obj.close()

    # Close the buffer once the requesting test finishes.
    request.addfinalizer(fin)

    # Write IN_MEMORY_CSV_NROWS rows of random data: an EWKT point
    # geometry, a name and an age per row.
    for i in range(IN_MEMORY_CSV_NROWS):
        row = u'SRID=4326;POINT({lon} {lat}),{name},{age}\n'.format(
            lon=random.uniform(-170.0, 170.0),
            lat=random.uniform(-80.0, 80.0),
            name=random.choice(['fulano', 'mengano', 'zutano', 'perengano']),
            age=random.randint(18, 99)
        )
        file_obj.write(bytearray(row, 'utf-8'))
    # Rewind so the consuming test reads from the start.
    file_obj.seek(0)
    return file_obj
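
As a usage sketch: the request/addfinalizer signature suggests in_memory_csv is registered as a pytest fixture, so a test can take it as an argument (the test name below is hypothetical; IN_MEMORY_CSV_NROWS comes from the original module):

def test_in_memory_csv_rowcount(in_memory_csv):
    # Every generated row ends in '\n', so counting the lines of the
    # bytes buffer recovers the number of rows the fixture wrote.
    data = in_memory_csv.read()
    assert len(data.splitlines()) == IN_MEMORY_CSV_NROWS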
Code example #4
def test_savePersistent(storage_persistent):
    storage = storage_persistent

    # Flag every entry as persistent so savePersistent() serializes it.
    for e in storage.m_entries.values():
        e.flags = NT_PERSISTENT

    fp = StringIO()
    storage.savePersistent(fp=fp, periodic=False)

    fp.seek(0)

    line = fp.readline()[:-1]
    assert "[NetworkTables Storage 3.0]" == py2(line)
    line = fp.readline()[:-1]
    assert "boolean \"\\x00\\x03\\x05\\n\"=true" == py2(line)
    line = fp.readline()[:-1]
    assert "boolean \"=\"=true" == py2(line)
    line = fp.readline()[:-1]
    assert "boolean \"CaseSensitive/KeyName\"=true" == py2(line)
    line = fp.readline()[:-1]
    assert "boolean \"boolean/false\"=false" == py2(line)
    line = fp.readline()[:-1]
    assert "boolean \"boolean/true\"=true" == py2(line)
    line = fp.readline()[:-1]
    assert "array boolean \"booleanarr/empty\"=" == py2(line)
    line = fp.readline()[:-1]
    assert "array boolean \"booleanarr/one\"=true" == py2(line)
    line = fp.readline()[:-1]
    assert "array boolean \"booleanarr/two\"=true,false" == py2(line)
    line = fp.readline()[:-1]
    # this differs from ntcore
    assert "double \"double/big\"=130000000.0" == py2(line)
    line = fp.readline()[:-1]
    assert "double \"double/neg\"=-1.5" == py2(line)
    line = fp.readline()[:-1]
    assert "double \"double/zero\"=0.0" == py2(line)
    line = fp.readline()[:-1]
    assert "array double \"doublearr/empty\"=" == py2(line)
    line = fp.readline()[:-1]
    assert "array double \"doublearr/one\"=0.5" == py2(line)
    line = fp.readline()[:-1]
    assert "array double \"doublearr/two\"=0.5,-0.25" == py2(line)
    line = fp.readline()[:-1]
    assert "raw \"raw/empty\"=" == py2(line)
    line = fp.readline()[:-1]
    assert "raw \"raw/normal\"=aGVsbG8=" == py2(line)
    line = fp.readline()[:-1]
    assert "raw \"raw/special\"=AAMFCg==" == py2(line)
    line = fp.readline()[:-1]
    assert "string \"string/empty\"=\"\"" == py2(line)
    line = fp.readline()[:-1]
    assert "string \"string/normal\"=\"hello\"" == py2(line)
    line = fp.readline()[:-1]
    assert "string \"string/special\"=\"\\x00\\x03\\x05\\n\"" == py2(line)
    line = fp.readline()[:-1]
    assert "array string \"stringarr/empty\"=" == py2(line)
    line = fp.readline()[:-1]
    assert "array string \"stringarr/one\"=\"hello\"" == py2(line)
    line = fp.readline()[:-1]
    assert "array string \"stringarr/two\"=\"hello\",\"world\\n\"" == py2(line)
    line = fp.readline()[:-1]
    assert "" == line