Code Example #1
class Parser(anyconfig.backend.base.StringParser,
             anyconfig.backend.base.BinaryFilesMixin):
    """
    Loader/Dumper of BSON files.
    """
    _type = "bson"
    _extensions = ["bson", "bsn"]  # Temporary.
    _load_opts = [] if bson.has_c() else ["codec_options"]
    _dump_opts = [] if bson.has_c() else ["check_keys", "codec_options"]
    _ordered = not bson.has_c()
    _dict_opts = [] if bson.has_c() else ["document_class"]

    def _load_options(self, container, **options):
        """
        :param container: callable to make a container object later
        """
        if "codec_options" not in options:
            options.setdefault("document_class", container)
            if any(k in options for k in _CO_OPTIONS):
                options["codec_options"] = _codec_options(**options)

        return anyconfig.utils.filter_options(self._load_opts, options)

    def load_from_string(self, content, container, **kwargs):
        """
        Load BSON config from given string `content`.

        :param content: BSON config content in bytes data string
        :param container: callable to make a container object
        :param kwargs: optional keyword parameters

        :return: Dict-like object holding config parameters
        """
        if self._load_opts:  # indicates that C extension is not used.
            objs = bson.decode_all(content, **kwargs)
        else:
            # .. note::
            #    The order of loaded configuration keys may be lost but
            #    there is no way to avoid that, AFAIK.
            objs = [container(x) for x in bson.decode_all(content)
                    if x is not None]

        return objs[0] if objs else None

    def dump_to_string(self, cnf, **kwargs):
        """Dump BSON data `cnf` to a string.

        :param cnf: BSON Data to dump
        :param kwargs: optional keyword parameters to be sanitized
        :return: string represents the configuration
        """
        if self._dump_opts:
            container = self._container_factory(**kwargs)
            opts = self._load_options(container, **kwargs)
            for key in self._dump_opts:
                if kwargs.get(key, False):
                    opts[key] = kwargs[key]
            return bson.BSON.encode(cnf, **opts)

        return bson.BSON.encode(cnf)
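
The parser above only forwards codec_options / document_class when the C extension is absent (_ordered = not bson.has_c()). The following is a minimal standalone sketch of the bson calls it wraps, assuming the pymongo 3.x-era API used here (bson.BSON.encode, bson.decode_all):

import collections

import bson
from bson.codec_options import CodecOptions

raw = bson.BSON.encode({"b": 1, "a": 2})

# Decoding with an OrderedDict document_class keeps the key order of the BSON
# document; the parser above only exposes this knob when bson.has_c() is False.
opts = CodecOptions(document_class=collections.OrderedDict)
docs = bson.decode_all(raw, opts)

print(bson.has_c(), docs[0])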
Code Example #2
class Parser(anyconfig.backend.base.FromStringLoader,
             anyconfig.backend.base.ToStringDumper):
    """
    Loader/Dumper of BSON files.
    """
    _type = "bson"
    _extensions = ["bson", "bsn"]  # Temporary.
    _load_opts = [] if bson.has_c() else ["tz_aware", "uuid_subtype"]
    _dump_opts = ["check_keys", "uuid_subtype"]
    _open_flags = ('rb', 'wb')

    dump_to_string = anyconfig.backend.base.to_method(bson.BSON.encode)

    def load_from_string(self, content, to_container, **kwargs):
        """
        Load BSON config from given string `content`.

        :param content: BSON config content in bytes data string
        :param to_container: callable to make a container object
        :param kwargs: optional keyword parameters

        :return: Dict-like object holding config parameters
        """
        if self._load_opts:  # indicates that C extension is not used.
            objs = bson.decode_all(content, as_class=to_container, **kwargs)
        else:
            # .. note::
            #    The order of loaded configuration keys may be lost but
            #    there is no way to avoid that, AFAIK.
            objs = [to_container(x) for x in bson.decode_all(content)]

        return objs[0] if objs else None
Code Example #3
class Test_10(TBC.Test_10_dumps_and_loads, HasParserTrait):

    # These options cannot be passed when the C extension is in use:
    if not bson.has_c():
        load_options = dict(as_class=dict)
        dump_options = dict(check_keys=True)
        empty_patterns = ['']
Code Example #4
File: __init__.py Project: shvar/redfs
def init(fast_db_url, big_db_url):
    """Initialize FDB/BDB factory singletons.

    As a useful side effect, creates all (missing) indices.

    @type fast_db_url: basestring
    @type big_db_url: basestring

    @raises MongoDBInitializationException: if cannot complete connection.
    """
    global FDB, BDB
    assert FDB is None and BDB is None, (FDB, BDB)

    try:
        FDB = DocStoreWrapperFactory(fast_db_url)
        BDB = GFSEnabledDocStoreWrapperFactory(big_db_url)
    except MongoDBInitializationException:
        raise

    if not bson.has_c():
        logger.warning('python-bson-ext is not installed, '
                       'performance may be lower than expected!')
    if not pymongo.has_c():
        logger.warning('python-pymongo-ext is not installed, '
                       'performance may be lower than expected!')

    logger.debug('Creating indices on Fast DataBase')
    with FDB() as fdbw:
        __make_indices(fdbqueries.FDB_INDICES_PER_COLLECTION.iteritems(),
                       dsw=fdbw)
    logger.debug('Creating indices on Big DataBase')
    with BDB() as bdbw:
        __make_indices(bdbqueries.GFS_INDICES_PER_COLLECTION.iteritems(),
                       dsw=bdbw)
Code Example #5
File: app.py Project: 0xcd03/pritunl
def setup_app():
    if not pymongo.has_c():
        logger.warning('Failed to load pymongo c bindings')

    if not bson.has_c():
        logger.warning('Failed to load bson c bindings')

    if settings.conf.debug and settings.conf.ssl:
        settings.conf.ssl = False
Code Example #6
def test_dates(self):
    doc = {"early": datetime.datetime(1686, 5, 5),
           "late": datetime.datetime(2086, 5, 5)}
    try:
        self.assertEqual(doc, BSON.encode(doc).decode())
    except ValueError:
        # Ignore ValueError when no C ext, since it's probably
        # a problem w/ 32-bit Python - we work around this in the
        # C ext, though.
        if bson.has_c():
            raise
Code Example #7
# Copyright 2009-2014 MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Fail if the C extension module doesn't exist.

Only really intended to be used by internal build scripts.
"""

import sys
sys.path[0:0] = [""]

import bson
import pymongo

if not pymongo.has_c() or not bson.has_c():
    sys.exit("could not load C extensions")
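
For a quick interactive version of the same check, a minimal sketch (it only assumes pymongo, which provides both modules, is installed):

import bson
import pymongo

# Each call returns True only when the corresponding C accelerator was built
# and imported successfully.
print("bson C extension loaded:", bson.has_c())
print("pymongo C extension loaded:", pymongo.has_c())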
Code Example #8
File: check.py Project: 1990754123/pritunl
def setup_check():
    if not pymongo.has_c():
        logger.warning('Failed to load pymongo c bindings')

    if not bson.has_c():
        logger.warning('Failed to load bson c bindings')
Code Example #9
File: mongo.py Project: afdnlw/pritunl
def setup_mongo():
    if not pymongo.has_c():
        logger.warning('Failed to load pymongo c bindings')

    if not bson.has_c():
        logger.warning('Failed to load bson c bindings')

    prefix = settings.conf.mongodb_collection_prefix or ''
    last_error = time.time() - 24
    while True:
        try:
            client = pymongo.MongoClient(settings.conf.mongodb_url,
                connectTimeoutMS=2000)
            break
        except pymongo.errors.ConnectionFailure:
            time.sleep(0.5)
            if time.time() - last_error > 30:
                last_error = time.time()
                logger.exception('Error connecting to mongodb server')

    database = client.get_default_database()
    cur_collections = database.collection_names()

    if prefix + 'messages' not in cur_collections:
        database.create_collection(prefix + 'messages', capped=True,
            size=100000)

    mongo.collections.update({
        'transaction': getattr(database, prefix + 'transaction'),
        'queue': getattr(database, prefix + 'queue'),
        'task': getattr(database, prefix + 'task'),
        'system': getattr(database, prefix + 'system'),
        'messages': getattr(database, prefix + 'messages'),
        'administrators': getattr(database, prefix + 'administrators'),
        'users': getattr(database, prefix + 'users'),
        'users_key_link': getattr(database, prefix + 'users_key_link'),
        'organizations': getattr(database, prefix + 'organizations'),
        'hosts': getattr(database, prefix + 'hosts'),
        'hosts_usage': getattr(database, prefix + 'hosts_usage'),
        'servers': getattr(database, prefix + 'servers'),
        'servers_output': getattr(database, prefix + 'servers_output'),
        'servers_bandwidth': getattr(database, prefix + 'servers_bandwidth'),
        'servers_ip_pool': getattr(database, prefix + 'servers_ip_pool'),
        'dh_params': getattr(database, prefix + 'dh_params'),
        'auth_nonces': getattr(database, prefix + 'auth_nonces'),
        'auth_limiter': getattr(database, prefix + 'auth_limiter'),
        'otp': getattr(database, prefix + 'otp'),
        'otp_cache': getattr(database, prefix + 'otp_cache'),
    })

    if prefix + 'log_entries' not in cur_collections:
        log_limit = settings.app.log_entry_limit
        database.create_collection(prefix + 'log_entries', capped=True,
            size=log_limit * 256 * 2, max=log_limit)

    mongo.collections.update({
        'log_entries': getattr(database, prefix + 'log_entries'),
    })

    for collection_name, collection in mongo.collections.items():
        collection.name_str = collection_name

    settings.init()

    mongo.collections['transaction'].ensure_index('lock_id', unique=True)
    mongo.collections['transaction'].ensure_index([
        ('ttl_timestamp', pymongo.ASCENDING),
        ('state', pymongo.ASCENDING),
        ('priority', pymongo.DESCENDING),
    ])
    mongo.collections['queue'].ensure_index('runner_id')
    mongo.collections['queue'].ensure_index('ttl_timestamp')
    mongo.collections['task'].ensure_index('type', unique=True)
    mongo.collections['task'].ensure_index('ttl_timestamp')
    mongo.collections['log_entries'].ensure_index([
        ('timestamp', pymongo.DESCENDING),
    ])
    mongo.collections['messages'].ensure_index('channel')
    mongo.collections['administrators'].ensure_index('username', unique=True)
    mongo.collections['users'].ensure_index([
        ('type', pymongo.ASCENDING),
        ('org_id', pymongo.ASCENDING),
    ])
    mongo.collections['users'].ensure_index([
        ('org_id', pymongo.ASCENDING),
        ('name', pymongo.ASCENDING),
    ])
    mongo.collections['users_key_link'].ensure_index('key_id')
    mongo.collections['users_key_link'].ensure_index('short_id', unique=True)
    mongo.collections['organizations'].ensure_index('type')
    mongo.collections['hosts'].ensure_index('name')
    mongo.collections['hosts_usage'].ensure_index([
        ('host_id', pymongo.ASCENDING),
        ('timestamp', pymongo.ASCENDING),
    ])
    mongo.collections['servers'].ensure_index('name')
    mongo.collections['servers'].ensure_index('ping_timestamp')
    mongo.collections['servers_output'].ensure_index([
        ('server_id', pymongo.ASCENDING),
        ('timestamp', pymongo.ASCENDING),
    ])
    mongo.collections['servers_bandwidth'].ensure_index([
        ('server_id', pymongo.ASCENDING),
        ('period', pymongo.ASCENDING),
        ('timestamp', pymongo.ASCENDING),
    ])
    mongo.collections['servers_ip_pool'].ensure_index([
        ('server_id', pymongo.ASCENDING),
        ('user_id', pymongo.ASCENDING),
    ])
    mongo.collections['servers_ip_pool'].ensure_index('user_id')
    mongo.collections['dh_params'].ensure_index('dh_param_bits')
    mongo.collections['auth_nonces'].ensure_index([
        ('token', pymongo.ASCENDING),
        ('nonce', pymongo.ASCENDING),
    ], unique=True)

    mongo.collections['users_key_link'].ensure_index('timestamp',
        expireAfterSeconds=settings.app.key_link_timeout)
    mongo.collections['auth_nonces'].ensure_index('timestamp',
        expireAfterSeconds=settings.app.auth_time_window * 2.1)
    mongo.collections['auth_limiter'].ensure_index('timestamp',
        expireAfterSeconds=settings.app.auth_limiter_ttl)
    mongo.collections['otp'].ensure_index('timestamp',
        expireAfterSeconds=120)
    mongo.collections['otp_cache'].ensure_index('timestamp',
        expireAfterSeconds=settings.user.otp_cache_ttl)

    if not auth.Administrator.collection.find_one():
        auth.Administrator(
            username=DEFAULT_USERNAME,
            password=DEFAULT_PASSWORD,
            default=True,
        ).commit()

    secret_key = settings.app.cookie_secret
    if not secret_key:
        secret_key = re.sub(r'[\W_]+', '',
            base64.b64encode(os.urandom(128)))[:64]
        settings.app.cookie_secret = secret_key
        settings.commit()
    app.app.secret_key = secret_key.encode()

    server_api_key = settings.app.server_api_key
    if not server_api_key:
        server_api_key = re.sub(r'[\W_]+', '',
            base64.b64encode(os.urandom(128)))[:64]
        settings.app.server_api_key = server_api_key
        settings.commit()
Code Example #10
File: custombson.py Project: jacob22/pytransact
        if module in needs_replacing:
            del sys.modules[name]

    for module, attr in ((bson, '_cbson'), (pymongo, '_cmessage'),
                         (pymongo.message, '_cmessage')):
        try:
            delattr(module, attr)
        except AttributeError:
            pass

    del attr, name, module, needs_replacing

    importlib.reload(bson)
    importlib.reload(pymongo.message)

    assert not bson.has_c()
    assert not pymongo.has_c()


import types, functools
import bson, bson.son, contextlib, struct
from bson.binary import Binary, OLD_UUID_SUBTYPE
from gridfs import GridFS
from collections import OrderedDict
from bson import _elements_to_dict, DBRef
from pytransact import spickle

max_bson_element_size = 2 ** 18

Extension = spickle.ExtensionType()
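
The reload logic in the fragment above forces the pure-Python code paths by removing the _cbson / _cmessage accelerator attributes and reloading the affected modules. Below is a standalone sketch of the same idea for bson alone; it relies on pymongo internals (the _cbson accelerator module), so treat it as an illustration rather than a supported API:

import importlib
import sys

import bson

# Block the accelerator so that re-importing it fails, then drop the cached
# attribute (as the snippet above does) before reloading bson itself.
sys.modules["bson._cbson"] = None
if hasattr(bson, "_cbson"):
    del bson._cbson
importlib.reload(bson)

assert not bson.has_c()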