Code example #1
def connect_to_mongodb(db,
                       host,
                       port=27017,
                       tz_aware=True,
                       user=None,
                       password=None,
                       retry_wait_time=0.1,
                       proxy=True,
                       **kwargs):
    """
    Returns a MongoDB Database connection, optionally wrapped in a proxy. The proxy
    handles AutoReconnect errors by retrying read operations, since these exceptions
    typically indicate a temporary step-down condition for MongoDB.
    """
    # The MongoReplicaSetClient class is deprecated in Mongo 3.x, in favor of using
    # the MongoClient class for all connections. Update/simplify this code when using
    # PyMongo 3.x.
    if kwargs.get('replicaSet'):
        # Enable reading from secondary nodes in the MongoDB replicaset by using the
        # MongoReplicaSetClient class.
        # The 'replicaSet' parameter in kwargs is required for secondary reads.
        # The read_preference should be set to a proper value, like SECONDARY_PREFERRED.
        mongo_client_class = pymongo.MongoReplicaSetClient
    else:
        # No 'replicaSet' in kwargs - so no secondary reads.
        mongo_client_class = pymongo.MongoClient

    # If the MongoDB server uses a separate authentication database, it should be specified here.
    auth_source = kwargs.get('authsource', '') or None

    # Sanitize a kwarg which may be present but is no longer expected.
    # AED 2020-03-02 TODO: Remove this once 'auth_source' no longer appears in kwargs.
    kwargs.pop('auth_source', None)

    # If read_preference is given as a name of a valid ReadPreference.<NAME>
    # constant such as "SECONDARY_PREFERRED" or a mongo mode such as
    # "secondaryPreferred", convert it. Otherwise pass it through unchanged.
    if 'read_preference' in kwargs:
        read_preference = MONGO_READ_PREFERENCE_MAP.get(
            kwargs['read_preference'], kwargs['read_preference'])

        read_preference = getattr(ReadPreference, read_preference, None)
        if read_preference is not None:
            kwargs['read_preference'] = read_preference

    mongo_conn = pymongo.database.Database(
        mongo_client_class(host=host,
                           port=port,
                           tz_aware=tz_aware,
                           document_class=dict,
                           **kwargs), db)

    if proxy:
        mongo_conn = MongoProxy(mongo_conn, wait_time=retry_wait_time)
    # If credentials were provided, authenticate the user.
    if user is not None and password is not None:
        mongo_conn.authenticate(user, password, source=auth_source)

    return mongo_conn
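
A hedged usage sketch of the function above; the database name, host, credentials, and replica set name are placeholder assumptions, not values from the project:

# Illustrative call only: every connection value here is a made-up placeholder.
mongo_db = connect_to_mongodb(
    'edxapp',                                # db: assumed database name
    'localhost',                             # host: assumed MongoDB host
    port=27017,
    user='modulestore_user',                 # hypothetical credentials
    password='secret',
    replicaSet='rs0',                        # selects MongoReplicaSetClient above
    read_preference='SECONDARY_PREFERRED',   # converted to a ReadPreference constant
)
print(mongo_db.name)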
Code example #2
    def __init__(self,
                 db,
                 collection,
                 host,
                 port=27017,
                 tz_aware=True,
                 user=None,
                 password=None,
                 asset_collection=None,
                 retry_wait_time=0.1,
                 **kwargs):
        """
        Create & open the connection, authenticate, and provide pointers to the collections
        """
        self.database = MongoProxy(pymongo.database.Database(
            pymongo.MongoClient(host=host,
                                port=port,
                                tz_aware=tz_aware,
                                **kwargs), db),
                                   wait_time=retry_wait_time)

        if user is not None and password is not None:
            self.database.authenticate(user, password)

        self.course_index = self.database[collection + '.active_versions']
        self.structures = self.database[collection + '.structures']
        self.definitions = self.database[collection + '.definitions']

        # Every app has write access to the db (vs. having a flag to indicate read-only vs. write access).
        # Force mongo to report errors, at the expense of performance.
        # The pymongo docs are unclear here; the Java driver's WriteConcern docs explain the setting:
        # http://api.mongodb.org/java/2.10.1/com/mongodb/WriteConcern.html
        self.course_index.write_concern = {'w': 1}
        self.structures.write_concern = {'w': 1}
        self.definitions.write_concern = {'w': 1}
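
The write_concern = {'w': 1} assignments above are PyMongo 2.x style. A rough PyMongo 3.x equivalent, offered only as an assumption about the newer API and not as code from this project, would be:

from pymongo import MongoClient, WriteConcern

# Placeholder host, port, and names; w=1 asks the server to acknowledge each write.
client = MongoClient('localhost', 27017)
database = client['edxapp']
course_index = database['modulestore.active_versions'].with_options(
    write_concern=WriteConcern(w=1)
)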
Code example #3
File: tests.py  Project: GbalsaC/bitnamiP
 def test_exceptions2(self):
     mongo_connection = MongoProxy(
         MongoConnection(exceptions_to_raise=[AutoReconnect, AutoReconnect]),
         methods_needing_retry={MongoConnection: ['find', ]}
     )
     self.assertEqual(mongo_connection.find(), "Finished.")
     self.assertEqual(mongo_connection.call_count, 3)
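
The assertions above imply a small MongoConnection test double: find() raises the queued exceptions one by one, counts every call, and finally returns "Finished.". A minimal sketch reconstructed from those assertions (not the project's actual test helper):

class MongoConnection(object):
    # Stand-in used only for illustration; behavior inferred from the tests on this page.
    def __init__(self, exceptions_to_raise=None):
        self.exceptions_to_raise = list(exceptions_to_raise or [])
        self.call_count = 0

    def find(self):
        self.call_count += 1
        if self.exceptions_to_raise:
            # Raise the next queued exception class, as the AutoReconnect tests expect.
            raise self.exceptions_to_raise.pop(0)()
        return "Finished."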
Code example #4
File: tests.py  Project: GbalsaC/bitnamiP
 def test_exceptions1(self):
     mongo_connection = MongoProxy(
         MongoConnection(exceptions_to_raise=[ValueError]), methods_needing_retry={MongoConnection: ['find', ]}
     )
     with self.assertRaises(ValueError):
         mongo_connection.find()
     self.assertEqual(mongo_connection.call_count, 1)
Code example #5
 def test_no_exceptions1(self):
     mongo_connection = MongoProxy(
         MongoConnection(),
         methods_needing_retry={MongoConnection: [
             'find',
         ]})
     self.assertEqual(mongo_connection.find(), "Finished.")
     self.assertEqual(mongo_connection.call_count, 1)
Code example #6
 def test_exceptions2(self):
     mongo_connection = MongoProxy(
         MongoConnection(
             exceptions_to_raise=[AutoReconnect, AutoReconnect]),
         methods_needing_retry={MongoConnection: [
             'find',
         ]})
     self.assertEqual(mongo_connection.find(), "Finished.")
     self.assertEqual(mongo_connection.call_count, 3)
Code example #7
 def test_exceptions1(self):
     mongo_connection = MongoProxy(
         MongoConnection(exceptions_to_raise=[ValueError]),
         methods_needing_retry={MongoConnection: [
             'find',
         ]})
     with self.assertRaises(ValueError):
         mongo_connection.find()
     self.assertEqual(mongo_connection.call_count, 1)
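
The docstrings elsewhere on this page say MongoProxy retries read operations when AutoReconnect is raised. A minimal wrapper in that spirit, written here only as an illustration and not as the project's MongoProxy implementation, could look like:

import time

from pymongo.errors import AutoReconnect

class RetryProxy(object):
    # Illustrative stand-in: retries the listed methods when AutoReconnect is raised.
    def __init__(self, proxied, wait_time=0.1, methods_needing_retry=None, retries=5):
        self._proxied = proxied
        self._wait_time = wait_time
        self._retries = retries
        self._retry_methods = set()
        for names in (methods_needing_retry or {}).values():
            self._retry_methods.update(names)

    def __getattr__(self, name):
        attr = getattr(self._proxied, name)
        if name not in self._retry_methods or not callable(attr):
            return attr

        def wrapper(*args, **kwargs):
            for attempt in range(self._retries):
                try:
                    return attr(*args, **kwargs)
                except AutoReconnect:
                    if attempt == self._retries - 1:
                        raise
                    time.sleep(self._wait_time)
        return wrapper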
Code example #8
File: mongo_utils.py  Project: edx/edx-platform
def connect_to_mongodb(
    db, host,
    port=27017, tz_aware=True, user=None, password=None,
    retry_wait_time=0.1, proxy=True, **kwargs
):
    """
    Returns a MongoDB Database connection, optionally wrapped in a proxy. The proxy
    handles AutoReconnect errors by retrying read operations, since these exceptions
    typically indicate a temporary step-down condition for MongoDB.
    """
    # The MongoReplicaSetClient class is deprecated in Mongo 3.x, in favor of using
    # the MongoClient class for all connections. Update/simplify this code when using
    # PyMongo 3.x.
    if kwargs.get('replicaSet'):
        # Enable reading from secondary nodes in the MongoDB replicaset by using the
        # MongoReplicaSetClient class.
        # The 'replicaSet' parameter in kwargs is required for secondary reads.
        # The read_preference should be set to a proper value, like SECONDARY_PREFERRED.
        mongo_client_class = pymongo.MongoReplicaSetClient
    else:
        # No 'replicaSet' in kwargs - so no secondary reads.
        mongo_client_class = pymongo.MongoClient

    # If read_preference is given as a name of a valid ReadPreference.<NAME> constant
    # such as "SECONDARY_PREFERRED", convert it. Otherwise pass it through unchanged.
    if 'read_preference' in kwargs:
        read_preference = getattr(ReadPreference, kwargs['read_preference'], None)
        if read_preference is not None:
            kwargs['read_preference'] = read_preference

    mongo_conn = pymongo.database.Database(
        mongo_client_class(
            host=host,
            port=port,
            tz_aware=tz_aware,
            document_class=dict,
            **kwargs
        ),
        db
    )

    if proxy:
        mongo_conn = MongoProxy(
            mongo_conn,
            wait_time=retry_wait_time
        )

    # If credentials were provided, authenticate the user.
    if user is not None and password is not None:
        mongo_conn.authenticate(user, password)

    return mongo_conn
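
A small demonstration of the read_preference conversion used above; the kwargs dictionary is just an illustration:

from pymongo import ReadPreference

kwargs = {'read_preference': 'SECONDARY_PREFERRED'}
converted = getattr(ReadPreference, kwargs['read_preference'], None)
if converted is not None:
    # The string name is swapped for the ReadPreference constant before connecting.
    kwargs['read_preference'] = converted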
Code example #9
def connect_to_mongodb(db,
                       host,
                       port=27017,
                       tz_aware=True,
                       user=None,
                       password=None,
                       retry_wait_time=0.1,
                       proxy=True,
                       **kwargs):
    """
    Returns a MongoDB Database connection, optionally wrapped in a proxy. The proxy
    handles AutoReconnect errors by retrying read operations, since these exceptions
    typically indicate a temporary step-down condition for MongoDB.
    """
    # The MongoReplicaSetClient class is deprecated in Mongo 3.x, in favor of using
    # the MongoClient class for all connections. Update/simplify this code when using
    # PyMongo 3.x.
    if kwargs.get('replicaSet'):
        # Enable reading from secondary nodes in the MongoDB replicaset by using the
        # MongoReplicaSetClient class.
        # The 'replicaSet' parameter in kwargs is required for secondary reads.
        # The read_preference should be set to a proper value, like SECONDARY_PREFERRED.
        mongo_client_class = pymongo.MongoReplicaSetClient
    else:
        # No 'replicaSet' in kwargs - so no secondary reads.
        mongo_client_class = pymongo.MongoClient

    # If read_preference is given as a name of a valid ReadPreference.<NAME> constant
    # such as "SECONDARY_PREFERRED", convert it. Otherwise pass it through unchanged.
    if 'read_preference' in kwargs:
        read_preference = getattr(ReadPreference, kwargs['read_preference'],
                                  None)
        if read_preference is not None:
            kwargs['read_preference'] = read_preference

    mongo_conn = pymongo.database.Database(
        mongo_client_class(host=host,
                           port=port,
                           tz_aware=tz_aware,
                           document_class=dict,
                           **kwargs), db)

    if proxy:
        mongo_conn = MongoProxy(mongo_conn, wait_time=retry_wait_time)

    # If credentials were provided, authenticate the user.
    if user is not None and password is not None:
        mongo_conn.authenticate(user, password)

    return mongo_conn
Code example #10
    def __init__(
        self, db, collection, host, port=27017, tz_aware=True, user=None, password=None,
        asset_collection=None, retry_wait_time=0.1, **kwargs
    ):
        """
        Create & open the connection, authenticate, and provide pointers to the collections
        """
        self.database = MongoProxy(
            pymongo.database.Database(
                pymongo.MongoClient(
                    host=host,
                    port=port,
                    tz_aware=tz_aware,
                    **kwargs
                ),
                db
            ),
            wait_time=retry_wait_time
        )

        if user is not None and password is not None:
            self.database.authenticate(user, password)

        self.course_index = self.database[collection + '.active_versions']
        self.structures = self.database[collection + '.structures']
        self.definitions = self.database[collection + '.definitions']

        # Every app has write access to the db (vs. having a flag to indicate read-only vs. write access).
        # Force mongo to report errors, at the expense of performance.
        # The pymongo docs are unclear here; the Java driver's WriteConcern docs explain the setting:
        # http://api.mongodb.org/java/2.10.1/com/mongodb/WriteConcern.html
        self.course_index.write_concern = {'w': 1}
        self.structures.write_concern = {'w': 1}
        self.definitions.write_concern = {'w': 1}
Code example #11
class MongoConnection(object):
    """
    Segregation of pymongo functions from the data modeling mechanisms for split modulestore.
    """
    def __init__(self,
                 db,
                 collection,
                 host,
                 port=27017,
                 tz_aware=True,
                 user=None,
                 password=None,
                 asset_collection=None,
                 retry_wait_time=0.1,
                 **kwargs):
        """
        Create & open the connection, authenticate, and provide pointers to the collections
        """
        self.database = MongoProxy(pymongo.database.Database(
            pymongo.MongoClient(host=host,
                                port=port,
                                tz_aware=tz_aware,
                                **kwargs), db),
                                   wait_time=retry_wait_time)

        if user is not None and password is not None:
            self.database.authenticate(user, password)

        self.course_index = self.database[collection + '.active_versions']
        self.structures = self.database[collection + '.structures']
        self.definitions = self.database[collection + '.definitions']

        # Every app has write access to the db (vs. having a flag to indicate read-only vs. write access).
        # Force mongo to report errors, at the expense of performance.
        # The pymongo docs are unclear here; the Java driver's WriteConcern docs explain the setting:
        # http://api.mongodb.org/java/2.10.1/com/mongodb/WriteConcern.html
        self.course_index.write_concern = {'w': 1}
        self.structures.write_concern = {'w': 1}
        self.definitions.write_concern = {'w': 1}

    def heartbeat(self):
        """
        Check that the db is reachable.
        """
        if self.database.connection.alive():
            return True
        else:
            raise HeartbeatFailure("Can't connect to {}".format(
                self.database.name))

    def get_structure(self, key):
        """
        Get the structure from the persistence mechanism whose id is the given key
        """
        return structure_from_mongo(self.structures.find_one({'_id': key}))

    @autoretry_read()
    def find_structures_by_id(self, ids):
        """
        Return all structures that are specified in ``ids``.

        Arguments:
            ids (list): A list of structure ids
        """
        return [
            structure_from_mongo(structure)
            for structure in self.structures.find({'_id': {
                '$in': ids
            }})
        ]

    @autoretry_read()
    def find_structures_derived_from(self, ids):
        """
        Return all structures that were immediately derived from a structure listed in ``ids``.

        Arguments:
            ids (list): A list of structure ids
        """
        return [
            structure_from_mongo(structure) for structure in
            self.structures.find({'previous_version': {
                '$in': ids
            }})
        ]

    @autoretry_read()
    def find_ancestor_structures(self, original_version, block_key):
        """
        Find all structures that originated from ``original_version`` that contain ``block_key``.

        Arguments:
            original_version (str or ObjectID): The id of a structure
            block_key (BlockKey): The id of the block in question
        """
        return [
            structure_from_mongo(structure)
            for structure in self.structures.find({
                'original_version': original_version,
                'blocks': {
                    '$elemMatch': {
                        'block_id': block_key.id,
                        'block_type': block_key.type,
                        'edit_info.update_version': {
                            '$exists': True,
                        },
                    },
                },
            })
        ]

    def insert_structure(self, structure):
        """
        Insert a new structure into the database.
        """
        self.structures.insert(structure_to_mongo(structure))

    def get_course_index(self, key, ignore_case=False):
        """
        Get the course_index from the persistence mechanism whose id is the given key
        """
        if ignore_case:
            query = {
                key_attr:
                re.compile(u'^{}$'.format(re.escape(getattr(key, key_attr))),
                           re.IGNORECASE)
                for key_attr in ('org', 'course', 'run')
            }
        else:
            query = {
                key_attr: getattr(key, key_attr)
                for key_attr in ('org', 'course', 'run')
            }
        return self.course_index.find_one(query)

    def find_matching_course_indexes(self, branch=None, search_targets=None):
        """
        Find the course_index matching particular conditions.

        Arguments:
            branch: If specified, this branch must exist in the returned courses
            search_targets: If specified, this must be a dictionary specifying field values
                that must exist in the search_targets of the returned courses
        """
        query = {}
        if branch is not None:
            query['versions.{}'.format(branch)] = {'$exists': True}

        if search_targets:
            for key, value in search_targets.iteritems():
                query['search_targets.{}'.format(key)] = value

        return self.course_index.find(query)

    def insert_course_index(self, course_index):
        """
        Create the course_index in the db
        """
        course_index['last_update'] = datetime.datetime.now(pytz.utc)
        self.course_index.insert(course_index)

    def update_course_index(self, course_index, from_index=None):
        """
        Update the db record for course_index.

        Arguments:
            from_index: If set, only update an index if it matches the one specified in `from_index`.
        """
        if from_index:
            query = {"_id": from_index["_id"]}
            # last_update not only tells us when this course was last updated but also helps
            # prevent collisions
            if 'last_update' in from_index:
                query['last_update'] = from_index['last_update']
        else:
            query = {
                'org': course_index['org'],
                'course': course_index['course'],
                'run': course_index['run'],
            }
        course_index['last_update'] = datetime.datetime.now(pytz.utc)
        self.course_index.update(
            query,
            course_index,
            upsert=False,
        )

    def delete_course_index(self, course_key):
        """
        Delete the course_index from the persistence mechanism whose id is the given course_key
        """
        query = {
            key_attr: getattr(course_key, key_attr)
            for key_attr in ('org', 'course', 'run')
        }
        return self.course_index.remove(query)

    def get_definition(self, key):
        """
        Get the definition from the persistence mechanism whose id is the given key
        """
        return self.definitions.find_one({'_id': key})

    def get_definitions(self, definitions):
        """
        Retrieve all definitions listed in `definitions`.
        """
        return self.definitions.find({'_id': {'$in': definitions}})

    def insert_definition(self, definition):
        """
        Create the definition in the db
        """
        self.definitions.insert(definition)

    def ensure_indexes(self):
        """
        Ensure that all indexes needed by this modulestore are created, or raise
        an exception if unable to.

        This method is intended for use by tests and administrative commands, and not
        to be run during server startup.
        """
        self.course_index.create_index([('org', pymongo.ASCENDING),
                                        ('course', pymongo.ASCENDING),
                                        ('run', pymongo.ASCENDING)],
                                       unique=True)
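
Several read methods above are wrapped in @autoretry_read(). A plausible shape for such a decorator, inferred from its name and the AutoReconnect retry behavior described elsewhere on this page rather than taken from the project, is:

import functools
import time

from pymongo.errors import AutoReconnect

def autoretry_read(wait=0.1, retries=5):
    # Illustrative decorator: retry a read-only method when AutoReconnect is raised.
    def decorator(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            for attempt in range(retries):
                try:
                    return func(*args, **kwargs)
                except AutoReconnect:
                    if attempt == retries - 1:
                        raise
                    time.sleep(wait)
        return wrapper
    return decorator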
Code example #12
class MongoConnection(object):
    """
    Segregation of pymongo functions from the data modeling mechanisms for split modulestore.
    """
    def __init__(
        self, db, collection, host, port=27017, tz_aware=True, user=None, password=None,
        asset_collection=None, retry_wait_time=0.1, **kwargs
    ):
        """
        Create & open the connection, authenticate, and provide pointers to the collections
        """
        if kwargs.get('replicaSet') is None:
            kwargs.pop('replicaSet', None)
            mongo_class = pymongo.MongoClient
        else:
            mongo_class = pymongo.MongoReplicaSetClient
        _client = mongo_class(
            host=host,
            port=port,
            tz_aware=tz_aware,
            **kwargs
        )
        self.database = MongoProxy(
            pymongo.database.Database(_client, db),
            wait_time=retry_wait_time
        )

        if user is not None and password is not None:
            self.database.authenticate(user, password)

        self.course_index = self.database[collection + '.active_versions']
        self.structures = self.database[collection + '.structures']
        self.definitions = self.database[collection + '.definitions']

        # Every app has write access to the db (vs. having a flag to indicate read-only vs. write access).
        # Force mongo to report errors, at the expense of performance.
        # The pymongo docs are unclear here; the Java driver's WriteConcern docs explain the setting:
        # http://api.mongodb.org/java/2.10.1/com/mongodb/WriteConcern.html
        self.course_index.write_concern = {'w': 1}
        self.structures.write_concern = {'w': 1}
        self.definitions.write_concern = {'w': 1}

    def heartbeat(self):
        """
        Check that the db is reachable.
        """
        if self.database.connection.alive():
            return True
        else:
            raise HeartbeatFailure("Can't connect to {}".format(self.database.name))

    def get_structure(self, key, course_context=None):
        """
        Get the structure from the persistence mechanism whose id is the given key.

        This method will use a cached version of the structure if it is available.
        """
        with TIMER.timer("get_structure", course_context) as tagger_get_structure:
            cache = CourseStructureCache()

            structure = cache.get(key, course_context)
            tagger_get_structure.tag(from_cache=str(bool(structure)).lower())
            if not structure:
                # Always log cache misses, because they are unexpected
                tagger_get_structure.sample_rate = 1

                with TIMER.timer("get_structure.find_one", course_context) as tagger_find_one:
                    doc = self.structures.find_one({'_id': key})
                    tagger_find_one.measure("blocks", len(doc['blocks']))
                    structure = structure_from_mongo(doc, course_context)
                    tagger_find_one.sample_rate = 1

                cache.set(key, structure, course_context)

            return structure

    @autoretry_read()
    def find_structures_by_id(self, ids, course_context=None):
        """
        Return all structures that are specified in ``ids``.

        Arguments:
            ids (list): A list of structure ids
        """
        with TIMER.timer("find_structures_by_id", course_context) as tagger:
            tagger.measure("requested_ids", len(ids))
            docs = [
                structure_from_mongo(structure, course_context)
                for structure in self.structures.find({'_id': {'$in': ids}})
            ]
            tagger.measure("structures", len(docs))
            return docs

    @autoretry_read()
    def find_structures_derived_from(self, ids, course_context=None):
        """
        Return all structures that were immediately derived from a structure listed in ``ids``.

        Arguments:
            ids (list): A list of structure ids
        """
        with TIMER.timer("find_structures_derived_from", course_context) as tagger:
            tagger.measure("base_ids", len(ids))
            docs = [
                structure_from_mongo(structure, course_context)
                for structure in self.structures.find({'previous_version': {'$in': ids}})
            ]
            tagger.measure("structures", len(docs))
            return docs

    @autoretry_read()
    def find_ancestor_structures(self, original_version, block_key, course_context=None):
        """
        Find all structures that originated from ``original_version`` that contain ``block_key``.

        Arguments:
            original_version (str or ObjectID): The id of a structure
            block_key (BlockKey): The id of the block in question
        """
        with TIMER.timer("find_ancestor_structures", course_context) as tagger:
            docs = [
                structure_from_mongo(structure, course_context)
                for structure in self.structures.find({
                    'original_version': original_version,
                    'blocks': {
                        '$elemMatch': {
                            'block_id': block_key.id,
                            'block_type': block_key.type,
                            'edit_info.update_version': {
                                '$exists': True,
                            },
                        },
                    },
                })
            ]
            tagger.measure("structures", len(docs))
            return docs

    def insert_structure(self, structure, course_context=None):
        """
        Insert a new structure into the database.
        """
        with TIMER.timer("insert_structure", course_context) as tagger:
            tagger.measure("blocks", len(structure["blocks"]))
            self.structures.insert(structure_to_mongo(structure, course_context))

    def get_course_index(self, key, ignore_case=False):
        """
        Get the course_index from the persistence mechanism whose id is the given key
        """
        with TIMER.timer("get_course_index", key):
            if ignore_case:
                query = {
                    key_attr: re.compile(u'^{}$'.format(re.escape(getattr(key, key_attr))), re.IGNORECASE)
                    for key_attr in ('org', 'course', 'run')
                }
            else:
                query = {
                    key_attr: getattr(key, key_attr)
                    for key_attr in ('org', 'course', 'run')
                }
            return self.course_index.find_one(query)

    def find_matching_course_indexes(self, branch=None, search_targets=None, org_target=None, course_context=None):
        """
        Find the course_index matching particular conditions.

        Arguments:
            branch: If specified, this branch must exist in the returned courses
            search_targets: If specified, this must be a dictionary specifying field values
                that must exist in the search_targets of the returned courses
            org_target: If specified, this is an ORG filter so that only course indexes are
                returned for the specified ORG
        """
        with TIMER.timer("find_matching_course_indexes", course_context):
            query = {}
            if branch is not None:
                query['versions.{}'.format(branch)] = {'$exists': True}

            if search_targets:
                for key, value in search_targets.iteritems():
                    query['search_targets.{}'.format(key)] = value

            if org_target:
                query['org'] = org_target

            return self.course_index.find(query)

    def insert_course_index(self, course_index, course_context=None):
        """
        Create the course_index in the db
        """
        with TIMER.timer("insert_course_index", course_context):
            course_index['last_update'] = datetime.datetime.now(pytz.utc)
            self.course_index.insert(course_index)

    def update_course_index(self, course_index, from_index=None, course_context=None):
        """
        Update the db record for course_index.

        Arguments:
            from_index: If set, only update an index if it matches the one specified in `from_index`.
        """
        with TIMER.timer("update_course_index", course_context):
            if from_index:
                query = {"_id": from_index["_id"]}
                # last_update not only tells us when this course was last updated but also helps
                # prevent collisions
                if 'last_update' in from_index:
                    query['last_update'] = from_index['last_update']
            else:
                query = {
                    'org': course_index['org'],
                    'course': course_index['course'],
                    'run': course_index['run'],
                }
            course_index['last_update'] = datetime.datetime.now(pytz.utc)
            self.course_index.update(query, course_index, upsert=False,)

    def delete_course_index(self, course_key):
        """
        Delete the course_index from the persistence mechanism whose id is the given course_key
        """
        with TIMER.timer("delete_course_index", course_key):
            query = {
                key_attr: getattr(course_key, key_attr)
                for key_attr in ('org', 'course', 'run')
            }
            return self.course_index.remove(query)

    def get_definition(self, key, course_context=None):
        """
        Get the definition from the persistence mechanism whose id is the given key
        """
        with TIMER.timer("get_definition", course_context) as tagger:
            definition = self.definitions.find_one({'_id': key})
            tagger.measure("fields", len(definition['fields']))
            tagger.tag(block_type=definition['block_type'])
            return definition

    def get_definitions(self, definitions, course_context=None):
        """
        Retrieve all definitions listed in `definitions`.
        """
        with TIMER.timer("get_definitions", course_context) as tagger:
            tagger.measure('definitions', len(definitions))
            definitions = self.definitions.find({'_id': {'$in': definitions}})
            return definitions

    def insert_definition(self, definition, course_context=None):
        """
        Create the definition in the db
        """
        with TIMER.timer("insert_definition", course_context) as tagger:
            tagger.measure('fields', len(definition['fields']))
            tagger.tag(block_type=definition['block_type'])
            self.definitions.insert(definition)

    def ensure_indexes(self):
        """
        Ensure that all indexes needed by this modulestore are created, or raise
        an exception if unable to.

        This method is intended for use by tests and administrative commands, and not
        to be run during server startup.
        """
        self.course_index.create_index(
            [
                ('org', pymongo.ASCENDING),
                ('course', pymongo.ASCENDING),
                ('run', pymongo.ASCENDING)
            ],
            unique=True
        )
Code example #13
 def _connect_replica_set(self):
     return MongoProxy(
         pymongo.MongoReplicaSetClient(self.mongo_uri, replicaSet=self._replicaSet))
Code example #14
 def _connect_server(self):
     return MongoProxy(pymongo.MongoClient(self.mongo_uri))
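
A hedged sketch of how a caller might choose between the two helpers above, assuming the surrounding class exposes mongo_uri and _replicaSet as in the snippets (the _connect name is hypothetical):

 def _connect(self):
     # Illustrative dispatcher: use the replica-set client only when a replica set is configured.
     if self._replicaSet:
         return self._connect_replica_set()
     return self._connect_server()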
Code example #15
File: mongo_connection.py  Project: KhaledF/edx-plat
class MongoConnection(object):
    """
    Segregation of pymongo functions from the data modeling mechanisms for split modulestore.
    """
    def __init__(
        self, db, collection, host, port=27017, tz_aware=True, user=None, password=None,
        asset_collection=None, retry_wait_time=0.1, **kwargs
    ):
        """
        Create & open the connection, authenticate, and provide pointers to the collections
        """
        self.database = MongoProxy(
            pymongo.database.Database(
                pymongo.MongoClient(
                    host=host,
                    port=port,
                    tz_aware=tz_aware,
                    **kwargs
                ),
                db
            ),
            wait_time=retry_wait_time
        )

        # Remove when adding official Split support for asset metadata storage.
        if asset_collection:
            pass

        if user is not None and password is not None:
            self.database.authenticate(user, password)

        self.course_index = self.database[collection + '.active_versions']
        self.structures = self.database[collection + '.structures']
        self.definitions = self.database[collection + '.definitions']

        # Every app has write access to the db (vs. having a flag to indicate read-only vs. write access).
        # Force mongo to report errors, at the expense of performance.
        # The pymongo docs are unclear here; the Java driver's WriteConcern docs explain the setting:
        # http://api.mongodb.org/java/2.10.1/com/mongodb/WriteConcern.html
        self.course_index.write_concern = {'w': 1}
        self.structures.write_concern = {'w': 1}
        self.definitions.write_concern = {'w': 1}

    def heartbeat(self):
        """
        Check that the db is reachable.
        """
        if self.database.connection.alive():
            return True
        else:
            raise HeartbeatFailure("Can't connect to {}".format(self.database.name))

    def get_structure(self, key):
        """
        Get the structure from the persistence mechanism whose id is the given key
        """
        return structure_from_mongo(self.structures.find_one({'_id': key}))

    @autoretry_read()
    def find_structures_by_id(self, ids):
        """
        Return all structures that are specified in ``ids``.

        Arguments:
            ids (list): A list of structure ids
        """
        return [structure_from_mongo(structure) for structure in self.structures.find({'_id': {'$in': ids}})]

    @autoretry_read()
    def find_structures_derived_from(self, ids):
        """
        Return all structures that were immediately derived from a structure listed in ``ids``.

        Arguments:
            ids (list): A list of structure ids
        """
        return [structure_from_mongo(structure) for structure in self.structures.find({'previous_version': {'$in': ids}})]

    @autoretry_read()
    def find_ancestor_structures(self, original_version, block_key):
        """
        Find all structures that originated from ``original_version`` that contain ``block_key``.

        Arguments:
            original_version (str or ObjectID): The id of a structure
            block_key (BlockKey): The id of the block in question
        """
        return [structure_from_mongo(structure) for structure in self.structures.find({
            'original_version': original_version,
            'blocks': {
                '$elemMatch': {
                    'block_id': block_key.id,
                    'block_type': block_key.type,
                    'edit_info.update_version': {'$exists': True},
                }
            }
        })]

    def insert_structure(self, structure):
        """
        Insert a new structure into the database.
        """
        self.structures.insert(structure_to_mongo(structure))

    def get_course_index(self, key, ignore_case=False):
        """
        Get the course_index from the persistence mechanism whose id is the given key
        """
        if ignore_case:
            query = {
                key_attr: re.compile(u'^{}$'.format(re.escape(getattr(key, key_attr))), re.IGNORECASE)
                for key_attr in ('org', 'course', 'run')
            }
        else:
            query = {
                key_attr: getattr(key, key_attr)
                for key_attr in ('org', 'course', 'run')
            }
        return self.course_index.find_one(query)

    def find_matching_course_indexes(self, branch=None, search_targets=None):
        """
        Find the course_index matching particular conditions.

        Arguments:
            branch: If specified, this branch must exist in the returned courses
            search_targets: If specified, this must be a dictionary specifying field values
                that must exist in the search_targets of the returned courses
        """
        query = {}
        if branch is not None:
            query['versions.{}'.format(branch)] = {'$exists': True}

        if search_targets:
            for key, value in search_targets.iteritems():
                query['search_targets.{}'.format(key)] = value

        return self.course_index.find(query)

    def insert_course_index(self, course_index):
        """
        Create the course_index in the db
        """
        course_index['last_update'] = datetime.datetime.now(pytz.utc)
        self.course_index.insert(course_index)

    def update_course_index(self, course_index, from_index=None):
        """
        Update the db record for course_index.

        Arguments:
            from_index: If set, only update an index if it matches the one specified in `from_index`.
        """
        if from_index:
            query = {"_id": from_index["_id"]}
            # last_update not only tells us when this course was last updated but also helps
            # prevent collisions
            if 'last_update' in from_index:
                query['last_update'] = from_index['last_update']
        else:
            query = {
                'org': course_index['org'],
                'course': course_index['course'],
                'run': course_index['run'],
            }
        course_index['last_update'] = datetime.datetime.now(pytz.utc)
        self.course_index.update(query, course_index, upsert=False,)

    def delete_course_index(self, course_index):
        """
        Delete the course_index from the persistence mechanism whose id is the given course_index
        """
        return self.course_index.remove({
            'org': course_index['org'],
            'course': course_index['course'],
            'run': course_index['run'],
        })

    def get_definition(self, key):
        """
        Get the definition from the persistence mechanism whose id is the given key
        """
        return self.definitions.find_one({'_id': key})

    def get_definitions(self, definitions):
        """
        Retrieve all definitions listed in `definitions`.
        """
        return self.definitions.find({'_id': {'$in': definitions}})

    def insert_definition(self, definition):
        """
        Create the definition in the db
        """
        self.definitions.insert(definition)

    def ensure_indexes(self):
        """
        Ensure that all indexes needed by this modulestore are created, or raise
        an exception if unable to.

        This method is intended for use by tests and administrative commands, and not
        to be run during server startup.
        """
        self.course_index.create_index(
            [
                ('org', pymongo.ASCENDING),
                ('course', pymongo.ASCENDING),
                ('run', pymongo.ASCENDING)
            ],
            unique=True
        )
Code example #16
File: tests.py  Project: GbalsaC/bitnamiP
 def test_no_exceptions1(self):
     mongo_connection = MongoProxy(MongoConnection(), methods_needing_retry={MongoConnection: ['find', ]})
     self.assertEqual(mongo_connection.find(), "Finished.")
     self.assertEqual(mongo_connection.call_count, 1)