Example #1
File: dbref.py Project: Yucie/Arianrhod
 def __repr__(self):
     extra = "".join([", %s=%r" % (k, v)
                      for k, v in iteritems(self.__kwargs)])
     if self.database is None:
         return "DBRef(%r, %r%s)" % (self.collection, self.id, extra)
     return "DBRef(%r, %r, %r%s)" % (self.collection, self.id,
                                     self.database, extra)
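A quick sanity check of the repr format above (a hedged sketch; it assumes a standard bson install where DBRef is importable from bson.dbref):

from bson.dbref import DBRef

# No database: two positional fields plus any extra keyword fields.
print(repr(DBRef("users", 42, extra="x")))    # DBRef('users', 42, extra='x')
# With a database: it is appended as the third positional field.
print(repr(DBRef("users", 42, "app_db")))     # DBRef('users', 42, 'app_db')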
Example #2
    def _deepcopy(self, x, memo=None):
        """Deepcopy helper for the data dictionary or list.

        Regular expressions cannot be deep copied but as they are immutable we
        don't have to copy them when cloning.
        """
        if not hasattr(x, 'items'):
            y, is_list, iterator = [], True, enumerate(x)
        else:
            y, is_list, iterator = {}, False, iteritems(x)

        if memo is None:
            memo = {}
        val_id = id(x)
        if val_id in memo:
            return memo.get(val_id)
        memo[val_id] = y

        for key, value in iterator:
            if isinstance(value, (dict, list)) and not isinstance(value, SON):
                value = self._deepcopy(value, memo)
            elif not isinstance(value, RE_TYPE):
                value = copy.deepcopy(value, memo)

            if is_list:
                y.append(value)
            else:
                if not isinstance(key, RE_TYPE):
                    key = copy.deepcopy(key, memo)
                y[key] = value
        return y
Example #3
def get_validated_options(options, warn=True):
    """Validate each entry in options and raise a warning if it is not valid.
    Returns a copy of options with invalid entries removed.

    :Parameters:
        - `options`: A dict of MongoDB URI options.
        - `warn` (optional): If ``True`` then warnings will be logged and
          invalid options will be ignored. Otherwise, invalid options will
          cause errors.
    """
    validated_options = {}
    for opt, value in iteritems(options):
        lower = opt.lower()
        try:
            validator = URI_OPTIONS_VALIDATOR_MAP.get(
                lower, raise_config_error)
            value = validator(opt, value)
        except (ValueError, TypeError, ConfigurationError) as exc:
            if warn:
                warnings.warn(str(exc))
            else:
                raise
        else:
            validated_options[lower] = value
    return validated_options
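A hedged usage sketch (it assumes get_validated_options is importable from pymongo.common, as in the PyMongo versions these snippets come from; return types may differ in newer releases):

import warnings
from pymongo.common import get_validated_options

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    opts = get_validated_options({"maxPoolSize": 50, "notAnOption": "x"}, warn=True)

print(opts)          # {'maxpoolsize': 50} -- keys lowercased, invalid entry dropped
print(len(caught))   # 1 -- a single warning about the unknown option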
Example #4
def _index_document(index_list):
    """Helper to generate an index specifying document.

    Takes a list of (key, direction) pairs.
    """
    if isinstance(index_list, collections.Mapping):
        raise TypeError("passing a dict to sort/create_index/hint is not "
                        "allowed - use a list of tuples instead. did you "
                        "mean %r?" % list(iteritems(index_list)))
    elif not isinstance(index_list, (list, tuple)):
        raise TypeError("must use a list of (key, direction) pairs, "
                        "not: " + repr(index_list))
    if not len(index_list):
        raise ValueError("key_or_list must not be the empty list")

    index = SON()
    for (key, value) in index_list:
        if not isinstance(key, string_type):
            raise TypeError("first item in each key pair must be a string")
        if not isinstance(value, (string_type, int, collections.Mapping)):
            raise TypeError("second item in each key pair must be 1, -1, "
                            "'2d', 'geoHaystack', or another valid MongoDB "
                            "index specifier.")
        index[key] = value
    return index
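The reason a plain dict is rejected is ordering: the index specification must preserve key order, which is why the helper builds a SON from (key, direction) pairs. A small sketch of the accepted shape (assuming only that bson.son.SON is available):

from bson.son import SON

pairs = [("name", 1), ("loc", "2d")]   # what you pass to sort/create_index/hint
index = SON(pairs)                     # ordered mapping: name first, then loc
print(list(index.items()))             # [('name', 1), ('loc', '2d')]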
Example #5
def _encode_mapping(name, value, check_keys, opts):
    """Encode a mapping type."""
    if _raw_document_class(value):
        return b'\x03' + name + value.raw
    data = b"".join([_element_to_bson(key, val, check_keys, opts)
                     for key, val in iteritems(value)])
    return b"\x03" + name + _PACK_INT(len(data) + 5) + data + b"\x00"
    def run_scenario(self):
        # Set up
        self.setUpCluster(scenario_def)
        try:
            with get_change_stream(
                self.client, scenario_def, test
            ) as change_stream:
                for operation in test["operations"]:
                    # Run specified operations
                    run_operation(self.client, operation)
                num_expected_changes = len(test["result"]["success"])
                changes = [
                    change_stream.next() for _ in range(num_expected_changes)
                ]

        except OperationFailure as exc:
            if test["result"].get("error") is None:
                raise
            expected_code = test["result"]["error"]["code"]
            self.assertEqual(exc.code, expected_code)

        else:
            # Check for expected output from change streams
            for change, expected_changes in zip(changes, test["result"]["success"]):
                assert_dict_is_subset(change, expected_changes)
            self.assertEqual(len(changes), len(test["result"]["success"]))
        
        finally:
            # Check for expected events
            results = self.listener.results
            for expectation in test["expectations"]:
                for idx, (event_type, event_desc) in enumerate(iteritems(expectation)):
                    results_key = event_type.split("_")[1]
                    event = results[results_key][idx] if len(results[results_key]) > idx else None
                    check_event(event, event_desc)
    def run_operation(self, collection, test):
        # Iterate over all operations.
        for opdef in test['operations']:
            # Convert command from CamelCase to pymongo.collection method.
            operation = camel_to_snake(opdef['name'])

            # Get command handle on target entity (collection/database).
            target_object = opdef.get('object', 'collection')
            if target_object == 'database':
                cmd = getattr(collection.database, operation)
            elif target_object == 'collection':
                collection = collection.with_options(**dict(
                    parse_collection_options(opdef.get(
                        'collectionOptions', {}))))
                cmd = getattr(collection, operation)
            else:
                self.fail("Unknown object name %s" % (target_object,))

            # Convert arguments to snake_case and handle special cases.
            arguments = opdef['arguments']
            options = arguments.pop("options", {})

            for option_name in options:
                arguments[camel_to_snake(option_name)] = options[option_name]

            if operation == "bulk_write":
                # Parse each request into a bulk write model.
                requests = []
                for request in arguments["requests"]:
                    bulk_model = camel_to_upper_camel(request["name"])
                    bulk_class = getattr(operations, bulk_model)
                    bulk_arguments = camel_to_snake_args(request["arguments"])
                    requests.append(bulk_class(**bulk_arguments))
                arguments["requests"] = requests
            else:
                for arg_name in list(arguments):
                    c2s = camel_to_snake(arg_name)
                    # PyMongo accepts sort as list of tuples.
                    if arg_name == "sort":
                        sort_dict = arguments[arg_name]
                        arguments[arg_name] = list(iteritems(sort_dict))
                    # Named "key" instead not fieldName.
                    if arg_name == "fieldName":
                        arguments["key"] = arguments.pop(arg_name)
                    # Aggregate uses "batchSize", while find uses batch_size.
                    elif arg_name == "batchSize" and operation == "aggregate":
                        continue
                    # Requires boolean returnDocument.
                    elif arg_name == "returnDocument":
                        arguments[c2s] = arguments[arg_name] == "After"
                    else:
                        arguments[c2s] = arguments.pop(arg_name)

            if opdef.get('error') is True:
                with self.assertRaises(PyMongoError):
                    cmd(**arguments)
            else:
                result = cmd(**arguments)
                self.check_result(opdef.get('result'), result)
Example #8
    def validate_collection(self, name_or_collection,
                            scandata=False, full=False):
        """Validate a collection.

        Returns a dict of validation info. Raises CollectionInvalid if
        validation fails.

        With MongoDB < 1.9 the result dict will include a `result` key
        with a string value that represents the validation results. With
        MongoDB >= 1.9 the `result` key no longer exists and the results
        are split into individual fields in the result dict.

        :Parameters:
          - `name_or_collection`: A Collection object or the name of a
            collection to validate.
          - `scandata`: Do extra checks beyond checking the overall
            structure of the collection.
          - `full`: Have the server do a more thorough scan of the
            collection. Use with `scandata` for a thorough scan
            of the structure of the collection and the individual
            documents. Ignored in MongoDB versions before 1.9.
        """
        name = name_or_collection
        if isinstance(name, Collection):
            name = name.name

        if not isinstance(name, string_type):
            raise TypeError("name_or_collection must be an instance of "
                            "%s or Collection" % (string_type.__name__,))

        result = self.command("validate", _unicode(name),
                              scandata=scandata, full=full)

        valid = True
        # Pre 1.9 results
        if "result" in result:
            info = result["result"]
            if info.find("exception") != -1 or info.find("corrupt") != -1:
                raise CollectionInvalid("%s invalid: %s" % (name, info))
        # Sharded results
        elif "raw" in result:
            for _, res in iteritems(result["raw"]):
                if "result" in res:
                    info = res["result"]
                    if (info.find("exception") != -1 or
                                info.find("corrupt") != -1):
                        raise CollectionInvalid("%s invalid: "
                                                "%s" % (name, info))
                elif not res.get("valid", False):
                    valid = False
                    break
        # Post 1.9 non-sharded results.
        elif not result.get("valid", False):
            valid = False

        if not valid:
            raise CollectionInvalid("%s invalid: %r" % (name, result))

        return result
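A hedged usage sketch against a live server (it assumes a local mongod and a collection named "things"; the shape of the returned dict varies by server version, as the docstring notes):

from pymongo import MongoClient
from pymongo.errors import CollectionInvalid

client = MongoClient()                      # assumes mongod on localhost:27017
try:
    info = client.test_db.validate_collection("things", scandata=True, full=True)
    print(info.get("valid"))                # True on modern servers
except CollectionInvalid as exc:
    print("validation failed:", exc)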
Example #9
 def transform_value(value):
     if isinstance(value, list):
         return [transform_value(v) for v in value]
     elif isinstance(value, abc.Mapping):
         return dict([
             (k, transform_value(v))
             for k, v in iteritems(value)])
     else:
         return value
def validate_options(opts):
    """Validates and normalizes options passed in a MongoDB URI.

    Returns a new dictionary of validated and normalized options.

    :Parameters:
        - `opts`: A dict of MongoDB URI options.
    """
    return dict([_validate(opt, val) for opt, val in iteritems(opts)])
def _normalize_options(options):
    """Renames keys in the options dictionary to their internally-used
    names."""
    normalized_options = {}
    for key, value in iteritems(options):
        optname = str(key).lower()
        intname = INTERNAL_URI_OPTION_NAME_MAP.get(optname, key)
        normalized_options[intname] = options[key]
    return normalized_options
def check_event(event, expectation_dict):
    if event is None:
        raise AssertionError
    for key, value in iteritems(expectation_dict):
        if isinstance(value, dict):
            assert_dict_is_subset(
                getattr(event, key), value
            )
        else:
            assert getattr(event, key) == value
Example #13
def _json_convert(obj):
    """Recursive helper method that converts BSON types so they can be
    converted into json.
    """
    if hasattr(obj, 'iteritems') or hasattr(obj, 'items'):  # PY3 support
        return SON(((k, _json_convert(v)) for k, v in iteritems(obj)))
    elif hasattr(obj, '__iter__') and not isinstance(obj, (text_type, bytes)):
        return list((_json_convert(v) for v in obj))
    try:
        return default(obj)
    except TypeError:
        return obj
def assert_dict_is_subset(superdict, subdict):
    """Check that subdict is a subset of superdict."""
    exempt_fields = ["documentKey", "_id"]
    for key, value in iteritems(subdict):
        if key not in superdict:
            assert False
        if isinstance(value, dict):
            assert_dict_is_subset(superdict[key], value)
            continue
        if key in exempt_fields:
            superdict[key] = "42"
        assert superdict[key] == value
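A small sketch of how the helper is meant to be called: extra keys in the actual change document are ignored, and fields listed in exempt_fields are normalized to the "42" placeholder used by the spec files. The sample documents below are hypothetical:

actual_change = {
    "operationType": "insert",
    "fullDocument": {"x": 1, "_id": "5f1e..."},
    "ns": {"db": "test", "coll": "things"},
}
expected = {"operationType": "insert", "fullDocument": {"x": 1}}
assert_dict_is_subset(actual_change, expected)   # passes; "ns" and "_id" are not checked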
Example #15
 def _clone(self, deepcopy=True):
     """Internal clone helper."""
     clone = self._clone_base()
     values_to_clone = ("spec", "projection", "skip", "limit",
                        "max_time_ms", "max_await_time_ms", "comment",
                        "max", "min", "ordering", "explain", "hint",
                        "batch_size", "max_scan", "manipulate",
                        "query_flags", "modifiers", "collation")
     data = dict((k, v) for k, v in iteritems(self.__dict__)
                 if k.startswith('_Cursor__') and k[9:] in values_to_clone)
     if deepcopy:
         data = self._deepcopy(data)
     clone.__dict__.update(data)
     return clone
Example #16
def get_validated_options(options):
    """Validate each entry in options and raise a warning if it is not valid.
    Returns a copy of options with invalid entries removed.
    """
    validated_options = {}
    for opt, value in iteritems(options):
        lower = opt.lower()
        try:
            validator = VALIDATORS.get(lower, raise_config_error)
            value = validator(opt, value)
        except (ValueError, ConfigurationError) as exc:
            warnings.warn(str(exc))
        else:
            validated_options[lower] = value
    return validated_options
Example #17
    def __init__(self, username, password, database, options):
        options = dict([validate(opt, val) for opt, val in iteritems(options)])

        self.__codec_options = _parse_codec_options(options)
        self.__credentials = _parse_credentials(
            username, password, database, options)
        self.__local_threshold_ms = options.get('localthresholdms', 15)
        # self.__server_selection_timeout is in seconds. Must use full name for
        # common.SERVER_SELECTION_TIMEOUT because it is set directly by tests.
        self.__server_selection_timeout = options.get(
            'serverselectiontimeoutms', common.SERVER_SELECTION_TIMEOUT)
        self.__pool_options = _parse_pool_options(options)
        self.__read_preference = _parse_read_preference(options)
        self.__replica_set_name = options.get('replicaset')
        self.__write_concern = _parse_write_concern(options)
Example #18
File: __init__.py Project: songjundev/b
def _dict_to_bson(doc, check_keys, opts, top_level=True):
    """Encode a document to BSON."""
    try:
        elements = []
        if top_level and "_id" in doc:
            elements.append(_name_value_to_bson(b"_id\x00", doc["_id"],
                                                check_keys, opts))
        for (key, value) in iteritems(doc):
            if not top_level or key != "_id":
                elements.append(_element_to_bson(key, value,
                                                 check_keys, opts))
    except AttributeError:
        raise TypeError("encoder expected a mapping type but got: %r" % (doc,))

    encoded = b"".join(elements)
    return _PACK_INT(len(encoded) + 5) + encoded + b"\x00"
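A hedged check of the _id-first behaviour for top-level documents (it assumes the classic BSON/SON API used elsewhere in these snippets; decoding into a SON keeps the on-the-wire key order):

from bson import BSON, CodecOptions
from bson.son import SON

raw = BSON.encode({"a": 1, "_id": 2})
decoded = BSON(raw).decode(CodecOptions(document_class=SON))
print(list(decoded))        # ['_id', 'a'] -- _id was moved to the front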
    def run_scenario(self):
            # Load data.
            assert scenario_def['data'], "tests must have non-empty data"
            self.db.test.drop()
            self.db.test.insert_many(scenario_def['data'])

            # Convert command from CamelCase to pymongo.collection method.
            operation = camel_to_snake(test['operation']['name'])
            cmd = getattr(self.db.test, operation)

            # Convert arguments to snake_case and handle special cases.
            arguments = test['operation']['arguments']
            for arg_name in list(arguments):
                c2s = camel_to_snake(arg_name)
                # PyMongo accepts sort as list of tuples. Asserting len=1
                # because ordering dicts from JSON in 2.6 is unwieldy.
                if arg_name == "sort":
                    sort_dict = arguments[arg_name]
                    assert len(sort_dict) == 1, 'test can only have 1 sort key'
                    arguments[arg_name] = list(iteritems(sort_dict))
                # Named "key" instead not fieldName.
                if arg_name == "fieldName":
                    arguments["key"] = arguments.pop(arg_name)
                # Aggregate uses "batchSize", while find uses batch_size.
                elif arg_name == "batchSize" and operation == "aggregate":
                    continue
                # Requires boolean returnDocument.
                elif arg_name == "returnDocument":
                    arguments[c2s] = arguments[arg_name] == "After"
                else:
                    arguments[c2s] = arguments.pop(arg_name)

            result = cmd(**arguments)

            # Assert result is expected, excluding the $out aggregation test.
            if not ignore_result:
                check_result(test['outcome'].get('result'), result)

            # Assert final state is expected.
            expected_c = test['outcome'].get('collection')
            if expected_c is not None:
                expected_name = expected_c.get('name')
                if expected_name is not None:
                    db_coll = self.db[expected_name]
                else:
                    db_coll = self.db.test
                self.assertEqual(list(db_coll.find()), expected_c['data'])
def _handle_option_deprecations(options):
    """Issue appropriate warnings when deprecated options are present in the
    options dictionary. Removes deprecated option key, value pairs if the
    options dictionary is found to also have the renamed option."""
    undeprecated_options = _CaseInsensitiveDictionary()
    for key, value in iteritems(options):
        optname = str(key).lower()
        if optname in URI_OPTIONS_DEPRECATION_MAP:
            renamed_key = URI_OPTIONS_DEPRECATION_MAP[optname]
            if renamed_key.lower() in options:
                warnings.warn("Deprecated option '%s' ignored in favor of "
                              "'%s'." % (str(key), renamed_key))
                continue
            warnings.warn("Option '%s' is deprecated, use '%s' instead." % (
                          str(key), renamed_key))
        undeprecated_options[str(key)] = value
    return undeprecated_options
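A standalone sketch of the rename behaviour above (the map below is a hypothetical stand-in for URI_OPTIONS_DEPRECATION_MAP, and the helper name is made up for illustration):

import warnings

DEPRECATION_MAP = {"wtimeout": "wTimeoutMS"}   # hypothetical subset

def handle_deprecations(options):
    kept = {}
    for key, value in options.items():
        renamed = DEPRECATION_MAP.get(key.lower())
        if renamed:
            if renamed.lower() in (k.lower() for k in options):
                # Both spellings supplied: drop the deprecated one.
                warnings.warn("Deprecated option %r ignored in favor of %r."
                              % (key, renamed))
                continue
            warnings.warn("Option %r is deprecated, use %r instead." % (key, renamed))
        kept[key] = value
    return kept

print(handle_deprecations({"wtimeout": 100, "wTimeoutMS": 200}))   # {'wTimeoutMS': 200}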
Example #21
def validate_options(opts, warn=False):
    """Validates and normalizes options passed in a MongoDB URI.

    Returns a new dictionary of validated and normalized options. If warn is
    False then errors will be thrown for invalid options, otherwise they will
    be ignored and a warning will be issued.

    :Parameters:
        - `opts`: A dict of MongoDB URI options.
        - `warn` (optional): If ``True`` then warnings will be logged and
          invalid options will be ignored. Otherwise invalid options will
          cause errors.
    """
    if warn:
        return get_validated_options(opts)
    else:
        return dict([_validate(opt, val) for opt, val in iteritems(opts)])
def run_operation(collection, test):
    # Convert command from CamelCase to pymongo.collection method.
    operation = camel_to_snake(test['operation']['name'])
    cmd = getattr(collection, operation)

    # Convert arguments to snake_case and handle special cases.
    arguments = test['operation']['arguments']
    options = arguments.pop("options", {})
    for option_name in options:
        arguments[camel_to_snake(option_name)] = options[option_name]
    if operation == "bulk_write":
        # Parse each request into a bulk write model.
        requests = []
        for request in arguments["requests"]:
            bulk_model = camel_to_upper_camel(request["name"])
            bulk_class = getattr(operations, bulk_model)
            bulk_arguments = camel_to_snake_args(request["arguments"])
            requests.append(bulk_class(**bulk_arguments))
        arguments["requests"] = requests
    else:
        for arg_name in list(arguments):
            c2s = camel_to_snake(arg_name)
            # PyMongo accepts sort as list of tuples.
            if arg_name == "sort":
                sort_dict = arguments[arg_name]
                arguments[arg_name] = list(iteritems(sort_dict))
            # Named "key" instead not fieldName.
            if arg_name == "fieldName":
                arguments["key"] = arguments.pop(arg_name)
            # Aggregate uses "batchSize", while find uses batch_size.
            elif arg_name == "batchSize" and operation == "aggregate":
                continue
            # Requires boolean returnDocument.
            elif arg_name == "returnDocument":
                arguments[c2s] = arguments[arg_name] == "After"
            else:
                arguments[c2s] = arguments.pop(arg_name)

    result = cmd(**arguments)

    if operation == "aggregate":
        if arguments["pipeline"] and "$out" in arguments["pipeline"][-1]:
            out = collection.database[arguments["pipeline"][-1]["$out"]]
            return out.find()
    return result
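camel_to_snake is a small test-suite utility; a standalone approximation (hypothetical, not the exact implementation) shows the conversion these runners rely on:

import re

def camel_to_snake(camel):
    # Insert an underscore before each interior capital, then lowercase.
    return re.sub(r"(?<!^)(?=[A-Z])", "_", camel).lower()

print(camel_to_snake("batchSize"))        # batch_size
print(camel_to_snake("returnDocument"))   # return_document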
Example #23
def _encode_dbref(name, value, check_keys, opts):
    """Encode bson.dbref.DBRef."""
    buf = bytearray(b"\x03" + name + b"\x00\x00\x00\x00")
    begin = len(buf) - 4

    buf += _name_value_to_bson(b"$ref\x00",
                               value.collection, check_keys, opts)
    buf += _name_value_to_bson(b"$id\x00",
                               value.id, check_keys, opts)
    if value.database is not None:
        buf += _name_value_to_bson(
            b"$db\x00", value.database, check_keys, opts)
    for key, val in iteritems(value._DBRef__kwargs):
        buf += _element_to_bson(key, val, check_keys, opts)

    buf += b"\x00"
    buf[begin:begin + 4] = _PACK_INT(len(buf) - begin)
    return bytes(buf)
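The $ref/$id/$db layout written above is what lets the decoder turn the subdocument back into a DBRef. A hedged round-trip sketch using the same classic BSON API as the other snippets:

from bson import BSON
from bson.dbref import DBRef

raw = BSON.encode({"link": DBRef("users", 42, "app_db")})
print(BSON(raw).decode())    # {'link': DBRef('users', 42, 'app_db')}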
Example #24
    def test_subclasses(self):
        # make sure we can serialize subclasses of native Python types.
        class _myint(int):
            pass

        class _myfloat(float):
            pass

        class _myunicode(text_type):
            pass

        d = {'a': _myint(42), 'b': _myfloat(63.9),
             'c': _myunicode('hello world')
            }
        d2 = BSON.encode(d).decode()
        for key, value in iteritems(d2):
            orig_value = d[key]
            orig_type = orig_value.__class__.__bases__[0]
            self.assertEqual(type(value), orig_type)
            self.assertEqual(value, orig_type(value))
def get_change_stream(client, scenario_def, test):
    # Get target namespace on which to instantiate change stream
    target = test["target"]
    if target == "collection":
        db = client.get_database(scenario_def["database_name"])
        cs_target = db.get_collection(scenario_def["collection_name"])
    elif target == "database":
        cs_target = client.get_database(scenario_def["database_name"])
    elif target == "client":
        cs_target = client
    else:
        raise ValueError("Invalid target in spec")

    # Construct change stream kwargs dict
    cs_pipeline = test["changeStreamPipeline"]
    options = test["changeStreamOptions"]
    cs_options = {}
    for key, value in iteritems(options):
        cs_options[camel_to_snake(key)] = value
    
    # Create and return change stream
    return cs_target.watch(pipeline=cs_pipeline, **cs_options)
Example #26
def simplify(case):  # TODO this is a hack
    if isinstance(case, SON) and "$ref" not in case:
        simplified = SON(case)  # make a copy!
        if random.choice([True, False]):
            # delete
            simplified_keys = list(simplified)
            if not len(simplified_keys):
                return (False, case)
            simplified.pop(random.choice(simplified_keys))
            return (True, simplified)
        else:
            # simplify a value
            simplified_items = list(iteritems(simplified))
            if not len(simplified_items):
                return (False, case)
            (key, value) = random.choice(simplified_items)
            (success, value) = simplify(value)
            simplified[key] = value
            return (success, success and simplified or case)
    if isinstance(case, list):
        simplified = list(case)
        if random.choice([True, False]):
            # delete
            if not len(simplified):
                return (False, case)
            simplified.pop(random.randrange(len(simplified)))
            return (True, simplified)
        else:
            # simplify an item
            if not len(simplified):
                return (False, case)
            index = random.randrange(len(simplified))
            (success, value) = simplify(simplified[index])
            simplified[index] = value
            return (success, success and simplified or case)
    return (False, case)
Example #27
      'the client certificate and the private key')),
    'ssl_keyfile':
    ('removed',
     ('Instead of using ssl_keyfile to specify the private keyfile, '
      'use tlsCertificateKeyFile to pass a single file containing both '
      'the client certificate and the private key')),
    'ssl_pem_passphrase': ('renamed', 'tlsCertificateKeyFilePassword'),
    'waitqueuemultiple':
    ('removed',
     ('Instead of using waitQueueMultiple to bound queuing, limit the size '
      'of the thread pool in your application server'))
}

# Augment the option validator map with pymongo-specific option information.
URI_OPTIONS_VALIDATOR_MAP.update(NONSPEC_OPTIONS_VALIDATOR_MAP)
for optname, aliases in iteritems(URI_OPTIONS_ALIAS_MAP):
    for alias in aliases:
        if alias not in URI_OPTIONS_VALIDATOR_MAP:
            URI_OPTIONS_VALIDATOR_MAP[alias] = (
                URI_OPTIONS_VALIDATOR_MAP[optname])

# Map containing all URI option and keyword argument validators.
VALIDATORS = URI_OPTIONS_VALIDATOR_MAP.copy()
VALIDATORS.update(KW_VALIDATORS)

# List of timeout-related options.
TIMEOUT_OPTIONS = [
    'connecttimeoutms',
    'heartbeatfrequencyms',
    'maxidletimems',
    'maxstalenessseconds',
    def run_operation(self, sessions, collection, operation):
        name = camel_to_snake(operation['name'])
        if name == 'run_command':
            name = 'command'
        self.transaction_test_debug(name)

        def parse_options(opts):
            if 'readPreference' in opts:
                opts['read_preference'] = parse_read_preference(
                    opts.pop('readPreference'))

            if 'writeConcern' in opts:
                opts['write_concern'] = WriteConcern(
                    **dict(opts.pop('writeConcern')))

            if 'readConcern' in opts:
                opts['read_concern'] = ReadConcern(
                    **dict(opts.pop('readConcern')))
            return opts

        database = collection.database
        collection = database.get_collection(collection.name)
        if 'collectionOptions' in operation:
            collection = collection.with_options(
                **dict(parse_options(operation['collectionOptions'])))

        objects = {'database': database, 'collection': collection}
        objects.update(sessions)
        obj = objects[operation['object']]

        # Combine arguments with options and handle special cases.
        arguments = operation.get('arguments', {})
        arguments.update(arguments.pop("options", {}))
        parse_options(arguments)

        cmd = getattr(obj, name)

        for arg_name in list(arguments):
            c2s = camel_to_snake(arg_name)
            # PyMongo accepts sort as list of tuples. Asserting len=1
            # because ordering dicts from JSON in 2.6 is unwieldy.
            if arg_name == "sort":
                sort_dict = arguments[arg_name]
                assert len(sort_dict) == 1, 'test can only have 1 sort key'
                arguments[arg_name] = list(iteritems(sort_dict))
            # Named "key" instead not fieldName.
            if arg_name == "fieldName":
                arguments["key"] = arguments.pop(arg_name)
            # Aggregate uses "batchSize", while find uses batch_size.
            elif arg_name == "batchSize" and name == "aggregate":
                continue
            # Requires boolean returnDocument.
            elif arg_name == "returnDocument":
                arguments[c2s] = arguments[arg_name] == "After"
            elif c2s == "requests":
                # Parse each request into a bulk write model.
                requests = []
                for request in arguments["requests"]:
                    bulk_model = camel_to_upper_camel(request["name"])
                    bulk_class = getattr(operations, bulk_model)
                    bulk_arguments = camel_to_snake_args(request["arguments"])
                    requests.append(bulk_class(**dict(bulk_arguments)))
                arguments["requests"] = requests
            elif arg_name == "session":
                arguments['session'] = sessions[arguments['session']]
            elif name == 'command' and arg_name == 'command':
                # Ensure the first key is the command name.
                ordered_command = SON([(operation['command_name'], 1)])
                ordered_command.update(arguments['command'])
                arguments['command'] = ordered_command
            else:
                arguments[c2s] = arguments.pop(arg_name)

        result = cmd(**dict(arguments))

        if name == "aggregate":
            if arguments["pipeline"] and "$out" in arguments["pipeline"][-1]:
                # Read from the primary to ensure causal consistency.
                out = collection.database.get_collection(
                    arguments["pipeline"][-1]["$out"],
                    read_preference=ReadPreference.PRIMARY)
                return out.find()

        if isinstance(result, Cursor) or isinstance(result, CommandCursor):
            return list(result)

        return result
    def run_operation(self, sessions, collection, operation):
        name = camel_to_snake(operation['name'])
        if name == 'run_command':
            name = 'command'
        self.transaction_test_debug(name)

        def parse_options(opts):
            if 'readPreference' in opts:
                opts['read_preference'] = parse_read_preference(
                    opts.pop('readPreference'))

            if 'writeConcern' in opts:
                opts['write_concern'] = WriteConcern(
                    **dict(opts.pop('writeConcern')))

            if 'readConcern' in opts:
                opts['read_concern'] = ReadConcern(
                    **dict(opts.pop('readConcern')))
            return opts

        database = collection.database
        collection = database.get_collection(collection.name)
        if 'collectionOptions' in operation:
            collection = collection.with_options(
                **dict(parse_options(operation['collectionOptions'])))

        objects = {'database': database, 'collection': collection}
        objects.update(sessions)
        obj = objects[operation['object']]

        # Combine arguments with options and handle special cases.
        arguments = operation.get('arguments', {})
        arguments.update(arguments.pop("options", {}))
        parse_options(arguments)

        cmd = getattr(obj, name)

        for arg_name in list(arguments):
            c2s = camel_to_snake(arg_name)
            # PyMongo accepts sort as list of tuples.
            if arg_name == "sort":
                sort_dict = arguments[arg_name]
                arguments[arg_name] = list(iteritems(sort_dict))
            # Named "key" instead not fieldName.
            if arg_name == "fieldName":
                arguments["key"] = arguments.pop(arg_name)
            # Aggregate uses "batchSize", while find uses batch_size.
            elif arg_name == "batchSize" and name == "aggregate":
                continue
            # Requires boolean returnDocument.
            elif arg_name == "returnDocument":
                arguments[c2s] = arguments[arg_name] == "After"
            elif c2s == "requests":
                # Parse each request into a bulk write model.
                requests = []
                for request in arguments["requests"]:
                    bulk_model = camel_to_upper_camel(request["name"])
                    bulk_class = getattr(operations, bulk_model)
                    bulk_arguments = camel_to_snake_args(request["arguments"])
                    requests.append(bulk_class(**dict(bulk_arguments)))
                arguments["requests"] = requests
            elif arg_name == "session":
                arguments['session'] = sessions[arguments['session']]
            elif name == 'command' and arg_name == 'command':
                # Ensure the first key is the command name.
                ordered_command = SON([(operation['command_name'], 1)])
                ordered_command.update(arguments['command'])
                arguments['command'] = ordered_command
            else:
                arguments[c2s] = arguments.pop(arg_name)

        result = cmd(**dict(arguments))

        if name == "aggregate":
            if arguments["pipeline"] and "$out" in arguments["pipeline"][-1]:
                # Read from the primary to ensure causal consistency.
                out = collection.database.get_collection(
                    arguments["pipeline"][-1]["$out"],
                    read_preference=ReadPreference.PRIMARY)
                return out.find()

        if isinstance(result, Cursor) or isinstance(result, CommandCursor):
            return list(result)

        return result
Example #30
    def authenticate(self,
                     name=None,
                     password=None,
                     source=None,
                     mechanism='DEFAULT',
                     **kwargs):
        """**DEPRECATED**: Authenticate to use this database.

        Authentication lasts for the life of the underlying client
        instance, or until :meth:`logout` is called.

        Raises :class:`TypeError` if (required) `name`, (optional) `password`,
        or (optional) `source` is not an instance of :class:`basestring`
        (:class:`str` in python 3).

        .. note::
          - This method authenticates the current connection, and
            will also cause all new :class:`~socket.socket` connections
            in the underlying client instance to be authenticated automatically.

          - Authenticating more than once on the same database with different
            credentials is not supported. You must call :meth:`logout` before
            authenticating with new credentials.

          - When sharing a client instance between multiple threads, all
            threads will share the authentication. If you need different
            authentication profiles for different purposes you must use
            distinct client instances.

        :Parameters:
          - `name`: the name of the user to authenticate. Optional when
            `mechanism` is MONGODB-X509 and the MongoDB server version is
            >= 3.4.
          - `password` (optional): the password of the user to authenticate.
            Not used with GSSAPI or MONGODB-X509 authentication.
          - `source` (optional): the database to authenticate on. If not
            specified the current database is used.
          - `mechanism` (optional): See
            :data:`~pymongo.auth.MECHANISMS` for options.
            By default, use SCRAM-SHA-1 with MongoDB 3.0 and later,
            MONGODB-CR (MongoDB Challenge Response protocol) for older servers.
          - `authMechanismProperties` (optional): Used to specify
            authentication mechanism specific options. To specify the service
            name for GSSAPI authentication pass
            authMechanismProperties='SERVICE_NAME:<service name>'

        .. versionchanged:: 3.5
           Deprecated. Authenticating multiple users conflicts with support for
           logical sessions in MongoDB 3.6. To authenticate as multiple users,
           create multiple instances of MongoClient.

        .. versionadded:: 2.8
           Use SCRAM-SHA-1 with MongoDB 3.0 and later.

        .. versionchanged:: 2.5
           Added the `source` and `mechanism` parameters. :meth:`authenticate`
           now raises a subclass of :class:`~pymongo.errors.PyMongoError` if
           authentication fails due to invalid credentials or configuration
           issues.

        .. mongodoc:: authenticate
        """
        if name is not None and not isinstance(name, string_type):
            raise TypeError("name must be an "
                            "instance of %s" % (string_type.__name__, ))
        if password is not None and not isinstance(password, string_type):
            raise TypeError("password must be an "
                            "instance of %s" % (string_type.__name__, ))
        if source is not None and not isinstance(source, string_type):
            raise TypeError("source must be an "
                            "instance of %s" % (string_type.__name__, ))
        common.validate_auth_mechanism('mechanism', mechanism)

        validated_options = {}
        for option, value in iteritems(kwargs):
            normalized, val = common.validate_auth_option(option, value)
            validated_options[normalized] = val

        credentials = auth._build_credentials_tuple(mechanism, source
                                                    or self.name, name,
                                                    password,
                                                    validated_options)

        self.client._cache_credentials(self.name, credentials, connect=True)

        return True
Example #31
    def run_operation(self, sessions, collection, operation):
        original_collection = collection
        name = camel_to_snake(operation['name'])
        if name == 'run_command':
            name = 'command'
        elif name == 'download_by_name':
            name = 'open_download_stream_by_name'
        elif name == 'download':
            name = 'open_download_stream'

        database = collection.database
        collection = database.get_collection(collection.name)
        if 'collectionOptions' in operation:
            collection = collection.with_options(
                **self.parse_options(operation['collectionOptions']))

        object_name = self.get_object_name(operation)
        if object_name == 'gridfsbucket':
            # Only create the GridFSBucket when we need it (for the gridfs
            # retryable reads tests).
            obj = GridFSBucket(database,
                               bucket_name=collection.name,
                               disable_md5=True)
        else:
            objects = {
                'client': database.client,
                'database': database,
                'collection': collection,
                'testRunner': self
            }
            objects.update(sessions)
            obj = objects[object_name]

        # Combine arguments with options and handle special cases.
        arguments = operation.get('arguments', {})
        arguments.update(arguments.pop("options", {}))
        self.parse_options(arguments)

        cmd = getattr(obj, name)

        for arg_name in list(arguments):
            c2s = camel_to_snake(arg_name)
            # PyMongo accepts sort as list of tuples.
            if arg_name == "sort":
                sort_dict = arguments[arg_name]
                arguments[arg_name] = list(iteritems(sort_dict))
            # Named "key" instead not fieldName.
            if arg_name == "fieldName":
                arguments["key"] = arguments.pop(arg_name)
            # Aggregate uses "batchSize", while find uses batch_size.
            elif ((arg_name == "batchSize" or arg_name == "allowDiskUse")
                  and name == "aggregate"):
                continue
            # Requires boolean returnDocument.
            elif arg_name == "returnDocument":
                arguments[c2s] = arguments.pop(arg_name) == "After"
            elif c2s == "requests":
                # Parse each request into a bulk write model.
                requests = []
                for request in arguments["requests"]:
                    bulk_model = camel_to_upper_camel(request["name"])
                    bulk_class = getattr(operations, bulk_model)
                    bulk_arguments = camel_to_snake_args(request["arguments"])
                    requests.append(bulk_class(**dict(bulk_arguments)))
                arguments["requests"] = requests
            elif arg_name == "session":
                arguments['session'] = sessions[arguments['session']]
            elif (name in ('command', 'run_admin_command')
                  and arg_name == 'command'):
                # Ensure the first key is the command name.
                ordered_command = SON([(operation['command_name'], 1)])
                ordered_command.update(arguments['command'])
                arguments['command'] = ordered_command
            elif name == 'open_download_stream' and arg_name == 'id':
                arguments['file_id'] = arguments.pop(arg_name)
            elif name != 'find' and c2s == 'max_time_ms':
                # find is the only method that accepts snake_case max_time_ms.
                # All other methods take kwargs which must use the server's
                # camelCase maxTimeMS. See PYTHON-1855.
                arguments['maxTimeMS'] = arguments.pop('max_time_ms')
            elif name == 'with_transaction' and arg_name == 'callback':
                callback_ops = arguments[arg_name]['operations']
                arguments['callback'] = lambda _: self.run_operations(
                    sessions,
                    original_collection,
                    copy.deepcopy(callback_ops),
                    in_with_transaction=True)
            elif name == 'drop_collection' and arg_name == 'collection':
                arguments['name_or_collection'] = arguments.pop(arg_name)
            elif name == 'create_collection' and arg_name == 'collection':
                arguments['name'] = arguments.pop(arg_name)
            elif name == 'create_index' and arg_name == 'keys':
                arguments['keys'] = list(arguments.pop(arg_name).items())
            elif name == 'drop_index' and arg_name == 'name':
                arguments['index_or_name'] = arguments.pop(arg_name)
            else:
                arguments[c2s] = arguments.pop(arg_name)

        if name == 'run_on_thread':
            args = {'sessions': sessions, 'collection': collection}
            args.update(arguments)
            arguments = args
        result = cmd(**dict(arguments))

        if name == "aggregate":
            if arguments["pipeline"] and "$out" in arguments["pipeline"][-1]:
                # Read from the primary to ensure causal consistency.
                out = collection.database.get_collection(
                    arguments["pipeline"][-1]["$out"],
                    read_preference=ReadPreference.PRIMARY)
                return out.find()
        if name == "map_reduce":
            if isinstance(result, dict) and 'results' in result:
                return result['results']
        if 'download' in name:
            result = Binary(result.read())

        if isinstance(result, Cursor) or isinstance(result, CommandCursor):
            return list(result)

        return result
Example #32
def _encode_mapping(name, value, check_keys, opts):
    """Encode a mapping type."""
    data = b"".join([_element_to_bson(key, val, check_keys, opts)
                     for key, val in iteritems(value)])
    return b"\x03" + name + _PACK_INT(len(data) + 5) + data + b"\x00"
Example #33
    def run_operation(self, sessions, collection, operation):
        session = None
        name = camel_to_snake(operation['name'])
        self.transaction_test_debug(name)
        session_name = operation['arguments'].pop('session', None)
        if session_name:
            session = sessions[session_name]

        # Combine arguments with options and handle special cases.
        arguments = operation['arguments']
        arguments.update(arguments.pop("options", {}))
        pref = write_c = read_c = None
        if 'readPreference' in arguments:
            pref = parse_read_preference(arguments.pop('readPreference'))

        if 'writeConcern' in arguments:
            write_c = WriteConcern(**dict(arguments.pop('writeConcern')))

        if 'readConcern' in arguments:
            read_c = ReadConcern(**dict(arguments.pop('readConcern')))

        if name == 'start_transaction':
            cmd = partial(session.start_transaction,
                          write_concern=write_c,
                          read_concern=read_c,
                          read_preference=pref)
        elif name in ('commit_transaction', 'abort_transaction'):
            cmd = getattr(session, name)
        else:
            collection = collection.with_options(write_concern=write_c,
                                                 read_concern=read_c,
                                                 read_preference=pref)

            cmd = getattr(collection, name)
            arguments['session'] = session

        for arg_name in list(arguments):
            c2s = camel_to_snake(arg_name)
            # PyMongo accepts sort as list of tuples. Asserting len=1
            # because ordering dicts from JSON in 2.6 is unwieldy.
            if arg_name == "sort":
                sort_dict = arguments[arg_name]
                assert len(sort_dict) == 1, 'test can only have 1 sort key'
                arguments[arg_name] = list(iteritems(sort_dict))
            # Named "key" instead not fieldName.
            if arg_name == "fieldName":
                arguments["key"] = arguments.pop(arg_name)
            # Aggregate uses "batchSize", while find uses batch_size.
            elif arg_name == "batchSize" and name == "aggregate":
                continue
            # Requires boolean returnDocument.
            elif arg_name == "returnDocument":
                arguments[c2s] = arguments[arg_name] == "After"
            elif c2s == "requests":
                # Parse each request into a bulk write model.
                requests = []
                for request in arguments["requests"]:
                    bulk_model = camel_to_upper_camel(request["name"])
                    bulk_class = getattr(operations, bulk_model)
                    bulk_arguments = camel_to_snake_args(request["arguments"])
                    requests.append(bulk_class(**dict(bulk_arguments)))
                arguments["requests"] = requests
            else:
                arguments[c2s] = arguments.pop(arg_name)

        result = cmd(**dict(arguments))

        if name == "aggregate":
            if arguments["pipeline"] and "$out" in arguments["pipeline"][-1]:
                # Read from the primary to ensure causal consistency.
                out = collection.database.get_collection(
                    arguments["pipeline"][-1]["$out"],
                    read_preference=ReadPreference.PRIMARY)
                return out.find()

        if isinstance(result, Cursor) or isinstance(result, CommandCursor):
            return list(result)

        return result
Example #34
 def items(self):
     """Lazily decode and iterate elements in this document."""
     return iteritems(self.__inflated)
Example #35
    def validate_collection(self,
                            name_or_collection,
                            scandata=False,
                            full=False):
        """Validate a collection.

        Returns a dict of validation info. Raises CollectionInvalid if
        validation fails.

        With MongoDB < 1.9 the result dict will include a `result` key
        with a string value that represents the validation results. With
        MongoDB >= 1.9 the `result` key no longer exists and the results
        are split into individual fields in the result dict.

        :Parameters:
          - `name_or_collection`: A Collection object or the name of a
            collection to validate.
          - `scandata`: Do extra checks beyond checking the overall
            structure of the collection.
          - `full`: Have the server do a more thorough scan of the
            collection. Use with `scandata` for a thorough scan
            of the structure of the collection and the individual
            documents. Ignored in MongoDB versions before 1.9.
        """
        name = name_or_collection
        if isinstance(name, Collection):
            name = name.name

        if not isinstance(name, string_type):
            raise TypeError("name_or_collection must be an instance of "
                            "%s or Collection" % (string_type.__name__, ))

        result = self.command("validate",
                              _unicode(name),
                              scandata=scandata,
                              full=full)

        valid = True
        # Pre 1.9 results
        if "result" in result:
            info = result["result"]
            if info.find("exception") != -1 or info.find("corrupt") != -1:
                raise CollectionInvalid("%s invalid: %s" % (name, info))
        # Sharded results
        elif "raw" in result:
            for _, res in iteritems(result["raw"]):
                if "result" in res:
                    info = res["result"]
                    if (info.find("exception") != -1
                            or info.find("corrupt") != -1):
                        raise CollectionInvalid("%s invalid: "
                                                "%s" % (name, info))
                elif not res.get("valid", False):
                    valid = False
                    break
        # Post 1.9 non-sharded results.
        elif not result.get("valid", False):
            valid = False

        if not valid:
            raise CollectionInvalid("%s invalid: %r" % (name, result))

        return result
    def _create_entity(self, entity_spec):
        if len(entity_spec) != 1:
            self._test_class.fail(
                "Entity spec %s did not contain exactly one top-level key" %
                (entity_spec, ))

        entity_type, spec = next(iteritems(entity_spec))
        if entity_type == 'client':
            kwargs = {}
            observe_events = spec.get('observeEvents', [])
            ignore_commands = spec.get('ignoreCommandMonitoringEvents', [])
            if len(observe_events) or len(ignore_commands):
                ignore_commands = [cmd.lower() for cmd in ignore_commands]
                listener = EventListenerUtil(observe_events, ignore_commands)
                self._listeners[spec['id']] = listener
                kwargs['event_listeners'] = [listener]
            if client_context.is_mongos and spec.get('useMultipleMongoses'):
                kwargs['h'] = client_context.mongos_seeds()
            kwargs.update(spec.get('uriOptions', {}))
            server_api = spec.get('serverApi')
            if server_api:
                kwargs['server_api'] = ServerApi(
                    server_api['version'],
                    strict=server_api.get('strict'),
                    deprecation_errors=server_api.get('deprecationErrors'))
            client = rs_or_single_client(**kwargs)
            self[spec['id']] = client
            self._test_class.addCleanup(client.close)
            return
        elif entity_type == 'database':
            client = self[spec['client']]
            if not isinstance(client, MongoClient):
                self._test_class.fail(
                    'Expected entity %s to be of type MongoClient, got %s' %
                    (spec['client'], type(client)))
            options = parse_collection_or_database_options(
                spec.get('databaseOptions', {}))
            self[spec['id']] = client.get_database(spec['databaseName'],
                                                   **options)
            return
        elif entity_type == 'collection':
            database = self[spec['database']]
            if not isinstance(database, Database):
                self._test_class.fail(
                    'Expected entity %s to be of type Database, got %s' %
                    (spec['database'], type(database)))
            options = parse_collection_or_database_options(
                spec.get('collectionOptions', {}))
            self[spec['id']] = database.get_collection(spec['collectionName'],
                                                       **options)
            return
        elif entity_type == 'session':
            client = self[spec['client']]
            if not isinstance(client, MongoClient):
                self._test_class.fail(
                    'Expected entity %s to be of type MongoClient, got %s' %
                    (spec['client'], type(client)))
            opts = camel_to_snake_args(spec.get('sessionOptions', {}))
            if 'default_transaction_options' in opts:
                txn_opts = parse_spec_options(
                    opts['default_transaction_options'])
                txn_opts = TransactionOptions(**txn_opts)
                opts = copy.deepcopy(opts)
                opts['default_transaction_options'] = txn_opts
            session = client.start_session(**dict(opts))
            self[spec['id']] = session
            self._session_lsids[spec['id']] = copy.deepcopy(session.session_id)
            self._test_class.addCleanup(session.end_session)
            return
        elif entity_type == 'bucket':
            # TODO: implement the 'bucket' entity type
            self._test_class.skipTest(
                'GridFS is not currently supported (PYTHON-2459)')
        self._test_class.fail('Unable to create entity of unknown type %s' %
                              (entity_type, ))
Example #37
# Map from deprecated URI option names to the updated option names.
# Case is preserved for updated option names as they are part of user warnings.
URI_OPTIONS_DEPRECATION_MAP = {
    'j': 'journal',
    'wtimeout': 'wTimeoutMS',
    'ssl_cert_reqs': 'tlsAllowInvalidCertificates',
    'ssl_match_hostname': 'tlsAllowInvalidHostnames',
    'ssl_crlfile': 'tlsCRLFile',
    'ssl_ca_certs': 'tlsCAFile',
    'ssl_pem_passphrase': 'tlsCertificateKeyFilePassword',
}

# Augment the option validator map with pymongo-specific option information.
URI_OPTIONS_VALIDATOR_MAP.update(NONSPEC_OPTIONS_VALIDATOR_MAP)
for optname, aliases in iteritems(URI_OPTIONS_ALIAS_MAP):
    for alias in aliases:
        if alias not in URI_OPTIONS_VALIDATOR_MAP:
            URI_OPTIONS_VALIDATOR_MAP[alias] = (
                URI_OPTIONS_VALIDATOR_MAP[optname])

# Map containing all URI option and keyword argument validators.
VALIDATORS = URI_OPTIONS_VALIDATOR_MAP.copy()
VALIDATORS.update(KW_VALIDATORS)

# List of timeout-related options.
TIMEOUT_OPTIONS = [
    'connecttimeoutms',
    'heartbeatfrequencyms',
    'maxidletimems',
    'maxstalenessseconds',
Example #38
def prepare_spec_arguments(spec, arguments, opname, entity_map,
                           with_txn_callback):
    for arg_name in list(arguments):
        c2s = camel_to_snake(arg_name)
        # PyMongo accepts sort as list of tuples.
        if arg_name == "sort":
            sort_dict = arguments[arg_name]
            arguments[arg_name] = list(iteritems(sort_dict))
        # Named "key" instead not fieldName.
        if arg_name == "fieldName":
            arguments["key"] = arguments.pop(arg_name)
        # Aggregate uses "batchSize", while find uses batch_size.
        elif ((arg_name == "batchSize" or arg_name == "allowDiskUse")
              and opname == "aggregate"):
            continue
        # Requires boolean returnDocument.
        elif arg_name == "returnDocument":
            arguments[c2s] = getattr(ReturnDocument,
                                     arguments.pop(arg_name).upper())
        elif c2s == "requests":
            # Parse each request into a bulk write model.
            requests = []
            for request in arguments["requests"]:
                if 'name' in request:
                    # CRUD v2 format
                    bulk_model = camel_to_upper_camel(request["name"])
                    bulk_class = getattr(operations, bulk_model)
                    bulk_arguments = camel_to_snake_args(request["arguments"])
                else:
                    # Unified test format
                    bulk_model, spec = next(iteritems(request))
                    bulk_class = getattr(operations,
                                         camel_to_upper_camel(bulk_model))
                    bulk_arguments = camel_to_snake_args(spec)
                requests.append(bulk_class(**dict(bulk_arguments)))
            arguments["requests"] = requests
        elif arg_name == "session":
            arguments['session'] = entity_map[arguments['session']]
        elif (opname in ('command', 'run_admin_command')
              and arg_name == 'command'):
            # Ensure the first key is the command name.
            ordered_command = SON([(spec['command_name'], 1)])
            ordered_command.update(arguments['command'])
            arguments['command'] = ordered_command
        elif opname == 'open_download_stream' and arg_name == 'id':
            arguments['file_id'] = arguments.pop(arg_name)
        elif opname != 'find' and c2s == 'max_time_ms':
            # find is the only method that accepts snake_case max_time_ms.
            # All other methods take kwargs which must use the server's
            # camelCase maxTimeMS. See PYTHON-1855.
            arguments['maxTimeMS'] = arguments.pop('max_time_ms')
        elif opname == 'with_transaction' and arg_name == 'callback':
            if 'operations' in arguments[arg_name]:
                # CRUD v2 format
                callback_ops = arguments[arg_name]['operations']
            else:
                # Unified test format
                callback_ops = arguments[arg_name]
            arguments['callback'] = lambda _: with_txn_callback(
                copy.deepcopy(callback_ops))
        elif opname == 'drop_collection' and arg_name == 'collection':
            arguments['name_or_collection'] = arguments.pop(arg_name)
        elif opname == 'create_collection' and arg_name == 'collection':
            arguments['name'] = arguments.pop(arg_name)
        elif opname == 'create_index' and arg_name == 'keys':
            arguments['keys'] = list(arguments.pop(arg_name).items())
        elif opname == 'drop_index' and arg_name == 'name':
            arguments['index_or_name'] = arguments.pop(arg_name)
        else:
            arguments[c2s] = arguments.pop(arg_name)
Example #39
    def authenticate(self, name, password=None,
                     source=None, mechanism='DEFAULT', **kwargs):
        """Authenticate to use this database.

        Authentication lasts for the life of the underlying client
        instance, or until :meth:`logout` is called.

        Raises :class:`TypeError` if (required) `name`, (optional) `password`,
        or (optional) `source` is not an instance of :class:`basestring`
        (:class:`str` in python 3).

        .. note::
          - This method authenticates the current connection, and
            will also cause all new :class:`~socket.socket` connections
            in the underlying client instance to be authenticated automatically.

          - Authenticating more than once on the same database with different
            credentials is not supported. You must call :meth:`logout` before
            authenticating with new credentials.

          - When sharing a client instance between multiple threads, all
            threads will share the authentication. If you need different
            authentication profiles for different purposes you must use
            distinct client instances.

        :Parameters:
          - `name`: the name of the user to authenticate.
          - `password` (optional): the password of the user to authenticate.
            Not used with GSSAPI or MONGODB-X509 authentication.
          - `source` (optional): the database to authenticate on. If not
            specified the current database is used.
          - `mechanism` (optional): See
            :data:`~pymongo.auth.MECHANISMS` for options.
            By default, use SCRAM-SHA-1 with MongoDB 3.0 and later,
            MONGODB-CR (MongoDB Challenge Response protocol) for older servers.
          - `authMechanismProperties` (optional): Used to specify
            authentication mechanism specific options. To specify the service
            name for GSSAPI authentication pass
            authMechanismProperties='SERVICE_NAME:<service name>'

        .. versionadded:: 2.8
           Use SCRAM-SHA-1 with MongoDB 3.0 and later.

        .. versionchanged:: 2.5
           Added the `source` and `mechanism` parameters. :meth:`authenticate`
           now raises a subclass of :class:`~pymongo.errors.PyMongoError` if
           authentication fails due to invalid credentials or configuration
           issues.

        .. mongodoc:: authenticate
        """
        if not isinstance(name, string_type):
            raise TypeError("name must be an "
                            "instance of %s" % (string_type.__name__,))
        if password is not None and not isinstance(password, string_type):
            raise TypeError("password must be an "
                            "instance of %s" % (string_type.__name__,))
        if source is not None and not isinstance(source, string_type):
            raise TypeError("source must be an "
                            "instance of %s" % (string_type.__name__,))
        common.validate_auth_mechanism('mechanism', mechanism)

        validated_options = {}
        for option, value in iteritems(kwargs):
            normalized, val = common.validate_auth_option(option, value)
            validated_options[normalized] = val

        credentials = auth._build_credentials_tuple(
            mechanism,
            source or self.name,
            name,
            password,
            validated_options)

        self.client._cache_credentials(
            self.name,
            credentials,
            connect=True)

        return True
Example #40
File: field.py Project: kfiri/mongoit
 def operation(self, **operations):
     """Get the object that represent the `operations` in a Mongo query.
     """
     return {('$' + operation): value
             for operation, value in iteritems(operations)}
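A standalone equivalent of the comprehension above (the helper name is hypothetical), showing the shape of the query fragment it builds:

def to_mongo_operators(**operations):
    # e.g. gt=5, lte=10  ->  {'$gt': 5, '$lte': 10}
    return {"$" + op: value for op, value in operations.items()}

print(to_mongo_operators(gt=5, lte=10))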
Example #41
 def items(self):
     """Lazily decode and iterate elements in this document."""
     return iteritems(self.__inflated)