Example #1
    def test_GridFsIndexesCreation(self):
        """ Tests gridfs indexes creation"""
        conn = yield txmongo.MongoConnection(mongo_host, mongo_port)
        db = conn.test
        yield self._drop_gridfs(db)

        # Creating a new GridFS instance should trigger index creation
        gfs = GridFS(db)

        # Multiple calls should return separate Deferreds so results don't get mixed up
        self.assertNotEqual(gfs.indexes_created(), gfs.indexes_created())

        yield gfs.indexes_created()

        indexes = yield db.fs.files.index_information()
        self.assertTrue(
            any(key["key"] == SON([("filename", 1), ("uploadDate", 1)])
                for key in indexes.values()))

        indexes = yield db.fs.chunks.index_information()
        self.assertTrue(
            any(key["key"] == SON([("files_id", 1), ("n", 1)])
                for key in indexes.values()))

        yield conn.disconnect()
Example #2
def test_document_from_ip_address():
    streamer = Streamer('test_document_from_ip_address')
    host = 'http://list-iptv.com'
    url1 = 'http://62.210.245.19:8000/live/testapp/testapp/2.ts'
    url2 = 'http://clientportalpro.com:2500/live/VE5DWv4Ait/7KHLqRRZ9E/2160.ts'
    url3 = 'http://ndasat.pro:8000/live/exch/exch/1227.ts'
    for url in [url1, url2, url3]:
        netloc = um.prepare_netloc(url)
        ip_addresses = socket.gethostbyname_ex(um.remove_schema(netloc))[2]
        for ip_address in ip_addresses:
            data = {
                'ip_address':
                ip_address,
                'network_locations': [
                    SON([('network_location', netloc), ('linked_by', [host]),
                         ('working_link', True)])
                ]
            }
            streamer.collection().insert(data)
            doc = streamer.document_from_ip_address(ip_address)
            assert doc['ip_address'] == ip_address
            assert doc['network_locations'] == [
                SON([('network_location', netloc), ('linked_by', [host]),
                     ('working_link', True)])
            ]

    streamer.delete()
Example #3
 def delete(self, user='******'):
     history_collection = DB.handle[self.record_type + '_history']
     record_history = history_collection.find_one({'_id': self.id}) or SON()
     record_history['deleted'] = SON({'user': user, 'time': datetime.utcnow()})
     history_collection.replace_one({'_id': self.id}, record_history, upsert=True)

     return type(self).handle().delete_one({'_id': self.id})
Example #4
    def test_delta_for_dynamic_documents(self):
        class Person(DynamicDocument):
            name = StringField()
            meta = {'allow_inheritance': True}

        Person.drop_collection()

        p = Person(name="James", age=34)
        self.assertEqual(p._delta(), (
            SON([('_cls', 'Person'), ('name', 'James'), ('age', 34)]), {}))

        p.doc = 123
        del p.doc
        self.assertEqual(p._delta(), (
            SON([('_cls', 'Person'), ('name', 'James'), ('age', 34)]), {}))

        p = Person()
        p.name = "Dean"
        p.age = 22
        p.save()

        p.age = 24
        self.assertEqual(p.age, 24)
        self.assertEqual(p._get_changed_fields(), ['age'])
        self.assertEqual(p._delta(), ({'age': 24}, {}))

        p = Person.objects(age=22).get()
        p.age = 24
        self.assertEqual(p.age, 24)
        self.assertEqual(p._get_changed_fields(), ['age'])
        self.assertEqual(p._delta(), ({'age': 24}, {}))

        p.save()
        self.assertEqual(1, Person.objects(age=24).count())
Example #5
def get_punctuation(db, collection, city):
    df = pd.DataFrame([[d['name'], d['number_of_employees'], d['loc']]
                       for d in db[collection].find({'city': city})],
                      columns=['name', 'employees', 'loc'])
    distances = {'airports': 30000, 'restaurants': 1000, 'starbucks': 500}
    data = {
        'airports': src.yelp.get_airports(city),
        'restaurants': src.yelp.get_vegan_restaurants(city),
        'starbucks': src.yelp.get_starbucks(city)
    }
    for kind, listing in data.items():
        df[kind] = 0
        for element in listing:
            results = db[collection].find({
                'loc': {
                    '$near':
                    SON([('$geometry',
                          SON([('type', 'Point'),
                               ('coordinates', [
                                   element['coordinates']['longitude'],
                                   element['coordinates']['latitude']
                               ])])), ('$maxDistance', distances[kind])])
                }
            })
            for r in results:
                df.loc[df['name'] == r['name'], kind] = 1
    df = df.astype({
        'airports': 'int32',
        'starbucks': 'int32',
        'restaurants': 'int32'
    })
    df['total'] = df.iloc[:, -3:].sum(axis=1)  # sum all three category columns
    return df
Example #6
    def authenticate_scram_sha1(self, database_name, username, password):
        # Totally stolen from pymongo.auth
        user = username.replace('=', "=3D").replace(',', "=2C")
        nonce = base64.standard_b64encode(
            str(SystemRandom().random()).encode('ascii'))[2:]
        first_bare = "n={0},r={1}".format(user, nonce.decode()).encode('ascii')

        cmd = SON([("saslStart", 1), ("mechanism", "SCRAM-SHA-1"),
                   ("autoAuthorize", 1),
                   ("payload", Binary(b"n,," + first_bare))])
        result = yield self.__run_command(database_name, cmd)

        server_first = result["payload"]
        parsed = auth._parse_scram_response(server_first)
        iterations = int(parsed[b'i'])
        salt = parsed[b's']
        rnonce = parsed[b'r']
        if not rnonce.startswith(nonce):
            raise MongoAuthenticationError(
                "TxMongo: server returned an invalid nonce.")

        without_proof = b"c=biws,r=" + rnonce
        salted_pass = auth._hi(
            auth._password_digest(username, password).encode("utf-8"),
            base64.standard_b64decode(salt), iterations)
        client_key = hmac.HMAC(salted_pass, b"Client Key", sha1).digest()
        stored_key = sha1(client_key).digest()
        auth_msg = b','.join((first_bare, server_first, without_proof))
        client_sig = hmac.HMAC(stored_key, auth_msg, sha1).digest()
        client_proof = b"p=" + base64.standard_b64encode(
            auth._xor(client_key, client_sig))
        client_final = b','.join((without_proof, client_proof))

        server_key = hmac.HMAC(salted_pass, b"Server Key", sha1).digest()
        server_sig = base64.standard_b64encode(
            hmac.HMAC(server_key, auth_msg, sha1).digest())

        cmd = SON([("saslContinue", 1),
                   ("conversationId", result["conversationId"]),
                   ("payload", Binary(client_final))])
        result = yield self.__run_command(database_name, cmd)

        if not result["ok"]:
            raise MongoAuthenticationError("TxMongo: authentication failed.")

        parsed = auth._parse_scram_response(result["payload"])
        if parsed[b'v'] != server_sig:
            raise MongoAuthenticationError(
                "TxMongo: server returned an invalid signature.")

        # Depending on how it's configured, Cyrus SASL (which the server uses)
        # requires a third empty challenge.
        if not result["done"]:
            cmd = SON([("saslContinue", 1),
                       ("conversationId", result["conversationId"]),
                       ("payload", Binary(b''))])
            result = yield self.__run_command(database_name, cmd)
            if not result["done"]:
                raise MongoAuthenticationError(
                    "TxMongo: SASL conversation failed to complete.")
Example #7
def pharmacy_search():
    if check_root != 2:
        # Korean message: "Invalid access from a non-local host."
        return render_template('error.html', id="로컬이 아닌 잘못된 접근입니다.")

    client = MyMongoClient("kim_db", "pharmacy")
    client2 = MyMongoClient("kim_db", "modify_pharmacy")
    modify_pharmacy = client2.get_collection()
    cursor = client.get_collection().find()
    for cur in cursor:
        if cur.get('경도') is None:  # '경도' = longitude
            continue
        phar_loc = {'type': 'Point'}
        # '위도' = latitude
        phar_loc['coordinates'] = [float(cur.get('경도')), float(cur.get('위도'))]
        # '약국명' = pharmacy name
        bookJson = {'pharmacyName': cur.get('약국명'), 'location': phar_loc}
        modify_pharmacy.insert_one(bookJson)
    modify_pharmacy.create_index([('location', GEOSPHERE)])
    query = {
        'location': {
            '$near':
            SON([('$geometry',
                  SON([('type', 'Point'),
                       ('coordinates', [128.62707, 35.88107])])),
                 ('$maxDistance', 1000)])
        }
    }
    pharmacy = modify_pharmacy.find(query)

    return render_template('hkit_pharmacy.html', pharmacy=pharmacy)
Example #8
def make_pipeline(criterions):
    if not criterions:
        return [
            {
                '$match': {'skip': {'$exists': False}}
            },
            {
                '$limit': 10
            }
        ]
    return [
        {
            '$match': {'skip': {'$exists': False}}
        },
        {
            '$project': SON(
                [(name, chose_project_method(name, value))
                    for name, value in criterions] + [('id', 1), ('domain', 1)]
                    )
        },
        {
            '$sort': SON(
                [(name, -1) for name, value in criterions]
                )
        },
        {
            '$limit': 10
        }
    ]
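A hedged usage sketch for make_pipeline; it assumes chose_project_method from the same module, and the criteria and pages collection below are made up:

# Hypothetical usage: rank documents on two made-up criteria.
criterions = [('score', 'max'), ('views', 'sum')]
pipeline = make_pipeline(criterions)
# $project keeps each criterion field plus ('id', 1) and ('domain', 1);
# $sort orders every criterion field descending; $limit caps output at 10.
for doc in db.pages.aggregate(pipeline):
    print(doc.get('id'), doc.get('domain'))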
Example #9
async def generateRandomPlay(db, state):
    now = str(datetime.datetime.now())
    if len(state["cards_left"]) < 2:
        # Start a new round!
        if state["round"] == state["board"]["numRounds"] - 1:
            # end of game
            state["in_progress"] = False
            state["time_ended"] = now
            state["last_updated"] = now
            await db.games.update_one({"_id": state["_id"]}, SON([
                ("$set", SON([("last_updated", state["last_updated"]), \
                ("in_progress", state["in_progress"]), \
                ("time_ended", state["time_ended"]), ]))]))
            return False
        state["round"] = state["round"] + 1
        state["cards_left"] = state["cards_list"].copy()
        for player in state["players"]:
            prev_score = player["score"]["target_rounds"][state["round"] - 1]
            player["score"]["target_rounds"].append(prev_score)

    newPlay = random.sample(state["cards_left"], 2)
    state["cards_left"].remove(newPlay[0])
    state["cards_left"].remove(newPlay[1])
    state["plays"].append(newPlay)
    state["last_updated"] = now
    await db.games.update_one({"_id": state["_id"]}, SON([("$push", SON([("plays", newPlay)])), \
        ("$set", SON([("last_updated", state["last_updated"]), \
        ("cards_left", state["cards_left"]), \
        ("round", state["round"]), \
        ("players", state["players"])]))]))
    return True
Example #10
    def test_forward__if_name_is_set_and_changed_and_field_spec_also_changed__should_create_index(
            self, test_db, left_schema):
        fields1 = [('field2', pymongo.ASCENDING)]
        fields2 = [('field1', pymongo.ASCENDING),
                   ('field2', pymongo.DESCENDING)]
        test_db['document1'].create_index(fields1,
                                          name='index_old',
                                          sparse=False)
        action = AlterIndex('Document1',
                            'index2',
                            fields=fields2,
                            name='index2',
                            sparse=True)
        action.prepare(test_db, left_schema, MigrationPolicy.strict)

        action.run_forward()

        indexes1 = [
            x for x in test_db['document1'].list_indexes()
            if x['key'] == SON(fields1)
        ]
        assert len(indexes1) == 1
        assert indexes1[0]['sparse'] is False
        assert indexes1[0]['name'] == 'index_old'
        indexes2 = [
            x for x in test_db['document1'].list_indexes()
            if x['key'] == SON(fields2)
        ]
        assert len(indexes2) == 1
        assert indexes2[0]['name'] == 'index2'
        assert indexes2[0]['sparse'] is True
Example #11
def _encode_binary(data, subtype, json_options):
    if json_options.json_mode == JSONMode.LEGACY:
        return SON([
            ('$binary', base64.b64encode(data).decode()),
            ('$type', "%02x" % subtype)])
    return {'$binary': SON([
        ('base64', base64.b64encode(data).decode()),
        ('subType', "%02x" % subtype)])}
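For reference, a sketch of what the two shapes serialize to with bson.json_util; the sample bytes are made up:

from bson import Binary
from bson.json_util import dumps, LEGACY_JSON_OPTIONS, CANONICAL_JSON_OPTIONS

data = {'b': Binary(b'\x00\x01')}
print(dumps(data, json_options=LEGACY_JSON_OPTIONS))
# {"b": {"$binary": "AAE=", "$type": "00"}}
print(dumps(data, json_options=CANONICAL_JSON_OPTIONS))
# {"b": {"$binary": {"base64": "AAE=", "subType": "00"}}}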
Example #12
 def nearest_neighbors_query(cls, coordinate_field, lat, lon):
     return {
         coordinate_field:
         SON([("$near", {
             "$geometry":
             SON([("type", "Point"), ("coordinates", [lon, lat])])
         })])
     }
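A usage sketch, assuming the classmethod above lives on a hypothetical Places model backed by a pymongo collection; $near requires a 2dsphere index on the queried field:

# Hypothetical usage (Places and collection are assumptions):
collection.create_index([("location", "2dsphere")])
query = Places.nearest_neighbors_query("location", lat=35.88, lon=128.62)
for doc in collection.find(query).limit(10):
    print(doc["_id"])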
Example #13
def default(obj):
    # We preserve key order when rendering SON, DBRef, etc. as JSON by
    # returning a SON for those types instead of a dict. This works with
    # the "json" standard library in Python 2.6+ and with simplejson
    # 2.1.0+ in Python 2.5+, because those libraries iterate the SON
    # using PyIter_Next. Python 2.4 must use simplejson 2.0.9 or older,
    # and those versions of simplejson use the lower-level PyDict_Next,
    # which bypasses SON's order-preserving iteration, so we lose key
    # order in Python 2.4.
    if isinstance(obj, ObjectId):
        return {"$oid": str(obj)}
    if isinstance(obj, DBRef):
        return _json_convert(obj.as_doc())
    if isinstance(obj, datetime.datetime):
        # TODO share this code w/ bson.py?
        if obj.utcoffset() is not None:
            obj = obj - obj.utcoffset()
        millis = int(calendar.timegm(obj.timetuple()) * 1000 +
                     obj.microsecond / 1000)
        return {"$date": millis}
    if isinstance(obj, (RE_TYPE, Regex)):
        flags = ""
        if obj.flags & re.IGNORECASE:
            flags += "i"
        if obj.flags & re.LOCALE:
            flags += "l"
        if obj.flags & re.MULTILINE:
            flags += "m"
        if obj.flags & re.DOTALL:
            flags += "s"
        if obj.flags & re.UNICODE:
            flags += "u"
        if obj.flags & re.VERBOSE:
            flags += "x"
        if isinstance(obj.pattern, str):
            pattern = obj.pattern
        else:
            pattern = obj.pattern.decode('utf-8')
        return SON([("$regex", pattern), ("$options", flags)])
    if isinstance(obj, MinKey):
        return {"$minKey": 1}
    if isinstance(obj, MaxKey):
        return {"$maxKey": 1}
    if isinstance(obj, Timestamp):
        return SON([("t", obj.time), ("i", obj.inc)])
    if isinstance(obj, Code):
        return SON([('$code', str(obj)), ('$scope', obj.scope)])
    if isinstance(obj, Binary):
        return SON([
            ('$binary', base64.b64encode(obj).decode()),
            ('$type', "%02x" % obj.subtype)])
    if PY3 and isinstance(obj, binary_type):
        return SON([
            ('$binary', base64.b64encode(obj).decode()),
            ('$type', "00")])
    if bson.has_uuid() and isinstance(obj, bson.uuid.UUID):
        return {"$uuid": obj.hex}
    raise TypeError("%r is not JSON serializable" % obj)
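A minimal illustration of the order-preservation point made in the comment above (on Python 3.7+ plain dicts are also insertion-ordered, but SON guaranteed this on Python 2):

import json
from bson.son import SON

# SON is a dict subclass that iterates in insertion order, so the standard
# json module emits "$regex" before "$options".
print(json.dumps(SON([("$regex", "^a"), ("$options", "i")])))
# {"$regex": "^a", "$options": "i"}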
Example #14
def MRsimple(collection,
             FunMap,
             FunReduce=None,
             query={},
             out={"replace": 'mr_tmp'},
             finalize=None,
             scope={},
             sort=None,
             jsMode=False,
             verbose=1):
    """ simplified generic Map Reduce
        see: http://docs.mongodb.org/manual/reference/method/db.collection.mapReduce/
        returns (MR response object, results collection or results list if out={"inline":1})
        Reduce function defaults to one that increments value count
        optimize by sorting on emit fields
        see: http://edgystuff.tumblr.com/post/7624019777/optimizing-map-reduce-with-mongodb
        docs.mongodb.org/manual/reference/method/db.collection.mapReduce/#db.collection.mapReduce
        sort      i.e: sort= { "_id":1 }
        jsMode    should be False if we expect more than 500K distinct ids
    """
    if len(out.viewkeys()) > 1:
        command = MRCommand_(out)
        out = SON([(command, out[command]), ('db', out.get('db')),
                   ('nonAtomic', out.get('nonAtomic', False))])
        #nonAtomic not allowed on replace
    FunMap = Code(FunMap, {})
    if FunReduce is None:
        FunReduce = u"""function (key, values) {var total = 0; for (var i = 0;
                        i < values.length; i++) { total += values[i]; } return total;}
                     """
    FunReduce = Code(FunReduce, {})
    if verbose > 2:
        print "Start MRsimple collection = %s"\
              "query = %s\nMap=\n%s\nReduce=\n%s\nFinalize=%s\nscope=%s sort=%s" \
              % tuple(map(str, (out, query, FunMap, FunReduce, finalize, scope, sort)))
    if sort:
        sort = SON(sort)
    r = collection.map_reduce(FunMap,
                              FunReduce,
                              out=out,
                              query=query,
                              finalize=finalize,
                              scope=scope,
                              sort=sort,
                              full_response=True)
    if verbose > 1:
        print  "End MRsimple collection=%s, query=%s\nresulsts=\n %s"\
                % (collection.name, str(query), str(r))
    if 'db' in out.viewkeys():
        #@note:  can be dict or SON, either way it has property viewkeys
        results = collection.database.connection[r['result']['db']][
            r['result']['collection']]
    else:
        results = r['results'] if out.keys(
        )[0] == 'inline' else collection.database[r['result']]
        #@note:  results is a list if inline else a collection
    return r, results
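A hedged usage sketch for MRsimple, in the same Python 2 style as the function above; db.events and the map function are made up:

# Hypothetical usage: count documents per value of "tag", inline results.
fun_map = u"function () { emit(this.tag, 1); }"
r, results = MRsimple(db.events, fun_map,
                      query={"processed": True},
                      out={"inline": 1},  # single key, so `out` passes through
                      sort={"_id": 1},    # the sort optimization from the docstring
                      verbose=0)
for row in results:                       # a list, since out={"inline": 1}
    print row["_id"], row["value"]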
Example #15
 def test_sort(self):
     self.assertEqual(pql.sort('a'),
                      [{
                          '$sort': SON([('a', pymongo.ASCENDING)])
                      }])
     self.assertEqual(pql.sort(['a', '-b', '+c']), [{
         '$sort':
         SON([('a', pymongo.ASCENDING), ('b', pymongo.DESCENDING),
              ('c', pymongo.ASCENDING)])
     }])
Example #16
async def startGame(db, state):
    state["in_progress"] = True
    now = str(datetime.datetime.now())
    state["last_updated"] = now

    await db.games.update_one({"_id": state["_id"]},
                              SON([("$set",
                                    SON([("in_progress", True),
                                         ("last_updated",
                                          state["last_updated"])]))]))
    await generateRandomPlay(db, state)
Example #17
def map_response_data_to_census():
    response_census = mongo_db.response_census

    x = response_census.find({
        "loc": {
            '$geoWithin': {
                # Note: 'loc' here is a placeholder string, not real coordinates.
                '$geometry': SON([("type", "Polygon"), ("coordinates", 'loc')])
            }
        }
    })
    return x
Example #18
def _make_bson_doc(uid: str, df: pd.DataFrame, metadata) -> SON:
    """
    Takes a DataFrame and makes a BSON document ready to be inserted
    into MongoDB. Given Conritick's focus on timeseries data, the input
    DataFrame index must be a DatetimeIndex.
    Column names are kept and saved as strings.
    Index name is explicitly discarded and not saved.

    :param uid: Unique ID for the timeseries represented by the input DataFrame
    :param df: Input DataFrame
    :param metadata: Any BSON-able objects to be attached to document as metadata
    :return: BSON document
    """
    mem_usage = df.memory_usage().sum()
    df = df.sort_index(ascending=True)

    if df.index.tzinfo is None:
        if not all(ix.time() == datetime.time(0, 0) for ix in df.index[:100]):
            # Issue warning only if DataFrame doesn't look like EOD based.
            warnings.warn('DatetimeIndex is timezone-naive. Assuming to be in UTC.')
        offset = None
    else:
        offset = df.index.tzinfo._utcoffset.total_seconds() / 60

    # Remove invalid MongoDB field characters
    df = df.rename(columns=lambda x: re.sub(r'\.', '', str(x)))
    index = _make_bson_column(df.index)
    columns = SON()
    for col in df.columns:
        columns[col] = _make_bson_column(df[col])

    nrows = len(df)
    binary_size = sum([columns[col]['size'] for col in df.columns])
    binary_size += index['size']
    compression_ratio = binary_size / mem_usage
    if binary_size > 0.95 * MAX_BSON_SIZE:
        msg = f'Binary data size is too large ({binary_size:,} / {compression_ratio:.1%})'
        raise InvalidBSON(msg, compression_ratio)
    logger.debug(f'{uid} document: {binary_size:,} bytes ({compression_ratio:.1%}), {nrows} rows')
    add_meta = {'nrows': nrows, 'binary_size': binary_size, 'utc_offset': offset}
    metadata = {**metadata, **add_meta}

    doc = SON([
        ('uid', uid),
        ('start', df.index[0]),
        ('end', df.index[-1]),
        ('metadata', metadata),
        ('index', index),
        ('columns', columns)])

    return doc
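A usage sketch under the docstring's constraints; the uid, frame, and target collection are made up:

# Hypothetical usage: the input index must be a DatetimeIndex.
import pandas as pd

idx = pd.date_range('2021-01-01', periods=3, freq='D', tz='UTC')
df = pd.DataFrame({'close': [1.0, 2.0, 3.0]}, index=idx)
doc = _make_bson_doc('AAPL', df, metadata={'source': 'demo'})
db.timeseries.insert_one(doc)  # doc is a SON, directly insertable (db assumed)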
Example #19
async def msg_submit_form(request):
    form = await request.post()  #get the form
    text = form['msg_input']  # get the form data

    output_result.append(text)  # store the previous data in an array

    db = request.app['db']  # request the db
    try:
        #check if the input from form matches any of the data in db
        msg_db = await db.command(
            SON([("distinct", "collection"), ("key", "msg"),
                 ("query", {
                     "msg": text
                 })]))
        try:
            msg_db = msg_db['values'][0]

        except Exception as e:
            print(e, 1)
            result = "can you say it again"
            return {"result": result, "output_result": output_result}

        if msg_db == text:
            try:
                # if it matches, retrieve the required data from the collection
                option1 = await db.command(
                    SON([("distinct", "collection"), ("key", "option1"),
                         ("query", {
                             "msg": text
                         })]))
                option2 = await db.command(
                    SON([("distinct", "collection"), ("key", "option2"),
                         ("query", {
                             "msg": text
                         })]))
                option1 = option1['values'][0]
                option2 = option2['values'][0]
                return {
                    "form": [option1, option2],
                    "output_result": output_result
                }

            except Exception:
                result = "SON failure"
                return {"result": result, "output_result": output_result}

    except Exception as e:
        print(e, 2)
        result = "can you say it again"
        return {"result": result, "output_result": output_result}
Example #20
async def addNewPlayerToGame(db, state, player_id, playerName):
    newPlayer = {
        "id": player_id,
        "name": playerName,
        "score": {
            "targets_current_round": 0,
            "target_rounds": [0],
            "targets_previous_rounds": [],
            "shops_joined": [],
            "bonuses": [],
            "saladcop_bonus": 0
        },
        "moves": [],
        "numBonusMovesPlayed": 0,
        "cells_connected_to_shops": {},
        "targets_connected_to_shops": {},
        "bonusLines": 0,
        "connected_targets": {},
        "connected_target_types": {},
        "connected_shops": {},
        "shop_pairs": {},
        "active_cells": {}
    }

    # Add permutation
    perms = db.permutations.aggregate([{"$sample": {"size": 1}}])
    permlist = await perms.to_list(1)
    myPerm = permlist[0]["perm"]
    newPlayer["perm"] = myPerm

    # Add pairs of shops
    for letter in list(set(myPerm)):
        i = myPerm.index(letter)
        j = myPerm.index(letter, i + 1)
        shopi = "shop" + str(i)
        shopj = "shop" + str(j)
        newPlayer["shop_pairs"][shopi] = shopj
        newPlayer["shop_pairs"][shopj] = shopi

    state["players"].append(newPlayer)

    now = str(datetime.datetime.now())
    state["last_updated"] = now
    await db.games.update_one({"_id": state["_id"]},
                              SON([("$push", SON([("players", newPlayer)])),
                                   ("$set",
                                    SON([
                                        ("last_updated",
                                         state["last_updated"]),
                                    ]))]))
Example #21
def default(obj):
    # We preserve key order when rendering SON, DBRef, etc. as JSON by
    # returning a SON for those types instead of a dict.
    if isinstance(obj, ObjectId):
        return {"$oid": str(obj)}
    if isinstance(obj, DBRef):
        return _json_convert(obj.as_doc())
    if isinstance(obj, datetime.datetime):
        # TODO share this code w/ bson.py?
        if obj.utcoffset() is not None:
            obj = obj - obj.utcoffset()
        millis = int(
            calendar.timegm(obj.timetuple()) * 1000 + obj.microsecond / 1000)
        return {"$date": millis}
    if isinstance(obj, (RE_TYPE, Regex)):
        flags = ""
        if obj.flags & re.IGNORECASE:
            flags += "i"
        if obj.flags & re.LOCALE:
            flags += "l"
        if obj.flags & re.MULTILINE:
            flags += "m"
        if obj.flags & re.DOTALL:
            flags += "s"
        if obj.flags & re.UNICODE:
            flags += "u"
        if obj.flags & re.VERBOSE:
            flags += "x"
        if isinstance(obj.pattern, text_type):
            pattern = obj.pattern
        else:
            pattern = obj.pattern.decode('utf-8')
        return SON([("$regex", pattern), ("$options", flags)])
    if isinstance(obj, MinKey):
        return {"$minKey": 1}
    if isinstance(obj, MaxKey):
        return {"$maxKey": 1}
    if isinstance(obj, Timestamp):
        return {"$timestamp": SON([("t", obj.time), ("i", obj.inc)])}
    if isinstance(obj, Code):
        return SON([('$code', str(obj)), ('$scope', obj.scope)])
    if isinstance(obj, Binary):
        return SON([('$binary', base64.b64encode(obj).decode()),
                    ('$type', "%02x" % obj.subtype)])
    if PY3 and isinstance(obj, bytes):
        return SON([('$binary', base64.b64encode(obj).decode()),
                    ('$type', "00")])
    if isinstance(obj, uuid.UUID):
        return {"$uuid": obj.hex}
    raise TypeError("%r is not JSON serializable" % obj)
Example #22
    def _start_authentication(self, response, error=None):
        # this is the nonce response
        if error:
            logging.error(error)
            logging.error(response)
            raise AuthenticationError(error)
        nonce = response['data'][0]['nonce']
        key = helpers._auth_key(nonce, self.__dbuser, self.__dbpass)

        self.__callback = self._finish_authentication
        self._send_message(
            message.query(
                0, "%s.$cmd" % self.__pool._dbname, 0, 1,
                SON([('authenticate', 1), ('user', self.__dbuser),
                     ('nonce', nonce), ('key', key)]), SON({})))
Example #23
    def test_to_mongo(self):
        doc = EmbeddedDocument(name='Bob')
        value = self.field.to_mongo(doc)
        self.assertIsInstance(value, SON)
        self.assertEqual(value, SON({'name': 'Bob'}))

        son = value
        value = self.field.to_mongo(son)
        self.assertIsInstance(value, SON)
        self.assertEqual(value, SON({'name': 'Bob'}))

        value = self.field.to_mongo({'name': 'Bob'})

        self.assertIsInstance(value, SON)
        self.assertEqual(value, SON({'name': 'Bob'}))
Example #24
    def from_json(cls, json_str, created=False, *args, **kwargs):
        """
        Decode from human-readable JSON.

        Parameters:
            json_str: JSON string to be deserialized
            created: a parameter that is passed to cls._from_son.
            *args, **kwargs: Any additional arguments that are passed to
                json.loads.

        """
        from .fields import FollowReferenceField
        kwargs.setdefault("object_hook", generate_object_hook(cls))
        dct = json.loads(json_str, *args, **kwargs)
        for name, fld in cls._fields.items():
            if any([
                    getattr(fld, "exclude_from_json", None),
                    getattr(fld, "exclude_json", None)
            ]):
                dct.pop(name, None)
        from_son_result = cls._from_son(SON(dct), created=created)

        for fldname, fld in cls._fields.items():
            target = fld.field if isinstance(fld, db.ListField) else fld

            if not isinstance(target, db.ReferenceField) or \
                    isinstance(target, FollowReferenceField):
                continue

            value = dct.get(fldname)
            setattr(from_son_result, fldname,
                    normalize_reference(getattr(value, "id", value), target))
        return from_son_result
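A hedged sketch of how this classmethod behaves, assuming a mongoengine-style model that mixes it in and fields accepting the exclude_from_json flag checked above (all names hypothetical):

from mongoengine import Document, StringField

class User(Document):  # assumed to inherit the from_json above
    name = StringField()
    email = StringField(exclude_from_json=True)  # dropped before _from_son

user = User.from_json('{"name": "Ann", "email": "a@example.com"}')
assert user.name == "Ann"
assert user.email is None  # excluded during decoding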
Example #25
 def get_result2(cls, collection, fields_dict, match, limit, skip):
     sum_field = cls.create_sum_field(fields_dict)
     groups2 = cls.create_groups2(fields_dict, sum_field)
     total_count = len(collection.aggregate([
         {
             "$match": match
         },
         {
             "$group": groups2
         },
         {
             "$sort": SON([("_id", 1)])
         },
     ]).get('result')) if not skip else 0
     res = collection.aggregate([{
         "$match": match
     }, {
         "$group": groups2
     }, {
         "$sort": {
             'day': 1
         }
     }, {
         "$skip": skip
     }, {
         "$limit": limit
     }])
     result = res.get('result')
     for r in result:
         fields = r.get('_id')
         r.pop('_id')
         r.update(fields)
     return total_count, result
Example #26
 def test_get_more_iteration(self):
     # Insert 5 documents with WC majority.
     self.coll.insert_many([{'data': k} for k in range(5)])
     # Start a find operation and retrieve first batch of results.
     batch_size = 2
     cursor = self.coll.find(batch_size=batch_size)
     for _ in range(batch_size):
         cursor.next()
     # Force step-down the primary.
     res = self.client.admin.command(
         SON([("replSetStepDown", 5), ("force", True)]))
     self.assertEqual(res["ok"], 1.0)
     # Get next batch of results.
     for _ in range(batch_size):
         cursor.next()
     # Verify pool not cleared.
     self.verify_pool_not_cleared()
     # Attempt insertion to mark server description as stale and prevent a
     # notMaster error on the subsequent operation.
     try:
         self.coll.insert_one({})
     except NotMasterError:
         pass
     # Next insert should succeed on the new primary without clearing pool.
     self.coll.insert_one({})
     self.verify_pool_not_cleared()
Example #27
def _index_document(index_list):
    """Helper to generate an index specifying document.

    Takes a list of (key, direction) pairs.
    """
    if isinstance(index_list, abc.Mapping):
        raise TypeError("passing a dict to sort/create_index/hint is not "
                        "allowed - use a list of tuples instead. did you "
                        "mean %r?" % list(iteritems(index_list)))
    elif not isinstance(index_list, (list, tuple)):
        raise TypeError("must use a list of (key, direction) pairs, "
                        "not: " + repr(index_list))
    if not len(index_list):
        raise ValueError("key_or_list must not be the empty list")

    index = SON()
    for (key, value) in index_list:
        if not isinstance(key, string_type):
            raise TypeError("first item in each key pair must be a string")
        if not isinstance(value, (string_type, int, abc.Mapping)):
            raise TypeError("second item in each key pair must be 1, -1, "
                            "'2d', 'geoHaystack', or another valid MongoDB "
                            "index specifier.")
        index[key] = value
    return index
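A quick sketch of accepted and rejected inputs, read directly from the checks above:

_index_document([("user_id", 1), ("created", -1)])
# -> SON([("user_id", 1), ("created", -1)])

# Rejected inputs:
# _index_document({"user_id": 1})  -> TypeError: use a list of tuples, not a dict
# _index_document([])              -> ValueError: empty list not allowed
# _index_document([(1, "text")])   -> TypeError: keys must be strings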
Example #28
    def update(self, document_id, update_spec, namespace, timestamp):
        """Apply updates given in update_spec to the document whose id
        matches that of doc.

        """
        db, coll = self._db_and_collection(namespace)

        meta_collection_name = self._get_meta_collection(namespace)

        self.meta_database[meta_collection_name].replace_one(
            {
                self.id_field: document_id,
                "ns": namespace
            }, {
                self.id_field: document_id,
                "_ts": timestamp,
                "ns": namespace
            },
            upsert=True)

        no_obj_error = "No matching object found"
        updated = self.mongo[db].command(SON([('findAndModify', coll),
                                              ('query', {
                                                  '_id': document_id
                                              }), ('update', update_spec),
                                              ('new', True)]),
                                         allowable_errors=[no_obj_error
                                                           ])['value']
        return updated
Example #29
def messages_per_hours(db):
    """
    Number of messages per hour of the day
    :param db: database connection
    :return:
    """
    pipeline = [{
        "$project": {
            "hour": {
                "$hour": "$date"
            }
        }
    }, {
        "$unwind": "$hour"
    }, {
        "$group": {
            "_id": "$hour",
            "count": {
                "$sum": 1
            }
        }
    }, {
        "$sort": SON([("_id", 1)])
    }]

    data = list(db.messages.aggregate(pipeline))
    return ChartData(map(operator.itemgetter('_id'), data),
                     map(operator.itemgetter('count'), data))
Example #30
def most_mentions(db):
    print(
        "3. Who are the most mentioned Twitter users? (Provide the top five.)"
    )
    pipeline = [{
        "$match": {
            "text": {
                "$regex": "@\\w*"
            }
        }
    }, {
        "$group": {
            "_id": {
                "$substrCP": [
                    "$text", {
                        "$indexOfCP": ["$text", "@"]
                    }, {
                        "$indexOfCP": ["$text", " "]
                    }
                ]
            },
            "count": {
                "$sum": 1
            }
        }
    }, {
        "$sort": SON([("count", -1), ("user", -1)])
    }, {
        "$limit": 5
    }]
    result = db.tweets.aggregate(pipeline)
    pp_all(result)
    """