Example #1
    def test_datetime_copy_pickle(self):
        d = datetime.datetime(2010, 5, 5, tzinfo=utc)
        t = Timestamp(d, 0)

        dc = copy.deepcopy(d)
        self.assertEqual(dc, t.as_datetime())

        dp = pickle.loads(pickle.dumps(d))
        self.assertEqual(dp, t.as_datetime())
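
These test methods assume module-level imports in the enclosing test file; a likely set, assuming PyMongo's bson package (Timestamp from bson.timestamp, the utc object from bson.tz_util), would be:

import copy
import datetime
import pickle

from bson.timestamp import Timestamp
from bson.tz_util import utc
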
Example #2
    def test_datetime_copy_pickle(self):
        d = datetime.datetime(2010, 5, 5, tzinfo=utc)
        t = Timestamp(d, 0)

        dc = copy.deepcopy(d)
        self.assertEqual(dc, t.as_datetime())

        for protocol in [0, 1, 2, -1]:
            pkl = pickle.dumps(d, protocol=protocol)
            dp = pickle.loads(pkl)
            self.assertEqual(dp, t.as_datetime())
Example #4
 def create_password_reset_token(self, user):
     import os
     from bson.timestamp import Timestamp
     from datetime import datetime
     from hashlib import md5
     from backend.config import SAVVY_LOGIN_EXPIRATION
     # Expiry: a BSON Timestamp for the current time plus the configured expiration window.
     expires = Timestamp(datetime.now() + SAVVY_LOGIN_EXPIRATION, 1)
     # Token: the last 8 hex characters of an MD5 digest over random bytes, upper-cased.
     token = md5(os.urandom(512)).hexdigest()[-8:].upper()
     self.db.users.update_one({"username": user.username},
                              {
                                  "$set": {
                                      "password_reset_token": (token, expires)
                                  }
                              })
     return token, expires.as_datetime()
Example #5
    def initial_sync(self):  # method that starts the initial collection dump and then spawns the writer
        print(self.dbnames)

        time_t = time.time()
        time_log = Timestamp(int(time_t) - 1, 0)
        times = Timestamp(int(time_t), 0)
        curr_time = times.as_datetime()

        self.target_mongos['sync_log']['init_sync'].insert({'ts': time_log})
        self.master_mongos['sync_log']['init_sync'].insert({'ts': time_log})
        for name in self.replnames:
            print(name)
            self.target_mongos['sync_log'][name].insert({'ts': time_log})
            self.master_mongos['sync_log'][name].insert({'ts': time_log})
        self.last_sync = time_log  # set the last sync time to the current time and push it to the database

        threads = []

        for shard in self.master_shards:
            for dbname in self.dbnames:  # loop through all databases that you want to replicate
                if dbname in shard.database_names():  # if the database is on the shard
                    identity = shard.address
                    print("Replicating database: %s , on Shard: %s: %s" %
                          (dbname, identity[0], identity[1]))
                    db = shard[dbname]
                    colls = db.collection_names(
                        include_system_collections=False)
                    for coll in colls:  # spawn collection dumper threads for all collections within the database
                        coll_dumper = Thread(target=self.dump_collection,
                                             args=(
                                                 db,
                                                 dbname,
                                                 coll,
                                             ))
                        threads.append(coll_dumper)
                        coll_dumper.start()

        for thread in threads:  # wait on all dumper threads before moving on to write oplog operations
            thread.join()

        print("Finished inital sync, took")
        print(time.time() - time_t)
        self.start_listening()  # start tailing on all shards
Example #7
 def test_datetime(self):
     d = datetime.datetime(2010, 5, 5, tzinfo=utc)
     t = Timestamp(d, 0)
     self.assertEqual(1273017600, t.time)
     self.assertEqual(d, t.as_datetime())
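
For reference, a minimal standalone sketch of the round trip these examples exercise; it assumes only that PyMongo's bson package is installed, and the printed values match what the tests above assert:

import datetime

from bson.timestamp import Timestamp
from bson.tz_util import utc

d = datetime.datetime(2010, 5, 5, tzinfo=utc)
t = Timestamp(d, 0)          # stored as whole seconds since the epoch plus an increment
print(t.time)                # 1273017600
print(t.as_datetime())       # 2010-05-05 00:00:00+00:00 (timezone-aware, UTC)
print(t.as_datetime() == d)  # True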