# --- Example n. 1 ---
class Client:
    """MongoDB-backed store of chat members and their friend lists.

    Documents in the "Clients" collection have the shape:
    {"chat_id": ..., "full name": ..., "friends": [...]}
    """

    def __init__(self, host, db):
        # One client/database/collection handle per instance.
        self.client = MongoClient(host)
        self.db = self.client.get_database(db)
        self.lists = self.db.get_collection("Clients")

    def create_new_member(self, chat_id, full_name):
        """Create (or reset) the member document for chat_id."""
        # upsert=True: insert when absent, overwrite when present.
        self.lists.replace_one(
            {"chat_id": chat_id},
            {"chat_id": chat_id, "full name": full_name, "friends": []},
            upsert=True)

    def add_friend_to_list(self, chat_id, friend):
        """Append friend to the member's friends array."""
        self.lists.update_one({"chat_id": chat_id},
                              {"$push": {"friends": friend}})

    def get_doc(self, chat_id):
        """Return the raw member document, or None if absent."""
        return self.lists.find_one({"chat_id": chat_id})

    def get_all_friends(self, chat_id):
        """Return the friends array for chat_id (TypeError if absent)."""
        client = self.lists.find_one({"chat_id": chat_id})
        return client["friends"]

    def delete_friend(self, chat_id, friend):
        """Remove friend from the member's friends array.

        Bug fix: the original filter used 'friends'[0] (i.e. the string
        'f') as a field name and called delete_one, which would delete a
        whole member document matching {"f": friend} -- never the intended
        array entry. $pull removes the value from the array instead.
        """
        self.lists.update_one({"chat_id": chat_id},
                              {"$pull": {"friends": friend}})
    def test_read_with_failover(self):
        """A cursor reading from a secondary must survive a primary failure.

        Integration test: requires a live 3-member replica set managed by
        ha_tools (legacy Python 2 era code: xrange, Collection.count()).
        """
        c = MongoClient(
            self.seed,
            replicaSet=self.name,
            serverSelectionTimeoutMS=self.server_selection_timeout)
        wait_until(lambda: c.primary, "discover primary")
        wait_until(lambda: len(c.secondaries) == 2, "discover secondaries")

        def iter_cursor(cursor):
            # Drain the cursor; propagates any error raised mid-read.
            for _ in cursor:
                pass
            return True

        # w = all members (1 primary + 2 secondaries): wait until the
        # inserts are fully replicated before reading from a secondary.
        w = len(c.secondaries) + 1
        db = c.get_database("pymongo_test",
                            write_concern=WriteConcern(w=w))
        db.test.delete_many({})
        # Force replication
        db.test.insert_many([{'foo': i} for i in xrange(10)])
        self.assertEqual(10, db.test.count())

        db.read_preference = SECONDARY_PREFERRED
        cursor = db.test.find().batch_size(5)
        next(cursor)
        # First batch (5 docs) fetched; _Cursor__retrieved is private state.
        self.assertEqual(5, cursor._Cursor__retrieved)
        self.assertTrue(cursor.address in c.secondaries)
        ha_tools.kill_primary()
        # Primary failure shouldn't interrupt the cursor
        self.assertTrue(iter_cursor(cursor))
        self.assertEqual(10, cursor._Cursor__retrieved)
# --- Example n. 3 ---
 def __init__(self):
     """Open the configured MongoDB connection and bind the target collection."""
     client = MongoClient(settings['MONGO_SERVER'], settings['MONGO_PORT'])
     database = client.get_database(settings['MONGO_DB'])
     self.collection = database[settings['MONGO_COLLECTION']]
# --- Example n. 4 ---
    def test_read_with_failover(self):
        """A cursor reading from a secondary must survive a primary failure.

        Integration test: requires a live 3-member replica set managed by
        ha_tools (legacy Python 2 era code: xrange, Collection.count()).
        """
        c = MongoClient(
            self.seed,
            replicaSet=self.name,
            serverSelectionTimeoutMS=self.server_selection_timeout)
        wait_until(lambda: c.primary, "discover primary")
        wait_until(lambda: len(c.secondaries) == 2, "discover secondaries")

        def iter_cursor(cursor):
            # Drain the cursor; propagates any error raised mid-read.
            for _ in cursor:
                pass
            return True

        # w = all members (1 primary + 2 secondaries): wait until the
        # inserts are fully replicated before reading from a secondary.
        w = len(c.secondaries) + 1
        db = c.get_database("pymongo_test",
                            write_concern=WriteConcern(w=w))
        db.test.delete_many({})
        # Force replication
        db.test.insert_many([{'foo': i} for i in xrange(10)])
        self.assertEqual(10, db.test.count())

        db.read_preference = SECONDARY_PREFERRED
        cursor = db.test.find().batch_size(5)
        next(cursor)
        # First batch (5 docs) fetched; _Cursor__retrieved is private state.
        self.assertEqual(5, cursor._Cursor__retrieved)
        self.assertTrue(cursor.address in c.secondaries)
        ha_tools.kill_primary()
        # Primary failure shouldn't interrupt the cursor
        self.assertTrue(iter_cursor(cursor))
        self.assertEqual(10, cursor._Cursor__retrieved)
# --- Example n. 5 ---
 def __init__(self):
     """Open the configured MongoDB connection and bind the target collection."""
     client = MongoClient(settings['MONGO_SERVER'], settings['MONGO_PORT'])
     database = client.get_database(settings['MONGO_DB'])
     self.collection = database[settings['MONGO_COLLECTION']]
# --- Example n. 6 ---
def updatePlayerStats():
    """Open handles to the "dota" database collections used for player stats.

    NOTE(review): this function appears truncated -- the trailing filter
    document is built but never passed to a find()/aggregate() call;
    confirm against the original source.
    """
    client = MongoClient()
    db = client.get_database("dota")
    matches: Collection = db.match_details
    players: Collection = db.players

    # Bug fix: `null` is not a Python name (it raised NameError at runtime);
    # MongoDB's null is spelled None in PyMongo queries.
    skill_filter = {"$and": [{"skill": {"$ne": None}}, {"skill": {"$ne": 1}}]}
# --- Example n. 7 ---
def stash(results):
    """Stash scraped results into the local MongoDB ("theforce" database).

    results: iterable of (item_model, objs) pairs, where item_model['name']
    names the target collection and objs is a list of documents.
    Prints a per-collection summary of how many documents were inserted.
    """
    summary = {}
    mongo = MongoClient(**config.mongo)
    try:
        for item_model, objs in results:
            collection_name = item_model['name']
            db = mongo.get_database('theforce')
            collection = db.get_collection(collection_name)
            collection.insert_many(objs)
            # Accumulate insert counts per collection name.
            summary[collection_name] = len(
                objs) if collection_name not in summary else len(
                    objs) + summary[collection_name]

        # Fixed: the original used Python 2 print statements, which are a
        # syntax error under Python 3 (used elsewhere in this file).
        print()
        print("=" * 40)
        print(' ' * 15, u'Stash')
        print("=" * 40)
        print()
        print(u"数据已成功保存到MongoDB的theforce库中,其中新增数据:")
        for name, length in summary.items():
            print(name, length)
    finally:
        # Always release the client, even if insert_many fails.
        mongo.close()
# --- Example n. 8 ---
class giftList():
    """MongoDB-backed gift catalogue keyed by link."""

    def __init__(self, host, db):
        self.client = MongoClient(host)
        self.db = self.client.get_database(db)
        self.gifts = self.db.get_collection("gifts")
        # NOTE(review): initial() is not defined in this file -- presumably
        # a module-level seeding helper; confirm it exists at the call site.
        initial()

    def add_gift(self, type, price, link):
        """Insert or overwrite the gift document identified by link.

        `type` shadows the builtin but is kept for caller compatibility.
        """
        self.gifts.replace_one({"link": link},
                               {"type": type, "price": price, "link": link},
                               upsert=True)

    def get_gifts_by_cond(self, type, price):
        """Return gifts of `type` whose price lies within the "lo hi" range.

        price: string whose first two whitespace-separated tokens are the
        inclusive integer bounds (implemented as exclusive $gt/$lt bounds
        widened by one on each side).
        """
        bounds = price.split()
        start = int(bounds[0]) - 1
        end = int(bounds[1]) + 1
        cursor = self.gifts.find({
            "type": type,
            "price": {"$gt": start, "$lt": end}
        })
        # Fixed idiom: the original accumulated into a local named `list`,
        # shadowing the builtin, via a manual append loop.
        return list(cursor)
def mongo_text_test():
    """Ad-hoc check: count documents whose description matches an escaped,
    case-insensitive regex, against 10 freshly inserted documents.

    Fixed: the final Python 2 print statement (a syntax error under
    Python 3, which this file uses elsewhere) is now a print() call.
    """
    cli = MongoClient()
    test = cli.get_database("test").get_collection("test_search")
    # Compound index with three text fields, built in the background.
    test.create_index(
        [("super_type", ASCENDING), ("resource_state", ASCENDING),
         ("uuid", TEXT), ("name", TEXT), ("description", TEXT)],
        background=True)
    docs = [{
        "uuid": str(uuid.uuid4()),
        "name": str(i),
        "description": "Nature, \"time, and patience are "
                       "the three great -physicians.",
        "super_type": "super_vol",
        "type": "vol",
        "create_time": int(time.time()),
        "resource_state": "inUse"
    } for i in range(1, 11)]
    test.insert_many(docs)

    # Escape regex metacharacters in each needle and OR them together.
    text = [" -physicians"]
    pattern = "|".join([
        re.sub(r"(\*|\.|\?|\+|\$|\^|\[|\]|\(|\)|\{|\}|\||\\|/)", r"\\\1", g)
        for g in text
    ])
    print(test.find({
        "description": {
            "$regex": pattern,
            "$options": "i"
        }
    }).count())
# --- Example n. 10 ---
def init_db(app):
    """Register the teardown hook and ensure the events text index exists."""
    app.teardown_appcontext(close_db)

    # Module-level Mongo client, kept for text indexing.
    global client
    client = MongoClient(Config.MONGO_URL)
    events_collection = client.get_database('rokwire')['eventsmanager_events']
    events_collection.create_index([("title", pymongo.TEXT)])
# --- Example n. 11 ---
def init_db():
    """Create the Mongo client and the unique app-config version index."""
    global client
    client = MongoClient(cfg.APP_CONFIG_MONGO_URL)

    # Ensure the version index exists before the API starts serving.
    database = client.get_database(name=cfg.APP_CONFIG_DB_NAME)
    configs_collection = database[cfg.APP_CONFIGS_COLLECTION]
    configs_collection.create_index([("mobileAppVersion", pymongo.DESCENDING)],
                                    unique=True)
    def test_get_database(self):
        """get_database should honour explicit codec options, read
        preference and write concern, and fall back to client defaults
        otherwise.

        NOTE(review): exercises the legacy pymongo 2.x API surface
        (uuid_subtype, tag_sets, dict-valued write_concern).
        """
        # _connect=False: build the client without reaching a live server.
        client = MongoClient(host, port, _connect=False)
        codec_options = CodecOptions(
            tz_aware=True, uuid_representation=JAVA_LEGACY)
        write_concern = WriteConcern(w=2, j=True)
        db = client.get_database(
            'foo', codec_options, ReadPreference.SECONDARY, write_concern)
        self.assertEqual('foo', db.name)
        self.assertEqual(codec_options, db.codec_options)
        self.assertEqual(JAVA_LEGACY, db.uuid_subtype)
        self.assertEqual(ReadPreference.SECONDARY, db.read_preference)
        self.assertEqual([{}], db.tag_sets)
        self.assertEqual(write_concern.document, db.write_concern)

        # Keyword-only read preference: all other settings fall back to
        # the client defaults.
        pref = Secondary([{"dc": "sf"}])
        db = client.get_database('foo', read_preference=pref)
        self.assertEqual(pref.mode, db.read_preference)
        self.assertEqual(pref.tag_sets, db.tag_sets)
        self.assertEqual({}, db.write_concern)
        self.assertEqual(CodecOptions(), db.codec_options)
        self.assertEqual(PYTHON_LEGACY, db.uuid_subtype)
# --- Example n. 13 ---
def init_db():
    """Create the Mongo client and every index the events service queries on."""
    global client
    client = MongoClient(cfg.EVENT_MONGO_URL)

    # Build all indexes up front so queries are efficient from the start.
    events = client.get_database(name=cfg.EVENT_DB_NAME)['events']
    index_specs = [
        [("title", pymongo.TEXT)],
        [("startDate", pymongo.DESCENDING)],
        [("endDate", pymongo.DESCENDING)],
        [("category", pymongo.ASCENDING)],
        [("categorymainsub", pymongo.ASCENDING)],
        [("coordinates", pymongo.GEOSPHERE)],
    ]
    for spec in index_specs:
        events.create_index(spec)
# --- Example n. 14 ---
    def test_last_error(self):
        """After a stepdown, w=0 writes appear to succeed but db.error()
        reports 'not master'. Integration test against a live replica set.
        """
        c = MongoClient(self.seed,
                        replicaSet=self.name,
                        serverSelectionTimeoutMS=self.server_selection_timeout)
        wait_until(lambda: c.primary, "discover primary")
        wait_until(lambda: c.secondaries, "discover secondary")
        ha_tools.stepdown_primary()
        # w=0: fire-and-forget writes; errors only surface via getLastError.
        db = c.get_database("pymongo_test", write_concern=WriteConcern(w=0))

        db.test.insert_one({})
        # db.error() is the legacy getLastError helper.
        response = db.error()
        self.assertTrue('err' in response and 'not master' in response['err'])
        wait_until(lambda: len(c.secondaries) == 2, "discover two secondaries")
# --- Example n. 15 ---
class win_gifDB:
    """Collection of winner-GIF URLs, seeded from win_gifs.txt."""

    def __init__(self):
        self.client = MongoClient(settings.HOST)
        self.db = self.client.get_database(settings.DB)
        self.lists = self.db.get_collection(settings.WINNER_GIFS_COL)

    def add_all_to_DB(self):
        """Upsert every line of win_gifs.txt as a {"url": ...} document."""
        # Lines keep their trailing newline, matching the stored keys.
        with open("win_gifs.txt", "r") as gif_file:
            for url in gif_file:
                self.lists.replace_one({"url": url}, {"url": url}, upsert=True)

    def get_random_gif(self):
        """Return the URL of one uniformly random stored GIF."""
        total = self.lists.estimated_document_count()
        random_index = random.randrange(total)
        return self.lists.find()[random_index]['url']
# --- Example n. 16 ---
    def test_last_error(self):
        """After a stepdown, w=0 writes appear to succeed but db.error()
        reports 'not master'. Integration test against a live replica set.
        """
        c = MongoClient(
            self.seed,
            replicaSet=self.name,
            serverSelectionTimeoutMS=self.server_selection_timeout)
        wait_until(lambda: c.primary, "discover primary")
        wait_until(lambda: c.secondaries, "discover secondary")
        ha_tools.stepdown_primary()
        # w=0: fire-and-forget writes; errors only surface via getLastError.
        db = c.get_database(
            "pymongo_test", write_concern=WriteConcern(w=0))

        db.test.insert_one({})
        # db.error() is the legacy getLastError helper.
        response = db.error()
        self.assertTrue('err' in response and 'not master' in response['err'])
        wait_until(lambda: len(c.secondaries) == 2, "discover two secondaries")
# --- Example n. 17 ---
class WordsDB:
    """Dictionary of common words backed by MongoDB."""

    def __init__(self):
        self.client = MongoClient(settings.HOST)
        self.db = self.client.get_database(settings.DB)
        self.lists = self.db.get_collection(settings.WORD_COL)

    def add_all_to_DB(self):
        """Upsert the first token of each line of common_words.txt, lowercased."""
        with open("common_words.txt", "r") as f:
            for line in f:
                # Hoisted: the original computed split()[0].lower() twice
                # per line (once for the filter, once for the replacement).
                word = line.split()[0].lower()
                self.lists.replace_one({"word": word}, {"word": word},
                                       upsert=True)

    def get_word(self, word):
        """Return the stored document for word, or None if unknown."""
        return self.lists.find_one({"word": word})
# --- Example n. 18 ---
class Event:
    """MongoDB-backed event log (collection "events")."""

    def __init__(self, host, db):
        self.client = MongoClient(host)
        self.db = self.client.get_database(db)
        self.events = self.db.get_collection("events")

    def add_event(self, client_id, full_name, type, date, mark):
        """Insert one event document.

        `type` shadows the builtin but is kept for caller compatibility.
        """
        self.events.insert_one({
            "client_id": client_id,
            "name": full_name,
            "type": type,
            "date": date,
            "mark": mark,
        })

    def delete_event(self, name, type, date):
        """Delete a single event matching name/type/date exactly."""
        self.events.delete_one({"name": name, "type": type, "date": date})

    def get_events_by_date(self, date):
        """Return all events on `date` as a list.

        Fixed idiom (here and in the other getters): the original built the
        result with a manual append loop into a local named `list`,
        shadowing the builtin.
        """
        return list(self.events.find({"date": date}))

    def get_events_by_name(self, name):
        """Return all events for `name` as a list."""
        return list(self.events.find({"name": name}))

    def count_events(self):
        """Return the exact number of stored events."""
        return self.events.count_documents({})

    def get_all_events(self):
        """Return every stored event as a list."""
        return list(self.events.find({}))

    def get_upcoming_events(self, start, end):
        """Return events with start < date < end (exclusive bounds)."""
        return list(self.events.find({"date": {"$gt": start, "$lt": end}}))
# --- Example n. 19 ---
class Storage_store:
    """Store of face/ad match records in the "Matches" collection."""

    def __init__(self, host, db):
        self.client = MongoClient(host)
        self.db = self.client.get_database(db)
        self.my_DB = self.db.get_collection("Matches")

    def add_my_db(self, face_id, ad_id, store_id, img, time):
        """Upsert the match document keyed by the fixed counter id.

        NOTE(review): the replacement document does not carry 'db_id', so
        after the first replace the filter may no longer match and each
        call would upsert a new document -- confirm this is intended.
        """
        replacement = {
            'ad_id': ad_id,
            'store_id': store_id,
            'face_id': face_id,
            'time': time,
            'image': img.tolist(),  # img is array-like -- TODO confirm numpy
        }
        self.my_DB.replace_one({'db_id': "COUNTER DOCUMENT NUMBER 0"},
                               replacement,
                               upsert=True)

    def clear_table(self):
        """Delete every document in the Matches collection."""
        self.my_DB.delete_many({})
# --- Example n. 20 ---
def save(metas, batch_num=100):
    """Sync documents from MongoDB ("theforce") into MySQL, batch_num rows
    per executemany call.

    metas: iterable of meta objects exposing iter_model() and
    get_model_persist_attr_names().

    NOTE(review): genuine Python 2 code -- print statements,
    itertools.izip, and integer `/` division in the groupby key; porting
    to Python 3 would change the batching semantics unless `/` becomes `//`.
    """
    mongo = MongoClient(**config.mongo)
    db = MySQLdb.connect(**config.mysql)
    cursor = db.cursor()
    print
    print "=" * 40
    print ' ' * 15, u'Mongo --> MySQL'
    print "=" * 40
    print
    try:
        mongo_db = mongo.get_database('theforce')
        for meta in metas:
            for model_name, item_model in meta.iter_model():
                collection_name = item_model['name']
                table_name = item_model['table']
                attrs = meta.get_model_persist_attr_names(item_model)

                # Pull every document of this collection into memory.
                collection = mongo_db.get_collection(collection_name)
                results = [obj for obj in collection.find({})]
                sql = "insert into {0}({1}) values({2})".format(
                    table_name, ','.join(attrs),
                    ','.join(itertools.repeat('%s', len(attrs))))

                print
                print '-' * 40
                print u'开始处理{0}@mongo --> {1}@mysql, 共{2}条数据,每批{3}条批量迁移:'.format(
                    collection_name, table_name, len(results), batch_num)
                # Group rows into batches of batch_num via index / batch_num.
                results2 = itertools.izip(itertools.count(), results)
                for group_key, group_it in itertools.groupby(
                        results2, lambda item: item[0] / batch_num):
                    print '.',
                    values = [[obj[attr] for attr in attrs]
                              for index, obj in group_it]
                    cursor.executemany(sql, values)
                print u'[完成]'
    finally:
        # Close Mongo and MySQL handles even on failure.
        mongo.close()
        cursor.close()
        db.close()
# --- Example n. 21 ---
class UsersDB:
    """Per-user score store."""

    def __init__(self):
        self.client = MongoClient(settings.HOST)
        self.db = self.client.get_database(settings.DB)
        self.lists = self.db.get_collection(settings.USER_COL)

    def add_to_DB(self, id):
        """Create the user with score 0 unless it already exists."""
        if not self.lists.find_one({"user_id": id}):
            self.lists.insert_one({"user_id": id, "score": 0})

    def get_score(self, id):
        """Return the user's score (raises TypeError if the user is absent)."""
        user = self.lists.find_one({"user_id": id})
        score = user['score']
        return score

    def update_score(self, id, score):
        """Add `score` to the user's stored total.

        Fixed: the original read the score via get_score() and replaced the
        whole document, which crashes with TypeError when the user is
        missing and races with concurrent updates. $inc performs the
        addition atomically server-side; upsert seeds an absent user at
        exactly `score`, matching the old net result.
        """
        self.lists.update_one({"user_id": id},
                              {"$inc": {"score": score}},
                              upsert=True)
# --- Example n. 22 ---
    def test_writes_with_failover(self):
        """After the primary is killed, writes raise AutoReconnect until a
        new primary is elected, then succeed. Integration test requiring a
        live replica set managed by ha_tools (Python 2 era: xrange).
        """
        c = MongoClient(
            self.seed,
            replicaSet=self.name,
            serverSelectionTimeoutMS=self.server_selection_timeout)
        wait_until(lambda: c.primary, "discover primary")
        wait_until(lambda: len(c.secondaries) == 2, "discover secondaries")
        primary = c.primary
        # w = all members: replicate fully before acknowledging.
        w = len(c.secondaries) + 1
        db = c.get_database("pymongo_test",
                            write_concern=WriteConcern(w=w))
        db.test.delete_many({})
        db.test.insert_one({'foo': 'bar'})
        self.assertEqual('bar', db.test.find_one()['foo'])

        # kill_primary(9): SIGKILL, no clean shutdown.
        killed = ha_tools.kill_primary(9)
        self.assertTrue(bool(len(killed)))

        # Wait past pool's check interval, so it throws an error from
        # get_socket().
        time.sleep(1)

        # Verify that we only raise AutoReconnect, not some other error,
        # while we wait for new primary.
        for _ in xrange(10000):
            try:
                db.test.insert_one({'bar': 'baz'})

                # No error, found primary.
                break
            except AutoReconnect:
                time.sleep(.01)
        else:
            self.fail("Couldn't connect to new primary")

        # Found new primary.
        self.assertTrue(c.primary)
        self.assertTrue(primary != c.primary)
        self.assertEqual('baz', db.test.find_one({'bar': 'baz'})['bar'])
# --- Example n. 23 ---
    def test_writes_with_failover(self):
        """After the primary is killed, writes raise AutoReconnect until a
        new primary is elected, then succeed. Integration test requiring a
        live replica set managed by ha_tools (Python 2 era: xrange).
        """
        c = MongoClient(
            self.seed,
            replicaSet=self.name,
            serverSelectionTimeoutMS=self.server_selection_timeout)
        wait_until(lambda: c.primary, "discover primary")
        wait_until(lambda: len(c.secondaries) == 2, "discover secondaries")
        primary = c.primary
        # w = all members: replicate fully before acknowledging.
        w = len(c.secondaries) + 1
        db = c.get_database("pymongo_test",
                            write_concern=WriteConcern(w=w))
        db.test.delete_many({})
        db.test.insert_one({'foo': 'bar'})
        self.assertEqual('bar', db.test.find_one()['foo'])

        # kill_primary(9): SIGKILL, no clean shutdown.
        killed = ha_tools.kill_primary(9)
        self.assertTrue(bool(len(killed)))

        # Wait past pool's check interval, so it throws an error from
        # get_socket().
        time.sleep(1)

        # Verify that we only raise AutoReconnect, not some other error,
        # while we wait for new primary.
        for _ in xrange(10000):
            try:
                db.test.insert_one({'bar': 'baz'})

                # No error, found primary.
                break
            except AutoReconnect:
                time.sleep(.01)
        else:
            self.fail("Couldn't connect to new primary")

        # Found new primary.
        self.assertTrue(c.primary)
        self.assertTrue(primary != c.primary)
        self.assertEqual('baz', db.test.find_one({'bar': 'baz'})['bar'])
# --- Example n. 24 ---
class PagesDB:
    """Wiki page-title store seeded from minimal_data.csv."""

    def __init__(self):
        self.client = MongoClient(settings.HOST)
        self.db = self.client.get_database(settings.DB)
        self.lists = self.db.get_collection(settings.PAGE_COL)

    def add_all_to_DB(self):
        """Upsert every first-column CSV value, skipping excluded titles."""
        with open("minimal_data.csv", "r", encoding='utf-8') as csv_file:
            for row in csv.reader(csv_file, delimiter=","):
                title = row[0].lower()
                # Skip titles appearing (as whole words) in the exclude list.
                if re.search(r"\b" + re.escape(title) + r"\b",
                             settings.WIKI_EXLUDE_VALS):
                    continue
                self.lists.replace_one({"title": title}, {"title": title},
                                       upsert=True)

    def get_page(self, word):
        """Return the stored page document for word, or None."""
        return self.lists.find_one({"title": word})

    def get_random_page(self):
        """Return one uniformly random stored title."""
        total = self.lists.estimated_document_count()
        return self.lists.find()[random.randrange(total)]['title']
from pprint import pprint

from pymongo.mongo_client import MongoClient

client = MongoClient()
db = client.get_database("my_school")
students = db.get_collection('students')

# Demo: insert one student document with a nested address sub-document.
students.insert_one({
    'name': 'Shira',
    'age': 22,
    'address': {
        'city': 'Jerusalem',
        'street': 'King George'
    }
})

# Dump every stored student.
for d in students.find():
    pprint(d)
    print()

# find() returns a lazy Cursor object, not a list.
xx = students.find({})
print(type(xx))
# --- Example n. 26 ---
class PickAnalysis:
    """Aggregate ability-draft pick/win statistics from the "dota" database.

    Reads match documents from match_details and writes derived analytics
    into the abilities / ab_abilities / combos / heros collections.

    Fixes in this revision: __load_ability_picks__ was defined twice with
    identical bodies (the second silently shadowed the first) -- the
    duplicate was removed; a dead trailing `pass` in combo_picks was
    dropped; the caught WriteError is now included in the log message.
    """

    def __init__(self):
        self.client = MongoClient()
        self.db = self.client.get_database("dota")
        self.matches: Collection = self.db.match_details
        self.abilities: Collection = self.db.get_collection("abilities")
        self.ab_abilities: Collection = self.db.get_collection("ab_abilities")
        # Look-back window (days) used by all time-filtered queries.
        self.days = 90
        self.ability_id = None
        self.pick_data = None

    def get_distinct_abilities(self,
                               period: Union[Tuple[int, int], None] = None):
        """Return all distinct drafted ability ids, optionally limited to a
        (start, end) epoch-seconds window."""
        if period is None:
            res = self.matches.distinct("ability_draft.skills")
        else:
            res = self.matches.distinct(
                "ability_draft.skills",
                {"start_time": {
                    "$gt": period[0],
                    "$lt": period[1]
                }})
        return res

    def save_ab_abilities(self):
        """Snapshot the current distinct ability list into ab_abilities."""
        abilities = self.get_distinct_abilities()
        self.ab_abilities.update_one({"_id": "latest"}, {
            "$set": {
                "abilities": abilities,
                "last_run": int(datetime.utcnow().timestamp())
            }
        },
                                     upsert=True)

    def __load_ability_picks__(self) -> pd.DataFrame:
        """Load (radiant_win, pick position, win) rows for self.ability_id.

        Raises NoSkillDataException when no recent match drafted the skill.
        """
        cursor: CommandCursor = self.matches.aggregate([{
            "$match": {
                "ability_draft.skills": self.ability_id,
                "duration": {
                    "$gt": 900
                },
                "start_time": {
                    "$gt": time.time() - 60 * 60 * 24 * self.days
                }
            }
        }, {
            "$project": {
                "position": {
                    "$indexOfArray":
                    ["$ability_draft.drafts", self.ability_id]
                },
                "radiant_win": 1
            }
        }])

        arr = np.asarray([[x['radiant_win'], x['position']] for x in cursor])
        if arr.size == 0:
            raise NoSkillDataException

        # win column: parity of the pick position vs radiant_win decides
        # whether the side that picked the skill won.
        arr = np.hstack(
            [arr, (arr[:, 1] % 2 == (arr[:, 0] + 1) % 2)[..., None]])
        df = pd.DataFrame(arr, columns=['radiant_win', 'pick', 'win'])
        # -1 (skill never drafted) is mapped to the sentinel position 40.
        df.loc[df['pick'] == -1, 'pick'] = 40

        return df

    def combo_picks(self):
        """Accumulate per-skill and per-skill-pair stats over recent matches
        and persist strong pairs into the combos collection."""
        # Slots: wins, played, gold, damage, kills, deaths, assists, xp, tower
        combo_dict = defaultdict(lambda: np.zeros(9).astype(np.float64))
        single_dict = defaultdict(lambda: np.zeros(9).astype(np.float64))
        combos = self.db.get_collection('combos')
        cursor: CommandCursor = self.matches.find(
            {
                "duration": {
                    "$gt": 900
                },
                "start_time": {
                    "$gt": time.time() - 60 * 60 * 24 * self.days
                }
            }, {
                "players.ability_upgrades_arr": 1,
                "players.gold_per_min": 1,
                "players.hero_damage": 1,
                "players.kills": 1,
                "players.deaths": 1,
                "players.assists": 1,
                "players.xp_per_min": 1,
                "players.tower_damage": 1,
                "duration": 1,
                "radiant_win": 1
            })

        for match in tqdm(cursor):
            mmin = match['duration'] / 60
            for i, player in enumerate(match['players']):
                # Even player slots are radiant, odd are dire.
                win = 1 if (i % 2 == 0 and match['radiant_win']) or (
                    i % 2 == 1 and not match['radiant_win']) else 0
                skills = np.unique(player['ability_upgrades_arr'])

                # wins, played, gold, damage, kills, deaths, assists, xp, tower
                new_stats = [
                    win, 1, player['gold_per_min'],
                    player['hero_damage'] / mmin, player['kills'] / mmin,
                    player['deaths'] / mmin, player['assists'] / mmin,
                    player['xp_per_min'], player['tower_damage'] / mmin
                ]
                for skill in skills:
                    single_dict[skill] += new_stats
                for combo in combinations(
                        np.unique(player['ability_upgrades_arr']), 2):
                    combo_dict[combo] += new_stats

        def get_average(skill1, skill2):
            # Mean of the two skills' individual win rates.
            skill1_win = single_dict[skill1][0] * 1.0 / single_dict[skill1][1]
            skill2_win = single_dict[skill2][0] * 1.0 / single_dict[skill2][1]
            return (skill1_win + skill2_win) / 2

        combos.delete_many({})
        # Keep pairs with >25 games whose lower-bound win rate beats 50%.
        combo_docs = [
            {
                "_id": {
                    "skill1": int(key[0]),
                    "skill2": int(key[1])
                },
                "avg_win_pct":
                get_average(key[0], key[1]),
                "raw_win_pct":
                vals[0] / vals[1],
                "win_pct":
                vals[0] / vals[1] - .5 / np.sqrt(vals[1]),
                "synergy":
                vals[0] / vals[1] - .5 / np.sqrt(vals[1]) -
                get_average(key[0], key[1]),
                "gold":
                vals[2] / vals[1],
                "damage":
                vals[3] / vals[1],
                "kills":
                vals[4] / vals[1],
                "deaths":
                vals[5] / vals[1],
                "assists":
                vals[6] / vals[1],
                "xp":
                vals[7] / vals[1],
                "tower":
                vals[8] / vals[1],
                "matches":
                vals[1]
            } for key, vals in combo_dict.items()
            if vals[1] > 25 and vals[0] / vals[1] - .5 / np.sqrt(vals[1]) > .5
        ]
        combos.insert_many(combo_docs)
        for skill_id, vals in single_dict.items():
            try:
                if skill_id is None:
                    continue
                self.abilities.update_one({"_id": int(skill_id)}, {
                    "$set": {
                        "win_rate": vals[0] / vals[1],
                        "gold": vals[2] / vals[1],
                        "damage": vals[3] / vals[1],
                        "kills": vals[4] / vals[1],
                        "deaths": vals[5] / vals[1],
                        "assists": vals[6] / vals[1],
                        "xp": vals[7] / vals[1],
                        "tower": vals[8] / vals[1],
                    }
                })
            except WriteError as e:
                # Fixed: surface the error detail; `e` was previously unused.
                print(f"Failed to write {skill_id}: {e}")

    def save_pick_analytics(self):
        """Persist survival/win/pick-rate analytics for self.ability_id."""
        self.abilities.update_one({"_id": self.ability_id}, {
            "$set":
            dict(survival=list(self.get_pick_survival()),
                 **self.get_pick_win_rates(),
                 **self.get_pick_summary(),
                 **self.get_pick_rates())
        },
                                  upsert=True)

    def get_pick_rates(self):
        """Overall pick rate plus per-round (10-pick bucket) pick rates."""
        df = self.pick_data
        picks = self.pick_data['pick'].count() * 1.0
        return dict(
            pick_rate=df.loc[df['pick'].between(0, 39), 'pick'].count() /
            picks,
            pick_rate_rounds=[
                df.loc[df['pick'].between(x * 10, x * 10 + 9), 'pick'].count()
                / picks for x in range(4)
            ])

    def get_pick_win_rates(self):
        """Win rate per draft round (10-pick buckets)."""
        df = self.pick_data
        return dict(win_rate_rounds=[
            df.loc[df['pick'].between(x * 10, x * 10 + 9), 'win'].mean()
            for x in range(4)
        ])

    def get_pick_summary(self):
        """describe() statistics of the pick-position distribution."""
        return self.pick_data['pick'].describe().to_dict()

    def get_pick_median(self):
        """Median pick position."""
        return self.pick_data['pick'].median()

    def get_pick_survival(self):
        """Survival curve over pick positions 0..39: probability the skill
        is still available at each position."""
        data = self.pick_data
        df = (data.groupby('pick').count().sort_index().reindex(
            pd.Index(range(data['pick'].min(), data['pick'].max() + 1)),
            fill_value=0).iloc[:,
                               [0]].rename(columns={data.columns[0]: 'count'}))
        df['survival'] = (
            (1 - df['count'] /
             (df['count'].sum() - df['count'].shift(1).cumsum())).cumprod())

        df = df.reindex(range(40))

        # Positions before the observed range: always still available (1).
        df = df.where(df.ffill().notna(), 1)

        # Positions after the observed range: never available (0).
        df = df.where(df.bfill().notna(), 0)

        return df['survival'].values

    def set_pick_id(self, ability_id):
        """Select the ability to analyse and eagerly load its pick data."""
        self.ability_id = ability_id
        self.pick_data = self.__load_ability_picks__()
        return self

    def hero_stats(self):
        """Recompute per-hero win rates and strong hero/skill pairings into
        the heros collection."""
        cursor: CommandCursor = self.matches.find(
            {
                "duration": {
                    "$gt": 900
                },
                "start_time": {
                    "$gt": time.time() - 60 * 60 * 24 * self.days
                }
            }, {
                "players.ability_upgrades_arr": 1,
                "players.hero_id": 1,
                "players.gold_per_min": 1,
                "players.hero_damage": 1,
                "players.kills": 1,
                "players.deaths": 1,
                "players.assists": 1,
                "players.xp_per_min": 1,
                "players.tower_damage": 1,
                "duration": 1,
                "players.is_radiant": 1,
                "radiant_win": 1
            })

        heros_coll = self.db.get_collection("heros")
        heros_coll.delete_many({})
        # hero_skill[hero][skill] = [wins, games]; hero_stats[hero] likewise.
        hero_skill = defaultdict(
            lambda: defaultdict(lambda: np.zeros(2, np.int64)))
        hero_stats = defaultdict(lambda: np.zeros(2, np.int64))
        for match in tqdm(cursor):
            for player in match["players"]:
                hero_id = player["hero_id"]
                win = 0 if (player["is_radiant"] ^ match["radiant_win"]) else 1
                hero_stats[hero_id] += [win, 1]

                for skill in np.unique(player['ability_upgrades_arr']):
                    hero_skill[hero_id][skill] += [win, 1]

        for hero_id, skills in tqdm(hero_skill.items()):
            h_stat = hero_stats[hero_id]
            hero_dict = {
                "_id": hero_id,
                "win_rate": float(h_stat[0] * 1.0 / h_stat[1])
            }

            # Keep skills with >15 games whose lower-bound win rate beats 50%.
            hero_skills = []
            for skill_id, stats in skills.items():
                win_pct = float(stats[0] * 1.0 / (stats[1]))
                if stats[1] > 15 and win_pct - .5 / np.sqrt(stats[1]) > .5:
                    hero_skills.append({
                        "id":
                        int(skill_id),
                        "matches":
                        int(stats[1]),
                        "win_rate":
                        float(stats[0] * 1.0 / stats[1])
                    })
            hero_dict["skills"] = hero_skills
            heros_coll.insert_one(hero_dict)
Esempio n. 27
0
    def test_ship_of_theseus(self):
        """Replace every original member of the replica set and verify the
        client can still discover and use the reconfigured set.

        Named after the ship-of-Theseus paradox: by the end of the test no
        member from the original seed list remains in the set, yet the
        client reconnects through topology discovery alone.
        """
        c = MongoClient(
            self.seed,
            replicaSet=self.name,
            serverSelectionTimeoutMS=self.server_selection_timeout)
        # w = all data-bearing members, so the insert replicates everywhere.
        db = c.get_database(
            "pymongo_test",
            write_concern=WriteConcern(w=len(c.secondaries) + 1))
        db.test.insert_one({})
        find_one = db.test.find_one

        primary = ha_tools.get_primary()
        secondary1 = ha_tools.get_random_secondary()

        # Grow the set by three brand-new members, one reconfig at a time.
        new_hosts = []
        for i in range(3):
            new_hosts.append(ha_tools.add_member())

            # RS closes all connections after reconfig.
            for j in xrange(30):
                try:
                    if ha_tools.get_primary():
                        break
                except (ConnectionFailure, OperationFailure):
                    pass

                time.sleep(1)
            else:
                self.fail("Couldn't recover from reconfig")

        # Wait for new members to join.
        for _ in xrange(120):
            if ha_tools.get_primary() and len(ha_tools.get_secondaries()) == 4:
                break

            time.sleep(1)
        else:
            self.fail("New secondaries didn't join")

        # Kill both original members; only the three new hosts remain.
        # Signal 9 simulates an abrupt crash rather than a clean shutdown.
        ha_tools.kill_members([primary, secondary1], 9)
        time.sleep(5)

        wait_until(lambda: (ha_tools.get_primary()
                            and len(ha_tools.get_secondaries()) == 2),
                   "fail over",
                   timeout=30)

        # Give the client's monitors time to observe the new topology.
        time.sleep(2 * self.heartbeat_frequency)

        # No error.
        find_one()
        find_one(read_preference=SECONDARY)

        # All members down.
        ha_tools.kill_members(new_hosts, 9)
        self.assertRaises(
            ConnectionFailure,
            find_one, read_preference=SECONDARY)

        ha_tools.restart_members(new_hosts)

        # Should be able to reconnect to set even though original seed
        # list is useless. Use SECONDARY so we don't have to wait for
        # the election, merely for the client to detect members are up.
        time.sleep(2 * self.heartbeat_frequency)
        find_one(read_preference=SECONDARY)

        # Kill new members and switch back to original two members.
        ha_tools.kill_members(new_hosts, 9)
        self.assertRaises(
            ConnectionFailure,
            find_one, read_preference=SECONDARY)

        ha_tools.restart_members([primary, secondary1])

        # Wait for members to figure out they're secondaries.
        wait_until(lambda: len(ha_tools.get_secondaries()) == 2,
                   "detect two secondaries",
                   timeout=30)

        # Should be able to reconnect to set again.
        time.sleep(2 * self.heartbeat_frequency)
        find_one(read_preference=SECONDARY)
Esempio n. 28
0
    def test_secondary_connection(self):
        """Verify direct (non-replica-set) connections to a primary, a
        secondary, and an arbiter under every read preference.

        A direct connection to the primary can always be queried; a direct
        connection to a secondary can be queried with any read preference
        except PRIMARY; and any write to a secondary or arbiter over a
        direct connection — even an unacknowledged one — must raise
        AutoReconnect('not master').
        """
        self.c = MongoClient(
            self.seed,
            replicaSet=self.name,
            serverSelectionTimeoutMS=self.server_selection_timeout)
        wait_until(lambda: len(self.c.secondaries), "discover secondary")
        # Wait for replication...
        w = len(self.c.secondaries) + 1
        db = self.c.get_database("pymongo_test",
                                 write_concern=WriteConcern(w=w))

        db.test.delete_many({})
        db.test.insert_one({'foo': 'bar'})

        # Test direct connection to a primary or secondary
        primary_host, primary_port = ha_tools.get_primary().split(':')
        primary_port = int(primary_port)
        (secondary_host,
         secondary_port) = ha_tools.get_secondaries()[0].split(':')
        secondary_port = int(secondary_port)
        arbiter_host, arbiter_port = ha_tools.get_arbiters()[0].split(':')
        arbiter_port = int(arbiter_port)

        # MongoClient succeeds no matter the read preference
        for kwargs in [
            {'read_preference': PRIMARY},
            {'read_preference': PRIMARY_PREFERRED},
            {'read_preference': SECONDARY},
            {'read_preference': SECONDARY_PREFERRED},
            {'read_preference': NEAREST},
        ]:
            client = MongoClient(
                primary_host,
                primary_port,
                serverSelectionTimeoutMS=self.server_selection_timeout,
                **kwargs)
            wait_until(lambda: primary_host == client.host,
                       "connect to primary")

            self.assertEqual(primary_port, client.port)
            self.assertTrue(client.is_primary)

            # Direct connection to primary can be queried with any read pref
            self.assertTrue(client.pymongo_test.test.find_one())

            client = MongoClient(
                secondary_host,
                secondary_port,
                serverSelectionTimeoutMS=self.server_selection_timeout,
                **kwargs)
            wait_until(lambda: secondary_host == client.host,
                       "connect to secondary")

            self.assertEqual(secondary_port, client.port)
            self.assertFalse(client.is_primary)

            # Direct connection to secondary can be queried with any read pref
            # but PRIMARY
            if kwargs.get('read_preference') != PRIMARY:
                self.assertTrue(client.pymongo_test.test.find_one())
            else:
                self.assertRaises(
                    AutoReconnect, client.pymongo_test.test.find_one)

            # Since an attempt at an acknowledged write to a secondary from a
            # direct connection raises AutoReconnect('not master'), MongoClient
            # should do the same for unacknowledged writes.
            try:
                client.get_database(
                    "pymongo_test",
                    write_concern=WriteConcern(w=0)).test.insert_one({})
            except AutoReconnect as e:
                self.assertEqual('not master', e.args[0])
            else:
                # BUGFIX: adjacent string literals previously concatenated
                # to "...shouldhave raised exception" (missing space).
                self.fail(
                    'Unacknowledged insert into secondary client %s should '
                    'have raised exception' % (client,))

            # Test direct connection to an arbiter
            client = MongoClient(
                arbiter_host,
                arbiter_port,
                serverSelectionTimeoutMS=self.server_selection_timeout,
                **kwargs)
            wait_until(lambda: arbiter_host == client.host,
                       "connect to arbiter")

            self.assertEqual(arbiter_port, client.port)
            self.assertFalse(client.is_primary)

            # See explanation above
            try:
                client.get_database(
                    "pymongo_test",
                    write_concern=WriteConcern(w=0)).test.insert_one({})
            except AutoReconnect as e:
                self.assertEqual('not master', e.args[0])
            else:
                # BUGFIX: same missing-space defect as the secondary case.
                self.fail(
                    'Unacknowledged insert into arbiter client %s should '
                    'have raised exception' % (client,))
Esempio n. 29
0
class TestReadPreference(HATestCase):
    """Exercise every read-preference mode against a tagged three-member
    replica set (one primary, two priority-0 secondaries) as members are
    taken down and brought back up.
    """

    # Speed up assertReadFrom() when no server is suitable.
    server_selection_timeout = 0.001

    def setUp(self):
        """Start a tagged three-member set and seed pymongo_test.test."""
        super(TestReadPreference, self).setUp()

        members = [
            # primary
            {'tags': {'dc': 'ny', 'name': 'primary'}},

            # secondary
            {'tags': {'dc': 'la', 'name': 'secondary'}, 'priority': 0},

            # other_secondary
            {'tags': {'dc': 'ny', 'name': 'other_secondary'}, 'priority': 0},
        ]

        res = ha_tools.start_replica_set(members)
        self.seed, self.name = res

        primary = ha_tools.get_primary()
        self.primary = partition_node(primary)
        self.primary_tags = ha_tools.get_tags(primary)
        # Make sure priority worked
        self.assertEqual('primary', self.primary_tags['name'])

        self.primary_dc = {'dc': self.primary_tags['dc']}

        secondaries = ha_tools.get_secondaries()

        # Single-element unpacking asserts exactly one member carries the tag.
        (secondary, ) = [
            s for s in secondaries
            if ha_tools.get_tags(s)['name'] == 'secondary']

        self.secondary = partition_node(secondary)
        self.secondary_tags = ha_tools.get_tags(secondary)
        self.secondary_dc = {'dc': self.secondary_tags['dc']}

        (other_secondary, ) = [
            s for s in secondaries
            if ha_tools.get_tags(s)['name'] == 'other_secondary']

        self.other_secondary = partition_node(other_secondary)
        self.other_secondary_tags = ha_tools.get_tags(other_secondary)
        self.other_secondary_dc = {'dc': self.other_secondary_tags['dc']}

        self.c = MongoClient(
            self.seed,
            replicaSet=self.name,
            serverSelectionTimeoutMS=self.server_selection_timeout)
        # Write to every data-bearing member so any secondary can answer reads.
        self.w = len(self.c.secondaries) + 1
        self.db = self.c.get_database("pymongo_test",
                                      write_concern=WriteConcern(w=self.w))
        self.db.test.delete_many({})
        self.db.test.insert_many([{'foo': i} for i in xrange(10)])

        self.clear_ping_times()

    def set_ping_time(self, host, ping_time_seconds):
        """Fake *host*'s round-trip time for NEAREST selection.

        NOTE(review): pokes a private test hook on ServerDescription —
        confirm it still exists in the pymongo version under test.
        """
        ServerDescription._host_to_round_trip_time[host] = ping_time_seconds

    def clear_ping_times(self):
        """Remove all faked round-trip times."""
        ServerDescription._host_to_round_trip_time.clear()

    def test_read_preference(self):
        """Walk the set through four topologies and check which member each
        read preference / tag-set combination selects.
        """
        # We pass through four states:
        #
        #       1. A primary and two secondaries
        #       2. Primary down
        #       3. Primary up, one secondary down
        #       4. Primary up, all secondaries down
        #
        # For each state, we verify the behavior of PRIMARY,
        # PRIMARY_PREFERRED, SECONDARY, SECONDARY_PREFERRED, and NEAREST
        c = MongoClient(
            self.seed,
            replicaSet=self.name,
            serverSelectionTimeoutMS=self.server_selection_timeout)
        wait_until(lambda: c.primary, "discover primary")
        wait_until(lambda: len(c.secondaries) == 2, "discover secondaries")

        def assertReadFrom(member, *args, **kwargs):
            utils.assertReadFrom(self, c, member, *args, **kwargs)

        def assertReadFromAll(members, *args, **kwargs):
            utils.assertReadFromAll(self, c, members, *args, **kwargs)

        def unpartition_node(node):
            host, port = node
            return '%s:%s' % (host, port)

        # To make the code terser, copy hosts into local scope
        primary = self.primary
        secondary = self.secondary
        other_secondary = self.other_secondary

        bad_tag = {'bad': 'tag'}

        # 1. THREE MEMBERS UP -------------------------------------------------
        #       PRIMARY
        assertReadFrom(primary, PRIMARY)

        #       PRIMARY_PREFERRED
        # Trivial: mode and tags both match
        assertReadFrom(primary, PRIMARY_PREFERRED, self.primary_dc)

        # Secondary matches but not primary, choose primary
        assertReadFrom(primary, PRIMARY_PREFERRED, self.secondary_dc)

        # Chooses primary, ignoring tag sets
        assertReadFrom(primary, PRIMARY_PREFERRED, self.primary_dc)

        # Chooses primary, ignoring tag sets
        assertReadFrom(primary, PRIMARY_PREFERRED, bad_tag)
        assertReadFrom(primary, PRIMARY_PREFERRED, [bad_tag, {}])

        #       SECONDARY
        assertReadFromAll([secondary, other_secondary], SECONDARY)

        #       SECONDARY_PREFERRED
        assertReadFromAll([secondary, other_secondary], SECONDARY_PREFERRED)

        # Multiple tags
        assertReadFrom(secondary, SECONDARY_PREFERRED, self.secondary_tags)

        # Fall back to primary if it's the only one matching the tags
        assertReadFrom(primary, SECONDARY_PREFERRED, {'name': 'primary'})

        # No matching secondaries
        assertReadFrom(primary, SECONDARY_PREFERRED, bad_tag)

        # Fall back from non-matching tag set to matching set
        assertReadFromAll([secondary, other_secondary],
            SECONDARY_PREFERRED, [bad_tag, {}])

        assertReadFrom(other_secondary,
            SECONDARY_PREFERRED, [bad_tag, {'dc': 'ny'}])

        #       NEAREST
        self.clear_ping_times()

        assertReadFromAll([primary, secondary, other_secondary], NEAREST)

        assertReadFromAll([primary, other_secondary],
            NEAREST, [bad_tag, {'dc': 'ny'}])

        self.set_ping_time(primary, 0)
        self.set_ping_time(secondary, .03) # 30 ms
        self.set_ping_time(other_secondary, 10)

        # Nearest member, no tags
        assertReadFrom(primary, NEAREST)

        # Tags override nearness
        assertReadFrom(primary, NEAREST, {'name': 'primary'})
        assertReadFrom(secondary, NEAREST, self.secondary_dc)

        # Make secondary fast
        self.set_ping_time(primary, .03) # 30 ms
        self.set_ping_time(secondary, 0)

        assertReadFrom(secondary, NEAREST)

        # Other secondary fast
        self.set_ping_time(secondary, 10)
        self.set_ping_time(other_secondary, 0)

        assertReadFrom(other_secondary, NEAREST)

        self.clear_ping_times()

        assertReadFromAll([primary, other_secondary], NEAREST, [{'dc': 'ny'}])

        # 2. PRIMARY DOWN -----------------------------------------------------
        killed = ha_tools.kill_primary()

        # Let monitor notice primary's gone
        time.sleep(2 * self.heartbeat_frequency)

        #       PRIMARY
        assertReadFrom(None, PRIMARY)

        #       PRIMARY_PREFERRED
        # No primary, choose matching secondary
        assertReadFromAll([secondary, other_secondary], PRIMARY_PREFERRED)
        assertReadFrom(secondary, PRIMARY_PREFERRED, {'name': 'secondary'})

        # No primary or matching secondary
        assertReadFrom(None, PRIMARY_PREFERRED, bad_tag)

        #       SECONDARY
        assertReadFromAll([secondary, other_secondary], SECONDARY)

        # Only primary matches
        assertReadFrom(None, SECONDARY, {'name': 'primary'})

        # No matching secondaries
        assertReadFrom(None, SECONDARY, bad_tag)

        #       SECONDARY_PREFERRED
        assertReadFromAll([secondary, other_secondary], SECONDARY_PREFERRED)

        # Mode and tags both match
        assertReadFrom(secondary, SECONDARY_PREFERRED, {'name': 'secondary'})

        #       NEAREST
        self.clear_ping_times()

        assertReadFromAll([secondary, other_secondary], NEAREST)

        # 3. PRIMARY UP, ONE SECONDARY DOWN -----------------------------------
        ha_tools.restart_members([killed])
        ha_tools.wait_for_primary()

        ha_tools.kill_members([unpartition_node(secondary)], 2)
        time.sleep(5)
        ha_tools.wait_for_primary()
        time.sleep(2 * self.heartbeat_frequency)

        #       PRIMARY
        assertReadFrom(primary, PRIMARY)

        #       PRIMARY_PREFERRED
        assertReadFrom(primary, PRIMARY_PREFERRED)

        #       SECONDARY
        assertReadFrom(other_secondary, SECONDARY)
        assertReadFrom(other_secondary, SECONDARY, self.other_secondary_dc)

        # Only the down secondary matches
        assertReadFrom(None, SECONDARY, {'name': 'secondary'})

        #       SECONDARY_PREFERRED
        assertReadFrom(other_secondary, SECONDARY_PREFERRED)
        assertReadFrom(
            other_secondary, SECONDARY_PREFERRED, self.other_secondary_dc)

        # The secondary matching the tag is down, use primary
        assertReadFrom(primary, SECONDARY_PREFERRED, {'name': 'secondary'})

        #       NEAREST
        assertReadFromAll([primary, other_secondary], NEAREST)
        assertReadFrom(other_secondary, NEAREST, {'name': 'other_secondary'})
        assertReadFrom(primary, NEAREST, {'name': 'primary'})

        # 4. PRIMARY UP, ALL SECONDARIES DOWN ---------------------------------
        ha_tools.kill_members([unpartition_node(other_secondary)], 2)

        #       PRIMARY
        assertReadFrom(primary, PRIMARY)

        #       PRIMARY_PREFERRED
        assertReadFrom(primary, PRIMARY_PREFERRED)
        assertReadFrom(primary, PRIMARY_PREFERRED, self.secondary_dc)

        #       SECONDARY
        assertReadFrom(None, SECONDARY)
        assertReadFrom(None, SECONDARY, self.other_secondary_dc)
        assertReadFrom(None, SECONDARY, {'dc': 'ny'})

        #       SECONDARY_PREFERRED
        assertReadFrom(primary, SECONDARY_PREFERRED)
        assertReadFrom(primary, SECONDARY_PREFERRED, self.secondary_dc)
        assertReadFrom(primary, SECONDARY_PREFERRED, {'name': 'secondary'})
        assertReadFrom(primary, SECONDARY_PREFERRED, {'dc': 'ny'})

        #       NEAREST
        assertReadFrom(primary, NEAREST)
        assertReadFrom(None, NEAREST, self.secondary_dc)
        assertReadFrom(None, NEAREST, {'name': 'secondary'})

        # Even if primary's slow, still read from it
        self.set_ping_time(primary, 100)
        assertReadFrom(primary, NEAREST)
        assertReadFrom(None, NEAREST, self.secondary_dc)

        self.clear_ping_times()
Esempio n. 30
0
class MONGODB:
    """Thin wrapper around a MongoDB client, database and collection.

    One client/database/collection handle is held at a time; the
    collection-level helpers operate on whichever collection was last
    selected with :meth:`getcollection`.
    """

    # Connection handle, selected database, selected collection.
    client = None
    db = None
    collection = None

    def connect(self):
        """Open a client to the hard-coded MongoDB server.

        NOTE(review): a connection failure is silently ignored, leaving
        ``self.client`` as None.
        """
        user = "******"
        pwd = "qiangbi123"
        server = "115.28.161.44"
        port = '27017'
        # Database used for authentication.
        db_name = "admin"
        uri = f'mongodb://{user}:{pwd}@{server}:{port}/{db_name}'
        try:
            self.client = MongoClient(uri)
        except ServerSelectionTimeoutError:
            pass

    def getdb(self, dbname):
        """Select the database named *dbname* (e.g. ``mxmanage``)."""
        self.db = self.client.get_database(dbname)

    def getcollection(self, coll_name):
        """Select the collection *coll_name* on the current database."""
        self.collection = getattr(self.db, coll_name)

    def insert(self, data):
        """Insert multiple documents via the legacy ``insert`` API."""
        self.collection.insert(data)

    def insertOne(self, data):
        """Insert a single document into the current collection."""
        self.collection.insert_one(data)

    def delete(self):
        """Delete documents from the collection (not implemented)."""
        pass

    def updateOne(self, condition, data):
        """Update one document matching *condition* with *data*.

        NOTE(review): any error is swallowed silently.
        """
        try:
            self.collection.update_one(condition, data)
        except Exception:
            pass

    def findOne(self, condition):
        """Return one document matching *condition*, or ``{}`` on error."""
        try:
            return self.collection.find_one(condition)
        except Exception:
            return {}

    def findMany(self, queue, start, count):
        """Push one page of documents onto *queue*.

        Pages are addressed by ``skip(start).limit(count)``. Returns True
        when the queue received at least one document, False when the page
        was empty, and None if iterating the cursor raised.
        """
        cursor = self.collection.find().skip(start).limit(count)
        print(start)
        print(count)
        print(cursor)
        try:
            for document in cursor:
                queue.put(document)
            return queue.qsize() > 0
        except Exception:
            pass

    def count(self):
        """Return the document count of the current collection."""
        return self.collection.count()

    def close(self):
        """Close the underlying client connection."""
        self.client.close()
Esempio n. 31
0
class Connection:
    """Per-graph singleton wrapper around a ``MongoClient``.

    Exactly one ``Connection`` exists per graph name: ``__new__`` returns
    the cached instance and ``__init__`` runs its body only once per name.
    The client and database are created lazily on first access.
    """

    # Singleton cache and one-time-initialization flags, keyed by graph name.
    _graph_map: dict[str, Connection] = {}
    _initialized_map: dict[str, bool] = {}

    def __new__(cls: type[Connection], graph_name: str) -> Connection:
        """Return the cached instance for *graph_name*, creating it once."""
        if not cls._graph_map.get(graph_name):
            cls._graph_map[graph_name] = super(Connection, cls).__new__(cls)
        return cls._graph_map[graph_name]

    def __init__(self: Connection, graph_name: str) -> None:
        """Initialize instance state exactly once per graph name."""
        if self.__class__._initialized_map.get(graph_name):
            return
        self._graph_name: str = graph_name
        self._url: Optional[str] = None
        self._client: Optional[MongoClient] = None
        self._database: Optional[Database] = None
        self._collections: dict[str, Collection] = {}
        self._connection_callbacks: dict[str, ConnectedCallback] = {}
        self._connected: bool = False
        self.__class__._initialized_map[graph_name] = True
        return None

    @property
    def graph_name(self: Connection) -> str:
        """The graph name this connection belongs to."""
        return self._graph_name

    @property
    def url(self: Connection) -> str:
        """The MongoDB URL, explicit if set, otherwise derived from config."""
        if self._url:
            return self._url
        return self._generate_default_url()

    def set_url(self: Connection, url: str) -> None:
        """Override the MongoDB URL used by :meth:`connect`."""
        self._url = url

    def _generate_default_url(self: Connection) -> str:
        """Resolve the URL from user config, falling back to localhost with
        a database named after the current working directory."""
        if self.graph_name == 'default':
            user_url = uconf()['pymongo.url'] or uconf()['pymongo.default.url']
        else:
            user_url = uconf()[f'pymongo.{self.graph_name}.url']
        if user_url is not None:
            self._url = user_url
            return user_url
        base = 'mongodb://localhost:27017/'
        proj = camelize(parameterize(path.basename(getcwd()))).lower()
        self._url = base + proj
        return self._url

    @property
    def client(self: Connection) -> MongoClient:
        """The underlying client; connects lazily on first access."""
        if self._client is not None:
            return self._client
        self.connect()
        return self._client

    @property
    def database(self: Connection) -> Database:
        """The default database; connects lazily on first access."""
        if self._database is not None:
            return self._database
        self.connect()
        return self._database

    def connect(self: Connection) -> None:
        """Create the client/database and fire registered callbacks."""
        self._client = MongoClient(self.url)
        self._database = self._client.get_database()
        self._connected = True
        for name, callback in self._connection_callbacks.items():
            self._call_callback(name, callback)

    def disconnect(self: Connection) -> None:
        """Close the client and reset all cached state."""
        if self._client is not None:
            self._client.close()
            self._client = None
            self._database = None
            self._collections = {}
            self._connected = False

    @property
    def connected(self: Connection) -> bool:
        # Annotation fixed: `self` was previously annotated as `Collection`.
        """Whether :meth:`connect` has run and not been disconnected."""
        return self._connected

    def collection(self: Connection,
                   name: str,
                   index_keys: list[str] | None = None) -> Collection:
        """Return (and cache) the collection *name*, optionally creating a
        unique 'ref' index over *index_keys* on first access."""
        if self._collections.get(name) is not None:
            return self._collections[name]
        coll = self.database.get_collection(name)
        if index_keys is not None:
            ukeys = [(k, 1) for k in index_keys]
            coll.create_index(ukeys, name='ref', unique=True)
        self._collections[name] = coll
        return coll

    def add_connected_callback(self: Connection, name: str,
                               callback: ConnectedCallback) -> None:
        """Register *callback*; invoked immediately if already connected."""
        self._connection_callbacks[name] = callback
        if self._client:
            self._call_callback(name, callback)

    def _call_callback(self: Connection, name: str,
                       callback: ConnectedCallback) -> None:
        # The callback name doubles as the collection name it receives.
        callback(self.collection(name))

    def collection_from(self: Connection, cls: type[T]) -> Collection:
        """Return the collection configured for model class *cls*."""
        coll_name = cls.pconf.collection_name
        return self.collection(coll_name)

    # Populated elsewhere with the 'default' graph's connection.
    default: ClassVar[Connection]

    @classmethod
    def get_collection(cls: type[Connection], pmcls: type[T]) -> Collection:
        """Return the collection for model class *pmcls* via its graph."""
        graph = pmcls.cdef.jconf.cgraph.name
        connection = Connection(graph)
        return connection.collection_from(pmcls)

    @classmethod
    def from_class(cls: type[Connection], pmcls: type[T]) -> Connection:
        """Return the connection for model class *pmcls*'s graph."""
        return Connection(pmcls.cdef.jconf.cgraph.name)
Esempio n. 32
0
class Storage:
    """MongoDB-backed storage for shared-event bookkeeping.

    Each event document is addressed either by its password or by its
    group chat id, and tracks items, participants, expenses, balances and
    payment status.
    """

    def __init__(self, host, db):
        self.client = MongoClient(host)
        self.db = self.client.get_database(db)
        self.event_data = self.db.get_collection("event_data")

    def get_event_by_password(self, password):
        """Return the event document with the given password, or None."""
        return self.event_data.find_one({"password": password})

    def get_event_by_chat_group(self, chat_group):
        """Return the event document for the given group chat id, or None."""
        return self.event_data.find_one({"group_chat_id": chat_group})

    def set_manager(self, chat_id, password):
        """Claim the manager role for the event.

        Returns True when the event had no manager yet (storing *chat_id*
        as manager), False otherwise.
        """
        event = self.get_event_by_password(password)
        logger.info(f"event {event['manager_id']}")
        if event["manager_id"] == "":
            logger.info("manager_id")
            # BUGFIX: use update_one — the legacy Collection.update was
            # deprecated and removed in PyMongo 4, and every other write in
            # this class already uses update_one.
            self.event_data.update_one({"password": password},
                                       {'$set': {
                                           "manager_id": chat_id
                                       }})
            return True

        return False

    def set_user_id(self, event_password, chat_id):
        """Add *chat_id* to the event's participants if not already there."""
        event = self.get_event_by_password(event_password)

        if chat_id not in event["users_chat_id"]:
            self.event_data.update_one({"password": event_password},
                                       {"$push": {
                                           "users_chat_id": chat_id
                                       }})

    def add_item_to_list(self, chat_id, item):
        """Append *item* to the event's item list; True if it was new."""
        event = self.get_event_by_chat_group(chat_id)

        if item not in event["items"]:
            self.event_data.update_one({"group_chat_id": chat_id},
                                       {"$push": {
                                           "items": item
                                       }})
            return True
        return False

    def remove_item_from_list(self, chat_id, item):
        """Remove *item* from the event's item list; True if it existed."""
        logger.info(f"remove_item_from_list {item}")
        event = self.get_event_by_chat_group(chat_id)

        if item in event["items"]:
            self.event_data.update_one({"group_chat_id": chat_id},
                                       {"$pull": {
                                           "items": item
                                       }})
            return True
        return False

    def get_items(self, chat_id):
        """Return the event's item list, looked up by group chat id."""
        event = self.get_event_by_chat_group(chat_id)
        return event['items']

    def get_items_by_password(self, password):
        """Return the event's item list, looked up by password."""
        event = self.get_event_by_password(password)
        return event['items']

    def get_taken_items(self, password):
        """Return the items already claimed by participants."""
        event = self.get_event_by_password(password)
        return event['taken_items']

    def get_remaining_items(self, password):
        """Return items not yet claimed by anyone."""
        logger.info(f"get_remaining_items: {password}")
        remaining_items = [
            item for item in self.get_items_by_password(password)
            if item not in self.get_taken_items(password)
        ]
        logger.info(f"remaining_items: {remaining_items}")

        return remaining_items

    def set_taken_item(self, password, item):
        """Claim *item* if still available; True on success."""
        if item in self.get_remaining_items(password):
            self.event_data.update_one({"password": password},
                                       {"$push": {
                                           "taken_items": item
                                       }})
            return True
        return False

    def set_costs(self, password, cost, chat_id):
        """Record an expense for *chat_id* and mark them as a responder."""
        event = self.get_event_by_password(password)
        self.event_data.update_one(
            {"password": password},
            {"$push": {
                "expenses": [str(chat_id), cost]
            }})
        if chat_id not in event["responders"]:
            self.event_data.update_one({"password": password},
                                       {"$push": {
                                           "responders": chat_id
                                       }})

    def get_manager_id(self, password, chat_id):
        """Return True when *chat_id* is the event's manager."""
        event = self.get_event_by_password(password)
        logger.info(f"event[manager_id] {event['manager_id']}")
        if event["manager_id"] == chat_id:
            logger.info(f"chat_id {chat_id}")
            return True
        return False

    def get_all_clumsys(self, password):
        """Return participants who have not reported any expenses yet."""
        event = self.get_event_by_password(password)
        logger.info(
            f"users_chat_id {event['users_chat_id']}, responders: {event['responders']}"
        )
        clumsys = [
            clumsy for clumsy in event["users_chat_id"]
            if clumsy not in event["responders"]
        ]
        logger.info(f"clumsys {clumsys}")
        return clumsys

    def set_balance(self, password):
        """Compute and store each participant's balance.

        Balance = own spending minus the average spending per participant,
        appended to the event's "balance" array as [member, delta].
        """
        event = self.get_event_by_password(password)
        # Total of all recorded expenses (renamed from `sum`, which
        # shadowed the builtin).
        total = 0
        logger.info(event["expenses"])
        for member in event["expenses"]:
            total += int(member[1])
            logger.info(f"member sum {total}")

        avg = total / len(event["users_chat_id"])

        for member in event["users_chat_id"]:
            total = 0
            for member_ex in event["expenses"]:
                if member == int(member_ex[0]):
                    total += int(member_ex[1])
                    logger.info(f"sum: {total}")

            logger.info(f"sum: {total} +-{avg}")
            self.event_data.update_one(
                {"password": password},
                {"$push": {
                    "balance": [member, total - avg]
                }})

    def get_balance(self, password, chat_id):
        """Return the stored balance delta for *chat_id*, or None."""
        event = self.get_event_by_password(password)
        for member in event["balance"]:
            logger.info(member)
            if member[0] == chat_id:
                logger.info(member)
                return member[1]

    def set_who_paid(self, password, chat_id):
        """Mark *chat_id* as having settled their balance."""
        event = self.get_event_by_password(password)
        self.event_data.update_one({"password": password},
                                   {"$push": {
                                       "who_paid": chat_id
                                   }})

    def get_cheaps(self, password):
        """Return participants who have not paid yet."""
        event = self.get_event_by_password(password)
        cheaps = [
            cheap for cheap in event["users_chat_id"]
            if cheap not in event["who_paid"]
        ]
        return cheaps

    def set_password(self, chat_id, password):
        """Create (or reset) the event document for a group chat."""
        logger.info(f"> set_password #{chat_id} #{password}")

        self.event_data.replace_one({"group_chat_id": chat_id}, {
            "group_chat_id": chat_id,
            "manager_id": "",
            "password": password,
            "items": [],
            "taken_items": [],
            "users_chat_id": [],
            "expenses": [],
            "balance": [],
            "responders": [],
            "who_paid": []
        },
                                    upsert=True)
Esempio n. 33
0
import random
import feedparser
import dict_questions
from pymongo.mongo_client import MongoClient
from datetime import datetime

# Connect to the local MongoDB server (default host/port) and rebuild the
# "Teachild" database from scratch.
client = MongoClient()
db = client.get_database("Teachild")
# Drop any stale data so this seed script always starts from empty collections.
db.drop_collection('Parent')
db.drop_collection('Child')
db.drop_collection('Task')
parent_collection = db.get_collection('Parent')
child_collection = db.get_collection('Child')
task_collection = db.get_collection('Task')
'''
#0-39
dict_questions.dict_questions_math_level1
#0-29
dict_questions.dict_questions_math_level2
'''
# Number of questions bundled into a single task.
NUMBER_Q_IN_TASK = 2


def make_task_for_level(dictt: dict) -> dict:
    l = []
    for i in range(1, NUMBER_Q_IN_TASK + 1):
        l.append(str(i))

    return dict(zip(l, random.choices(dictt, k=NUMBER_Q_IN_TASK)))

Esempio n. 34
0
import bson, json, datetime
from bson.objectid import ObjectId
from bson.json_util import loads, dumps

# Skeleton document for a job record; fields are filled in when a job is
# created and later claimed by a worker.
jobtemplate = {
    "url": None,
    "name": None,
    "target_commit": None,
    "target_commit_date": None,
    "status": "unclaimed",
    "worker": None,
    "source": None
}

# NOTE(review): MongoClient is not imported in the visible header of this
# snippet — presumably imported elsewhere in the module; verify.
dbclient = MongoClient()
db = dbclient.get_database(name='ca-core')
jobcollection = db.get_collection('jobs')
workercollection = db.get_collection('workers')

### workercollection


def get_worker_state(workername):
    """Return the stored state of the named worker.

    Args:
        workername: Value matched against the worker document's "name" field.

    Returns:
        The worker's "state" field, or the int 404 when no such worker
        exists (sentinel kept for backward compatibility with callers).

    Raises:
        Any exception from the underlying lookup, after logging it.
    """
    try:
        result = workercollection.find_one({"name": workername})
        if result is None:
            return 404
        return result["state"]
    except Exception as e:
        log.err(e)
        # Bare re-raise keeps the original exception and traceback intact
        # (idiomatic; "raise e" is redundant here).
        raise
Esempio n. 35
0
import pandas as pd
import lxml.html as html
import requests
import numpy as np
from pymongo.mongo_client import MongoClient
from pymongo.collection import Collection

# Refresh scepter/shard statistics for Dota abilities scraped from windrun.io.
client = MongoClient()
db = client.get_database("dota")
abilities: Collection = db.get_collection("abilities")

# Remove the stale scepter/shard fields before re-importing fresh data.
abilities.update_many({}, {
    "$unset": {
        "scepter_pickup_rate": 1,
        "scepter_win_wo": 1,
        "scepter_win_w": 1,
        "shard_pickup_rate": 1,
        "shard_win_wo": 1,
        "shard_win_w": 1
    }
})


def format_pct(xs):
    """Convert percentage strings to fractions; '-' becomes None."""
    # Named "def" instead of a lambda assignment (PEP 8 E731).
    return [None if x == '-' else float(x.strip().strip('%')) / 100 for x in xs]


res = requests.get("https://windrun.io/ability-shifts")
doc = html.fromstring(res.content)
# Ability ids are embedded in hrefs shaped like "/<section>/<id>".
ids = [
    int(x.split("/")[2])
    for x in doc.xpath('//*[@id="ability-shift-stats"]/tbody/tr/td[2]/a/@href')
]
Esempio n. 36
0
from pymongo.mongo_client import MongoClient
# Dump every document from the "metrics" collection of the Compose-hosted
# "ascension" database (credentials redacted in the URI).
client = MongoClient(
    "mongodb://*****:*****@aws-us-east-1-portal.25.dblayer.com:28209/ascension?ssl=true&ssl_cert_reqs=CERT_NONE&authMechanism=SCRAM-SHA-1"
)
# get_database() with no name returns the default database from the URI.
db = client.get_database()
collection = db['metrics']
cursor = collection.find({})
print(cursor)
# list() materializes the cursor directly instead of a manual append loop.
output = list(cursor)
print(output)
Esempio n. 37
0
class TestReadPreference(HATestCase):
    """Exercise each read-preference mode (PRIMARY, PRIMARY_PREFERRED,
    SECONDARY, SECONDARY_PREFERRED, NEAREST) against a tagged 3-member
    replica set while members are progressively killed and restarted."""

    # Speed up assertReadFrom() when no server is suitable.
    server_selection_timeout = 0.001

    def setUp(self):
        """Start a tagged replica set, record each member's address and tags,
        and seed pymongo_test.test with 10 fully-replicated documents."""
        super(TestReadPreference, self).setUp()

        members = [
            # primary
            {'tags': {'dc': 'ny', 'name': 'primary'}},

            # secondary
            {'tags': {'dc': 'la', 'name': 'secondary'}, 'priority': 0},

            # other_secondary
            {'tags': {'dc': 'ny', 'name': 'other_secondary'}, 'priority': 0},
        ]

        res = ha_tools.start_replica_set(members)
        self.seed, self.name = res

        primary = ha_tools.get_primary()
        self.primary = partition_node(primary)
        self.primary_tags = ha_tools.get_tags(primary)
        # Make sure priority worked
        self.assertEqual('primary', self.primary_tags['name'])

        self.primary_dc = {'dc': self.primary_tags['dc']}

        secondaries = ha_tools.get_secondaries()

        # Single-element unpack doubles as an assertion that exactly one
        # member carries each secondary tag.
        (secondary, ) = [
            s for s in secondaries
            if ha_tools.get_tags(s)['name'] == 'secondary']

        self.secondary = partition_node(secondary)
        self.secondary_tags = ha_tools.get_tags(secondary)
        self.secondary_dc = {'dc': self.secondary_tags['dc']}

        (other_secondary, ) = [
            s for s in secondaries
            if ha_tools.get_tags(s)['name'] == 'other_secondary']

        self.other_secondary = partition_node(other_secondary)
        self.other_secondary_tags = ha_tools.get_tags(other_secondary)
        self.other_secondary_dc = {'dc': self.other_secondary_tags['dc']}

        self.c = MongoClient(
            self.seed,
            replicaSet=self.name,
            serverSelectionTimeoutMS=self.server_selection_timeout)
        # w = number of secondaries + primary forces full replication of the
        # seed data before any read-preference assertions run.
        self.w = len(self.c.secondaries) + 1
        self.db = self.c.get_database("pymongo_test",
                                      write_concern=WriteConcern(w=self.w))
        self.db.test.delete_many({})
        self.db.test.insert_many([{'foo': i} for i in xrange(10)])

        self.clear_ping_times()

    def set_ping_time(self, host, ping_time_seconds):
        """Fake the measured round-trip time to *host* so NEAREST selection
        can be steered deterministically."""
        ServerDescription._host_to_round_trip_time[host] = ping_time_seconds

    def clear_ping_times(self):
        """Drop all faked round-trip times."""
        ServerDescription._host_to_round_trip_time.clear()

    def test_read_preference(self):
        """Verify member selection for every mode across four cluster
        states, with and without tag sets."""
        # We pass through four states:
        #
        #       1. A primary and two secondaries
        #       2. Primary down
        #       3. Primary up, one secondary down
        #       4. Primary up, all secondaries down
        #
        # For each state, we verify the behavior of PRIMARY,
        # PRIMARY_PREFERRED, SECONDARY, SECONDARY_PREFERRED, and NEAREST
        c = MongoClient(
            self.seed,
            replicaSet=self.name,
            serverSelectionTimeoutMS=self.server_selection_timeout)
        wait_until(lambda: c.primary, "discover primary")
        wait_until(lambda: len(c.secondaries) == 2, "discover secondaries")

        def assertReadFrom(member, *args, **kwargs):
            utils.assertReadFrom(self, c, member, *args, **kwargs)

        def assertReadFromAll(members, *args, **kwargs):
            utils.assertReadFromAll(self, c, members, *args, **kwargs)

        def unpartition_node(node):
            host, port = node
            return '%s:%s' % (host, port)

        # To make the code terser, copy hosts into local scope
        primary = self.primary
        secondary = self.secondary
        other_secondary = self.other_secondary

        bad_tag = {'bad': 'tag'}

        # 1. THREE MEMBERS UP -------------------------------------------------
        #       PRIMARY
        assertReadFrom(primary, PRIMARY)

        #       PRIMARY_PREFERRED
        # Trivial: mode and tags both match
        assertReadFrom(primary, PRIMARY_PREFERRED, self.primary_dc)

        # Secondary matches but not primary, choose primary
        assertReadFrom(primary, PRIMARY_PREFERRED, self.secondary_dc)

        # Chooses primary, ignoring tag sets
        assertReadFrom(primary, PRIMARY_PREFERRED, self.primary_dc)

        # Chooses primary, ignoring tag sets
        assertReadFrom(primary, PRIMARY_PREFERRED, bad_tag)
        assertReadFrom(primary, PRIMARY_PREFERRED, [bad_tag, {}])

        #       SECONDARY
        assertReadFromAll([secondary, other_secondary], SECONDARY)

        #       SECONDARY_PREFERRED
        assertReadFromAll([secondary, other_secondary], SECONDARY_PREFERRED)

        # Multiple tags
        assertReadFrom(secondary, SECONDARY_PREFERRED, self.secondary_tags)

        # Fall back to primary if it's the only one matching the tags
        assertReadFrom(primary, SECONDARY_PREFERRED, {'name': 'primary'})

        # No matching secondaries
        assertReadFrom(primary, SECONDARY_PREFERRED, bad_tag)

        # Fall back from non-matching tag set to matching set
        assertReadFromAll([secondary, other_secondary],
            SECONDARY_PREFERRED, [bad_tag, {}])

        assertReadFrom(other_secondary,
            SECONDARY_PREFERRED, [bad_tag, {'dc': 'ny'}])

        #       NEAREST
        self.clear_ping_times()

        assertReadFromAll([primary, secondary, other_secondary], NEAREST)

        assertReadFromAll([primary, other_secondary],
            NEAREST, [bad_tag, {'dc': 'ny'}])

        self.set_ping_time(primary, 0)
        self.set_ping_time(secondary, .03) # 30 ms
        self.set_ping_time(other_secondary, 10)

        # Nearest member, no tags
        assertReadFrom(primary, NEAREST)

        # Tags override nearness
        assertReadFrom(primary, NEAREST, {'name': 'primary'})
        assertReadFrom(secondary, NEAREST, self.secondary_dc)

        # Make secondary fast
        self.set_ping_time(primary, .03) # 30 ms
        self.set_ping_time(secondary, 0)

        assertReadFrom(secondary, NEAREST)

        # Other secondary fast
        self.set_ping_time(secondary, 10)
        self.set_ping_time(other_secondary, 0)

        assertReadFrom(other_secondary, NEAREST)

        self.clear_ping_times()

        assertReadFromAll([primary, other_secondary], NEAREST, [{'dc': 'ny'}])

        # 2. PRIMARY DOWN -----------------------------------------------------
        killed = ha_tools.kill_primary()

        # Let monitor notice primary's gone
        time.sleep(2 * self.heartbeat_frequency)

        #       PRIMARY
        assertReadFrom(None, PRIMARY)

        #       PRIMARY_PREFERRED
        # No primary, choose matching secondary
        assertReadFromAll([secondary, other_secondary], PRIMARY_PREFERRED)
        assertReadFrom(secondary, PRIMARY_PREFERRED, {'name': 'secondary'})

        # No primary or matching secondary
        assertReadFrom(None, PRIMARY_PREFERRED, bad_tag)

        #       SECONDARY
        assertReadFromAll([secondary, other_secondary], SECONDARY)

        # Only primary matches
        assertReadFrom(None, SECONDARY, {'name': 'primary'})

        # No matching secondaries
        assertReadFrom(None, SECONDARY, bad_tag)

        #       SECONDARY_PREFERRED
        assertReadFromAll([secondary, other_secondary], SECONDARY_PREFERRED)

        # Mode and tags both match
        assertReadFrom(secondary, SECONDARY_PREFERRED, {'name': 'secondary'})

        #       NEAREST
        self.clear_ping_times()

        assertReadFromAll([secondary, other_secondary], NEAREST)

        # 3. PRIMARY UP, ONE SECONDARY DOWN -----------------------------------
        ha_tools.restart_members([killed])
        ha_tools.wait_for_primary()

        ha_tools.kill_members([unpartition_node(secondary)], 2)
        time.sleep(5)
        ha_tools.wait_for_primary()
        time.sleep(2 * self.heartbeat_frequency)

        #       PRIMARY
        assertReadFrom(primary, PRIMARY)

        #       PRIMARY_PREFERRED
        assertReadFrom(primary, PRIMARY_PREFERRED)

        #       SECONDARY
        assertReadFrom(other_secondary, SECONDARY)
        assertReadFrom(other_secondary, SECONDARY, self.other_secondary_dc)

        # Only the down secondary matches
        assertReadFrom(None, SECONDARY, {'name': 'secondary'})

        #       SECONDARY_PREFERRED
        assertReadFrom(other_secondary, SECONDARY_PREFERRED)
        assertReadFrom(
            other_secondary, SECONDARY_PREFERRED, self.other_secondary_dc)

        # The secondary matching the tag is down, use primary
        assertReadFrom(primary, SECONDARY_PREFERRED, {'name': 'secondary'})

        #       NEAREST
        assertReadFromAll([primary, other_secondary], NEAREST)
        assertReadFrom(other_secondary, NEAREST, {'name': 'other_secondary'})
        assertReadFrom(primary, NEAREST, {'name': 'primary'})

        # 4. PRIMARY UP, ALL SECONDARIES DOWN ---------------------------------
        ha_tools.kill_members([unpartition_node(other_secondary)], 2)

        #       PRIMARY
        assertReadFrom(primary, PRIMARY)

        #       PRIMARY_PREFERRED
        assertReadFrom(primary, PRIMARY_PREFERRED)
        assertReadFrom(primary, PRIMARY_PREFERRED, self.secondary_dc)

        #       SECONDARY
        assertReadFrom(None, SECONDARY)
        assertReadFrom(None, SECONDARY, self.other_secondary_dc)
        assertReadFrom(None, SECONDARY, {'dc': 'ny'})

        #       SECONDARY_PREFERRED
        assertReadFrom(primary, SECONDARY_PREFERRED)
        assertReadFrom(primary, SECONDARY_PREFERRED, self.secondary_dc)
        assertReadFrom(primary, SECONDARY_PREFERRED, {'name': 'secondary'})
        assertReadFrom(primary, SECONDARY_PREFERRED, {'dc': 'ny'})

        #       NEAREST
        assertReadFrom(primary, NEAREST)
        assertReadFrom(None, NEAREST, self.secondary_dc)
        assertReadFrom(None, NEAREST, {'name': 'secondary'})

        # Even if primary's slow, still read from it
        self.set_ping_time(primary, 100)
        assertReadFrom(primary, NEAREST)
        assertReadFrom(None, NEAREST, self.secondary_dc)

        self.clear_ping_times()
Esempio n. 38
0
import sys
import datetime
from pprint import pprint

import feedparser
import pymongo
from pymongo.mongo_client import MongoClient

# Import one RSS/Atom feed (URL or path from argv[1]) into my_rss.all_rss.
arg = sys.argv
new_rss = feedparser.parse(arg[1])

client = MongoClient()
db = client.get_database("my_rss")
all_rss = db.get_collection('all_rss')

ft = new_rss.feed.title
for e in new_rss.entries:
    # published_parsed is a time.struct_time, or absent/falsy when the feed
    # omits a publication date — store None in that case.
    tt = e.published_parsed
    dt = datetime.datetime(*tt[:6],
                           tzinfo=datetime.timezone.utc) if tt else None
    d = {
        'published_parsed': dt,
        'title': e.title,
        'link': e.link,
        'content': e.description,
        'feed_title': ft
    }
    # Upsert keyed on the entry link, so re-importing the same feed updates
    # existing entries instead of duplicating them.
    result = all_rss.replace_one({'link': e.link}, d, upsert=True)

print('Feed added.')
Esempio n. 39
0
    def test_secondary_connection(self):
        """Verify direct (single-host) connections: a primary serves any read
        preference, a secondary serves all but PRIMARY, and unacknowledged
        writes to a secondary or arbiter raise AutoReconnect('not master')."""
        self.c = MongoClient(
            self.seed,
            replicaSet=self.name,
            serverSelectionTimeoutMS=self.server_selection_timeout)
        wait_until(lambda: len(self.c.secondaries), "discover secondary")
        # Wait for replication...
        w = len(self.c.secondaries) + 1
        db = self.c.get_database("pymongo_test",
                                 write_concern=WriteConcern(w=w))

        db.test.delete_many({})
        db.test.insert_one({'foo': 'bar'})

        # Test direct connection to a primary or secondary
        primary_host, primary_port = ha_tools.get_primary().split(':')
        primary_port = int(primary_port)
        (secondary_host,
         secondary_port) = ha_tools.get_secondaries()[0].split(':')
        secondary_port = int(secondary_port)
        arbiter_host, arbiter_port = ha_tools.get_arbiters()[0].split(':')
        arbiter_port = int(arbiter_port)

        # MongoClient succeeds no matter the read preference
        for kwargs in [
            {'read_preference': PRIMARY},
            {'read_preference': PRIMARY_PREFERRED},
            {'read_preference': SECONDARY},
            {'read_preference': SECONDARY_PREFERRED},
            {'read_preference': NEAREST},
        ]:
            client = MongoClient(
                primary_host,
                primary_port,
                serverSelectionTimeoutMS=self.server_selection_timeout,
                **kwargs)
            wait_until(lambda: primary_host == client.host,
                       "connect to primary")

            self.assertEqual(primary_port, client.port)
            self.assertTrue(client.is_primary)

            # Direct connection to primary can be queried with any read pref
            self.assertTrue(client.pymongo_test.test.find_one())

            client = MongoClient(
                secondary_host,
                secondary_port,
                serverSelectionTimeoutMS=self.server_selection_timeout,
                **kwargs)
            wait_until(lambda: secondary_host == client.host,
                       "connect to secondary")

            self.assertEqual(secondary_port, client.port)
            self.assertFalse(client.is_primary)

            # Direct connection to secondary can be queried with any read pref
            # but PRIMARY
            if kwargs.get('read_preference') != PRIMARY:
                self.assertTrue(client.pymongo_test.test.find_one())
            else:
                self.assertRaises(
                    AutoReconnect, client.pymongo_test.test.find_one)

            # Since an attempt at an acknowledged write to a secondary from a
            # direct connection raises AutoReconnect('not master'), MongoClient
            # should do the same for unacknowledged writes.
            try:
                client.get_database(
                    "pymongo_test",
                    write_concern=WriteConcern(w=0)).test.insert_one({})
            except AutoReconnect as e:
                self.assertEqual('not master', e.args[0])
            else:
                self.fail(
                    'Unacknowledged insert into secondary client %s should'
                    'have raised exception' % (client,))

            # Test direct connection to an arbiter
            client = MongoClient(
                arbiter_host,
                arbiter_port,
                serverSelectionTimeoutMS=self.server_selection_timeout,
                **kwargs)
            wait_until(lambda: arbiter_host == client.host,
                       "connect to arbiter")

            self.assertEqual(arbiter_port, client.port)
            self.assertFalse(client.is_primary)

            # See explanation above
            try:
                client.get_database(
                    "pymongo_test",
                    write_concern=WriteConcern(w=0)).test.insert_one({})
            except AutoReconnect as e:
                self.assertEqual('not master', e.args[0])
            else:
                self.fail(
                    'Unacknowledged insert into arbiter client %s should'
                    'have raised exception' % (client,))
Esempio n. 40
0
def worker(workerthread, numofsymbols):
    """Generate fake per-second stock prices and insert them into MongoDB.

    Args:
        workerthread: Worker/thread identifier (unused by this body).
        numofsymbols: How many securities to simulate per tick.

    Relies on module-level globals: MONGO_URI, args (CLI options),
    company_symbol / company_name, checkmongodbconnection(), getvalue().
    """
    try:
        # Seed each security with a random starting price.
        last_value = []
        for i in range(0, numofsymbols):
            last_value.append(round(random.uniform(1, 100), 2))

        # Block until the MongoDB server answers.
        while True:
            print('Checking MongoDB Connection')
            if checkmongodbconnection():
                break
            print('Problem connecting to MongoDB, sleeping 10 seconds')
            time.sleep(10)
        print('Successfully connected to MongoDB')

        c = MongoClient(MONGO_URI, server_api=ServerApi("1", strict=False))
        db = c.get_database(name=args.database)
        txtime = dt.now()
        txtime_end = txtime + timedelta(minutes=args.duration)
        if args.drop:
            print('\nDropping collection ' + args.collection + '\n')
            db.drop_collection(args.collection)
        if args.timeseries:
            collection = db.create_collection(name=args.collection,
                                              timeseries={
                                                  "timeField": "tx_time",
                                                  "granularity": "seconds"
                                              })
            print('Create collection result=' + collection.full_name)
        print(
            'Data Generation Summary:\n{:<12} {:<12}\n{:<12} {:<12}\n{:<12} {:<12}'
            .format('# symbols', args.symbols, 'Database', args.database,
                    'Collection', args.collection))
        print('\n{:<8}  {:<50}'.format('Symbol', 'Company Name'))
        for x in range(len(company_name)):
            print('{:<8}  {:<50}'.format(company_symbol[x], company_name[x]))
        print('\n{:<12} {:<12}'.format('Start time',
                                       txtime.strftime('%Y-%m-%d %H:%M:%S')))
        if args.duration:
            print('{:<12} {:<12}\n'.format(
                'End time', txtime_end.strftime('%Y-%m-%d %H:%M:%S')))
        else:
            print('No end time - run until user stop (control-Z)\n\n')
        counter = 0
        bContinue = True
        while bContinue:
            for i in range(0, numofsymbols):
                # Random-walk each security from its previous value.
                x = getvalue(last_value[i])
                last_value[i] = x
                try:
                    # The two original insert branches differed only in how
                    # tx_time is stored: ISO string vs. real datetime.
                    tx_time = (txtime.strftime('%Y-%m-%dT%H:%M:%SZ')
                               if args.AsString else txtime)
                    result = db[args.collection].insert_one({
                        'company_symbol': company_symbol[i],
                        'company_name': company_name[i],
                        'price': x,
                        'tx_time': tx_time
                    })
                    counter += 1
                    if counter % 100 == 0:
                        if args.duration > 0:
                            print('Generated ' + str(counter) +
                                  ' samples ({0:.0%})'.format(
                                      counter /
                                      (numofsymbols * args.duration * 60)))
                        else:
                            print('Generated ' + str(counter))
                    if args.duration > 0:
                        if txtime > txtime_end:
                            bContinue = False
                            continue
                except Exception as e:
                    print("error: " + str(e))
            # Advance the simulated clock one second per full symbol pass.
            txtime += timedelta(seconds=1)
        # NOTE(review): txtime is the *simulated* end time, so this delta can
        # be negative when generation outpaces the wall clock — confirm intent.
        duration = txtime - dt.now()
        print('\nFinished - ' + str(duration).split('.')[0])
    except BaseException:
        # BaseException keeps the original bare "except:" semantics
        # (KeyboardInterrupt included); report, then re-raise.
        print('Unexpected error:', sys.exc_info()[0])
        raise
Esempio n. 41
0
    def test_ship_of_theseus(self):
        """Replace every original replica-set member with newly added hosts
        and verify the client keeps rediscovering the set, even after its
        original seed list has become useless."""
        c = MongoClient(
            self.seed,
            replicaSet=self.name,
            serverSelectionTimeoutMS=self.server_selection_timeout)
        # w = all data-bearing members, so the insert is fully replicated.
        db = c.get_database(
            "pymongo_test",
            write_concern=WriteConcern(w=len(c.secondaries) + 1))
        db.test.insert_one({})
        find_one = db.test.find_one

        primary = ha_tools.get_primary()
        secondary1 = ha_tools.get_random_secondary()

        new_hosts = []
        for i in range(3):
            new_hosts.append(ha_tools.add_member())

            # RS closes all connections after reconfig.
            for j in xrange(30):
                try:
                    if ha_tools.get_primary():
                        break
                except (ConnectionFailure, OperationFailure):
                    pass

                time.sleep(1)
            else:
                self.fail("Couldn't recover from reconfig")

        # Wait for new members to join.
        for _ in xrange(120):
            if ha_tools.get_primary() and len(ha_tools.get_secondaries()) == 4:
                break

            time.sleep(1)
        else:
            self.fail("New secondaries didn't join")

        ha_tools.kill_members([primary, secondary1], 9)
        time.sleep(5)

        wait_until(lambda: (ha_tools.get_primary()
                            and len(ha_tools.get_secondaries()) == 2),
                   "fail over",
                   timeout=30)

        time.sleep(2 * self.heartbeat_frequency)

        # No error.
        find_one()
        find_one(read_preference=SECONDARY)

        # All members down.
        ha_tools.kill_members(new_hosts, 9)
        self.assertRaises(
            ConnectionFailure,
            find_one, read_preference=SECONDARY)

        ha_tools.restart_members(new_hosts)

        # Should be able to reconnect to set even though original seed
        # list is useless. Use SECONDARY so we don't have to wait for
        # the election, merely for the client to detect members are up.
        time.sleep(2 * self.heartbeat_frequency)
        find_one(read_preference=SECONDARY)

        # Kill new members and switch back to original two members.
        ha_tools.kill_members(new_hosts, 9)
        self.assertRaises(
            ConnectionFailure,
            find_one, read_preference=SECONDARY)

        ha_tools.restart_members([primary, secondary1])

        # Wait for members to figure out they're secondaries.
        wait_until(lambda: len(ha_tools.get_secondaries()) == 2,
                   "detect two secondaries",
                   timeout=30)

        # Should be able to reconnect to set again.
        time.sleep(2 * self.heartbeat_frequency)
        find_one(read_preference=SECONDARY)