Example #1
0
def save_capped_db(ops, coll):
    try:
        result = coll.bulk_write(ops)
    except Exception as e:
        log.exception("Error saving CMC tickers. %s", str(e))
        db = get_db()
        stats = db.command("collstats", coll.name)

        if not stats['capped']:
            return False

        max_size = stats['maxSize']

        # Capped collection full. Drop and re-create w/ indexes.
        if stats['size'] / max_size > 0.9:
            from pymongo import IndexModel, ASCENDING, DESCENDING

            log.info("Capped collection > 90% full. Dropping and recreating.")
            name = coll.name
            coll.drop()

            db.create_collection(name, capped=True, size=max_size)
            idx1 = IndexModel([("symbol", ASCENDING)], name="symbol")
            idx2 = IndexModel([("date", DESCENDING)], name="date_-1")
            db[name].create_indexes([idx1, idx2])

            log.info("Retrying bulk_write")
            try:
                result = db[name].bulk_write(ops)
            except Exception as e:
                log.exception("Error saving CMC tickers. %s", str(e))
                return False
        else:
            log.error("Size is < 90% of max. Unknown error.")
            return False
    return True
Example #2
0
def create_indexes() -> None:
    ticker_ts_index = IndexModel([('ticker', ASCENDING), ('time_series.time', ASCENDING)], unique=True)
    name_index = IndexModel([('name', ASCENDING)], unique=True)
    cusip_index = IndexModel([('cusip', ASCENDING)], unique=True)
    isin_index = IndexModel([('isin', ASCENDING)], unique=True)
    sedol_index = IndexModel([('sedol', ASCENDING)], unique=True)
    _collection.create_indexes([ticker_ts_index, name_index, cusip_index, isin_index, sedol_index])
Example #3
0
class DbReservation(BaseDocument):
    __collection_name__ = 'reservation'
    __indexes__ = [
        IndexModel([('end', ASCENDING), ('start', ASCENDING)]),
        IndexModel([('items', ASCENDING), ('end', ASCENDING),
                    ('start', ASCENDING)]),
        IndexModel([('user_id', ASCENDING), ('end', ASCENDING),
                    ('start', ASCENDING)]),
        IndexModel([('user_id', ASCENDING), ('items', ASCENDING),
                    ('end', ASCENDING), ('start', ASCENDING)]),
    ]

    id: UUID = Field(..., alias='_id')
    type: ReservationType = Field(...)
    name: str = Field(...)

    start: date = Field(...)
    end: date = Field(...)

    user_id: str = Field(...)
    team_id: Optional[str] = None

    contact: str = Field(...)

    items: List[UUID] = Field(...)
Example #4
0
 def _create_indexes(self):
     uid_unique_index = IndexModel("uid", unique=True)
     creator_index = IndexModel("splash_md.creator")
     sort_index = IndexModel([("splash_md.last_edit", DESCENDING),
                              ("uid", DESCENDING)])
     self._collection.create_indexes(
         [uid_unique_index, creator_index, sort_index])
Example #5
0
 def __create_index(self, mongo_db_name, col_name):
     self.client[mongo_db_name][col_name].create_indexes([
         IndexModel([("insertion_timestamp", pymongo.ASCENDING)]),
         IndexModel([("level", pymongo.ASCENDING)]),
         IndexModel([("caller", pymongo.ASCENDING)]),
         IndexModel([("pid", pymongo.ASCENDING)])
     ])
Example #6
0
 def build_index(self):
     indexes_model = [
         IndexModel([("date_report", ASCENDING)]),
         IndexModel([("media", ASCENDING)]),
         IndexModel([("success", ASCENDING)])
     ]
     self.collection.create_indexes(indexes_model)
Example #7
0
def create_index_models(coll_name_ext, ind_field_names):
    '''
    The function does not index anything itself; it only prepares the
    IndexModel objects that drive indexing. For db-VCF and db-BED, index
    models are built for the location fields and the fundamental
    identifiers, regardless of whether the researcher listed field names
    to index. Because the obligatory indexes on the chromosome and
    position fields are designed as compound indexes, parsers will later
    be able to sort their results efficiently in the way that is standard
    for computational genetics: by genomic location. Single-field indexes
    are planned for the fields named by the researcher. If those happen to
    fall on the chromosome or position of a db-VCF/db-BED, they will
    coexist with the mandatory compound index. The index set can be
    updated later with a separate program from this project, or with
    MongoDB Shell or Compass.
    '''
    if coll_name_ext == 'vcf':
        index_models = [
            IndexModel([('#CHROM', ASCENDING), ('POS', ASCENDING)]),
            IndexModel([('ID', ASCENDING)])
        ]
    elif coll_name_ext == 'bed':
        index_models = [
            IndexModel([('chrom', ASCENDING), ('start', ASCENDING),
                        ('end', ASCENDING)]),
            IndexModel([('name', ASCENDING)])
        ]
    else:
        index_models = []
    if ind_field_names is not None:
        index_models += [
            IndexModel([(ind_field_name, ASCENDING)])
            for ind_field_name in ind_field_names
        ]
    return index_models
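A minimal usage sketch (an illustration, not code from the original project): create_index_models only builds the IndexModel objects, so the caller still has to pass them to create_indexes. The connection string, database and collection names, and the 'INFO.AF' field are assumptions made up for the example.

from pymongo import MongoClient

# Hypothetical usage: prepare models for a db-VCF collection plus one
# researcher-specified field, then apply them in one call.
db = MongoClient('mongodb://localhost:27017')['example_db']
models = create_index_models('vcf', ['INFO.AF'])
db['sample_vcf'].create_indexes(models)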
Example #8
0
class AuthorizationCode(BaseDocument, AuthorizationCodeMixin):
    __indexes__ = [
        IndexModel([('expiration_time', ASCENDING)], expireAfterSeconds=0),
        IndexModel([('user_id', ASCENDING)]),
    ]
    __collection_name__ = 'authorization_code'

    code: str = Field(..., alias='_id')

    user_id: str = ...
    client_id: str
    redirect_uri: str
    response_type: Optional[str]
    scope: Optional[str]
    nonce: Optional[str]
    auth_time: int = ...
    expiration_time: datetime = ...

    code_challenge: Optional[str]
    code_challenge_method: Optional[str]

    def is_expired(self):
        return self.expiration_time < datetime.utcnow()

    def get_redirect_uri(self):
        return self.redirect_uri

    def get_scope(self):
        return self.scope

    def get_auth_time(self):
        return self.auth_time

    def get_nonce(self):
        return self.nonce
Example #9
0
def get_content_collection(chat_id):
    logger.info('Start copying data.')
    mongo_client = pymongo.MongoClient()
    db = mongo_client['tg_backup']
    # Move chat data to separate collection
    # First of all: create index!
    content_collection = db['content']
    chat_id_index_name = 'chat_id'
    chat_id_index = IndexModel([('chat_id', ASCENDING)], name=chat_id_index_name)
    if chat_id_index_name not in content_collection.index_information():
        content_collection.create_indexes([chat_id_index])
    # Second: create new collection
    tmp_collection = db['tmp_content']
    copy_filter = {'chat_id': chat_id, **MONGO_HAS_SOME_TEXT}
    docs_to_copy = content_collection.count_documents(copy_filter)
    # Third: move all docs with any text if not already copied
    if tmp_collection.count_documents({}) != docs_to_copy:
        tmp_collection.delete_many({})
        copy_bar = tqdm(content_collection.find(copy_filter),
                        total=docs_to_copy,
                        unit='msg',
                        smoothing=0.01,
                        desc='Moving data')
        for doc in copy_bar:
            tmp_collection.insert_one(doc)
        logger.info('Copied successfully.')
        # Fourth: create new indexes
        msg_date_index_name = 'date_index'
        msg_date_index = IndexModel([('date', DESCENDING)], name=msg_date_index_name)
        if msg_date_index_name not in tmp_collection.index_information():
            tmp_collection.create_indexes([msg_date_index])
    else:
        logger.info('Data already copied.')
    return tmp_collection
Example #10
0
 def __init__(self, mongodb_url: str, db_name: str, collection_name: str):
     self.collection_name = collection_name
     self.db_name = db_name
     self._conn: MongoClient = MongoClient(mongodb_url)
     self._get_collection().create_indexes([
         IndexModel([("image_hash", HASHED)]),
         IndexModel([("fid", ASCENDING)], unique=True),
     ])
Example #11
0
    class Meta:
        indexes = [
            IndexModel([('userName', 1)], unique=True),
            IndexModel([('email', 1)], unique=True)
        ]  # This is how unique fields are specified

        collection_name = 'users'  # Without this, a separate "User" collection gets created
        final = True  # Set so the _cls field is not stored
Example #12
0
 def set_meal_indexes(self):
     self.db.meals.create_indexes([
         IndexModel([("product_ids", ASCENDING)],
                    name='product_ids_',
                    unique=True),
         IndexModel([("status", ASCENDING)], name='status_'),
         IndexModel([("amount", ASCENDING)], name='amount_')
     ])
Example #13
0
 def set_couriers_indexes(self):
     self.db.couriers.create_indexes([
         IndexModel([("position_id", ASCENDING)],
                    name='position_id_',
                    unique=True),
         IndexModel([("status", ASCENDING)], name='status_'),
         IndexModel([("age", DESCENDING)], name='age_')
     ])
Example #14
0
 def set_feedback_indexes(self):
     self.db.feedback.create_indexes([
         IndexModel([("customer_id", ASCENDING)], name='customer_id_'),
         IndexModel([("order_id", ASCENDING)], name='order_id_'),
         IndexModel([("restaurant_id", ASCENDING)], name='restaurant_id_'),
         IndexModel([("courier_id", ASCENDING)], name='courier_id_'),
         IndexModel([("rating", ASCENDING)], name='rating_')
     ])
Example #15
0
class Calls:
    calls: pymotyc.Collection[CallModel] = pymotyc.Collection(
        indexes=[
            IndexModel('call_id', unique=True),
            IndexModel('call_guid', unique=True),
            'phone', 'topic'
        ],
    )
Example #16
0
def create_indexes():
    conn = MongoClient(conf.MONGO_URL)
    conn.main.static.create_indexes([
        IndexModel([("id", ASCENDING)], unique=True),
        IndexModel([("expire", ASCENDING)], expireAfterSeconds=600)
    ])
    conn.main.teams.create_indexes(
        [IndexModel([("ip", ASCENDING)], unique=True)])
    conn.close()
Example #17
0
 def set_restaurants_indexes(self):
     self.db.restaurants.create_indexes([
         IndexModel([("position_id", ASCENDING)],
                    name='position_id_',
                    unique=True),
         IndexModel([("meal_ids", ASCENDING)], name='meal_ids_'),
         IndexModel([("order_ids", ASCENDING)], name='order_ids_'),
         IndexModel([("status", ASCENDING)], name='status_')
     ])
Example #18
0
 def __init__(self):
     index_keywords = IndexModel([('keywords', pymongo.TEXT)], name=index_name, default_language='english')
     index_sha1 = IndexModel([('sha1', pymongo.ASCENDING)], name="index_sha1")
     client = pymongo.MongoClient(dbstr)
     self.collection = client['crawlerdb']['news']
     if index_name not in self.collection.index_information():
         self.collection.create_indexes([index_keywords])
     if not "index_sha1" in self.collection.index_information():
         self.collection.create_indexes([index_sha1])
Example #19
0
    def _init_collection(self):
        indexes = [
            IndexModel([('gold', ASCENDING)]),
            IndexModel([('subject', ASCENDING)])
        ]

        logger.debug('inserting %d indexes', len(indexes))
        self.collection.create_indexes(indexes)
        logger.debug('done')
Example #20
0
class Peer(BaseClass):
    """Holds information about the chains state."""
    COLLECTION = "peers"

    INDEXES = [
        IndexModel([("type", ASCENDING)]),
        IndexModel([("address", ASCENDING)]),
        IndexModel([("last_seen", DESCENDING)])
    ]
Example #21
0
 def __init__(self, function_result_status_persistance_conf, queue_name):
     self.function_result_status_persistance_conf = function_result_status_persistance_conf
     if self.function_result_status_persistance_conf.is_save_status:
         task_status_col = self.mongo_db_task_status.get_collection(queue_name)
         task_status_col.create_indexes([IndexModel([("insert_time_str", -1)]), IndexModel([("insert_time", -1)]),
                                         IndexModel([("params_str", -1)]), IndexModel([("success", 1)])
                                         ], )
         task_status_col.create_index([("utime", 1)],
                                       expireAfterSeconds=function_result_status_persistance_conf.expire_seconds)  # Keep records for 7 days only.
         self._mongo_bulk_write_helper = MongoBulkWriteHelper(task_status_col, 100, 2)
Example #22
0
        def _model(index):
            """Converts to IndexModel"""
            if isinstance(index, list):
                index = IndexModel([_tuple(x) for x in index])

            elif not isinstance(index, IndexModel):
                index = IndexModel([_tuple(index)])

            index.document['background'] = True
            return index
Example #23
0
    class Meta:
        """Post meta."""

        collection = 'posts'
        indexes = [
            IndexModel([('title', ASCENDING)], unique=True,
                       name='title_index'),
            IndexModel([('author', ASCENDING), ('created', DESCENDING)],
                       name='author_created_index')
        ]
Example #24
0
 def __init__(self, db_url: str):
     self._client = MongoClient(db_url)
     self._database = self._client[urlparse(db_url).path[1:]]
     self.data1 = self._init_collection(
         "data1",
         [IndexModel("created_at")],
     )
     self.data2 = self._init_collection(
         "data2",
         [IndexModel("created_at")],
     )
Example #25
0
async def setup_db(app):
    user_indexes = [
        IndexModel([("username", ASCENDING)],
                   name="username_index",
                   unique=True),
        IndexModel([("cards", ASCENDING)], name="cards_index", sparse=True),
        IndexModel([("tokens.token", ASCENDING)],
                   name="token_index",
                   sparse=True),
    ]
    await app["db"].users.create_indexes(user_indexes)
Example #26
0
 def _make_indexes(self, collection: str) -> None:
     """Makes indexes used by Corintick.
     Metadata is not used for querying and therefore not indexed.
     Making `ix1` unique is meant to avoid accidentally inserting duplicate documents.
     """
     ix1 = IndexModel([('uid', 1), ('start', -1), ('end', -1)],
                      unique=True,
                      name='default')
     ix2 = IndexModel([('uid', 1), ('end', -1), ('start', -1)],
                      name='reverse')
     self.db.get_collection(collection).create_indexes([ix1, ix2])
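A hedged sketch of the kind of overlap query the 'default' index above is meant to serve; this is not Corintick's own API, and the method name, window parameters, and sort order are assumptions for illustration (the uid/start/end fields come from the index keys).

 def _find_overlapping(self, collection: str, uid, window_start, window_end):
     # Illustrative only: equality on uid plus a range filter on start/end,
     # sorted newest-first, which the unique 'default' index can satisfy.
     return (self.db.get_collection(collection)
             .find({'uid': uid,
                    'start': {'$lte': window_end},
                    'end': {'$gte': window_start}})
             .sort('start', -1))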
Example #27
0
    def _init_collection(self):
        indexes = [
            IndexModel([('subject_id', ASCENDING)]),
            IndexModel([('user_id', ASCENDING)]),
            IndexModel([('subject_id', ASCENDING), ('user_name', ASCENDING)]),
            IndexModel([('seen_before', ASCENDING),
                        ('classification_id', ASCENDING)])]

        logger.debug('inserting %d indexes', len(indexes))
        self.collection.create_indexes(indexes)
        logger.debug('done')
Example #28
0
class TournamentDAO(DataStore, metaclass=Singleton):

    index_1 = IndexModel([('tournament_id', pymongo.ASCENDING)],
                         unique=True,
                         sparse=True)
    index_2 = IndexModel([('name', pymongo.ASCENDING)], unique=True)

    def __init__(self):
        super().__init__('Tournament')
        self._create_indexes([self.index_1, self.index_2])
Example #29
0
def create_indexes(collection):
    while True:
        try:
            id = IndexModel([("id", ASCENDING)], unique=True)
            hashtags = IndexModel([("hashtags", ASCENDING)])
            created = IndexModel([("created", ASCENDING)])
            collection.create_indexes([id, hashtags, created])
        except NotMasterError:
            print("Waiting for mongodb-server to become primary...")
            time.sleep(1)
            continue
        break
Example #30
0
    def parse_indexes(cls, indexes=None):
        """only used for create_indexes"""

        indexes_ = []
        for item in indexes or cls.__dict__.get("__indexes__", []):
            if isinstance(item, str):
                indexes_.append(IndexModel(get_sort(item, for_index=True)))
            else:
                indexes_.append(
                    IndexModel(get_sort(item[0], for_index=True), **item[1]))

        return indexes_