Example #1
def get_orders_filters(
    barista: str,
    start_date: date = None,
    end_date: date = None,
) -> Cursor:
    """
    Get all orders from a barista, optionally limited to a creation-date range.

    :param str barista: barista name to filter on
    :param date start_date: first creation date to include
    :param date end_date: last creation date to include
    :return Cursor: pymongo cursor over the matching orders
    """
    collection = _get_mongo_db_client()[os.getenv('MONGODB_COLLECTION')]

    filters = dict()
    if barista is not None:
        filters['barista'] = barista

    if start_date is not None:
        start_dt = datetime.strptime(f'{start_date} 00:00:00',
                                     "%Y-%m-%d %H:%M:%S")
        object_id_start = ObjectId.from_datetime(start_dt)
        filters['_id'] = {'$gte': object_id_start}

    if end_date is not None:
        end_dt = datetime.strptime(f'{end_date} 00:00:00', "%Y-%m-%d %H:%M:%S")
        object_id_end = ObjectId.from_datetime(end_dt)
        # Merge into any existing _id filter so the start bound is not overwritten.
        filters.setdefault('_id', {})['$lte'] = object_id_end

    result = collection.find(filters)
    return result
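An ObjectId stores its creation time in its leading four bytes, which is why ids built with ObjectId.from_datetime can bracket a creation-date range, as in Example #1. A minimal sketch of that pattern (not from any of the listed projects), assuming a pymongo collection handle and naive UTC dates; both bounds are placed in the same _id sub-document so that neither overwrites the other:

from datetime import date, datetime, time, timedelta

from bson import ObjectId


def orders_created_between(collection, start_date: date, end_date: date):
    """Return a cursor over documents created in [start_date, end_date]."""
    start_dt = datetime.combine(start_date, time.min)                 # 00:00:00 on start_date
    end_dt = datetime.combine(end_date, time.min) + timedelta(days=1)  # exclusive upper bound
    id_range = {
        '$gte': ObjectId.from_datetime(start_dt),
        '$lt': ObjectId.from_datetime(end_dt),
    }
    return collection.find({'_id': id_range})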
Example #2
def daterange(duration):
	now = datetime.utcnow()
	
	current = pk > oid.from_datetime(now - duration)
	previous = (pk > oid.from_datetime(now - duration * 2)) & (pk < oid.from_datetime(now - duration))
	
	return current, previous
Example #3
    def list_orders(
        self,
        barista: str,
        start_date: date = None,
        end_date: date = None,
    ) -> List[Order]:
        filters = dict()
        if barista is not None:
            filters['barista'] = barista

        if start_date is not None:
            start_dt = datetime.strptime(f'{start_date} 00:00:00',
                                         "%Y-%m-%d %H:%M:%S")
            object_id_start = ObjectId.from_datetime(start_dt)
            filters['_id'] = {'$gte': object_id_start}

        if end_date is not None:
            end_dt = datetime.strptime(f'{end_date} 00:00:00',
                                       "%Y-%m-%d %H:%M:%S")
            object_id_end = ObjectId.from_datetime(end_dt)
            if filters.get('_id'):
                filters['_id']['$lte'] = object_id_end
            else:
                filters['_id'] = {'$lte': object_id_end}

        result = self._collection.find(filters)

        orders = list(result)

        for order in orders:
            order['is_ready'] = self.__order_is_ready(order=order)

        return [
            MongoDBOrderHydration().hydrate(order=order) for order in orders
        ]
Example #4
 def delete_item(self, activity_id, **kwargs):
     require_access(c.project.neighborhood, 'admin')
     activity = Activity.query.get(_id=ObjectId(activity_id))
     if not activity:
         raise exc.HTTPGone
     # find other copies of this activity on other users'/projects' timelines
     # but only within a small time window, so we can do efficient searching
     activity_ts = activity._id.generation_time
     time_window = timedelta(hours=1)
     all_copies = Activity.query.find({
         '_id': {
             '$gt': ObjectId.from_datetime(activity_ts - time_window),
             '$lt': ObjectId.from_datetime(activity_ts + time_window),
         },
         'obj': activity.obj,
         'target': activity.target,
         'actor': activity.actor,
         'verb': activity.verb,
         'tags': activity.tags,
     }).all()
     log.info('Deleting %s copies of activity record: %s %s %s', len(all_copies),
              activity.actor.activity_url, activity.verb, activity.obj.activity_url)
     for activity in all_copies:
         activity.query.delete()
     return {'success': True}
Example #5
    def test_no_transition(self):
        now = datetime.datetime.now()
        oid_a = ObjectId.from_datetime(now)
        oid_b = ObjectId.from_datetime(now - datetime.timedelta(seconds=1))
        oid_c = ObjectId.from_datetime(now - datetime.timedelta(seconds=2))
        self.content_store.append({"_id": oid_a}, "_id")
        self.content_store.append({"_id": oid_b}, "_id")
        self.content_store.append({"_id": oid_c}, "_id")

        def process(context, documents):
            return list(
                map(
                    lambda oid: StateUpdate({"_id": oid}, "state2"),
                    [oid_a, oid_b, oid_c],
                ))

        demo_worker, demo_work_context = self.create_demo_worker(
            process, "state1", {"state2"})
        demo_worker.work(demo_work_context)

        assert self.actual_documents_by_id_desc() == [
            {
                "_id": oid_a
            },
            {
                "_id": oid_b
            },
            {
                "_id": oid_c
            },
        ]
Example #6
 def delete_item(self, activity_id, **kwargs):
     require_access(c.project.neighborhood, 'admin')
     activity = Activity.query.get(_id=ObjectId(activity_id))
     if not activity:
         raise exc.HTTPGone
     # find other copies of this activity on other users'/projects' timelines
     # but only within a small time window, so we can do efficient searching
     activity_ts = activity._id.generation_time
     time_window = timedelta(hours=1)
     all_copies = Activity.query.find({
         '_id': {
             '$gt': ObjectId.from_datetime(activity_ts - time_window),
             '$lt': ObjectId.from_datetime(activity_ts + time_window),
         },
         'obj': activity.obj,
         'target': activity.target,
         'actor': activity.actor,
         'verb': activity.verb,
         'tags': activity.tags,
     }).all()
     log.info('Deleting %s copies of activity record: %s %s %s',
              len(all_copies), activity.actor.activity_url, activity.verb,
              activity.obj.activity_url)
     for activity in all_copies:
         activity.query.delete()
     return {'success': True}
Example #7
def hongbao():
    """
    Periodically tally users who sent a passphrase and received a red envelope (hongbao).

    Rule: a user sends a passphrase to 派派 (Paipai) and receives a red envelope.
    :return:
    """
    from datetime import datetime
    from bson import ObjectId
    import re

    redis = _redis_client()

    # Users who have already been sent a red envelope
    processed_users = set(json.loads(redis.get('viae/viae.provisional.hongbao/processed_users') or '[]'))

    # Timestamp marking how far red-envelope processing has progressed
    utc_tz = timezone('UTC')
    processed_since = redis.get('viae/viae.provisional.hongbao/processed_ts')
    logger.info('Processing from %s' % processed_since)
    processed_since = datetime.strptime(processed_since, '%Y-%m-%d %H:%M:%S').replace(tzinfo=utc_tz)

    dummy_id = ObjectId.from_datetime(processed_since)

    # Find which users have sent a red-envelope passphrase
    pattern = re.compile(u'(体验旅行派APP领现金红包|新用户口令|领新用户红包|从微信过来领红包|下单送北京大房免费住)', re.IGNORECASE)
    sender_list = mongo_hedy.Message.distinct('senderId',
                                              {'_id': {'$gt': dummy_id}, 'receiverId': 10000, 'contents': pattern})

    # These users must not be in the already-processed list and must have registered recently (the lookup below goes back seven days)
    final_senders = {}
    user_dummy_id = ObjectId.from_datetime(processed_since - timedelta(days=7))
    for s in filter(lambda v: v not in processed_users, sender_list):
        u = mongo_yunkai.UserInfo.find_one({'userId': s, '_id': {'$gt': user_dummy_id}}, {'userId': 1, 'nickName': 1})
        if not u:
            continue
        final_senders[u['userId']] = u

    if final_senders:
        # Prepare the report
        sections = []
        for uid, user in sorted(final_senders.items(), key=lambda v: v[0]):
            messages = mongo_hedy.Message.find({'senderId': uid, 'receiverId': 10000}, {'contents': 1})
            c = '\n'.join([tmp['contents'] for tmp in messages])
            sections.append(u'%d: %s\n%s\n\n' % (uid, user['nickName'], c))
            processed_users.add(uid)

        email_contents = ''.join(sections).strip()

        from viae.job import send_email_to_group, send_email

        logger.info('Sending hongbao stats')
        send_email_to_group(groups='MARKETPLACE', subject=u'红包申请统计', body=email_contents)

    # Keys expire after 7 days by default
    expire = 7 * 24 * 3600
    redis.set('viae/viae.provisional.hongbao/processed_users', json.dumps(list(processed_users)), expire)
    redis.set('viae/viae.provisional.hongbao/processed_ts',
              (datetime.utcnow() - timedelta(minutes=20)).replace(tzinfo=utc_tz).strftime('%Y-%m-%d %H:%M:%S'), expire)
Example #8
    def _check_value_valid_not_none_(self, value):
        if self.allow_none and value is None:
            return

        if isinstance(value, str) and not ObjectId.is_valid(value):
            raise FieldOidStringInvalid(self.key, value)
        if isinstance(value, datetime):
            try:
                ObjectId.from_datetime(value)
            except struct.error:
                raise FieldOidDatetimeOutOfRange(self.key, value)
Example #9
	def to_foreign(self, obj, name, value):  # pylint:disable=unused-argument
		if isinstance(value, OID):
			return value
		
		if isinstance(value, datetime):
			return OID.from_datetime(value)
		
		if isinstance(value, timedelta):
			return OID.from_datetime(datetime.utcnow() + value)
		
		if isinstance(value, MutableMapping) and '_id' in value:
			return OID(value['_id'])
		
		return OID(unicode(value))
Example #10
def tickets_stats_24hr():
    window = datetime.utcnow() - timedelta(hours=24)
    return TM.Ticket.query.find({
        '_id': {
            '$gte': ObjectId.from_datetime(window)
        }
    }).count()
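The same rolling-window count can be written against a plain pymongo collection handle; a minimal sketch under that assumption (newer pymongo releases deprecate cursor .count() in favour of count_documents):

from datetime import datetime, timedelta

from bson import ObjectId


def tickets_created_last_24h(tickets) -> int:
    """Count documents whose _id was generated within the last 24 hours."""
    window_start = datetime.utcnow() - timedelta(hours=24)
    return tickets.count_documents(
        {'_id': {'$gte': ObjectId.from_datetime(window_start)}})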
Example #11
 def __init__(self, month, num, link, year,
                       news, safe_title, transcript, alt, 
                       img, title, day, **kwargs):
     dummy_id = str(ObjectId.from_datetime(datetime.datetime.now()))
     db.Model.__init__(self, id=dummy_id, month=month, num=num, link=link, year=year,
                       news=news, safe_title=safe_title, transcript=transcript, alt=alt, 
                       img=img, title=title, day=day, **kwargs)
Example #12
def time_to_object_id(timestamps):
    """
    Convert a Unix timestamp to an ObjectId.
    :param timestamps: Unix timestamp (seconds since the epoch)
    :return:
    """
    # from_datetime expects a datetime instance, so convert the raw timestamp
    # first (assumes `from datetime import datetime`).
    return ObjectId.from_datetime(datetime.utcfromtimestamp(int(timestamps)))
Example #13
 def purge_old_entries(self):
     """
     Purge entries older than the expiry
     :return:
     """
     service = superdesk.get_resource_service("audit")
     logger.info("Starting to purge audit logs at {}".format(utcnow()))
     for _ in range(100):  # make sure we don't get stuck
         lookup = {
             "$and": [{
                 "_id": {
                     "$lt": ObjectId.from_datetime(self.expiry)
                 }
             }]
         }
         req = ParsedRequest()
         req.sort = '[("_id", 1)]'
         req.projection = '{"_id": 1}'
         req.max_results = 1000
         audits = service.get_from_mongo(req=req, lookup=lookup)
         items = list(item.get("_id") for item in audits)
         if len(items) == 0:
             logger.info("Finished purging audit logs at {}".format(
                 utcnow()))
             return
         logger.info("Found {} audit items at {}".format(
             len(items), utcnow()))
         service.delete_ids_from_mongo(items)
     logger.warning("Audit purge didn't finish in 100 iterations.")
Example #14
    def load(self):
        self.log.debug("[LOAD] legacy_processor is preparing ...")

        # extract sensors id list from mongoDB
        sensorID_list = dict()
        try:
            _iter = self.MongoClient.typecapteur.find()
            # parse 'typecapteur' collection (e.g. temperature, co2, humidity, etc.)
            for document in _iter:
                # parse sensors: each sensor has an ID
                # key nomCapteur: <topic/unitID/subID> e.g u4/campusfab/temperature/auto_92F8/79
                #   value = list( id associated with <topic/unitID/subID>, id piece )
                for capteur_doc in document["Capteurs"]:
                    sensorID_list[
                        capteur_doc["idCapteur"]] = capteur_doc["nomCapteur"]
        except Exception as e:
            self.log.warning("while getting list of capteur " + str(e))

        print("sensorID: " + str(sensorID_list))

        # mongodb filter
        _mongo_filter = dict()
        # | start_id
        if (self._start_id is not None):
            try:
                _mongo_filter['_id'] = {"$gte": ObjectId(self._start_id)}
            except Exception as ex:
                self.log.warning("while creating mongoDB filter from _id: " +
                                 str(ex))

                sys.exit(1)
        # | start_date
        elif (self._start_date is not None):
            try:
                # generate an id from the date
                # [apr.20] there seems to be a two-hour shift between ObjectId time and the real UTC measure time ?!?!
                _mongo_filter['_id'] = {
                    "$gte": ObjectId.from_datetime(self._start_date)
                }
            except Exception as ex:
                self.log.warning("while creating mongoDB filter from _id: " +
                                 str(ex))
                # self._shutdownEvent.set()
                sys.exit(1)

        # generate mongoDB iterator

        _src_iterator = self.MongoClient[self.coll_name].find(_mongo_filter)
        # _src_iterator = self._src.find( self._collection_name, query=_mongo_filter, skip=1000000 )

        # generate importer iterator
        cur_iter = dict()
        cur_iter['sensorID'] = sensorID_list
        cur_iter['iterators'] = [_src_iterator]

        self.overall_processed = 0
        print(_src_iterator[0])
        # log.info("MongoDB connection is UP featuring:\n\t{0:,d} measures :)\n\t{1:,d} unmanaged measures :(".format(mydb.measure.count(),mydb.failedData.count()) )

        return cur_iter
Example #15
    def test_all_valid_multiple_transitions(self):
        now = datetime.datetime.now()
        oid_a = ObjectId.from_datetime(now)
        oid_b = ObjectId.from_datetime(now - datetime.timedelta(seconds=1))
        oid_c = ObjectId.from_datetime(now - datetime.timedelta(seconds=2))
        self.content_store.append({"_id": oid_a, "_state": "state1"}, "_id")
        self.content_store.append({"_id": oid_b, "_state": "state1"}, "_id")
        self.content_store.append({"_id": oid_c, "_state": "state1"}, "_id")

        def process(context, documents):
            return list(
                map(
                    lambda oid: StateUpdate({"_id": oid}, "state2",
                                            {"$set": {
                                                "attr1": "a"
                                            }}),
                    [oid_a, oid_b],
                )) + list(
                    map(
                        lambda oid: StateUpdate({"_id": oid}, "state3",
                                                {"$set": {
                                                    "attr2": "b"
                                                }}),
                        [oid_c],
                    ))

        demo_worker, demo_work_context = self.create_demo_worker(
            process, "state1", {"state2", "state3"})
        demo_worker.work(demo_work_context)

        assert self.actual_documents_by_id_desc() == [
            {
                "_id": oid_a,
                "_state": "state2",
                "attr1": "a"
            },
            {
                "_id": oid_b,
                "_state": "state2",
                "attr1": "a"
            },
            {
                "_id": oid_c,
                "_state": "state3",
                "attr2": "b"
            },
        ]
Example #16
def referrer_stat():
    """
    Tally referral information for newly registered users.
    :return:
    """
    from viae.database import redis_client, mongo_yunkai
    from pytz import timezone
    from bson import ObjectId

    # When were referrals last processed?
    ts = redis_client.get('viae/viae.job.referrer_stat/ts')
    if not ts:
        ts = datetime.utcnow().replace(year=2012, tzinfo=timezone('UTC'))
    else:
        ts = datetime.strptime(ts, '%Y-%m-%d %H:%M:%S').replace(tzinfo=timezone('UTC'))

    dummy_id = ObjectId.from_datetime(ts)

    # Which users registered during this period with a referrer set?
    referrers = {}  # {100056: {nickname: 'Zephyre', inviting: [100256, 100884]}}
    user_cache = {}  # {100056: userInfo}
    for new_user in mongo_yunkai.UserInfo.find({'_id': {'$gt': dummy_id}, 'referrer': {'$ne': None}},
                                               {'userId': 1, 'nickName': 1, 'referrer': 1}):
        # Cache the new user
        new_user_id = long(new_user['userId'])
        user_cache[new_user_id] = new_user

        # Look up the referrer
        referrer_id = long(new_user['referrer'])
        if referrer_id not in referrers:
            r = mongo_yunkai.UserInfo.find_one({'userId': referrer_id}, {'userId': 1, 'nickName': 1})
            referrers[referrer_id] = {'nickname': r['nickName'], 'inviting': set([])}

            user_cache[r['userId']] = r

        # The referrer is the inviter
        referrer = referrers[referrer_id]
        referrer['inviting'].add(new_user_id)

    # Build the statistics report
    time_range = ' ~ '.join(map(lambda v: str(v), [ts, datetime.now().replace(tzinfo=timezone('UTC'))]))
    sections = []
    # Sort by the number of invited users, descending
    for uid, info in sorted(referrers.items(), key=lambda v: len(v[1]['inviting']), reverse=True):
        head = u'%s(%d)邀请了%d位用户:' % (user_cache[uid]['nickName'], uid, len(info['inviting']))

        invited_desc_list = []
        for invited_uid in info['inviting']:
            invited_desc_list.append(u'%s(%d)' % (user_cache[invited_uid]['nickName'], invited_uid))

        invited_desc = '\n'.join(invited_desc_list)

        sections.append('%s\n%s' % (head, invited_desc))

    if sections:
        send_email_to_group('MARKETPLACE', u'新用户邀请状况统计', time_range + '\n\n' + '\n\n'.join(sections))

    now_ts = (datetime.utcnow().replace(tzinfo=timezone('UTC')) - timedelta(hours=1)).strftime('%Y-%m-%d %H:%M:%S')
    redis_client.set('viae/viae.job.referrer_stat/ts', now_ts, 7 * 24 * 3600)
Example #17
def _get_records_by_date(date):
    """ Get learning records from LRS (Querying by specific date) """
    result = lrs.find(
        {
            "actor.name": session['username'],
            "_id": {
                "$gt": ObjectId.from_datetime(date),
                "$lt": ObjectId.from_datetime(date + timedelta(days=1))
            }
        }, {
            "actor.name": 1,
            "action": 1,
            "eventTime": 1,
            "object.name": 1
        })

    return result
Example #18
def test_cleanup_orphaned_versions_integration(library):
    _id = ObjectId.from_datetime(dt(2013, 1, 1))
    with patch('bson.ObjectId', return_value=_id):
        with ArcticTransaction(library, symbol, 'u1', 'l1') as mt:
            mt.write(symbol, ts1)
    assert library._versions.find({'parent': {'$size': 1}}).count() == 1
    library._cleanup_orphaned_versions(False)
    assert library._versions.find({'parent': {'$size': 1}}).count() == 1
Example #19
def test_cleanup_orphaned_versions_integration(library):
    _id = ObjectId.from_datetime(dt(2013, 1, 1))
    with patch('bson.ObjectId', return_value=_id):
        with ArcticTransaction(library, symbol, 'u1', 'l1') as mt:
            mt.write(symbol, ts1)
    assert mongo_count(library._versions, filter={'parent': {'$size': 1}}) == 1
    library._cleanup_orphaned_versions(False)
    assert mongo_count(library._versions, filter={'parent': {'$size': 1}}) == 1
Example #20
    def find(self, start, end, limit=50, *args, **kwargs):
        """
        find by creation date, using start and end dates as range
        """
        # check if spec has been specified, build on top of it
        fc = kwargs.get('spec', dict())

        # filter _id on start and end dates
        fc['_id'] = {'$gte': ObjectId.from_datetime(start),
                     '$lte': ObjectId.from_datetime(end)}

        if not self.collection:
            collection_name = kwargs.get('collection_name',
                                         MONGODB_DEFAULT_COLLECTION)
            self.set_collection(collection_name)

        return self.collection.find(fc, limit=limit)
Example #21
def general_obj_from_time(from_datetime=None, time_delta=None):
    if from_datetime is None or not isinstance(from_datetime,
                                               datetime.datetime):
        from_datetime = datetime.datetime.now()  # default to the current time
    if time_delta:  # time_delta is a datetime.timedelta; it can be added to or subtracted from a datetime
        from_datetime = from_datetime + time_delta

    return ObjectId.from_datetime(from_datetime)
Example #22
    def delete_older_than(cls, datetime_: datetime) -> int:
        """
        Delete all batches older than the given datetime.

        :param datetime_: the datetime before which the batches are to be deleted.
        :return: the number of batches that were deleted.
        """
        return cls.objects.filter(
            id__lte=ObjectId.from_datetime(datetime_)).delete()
Example #23
 def _cast_to_desired_type_(self, value) -> Any:
     if isinstance(value, datetime):
         return ObjectId.from_datetime(value)
     elif isinstance(value, str):
         return ObjectId(value)
     elif isinstance(value, ObjectId):
         return value
     else:
         raise FieldValueInvalid(self.key, value)
Example #24
    def _delete_recent_module_(self, keyword):
        """Delete modules which is created and marked inactive within `Bot.AutoReply.DeleteDataMins` minutes."""
        now = now_utc_aware()

        self.delete_many(
            {
                OID_KEY: {
                    "$lt":
                    ObjectId.from_datetime(now),
                    "$gt":
                    ObjectId.from_datetime(now - timedelta(
                        minutes=Bot.AutoReply.DeleteDataMins))
                },
                AutoReplyModuleModel.KEY_KW_CONTENT: keyword,
                AutoReplyModuleModel.Active.key: False
            },
            collation=case_insensitive_collation
            if AutoReply.CaseInsensitive else None)
Example #25
def get_object_id(from_datetime=None, span_days=0, span_hours=0, span_minutes=0, span_weeks=0):
    '''Manually generate an ObjectId from a datetime; this id is not meant to be stored.'''
    if not from_datetime:
        from_datetime = datetime.datetime.now()
    from_datetime = from_datetime + datetime.timedelta(days=span_days,
                                                       hours=span_hours,
                                                       minutes=span_minutes,
                                                       weeks=span_weeks)
    return ObjectId.from_datetime(generation_time=from_datetime)
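A possible usage of the helper above, with a hypothetical pymongo collection named events: a negative span yields an id for a point in the past, which then serves as a query lower bound. Note that from_datetime treats naive datetimes as UTC, so the helper's use of local datetime.now() may shift the boundary by the local UTC offset.

# Documents created roughly within the last hour; `events` is a hypothetical
# pymongo collection handle.
one_hour_ago_id = get_object_id(span_hours=-1)
recent = events.find({'_id': {'$gte': one_hour_ago_id}})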
Example #26
    def unprocessed_before(cls, datetime_: datetime) -> bool:
        """
        Assess whether there are unprocessed Uploads older than the specified datetime.

        :param datetime_: the datetime to check against.
        :return: True if there are unprocessed Uploads older than the specified datetime, False
          otherwise.
        """
        return (cls.objects.filter(id__lte=ObjectId.from_datetime(datetime_),
                                   to_publish=True).count() > 0)
Example #27
    def find(self, start, end, limit=50, *args, **kwargs):
        """
        find by creation date, using start and end dates as range
        """
        # check if spec has been specified, build on top of it
        fc = kwargs.get('spec', dict())

        # filter _id on start and end dates
        fc['_id'] = {
            '$gte': ObjectId.from_datetime(start),
            '$lte': ObjectId.from_datetime(end)
        }

        if not self.collection:
            collection_name = kwargs.get('collection_name',
                                         MONGODB_DEFAULT_COLLECTION)
            self.set_collection(collection_name)

        return self.collection.find(fc, limit=limit)
Example #28
def comment_cud(post_id, comment_id):
    if request.method == 'POST':
        comment = request.get_json()
        comment['timestamp'] = str(datetime.datetime.now())
        comment['id'] = ObjectId.from_datetime(datetime.datetime.now())
        database['posts'].update({'_id': ObjectId(post_id)},
                                 {'$push': {
                                     'comments': comment
                                 }})
        comment['id'] = str(comment['id'])
        comm_user = ugd({'_id': ObjectId(comment['commenting_user'])})
        comment['commenting_user'] = comm_user
        return comment
    elif request.method == 'PUT':
        comment = request.get_json()
        comment['timestamp'] = str(datetime.datetime.now())

        new_comment = comment
        new_comment['commenting_user'] = comment['commenting_user']['id']

        # Remove old comment
        database['posts'].update(
            {'_id': ObjectId(post_id)},
            {'$pull': {
                'comments': {
                    'id': ObjectId(comment_id)
                }
            }})

        # Insert new comment
        database['posts'].update({'_id': ObjectId(post_id)},
                                 {'$push': {
                                     'comments': new_comment
                                 }})
        return comment

    elif request.method == 'DELETE':
        comment = database['posts'].find({"_id": ObjectId(post_id)},
                                         {'comments': 1})
        comment = [x['comments'] for x in comment][0]
        comment = [x for x in comment if x['id'] == ObjectId(comment_id)][0]

        comment['id'] = str(comment['id'])

        database['posts'].update(
            {'_id': ObjectId(post_id)},
            {'$pull': {
                'comments': {
                    'id': ObjectId(comment_id)
                }
            }})

        return comment

    return {}
Example #29
def hot(top, days=1):
    """
        Get the top 'top' hot news items created within the last 'days' days.
    """
    now = datetime.datetime.now()
    start = now + datetime.timedelta(-days)

    cond = {'_id':{'$gte':ObjectId.from_datetime(start)}}
    r,v = m_page(TName,size=top,sort=[('hot',-1),('_id',-1)],**cond)
    return v
Example #30
 def __init__(self, first_name, last_name, email, password, **kwargs):
     """Create instance."""
     dummy_id = str(ObjectId.from_datetime(datetime.datetime.now()))
     db.Model.__init__(self,
                       id=dummy_id,
                       first_name=first_name,
                       last_name=last_name,
                       email=email,
                       password=password,
                       **kwargs)
     self.set_password(password)
Example #31
def object_id_from_datetime(from_datetime,
                            span_days=0,
                            span_hours=0,
                            span_minutes=0,
                            span_weeks=0):
    '''Manually generate an ObjectId from a Unix timestamp; this id is not meant to be stored.'''

    print(from_datetime)

    # from_datetime is a Unix timestamp; subtract the requested span from it.
    date_array = datetime.datetime.utcfromtimestamp(from_datetime)
    vtime = date_array - datetime.timedelta(days=span_days,
                                            hours=span_hours,
                                            minutes=span_minutes,
                                            weeks=span_weeks)
    return ObjectId.from_datetime(generation_time=vtime)
Example #32
def remove_old_stories():
    client = get_mongo_client()
    db = client.get_default_database()
    article_collection = db['articles']

    two_days_ago = datetime.utcnow() - timedelta(days=2)
    two_days_ago = ObjectId.from_datetime(two_days_ago)

    query = {'_id': {'$lt': two_days_ago}}

    article_collection.remove(query)
    close_mongo_client(client)
Example #33
    def delete_older_than(cls, datetime_: datetime) -> None:
        """
        Delete all Uploads older than the given datetime.

        :param datetime_: the datetime to check against.
        """
        objects = cls.objects.filter(id__lte=ObjectId.from_datetime(datetime_))
        n_deleted = objects.delete()
        _LOGGER.info(
            "Upload documents deletion completed.",
            extra=dict(n_deleted=n_deleted,
                       created_before=datetime_.isoformat()),
        )
Example #34
    def test_id_based_key(self):
        """Test ID-based key generation."""

        # From record
        rec = IdBasedKeySample()
        rec.id_ = ObjectId.from_datetime(dt.datetime.fromtimestamp(123456789))
        key1 = rec.to_key()
        self.assertEqual(key1, 'IdBasedKeySample=' + str(rec.id_))

        # Using static method
        key1 = rec.to_key()
        key2 = IdBasedKeySample.create_key(id_=rec.id_)
        self.assertEqual(key2, 'IdBasedKeySample=' + str(rec.id_))
Example #35
def add_order(order: Order) -> Order:
    """
    Add a new order document to MongoDB.

    :param Order order: order to insert
    :return Order: the inserted order
    """
    collection = _get_mongo_db_client()[os.getenv('MONGODB_COLLECTION')]
    now = datetime.now()
    order.dt_created = now.strftime("%Y-%m-%d %H:%M:%S")
    collection.insert_one(
        document=dict(_id=ObjectId.from_datetime(now), **order.dict()))
    return order
Example #36
    def purge_old_entries(self):
        """Purge entries older than the expiry"""

        logger.info("Starting to purge audit logs at {}".format(utcnow()))
        service = superdesk.get_resource_service("audit")
        time_start = time()
        service.delete_from_mongo(
            {"_id": {
                "$lt": ObjectId.from_datetime(self.expiry)
            }})
        time_diff = time() - time_start
        logger.info(
            f"Finished purging audit logs. Took {time_diff:.4f} seconds")
Example #37
    def delete_older_than(cls, datetime_: datetime) -> None:
        """
        Delete all batches older than the given datetime.

        :param datetime_: the datetime before which the batches are to be deleted.
        """
        objects = cls.objects.filter(id__lte=ObjectId.from_datetime(datetime_))
        count = objects.count()
        objects.delete()
        _LOGGER.info(
            "BatchFile documents deletion completed.",
            extra=dict(n_deleted=count, created_before=datetime_.isoformat()),
        )
Example #38
    def setup_class(cls):
        setup_connection('mongodb://localhost',
                         'YUCCASTATS_TESTS_DB_STATS',
                         'test_statistics',
                         'YUCCASTATS_TESTS_DB_SUPPORT')

        tenants = [
            {"idTenant": -4, "tenantName": "smartlab",
             "tenantDescription": "Smartlab and reference environment",
             "tenantCode": "test-smartlab",
             "dataCollectionName": "data",
             "dataCollectionDb": "YUCCASTATS_TESTS_DB_smartlab",
             "measuresCollectionName": "measures",
             "measuresCollectionDb": "YUCCASTATS_TESTS_DB_smartlab",
             "socialCollectionName": "social",
             "socialCollectionDb": "YUCCASTATS_TESTS_DB_smartlab",
             "mediaCollectionName": "media",
             "mediaCollectionDb": "YUCCASTATS_TESTS_DB_smartlab",
             "archiveDataCollectionName": "archivedata",
             "archiveDataCollectionDb": "YUCCASTATS_TESTS_DB_smartlab",
             "archiveMeasuresCollectionName": "archivemeasures",
             "archiveMeasuresCollectionDb": "YUCCASTATS_TESTS_DB_smartlab"},
            {"idTenant": -1, "tenantName": "csp",
             "tenantDescription": "CSP - Innovazione nelle ICT",
             "tenantCode": "test-csp",
             "dataCollectionName": "data",
             "dataCollectionDb": "YUCCASTATS_TESTS_DB_csp",
             "measuresCollectionName": "Measures",
             "measuresCollectionDb": "YUCCASTATS_TESTS_DB_csp",
             "socialCollectionName": "social",
             "socialCollectionDb": "YUCCASTATS_TESTS_DB_csp",
             "mediaCollectionName": "media",
             "mediaCollectionDb": "YUCCASTATS_TESTS_DB_csp",
             "archiveDataCollectionName": "archivedata",
             "archiveDataCollectionDb": "YUCCASTATS_TESTS_DB_csp",
             "archiveMeasuresCollectionName": "archivemeasures",
             "archiveMeasuresCollectionDb": "YUCCASTATS_TESTS_DB_csp"},
        ]
        get_support_db().tenant.insert(tenants)
        cls.tenants = [t['tenantCode'] for t in tenants]

        cls.counts = {}
        for tenant_info in tenants:
            cols = get_data_collections(tenant_info)
            for idx, col in enumerate(cols):
                items_to_insert = (idx+1)*10
                cls.counts[col.name] = items_to_insert

                ids = [ObjectId.from_datetime(datetime(2001, 1, 1, 1, x) + timedelta(days=x))
                       for x in range(items_to_insert)]
                col.insert((dict(count=i, _id=ids[i]) for i in range(items_to_insert)))
Example #39
    def find_tokens(cls, kw, from_id=None):
        first_request = False
        if not from_id:
            d = datetime.datetime.utcnow() - datetime.timedelta(seconds=cls.TTL)
            from_id = ObjectId.from_datetime(d)
            first_request = True

        q = mongoengine.Q(tokens__all=kw) & mongoengine.Q(id__gt=from_id)
        obj = cls.objects(q)

        if not len(obj) and first_request:
            obj = cls.objects(mongoengine.Q(tokens__all=kw))

        return obj[:25]
Example #40
 def put(self,message_id):
     '''Post a comment on a message.'''
     args = add_comment_parser.parse_args()
     message = Message.objects(id=message_id).first()
     user = User.objects(id=args['user_id']).first()
     comment = Comment()
     comment.comment_id = str(ObjectId.from_datetime(generation_time=datetime.datetime.now()))
     comment.content = args['content']
     comment.publisher = user.name
     comment.publisher_id = str(user.id)
     comment.publish_time = time.time() * 1000
     message.comments.append(comment)
     message.update(comments = message.comments)
     return {'code':200,'msg':'发布评论成功'}
Example #41
    def get_by_table_id(self, table_id, since_date):
        table_id = utils.as_object_id(table_id)

        if since_date:
            # Create a dummy chat id which is then used to ensure that only
            # chats which were created after the since date are returned
            query_chat_id = ObjectId.from_datetime(since_date)
            query = self.db.chat.find({"table_id": table_id, "_id": {"$gt": query_chat_id}})
        else:
            query = self.db.chat.find({"table_id": table_id})

        results = list(query.sort("_id", 1).limit(100))

        chats = []
        for document in results:
            chats.append(ChatMessage().from_db(document))
        return chats
Example #42
    def _monitor(self, collection, query_key, query):
        # TODO: Handle doc removal
        # TODO: Batch requests
        try:
            query = {'doc.{0}'.format(k): v for k, v in query.items()}
            query['_id'] = {'$gt': ObjectId.from_datetime(datetime.utcnow())}
            opslog = self.opslog(collection)
            cursor = opslog.find(query, tailable=True, await_data=True)
            item = tail(cursor.tail)
            while True:
                ops, err = next(item)
                if err:
                    raise err

                print(ops)
                if not ops['doc'].get('_id'):
                    _log.warn('Opslog for collection "{0}" contains a '
                              'document with no _id'.format(collection))
                    continue

                if ops['op'] == 'insert':
                    doc = ops['doc']
                elif ops['op'] == 'update':
                    doc = ops['updated']

                response = json.dumps({
                    'response': 'subscribe',
                    'query': query_key,
                    'collection': collection,
                    'result': [doc],
                })

                for request in list(self.subscriptions[query_key]):
                    if request.is_closed:
                        self.subscriptions[query_key].remove(request)
                        continue
                    request.send(response)

                if not self.subscriptions[query_key]:
                    break
        except Exception as e:
            _log.exception(e)
        finally:
            if query_key in self.subscriptions:
                del self.subscriptions[query_key]
Example #43
 def _get_or_generate_id(self, doc):
     return doc[self.doc_id] if self.doc_id in doc else ObjectId.from_datetime(datetime.now())
Example #44
def id_between(after, before, query=None):
  # Avoid a mutable default argument: a shared dict would otherwise keep
  # accumulating keys across calls.
  if query is None:
    query = {}
  query.update({
          '$and': [{'_id': {'$gte': ObjectId.from_datetime(after),
                            '$lte': ObjectId.from_datetime(before)}}]
  })
  return query
Example #45
def tickets_stats_24hr():
    window = datetime.utcnow() - timedelta(hours=24)
    return TM.Ticket.query.find({'_id': {'$gte': ObjectId.from_datetime(window)}}).count()
Example #46
	def test_document_assignment(self, Sample):
		now = _dt.utcnow().replace(microsecond=0)
		instance = Sample({'_id': ObjectId.from_datetime(now)})
		assert instance.field.replace(tzinfo=None) == now
Example #47
def oid_con(seed):
    value = int(seed)
    dt = datetime.datetime.fromtimestamp(value)
    return ObjectId.from_datetime(dt)
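Going in the other direction, the timestamp embedded by from_datetime can be read back through the generation_time property; a small round-trip sketch (not part of the original example):

import datetime

from bson import ObjectId

# Pack a datetime into a dummy ObjectId, then read it back. from_datetime
# treats naive datetimes as UTC and zero-fills the non-timestamp bytes;
# generation_time returns an aware UTC datetime with second precision.
dt = datetime.datetime(2021, 6, 1, 12, 30, 45)
oid = ObjectId.from_datetime(dt)
assert oid.generation_time == dt.replace(tzinfo=datetime.timezone.utc)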