Exemplo n.º 1
0
    def get_period(self):
        """Prompt the user for a start and an end date (DIA-MES-ANO) and
        return them as a timezone-aware ``(gte, lte)`` pair in UTC-3.

        ``gte`` is the start date at 00:00:00.000000 and ``lte`` is the end
        date at 23:59:59.000000, both with a fixed -0300 offset.

        Invalid input re-prompts for just that date. (The original version
        recursed on error and then fell through, overwriting the recovered
        values and asking for both dates again.)
        """
        date = datetime.now()
        tz = FixedOffset(-180, "-0300")  # fixed UTC-3 offset

        # Ask for the start of the period until a valid date is entered.
        while True:
            try:
                date_initial = input(
                    'Digite a data inicial no seguinte formato DIA-MES-ANO, exemplo {0}-{1}-{2}: '
                    .format(date.day, date.month, date.year))
                gte = datetime.strptime(
                    "{0} 00:00:00.000000".format(date_initial),
                    "%d-%m-%Y %H:%M:%S.%f").replace(tzinfo=tz)
                break
            except ValueError:
                # strptime failed: bad date or wrong format.
                print('Data inicial ou formato, incorreto, vamos tentar de novo')

        # Ask for the end of the period until a valid date is entered.
        while True:
            try:
                date_ended = input(
                    'Digite a data final no seguinte formato DIA-MES-ANO, exemplo {0}-{1}-{2}: '
                    .format(date.day, date.month, date.year))
                lte = datetime.strptime(
                    "{0} 23:59:59.000000".format(date_ended),
                    "%d-%m-%Y %H:%M:%S.%f").replace(tzinfo=tz)
                break
            except ValueError:
                print(
                    'Data final invalida ou formato, incorreto, vamos tentar de novo'
                )
        return gte, lte
Exemplo n.º 2
0
    def test_local_datetime(self):
        """Exercise BSON datetime handling with a DST-aware tzinfo:
        encoding local times (stored as UTC), decoding back into a local
        zone, full round-trips, and instants around the Unix epoch.
        """
        # Timezone -60 minutes of UTC, with DST between April and July.
        tz = DSTAwareTimezone(60, "sixty-minutes", 4, 7)

        # It's not DST.
        local = datetime.datetime(year=2025,
                                  month=12,
                                  hour=2,
                                  day=1,
                                  tzinfo=tz)
        options = CodecOptions(tz_aware=True, tzinfo=tz)
        # Encode with this timezone, then decode to UTC.
        encoded = BSON.encode({'date': local}, codec_options=options)
        self.assertEqual(local.replace(hour=1, tzinfo=None),
                         encoded.decode()['date'])

        # It's DST.
        local = datetime.datetime(year=2025, month=4, hour=1, day=1, tzinfo=tz)
        encoded = BSON.encode({'date': local}, codec_options=options)
        self.assertEqual(local.replace(month=3, day=31, hour=23, tzinfo=None),
                         encoded.decode()['date'])

        # Encode UTC, then decode in a different timezone.
        encoded = BSON.encode({'date': local.replace(tzinfo=utc)})
        decoded = encoded.decode(options)['date']
        self.assertEqual(local.replace(hour=3), decoded)
        self.assertEqual(tz, decoded.tzinfo)

        # Test round-tripping.
        self.assertEqual(local,
                         (BSON.encode({
                             'date': local
                         }, codec_options=options).decode(options)['date']))

        # Test around the Unix Epoch.
        epochs = (EPOCH_AWARE,
                  EPOCH_AWARE.astimezone(FixedOffset(120, 'one twenty')),
                  EPOCH_AWARE.astimezone(FixedOffset(-120,
                                                     'minus one twenty')))
        utc_co = CodecOptions(tz_aware=True)
        for epoch in epochs:
            doc = {'epoch': epoch}
            # We always retrieve datetimes in UTC unless told to do otherwise.
            self.assertEqual(
                EPOCH_AWARE,
                BSON.encode(doc).decode(codec_options=utc_co)['epoch'])
            # Round-trip the epoch.
            local_co = CodecOptions(tz_aware=True, tzinfo=epoch.tzinfo)
            self.assertEqual(
                epoch,
                BSON.encode(doc).decode(codec_options=local_co)['epoch'])
Exemplo n.º 3
0
def parse_datetime(string):
    """Try to parse a `datetime.datetime` out of a string.

    Return the parsed datetime, or ``None`` if unsuccessful.
    """
    match = re.match(DATETIME_PATTERN, string)
    if not match:
        return None
    parts = match.groupdict()
    # Right-pad fractional seconds so e.g. "5" means 500000 microseconds.
    micro = parts['microsecond']
    if micro is not None:
        parts['microsecond'] = micro.ljust(6, '0')
    raw_tz = parts.pop('tzinfo')
    if raw_tz == 'Z':
        zone = utc
    elif raw_tz is None:
        zone = None
    else:
        # Offset like "+HH:MM" (or just "+HH"): index 0 is the sign.
        hours = int(raw_tz[1:3])
        minutes = int(raw_tz[4:]) if len(raw_tz) > 3 else 0
        total = hours * 60 + minutes
        sign = '-' if raw_tz[0] == '-' else '+'
        if sign == '-':
            total = -total
        zone = FixedOffset(total,
                           '%s%02d:%02d' % (sign, hours, minutes))
    # Convert every captured component to int; drop the missing ones.
    kwargs = {key: int(value)
              for key, value in parts.items() if value is not None}
    kwargs['tzinfo'] = zone
    return datetime.datetime(**kwargs)
Exemplo n.º 4
0
def thread_function(user):
    """Per-user worker: look up the user's marketing opt-in flag from
    events before 2019-11-29 14:53 (-0500) and, if the user opted in,
    enqueue the user onto the module-level ``usersQueue``.

    Reads from the module-level ``collection``; logs via ``logger``.
    """
    logger.info("Starting thread %s", user)

    query = {}
    query["$and"] = [{
        "event_timestamp": {
            "$lt":
            datetime.datetime.strptime("2019-11-29 14:53:00.000000",
                                       "%Y-%m-%d %H:%M:%S.%f").replace(
                                           tzinfo=FixedOffset(-300, "-0500"))
        }
    }, {
        "info.opted_into_marketing_communications": {
            "$in": [True, False]
        }
    }, {
        "event_type": "user_profile_set"
    }, {
        "entity_id": user
    }]
    # get the most recent opt-in flag set for the user
    # NOTE(review): ascending sort with limit=1 returns the OLDEST matching
    # event, not the most recent — confirm whether this should be -1.
    sort = [("event_timestamp", 1)]
    cursor = collection.find(query, sort=sort, limit=1)

    for doc in cursor:
        opted_in = doc["info"]["opted_into_marketing_communications"]

        logger.info("User %s opt-in flag: %s", user, opted_in)

        # only include the users that opted in before nov 29
        if opted_in:
            logger.info("Adding user %s", user)
            usersQueue.put(user)

    logger.info("Thread %s finished", user)
Exemplo n.º 5
0
def get_set_user_profile_timestamp(user):
    """Return the earliest ``event_timestamp`` of a ``set_user_profile``
    event at/after 2019-11-29 14:53 (-0500) where the user's
    marketing-communications flag is False, or ``None`` if none exists.

    Reads from the module-level ``collection``; logs via ``logger``.
    """
    logger.info("Getting set user profile timestamp for user %s", user)

    # Cutoff instant, pinned to the fixed -0500 offset.
    cutoff = datetime.datetime.strptime(
        "2019-11-29 14:53:00.000000",
        "%Y-%m-%d %H:%M:%S.%f").replace(tzinfo=FixedOffset(-300, "-0500"))

    query = {
        "$and": [
            {"entity_id": user},
            {"event_type": "user_profile_set"},
            {"request_type": "set_user_profile"},
            {"event_timestamp": {"$gte": cutoff}},
            {"info.opted_into_marketing_communications": False},
        ]
    }

    # Ascending sort + limit 1 -> the first matching event after the cutoff.
    cursor = collection.find(query, sort=[("event_timestamp", 1)], limit=1)

    for doc in cursor:
        timestamp = doc["event_timestamp"]
        logger.info("User %s timestamp %s", user, timestamp)
        return timestamp

    logger.error("User %s had no timestamp", user)
    return None
Exemplo n.º 6
0
 def test_naive_decode(self):
     """Decoding without tz options must yield a naive datetime in UTC."""
     zone = FixedOffset(555, "SomeZone")
     aware = datetime.datetime(1993, 4, 4, 2, tzinfo=zone)
     # Shift to UTC by hand, then strip the tzinfo for the expected value.
     naive_utc = (aware - aware.utcoffset()).replace(tzinfo=None)
     self.assertEqual(datetime.datetime(1993, 4, 3, 16, 45), naive_utc)
     decoded = BSON.encode({"date": aware}).decode()["date"]
     self.assertEqual(None, decoded.tzinfo)
     self.assertEqual(naive_utc, decoded)
Exemplo n.º 7
0
 def test_aware_datetime(self):
     """Decoding with tz_aware=True must return the datetime in UTC."""
     zone = FixedOffset(555, "SomeZone")
     aware = datetime.datetime(1993, 4, 4, 2, tzinfo=zone)
     # Manually normalize to UTC for the expected value.
     expected = (aware - aware.utcoffset()).replace(tzinfo=utc)
     self.assertEqual(
         datetime.datetime(1993, 4, 3, 16, 45, tzinfo=utc), expected)
     decoded = BSON.encode({"date": aware}).decode(tz_aware=True)["date"]
     self.assertEqual(utc, decoded.tzinfo)
     self.assertEqual(expected, decoded)
def make_time_query(start, end):
    """Build a Mongo query for documents that have a ``tokens`` field and
    were created in the inclusive interval [start, end].

    Both bounds are "%Y-%m-%d %H:%M:%S.%f" strings interpreted at a fixed
    -0400 offset.
    """
    def at_minus_0400(stamp):
        # Parse the timestamp string and pin it to the fixed -0400 offset.
        return datetime.strptime(stamp, "%Y-%m-%d %H:%M:%S.%f").replace(
            tzinfo=FixedOffset(-240, "-0400"))

    return {
        "$and": [
            {u"tokens": {u"$exists": True}},
            {u"created_at": {u"$gte": at_minus_0400(start)}},
            {u"created_at": {u"$lte": at_minus_0400(end)}},
        ]
    }
Exemplo n.º 9
0
    def test_from_datetime(self):
        """ObjectId.from_datetime should embed the time truncated to whole
        seconds, and generation_time should read it back in UTC."""
        if 'PyPy 1.8.0' in sys.version:
            # See https://bugs.pypy.org/issue1092
            raise SkipTest("datetime.timedelta is broken in pypy 1.8.0")
        truncated = datetime.datetime.utcnow()
        truncated -= datetime.timedelta(microseconds=truncated.microsecond)
        oid = ObjectId.from_datetime(truncated)
        self.assertEqual(truncated, oid.generation_time.replace(tzinfo=None))
        # Only the timestamp bytes are set; the rest of the hex is zeros.
        self.assertEqual("0" * 16, str(oid)[8:])

        zone = FixedOffset(555, "SomeZone")
        aware = datetime.datetime(1993, 4, 4, 2, tzinfo=zone)
        in_utc = (aware - aware.utcoffset()).replace(tzinfo=utc)
        oid = ObjectId.from_datetime(aware)
        self.assertEqual(in_utc, oid.generation_time)
Exemplo n.º 10
0
    def test_from_datetime(self):
        """generation_time must round-trip through ObjectId.from_datetime
        at whole-second resolution, normalized to UTC."""
        now = datetime.datetime.utcnow().replace(microsecond=0)
        oid = ObjectId.from_datetime(now)
        self.assertEqual(now, oid.generation_time.replace(tzinfo=None))
        # Everything past the 4 timestamp bytes must be zeroed out.
        self.assertEqual("0" * 16, str(oid)[8:])

        aware = datetime.datetime(
            1993, 4, 4, 2, tzinfo=FixedOffset(555, "SomeZone"))
        # Expected value: the same instant expressed in UTC.
        expected = (aware - aware.utcoffset()).replace(tzinfo=utc)
        self.assertEqual(expected,
                         ObjectId.from_datetime(aware).generation_time)
Exemplo n.º 11
0
 def rate_limit(self, user, neighborhood):
     '''Check the various config-defined project registration rate
     limits, and if any are exceeded, raise ProjectRatelimitError.

     Neighborhood admins are exempt. ``project.rate_limits`` is a JSON
     object mapping account-age-in-seconds to a maximum project count.
     '''
     if security.has_access(neighborhood, 'admin', user=user)():
         return
     # have to have the replace because, despite being UTC,
     # the result from utcnow() is still offset-naive  :-(
     # maybe look into making the mongo connection offset-naive?
     now = datetime.utcnow().replace(tzinfo=FixedOffset(0, 'UTC'))
     project_count = len(list(user.my_projects()))
     rate_limits = json.loads(config.get('project.rate_limits', '{}'))
     # Account age in seconds (the ObjectId embeds the creation time);
     # loop-invariant, so compute it once instead of per rate limit.
     user_age = (now - user._id.generation_time).total_seconds()
     for rate, count in rate_limits.items():
         if user_age < int(rate) and project_count >= count:
             raise forge_exc.ProjectRatelimitError()
Exemplo n.º 12
0
    def test_datetime(self):
        """Exercise json_util datetime handling: $date parsing in every
        supported offset spelling, millisecond $date integers, dumps
        formats (legacy, strict/ISO8601, $numberLong), naive-datetime
        treatment, tz_aware/tzinfo load options, and a non-UTC round trip.
        """
        # only millis, not micros
        self.round_trip({"date": datetime.datetime(2009, 12, 9, 15,
                                                   49, 45, 191000, utc)})

        jsn = '{"dt": { "$date" : "1970-01-01T00:00:00.000+0000"}}'
        self.assertEqual(EPOCH_AWARE, json_util.loads(jsn)["dt"])
        jsn = '{"dt": { "$date" : "1970-01-01T00:00:00.000000+0000"}}'
        self.assertEqual(EPOCH_AWARE, json_util.loads(jsn)["dt"])
        jsn = '{"dt": { "$date" : "1970-01-01T00:00:00.000+00:00"}}'
        self.assertEqual(EPOCH_AWARE, json_util.loads(jsn)["dt"])
        jsn = '{"dt": { "$date" : "1970-01-01T00:00:00.000000+00:00"}}'
        self.assertEqual(EPOCH_AWARE, json_util.loads(jsn)["dt"])
        jsn = '{"dt": { "$date" : "1970-01-01T00:00:00.000000+00"}}'
        self.assertEqual(EPOCH_AWARE, json_util.loads(jsn)["dt"])
        jsn = '{"dt": { "$date" : "1970-01-01T00:00:00.000Z"}}'
        self.assertEqual(EPOCH_AWARE, json_util.loads(jsn)["dt"])
        jsn = '{"dt": { "$date" : "1970-01-01T00:00:00.000000Z"}}'
        self.assertEqual(EPOCH_AWARE, json_util.loads(jsn)["dt"])
        # No explicit offset
        jsn = '{"dt": { "$date" : "1970-01-01T00:00:00.000"}}'
        self.assertEqual(EPOCH_AWARE, json_util.loads(jsn)["dt"])
        jsn = '{"dt": { "$date" : "1970-01-01T00:00:00.000000"}}'
        self.assertEqual(EPOCH_AWARE, json_util.loads(jsn)["dt"])
        # Localtime behind UTC
        jsn = '{"dt": { "$date" : "1969-12-31T16:00:00.000-0800"}}'
        self.assertEqual(EPOCH_AWARE, json_util.loads(jsn)["dt"])
        jsn = '{"dt": { "$date" : "1969-12-31T16:00:00.000000-0800"}}'
        self.assertEqual(EPOCH_AWARE, json_util.loads(jsn)["dt"])
        jsn = '{"dt": { "$date" : "1969-12-31T16:00:00.000-08:00"}}'
        self.assertEqual(EPOCH_AWARE, json_util.loads(jsn)["dt"])
        jsn = '{"dt": { "$date" : "1969-12-31T16:00:00.000000-08:00"}}'
        self.assertEqual(EPOCH_AWARE, json_util.loads(jsn)["dt"])
        jsn = '{"dt": { "$date" : "1969-12-31T16:00:00.000000-08"}}'
        self.assertEqual(EPOCH_AWARE, json_util.loads(jsn)["dt"])
        # Localtime ahead of UTC
        jsn = '{"dt": { "$date" : "1970-01-01T01:00:00.000+0100"}}'
        self.assertEqual(EPOCH_AWARE, json_util.loads(jsn)["dt"])
        jsn = '{"dt": { "$date" : "1970-01-01T01:00:00.000000+0100"}}'
        self.assertEqual(EPOCH_AWARE, json_util.loads(jsn)["dt"])
        jsn = '{"dt": { "$date" : "1970-01-01T01:00:00.000+01:00"}}'
        self.assertEqual(EPOCH_AWARE, json_util.loads(jsn)["dt"])
        jsn = '{"dt": { "$date" : "1970-01-01T01:00:00.000000+01:00"}}'
        self.assertEqual(EPOCH_AWARE, json_util.loads(jsn)["dt"])
        jsn = '{"dt": { "$date" : "1970-01-01T01:00:00.000000+01"}}'
        self.assertEqual(EPOCH_AWARE, json_util.loads(jsn)["dt"])

        # Integer $date: milliseconds since the epoch (negative = pre-1970).
        dtm = datetime.datetime(1, 1, 1, 1, 1, 1, 0, utc)
        jsn = '{"dt": {"$date": -62135593139000}}'
        self.assertEqual(dtm, json_util.loads(jsn)["dt"])
        jsn = '{"dt": {"$date": {"$numberLong": "-62135593139000"}}}'
        self.assertEqual(dtm, json_util.loads(jsn)["dt"])

        # Test dumps format
        pre_epoch = {"dt": datetime.datetime(1, 1, 1, 1, 1, 1, 10000, utc)}
        post_epoch = {"dt": datetime.datetime(1972, 1, 1, 1, 1, 1, 10000, utc)}
        self.assertEqual(
            '{"dt": {"$date": -62135593138990}}',
            json_util.dumps(pre_epoch))
        self.assertEqual(
            '{"dt": {"$date": 63075661010}}',
            json_util.dumps(post_epoch))
        self.assertEqual(
            '{"dt": {"$date": {"$numberLong": "-62135593138990"}}}',
            json_util.dumps(pre_epoch, json_options=STRICT_JSON_OPTIONS))
        self.assertEqual(
            '{"dt": {"$date": "1972-01-01T01:01:01.010Z"}}',
            json_util.dumps(post_epoch, json_options=STRICT_JSON_OPTIONS))

        number_long_options = json_util.JSONOptions(
            datetime_representation=DatetimeRepresentation.NUMBERLONG)
        self.assertEqual(
            '{"dt": {"$date": {"$numberLong": "63075661010"}}}',
            json_util.dumps(post_epoch, json_options=number_long_options))
        self.assertEqual(
            '{"dt": {"$date": {"$numberLong": "-62135593138990"}}}',
            json_util.dumps(pre_epoch, json_options=number_long_options))

        # ISO8601 mode assumes naive datetimes are UTC
        pre_epoch_naive = {"dt": datetime.datetime(1, 1, 1, 1, 1, 1, 10000)}
        post_epoch_naive = {
            "dt": datetime.datetime(1972, 1, 1, 1, 1, 1, 10000)}
        self.assertEqual(
            '{"dt": {"$date": {"$numberLong": "-62135593138990"}}}',
            json_util.dumps(pre_epoch_naive, json_options=STRICT_JSON_OPTIONS))
        self.assertEqual(
            '{"dt": {"$date": "1972-01-01T01:01:01.010Z"}}',
            json_util.dumps(post_epoch_naive,
                            json_options=STRICT_JSON_OPTIONS))

        # Test tz_aware and tzinfo options
        self.assertEqual(
            datetime.datetime(1972, 1, 1, 1, 1, 1, 10000, utc),
            json_util.loads(
                '{"dt": {"$date": "1972-01-01T01:01:01.010+0000"}}')["dt"])
        self.assertEqual(
            datetime.datetime(1972, 1, 1, 1, 1, 1, 10000, utc),
            json_util.loads(
                '{"dt": {"$date": "1972-01-01T01:01:01.010+0000"}}',
                json_options=json_util.JSONOptions(tz_aware=True,
                                                   tzinfo=utc))["dt"])
        self.assertEqual(
            datetime.datetime(1972, 1, 1, 1, 1, 1, 10000),
            json_util.loads(
                '{"dt": {"$date": "1972-01-01T01:01:01.010+0000"}}',
                json_options=json_util.JSONOptions(tz_aware=False))["dt"])
        self.round_trip(pre_epoch_naive, json_options=json_util.JSONOptions(
            tz_aware=False))

        # Test a non-utc timezone
        pacific = FixedOffset(-8 * 60, 'US/Pacific')
        aware_datetime = {"dt": datetime.datetime(2002, 10, 27, 6, 0, 0, 10000,
                                                  pacific)}
        self.assertEqual(
            '{"dt": {"$date": "2002-10-27T06:00:00.010-0800"}}',
            json_util.dumps(aware_datetime, json_options=STRICT_JSON_OPTIONS))
        self.round_trip(aware_datetime, json_options=json_util.JSONOptions(
            tz_aware=True, tzinfo=pacific))
        self.round_trip(aware_datetime, json_options=json_util.JSONOptions(
            datetime_representation=DatetimeRepresentation.ISO8601,
            tz_aware=True, tzinfo=pacific))
Exemplo n.º 13
0
 def test_tzinfo(self):
     """tzinfo must be a real tzinfo instance and requires tz_aware=True."""
     # A non-tzinfo value is rejected outright.
     self.assertRaises(TypeError, CodecOptions, tzinfo='pacific')
     zone = FixedOffset(42, 'forty-two')
     # Supplying tzinfo while tz_aware is False is an error...
     self.assertRaises(ValueError, CodecOptions, tzinfo=zone)
     # ...but with tz_aware=True it is stored on the options.
     self.assertEqual(zone, CodecOptions(tz_aware=True, tzinfo=zone).tzinfo)
Exemplo n.º 14
0
from test.field_types import FieldTestCase

# (expected value, value to be converted)
DATETIME_CASES = [
    (  # datetimes are given back as-is.
        datetime.datetime(2006, 7, 2, 1, 3, 4, 123456, utc),
        datetime.datetime(2006, 7, 2, 1, 3, 4, 123456, utc)),
    (  # parse str() of datetime.
        datetime.datetime(2006, 7, 2, 1, 3, 4, 123456,
                          utc), '2006-07-02 01:03:04.123456+00:00'),
    (  # alternative format
        datetime.datetime(2006, 7, 2, 1, 3, 4, 123456,
                          utc), '2006-7-2T01:03:04.123456Z'),
    (  # with fixed timezone offset
        datetime.datetime(2006, 7, 2, 1, 3, 4, 123456,
                          FixedOffset(270, '+04:30')),
        '2006-7-2T01:03:04.123456+04:30'),
    (  # missing microseconds
        datetime.datetime(2006, 7, 2, 1, 3, 4, 0,
                          FixedOffset(270,
                                      '+04:30')), '2006-7-2T01:03:04+04:30'),
    (  # missing seconds
        datetime.datetime(2006, 7, 2, 1, 3, 0, 0,
                          FixedOffset(270, '+04:30')), '2006-7-2T01:03+04:30'),
    (  # only hour and minute
        datetime.datetime(2006, 7, 2, 0, 0, 0, 0,
                          FixedOffset(270, '+04:30')), '2006-7-2+04:30'),
    (  # with negative timezone offset
        datetime.datetime(2006, 7, 2, 1, 3, 4, 123456,
                          FixedOffset(-180, '-03:00')),
        '2006-7-2T01:03:04.123456-0300'),
Exemplo n.º 15
0
DATETIME_CASES = [
    (   # datetimes are given back as-is.
        datetime.datetime(2006, 7, 2, 1, 3, 4, 123456, utc),
        datetime.datetime(2006, 7, 2, 1, 3, 4, 123456, utc)
    ),
    (   # parse str() of datetime.
        datetime.datetime(2006, 7, 2, 1, 3, 4, 123456, utc),
        '2006-07-02 01:03:04.123456+00:00'
    ),
    (   # alternative format
        datetime.datetime(2006, 7, 2, 1, 3, 4, 123456, utc),
        '2006-7-2T01:03:04.123456Z'
    ),
    (   # with fixed timezone offset
        datetime.datetime(
            2006, 7, 2, 1, 3, 4, 123456, FixedOffset(270, '+04:30')),
        '2006-7-2T01:03:04.123456+04:30'
    ),
    (   # missing microseconds
        datetime.datetime(2006, 7, 2, 1, 3, 4, 0, FixedOffset(270, '+04:30')),
        '2006-7-2T01:03:04+04:30'
    ),
    (   # missing seconds
        datetime.datetime(2006, 7, 2, 1, 3, 0, 0, FixedOffset(270, '+04:30')),
        '2006-7-2T01:03+04:30'
    ),
    (  # only hour and minute
        datetime.datetime(2006, 7, 2, 0, 0, 0, 0, FixedOffset(270, '+04:30')),
        '2006-7-2+04:30'
    ),
    (   # with negative timezone offset
Exemplo n.º 16
0
def get_total_conversation():
    """Aggregate daily conversation counts from the message collection.

    Counts "conversations" per day (Asia/Singapore dates) since
    2019-01-29, where a new conversation starts when the gap between a
    user's consecutive messages exceeds 3 minutes. Returns a nested
    year->month->day mapping (JSON-serializable) with rolled-up totals
    ('wtd', 'mtd', 'ytd', 'total', per-month cumulative sums).

    Reads module-level ``collection_message`` and ``bot_list``; relies on
    helpers ``minutes_to_milliseconds`` and ``nested_dict``.
    """
    # ======= Variables for query input =========
    start_date = datetime(year=2019, month=1,
                          day=29)  # "2019-01-29 00:00:00.000000"
    time_zone = "+0800"
    region = "Asia/Singapore"  # taken from tzlist wiki, either region or time "+08:00" will work
    interval_between_message_as_convo = minutes_to_milliseconds(3)  # minutes

    formatted_date = datetime.strptime(
        str(start_date), "%Y-%m-%d %H:%M:%S").replace(tzinfo=FixedOffset(
            480, time_zone))  # 2019-01-29 00:00:00+08:00

    #  ===== METHODOLOGY =====
    # 1. Filter by start date and bot receiver ID
    # 2. Group by sender_id and push them to an array { messages: [ msg1, msg2] }
    # 3. pack the array and add index into it { each_message_with_index: [ (msg1, index), (msg2, index)] }
    # 4. pack as array pair with the next entry { pairs: [ { current: msg1, prev: msg1 }, {current: msg2, prev: msg1} ] }
    # 5. unwind the list [ { current: msg1, prev: msg1 }, {current: msg2, prev: msg1} ] into elements
    # 6. add datefields(timezone already considered) { day: 1, month: 29, year:2019 { current: msg1, prev: msg1 } }
    # 7. get the difference between dates { day: 1, month: 29, year:2019 time_difference: 18460 }
    # 8. group by date then count the interval with more than time_difference specified
    # { day: 1, month: 29, year:2019, more_than_time_interval: 4 }
    # p/s note that the 4 here means there are GAPS between conversation more than the time_interval, we would need to offset this by 1
    # for each user, as when the user starts a conversation, there will be no GAPS between convo.
    # 9. group by date, month, year and sum the count
    # 10. move the field date, month, year to root level
    # 11. sort by year, month, date

    # NOTE(review): the two bot_list.pop() calls below permanently remove
    # elements from the module-level bot_list, so a second call to this
    # function sees a shorter (possibly empty) list — confirm intended.
    pipeline = [{
        "$match": {
            "created_at": {
                "$gte": formatted_date
            },
            "$or": [{
                "receiver_id": bot_list.pop()
            }, {
                "receiver_id": bot_list.pop()
            }]
        }
    }, {
        "$group": {
            "_id": "$sender_id",
            "messages": {
                "$push": "$created_at"
            }
        }
    }, {
        "$addFields": {
            "each_message_with_index": {
                "$zip": {
                    "inputs":
                    ["$messages", {
                        "$range": [0, {
                            "$size": "$messages"
                        }]
                    }]
                }
            }
        }
    }, {
        "$project": {
            "pairs": {
                "$map": {
                    "input": "$each_message_with_index",
                    "in": {
                        "current": {
                            "$arrayElemAt": ["$$this", 0]
                        },
                        "prev": {
                            "$arrayElemAt": [
                                "$messages", {
                                    "$max": [
                                        0, {
                                            "$subtract": [{
                                                "$arrayElemAt": ["$$this", 1]
                                            }, 1]
                                        }
                                    ]
                                }
                            ]
                        }
                    }
                }
            }
        }
    }, {
        "$unwind": {
            "path": "$pairs"
        }
    }, {
        "$addFields": {
            "day": {
                "$dayOfMonth": {
                    "date": "$pairs.current",
                    "timezone": region
                }
            },
            "month": {
                "$month": {
                    "date": "$pairs.current",
                    "timezone": region
                }
            },
            "year": {
                "$year": {
                    "date": "$pairs.current",
                    "timezone": region
                }
            }
        }
    }, {
        "$addFields": {
            "time_difference": {
                "$subtract": ["$pairs.current", "$pairs.prev"]
            }
        }
    }, {
        "$project": {
            "_id": 1,
            "day": 1,
            "month": 1,
            "year": 1,
            "more_than_time_interval": {
                "$cond": [{
                    "$or": [{
                        "$gt": [
                            "$time_difference",
                            interval_between_message_as_convo
                        ]
                    }, {
                        "$eq": ["$time_difference", 0]
                    }]
                }, 1, 0]
            }
        }
    }, {
        "$group": {
            "_id": {
                "day": "$day",
                "month": "$month",
                "year": "$year"
            },
            "convo_count": {
                "$sum": "$more_than_time_interval"
            }
        }
    }, {
        "$addFields": {
            "day": "$_id.day",
            "month": "$_id.month",
            "year": "$_id.year"
        }
    }, {
        "$sort": SON([("year", 1), ("month", 1), ("day", 1)])
    }]

    cursor = collection_message.aggregate(pipeline, allowDiskUse=False)

    # Most recent Sunday (inclusive boundary for the week-to-date total).
    today = date.today()
    offset = (today.weekday() - 5) % 7
    last_sunday = today - timedelta(days=offset)

    convo_results = defaultdict(nested_dict)
    total_convo = 0
    # {'month': 3, 'day': 29, 'convo_count': 281.0, 'year': 2019}
    for entries in cursor:
        year = int(entries['year'])
        month = int(entries['month'])
        day = int(entries['day'])
        entry_message_count = entries['convo_count']
        convo_results[year][month][day] = entry_message_count

        # other variables
        total_convo += entry_message_count

        # getting yearly message
        yearly_total = convo_results[year].get('yearly_total_message')
        yearly_total = 0 if yearly_total is None else yearly_total
        new_yearly_total = yearly_total + entry_message_count
        convo_results[year]['yearly_total_message'] = new_yearly_total

        # getting monthly
        monthly_total = convo_results[year][month].get('monthly_total_message')
        monthly_total = 0 if monthly_total is None else monthly_total
        new_monthly_total = monthly_total + entry_message_count
        convo_results[year][month]['monthly_total_message'] = new_monthly_total

        # getting weekly
        convo_date = date(year=year, month=month, day=day)

        if convo_date > last_sunday:
            weekly_total = convo_results.get('wtd')
            weekly_total = 0 if weekly_total is None else weekly_total
            new_weekly_total = weekly_total + entry_message_count
            convo_results['wtd'] = new_weekly_total

    # adding cumulative for each month, taking logic out for readability
    # NOTE(review): `year`, `month` and `day` below are leftovers from the
    # loop's last iteration — if the cursor is empty this raises NameError,
    # and the roll-ups only cover the final year seen. Confirm acceptable.
    cumulative = 0
    for cumulative_month in range(1, month + 1):
        cumulative += convo_results[year][cumulative_month][
            'monthly_total_message']
        convo_results[year][cumulative_month][
            'cumulative_by_year'] = cumulative

    # compiling stats at root level, this is redundant and can be removed
    # TODO to check whethere we want to reset
    convo_results['mtd'] = convo_results[year][month]['monthly_total_message']
    convo_results['today'] = convo_results[year][month][day]
    convo_results['total'] = total_convo
    convo_results['ytd'] = convo_results[year]['yearly_total_message']

    # Serialize through JSON to collapse the defaultdicts into plain dicts.
    return json.loads(json.dumps(convo_results))


# start_stopwatch = time.time()
# results = get_total_conversation()
# end_stopwatch = time.time()
# pp.pprint(results)

# print(end_stopwatch - start_stopwatch)  # 1.52 seconds on a slow slow computer :<