Example #1
    def granularity(period_name, timezone=None, origin=None):
        if not period_name or period_name == 'all':
            return 'all'
        iso_8601_dict = {
            '5 seconds': 'PT5S',
            '30 seconds': 'PT30S',
            '1 minute': 'PT1M',
            '5 minutes': 'PT5M',
            '1 hour': 'PT1H',
            '6 hour': 'PT6H',
            'one day': 'P1D',
            '1 day': 'P1D',
            '7 days': 'P7D',
            'week': 'P1W',
            'week_starting_sunday': 'P1W',
            'week_ending_saturday': 'P1W',
            'month': 'P1M',
        }

        granularity = {'type': 'period'}
        # NOTE: this variant ignores the timezone argument and pins the Druid
        # granularity to a fixed time zone instead of the commented-out
        # pass-through below
        # if timezone:
        #     granularity['timeZone'] = timezone
        granularity['timeZone'] = "Asia/Shanghai"

        if origin:
            dttm = utils.parse_human_datetime(origin)
            granularity['origin'] = dttm.isoformat()

        if period_name in iso_8601_dict:
            granularity['period'] = iso_8601_dict[period_name]
            if period_name in ('week_ending_saturday', 'week_starting_sunday'):
                # use Sunday as start of the week
                granularity['origin'] = '2016-01-03T00:00:00'
        elif not isinstance(period_name, string_types):
            granularity['type'] = 'duration'
            granularity['duration'] = period_name
        elif period_name.startswith('P'):
            # identify if the string is the iso_8601 period
            granularity['period'] = period_name
        else:
            granularity['type'] = 'duration'
            granularity['duration'] = utils.parse_human_timedelta(
                period_name).total_seconds() * 1000
        return granularity
Example #2
    def granularity(period_name, timezone=None, origin=None):
        if not period_name or period_name == 'all':
            return 'all'
        iso_8601_dict = {
            '5 seconds': 'PT5S',
            '30 seconds': 'PT30S',
            '1 minute': 'PT1M',
            '5 minutes': 'PT5M',
            '1 hour': 'PT1H',
            '6 hour': 'PT6H',
            'one day': 'P1D',
            '1 day': 'P1D',
            '7 days': 'P7D',
            'week': 'P1W',
            'week_starting_sunday': 'P1W',
            'week_ending_saturday': 'P1W',
            'month': 'P1M',
        }

        granularity = {'type': 'period'}
        if timezone:
            granularity['timeZone'] = timezone

        if origin:
            dttm = utils.parse_human_datetime(origin)
            granularity['origin'] = dttm.isoformat()

        if period_name in iso_8601_dict:
            granularity['period'] = iso_8601_dict[period_name]
            if period_name in ('week_ending_saturday', 'week_starting_sunday'):
                # use Sunday as start of the week
                granularity['origin'] = '2016-01-03T00:00:00'
        elif not isinstance(period_name, string_types):
            granularity['type'] = 'duration'
            granularity['duration'] = period_name
        elif period_name.startswith('P'):
            # identify if the string is the iso_8601 period
            granularity['period'] = period_name
        else:
            granularity['type'] = 'duration'
            granularity['duration'] = utils.parse_human_timedelta(
                period_name).total_seconds() * 1000
        return granularity
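
A minimal usage sketch for the helper above, assuming it is exposed as a static method and called with plain values (the exact origin field depends on how utils.parse_human_datetime parses the string):

    # Hypothetical calls tracing the branches above:
    granularity('1 day', timezone='UTC')
    # -> {'type': 'period', 'timeZone': 'UTC', 'period': 'P1D'}

    granularity('week_starting_sunday', timezone='UTC')
    # -> {'type': 'period', 'timeZone': 'UTC', 'period': 'P1W',
    #     'origin': '2016-01-03T00:00:00'}

    granularity('PT15M')   # already an ISO 8601 period string
    # -> {'type': 'period', 'period': 'PT15M'}

    granularity(300000)    # non-string input is treated as a raw duration in ms
    # -> {'type': 'duration', 'duration': 300000}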
Example #3
def compute_time_compare(granularity, periods):
    if not granularity:
        return None
    # convert old db_engine_spec granularity to ISO duration
    if granularity in db_engine_specs_map:
        granularity = db_engine_specs_map[granularity]

    try:
        obj = isodate.parse_duration(granularity) * periods
    except isodate.isoerror.ISO8601Error:
        # if parse_human_timedelta can parse it, return it directly
        delta = '{0} {1}{2}'.format(periods, granularity, 's' if periods > 1 else '')
        obj = parse_human_timedelta(delta)
        if obj:
            return delta
        raise Exception('Unable to parse: {0}'.format(granularity))

    if isinstance(obj, isodate.duration.Duration):
        return isodate_duration_to_string(obj)
    elif isinstance(obj, datetime.timedelta):
        return timedelta_to_string(obj)
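
The try branch leans on isodate arithmetic: parse_duration returns a plain timedelta for day- and time-based periods and an isodate Duration when months or years are involved, and both support multiplication by an integer period count. A small sketch of that behavior (db_engine_specs_map, isodate_duration_to_string and timedelta_to_string are helpers from the surrounding module and are not shown here):

    import isodate

    # Day/time periods parse to datetime.timedelta and multiply cleanly:
    isodate.parse_duration('P1D') * 7    # datetime.timedelta(days=7)

    # Month/year periods parse to isodate.duration.Duration, because their
    # length in days is not fixed; Duration also supports int multiplication:
    isodate.parse_duration('P1M') * 3    # Duration with months=3

    # Anything that is not valid ISO 8601 raises ISO8601Error, which is what
    # sends compute_time_compare into the parse_human_timedelta fallback:
    try:
        isodate.parse_duration('week')
    except isodate.ISO8601Error:
        pass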
Example #4
    def test_parse_human_timedelta(self, mock_now):
        mock_now.return_value = datetime(2016, 12, 1)
        self.assertEquals(parse_human_timedelta('now'), timedelta(0))
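
If the helper behind this test resolves natural-language offsets relative to the mocked "now" (which the 'now' assertion suggests), the same test could plausibly be extended with assertions such as the ones below; these are assumptions about parse_human_timedelta, not part of the original test:

    self.assertEquals(parse_human_timedelta('1 day'), timedelta(days=1))
    self.assertEquals(parse_human_timedelta('2 hours'), timedelta(hours=2))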
Example #5
    def get_data(self, df):
        fd = self.form_data
        df = df.fillna(0)
        if fd.get("granularity") == "all":
            raise Exception("Pick a time granularity for your time series")

        df = df.pivot_table(index=DTTM_ALIAS,
                            columns=fd.get('groupby'),
                            values=fd.get('metrics'))

        fm = fd.get("resample_fillmethod")
        if not fm:
            fm = None
        how = fd.get("resample_how")
        rule = fd.get("resample_rule")
        if how and rule:
            df = df.resample(rule, how=how, fill_method=fm)
            if not fm:
                df = df.fillna(0)

        if self.sort_series:
            dfs = df.sum()
            dfs.sort_values(ascending=False, inplace=True)
            df = df[dfs.index]

        if fd.get("contribution"):
            dft = df.T
            df = (dft / dft.sum()).T

        rolling_periods = fd.get("rolling_periods")
        rolling_type = fd.get("rolling_type")

        if rolling_type in ('mean', 'std', 'sum') and rolling_periods:
            if rolling_type == 'mean':
                df = pd.rolling_mean(df, int(rolling_periods), min_periods=0)
            elif rolling_type == 'std':
                df = pd.rolling_std(df, int(rolling_periods), min_periods=0)
            elif rolling_type == 'sum':
                df = pd.rolling_sum(df, int(rolling_periods), min_periods=0)
        elif rolling_type == 'cumsum':
            df = df.cumsum()

        num_period_compare = fd.get("num_period_compare")
        if num_period_compare:
            num_period_compare = int(num_period_compare)
            prt = fd.get('period_ratio_type')
            if prt and prt == 'growth':
                df = (df / df.shift(num_period_compare)) - 1
            elif prt and prt == 'value':
                df = df - df.shift(num_period_compare)
            else:
                df = df / df.shift(num_period_compare)

            df = df[num_period_compare:]

        chart_data = self.to_series(df)

        time_compare = fd.get('time_compare')
        if time_compare:
            query_object = self.query_obj()
            delta = utils.parse_human_timedelta(time_compare)
            query_object['inner_from_dttm'] = query_object['from_dttm']
            query_object['inner_to_dttm'] = query_object['to_dttm']
            query_object['from_dttm'] -= delta
            query_object['to_dttm'] -= delta

            df2 = self.get_df(query_object)
            df2[DTTM_ALIAS] += delta
            df2 = df2.pivot_table(index=DTTM_ALIAS,
                                  columns=fd.get('groupby'),
                                  values=fd.get('metrics'))
            chart_data += self.to_series(df2,
                                         classed='superset',
                                         title_suffix="---")
            chart_data = sorted(chart_data, key=lambda x: x['key'])
        return chart_data
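
The num_period_compare block above boils down to three pandas shift comparisons. A self-contained sketch of that arithmetic (the DataFrame and column name below are invented purely for illustration):

    import pandas as pd

    # Toy metric growing 10% per period:
    df = pd.DataFrame({'metric': [100.0, 110.0, 121.0]})
    n = 1

    growth = (df / df.shift(n)) - 1    # 'growth' ratio type: +0.1 per step
    value = df - df.shift(n)           # 'value' ratio type: absolute delta
    ratio = df / df.shift(n)           # default: plain period-over-period ratio

    # As in get_data, the first n rows (which have nothing to compare
    # against) are dropped:
    growth = growth[n:]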