Example #1
0
def get_error_by_files(filename1, filename2, time_period):
    """Compute per-segment errors between two uploaded data files.

    Both files are loaded from the upload folder (filenames sanitized via
    secure_filename), grouped by *time_period*, and an error is computed
    for every segment key present in the first file's data.

    Args:
        filename1: Name of the original uploaded data file.
        filename2: Name of the comparison uploaded data file.
        time_period: Grouping granularity forwarded to the helpers
            (e.g. 'day', 'week', 'month').

    Returns:
        A list with one error object per segment key of the first file.
        NOTE(review): assumes every key of data1 also exists in data2;
        a KeyError is raised otherwise — confirm callers guarantee this.
    """
    data1, _, _ = get_data(
        os.path.join(config.UPLOAD_FOLDER, secure_filename(filename1)))
    data2, _, _ = get_data(
        os.path.join(config.UPLOAD_FOLDER, secure_filename(filename2)))
    data1 = group_segment_data_by_time_period(data1, time_period)
    data2 = group_segment_data_by_time_period(data2, time_period)
    # `segment_key` replaces the original loop variable `tuple`, which
    # shadowed the builtin; .items() avoids a second lookup into data1.
    return [
        get_error(segment, data2[segment_key], time_period)
        for segment_key, segment in data1.items()
    ]
    def test_daily_time_grouper(self):
        """Grouping by 'day' must be the identity transformation."""
        result = group_segment_data_by_time_period(self.data, 'day')

        self.assertDictEqual(self.data, result)
    def test_monthly_time_grouper_inventory_and_dates_length_match(self):
        """Each monthly group must hold one inventory volume per date."""
        time_period = 'month'

        grouped_data = group_segment_data_by_time_period(
            self.data, time_period)

        # Iterate values only: the original bound the key to `tuple`,
        # shadowing the builtin, and never used it.
        for segment in grouped_data.values():
            self.assertEqual(len(segment.inventory_volumes),
                             len(segment.dates))
    def test_time_grouper_with_unallowed_time_period(self):
        """An unrecognized time period must produce empty segments."""
        grouped_data = group_segment_data_by_time_period(
            self.data, 'not a time period')

        for segment in grouped_data.values():
            # Both the dates and the volumes are expected to be empty.
            self.assertFalse(segment.dates)
            self.assertFalse(segment.inventory_volumes)
    def test_weekly_time_grouper_days_in_between_dates(self):
        """Consecutive weekly dates must be exactly seven days apart."""
        time_period = 'week'

        grouped_data = group_segment_data_by_time_period(
            self.data, time_period)

        # Loop-invariant constant hoisted out of the inner loop.
        seconds_in_week = 60 * 60 * 24 * 7
        # Values only: the key was bound to `tuple` (shadowing the builtin)
        # in the original and never used.
        for segment in grouped_data.values():
            for i in range(len(segment.dates) - 1):
                self.assertEqual(segment.dates[i].seconds + seconds_in_week,
                                 segment.dates[i + 1].seconds)
    def test_weekly_time_grouper_first_day_in_group(self):
        """Weekly groups must start on a Monday (weekday() == 0)."""
        time_period = 'week'

        grouped_data = group_segment_data_by_time_period(
            self.data, time_period)

        # Values only: the key was bound to `tuple` (shadowing the builtin)
        # in the original and never used.
        for segment in grouped_data.values():
            # NOTE(review): the last date is skipped, matching the original
            # loop bound — presumably the final group may be partial.
            for i in range(len(segment.dates) - 1):
                date = datetime.fromtimestamp(segment.dates[i].seconds)
                self.assertEqual(0, date.weekday())
    def test_monthly_time_grouper_first_day_in_group(self):
        """Monthly groups start on the 1st and lengths match."""
        time_period = 'month'

        grouped_data = group_segment_data_by_time_period(
            self.data, time_period)

        # Values only: the key was bound to `tuple` (shadowing the builtin)
        # in the original and never used.
        for segment in grouped_data.values():
            self.assertEqual(len(segment.inventory_volumes),
                             len(segment.dates))
            for i in range(len(segment.dates) - 1):
                date = datetime.fromtimestamp(segment.dates[i].seconds)
                # Every monthly bucket must begin on the 1st of the month.
                self.assertEqual(1, date.day)
    def test_monthly_time_grouper_gap_between_dates(self):
        """Consecutive monthly dates differ by that month's exact length."""
        time_period = 'month'

        grouped_data = group_segment_data_by_time_period(
            self.data, time_period)

        # Loop-invariant constant hoisted out of the inner loop.
        seconds_per_day = 60 * 60 * 24
        # Values only: the key was bound to `tuple` (shadowing the builtin)
        # in the original and never used.
        for segment in grouped_data.values():
            self.assertEqual(len(segment.inventory_volumes),
                             len(segment.dates))
            for i in range(len(segment.dates) - 1):
                date = datetime.fromtimestamp(segment.dates[i].seconds)
                # monthrange(...)[1] is the number of days in that month.
                days_in_current_month = monthrange(date.year, date.month)[1]
                in_a_month = (segment.dates[i].seconds +
                              seconds_per_day * days_in_current_month)
                self.assertEqual(in_a_month, segment.dates[i + 1].seconds)
    def test_weekly_time_grouper_inventory_volumes_sum(self):
        """Each weekly volume equals the sum of its seven daily volumes."""
        time_period = 'week'

        grouped_data = group_segment_data_by_time_period(
            self.data, time_period)

        # `segment_key` replaces the original loop variable `tuple`,
        # which shadowed the builtin.
        for segment_key, segment in grouped_data.items():
            self.assertEqual(len(segment.inventory_volumes),
                             len(segment.dates))
            original_segment = self.data[segment_key]
            # Hoisted: the original rebuilt list(...) on every iteration.
            original_dates = list(original_segment.dates)
            for i in range(len(segment.dates) - 1):
                start = original_dates.index(segment.dates[i])
                # A weekly bucket aggregates seven consecutive daily volumes.
                inventory_volumes_sum = sum(
                    original_segment.inventory_volumes[start:start + 7])
                self.assertEqual(inventory_volumes_sum,
                                 segment.inventory_volumes[i])
    def test_monthly_time_grouper_inventory_volumes_sum(self):
        """Each monthly volume equals the sum of that month's daily volumes."""
        time_period = 'month'

        grouped_data = group_segment_data_by_time_period(
            self.data, time_period)

        # `segment_key` replaces the original loop variable `tuple`,
        # which shadowed the builtin.
        for segment_key, segment in grouped_data.items():
            self.assertEqual(len(segment.inventory_volumes),
                             len(segment.dates))
            original_segment = self.data[segment_key]
            # Hoisted: the original rebuilt list(...) on every iteration.
            original_dates = list(original_segment.dates)
            for i in range(len(segment.dates) - 1):
                date = datetime.fromtimestamp(segment.dates[i].seconds)
                # monthrange(...)[1] is the number of days in that month.
                days_in_current_month = monthrange(date.year, date.month)[1]
                start = original_dates.index(segment.dates[i])
                inventory_volumes_sum = sum(
                    original_segment.inventory_volumes[
                        start:start + days_in_current_month])
                self.assertEqual(inventory_volumes_sum,
                                 segment.inventory_volumes[i])
 def get(self, filename):
     """Return one page of segmented timeline data for *filename*.

     Query parameters: `page` and `per_page` (integers; responds 400 when
     non-numeric), `time_period`, `order_by`, plus whatever filters
     DataFilter consumes from the request args.
     """
     args = request.args
     try:
         # Single lookups; the original called request.args.get() twice
         # per parameter.
         page_arg = args.get('page')
         per_page_arg = args.get('per_page')
         page = int(page_arg) if page_arg else 0
         per_page = int(per_page_arg) if per_page_arg else config.per_page
     except ValueError:
         abort(400, 'page and per_page values should be numbers')
     data_filter = DataFilter(args)
     filename = secure_filename(filename)
     data, countries, devices = get_data(
         os.path.join(config.UPLOAD_FOLDER, filename))
     # `or` preserves the original behavior: missing/empty -> default.
     time_period = args.get('time_period') or config.time_period
     grouped_data = group_segment_data_by_time_period(data, time_period)
     filtered_data = data_filter.filter(grouped_data)
     order_type = args.get('order_by') or config.order_by
     ordered_data = sort_data_by_order_type(
         list(filtered_data.values()), order_type)
     response = data_pb2.SegmentedTimelineDataResponse()
     # Pagination slice computed once instead of repeating the arithmetic.
     start = page * per_page
     response.data.extend(ordered_data[start:start + per_page])
     response.countries.extend(countries)
     response.devices.extend(devices)
     return MessageToDict(response)
    def get(self, original_filename, filename_for_comparison):
        """Return one page of original vs. comparison timeline data.

        Segments are ordered by error importance — computed once per file
        pair and cached in `error_importance_by_files` — then filtered by
        the request's DataFilter and paginated via `page`/`per_page`
        (responds 400 when those are not numeric).
        """
        args = request.args
        try:
            # Single lookups; the original called request.args.get() twice
            # per parameter.
            page_arg = args.get('page')
            per_page_arg = args.get('per_page')
            page = int(page_arg) if page_arg else 0
            per_page = int(per_page_arg) if per_page_arg else config.per_page
        except ValueError:
            abort(400, 'page and per_page values should be numbers')
        filename1 = secure_filename(original_filename)
        filename2 = secure_filename(filename_for_comparison)
        original_data, _, _ = get_data(
            os.path.join(config.UPLOAD_FOLDER, filename1))
        comparison_data, _, _ = get_data(
            os.path.join(config.UPLOAD_FOLDER, filename2))
        # `or` preserves the original behavior: missing/empty -> default.
        time_period = args.get('time_period') or config.time_period
        grouped_original_data = group_segment_data_by_time_period(
            original_data, time_period)
        grouped_comparison_data = group_segment_data_by_time_period(
            comparison_data, time_period)
        data_filter = DataFilter(args)
        filtered_original_data = data_filter.filter(grouped_original_data)
        filtered_comparison_data = data_filter.filter(grouped_comparison_data)

        original_segmented_timeline_data = (
            data_pb2.SegmentedTimelineDataResponse())
        comparison_segmented_timeline_data = (
            data_pb2.SegmentedTimelineDataResponse())

        # Compute and cache the importance order the first time this file
        # pair is requested.
        if (filename1, filename2) not in error_importance_by_files:
            # `segment_key` replaces the original comprehension variable
            # `tuple`, which shadowed the builtin.
            errors = sorted(
                [get_error(original_data[segment_key],
                           comparison_data[segment_key], time_period)
                 for segment_key in original_data],
                key=lambda error: error.median,
                reverse=True)
            order_type = args.get('order_by') or config.error_order_by
            sorted_errors = sort_data_by_order_type(errors, order_type)
            error_importance_by_files[(filename1, filename2)] = [
                (error.country, error.device) for error in sorted_errors
            ]

        error_importance = error_importance_by_files[(filename1, filename2)]
        # Keep only segments that survived filtering, in importance order.
        sorted_segment_keys_by_importance = [
            segment_key for segment_key in error_importance
            if segment_key in filtered_original_data
        ]
        # Pagination slice computed once instead of repeating the arithmetic.
        start = page * per_page
        keys_for_page = sorted_segment_keys_by_importance[
            start:start + per_page]
        original_segmented_timeline_data.data.extend(
            [filtered_original_data[key] for key in keys_for_page])
        comparison_segmented_timeline_data.data.extend(
            [filtered_comparison_data[key] for key in keys_for_page])
        response = data_pb2.SegmentedTimelineCompareResponse(
            original_data=original_segmented_timeline_data,
            comparison_data=comparison_segmented_timeline_data)
        return MessageToDict(response)