def bake_data_for_graphs():
    """Build per-team graph data split by opponent height, venue and
    league-table segment.

    Loads match reports from 'data.json' and the table from
    'league_table.json', bakes per-team stats, then runs process_data
    over every (height, venue, segment) bucket.

    :return: dict keyed by team ->
             {'shorter'|'taller': {'home'|'away': {segment: processed_data}}}
    """
    reports = load_as_json('data.json')['reports']
    league_table = load_as_json('league_table.json')
    baked_data = bake_data_for_all_teams(reports, league_table)

    height_keys = ['shorter', 'taller']
    place_keys = ['home', 'away']
    segment_keys = ['overall', 'top_five', 'top_half', 'bottom_half', 'bottom_five']

    graph_data = {}
    for team, stats in baked_data.items():
        graph_data[team] = {}
        # One loop over height categories replaces the duplicated
        # shorter/taller code paths; the original's `= {}` assignments
        # that were immediately overwritten are dropped.
        for height in height_keys:
            graph_data[team][height] = {}
            for place in place_keys:
                graph_data[team][height][place] = {}
                for segment in segment_keys:
                    graph_data[team][height][place][segment] = process_data(
                        stats['matches'][height][place][segment],
                        stats['wins'][height][place][segment],
                        stats['draws'][height][place][segment],
                        stats['losses'][height][place][segment])

    return graph_data
def bake_data_for_graphs():
    """Assemble graph-ready win/draw/loss data for every team.

    Reads reports from 'data.json' and the standings from
    'league_table.json', bakes team stats, and calls process_data for
    each combination of opponent height ('shorter'/'taller'), venue
    ('home'/'away') and table segment.

    :return: nested dict: team -> height -> venue -> segment -> processed
    """
    reports = load_as_json('data.json')['reports']
    league_table = load_as_json('league_table.json')
    baked_data = bake_data_for_all_teams(reports, league_table)

    graph_data = {}
    place_keys = ['home', 'away']
    keys = ['overall', 'top_five', 'top_half', 'bottom_half', 'bottom_five']
    for team, stats in baked_data.items():
        graph_data[team] = {}
        # Iterating the height category removes the copy-pasted
        # shorter/taller branches; the redundant `= {}` placeholders that
        # were overwritten on the very next statement are gone.
        for height in ('shorter', 'taller'):
            graph_data[team][height] = {}
            for place in place_keys:
                graph_data[team][height][place] = {}
                for key in keys:
                    graph_data[team][height][place][key] = process_data(
                        stats['matches'][height][place][key],
                        stats['wins'][height][place][key],
                        stats['draws'][height][place][key],
                        stats['losses'][height][place][key])

    return graph_data
# Example #3 (score: 0)
    def persist_reports(self, new_reports):
        """Merge *new_reports* into data.json after validation.

        Starts from an empty report list when data.json does not hold
        valid JSON.

        :param new_reports: dict carrying a 'reports' list to append.
        :return: None
        """
        try:
            stored = load_as_json('data.json')
        except ValueError:
            # Empty or corrupt file: begin with a fresh structure.
            stored = {'reports': []}

        merged = stored['reports'] + new_reports['reports']
        dump_as_json(self.validate(merged), 'data.json', 'w')
    def persist_reports(self, new_reports):
        """Append the given reports to data.json, validating the result.

        Falls back to an empty report list if data.json cannot be parsed
        as JSON.

        :param new_reports: dict with a 'reports' list.
        :return: None
        """
        try:
            existing = load_as_json('data.json')
        except ValueError:
            existing = {'reports': []}

        combined = existing['reports']
        combined.extend(new_reports['reports'])
        validated = self.validate(combined)
        dump_as_json(validated, 'data.json', 'w')
# Example #5 (score: 0)
    def crawl_all_fixtures(self):
        try:
            entries = load_as_json('data.json')
        except ValueError:
            entries = {'reports': []}

        stored_entries = len(entries['reports'])
        print "stored entries:", stored_entries
        while stored_entries < self.required_entries:
            batch = 2
            try:
                print "stored:", stored_entries, "req:", self.required_entries, "skip:", self.skip
                crawler = FixtureCrawler(FIXTURE_URL, self.skip, batch)
                new_reports = crawler.browse_monthly_fixtures()
            except ForbiddenAccessError:
                continue

            self.persist_reports(new_reports)
            self.skip += 2
            stored_entries += 2
    def crawl_all_fixtures(self):
        try:
            entries = load_as_json('data.json')
        except ValueError:
            entries = {'reports': []}

        stored_entries = len(entries['reports'])
        print "stored entries:", stored_entries
        while stored_entries < self.required_entries:
            batch = 2
            try:
                print "stored:", stored_entries, "req:", self.required_entries, "skip:", self.skip
                crawler = FixtureCrawler(FIXTURE_URL, self.skip, batch)
                new_reports = crawler.browse_monthly_fixtures()
            except ForbiddenAccessError:
                continue

            self.persist_reports(new_reports)
            self.skip += 2
            stored_entries += 2