Example #1
    @classmethod
    async def load_2012(cls):
        # 2012-13 (Ring It Up!) World Championship: one saved page per
        # division plus separate finals and rankings pages.
        year = 2012
        with open(
                "data/old_champs/2012-2013/Match_Results_World Championship_Edison.html"
        ) as f:
            edison = ResultsPageHelper.load_matches(
                BeautifulSoup(f.read(), 'lxml').find("table"), "1213cmp1")
        with open(
                "data/old_champs/2012-2013/Match_Results_World Championship_Franklin.html"
        ) as f:
            franklin = ResultsPageHelper.load_matches(
                BeautifulSoup(f.read(), 'lxml').find("table"), "1213cmp2")
        with open("data/old_champs/2012-2013/finals.html") as f:
            finals = ResultsPageHelper.load_matches(
                BeautifulSoup(f.read(), 'lxml').find("table"), "1213cmp0")
        with open(
                "data/old_champs/2012-2013/Rankings_World Championship_Edison.html"
        ) as f:
            edison_rank = ResultsPageHelper.load_rankings(
                BeautifulSoup(f.read(), 'lxml').find("table"), edison)
        with open(
                "data/old_champs/2012-2013/Rankings_World Championship_Franklin.html"
        ) as f:
            franklin_rank = ResultsPageHelper.load_rankings(
                BeautifulSoup(f.read(), 'lxml').find("table"), franklin)
        with open("data/old_champs/2012-2013/awards") as f:
            awards = cls.load_awards_file(f.read(), year, '1213cmp0')
        events = cls.mk_champs(year, "2013-04-24", "2013-04-27")

        await cls.finalize([
            finals, franklin, edison, franklin_rank, edison_rank, events,
            awards
        ], events, year)
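All of these old-champs loaders repeat one step: read a saved HTML page, take its first <table>, and hand it to a parser. A minimal sketch of just that step in isolation (load_table is a hypothetical helper, not part of the codebase):

    from bs4 import BeautifulSoup

    def load_table(path, event_key, parse):
        # parse stands in for ResultsPageHelper.load_matches here;
        # load_rankings takes previously parsed matches as its second
        # argument instead of an event key.
        with open(path) as f:
            table = BeautifulSoup(f.read(), 'lxml').find("table")
        return parse(table, event_key)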
Example #2
    @classmethod
    async def load_2009(cls):
        # 2009-10 (Hot Shot!) World Championship: results and rankings all
        # live in one saved HTML page, so tables are picked out by index.
        year = 2009
        with open("data/old_champs/2009-2010/2009CMPresultsandrankings.html"
                  ) as f:
            data = f.read()
        with open("data/old_champs/2009-2010/awards2") as f:
            awards_data = f.read()

        soup = BeautifulSoup(data, 'lxml')

        tables = list(soup.find_all("table"))
        finals = ResultsPageHelper.load_matches(tables[0], "0910cmp0")
        franklin = ResultsPageHelper.load_matches(tables[1], "0910cmp1")
        edison = ResultsPageHelper.load_matches(tables[2], "0910cmp2")
        franklin_rank = ResultsPageHelper.load_rankings(tables[3],
                                                        franklin,
                                                        has_hs=False)
        edison_rank = ResultsPageHelper.load_rankings(tables[4],
                                                      edison,
                                                      has_hs=False)
        events = cls.mk_champs(year, "2010-04-14", "2010-04-17")
        awards = cls.load_awards_file(awards_data, year, events[-1].key)

        await cls.finalize([
            finals, franklin, edison, franklin_rank, edison_rank, events,
            awards
        ], events, year)
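The numeric indices above depend on find_all("table") returning tables in document order, so each year's indices are tied to that page's exact layout. A self-contained illustration:

    from bs4 import BeautifulSoup

    # Tables come back in document order; the right index for a given
    # division therefore differs from one saved results page to another.
    html = "<table id='finals'></table><table id='franklin'></table>"
    tables = BeautifulSoup(html, 'lxml').find_all("table")
    assert [t["id"] for t in tables] == ["finals", "franklin"]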
Example #3
    @classmethod
    async def load_resq_finals(cls, finals):
        # 2015-16 (FIRST Res-Q) championship finals.
        with open("data/old_champs/2015-2016/finals.html") as f:
            matches = ResultsPageHelper.load_matches(
                BeautifulSoup(f.read(), 'lxml').find("table"), "1516cmp0")
        # Each entry unpacks into three objects to store (the match plus
        # two related records, presumably per-alliance data).
        for a, b, c in matches:
            await a.upsert()
            await b.upsert()
            await c.upsert()
        finals.data_sources = ["FTCData Original Research"]
        await finals.upsert()
        await AwardHelper.generate_winners_finalists(finals)
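The three upserts per match run strictly one after another. If the upsert calls are independent of each other, the loop could instead launch them together with asyncio.gather, the same pattern load_1617velv uses for whole events below (a sketch under that independence assumption):

    import asyncio

    async def upsert_matches(matches):
        # Fire every upsert at once rather than awaiting them one by one.
        await asyncio.gather(*(obj.upsert()
                               for triple in matches
                               for obj in triple))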
Example #4
    @classmethod
    async def load_2011(cls):
        # 2011-12 (Bowled Over!) World Championship.
        year = 2011
        with open("data/old_champs/2011-2012/2011-2012FTCCMPResults") as f:
            data = f.read()
        with open("data/old_champs/2011-2012/awards") as f:
            awards_data = f.read()

        soup = BeautifulSoup(data, 'lxml')

        tables = list(soup.find_all("table"))
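        # This results page interleaves many tables, so the indices used
        # below (3, 15, 14, 13, 12) were presumably found by inspecting
        # the saved HTML rather than by any fixed position rule.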
        finals = ResultsPageHelper.load_matches(tables[3], "1112cmp0")

        franklin = ResultsPageHelper.load_matches(tables[15], "1112cmp1")
        edison = ResultsPageHelper.load_matches(tables[14], "1112cmp2")

        franklin_rank = ResultsPageHelper.load_rankings(tables[13], franklin)
        edison_rank = ResultsPageHelper.load_rankings(tables[12], edison)
        events = cls.mk_champs(year, "2012-04-25", "2012-04-28")
        awards = cls.load_awards_file(awards_data, year, events[-1].key)

        await cls.finalize([
            finals, franklin, edison, franklin_rank, edison_rank, events,
            awards
        ], events, year)
Example #5
    @classmethod
    async def load_2010(cls):
        # 2010-11 (Get Over It!) World Championship.
        year = 2010
        with open(
                "data/old_champs/2010-2011/2010-2011-ftc-world-championship-get-over-it!-results.html"
        ) as f:
            data = f.read()
        with open("data/old_champs/2010-2011/awards") as f:
            awards_data = f.read()

        soup = BeautifulSoup(data, 'lxml')

        tables = list(soup.find_all("table"))
        finals = ResultsPageHelper.load_matches(tables[0], "1011cmp0")
        edison = ResultsPageHelper.load_matches(tables[1], "1011cmp1")
        franklin = ResultsPageHelper.load_matches(tables[2], "1011cmp2")
        edison_rank = ResultsPageHelper.load_rankings(tables[3], edison)
        franklin_rank = ResultsPageHelper.load_rankings(tables[4], franklin)
        events = cls.mk_champs(year, "2011-04-27", "2011-04-30")
        awards = cls.load_awards_file(awards_data, year, events[-1].key)

        await cls.finalize([
            finals, franklin, edison, franklin_rank, edison_rank, events,
            awards
        ], events, year)
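load_2009, load_2011, and load_2010 all pass events[-1].key to load_awards_file, which suggests cls.mk_champs returns the division events first and the finals event (key suffix cmp0) last. A toy reconstruction of only that inferred ordering (the real Event model clearly carries more fields):

    from dataclasses import dataclass

    @dataclass
    class FakeEvent:  # stand-in for the real Event model
        key: str

    # Inferred ordering: divisions first, finals ("...cmp0") last, so
    # events[-1] is the event the awards are attached to.
    def mk_champs_shape(season):
        return [FakeEvent(f"{season}cmp1"), FakeEvent(f"{season}cmp2"),
                FakeEvent(f"{season}cmp0")]

    assert mk_champs_shape("0910")[-1].key == "0910cmp0"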
Example #6
    @classmethod
    async def load_1617velv(cls):
        # 2016-17 (Velocity Vortex) events, driven by the cheer4ftc
        # ftc-data CSV index; each row describes one event to import.
        tasks = []
        with open("data/ftc-data/events/1617velv/1617velv-event-list.csv") as f:
            csv_reader = csv.reader(f.read().split("\n"))
        for row in csv_reader:
            if not row:
                continue
            sdate = list(map(int, row[0].split("/")))
            date = datetime.datetime(year=sdate[2], month=sdate[0], day=sdate[1])
            (name, state, fevent_type, _, region_code, ecode, divid,
             ftcdata_code, state_abbr, data_quality) = [
                 a.strip() for a in row[1:]]
            if region_code in ("pa", "esr"):
                # ftcpenn loads this better than ftcdata ever did because it
                # provides awards data; there's no point in covering it here.
                continue
            event_type = cls.EVENT_TYPE_MAP[fevent_type]
            if state.endswith(" SR"):
                event_type = EventType.SUPER_REGIONAL
            elif state.startswith("CMP "):
                event_type = EventType.WORLD_CHAMPIONSHIP

            divno = -1
            # Split a trailing "d0"/"d1"/"d2" division marker off the
            # event code; the division wiring below re-appends the digit.
            if ecode.endswith(("d0", "d1", "d2")):
                divno = int(ecode[-1])
                ecode = ecode[:-2]

            region = None
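            # Normalize region codes whose ftc-data spellings apparently
            # differ from the abbreviations this codebase keys events on.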
            rcode = {
                "txno": "txntx",
                "txwp": "txph",
                "nynyc": "nyc",
                "io": "ia",
                "nm": "az",
            }.get(region_code, region_code)
            if ecode == "cmphs":
                rcode = "mihs"
            elif rcode in ("wsr", "nsr", "ssr", "cmptx", "cmpmo"):
                ecode = ""
                region = None

            if ecode:
                region = await RegionHelper.region_unabbrev(rcode)

            if "Canada" in name:
                country = "Canada"
            else:
                country = "USA"
            event = Event(key=f"1617{rcode}{ecode}",
                          year=2016, name=name, state_prov=state, country=country,
                          start_date=date, end_date=date, event_type=event_type,
                          region=region,
                          playoff_type=PlayoffType.STANDARD)
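            # Division wiring: a d0 suffix marks the parent event, which
            # lists its two divisions ("...1", "...2"); d1/d2 events point
            # back at the parent ("...0"). The digit stripped from ecode
            # earlier is re-appended to the key here.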
            if divno > -1:
                if divno == 0:
                    event.division_keys = [event.key + "1", event.key + "2"]
                else:
                    event.parent_event_key = event.key + "0"
                event.key += str(divno)
            event.event_code = ftcdata_code
            base = f"data/ftc-data/events/1617velv/{region_code.lower()}/1617velv-{ftcdata_code}"

            if os.path.exists(base + "-MatchResultsDetails.html"):
                with open(base + "-MatchResultsDetails.html") as f:
                    matches = ResultsPageHelper.load_match_details(BeautifulSoup(f.read(), 'lxml'), event.key)
                if os.path.exists(base + "-MatchResultsRaw.csv"):
                    MatchDetailsHelper.parse_ftcdata_csv(matches, base + "-MatchResultsRaw.csv")
            elif os.path.exists(base + "-MatchResults.html"):
                with open(base + "-MatchResults.html") as f:
                    matches = ResultsPageHelper.load_matches(
                        BeautifulSoup(f.read(), 'lxml'), event.key)
            else:
                print("warning: ", event.key, "don't exists!")
                continue

            with open(base + "-Rankings.html") as f:
                rankings = ResultsPageHelper.load_rankings(BeautifulSoup(f.read(), 'lxml'), matches)

            tasks.append(asyncio.create_task(EventHelper.insert_event(event, matches, rankings, None,
                                                                      tolerate_missing_finals=True,
                                                                      data_source="cheer4ftc ftc-data repository")))
            #print("loaded " + event.key)
        await asyncio.gather(*tasks)
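The event dates come from an M/D/YYYY column and are parsed by hand above; datetime.strptime does the same in one call (the sample value here is made up):

    import datetime

    # Equivalent to the split/map parse for the same column layout.
    date = datetime.datetime.strptime("4/27/2017", "%m/%d/%Y")
    assert date == datetime.datetime(2017, 4, 27)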