def load_networks() -> None:
    """
    Load the networks fixture
    """
    fixture = load_data("networks.json", from_fixture=True)

    s = SessionLocal()

    for network in fixture:
        network_model = s.query(Network).filter_by(
            code=network["code"]).one_or_none()

        if not network_model:
            network_model = Network(code=network["code"])

        network_model.label = network["label"]
        network_model.country = network["country"]
        network_model.timezone = network["timezone"]
        network_model.timezone_database = network["timezone_database"]
        network_model.offset = network["offset"]
        network_model.interval_size = network["interval_size"]
        network_model.network_price = network["network_price"]

        if "interval_shift" in network:
            network_model.interval_shift = network["interval_shift"]

        if "export_set" in network:
            network_model.export_set = network["export_set"]

        try:
            s.add(network_model)
            s.commit()
        except Exception:
            s.rollback()
            logger.error("Error saving network {}".format(network_model.code))
Example #2
def load_network_regions() -> None:
    """
    Load the network region fixture
    """
    fixture = load_data("network_regions.json", from_fixture=True)

    s = SessionLocal()

    for network_region in fixture:
        network_region_model = (s.query(NetworkRegion).filter_by(
            code=network_region["code"],
            network_id=network_region["network_id"]).one_or_none())

        if not network_region_model:
            network_region_model = NetworkRegion(code=network_region["code"])

        network_region_model.network_id = network_region["network_id"]

        try:
            s.add(network_region_model)
            s.commit()
            logger.debug("Loaded network region {}".format(
                network_region_model.code))
        except Exception:
            s.rollback()
            logger.error("Error saving network region {}".format(
                network_region_model.code))
Example #3
def load_bom_stations_csv() -> None:
    """
    Imports the BOM fixed-width stations format

    Made redundant with the new JSON
    """
    s = SessionLocal()

    station_csv = load_data("stations_db.txt", from_fixture=True)

    lines = station_csv.split("\n")

    for line in lines:
        if not line.strip():
            continue

        code, state, name, registered, lng, lat = parse_fixed_line(line)

        station = s.query(BomStation).filter_by(code=code).one_or_none()

        if not station:
            station = BomStation(
                code=code,
                state=state,
                name=name,
                registered=registered,
            )

        station.geom = "SRID=4326;POINT({} {})".format(lng, lat)

        try:
            s.add(station)
            s.commit()
        except Exception:
            s.rollback()
            logger.error("Error saving BOM station {}".format(station.code))
Example #4
def store_stats_database(statset: StatsSet) -> int:
    s = SessionLocal()

    records_to_store = [i.dict() for i in statset.stats]

    stmt = insert(Stats).values(records_to_store)
    stmt.bind = get_database_engine()
    stmt = stmt.on_conflict_do_update(
        index_elements=[
            "stat_date",
            "country",
            "stat_type",
        ],
        set_={
            "value": stmt.excluded.value,
        },
    )

    try:
        s.execute(stmt)
        s.commit()
    except Exception as e:
        logger.error("Error inserting records")
        logger.error(e)
        return 0
    finally:
        s.close()

    num_records = len(records_to_store)

    logger.info("Wrote {} records to database".format(num_records))

    return num_records
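
The statement here is PostgreSQL's INSERT ... ON CONFLICT DO UPDATE built with the SQLAlchemy postgresql dialect. Stripped to its essentials, the pattern is (table and column names illustrative):

from sqlalchemy.dialects.postgresql import insert

def upsert_rows(session, table, rows: list) -> int:
    stmt = insert(table).values(rows)
    stmt = stmt.on_conflict_do_update(
        # conflict target: the columns of a unique index
        # (or pass constraint="..." with a constraint name instead)
        index_elements=["stat_date", "country", "stat_type"],
        # on conflict, take the incoming value via the EXCLUDED pseudo-table
        set_={"value": stmt.excluded.value},
    )
    session.execute(stmt)
    session.commit()
    return len(rows)
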
Example #5
def wikidata_join():
    session = SessionLocal()

    wikidata = load_data("wikidata-parsed.json", from_project=True)

    # session.add()

    for entry in wikidata:
        station_name = entry.get("name")

        station_lookup = (session.query(Station).filter(
            Station.name == station_name).all())

        if len(station_lookup) == 0:
            logger.info("Didn't find a station for {}".format(station_name))

        if len(station_lookup) == 1:
            station = station_lookup.pop()

            station.description = entry.get("description")
            station.wikipedia_link = entry.get("wikipedia")
            station.wikidata_id = entry.get("wikidata_id")

            session.add(station)
            logger.info("Updated station {}".format(station_name))

        if len(station_lookup) > 1:
            logger.info("Found multiple for station {}".format(station_name))

    session.commit()
Example #6
def load_bom_stations_json() -> None:
    """
    Imports BOM stations into the database from bom_stations.json

    The json is obtained using scripts/bom_stations.py
    """
    session = SessionLocal()

    bom_stations = load_data("bom_stations.json", from_project=True)
    bom_capitals = load_data("bom_capitals.json", from_project=True)

    codes = []

    if not bom_stations:
        logger.error("Could not load bom stations")
        return

    stations_imported = 0

    for bom_station in bom_stations:

        if "code" not in bom_station:
            logger.error("Invalida bom station ..")
            continue

        if bom_station["code"] in codes:
            continue

        codes.append(bom_station["code"])

        station = session.query(BomStation).filter_by(
            code=bom_station["code"]).one_or_none()

        if not station:
            logger.info("New BOM station: %s", bom_station["name"])

            station = BomStation(code=bom_station["code"])

        station.name = bom_station["name_full"]
        station.name_alias = bom_station["name"]
        station.website_url = bom_station["url"]
        station.feed_url = bom_station["json_feed"]
        station.priority = 5
        station.state = bom_station["state"]
        station.altitude = bom_station["altitude"]

        if "web_code" in bom_station:
            station.web_code = bom_station["web_code"]

        if bom_station["code"] in bom_capitals:
            station.is_capital = True
            station.priority = 1

        station.geom = "SRID=4326;POINT({} {})".format(bom_station["lng"],
                                                       bom_station["lat"])

        stations_imported += 1
        session.add(station)

    logger.info("Imported {} stations".format(stations_imported))
    session.commit()
Example #7
def process_unit_solution(table):
    session = SessionLocal()
    engine = get_database_engine()

    if "records" not in table:
        raise Exception("Invalid table no records")

    records = table["records"]

    records_to_store = []
    records_primary_keys = set()

    for record in records:
        trading_interval = parse_date(
            record["SETTLEMENTDATE"], network=NetworkNEM, dayfirst=False
        )
        facility_code = normalize_duid(record["DUID"])

        if not trading_interval or not facility_code:
            continue

        # Since this can insert 1M+ records at a time we need to
        # do a separate in-memory check of primary key constraints.
        # A set keeps the membership test O(1)
        _unique_set = (trading_interval, facility_code, "NEM")

        if _unique_set not in records_primary_keys:

            records_to_store.append(
                {
                    "trading_interval": trading_interval,
                    "facility_code": facility_code,
                    "eoi_quantity": float(record["INITIALMW"]),
                    "network_id": "NEM",
                }
            )
            records_primary_keys.add(_unique_set)

    # free
    records_primary_keys = set()

    logger.debug("Saving %d records", len(records_to_store))

    stmt = insert(FacilityScada).values(records_to_store)
    stmt.bind = engine
    stmt = stmt.on_conflict_do_update(
        constraint="facility_scada_pkey",
        set_={"eoi_quantity": stmt.excluded.eoi_quantity},
    )

    try:
        session.execute(stmt)
        session.commit()
    except Exception as e:
        logger.error("Error: {}".format(e))
        return 0
    finally:
        session.close()

    return len(records_to_store)
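
Two details of this upsert are worth noting. The conflict target is given as constraint="facility_scada_pkey" (a named constraint) rather than index_elements (a column list); both compile to the same ON CONFLICT clause. And the in-memory primary-key check above is not optional: a multi-row INSERT ... ON CONFLICT DO UPDATE fails in PostgreSQL if the same key appears twice in one statement ("cannot affect row a second time").
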
Example #8
def store_bom_records_temp_max(recs: List[Dict]) -> Dict[str, int]:
    # records_to_store = [i for i in recs if i["temp_max"] is not None]
    records_to_store = recs

    if not recs:
        return {"num_records": 0}

    first = recs[0]
    update_field = "temp_max"

    if "temp_min" in first:
        update_field = "temp_min"

    engine = get_database_engine()
    session = SessionLocal()

    # insert
    stmt = insert(BomObservation).values(records_to_store)
    stmt.bind = engine
    stmt = stmt.on_conflict_do_update(
        index_elements=["observation_time", "station_id"],
        set_={update_field: getattr(stmt.excluded, update_field)},
    )

    try:
        session.execute(stmt)
        session.commit()
    except Exception as e:
        logger.error("Error inserting records")
        logger.error(e)
        return {"num_records": 0}
    finally:
        session.close()

    return {"num_records": len(records_to_store)}
Example #9
def import_photos_from_fixtures() -> None:
    """ Import photos to stations """
    session = SessionLocal()
    photo_records = get_import_photo_data()

    for photo_record in photo_records:
        station = (session.query(Station).filter(
            Station.code == photo_record.station_code).one_or_none())

        if not station:
            logger.error("Could not find station {}".format(
                photo_record.station_code))
            continue

        img = get_image_from_web(photo_record.image_url)

        if not img:
            logger.error("No image for {}".format(photo_record.image_url))
            continue

        hash_id = image_get_crypto_hash(img)[-8:]
        file_name = "{}_{}_{}.{}".format(hash_id,
                                         station.name.replace(" ", "_"),
                                         "original", "jpeg")

        photo = session.query(Photo).filter_by(hash_id=hash_id).one_or_none()

        if not photo:
            photo = Photo(hash_id=hash_id)

        photo.name = file_name
        photo.width = img.size[0]
        photo.height = img.size[1]
        photo.original_url = photo_record.image_url
        photo.license_type = photo_record.license
        photo.license_link = photo_record.license_link
        photo.author = photo_record.author
        photo.author_link = photo_record.author_link

        if photo_record.is_primary:
            photo.is_primary = True

        photo.approved = True
        photo.approved_by = "opennem.importer.photos"
        photo.approved_at = datetime.now()

        img_buff = img_to_buffer(img)

        write_photo_to_s3(file_name, img_buff)

        station.photos.append(photo)

        session.add(photo)
        session.add(station)

        session.commit()
Example #10
    def process_item(self, item, spider=None):

        s = SessionLocal()

        record_count = 0
        csvreader = csv.DictReader(item["content"].split("\n"))

        for row in csvreader:
            if "PARTICIPANT_CODE" not in row:
                logger.error("Invalid row")
                continue

            participant = None

            participant_code = normalize_duid(
                normalize_string(row["PARTICIPANT_CODE"])
            )
            participant_name = participant_name_filter(
                row["PARTICIPANT_DISPLAY_NAME"]
            ) or participant_name_filter(
                row.get("PARTICIPANT_FULL_NAME", None)
            )

            participant = (
                s.query(Participant)
                .filter(Participant.code == participant_code)
                .one_or_none()
            )

            if not participant:
                participant = Participant(
                    code=participant_code,
                    name=participant_name,
                    created_by=spider.name,
                )

                logger.info(
                    "Created new WEM participant: {}".format(participant_code)
                )

            elif participant.name != participant_name:
                participant.name = participant_name
                participant.updated_by = spider.name

                logger.info(
                    "Updated WEM participant: {}".format(participant_code)
                )

            try:
                s.add(participant)
                s.commit()
                record_count += 1
            except Exception as e:
                s.rollback()
                logger.error(e)

        s.close()

        return record_count
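
These process_item methods are Scrapy-style pipeline hooks: the spider hands over an item whose "content" key holds raw CSV text. A hypothetical driver for exercising the pipeline outside a crawl (the pipeline class name, spider stub and CSV row are made up for illustration):

from types import SimpleNamespace

csv_text = (
    "PARTICIPANT_CODE,PARTICIPANT_DISPLAY_NAME,PARTICIPANT_FULL_NAME\n"
    "EXAMPLECO,Example Energy,Example Energy Pty Ltd\n"
)

pipeline = WemParticipantPipeline()  # hypothetical class holding process_item
spider = SimpleNamespace(name="au.wem.participants")  # stub standing in for a Spider

stored = pipeline.process_item({"content": csv_text}, spider)
print("stored {} participants".format(stored))
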
Example #11
def rooftop_facilities() -> None:

    session = SessionLocal()

    for state_map in STATE_NETWORK_REGION_MAP:
        state_rooftop_code = "{}_{}_{}".format(
            ROOFTOP_CODE,
            state_map["network"].upper(),
            state_map["state"].upper(),
        )

        rooftop_station = session.query(Station).filter_by(
            code=state_rooftop_code).one_or_none()

        if not rooftop_station:
            logger.info("Creating new station {}".format(state_rooftop_code))
            rooftop_station = Station(code=state_rooftop_code)

        rooftop_station.name = "Rooftop Solar {}".format(state_map["state"])
        rooftop_station.description = "Solar rooftop facilities for {}".format(
            state_map["state"])
        rooftop_station.approved = False
        rooftop_station.approved_by = ""
        rooftop_station.created_by = "opennem.importer.rooftop"

        if not rooftop_station.location:
            rooftop_station.location = Location(state=state_map["state"])

        rooftop_fac = session.query(Facility).filter_by(
            code=state_rooftop_code).one_or_none()

        if not rooftop_fac:
            logger.info("Creating new facility {}".format(state_rooftop_code))
            rooftop_fac = Facility(code=state_rooftop_code)

        network = state_map["network"]

        # map to separate AEMO rooftop network
        if network.upper() == "NEM":
            network = "AEMO_ROOFTOP"

        rooftop_fac.network_id = network
        rooftop_fac.network_region = state_map["network_region"]
        rooftop_fac.fueltech_id = "solar_rooftop"
        rooftop_fac.status_id = "operating"
        rooftop_fac.active = True
        rooftop_fac.dispatch_type = DispatchType.GENERATOR
        rooftop_fac.approved_by = "opennem.importer.rooftop"
        rooftop_fac.created_by = "opennem.importer.rooftop"

        rooftop_station.facilities.append(rooftop_fac)
        session.add(rooftop_fac)

        session.add(rooftop_station)

    session.commit()
Example #12
    def process_item(self, item, spider=None):

        s = SessionLocal()

        record_count = 0
        csvreader = csv.DictReader(item["content"].split("\n"))

        for row in csvreader:
            if "Participant Code" not in row:
                logger.error("Invalid row")
                continue

            participant = None

            participant_code = normalize_duid(row["Participant Code"])
            participant_name = participant_name_filter(row["Participant Name"])

            participant = (
                s.query(Participant)
                .filter(Participant.code == participant_code)
                .one_or_none()
            )

            if not participant:
                participant = Participant(
                    code=participant_code,
                    name=participant_name,
                    # @TODO WEM provides these but nem doesn't so ignore for now
                    # address=row["Address"],
                    # city=row["City"],
                    # state=row["State"],
                    # postcode=row["Postcode"],
                    created_by="pipeline.wem.participant",
                )

                logger.info(
                    "Created new WEM participant: {}".format(participant_code)
                )

            elif participant.name != participant_name:
                participant.name = participant_name
                participant.updated_by = "pipeline.wem.participant"
                logger.info(
                    "Updated WEM participant: {}".format(participant_code)
                )

            try:
                s.add(participant)
                s.commit()
                record_count += 1
            except Exception as e:
                s.rollback()
                logger.error(e)

        s.close()

        return record_count
Example #13
    def process_item(self, item, spider=None):

        s = SessionLocal()

        csvreader = csv.DictReader(item["content"].split("\n"))

        records_to_store = []
        primary_keys = []

        for row in csvreader:
            trading_interval = parse_date(row["TRADING_DAY_INTERVAL"],
                                          network=NetworkWEM,
                                          dayfirst=False)

            if trading_interval not in primary_keys:
                forecast_load = clean_float(row["FORECAST_EOI_MW"])

                records_to_store.append({
                    "created_by": spider.name,
                    "trading_interval": trading_interval,
                    "network_id": "WEM",
                    "network_region": "WEM",
                    "forecast_load": forecast_load,
                    # generation_scheduled=row["Scheduled Generation (MW)"],
                    # generation_total=row["Total Generation (MW)"],
                    "price": clean_float(row["PRICE"]),
                })
                primary_keys.append(trading_interval)

        stmt = insert(BalancingSummary).values(records_to_store)
        stmt.bind = get_database_engine()
        stmt = stmt.on_conflict_do_update(
            index_elements=[
                "trading_interval",
                "network_id",
                "network_region",
            ],
            set_={
                "price": stmt.excluded.price,
                "forecast_load": stmt.excluded.forecast_load,
            },
        )

        try:
            s.execute(stmt)
            s.commit()
        except Exception as e:
            logger.error("Error inserting records")
            logger.error(e)
            return 0
        finally:
            s.close()

        return len(records_to_store)
Example #14
def wikidata_join_mapping() -> None:
    """Attempts to join the wikidata to OpenNEM stations using the
    csv file with mappings"""

    session = SessionLocal()

    wikidata = load_data("wikidata-parsed.json", from_project=True)

    wikidata_mappings = None

    with open("opennem/data/wikidata_mappings.csv") as fh:
        csvreader = csv.DictReader(
            fh,
            fieldnames=[
                "code",
                "name",
                "network_id",
                "network_region",
                "fueltech_id",
                "wikidata_id",
            ],
        )
        wikidata_mappings = {
            i["code"]: i["wikidata_id"]
            for i in list(csvreader)
            if i["wikidata_id"] and i["code"] != "code"
        }

    for station_code, wikidata_id in wikidata_mappings.items():
        wikidata_record_lookup = list(
            filter(lambda x: x["wikidata_id"] == wikidata_id, wikidata))

        if len(wikidata_record_lookup) == 0:
            logger.error("Could not find {}".format(wikidata_id))
            continue

        wikidata_record = wikidata_record_lookup.pop()

        station = session.query(Station).filter(
            Station.code == station_code).one_or_none()

        if not station:
            logger.error("Didn't find a station for {}".format(station_code))
            continue

        station.description = wikidata_record.get("description")
        station.wikipedia_link = wikidata_record.get("wikipedia")
        station.wikidata_id = wikidata_record.get("wikidata_id")

        session.add(station)
        logger.info("Updated station {}".format(station_code))

    session.commit()
Example #15
def update_weather() -> None:
    wc = WillyClient()

    # accumulate across all stations so the single insert below sees them all
    records = []

    for bom_code, willyid in WILLY_MAP.items():
        r = wc.get_location_temp(willyid, days=3, start_date="2021-10-14")
        data_points = r["observationalGraphs"]["temperature"]["dataConfig"][
            "series"]["groups"]

        for pointset in data_points:
            for p in pointset["points"]:
                r_dict = {
                    "station_id": bom_code,
                    "observation_time": unix_timestamp_to_aware_datetime(
                        p["x"], "Australia/Sydney"),
                    "temp_air": p["y"],
                }
                logger.debug("{} -> {}".format(r_dict["observation_time"],
                                               r_dict["temp_air"]))
                records.append(r_dict)

    session = SessionLocal()
    engine = get_database_engine()

    stmt = insert(BomObservation).values(records)
    stmt.bind = engine
    stmt = stmt.on_conflict_do_update(
        index_elements=["observation_time", "station_id"],
        set_={
            # "temp_apparent": stmt.excluded.temp_apparent,
            "temp_air": stmt.excluded.temp_air,
        },
    )

    try:
        session.execute(stmt)
        session.commit()
    except Exception as e:
        logger.error("Error: {}".format(e))
    finally:
        session.close()
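
unix_timestamp_to_aware_datetime is not shown in this listing; a minimal equivalent using the standard library's zoneinfo (Python 3.9+) would be the following, though the real OpenNEM helper may differ:

from datetime import datetime
from zoneinfo import ZoneInfo

def unix_timestamp_to_aware_datetime(timestamp: float, tz_name: str) -> datetime:
    # Willy Weather points carry epoch seconds; attach the target timezone
    return datetime.fromtimestamp(timestamp, tz=ZoneInfo(tz_name))
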
Example #16
def process_pre_ap_price(table):
    session = SessionLocal()
    engine = get_database_engine()

    if "records" not in table:
        raise Exception("Invalid table no records")

    records = table["records"]

    records_to_store = []

    for record in records:
        trading_interval = parse_date(
            record["SETTLEMENTDATE"], network=NetworkNEM
        )

        if not trading_interval:
            continue

        records_to_store.append(
            {
                "network_id": "NEM",
                "network_region": record["REGIONID"],
                "trading_interval": trading_interval,
                "price": record["PRE_AP_ENERGY_PRICE"],
            }
        )

    stmt = insert(BalancingSummary).values(records_to_store)
    stmt.bind = engine
    stmt = stmt.on_conflict_do_update(
        constraint="balancing_summary_pkey",
        set_={"price": stmt.excluded.price,},
    )

    try:
        session.execute(stmt)
        session.commit()
    except Exception as e:
        logger.error("Error inserting records")
        logger.error(e)
        return 0
    finally:
        session.close()

    return len(records_to_store)
Example #17
def process_meter_data_gen_duid(table):
    session = SessionLocal()
    engine = get_database_engine()

    if "records" not in table:
        raise Exception("Invalid table no records")

    records = table["records"]

    records_to_store = []

    for record in records:
        trading_interval = parse_date(
            record["INTERVAL_DATETIME"], network=NetworkNEM, dayfirst=False
        )

        if not trading_interval:
            continue

        records_to_store.append(
            {
                "network_id": "NEM",
                "trading_interval": trading_interval,
                "facility_code": normalize_duid(record["DUID"]),
                "eoi_quantity": record["MWH_READING"],
            }
        )

    stmt = insert(FacilityScada).values(records_to_store)
    stmt.bind = engine
    stmt = stmt.on_conflict_do_update(
        constraint="facility_scada_pkey",
        set_={"eoi_quantity": stmt.excluded.eoi_quantity,},
    )

    try:
        session.execute(stmt)
        session.commit()
    except Exception as e:
        logger.error("Error inserting records")
        logger.error(e)
        return 0
    finally:
        session.close()

    return len(records_to_store)
Example #18
    def process_item(self, item, spider=None):

        session = SessionLocal()
        engine = get_database_engine()

        csvreader = csv.DictReader(item["content"].split("\n"))

        records_to_store = []

        for row in csvreader:
            trading_interval = parse_date(
                row["Trading Interval"], dayfirst=True, network=NetworkWEM
            )
            facility_code = normalize_duid(row["Facility Code"])

            records_to_store.append(
                {
                    "network_id": "WEM",
                    "trading_interval": trading_interval,
                    "facility_code": facility_code,
                    "eoi_quantity": row["EOI Quantity (MW)"] or None,
                    "generated": row["Energy Generated (MWh)"] or None,
                }
            )

        stmt = insert(FacilityScada).values(records_to_store)
        stmt.bind = engine
        stmt = stmt.on_conflict_do_update(
            constraint="facility_scada_pkey",
            set_={
                "eoi_quantity": stmt.excluded.eoi_quantity,
                "generated": stmt.excluded.generated,
            },
        )

        try:
            session.execute(stmt)
            session.commit()
        except Exception as e:
            logger.error("Error: {}".format(e))
        finally:
            session.close()

        return len(records_to_store)
Example #19
    def process_item(self, item, spider=None):
        if "records" not in item:
            logger.error("Invalid return response")

        records = item["records"]

        engine = get_database_engine()
        session = SessionLocal()

        records_to_store = []

        for record in records:
            records_to_store.append({
                # "network_id": "WEM" if state == "WA" else "NEM",
                # "trading_interval": interval_time,
                # "facility_code": facility_code,
                # "generated": generated,
            })

        stmt = insert(FacilityScada).values(records_to_store)
        stmt.bind = engine
        stmt = stmt.on_conflict_do_update(
            index_elements=[
                "trading_interval", "network_id", "facility_code",
                "is_forecast"
            ],
            set_={
                "generated": stmt.excluded.generated,
            },
        )

        try:
            session.execute(stmt)
            session.commit()
        except Exception as e:
            logger.error("Error: {}".format(e))
        finally:
            session.close()

        return len(records_to_store)
Example #20
def import_rooftop_aemo_backfills() -> None:
    csvrecords = []
    backfill_file = Path(__file__).parent / "aemo_rooftop_backfill.csv"

    with backfill_file.open() as fh:
        fieldnames = fh.readline().strip().split(",")
        csvreader = csv.DictReader(fh, fieldnames=fieldnames)
        csvrecords = [BackfillRecord(**i) for i in csvreader]

    logger.debug("Loaded {} records".format(len(csvrecords)))

    export_records = [
        i.dict(exclude={"energy_v2", "energy_v3", "network_region"})
        for i in csvrecords
    ]

    session = SessionLocal()

    records_added = 0
    records_updated = 0

    for rec in export_records:
        model = (session.query(AggregateFacilityDaily).filter_by(
            network_id="AEMO_ROOFTOP_BACKFILL").filter_by(
                trading_day=rec["trading_day"]).filter_by(
                    facility_code=rec["facility_code"])).first()

        if not model:
            model = AggregateFacilityDaily(**rec)
            records_added += 1
        else:
            records_updated += 1

        model.energy = rec["energy"]
        model.market_value = rec["market_value"]

        session.add(model)

    session.commit()
    logger.debug("Added {} records and updated {}".format(
        records_added, records_updated))
Example #21
def import_aemo_facility_closure_dates() -> bool:
    closure_records = parse_aemo_closures_xls()
    session = SessionLocal()

    for record in closure_records:
        facility: Optional[Facility] = (session.query(Facility).filter_by(
            network_id="NEM").filter_by(code=record.duid).one_or_none())

        if not facility:
            logger.info("Could not find facility {} - {}".format(
                record.duid, record.station_name))
            continue

        facility.expected_closure_date = record.expected_closure_date
        facility.expected_closure_year = record.expected_closure_year

        session.add(facility)

    session.commit()

    return True
Example #22
def load_fueltechs() -> None:
    """
    Load the fueltechs fixture
    """
    fixture = load_data("fueltechs.json", from_fixture=True)

    s = SessionLocal()

    for ft in fixture:
        fueltech = s.query(FuelTech).filter_by(code=ft["code"]).one_or_none()

        if not fueltech:
            fueltech = FuelTech(code=ft["code"])

        fueltech.label = ft["label"]
        fueltech.renewable = ft["renewable"]

        try:
            s.add(fueltech)
            s.commit()
        except Exception:
            s.rollback()
            logger.error("Error saving fueltech {}".format(fueltech.code))
Example #23
def load_facilitystatus() -> None:
    """
    Load the facility status fixture
    """
    fixture = load_data("facility_status.json", from_fixture=True)

    s = SessionLocal()

    for status in fixture:
        facility_status = s.query(FacilityStatus).filter_by(
            code=status["code"]).one_or_none()

        if not facility_status:
            facility_status = FacilityStatus(code=status["code"])

        facility_status.label = status["label"]

        try:
            s.add(facility_status)
            s.commit()
        except Exception:
            s.rollback()
            logger.error("Error saving facility status {}".format(
                facility_status.code))
Example #24
def wikidata_join() -> None:
    """Attempts to join the wikidata to OpenNEM stations based
    on the name"""

    session = SessionLocal()

    wikidata = load_data("wikidata-parsed.json", from_project=True)

    # Use a better query
    # engine = get_database_engine()

    # station_lookup_query = """

    # """

    for entry in wikidata:
        station_name = entry.get("name")

        station_lookup = session.query(Station).filter(
            Station.name == station_name).all()

        if len(station_lookup) == 0:
            logger.info("Didn't find a station for {}".format(station_name))

        if len(station_lookup) == 1:
            station = station_lookup.pop()

            station.description = entry.get("description")
            station.wikipedia_link = entry.get("wikipedia")
            station.wikidata_id = entry.get("wikidata_id")

            session.add(station)
            logger.info("Updated station {}".format(station_name))

        if len(station_lookup) > 1:
            logger.info("Found multiple for station {}".format(station_name))

    session.commit()
Example #25
def load_bom_stations_json() -> None:
    """
    Imports BOM stations into the database from bom_stations.json

    The json is obtained using scripts/bom_stations.py
    """
    session = SessionLocal()

    bom_stations = load_data("bom_stations.json", from_project=True)
    bom_capitals = load_data("bom_capitals.json", from_project=True)

    for bom_station in bom_stations:
        station = (session.query(BomStation).filter_by(
            code=bom_station["code"]).one_or_none())

        if not station:
            logger.info("New station: %s", bom_station["name"])

            station = BomStation(code=bom_station["code"])

        station.name = bom_station["name_full"]
        station.name_alias = bom_station["name"]
        station.website_url = bom_station["url"]
        station.feed_url = bom_station["json_feed"]
        station.priority = 5
        station.state = bom_station["state"]
        station.altitude = bom_station["altitude"]

        if bom_station["code"] in bom_capitals:
            station.is_capital = True
            station.priority = 1

        station.geom = "SRID=4326;POINT({} {})".format(bom_station["lng"],
                                                       bom_station["lat"])

        session.add(station)

    session.commit()
Example #26
    def process_item(self, item, spider):

        s = SessionLocal()

        observation = BomObservation(
            station_id=item["station_id"],
            observation_time=parse_date(item["aifstime_utc"]),
            temp_apparent=item["apparent_t"],
            temp_air=item["air_temp"],
            press_qnh=item["press_qnh"],
            wind_dir=item["wind_dir"],
            wind_spd=item["wind_spd_kmh"],
        )

        try:
            s.add(observation)
            s.commit()
        except Exception as e:
            logger.error("Error: {}".format(e))
        finally:
            s.close()

        return item
Example #27
def import_emissions_map(file_name: str) -> None:
    """Import emission factors from CSV files for each network
    the format of the csv file is

    station_name,network_id,network_region,facility_code,emissions_factor_co2,fueltech_id,emission_factor_source
    """
    session = SessionLocal()

    content = load_data(file_name, from_project=True, skip_loaders=True)

    csv_content = content.splitlines()
    csvreader = csv.DictReader(csv_content)

    for rec in csvreader:
        network_id = rec["network_id"]
        facility_code = normalize_duid(rec["facility_code"])
        emissions_intensity = clean_float(rec["emissions_factor_co2"])

        if not facility_code:
            logger.info("Skipping row with no facility code")
            continue

        if emissions_intensity is None:
            logger.info("No emissions intensity for {}".format(facility_code))
            continue

        facility = (session.query(Facility).filter_by(
            code=facility_code).filter_by(network_id=network_id).one_or_none())

        if not facility:
            logger.info("No stored facility for {}".format(facility_code))
            continue

        facility.emissions_factor_co2 = emissions_intensity
        session.add(facility)
        logger.info("Updated {} to {}".format(facility_code,
                                              emissions_intensity))

    session.commit()

    return None
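
Usage is a single call with a CSV in the documented format. Both the file name and the row below are illustrative:

# station_name,network_id,network_region,facility_code,emissions_factor_co2,fueltech_id,emission_factor_source
# Example Power Station,NEM,NSW1,EXMPL01,0.9,coal_black,AEMO CDEII

import_emissions_map("emission_factors_nem.csv")
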
Example #28
def import_osm_ways() -> int:
    """Gets the geometries for each OSM way id and store them against the station geom"""
    session = SessionLocal()

    stations_with_ways = (session.query(Station).join(Location).filter(
        Location.osm_way_id.isnot(None)).all())

    if not stations_with_ways:
        logger.error("No stations with ways!")
        return 0

    station_count = 0

    for station in stations_with_ways:

        location: Location = station.location

        geom_boundary = None

        try:
            geom_boundary = get_osm_geom(station.location.osm_way_id)
        except Exception as e:
            logger.error("get osm wkt error: {}".format(e))

        if not geom_boundary:
            logger.error("Error getting WKT from OSM")
            continue

        location.boundary = geom_boundary
        session.add(location)
        session.commit()
        station_count += 1

        logger.info("Updated boundary geom from OSM for station: {}".format(
            station.code))

    return station_count
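
get_osm_geom is defined elsewhere in the project. One plausible shape for it, querying the public Overpass API and building the boundary with shapely, is sketched below; this is an assumption for illustration, not OpenNEM's actual implementation:

import requests
from shapely.geometry import Polygon

OVERPASS_URL = "https://overpass-api.de/api/interpreter"

def get_osm_geom(way_id: str) -> str:
    # fetch the way plus its member nodes in one query
    query = "[out:json];way({});(._;>;);out;".format(way_id)
    resp = requests.post(OVERPASS_URL, data={"data": query}, timeout=30)
    resp.raise_for_status()

    elements = resp.json()["elements"]
    nodes = {e["id"]: (e["lon"], e["lat"]) for e in elements if e["type"] == "node"}
    way = next(e for e in elements if e["type"] == "way")

    # closed OSM ways list their nodes in ring order; return EWKT for PostGIS
    polygon = Polygon([nodes[ref] for ref in way["nodes"]])
    return "SRID=4326;" + polygon.wkt
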
Example #29
def import_osm_way_data() -> None:
    """Updates the OSM way ids for stations from the CSV file fixture"""
    session = SessionLocal()
    station_osm_records = get_import_osm_data()

    for station_osm_record in station_osm_records:
        station = (session.query(Station).filter(
            Station.code == station_osm_record.station_code).one_or_none())

        if not station:
            logger.error("Could not find station {}".format(
                station_osm_record.station_code))
            continue

        if not station.location:
            logger.error("Station {} does not have a location".format(
                station_osm_record.station_code))
            continue

        station.location.osm_way_id = station_osm_record.osm_way_id
        session.add(station)
        session.commit()

        logger.info("Updated station: {}".format(station.code))
Example #30
def process_trading_regionsum(table: Dict[str, Any], spider: Spider) -> Dict:
    session = SessionLocal()
    engine = get_database_engine()

    if "records" not in table:
        raise Exception("Invalid table no records")

    records = table["records"]

    limit = None
    records_to_store = []
    records_processed = 0
    primary_keys = set()

    for record in records:
        trading_interval = parse_date(
            record["SETTLEMENTDATE"],
            network=NetworkNEM,
            dayfirst=False,
            date_format="%Y/%m/%d %H:%M:%S",
        )

        if not trading_interval:
            continue

        _pk = (trading_interval, record["REGIONID"])

        if _pk in primary_keys:
            continue

        primary_keys.add(_pk)

        demand_total = None

        if "TOTALDEMAND" in record:
            demand_total = clean_float(record["TOTALDEMAND"])

        records_to_store.append({
            "network_id": "NEM",
            "created_by": spider.name,
            "network_region": record["REGIONID"],
            "trading_interval": trading_interval,
            "demand_total": demand_total,
        })

        records_processed += 1

        if limit and records_processed >= limit:
            logger.info("Reached limit of: {} {}".format(
                limit, records_processed))
            break

    stmt = insert(BalancingSummary).values(records_to_store)
    stmt.bind = engine
    stmt = stmt.on_conflict_do_update(
        index_elements=["trading_interval", "network_id", "network_region"],
        set_={
            "demand_total": stmt.excluded.demand_total,
        },
    )

    try:
        session.execute(stmt)
        session.commit()
    except Exception as e:
        logger.error("Error inserting records")
        logger.error(e)
        return {"num_records": 0}

    finally:
        session.close()

    return {"num_records": len(records_to_store)}