Example #1
0
def test_energy_sum_average_fixture() -> None:
    """Load a week of NEM NSW1 black-coal power fixtures, shape the BW01
    facility subset into a dataframe, run energy_sum over it, and check
    the fixture record count."""
    records = load_energy_fixture_csv("power_nem_nsw1_coal_black_1_week.csv")

    # Keep only the records belonging to the BW01 facility
    bw01_records = [r for r in records if r["facility_code"] == "BW01"]

    bw01_df = shape_energy_dataframe(bw01_records)

    # Result intentionally discarded — this only checks energy_sum runs cleanly
    energy_sum(bw01_df, NetworkNEM)

    assert len(records) == 32288, "Right length of records"
Example #2
0
def _test_energy_sum_outputs() -> None:
    """Shape the black-coal fixture into a dataframe, run energy_sum over it
    and sanity-check the output size and total energy value.

    NOTE(review): annotated ``-> None`` but returns the energy_sum result —
    preserved as-is; confirm which is intended.
    """
    records = load_energy_fixture_csv("nem_generated_coal_black.csv")
    assert len(records) == 50, "Has the correct number of records"

    frame = shape_energy_dataframe(records)
    assert len(frame) == 50, "Has the correct number of records"

    summed = energy_sum(frame, NetworkNEM)

    # should be 50 records
    assert len(summed) == 1536, "Has the correct number of records"
    assert summed.eoi_quantity.sum() > 1000, "Has energy value"

    return summed
Example #3
0
File: energy.py  Project: opennem/opennem
def insert_energies(results: List[Dict], network: NetworkSchema) -> int:
    """Takes a list of generation values and calculates energies and bulk-inserts
    into the database.

    Args:
        results: generation value records to aggregate with energy_sum.
        network: network schema the records belong to.

    Returns:
        Number of records inserted, or 0 on empty input or insert error.
    """
    # Get the energy sums as a dataframe
    esdf = energy_sum(results, network=network)

    # Add metadata
    esdf["created_by"] = "opennem.worker.energy"
    # NOTE(review): created_at is an empty string while updated_at is a real
    # timestamp — looks suspicious but preserved as-is; confirm intent.
    esdf["created_at"] = ""
    esdf["updated_at"] = datetime.now()
    esdf["generated"] = None
    esdf["is_forecast"] = False
    esdf["energy_quality_flag"] = 0

    # reorder columns to match the bulk-insert statement
    columns = [
        "created_by",
        "created_at",
        "updated_at",
        "network_id",
        "trading_interval",
        "facility_code",
        "generated",
        "eoi_quantity",
        "is_forecast",
        "energy_quality_flag",
    ]
    esdf = esdf[columns]

    records_to_store: List[Dict] = esdf.to_dict("records")

    for record in records_to_store[:5]:
        logger.debug(record)

    if len(records_to_store) < 1:
        logger.warning("No records returned from energy sum")
        return 0

    # Dedupe on the primary key (trading_interval, network_id, facility_code),
    # keeping the last record seen for each key.
    # BUGFIX: the previous itertools.groupby-based dedupe only merged
    # *consecutive* duplicates (groupby requires pre-sorted input), so
    # unsorted input could still yield duplicate primary keys. A plain dict
    # dedupes regardless of order and matches the old result on sorted input.
    deduped: Dict = {}

    for record in records_to_store:
        pk_values = (
            record.get("trading_interval"),
            record.get("network_id"),
            record.get("facility_code"),
        )
        deduped[pk_values] = record

    records_to_store = list(deduped.values())

    # Build SQL + CSV and bulk-insert
    sql_query = build_insert_query(FacilityScada,
                                   ["updated_at", "eoi_quantity"])

    csv_content = generate_csv_from_records(
        FacilityScada,
        records_to_store,
        column_names=list(records_to_store[0].keys()),
    )

    conn = get_database_engine().raw_connection()
    cursor = conn.cursor()

    try:
        cursor.copy_expert(sql_query, csv_content)
        conn.commit()
    except Exception as e:
        logger.error("Error inserting records: %s", e)
        return 0
    finally:
        # BUGFIX: cursor/connection were previously leaked on every call;
        # always release them so the pool isn't exhausted.
        cursor.close()
        conn.close()

    logger.info("Inserted %s records", len(records_to_store))

    return len(records_to_store)