Example 1
        def wrap_calc_func(*args, **kwargs):
            pc = PerfCounter('%s.%s' % (func.__module__, func.__name__))
            pc.display('enter')

            hash_data = _get_func_hash_data(func)
            cache_key = _calculate_cache_key(hash_data)

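            # 'variables' and 'datasets' are injected by this wrapper below, so
            # callers must not pass them in themselves.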
            assert 'variables' not in kwargs
            assert 'datasets' not in kwargs

            if not args and not kwargs:
                should_cache_func = True
            else:
                should_cache_func = False
                print('not caching func %s.%s' % (func.__module__, func.__name__))

            if should_cache_func:
                ret = cache.get(cache_key)
                if ret is not None:  # calcfuncs must not return None
                    pc.display('cache hit')
                    return ret

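            # Resolve the declared variables and inject them as a single 'variables' kwarg.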
            if variables is not None:
                kwargs['variables'] = {x: get_variable(y) for x, y in variables.items()}

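            # Datasets are loaded lazily and memoized in the module-level _dataset_cache,
            # so repeated calcfunc calls do not reload them.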
            if datasets is not None:
                datasets_to_load = set(datasets.values()) - set(_dataset_cache.keys())
                if datasets_to_load:
                    loaded_datasets = []
                    for dataset_name in datasets_to_load:
                        ds_pc = PerfCounter('dataset %s' % dataset_name)
                        df = load_datasets(dataset_name)
                        ds_pc.display('loaded')
                        loaded_datasets.append(df)
                        del ds_pc

                    for dataset_name, dataset in zip(datasets_to_load, loaded_datasets):
                        _dataset_cache[dataset_name] = dataset

                kwargs['datasets'] = {ds_name: _dataset_cache[ds_url] for ds_name, ds_url in datasets.items()}

            ret = func(*args, **kwargs)
            pc.display('func ret')
            if should_cache_func:
                assert ret is not None
                cache.set(cache_key, ret, timeout=600)

            return ret
Example 2
    'jyrjola/aluesarjat/hginseutu_va_ve01_vaestoennuste_pks',
]

import pandas as pd
import scipy
import importlib

from utils import dict_merge
from utils.quilt import load_datasets
import aplans_graphs

import plotly
import plotly.graph_objs as go
import cufflinks as cf

kaup_rakennus_tyypeittain, pks_khk_paastot, vaestoennuste_pks = load_datasets(INPUT_DATASETS)

plotly.offline.init_notebook_mode(connected=True)
cf.set_config_file(offline=True)

# %%
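# Reshape the building statistics so that each Muuttuja (variable) becomes its own column.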
df = kaup_rakennus_tyypeittain.copy()
df.Vuosi = df.Vuosi.astype(int)
buildings_by_type = df.pivot_table(index=['Vuosi', 'Rakennustyyppi'], columns='Muuttuja', values='value').reset_index()
display(buildings_by_type)
kaup_rakennus_tyypeittain = df[df.Rakennustyyppi == 'Kaikki yhteensä'].pivot(index='Vuosi', columns='Muuttuja', values='value')

# %%
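# Keep only the 2018 projection for the total population (both sexes) in Helsinki.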
df = vaestoennuste_pks
df = df[(df.Laadintavuosi == 'Laadittu 2018') & (df.Sukupuoli == 'Molemmat sukupuolet') & (df.Ikä == 'Väestö yhteensä')]
df = df[df.Alue == 'Helsinki']
Example 3
def prepare_input_datasets():
    global ghg_emissions

    ghg_in = load_datasets(INPUT_DATASETS)
    ghg_emissions = prepare_ghg_emissions_dataset(ghg_in)
Example 4
        def wrap_calc_func(*args, **kwargs):
            should_profile = os.environ.get('PROFILE_CALC', '').lower() in ('1', 'true', 'yes')

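            # These control kwargs are consumed by the wrapper itself and are never
            # passed on to the wrapped calcfunc.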
            only_if_in_cache = kwargs.pop('only_if_in_cache', False)
            skip_cache = kwargs.pop('skip_cache', False)
            var_store = kwargs.pop('variable_store', None)

            if should_profile:
                pc = PerfCounter('%s.%s' % (func.__module__, func.__name__))
                pc.display('enter')

            hash_data = _get_func_hash_data(func, None)
            cache_key = _calculate_cache_key(func, hash_data, var_store=var_store)

            assert 'variables' not in kwargs
            assert 'datasets' not in kwargs

            unknown_kwargs = set(kwargs.keys()) - set(['step_callback'])
            if not args and not unknown_kwargs and not skip_cache:
                should_cache_func = True
            else:
                should_cache_func = False

            if should_cache_func:
                ret = cache.get(cache_key)
                if ret is not None:  # calcfuncs must not return None
                    if should_profile:
                        pc.display('cache hit (%s)' % cache_key)
                    return ret
                if only_if_in_cache:
                    if should_profile:
                        pc.display('cache miss so leaving as requested (%s)' % cache_key)
                    return None

            if variables is not None:
                kwargs['variables'] = {x: get_variable(y, var_store=var_store) for x, y in variables.items()}

            if datasets is not None:
                datasets_to_load = set(datasets.values()) - set(_dataset_cache.keys())
                if datasets_to_load:
                    loaded_datasets = []
                    for dataset_name in datasets_to_load:
                        if should_profile:
                            ds_pc = PerfCounter('dataset %s' % dataset_name)
                        df = load_datasets(dataset_name)
                        if should_profile:
                            ds_pc.display('loaded')
                            del ds_pc
                        loaded_datasets.append(df)

                    for dataset_name, dataset in zip(datasets_to_load, loaded_datasets):
                        _dataset_cache[dataset_name] = dataset

                kwargs['datasets'] = {ds_name: _dataset_cache[ds_url] for ds_name, ds_url in datasets.items()}

            ret = func(*args, **kwargs)

            if should_profile:
                pc.display('func ret')
            if should_cache_func:
                assert ret is not None
                cache.set(cache_key, ret, timeout=3600)

            return ret
Example 5
def prepare_input_datasets():
    global buildings_by_heating_method

    buildings_in = load_datasets(INPUT_DATASETS)
    buildings_by_heating_method = prepare_buildings_dataset(buildings_in)
Example 6
def process_input_datasets():
    global population_forecast

    pop_in = load_datasets(INPUT_DATASETS)
    population_forecast = prepare_population_forecast_dataset(pop_in)
Example 7
INPUT_DATASETS = [
    'jyrjola/fingrid_hourly/power',
    'jyrjola/energiateollisuus/electricity_production_hourly',
    'jyrjola/energiateollisuus/electricity_production_fuels'
]

from datetime import timedelta
import pandas as pd
import numpy as np
import altair as alt
from utils.quilt import load_datasets
from data_import import statfi, energiateollisuus

fuel_emission_factors = statfi.get_fuel_classification(include_units=True)
energy_production = statfi.get_energy_production_stats()

fg_hourly, et_hourly, et_fuels = load_datasets(INPUT_DATASETS, include_units=False)

import plotly
from plotly.offline import iplot
import plotly.graph_objs as go
import cufflinks as cf
import aplans_graphs

plotly.offline.init_notebook_mode(connected=True)
cf.set_config_file(offline=True)


# %% [markdown]
# ## Fuel emission coefficients
#
# An emission coefficient relates the quantity of pollutants released to the atmosphere to an associated activity. Each fuel has a unit emission coefficient based on the amount of gases with [global warming potential](https://en.wikipedia.org/wiki/Global_warming_potential) (GWP) that burning it emits. The coefficient is expressed as the mass of $\ce{CO2}$ that would produce the equivalent amount of GWP.
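#
# For example, burning fuel with an energy content of 100 GWh and a coefficient of
# 93 t CO2e/TJ (roughly the Statistics Finland value for hard coal) corresponds to
# 100 GWh = 360 TJ and 360 TJ × 93 t/TJ ≈ 33,480 t CO2e. Below is a minimal sketch
# of that conversion with plain pint; the numbers are illustrative and not taken
# from the datasets loaded above.

# %%
import pint

ureg = pint.UnitRegistry()
fuel_energy = 100 * ureg.GWh               # energy content of the burned fuel
emission_factor = 93 * ureg.t / ureg.TJ    # assumed unit emission coefficient (t CO2e per TJ)
emissions = (fuel_energy * emission_factor).to('t')
print(emissions)                           # ≈ 33480 metric tons of CO2e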
Example 8
]

import pandas as pd
import pintpandas
from utils.quilt import load_datasets

import plotly
import plotly.graph_objs as go
import cufflinks as cf

import aplans_graphs

plotly.offline.init_notebook_mode(connected=True)
cf.set_config_file(offline=True)

dh_fuel_df, dh_production_df, fuel_classification, dh_demand_df, city_demand_df = load_datasets(
    INPUT_DATASETS)

# %%

# %%
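# Join the operator's district heating fuel use with CO2e emission factors
# from the Statistics Finland fuel classification.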
fuel_co2 = fuel_classification[['code', 'co2e_emission_factor',
                                'is_bio']].set_index('code')
df = dh_fuel_df[dh_fuel_df.Operator == DISTRICT_HEATING_OPERATOR]
df = df.merge(fuel_co2, how='left', left_on='StatfiFuelCode', right_index=True)
df.co2e_emission_factor = df.co2e_emission_factor.astype('pint[t/TJ]')
df.Value = df.Value.astype('pint[GWh]')
operator_fuel_with_co2 = df.copy()
df['Emissions'] = (df.Value * df.co2e_emission_factor).pint.to('tonne').pint.m

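# Biofuels are treated as carbon neutral here, so their direct emissions are zeroed.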
df.loc[df.is_bio == True, 'Emissions'] = 0
emissions = df[df.Operator == DISTRICT_HEATING_OPERATOR].groupby(
Example 9
import os
import pandas as pd

from calc import calcfunc
from calc.electricity import calculate_electricity_supply_emission_factor
from components.cards import GraphCard
from components.graphs import make_layout
from utils.quilt import load_datasets
from .base import Page

DEFAULT_PRICE_PER_KWH = 12
INITIAL_INSTALL_PRICE = 5000
PRICE_PER_PEAK_KW = 1000
PEAK_WATTS_PER_M2 = 150


hel_buildings, hsy_buildings, fingrid_price = load_datasets([
    'jyrjola/karttahel/buildings', 'jyrjola/hsy/buildings', 'jyrjola/fingrid_hourly/price'
])


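# Read locally cached Nuuka building and sensor metadata from parquet files.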
def read_nuuka_data():
    DATA_DIR = 'data/nuuka'

    buildings = pd.read_parquet(DATA_DIR + '/buildings.parquet')
    sensors = pd.read_parquet(DATA_DIR + '/sensors.parquet')

    def is_building_needed(b_id):
        # A building is included only if its per-building parquet file exists.
        return os.path.exists('%s/%s.parquet' % (DATA_DIR, b_id))
Example 10
import math
import pandas as pd
import scipy.stats
from variables import get_variable
from utils.quilt import load_datasets
import pintpandas  # noqa

INPUT_DATASETS = [
    'jyrjola/energiateollisuus/district_heating_fuel',
    'jyrjola/energiateollisuus/district_heating_production',
    'jyrjola/statfi/fuel_classification'
]

dh_fuel_df, dh_production_df, fuel_classification = load_datasets(
    INPUT_DATASETS)


def _prepare_fuel_emissions_dataset(fuel_use_df):
    # (function body truncated in this example)
    return df


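# Column names (in Finnish) used in the Energiateollisuus district heating statistics.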
HEAT_PUMP_COL = 'Lämmön talteenotto tai lämpöpumpun tuotanto'
HEAT_DEMAND_COL = 'Käyttö'
FUEL_NET_PRODUCTION_COL = 'Nettotuotanto polttoaineilla'
PRODUCTION_LOSS_COL = 'Verkkohäviöt ja mittauserot'
TOTAL_PRODUCTION_COL = 'Yhteensä'
CHP_ELECTRICITY_PRODUCTION_COL = 'Kaukolämmön tuotantoon liittyvä sähkön nettotuotanto'

ALL_FUEL_PRODUCTION_TOTAL_COL = 'Kaukolämmön ja yhteistuotantosähkön tuotantoon käytetyt polttoaineet yhteensä'