def __init__(self, id, subsector_id, service_demand_unit, stock_time_unit, cost_of_capital, scenario=None, **kwargs):
    self.id = id
    self.subsector_id = subsector_id
    self.scenario = scenario
    StockItem.__init__(self)
    self.service_demand_unit = service_demand_unit
    self.stock_time_unit = stock_time_unit
    for col, att in util.object_att_from_table('DemandTechs', self.id):
        setattr(self, col, att)
    # if cost_of_capital at the technology level is None, it uses subsector defaults
    if self.cost_of_capital is None:
        self.cost_of_capital = cost_of_capital
    # we can have multiple sales shares because sales share may be specific
    # to the transition between two technologies
    self.reference_sales_shares = {}
    if self.id in util.sql_read_table('DemandSalesData', 'demand_technology_id', return_unique=True, return_iterable=True):
        self.reference_sales_shares[1] = SalesShare(id=self.id, subsector_id=self.subsector_id, reference=True,
                                                    sql_id_table='DemandSales', sql_data_table='DemandSalesData',
                                                    primary_key='subsector_id', data_id_key='demand_technology_id',
                                                    scenario=scenario)
    self.book_life()
    self.add_class()
    self.min_year()
    self.shape = shape.shapes.data[self.shape_id] if self.shape_id is not None else None
def __init__(self, id, subsector_id, sql_id_table, sql_data_table, primary_key, data_id_key, reference=False, scenario=None):
    self.id = id
    self.subsector_id = subsector_id
    self.sql_id_table = sql_id_table
    self.sql_data_table = sql_data_table
    self.scenario = scenario
    self.mapped = False
    if reference:
        for col, att in util.object_att_from_table(self.sql_id_table, self.subsector_id, primary_key):
            if att is not None:
                setattr(self, col, att)
        DataMapFunctions.__init__(self, data_id_key)
        self.read_timeseries_data(subsector_id=self.subsector_id)
        self.raw_values = util.remove_df_levels(self.raw_values, 'technology')
    else:
        self.replaced_demand_tech_id = None
        # measure specific sales share does not require technology filtering
        Abstract.__init__(self, self.id, primary_key=primary_key, data_id_key=data_id_key)
def __init__(self, id, supply_node_id, sql_id_table, sql_data_table, primary_key, data_id_key, reference=False, scenario=None):
    self.id = id
    self.input_type = 'total'
    self.supply_node_id = supply_node_id
    self.sql_id_table = sql_id_table
    self.sql_data_table = sql_data_table
    self.scenario = scenario
    self.mapped = False
    if reference:
        for col, att in util.object_att_from_table(self.sql_id_table, self.supply_node_id, primary_key):
            setattr(self, col, att)
        DataMapFunctions.__init__(self, data_id_key)
        self.read_timeseries_data(supply_node_id=self.supply_node_id)
        self.raw_values = util.remove_df_levels(self.raw_values, 'supply_technology')
    else:
        # measure specific sales does not require technology filtering
        Abstract.__init__(self, self.id, primary_key=primary_key, data_id_key=data_id_key)
def __init__(self, id, supply_node_id, sql_id_table, sql_data_table, reference=False):
    self.id = id
    self.supply_node_id = supply_node_id
    self.sql_id_table = sql_id_table
    self.sql_data_table = sql_data_table
    self.mapped = False
    self.input_type = 'intensity'
    if reference:
        for col, att in util.object_att_from_table(self.sql_id_table, self.supply_node_id, 'supply_node_id'):
            if att is not None:
                setattr(self, col, att)
        DataMapFunctions.__init__(self, 'supply_technology')
        self.read_timeseries_data()
        self.raw_values = util.remove_df_levels(self.raw_values, ['supply_node', 'supply_technology'])
    else:
        # measure specific sales share does not require technology filtering
        Abstract.__init__(self, self.id)
def __init__(self, id, subsector_id, service_demand_unit, stock_time_unit, cost_of_capital, scenario=None, **kwargs):
    self.id = id
    self.subsector_id = subsector_id
    self.scenario = scenario
    StockItem.__init__(self)
    self.service_demand_unit = service_demand_unit
    self.stock_time_unit = stock_time_unit
    for col, att in util.object_att_from_table('DemandTechs', self.id):
        setattr(self, col, att)
    # if cost_of_capital at the technology level is None, it uses subsector defaults
    if self.cost_of_capital is None:
        self.cost_of_capital = cost_of_capital
    # we can have multiple sales shares because sales share may be specific
    # to the transition between two technologies
    self.reference_sales_shares = {}
    if self.id in util.sql_read_table('DemandSalesData', 'demand_technology_id', return_unique=True, return_iterable=True):
        self.reference_sales_shares[1] = SalesShare(id=self.id, subsector_id=self.subsector_id, reference=True,
                                                    sql_id_table='DemandSales', sql_data_table='DemandSalesData',
                                                    primary_key='subsector_id', data_id_key='demand_technology_id',
                                                    scenario=scenario)
    self.book_life()
    self.add_class()
    self.min_year()
    if self.shape_id is not None:
        self.shape = shape.shapes.data[self.shape_id]
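# Note: the guarded assignment above leaves self.shape undefined when shape_id is None,
# whereas the conditional-expression form used in the first variant of this constructor
# always sets the attribute and avoids a later AttributeError:
# self.shape = shape.shapes.data[self.shape_id] if self.shape_id is not None else None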
def __init__(self, dispatch_feeders, dispatch_geography, dispatch_geographies, scenario):
    # TODO: replace 1 with a config parameter
    for col, att in util.object_att_from_table('DispatchConfig', 1):
        setattr(self, col, att)
    self.node_config_dict = dict()
    for supply_node in util.sql_read_table('DispatchNodeConfig', 'supply_node_id', return_iterable=True):
        self.node_config_dict[supply_node] = DispatchNodeConfig(supply_node)
    self.set_dispatch_orders()
    self.dispatch_window_dict = dict(util.sql_read_table('DispatchWindows'))
    self.curtailment_cost = util.unit_convert(0, unit_from_den='megawatt_hour', unit_to_den=cfg.calculation_energy_unit)
    self.unserved_capacity_cost = util.unit_convert(10000.0, unit_from_den='megawatt_hour', unit_to_den=cfg.calculation_energy_unit)
    self.dist_net_load_penalty = util.unit_convert(15000.0, unit_from_den='megawatt_hour', unit_to_den=cfg.calculation_energy_unit)
    # this bulk penalty is mostly for transmission
    self.bulk_net_load_penalty = util.unit_convert(5000.0, unit_from_den='megawatt_hour', unit_to_den=cfg.calculation_energy_unit)
    self.ld_upward_imbalance_penalty = util.unit_convert(150.0, unit_from_den='megawatt_hour', unit_to_den=cfg.calculation_energy_unit)
    self.ld_downward_imbalance_penalty = util.unit_convert(50.0, unit_from_den='megawatt_hour', unit_to_den=cfg.calculation_energy_unit)
    self.dispatch_feeders = dispatch_feeders
    self.feeders = [0] + dispatch_feeders
    self.dispatch_geography = dispatch_geography
    self.dispatch_geographies = dispatch_geographies
    self.stdout_detail = cfg.cfgfile.get('opt', 'stdout_detail')
    self.transmission = dispatch_transmission.DispatchTransmission(cfg.transmission_constraint_id, scenario)
    if self.stdout_detail == 'False':
        self.stdout_detail = False
    else:
        self.stdout_detail = True
    self.solve_kwargs = {"keepfiles": False, "tee": False}
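# The penalty and cost constants above are entered per megawatt-hour and converted into the
# model's calculation energy unit. Below is a minimal sketch of that denominator conversion
# using a pint unit registry; the real util.unit_convert may differ, and per_energy_unit and
# the 'gigajoule' target unit are illustrative assumptions, not part of the codebase.
import pint

_ureg = pint.UnitRegistry()

def per_energy_unit(value, unit_from_den='megawatt_hour', unit_to_den='gigajoule'):
    # express `value` per unit_from_den, then re-express it per unit_to_den
    quantity = value * _ureg('1/' + unit_from_den)
    return quantity.to('1/' + unit_to_den).magnitude

# e.g. the 10000.0 unserved-capacity penalty per MWh becomes ~2777.8 per GJ:
# per_energy_unit(10000.0)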
def __init__(self, id, primary_key='id', data_id_key=None, **filters):
    # Ryan: I've introduced a new parameter called data_id_key, which is the key in the "Data" table.
    # Because we are introducing primary keys into the Data tables, it is sometimes necessary to specify them separately.
    # Before, we only had primary_key, which was shared by the "parent" and "data" tables, and it is still the default as we make the change.
    if data_id_key is None:
        data_id_key = primary_key
    try:
        col_att = util.object_att_from_table(self.sql_id_table, id, primary_key)
    except:
        print self.sql_id_table, id, primary_key
        raise
    if col_att is None:
        self.data = False
    else:
        for col, att in col_att:
            # if att is not None:
            setattr(self, col, att)
        self.data = True
    DataMapFunctions.__init__(self, data_id_key)
    self.read_timeseries_data(**filters)
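# A hedged sketch of the primary_key / data_id_key split described above: the row in the
# parent table is looked up by primary_key, while rows in the companion "Data" table are
# keyed by data_id_key (here 'parent_id', as in the Shapes classes later in this listing).
# The ExampleMeasure class and its table names are hypothetical.
class ExampleMeasure(Abstract):
    def __init__(self, id):
        self.sql_id_table = 'ExampleMeasures'        # parent table, keyed by 'id'
        self.sql_data_table = 'ExampleMeasuresData'  # data table, keyed by 'parent_id'
        Abstract.__init__(self, id, primary_key='id', data_id_key='parent_id')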
def __init__(self, id=None):
    if id is not None:
        self.id = id
        self.sql_id_table = 'Shapes'
        self.sql_data_table = 'ShapesData'
        for col, att in util.object_att_from_table(self.sql_id_table, id):
            setattr(self, col, att)
        # creates the index_levels dictionary
        dmf.DataMapFunctions.__init__(self, data_id_key='parent_id')
def __init__(self, id=None):
    if id is not None:
        self.id = id
        self.sql_id_table = 'Shapes'
        self.sql_data_table = 'ShapesData'
        for col, att in util.object_att_from_table(self.sql_id_table, id):
            setattr(self, col, att)
        # creates the index_levels dictionary
        dmf.DataMapFunctions.__init__(self)
def __init__(self, id):
    self.id = id
    self.sql_id_table = 'Shapes'
    self.sql_data_table = 'ShapesData'
    for col, att in util.object_att_from_table(self.sql_id_table, id):
        setattr(self, col, att)
    dmf.DataMapFunctions.__init__(self, data_id_key='parent_id')
    # needed for parallel process
    self.workingdir = cfg.workingdir
    self.cfgfile_name = cfg.cfgfile_name
    self.log_name = cfg.log_name
def __init__(self, id, cost_of_capital, **kwargs):
    self.id = id
    self.sql_id_table = 'DemandFuelSwitchingMeasures'
    self.sql_data_table = 'DemandFuelSwitchingMeasuresData'
    for col, att in util.object_att_from_table(self.sql_id_table, self.id):
        if att is not None:
            setattr(self, col, att)
    self.calculate_book_life()
    self.cost_of_capital = cost_of_capital
    self.impact = FuelSwitchingImpact(self.id)
    self.energy_intensity = FuelSwitchingEnergyIntensity(self.id)
    self.cost = DemandMeasureCost(id, self.cost_of_capital, self.book_life,
                                  'DemandFuelSwitchingMeasuresCost', 'DemandFuelSwitchingMeasuresCostData')
def __init__(self, id, drivers, sql_id_table, sql_data_table, primary_key, technology_id=None, **kwargs):
    self.id = id
    self.drivers = drivers
    self.technology_id = technology_id
    self.sql_id_table = sql_id_table
    self.sql_data_table = sql_data_table
    self.primary_key = primary_key
    for col, att in util.object_att_from_table(self.sql_id_table, self.id, 'subsector_id'):
        setattr(self, col, att)
    self.in_use_drivers()
    DataMapFunctions.__init__(self, self.primary_key)
    self.read_timeseries_data()
    self.projected = False
def __init__(self, dispatch_feeders, dispatch_geography, dispatch_geographies, scenario):
    # TODO: replace 1 with a config parameter
    for col, att in util.object_att_from_table('DispatchConfig', 1):
        setattr(self, col, att)
    self.node_config_dict = dict()
    for supply_node in util.sql_read_table('DispatchNodeConfig', 'supply_node_id', return_iterable=True):
        self.node_config_dict[supply_node] = DispatchNodeConfig(supply_node)
    self.set_dispatch_orders()
    self.dispatch_window_dict = dict(util.sql_read_table('DispatchWindows'))
    self.curtailment_cost = util.unit_convert(0, unit_from_den='megawatt_hour', unit_to_den=cfg.calculation_energy_unit)
    self.unserved_capacity_cost = util.unit_convert(10000.0, unit_from_den='megawatt_hour', unit_to_den=cfg.calculation_energy_unit)
    self.dist_net_load_penalty = util.unit_convert(15000.0, unit_from_den='megawatt_hour', unit_to_den=cfg.calculation_energy_unit)
    # this bulk penalty is mostly for transmission
    self.bulk_net_load_penalty = util.unit_convert(5000.0, unit_from_den='megawatt_hour', unit_to_den=cfg.calculation_energy_unit)
    self.ld_upward_imbalance_penalty = util.unit_convert(150.0, unit_from_den='megawatt_hour', unit_to_den=cfg.calculation_energy_unit)
    self.ld_downward_imbalance_penalty = util.unit_convert(50.0, unit_from_den='megawatt_hour', unit_to_den=cfg.calculation_energy_unit)
    self.dispatch_feeders = dispatch_feeders
    self.feeders = [0] + dispatch_feeders
    self.dispatch_geography = dispatch_geography
    self.dispatch_geographies = dispatch_geographies
    self.stdout_detail = cfg.cfgfile.get('opt', 'stdout_detail')
    self.transmission = dispatch_transmission.DispatchTransmission(cfg.transmission_constraint_id, scenario)
    if self.stdout_detail == 'False':
        self.stdout_detail = False
    else:
        self.stdout_detail = True
    self.solve_kwargs = {"keepfiles": False, "tee": False}
def __init__(self, id, drivers, sql_id_table, sql_data_table, demand_technology_id=None, **kwargs):
    self.id = id
    self.drivers = drivers
    self.demand_technology_id = demand_technology_id
    self.sql_id_table = sql_id_table
    self.sql_data_table = sql_data_table
    self.primary_key = 'subsector_id'
    self.data_id_key = 'subsector_id'
    for col, att in util.object_att_from_table(self.sql_id_table, self.id, 'subsector_id'):
        setattr(self, col, att)
    self.in_use_drivers()
    DataMapFunctions.__init__(self, self.data_id_key)
    self.read_timeseries_data()
    self.projected = False
def __init__(self, id, cost_of_capital, **kwargs):
    self.id = id
    for col, att in util.object_att_from_table('SupplyTechs', id):
        setattr(self, col, att)
    if self.cost_of_capital is None:
        self.cost_of_capital = cost_of_capital
    self.add_costs()
    self.efficiency = SupplyTechEfficiency(id)
    self.capacity_factor = SupplyTechCapacityFactor(id)
    self.reference_sales_shares = {}
    if self.id in util.sql_read_table('SupplySalesShareData', 'supply_technology', return_unique=True, return_iterable=True):
        self.reference_sales_shares[1] = SupplySalesShare(id=self.id, supply_node_id=self.supply_node_id, reference=True,
                                                          sql_id_table='SupplySalesShare', sql_data_table='SupplySalesShareData')
    self.reference_sales = {}
    if self.id in util.sql_read_table('SupplySalesData', 'supply_technology', return_unique=True, return_iterable=True):
        self.reference_sales[1] = SupplySales(id=self.id, supply_node_id=self.supply_node_id, reference=True,
                                              sql_id_table='SupplySales', sql_data_table='SupplySalesData')
    StockItem.__init__(self)
def __init__(self, id, drivers, sql_id_table, sql_data_table, scenario=None, demand_technology_id=None):
    self.id = id
    self.drivers = drivers
    self.demand_technology_id = demand_technology_id
    self.sql_id_table = sql_id_table
    self.sql_data_table = sql_data_table
    if scenario:
        self.scenario = scenario
    self.primary_key = 'subsector_id'
    self.data_id_key = 'subsector_id'
    for col, att in util.object_att_from_table(self.sql_id_table, self.id, 'subsector_id'):
        setattr(self, col, att)
    self.in_use_drivers()
    DataMapFunctions.__init__(self, self.data_id_key)
    self.read_timeseries_data()
    self.projected = False
def __init__(self, id, supply_node_id, sql_id_table, sql_data_table, primary_key, data_id_key, reference=False):
    self.id = id
    self.input_type = 'total'
    self.supply_node_id = supply_node_id
    self.sql_id_table = sql_id_table
    self.sql_data_table = sql_data_table
    self.mapped = False
    if reference:
        for col, att in util.object_att_from_table(self.sql_id_table, self.supply_node_id, primary_key):
            setattr(self, col, att)
        DataMapFunctions.__init__(self, data_id_key)
        self.read_timeseries_data(supply_node_id=self.supply_node_id)
        self.raw_values = util.remove_df_levels(self.raw_values, 'supply_technology')
    else:
        # measure specific sales does not require technology filtering
        Abstract.__init__(self, self.id, primary_key=primary_key, data_id_key=data_id_key)
def __init__(self, id, supply_node_id, sql_id_table, sql_data_table, reference=False):
    self.id = id
    self.supply_node_id = supply_node_id
    self.sql_id_table = sql_id_table
    self.sql_data_table = sql_data_table
    self.mapped = False
    self.input_type = 'intensity'
    if reference:
        for col, att in util.object_att_from_table(self.sql_id_table, self.supply_node_id, 'supply_node_id'):
            if att is not None:
                setattr(self, col, att)
        DataMapFunctions.__init__(self, 'supply_technology')
        self.read_timeseries_data()
        self.raw_values = util.remove_df_levels(self.raw_values, ['supply_node', 'supply_technology'])
    else:
        # measure specific sales share does not require technology filtering
        Abstract.__init__(self, self.id)
def __init__(self, id, subsector_id, sql_id_table, sql_data_table, primary_key, data_id_key, reference=False):
    self.id = id
    self.subsector_id = subsector_id
    self.sql_id_table = sql_id_table
    self.sql_data_table = sql_data_table
    self.mapped = False
    if reference:
        for col, att in util.object_att_from_table(self.sql_id_table, self.subsector_id, primary_key):
            if att is not None:
                setattr(self, col, att)
        DataMapFunctions.__init__(self, data_id_key)
        self.read_timeseries_data(subsector_id=self.subsector_id)
        self.raw_values = util.remove_df_levels(self.raw_values, 'technology')
    else:
        self.replaced_demand_tech_id = None
        # measure specific sales share does not require technology filtering
        Abstract.__init__(self, self.id, primary_key=primary_key, data_id_key=data_id_key)
def __init__(self, id, primary_key='id', **filters):
    try:
        for col, att in util.object_att_from_table(self.sql_id_table, id, primary_key):
            # if att is not None:
            setattr(self, col, att)
        DataMapFunctions.__init__(self, primary_key)
        self.data = True
    except:
        self.data = False
    try:
        if len(filters):
            self.read_timeseries_data(**filters)
        else:
            self.read_timeseries_data()
        self.empty = False
    except:
        self.empty = True
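# A hedged usage sketch of the data / empty flags set above: subclasses of this base expose
# .data (parent attributes were loaded) and .empty (the time-series read failed), so callers
# can probe an object before touching its raw_values. The has_usable_data helper is
# illustrative only, not part of the codebase.
def has_usable_data(obj):
    # True only when parent attributes were loaded and time-series rows were read
    return obj.data and not obj.empty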
def __init__(self, id, cost_of_capital, scenario, **kwargs):
    self.id = id
    for col, att in util.object_att_from_table('SupplyTechs', id):
        setattr(self, col, att)
    if self.cost_of_capital is None:
        self.cost_of_capital = cost_of_capital
    self.scenario = scenario
    self.add_costs()
    self.efficiency = SupplyTechEfficiency(id, self.scenario)
    self.capacity_factor = SupplyTechCapacityFactor(id, self.scenario)
    self.co2_capture = SupplyTechCO2Capture(id, self.scenario)
    self.reference_sales_shares = {}
    if self.id in util.sql_read_table('SupplySalesShareData', 'supply_technology_id', return_unique=True, return_iterable=True):
        self.reference_sales_shares[1] = SupplySalesShare(id=self.id, supply_node_id=self.supply_node_id, reference=True,
                                                          sql_id_table='SupplySalesShare', sql_data_table='SupplySalesShareData',
                                                          primary_key='supply_node_id', data_id_key='supply_technology_id',
                                                          scenario=self.scenario)
    self.reference_sales = {}
    if self.id in util.sql_read_table('SupplySalesData', 'supply_technology_id', return_unique=True, return_iterable=True):
        self.reference_sales[1] = SupplySales(id=self.id, supply_node_id=self.supply_node_id, reference=True,
                                              sql_id_table='SupplySales', sql_data_table='SupplySalesData',
                                              primary_key='supply_node_id', data_id_key='supply_technology_id',
                                              scenario=self.scenario)
    StockItem.__init__(self)
    if self.shape_id is not None:
        self.shape = shape.shapes.data[self.shape_id]
def __init__(self, id, cost_of_capital, scenario, **kwargs):
    self.id = id
    for col, att in util.object_att_from_table('SupplyTechs', id):
        setattr(self, col, att)
    if self.cost_of_capital is None:
        self.cost_of_capital = cost_of_capital
    self.scenario = scenario
    self.add_costs()
    self.efficiency = SupplyTechEfficiency(id, self.scenario)
    self.capacity_factor = SupplyTechCapacityFactor(id, self.scenario)
    self.co2_capture = SupplyTechCO2Capture(id, self.scenario)
    self.reference_sales_shares = {}
    if self.id in util.sql_read_table('SupplySalesShareData', 'supply_technology_id', return_unique=True, return_iterable=True):
        self.reference_sales_shares[1] = SupplySalesShare(id=self.id, supply_node_id=self.supply_node_id, reference=True,
                                                          sql_id_table='SupplySalesShare', sql_data_table='SupplySalesShareData',
                                                          primary_key='supply_node_id', data_id_key='supply_technology_id',
                                                          scenario=self.scenario)
    self.reference_sales = {}
    if self.id in util.sql_read_table('SupplySalesData', 'supply_technology_id', return_unique=True, return_iterable=True):
        self.reference_sales[1] = SupplySales(id=self.id, supply_node_id=self.supply_node_id, reference=True,
                                              sql_id_table='SupplySales', sql_data_table='SupplySalesData',
                                              primary_key='supply_node_id', data_id_key='supply_technology_id',
                                              scenario=self.scenario)
    StockItem.__init__(self)
    if self.shape_id is not None:
        self.shape = shape.shapes.data[self.shape_id]
def __init__(self, id, **kwargs):
    self.id = id
    self.sql_id_table = 'DispatchNodeConfig'
    for col, att in util.object_att_from_table(self.sql_id_table, id, primary_key='supply_node_id'):
        setattr(self, col, att)
def __init__(self, id):
    self.id = id
    for col, att in util.object_att_from_table('DemandServiceLink', self.id):
        setattr(self, col, att)
def __init__(self, id, **kwargs):
    self.id = id
    self.sql_id_table = 'DispatchNodeConfig'
    for col, att in util.object_att_from_table(self.sql_id_table, id, primary_key='supply_node_id'):
        setattr(self, col, att)