def forwards(self, orm):
        """Create a column definition for each ESPM field we know about.

        Builds a 'BEDES' schema and one ``seed.column`` row per entry in the
        ESPM flat schema, attaching a ``seed.unit`` where the schema supplies
        a unit name.

        :param orm: South's frozen ORM; models are looked up via
            ``orm['app.model']`` rather than imported directly.
        """
        bedes = orm['seed.schema'].objects.create(name='BEDES')
        espm_schema = espm.schema['flat_schema']
        # Membership test works on the dict directly; no need to
        # materialize .keys() (which is a list under Python 2, O(n) per test).
        espm_float_fields = espm.schema['types']
        # Hoisted out of the loop: the mappable-column set is loop-invariant.
        mappable_columns = m_utils.get_mappable_columns()

        for col in espm_schema:
            unit = None
            if espm_schema[col]:
                # If there's a non-empty string, then that's the unit name.
                # Columns with an entry in the 'types' field are floats;
                # everything else defaults to string.
                unit_type = FLOAT if col in espm_float_fields else STRING
                unit, _ = orm['seed.unit'].objects.get_or_create(
                    unit_name=espm_schema[col],
                    unit_type=unit_type
                )

            # We determine which columns aren't listed in the model itself.
            is_extra_data = col not in mappable_columns
            column = orm['seed.column'].objects.create(
                column_name=col,
                unit=unit,
                is_extra_data=is_extra_data
            )

            bedes.columns.add(column)
    def forwards(self, orm):
        """Attach a column (with unit) to the BEDES schema for each entry
        in the BEDES schema mapping.

        NOTE(review): ``bedes``, ``name_and_unit_desc``, ``unit_enum`` and
        ``m_utils`` are not defined in this method — presumably module-level
        imports in this migration; confirm. ``iteritems`` implies Python 2.
        """
        # Note: Don't use "from appname.models import ModelName".
        # Use orm.ModelName to refer to models in this application,
        # and orm['appname.ModelName'] for models in other applications.

        # Idempotent: reuse the BEDES schema row if a prior run created it.
        schema, _ = orm['seed.schema'].objects.get_or_create(
            name='BEDES',
        )

        mappable_columns = m_utils.get_mappable_columns()

        # Columns already attached to the schema; checked below so that a
        # re-run does not add duplicates.
        orig_cols = schema.columns.all()

        for raw_name, unit_type_str in bedes.schema.iteritems():
            unit = None

            # Split the raw schema name into a column name and a unit
            # description string.
            col_name, unit_desc = name_and_unit_desc(raw_name)
            unit_type = unit_enum(unit_type_str)

            unit, _ = orm['seed.unit'].objects.get_or_create(
                unit_name=unit_desc,
                unit_type=unit_type,
            )

            # Anything not among the mappable model columns is extra_data.
            is_extra_data = col_name not in mappable_columns

            new_col = orm['seed.column'].objects.create(
                column_name=col_name,
                unit=unit,
                is_extra_data=is_extra_data
            )

            if new_col not in orig_cols:
                schema.columns.add(new_col)
Beispiel #3
0
def map_row_chunk(chunk, file_pk, source_type, prog_key, increment, *args,
                  **kwargs):
    """Does the work of matching a mapping to a source type and saving

    :param chunk: list of dict of str. One row's worth of parse data.
    :param file_pk: int, the PK for an ImportFile obj.
    :param source_type: int, represented by either ASSESSED_RAW, or
        PORTFOLIO_RAW.
    :param prog_key: string, key of the progress cache entry to increment.
    :param increment: double, value by which to increment the progress key.
    :param cleaner: (optional), the cleaner class you want to send
    to mapper.map_row. (e.g. turn numbers into floats.).
    :param raw_ids: (optional kwarg), the list of ids in chunk order.

    """
    import_file = ImportFile.objects.get(pk=file_pk)
    save_type = PORTFOLIO_BS
    if source_type == ASSESSED_RAW:
        save_type = ASSESSED_BS

    org = Organization.objects.get(
        pk=import_file.import_record.super_organization.pk)

    mapping, concats = get_column_mappings(org)
    map_cleaner = _build_cleaner(org)

    # For those column mappings which are not db columns, we
    # need to let MCM know that we apply our mapping function to those.
    apply_columns = []

    mappable_columns = get_mappable_columns()
    for item in mapping:
        if mapping[item] not in mappable_columns:
            apply_columns.append(item)

    apply_func = apply_data_func(mappable_columns)

    # Initialize so the post-loop check is safe when ``chunk`` is empty;
    # previously this raised NameError on an empty chunk.
    model = None
    for row in chunk:
        model = mapper.map_row(row,
                               mapping,
                               BuildingSnapshot,
                               cleaner=map_cleaner,
                               concat=concats,
                               apply_columns=apply_columns,
                               apply_func=apply_func,
                               *args,
                               **kwargs)

        model.import_file = import_file
        model.source_type = save_type
        model.clean()
        model.super_organization = import_file.import_record.super_organization
        model.save()
    if model:
        # Make sure that we've saved all of the extra_data column names
        save_column_names(model, mapping=mapping)

    increment_cache(prog_key, increment)
Beispiel #4
0
def save_column_names(bs, mapping=None):
    """Save unique column names for extra_data in this organization.

    Basically this is a record of all the extra_data keys we've ever seen
    for a particular organization.

    :param bs: BuildingSnapshot instance.
    :param mapping: unused here; accepted for backward compatibility with
        callers that pass it (e.g. map_row_chunk).
    """
    from seed.utils import mapping as mapping_utils

    # Hoisted out of the loop: the mappable-column set is loop-invariant,
    # so compute it once instead of once per extra_data key.
    mappable_columns = mapping_utils.get_mappable_columns()
    for key in bs.extra_data:
        # Ascertain whether our key is ``extra_data`` or not.
        is_extra_data = key not in mappable_columns
        Column.objects.get_or_create(
            organization=bs.super_organization,
            column_name=key[:511],  # truncate to fit the column_name field
            is_extra_data=is_extra_data
        )
Beispiel #5
0
# Pairs of (raw Portfolio Manager column name, BuildingSnapshot field name)
# used to translate PM import rows onto the model.
PortfolioRaw_to_BuildingSnapshot = (
    (u'property_id', u'pm_property_id'),
    (u'custom_property_id_1_-_id', u'custom_id_1'),
    (u'property_notes', u'property_notes'),
    (u'year_ending', u'year_ending'),
    (u'property_name', u'property_name'),
    (u'property_floor_area_bldg_park', u'gross_floor_area'),
    (u'address_line_1', u'address_line_1'),
    (u'address_line_2', u'address_line_2'),
    (u'city', u'city'),
    (u'postal_code', u'postal_code'),
    (u'county', u'district'),
    (u'year_built', u'year_built'),
    (u'energy_score', u'energy_score'),
    (u'generation_date', u'generation_date'),
    (u'release_date', u'release_date'),
    (u'state_province', u'state_province'),
    (u'site_eui', u'site_eui'),
    (u'propertys_portfolio_manager_account_holder', u'owner'),
    (u'propertys_portfolio_manager_account_holder_email', u'owner_email'),
    (u'weather_normalized_site_eui', u'site_eui_weather_normalized'),
    (u'weather_normalized_source_eui', u'source_eui_weather_normalized'),
    (u'energy_alerts', u'energy_alerts'),
    (u'third_party_certification', u'building_certification'),
)

# Identity mapping over every mappable column, for snapshot-to-snapshot
# transfers.
BuildingSnapshot_to_BuildingSnapshot = (
    #(u'import_records', map_import_records),
) + tuple([(k, k) for k in get_mappable_columns()])
Beispiel #6
0
:author Dan Gunter <*****@*****.**>
"""
import logging
from collections import defaultdict

from seed.models import PropertyState
from seed.models import TaxLotState

# Recognized linear-distance unit abbreviations.
LINEAR_UNITS = set([u'ft', u'm', u'in'])  # ??more??

from seed.utils.mapping import get_mappable_columns
from seed.lib.mappings.mapping_data import MappingData
from seed.models.deprecate import SYSTEM_MATCH

# TODO: Fix name of this method / remove if possible.
# Identity mapping over every mappable BuildingSnapshot column.
BuildingSnapshot_to_BuildingSnapshot = tuple([(k, k) for k in get_mappable_columns()])

# Field-name lists derived from the canonical mapping data.
md = MappingData()
property_state_fields = [x['name'] for x in md.property_data]
tax_lot_state_fields = [x['name'] for x in md.tax_lot_data]

# Identity mappings for same-type state transfers.
PropertyState_to_PropertyState = tuple([(k, k) for k in property_state_fields])
TaxLotState_to_TaxLotState = tuple([(k, k) for k in tax_lot_state_fields])

_log = logging.getLogger(__name__)


def get_attrs_with_mapping(data_set_buildings, mapping):
    """Returns a dictionary of attributes from each data_set_building.

    :param buildings: list, group of BS instances to merge.
Beispiel #7
0
:copyright (c) 2014 - 2016, The Regents of the University of California, through Lawrence Berkeley National Laboratory (subject to receipt of any required approvals from the U.S. Department of Energy) and contributors. All rights reserved.  # NOQA
:author Dan Gunter <*****@*****.**>
"""
import logging
from collections import defaultdict

from seed.lib.mappings.mapping_data import MappingData
from seed.models import PropertyState
from seed.models import TaxLotState
from seed.utils.mapping import get_mappable_columns

# Recognized linear-distance unit abbreviations.
LINEAR_UNITS = {u'ft', u'm', u'in'}

# TODO: Fix name of this method / remove if possible.
# Identity mapping over every mappable BuildingSnapshot column.
BuildingSnapshot_to_BuildingSnapshot = tuple([(k, k)
                                              for k in get_mappable_columns()])

# Field-name lists derived from the canonical mapping data.
md = MappingData()
property_state_fields = [x['name'] for x in md.property_data]
tax_lot_state_fields = [x['name'] for x in md.tax_lot_data]

# Identity mappings for same-type state transfers.
PropertyState_to_PropertyState = tuple([(k, k) for k in property_state_fields])
TaxLotState_to_TaxLotState = tuple([(k, k) for k in tax_lot_state_fields])

_log = logging.getLogger(__name__)


def get_attrs_with_mapping(data_set_buildings, mapping):
    """
    Returns a dictionary of attributes from each data_set_building.
Beispiel #8
0
def map_row_chunk(chunk, file_pk, source_type, prog_key, increment, *args, **kwargs):
    """Does the work of matching a mapping to a source type and saving

    :param chunk: list of dict of str. One row's worth of parse data.
    :param file_pk: int, the PK for an ImportFile obj.
    :param source_type: int, represented by either ASSESSED_RAW, or
        PORTFOLIO_RAW.
    :param prog_key: string, key of the progress key
    :param increment: double, value by which to increment progress key
    :param cleaner: (optional), the cleaner class you want to send
    to mapper.map_row. (e.g. turn numbers into floats.).
    :param raw_ids: (optional kwarg), the list of ids in chunk order.

    """

    import_file = ImportFile.objects.get(pk=file_pk)
    save_type = PORTFOLIO_BS
    if source_type == ASSESSED_RAW:
        save_type = ASSESSED_BS

    org = Organization.objects.get(
        pk=import_file.import_record.super_organization.pk
    )

    mapping, concats = get_column_mappings(org)
    map_cleaner = _build_cleaner(org)

    # For those column mappings which are not db columns, we
    # need to let MCM know that we apply our mapping function to those.
    apply_columns = []

    mappable_columns = get_mappable_columns()
    for item in mapping:
        if mapping[item] not in mappable_columns:
            apply_columns.append(item)

    apply_func = apply_data_func(mappable_columns)

    # Initialize so the post-loop check is safe when ``chunk`` is empty;
    # previously this raised NameError on an empty chunk.
    model = None
    for row in chunk:
        model = mapper.map_row(
            row,
            mapping,
            BuildingSnapshot,
            cleaner=map_cleaner,
            concat=concats,
            apply_columns=apply_columns,
            apply_func=apply_func,
            *args,
            **kwargs
        )

        model.import_file = import_file
        model.source_type = save_type
        model.clean()
        model.super_organization = import_file.import_record.super_organization
        model.save()
    if model:
        # Make sure that we've saved all of the extra_data column names
        save_column_names(model, mapping=mapping)

    increment_cache(prog_key, increment)
Beispiel #9
0
# Pairs of (raw Portfolio Manager column name, BuildingSnapshot field name)
# used to translate PM import rows onto the model.
PortfolioRaw_to_BuildingSnapshot = (
    (u'property_id', u'pm_property_id'),
    (u'custom_property_id_1_-_id', u'custom_id_1'),
    (u'property_notes', u'property_notes'),
    (u'year_ending', u'year_ending'),
    (u'property_name', u'property_name'),
    (u'property_floor_area_bldg_park', u'gross_floor_area'),
    (u'address_line_1', u'address_line_1'),
    (u'address_line_2', u'address_line_2'),
    (u'city', u'city'),
    (u'postal_code', u'postal_code'),
    (u'county', u'district'),
    (u'year_built', u'year_built'),
    (u'energy_score', u'energy_score'),
    (u'generation_date', u'generation_date'),
    (u'release_date', u'release_date'),
    (u'state_province', u'state_province'),
    (u'site_eui', u'site_eui'),
    (u'propertys_portfolio_manager_account_holder', u'owner'),
    (u'propertys_portfolio_manager_account_holder_email', u'owner_email'),
    (u'weather_normalized_site_eui', u'site_eui_weather_normalized'),
    (u'weather_normalized_source_eui', u'source_eui_weather_normalized'),
    (u'energy_alerts', u'energy_alerts'),
    (u'third_party_certification', u'building_certification'),
)

# Identity mapping over every mappable column, for snapshot-to-snapshot
# transfers.
BuildingSnapshot_to_BuildingSnapshot = (
    #(u'import_records', map_import_records),
) + tuple([(k, k) for k in get_mappable_columns()])
Beispiel #10
0
# Values are expected data types so we can present reasonable approximations
# of validation in the frontend.

# Pairs of (raw Portfolio Manager column name, BuildingSnapshot field name)
# used to translate PM import rows onto the model.
PortfolioRaw_to_BuildingSnapshot = (
    (u'property_id', u'pm_property_id'),
    (u'custom_property_id_1_-_id', u'custom_id_1'),
    (u'property_notes', u'property_notes'),
    (u'year_ending', u'year_ending'),
    (u'property_name', u'property_name'),
    (u'property_floor_area_bldg_park', u'gross_floor_area'),
    (u'address_line_1', u'address_line_1'),
    (u'address_line_2', u'address_line_2'),
    (u'city', u'city'),
    (u'postal_code', u'postal_code'),
    (u'county', u'district'),
    (u'year_built', u'year_built'),
    (u'energy_score', u'energy_score'),
    (u'generation_date', u'generation_date'),
    (u'release_date', u'release_date'),
    (u'state_province', u'state_province'),
    (u'site_eui', u'site_eui'),
    (u'propertys_portfolio_manager_account_holder', u'owner'),
    (u'propertys_portfolio_manager_account_holder_email', u'owner_email'),
    (u'weather_normalized_site_eui', u'site_eui_weather_normalized'),
    (u'weather_normalized_source_eui', u'source_eui_weather_normalized'),
    (u'energy_alerts', u'energy_alerts'),
    (u'third_party_certification', u'building_certification'),
)

# Identity mapping over every mappable BuildingSnapshot column.
BuildingSnapshot_to_BuildingSnapshot = tuple([(k, k) for k in get_mappable_columns()])