Example #1
File: columns.py  Project: mmclark/seed
    def save_column_names(model_obj):
        """Save unique column names for extra_data in this organization.

        Basically this is a record of all the extra_data keys we've ever seen
        for a particular organization.

        :param model_obj: model_obj instance (either PropertyState or TaxLotState).
        """

        md = MappingData()

        for key in model_obj.extra_data:
            # Ascertain if our key is ``extra_data`` or not.

            # This is doing way too much work to find whether the field is extra data, especially
            # since that has probably been asked many times before.
            db_field = md.find_column(model_obj.__class__.__name__, key)
            is_extra_data = not db_field  # a db column is never extra_data

            # get the name of the model object as a string to save into the database
            Column.objects.get_or_create(
                column_name=key[:511],
                is_extra_data=is_extra_data,
                organization=model_obj.organization,
                table_name=model_obj.__class__.__name__
            )
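A minimal usage sketch for the method above, assuming it is exposed as a classmethod on the Column model (as the surrounding file suggests); `org` is a placeholder for an existing Organization instance and the extra_data key/value are illustrative:

    from seed.models import Column, PropertyState

    # `org` is assumed to be an existing Organization instance.
    state = PropertyState.objects.filter(organization=org).first()
    if state is not None:
        state.extra_data['Occupancy Type'] = 'Office'   # add an illustrative extra_data key
        state.save()
        # Creates (or reuses) one Column row per extra_data key for this organization.
        Column.save_column_names(state)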
Example #2
    def save_column_names(model_obj):
        """Save unique column names for extra_data in this organization.

        This is a record of all the extra_data keys we have ever seen
        for a particular organization.

        :param model_obj: model_obj instance (either PropertyState or TaxLotState).
        """

        md = MappingData()

        for key in model_obj.extra_data:
            # Ascertain if our key is ``extra_data`` or not.

            # This is doing way too much work to find whether the field is extra data, especially
            # since that has probably been asked many times before.
            db_field = md.find_column(model_obj.__class__.__name__, key)
            is_extra_data = not db_field  # a db column is never extra_data

            # handle the special edge-case where an old organization may have duplicate columns
            # in the database. We should make this a migration in the future and put a validation
            # in the db.
            for i in range(0, 5):
                while True:
                    try:
                        Column.objects.get_or_create(
                            column_name=key[:511],
                            is_extra_data=is_extra_data,
                            organization=model_obj.organization,
                            table_name=model_obj.__class__.__name__
                        )
                    except Column.MultipleObjectsReturned:
                        _log.debug(
                            "Column.MultipleObjectsReturned for {} in save_column_names".format(
                                key[:511]))

                        columns = Column.objects.filter(column_name=key[:511],
                                                        is_extra_data=is_extra_data,
                                                        organization=model_obj.organization,
                                                        table_name=model_obj.__class__.__name__)
                        for c in columns:
                            if not ColumnMapping.objects.filter(
                                    Q(column_raw=c) | Q(column_mapped=c)).exists():
                                _log.debug("Deleting column object {}".format(c.column_name))
                                c.delete()

                        # Check whether more than one column remains
                        if Column.objects.filter(
                                column_name=key[:511],
                                is_extra_data=is_extra_data,
                                organization=model_obj.organization,
                                table_name=model_obj.__class__.__name__).count() > 1:
                            raise Exception(
                                "Could not fix duplicate columns for {}. Contact dev team".format(
                                    key))

                        continue

                    break
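The duplicate-column cleanup inside the retry loop can also be read on its own; below is a hedged sketch of the same idea as a standalone helper (the function name is illustrative, not part of the project):

    from django.db.models import Q
    from seed.models import Column, ColumnMapping

    def _cleanup_duplicate_columns(column_name, is_extra_data, organization, table_name):
        """Delete duplicate Column rows that no ColumnMapping references.

        Returns True when at most one row remains, i.e. get_or_create can be retried safely.
        """
        columns = Column.objects.filter(
            column_name=column_name,
            is_extra_data=is_extra_data,
            organization=organization,
            table_name=table_name,
        )
        for c in columns:
            if not ColumnMapping.objects.filter(Q(column_raw=c) | Q(column_mapped=c)).exists():
                c.delete()
        # Re-query (via .all()) so the count reflects the deletions above.
        return columns.all().count() <= 1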
Example #3
    def _retrieve_db_columns():
        """
        Returns a predefined list of columns that can be in the database. This is a hardcoded list of all the
        database fields along with additional information such as the display name.

        :return: dict
        """

        # Grab the default columns and their details
        hard_coded_columns = copy.deepcopy(VIEW_COLUMNS_PROPERTY)

        md = MappingData()
        for c in hard_coded_columns:
            if not md.find_column(c['table'], c['name']):
                print "Could not find column field in database for {}".format(c)

        return hard_coded_columns
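A hedged usage sketch, assuming the method is a staticmethod on Column and that each entry in VIEW_COLUMNS_PROPERTY carries at least 'table' and 'name' keys (as the loop above implies):

    # Hypothetical call: dump the hardcoded table/column pairs for inspection.
    for col in Column._retrieve_db_columns():
        print("{}.{}".format(col.get('table'), col.get('name')))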
Example #4
#!/usr/bin/env python
# encoding: utf-8
"""
:copyright (c) 2014 - 2018, The Regents of the University of California, through Lawrence Berkeley National Laboratory (subject to receipt of any required approvals from the U.S. Department of Energy) and contributors. All rights reserved.  # NOQA
:author Dan Gunter <*****@*****.**>
"""
import logging
from collections import defaultdict

from seed.lib.mappings.mapping_data import MappingData
from seed.models import (
    PropertyState,
    TaxLotState,
)

md = MappingData()
property_state_fields = [x['name'] for x in sorted(md.property_state_data, key=lambda d: d['name'])]
tax_lot_state_fields = [x['name'] for x in md.tax_lot_state_data]
# TODO: Move these methods to the MappingData object and return from there
PropertyState_to_PropertyState = tuple([
    (k, k) for k in sorted(property_state_fields)
])
TaxLotState_to_TaxLotState = tuple([(k, k) for k in tax_lot_state_fields])

_log = logging.getLogger(__name__)


def get_attrs_with_mapping(data_set_buildings, mapping):
    """
    Returns a dictionary of attributes from each data_set_building.
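For reference, the module-level tuples built above are plain (name, name) pairs over the field names; an illustrative and abbreviated picture of their shape, with field names that are assumed rather than exhaustive:

    # Illustrative shape only; the real tuple is derived from MappingData at import time.
    PropertyState_to_PropertyState = (
        ('address_line_1', 'address_line_1'),
        ('gross_floor_area', 'gross_floor_area'),
        ('site_eui', 'site_eui'),
    )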
Example #5
    def _column_fields_to_columns(fields, organization):
        """
        Process a list of dictionaries into Column objects. This method will create the columns
        if they did not previously exist. Note that the field dicts are mutated in place, and the
        method also returns them in a new list.

        .. example:

            test_map = [
                    {
                        'from_field': 'eui',
                        'from_units': 'kBtu/ft**2/year', # optional
                        'to_field': 'site_eui',
                        'to_table_name': 'PropertyState',
                    },
                    {
                        'from_field': 'address',
                        'to_field': 'address',
                        'to_table_name': 'TaxLotState'
                    },
                    {
                        'from_field': 'Wookiee',
                        'to_field': 'Dothraki',
                        'to_table_name': 'PropertyState',
                    },
                ]

        Args:
            fields: list of dicts containing to and from fields
            organization: organization model instance

        Returns:
            list of dicts, each with `to_column_object` and `from_column_object` lists attached.
        """
        def select_col_obj(column_name, table_name, organization_column):
            if organization_column:
                return [organization_column]
            else:
                # Try for "global" column definitions, e.g. BEDES. - Note the BEDES are not
                # loaded into the database as of 9/8/2016 so not sure if this code is ever
                # exercised
                obj = Column.objects.filter(organization=None,
                                            column_name=column_name).first()

                if obj:
                    # create organization mapped column
                    obj.pk = None
                    obj.id = None
                    obj.organization = organization
                    obj.save()

                    return [obj]
                else:
                    if table_name:
                        obj, _ = Column.objects.get_or_create(
                            organization=organization,
                            column_name=column_name,
                            table_name=table_name,
                            is_extra_data=is_extra_data,
                        )
                        return [obj]
                    else:
                        obj, _ = Column.objects.get_or_create(
                            organization=organization,
                            column_name=column_name,
                            is_extra_data=is_extra_data,
                        )
                        return [obj]

        md = MappingData()

        # Container to store the dicts with the Column object
        new_data = []

        for field in fields:
            new_field = field

            # find the mapping data column (i.e. the database field) that matches; if it does not
            # exist, then set the extra data flag to true
            db_field = md.find_column(field['to_table_name'],
                                      field['to_field'])
            is_extra_data = not db_field  # a db column is never extra_data

            try:
                to_org_col, _ = Column.objects.get_or_create(
                    organization=organization,
                    column_name=field['to_field'],
                    table_name=field['to_table_name'],
                    is_extra_data=is_extra_data)
            except Column.MultipleObjectsReturned:
                _log.debug("More than one to_column found for {}.{}".format(
                    field['to_table_name'], field['to_field']))
                # raise Exception("Cannot handle more than one to_column returned for {}.{}".format(
                #     field['to_field'], field['to_table_name']))

                # TODO: write something to remove the duplicate columns
                to_org_col = Column.objects.filter(
                    organization=organization,
                    column_name=field['to_field'],
                    table_name=field['to_table_name'],
                    is_extra_data=is_extra_data).first()
                _log.debug("Grabbing the first to_column")

            try:
                # the from column is the field in the import file, thus the table_name needs to be
                # blank. Eventually need to handle passing in import_file_id
                from_org_col, _ = Column.objects.get_or_create(
                    organization=organization,
                    table_name__in=[None, ''],
                    column_name=field['from_field'],
                    units_pint=field.get('from_units'),  # might be None
                    is_extra_data=False  # data from header rows in the files are NEVER extra data
                )
            except Column.MultipleObjectsReturned:
                _log.debug("More than one from_column found for {}.{}".format(
                    field['to_table_name'], field['to_field']))

                # TODO: write something to remove the duplicate columns
                from_org_col = Column.objects.filter(
                    organization=organization,
                    table_name__in=[None, ''],
                    column_name=field['from_field'],
                    units_pint=field.get('from_units'),  # might be None
                    is_extra_data=False).first()  # header-row columns are never extra data
                _log.debug("Grabbing the first from_column")

            new_field['to_column_object'] = select_col_obj(
                field['to_field'], field['to_table_name'], to_org_col)
            new_field['from_column_object'] = select_col_obj(
                field['from_field'], "", from_org_col)
            new_data.append(new_field)

        return new_data
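A hedged usage sketch, reusing the test_map from the docstring and assuming, for illustration, that the method is reachable as a static helper on the Column model; `organization` stands in for an existing Organization instance:

    # Hypothetical call; `organization` is assumed to exist.
    test_map = [
        {'from_field': 'eui', 'to_field': 'site_eui', 'to_table_name': 'PropertyState'},
        {'from_field': 'address', 'to_field': 'address', 'to_table_name': 'TaxLotState'},
    ]
    mapped = Column._column_fields_to_columns(test_map, organization)
    for field in mapped:
        print("{} -> {}".format(field['from_field'], field['to_column_object']))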
Example #6
File: columns.py  Project: mmclark/seed
    def _column_fields_to_columns(fields, organization):
        """
        Process a list of dictionaries into Column objects. This method will create the columns
        if they did not previously exist. Note that the field dicts are mutated in place, and the
        method also returns them in a new list.

        .. example:

            test_map = [
                    {
                        'from_field': 'eui',
                        'to_field': 'site_eui',
                        'to_table_name': 'PropertyState',
                    },
                    {
                        'from_field': 'address',
                        'to_field': 'address',
                        'to_table_name': 'TaxLotState'
                    },
                    {
                        'from_field': 'Wookiee',
                        'to_field': 'Dothraki',
                        'to_table_name': 'PropertyState',
                    },
                ]

        Args:
            fields: list of dicts containing to and from fields
            organization: organization model instance

        Returns:
            list of dicts, each with `to_column_object` and `from_column_object` lists attached.
        """

        def select_col_obj(column_name, table_name, organization_column):
            if organization_column:
                return [organization_column]
            else:
                # Try for "global" column definitions, e.g. BEDES. - Note the BEDES are not
                # loaded into the database as of 9/8/2016 so not sure if this code is ever
                # exercised
                obj = Column.objects.filter(organization=None, column_name=column_name).first()

                if obj:
                    # create organization mapped column
                    obj.pk = None
                    obj.id = None
                    obj.organization = organization
                    obj.save()

                    return [obj]
                else:
                    if table_name:
                        obj, _ = Column.objects.get_or_create(
                            organization=organization,
                            column_name=column_name,
                            table_name=table_name,
                            is_extra_data=is_extra_data,
                        )
                        return [obj]
                    else:
                        obj, _ = Column.objects.get_or_create(
                            organization=organization,
                            column_name=column_name,
                            is_extra_data=is_extra_data,
                        )
                        return [obj]


        md = MappingData()

        # Container to store the dicts with the Column object
        new_data = []

        for field in fields:
            new_field = field

            # find the mapping data column (i.e. the database field) that matches; if it does not
            # exist, then set the extra data flag to true
            db_field = md.find_column(field['to_table_name'], field['to_field'])
            is_extra_data = not db_field  # a db column is never extra_data

            # find the to_column
            to_org_col = Column.objects.filter(organization=organization,
                                               column_name=field['to_field'],
                                               table_name=field['to_table_name'],
                                               is_extra_data=is_extra_data).first()
            from_org_col = Column.objects.filter(organization=organization,
                                                 column_name=field['from_field'],
                                                 is_extra_data=is_extra_data).first()

            new_field['to_column_object'] = select_col_obj(
                field['to_field'],
                field['to_table_name'],
                to_org_col
            )
            new_field['from_column_object'] = select_col_obj(
                field['from_field'],
                "",
                from_org_col)

            new_data.append(new_field)

        return new_data
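The select_col_obj fallback above clones a "global" column (organization=None) into the organization by clearing its primary key before saving; below is a minimal standalone sketch of that Django cloning idiom, with the column name chosen for illustration and `organization` assumed to exist:

    # Minimal sketch of the clone-by-clearing-pk idiom used in select_col_obj.
    global_col = Column.objects.filter(organization=None, column_name='site_eui').first()
    if global_col is not None:
        global_col.pk = None              # clearing pk (and id) makes save() INSERT a new row
        global_col.id = None
        global_col.organization = organization
        global_col.save()                 # the organization now has its own copy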
Example #7
    def process(self, organization_id, cycle, property_view=None):
        """
        Process the building file that was uploaded and create the correct models for the object

        :param organization_id: integer, ID of organization
        :param cycle: object, instance of cycle object
        :param property_view: existing PropertyView that will be updated by merging its state with the newly created state
        :return: list, [status, (PropertyState|None), (PropertyView|None), messages]
        """

        Parser = self.BUILDING_FILE_PARSERS.get(self.file_type, None)
        if not Parser:
            acceptable_file_types = ', '.join(
                map(dict(self.BUILDING_FILE_TYPES).get, self.BUILDING_FILE_PARSERS.keys())
            )
            return False, None, None, "File format was not one of: {}".format(acceptable_file_types)

        parser = Parser()
        parser.import_file(self.file.path)
        parser_args = []
        parser_kwargs = {}
        if self.file_type == self.BUILDINGSYNC:
            parser_args.append(BuildingSync.BRICR_STRUCT)
        data, errors, messages = parser.process(*parser_args, **parser_kwargs)

        if errors or not data:
            return False, None, None, messages

        # sub-select the data that are needed to create the PropertyState object
        md = MappingData()
        create_data = {"organization_id": organization_id}
        extra_data = {}
        for k, v in data.items():
            # Skip the keys for measures, reports, and scenarios; they are processed later
            if k in ['measures', 'reports', 'scenarios']:
                continue

            if md.find_column('PropertyState', k):
                create_data[k] = v
            else:
                # TODO: break out columns in the extra data that should be part of the
                # PropertyState and which ones should be added to some other class that
                # doesn't exist yet.
                extra_data[k] = v
                # create columns, if needed, for the extra_data fields

                Column.objects.get_or_create(
                    organization_id=organization_id,
                    column_name=k,
                    table_name='PropertyState',
                    is_extra_data=True,
                )

        # always create the new object, then decide if we need to merge it.
        # create a new property_state for the object and promote to a new property_view
        property_state = PropertyState.objects.create(**create_data)
        property_state.extra_data = extra_data
        property_state.save()

        PropertyAuditLog.objects.create(
            organization_id=organization_id,
            state_id=property_state.id,
            name='Import Creation',
            description='Creation from Import file.',
            import_filename=self.file.path,
            record_type=AUDIT_IMPORT
        )

        # set the property_state_id so that we can list the building files by properties
        self.property_state_id = property_state.id
        self.save()

        # add in the measures
        for m in data.get('measures', []):
            # Find the measure in the database
            try:
                measure = Measure.objects.get(
                    category=m['category'], name=m['name'], organization_id=organization_id,
                )
            except Measure.DoesNotExist:
                # TODO: Deal with it
                continue

            # Add the measure to the join table.
            # Need to determine what constitutes the unique measure for a property
            join, _ = PropertyMeasure.objects.get_or_create(
                property_state_id=self.property_state_id,
                measure_id=measure.pk,
                implementation_status=PropertyMeasure.str_to_impl_status(m['implementation_status']),
                application_scale=PropertyMeasure.str_to_application_scale(
                    m.get('application_scale_of_application',
                          PropertyMeasure.SCALE_ENTIRE_FACILITY)
                ),
                category_affected=PropertyMeasure.str_to_category_affected(
                    m.get('system_category_affected', PropertyMeasure.CATEGORY_OTHER)
                ),
                recommended=m.get('recommended', 'false') == 'true',
            )
            join.description = m.get('description')
            join.property_measure_name = m.get('property_measure_name')
            join.cost_mv = m.get('mv_cost')
            join.cost_total_first = m.get('measure_total_first_cost')
            join.cost_installation = m.get('measure_installation_cost')
            join.cost_material = m.get('measure_material_cost')
            join.cost_capital_replacement = m.get('measure_capital_replacement_cost')
            join.cost_residual_value = m.get('measure_residual_value')
            join.save()

        # add in scenarios
        for s in data.get('scenarios', []):
            # measures = models.ManyToManyField(PropertyMeasure)

            # {'reference_case': u'Baseline', 'annual_savings_site_energy': None,
            #  'measures': [], 'id': u'Baseline', 'name': u'Baseline'}

            scenario, _ = Scenario.objects.get_or_create(
                name=s.get('name'),
                property_state_id=self.property_state_id,
            )
            scenario.description = s.get('description')
            scenario.annual_site_energy_savings = s.get('annual_site_energy_savings')
            scenario.annual_source_energy_savings = s.get('annual_source_energy_savings')
            scenario.annual_cost_savings = s.get('annual_cost_savings')
            scenario.summer_peak_load_reduction = s.get('summer_peak_load_reduction')
            scenario.winter_peak_load_reduction = s.get('winter_peak_load_reduction')
            scenario.hdd = s.get('hdd')
            scenario.hdd_base_temperature = s.get('hdd_base_temperature')
            scenario.cdd = s.get('cdd')
            scenario.cdd_base_temperature = s.get('cdd_base_temperature')

            # temporal_status = models.IntegerField(choices=TEMPORAL_STATUS_TYPES,
            #                                       default=TEMPORAL_STATUS_CURRENT)

            if s.get('reference_case'):
                ref_case = Scenario.objects.filter(
                    name=s.get('reference_case'),
                    property_state_id=self.property_state_id,
                )
                if len(ref_case) == 1:
                    scenario.reference_case = ref_case.first()

            # set the list of measures
            for measure_name in s['measures']:
                # find the join measure in the database
                measure = None
                try:
                    measure = PropertyMeasure.objects.get(
                        property_state_id=self.property_state_id,
                        property_measure_name=measure_name,
                    )
                except PropertyMeasure.DoesNotExist:
                    # PropertyMeasure is not in database, skipping silently
                    continue

                scenario.measures.add(measure)

            scenario.save()

        if property_view:
            # create a new blank state to merge the two together
            merged_state = PropertyState.objects.create(organization_id=organization_id)

            # assume the same cycle id as the former state.
            # should merge_state also copy/move over the relationships?
            merged_state, changed = merge_state(merged_state, property_view.state, property_state,
                                                get_state_attrs([property_view.state, property_state]))

            # log the merge
            # Not a fan of the parent1/parent2 logic here; it seems error prone. The same logic
            # also appears here: https://github.com/SEED-platform/seed/blob/63536e99cf5be3a9a86391c5cead6dd4ff74462b/seed/data_importer/tasks.py#L1549
            PropertyAuditLog.objects.create(
                organization_id=organization_id,
                parent1=PropertyAuditLog.objects.filter(state=property_view.state).first(),
                parent2=PropertyAuditLog.objects.filter(state=property_state).first(),
                parent_state1=property_view.state,
                parent_state2=property_state,
                state=merged_state,
                name='System Match',
                description='Automatic Merge',
                import_filename=None,
                record_type=AUDIT_IMPORT
            )

            property_view.state = merged_state
            property_view.save()

            merged_state.merge_state = MERGE_STATE_MERGED
            merged_state.save()

            # set the property_state to the new one
            property_state = merged_state
        elif not property_view:
            property_view = property_state.promote(cycle)
        else:
            # invalid arguments, must pass both or neither
            return False, None, None, "Invalid arguments passed to BuildingFile.process()"

        return True, property_state, property_view, messages
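A hedged usage sketch of the method above, assuming `building_file` is a saved BuildingFile instance, `org` an Organization, and `cycle` a Cycle; the unpacking follows the documented return signature:

    # Hypothetical call; names are illustrative.
    status, property_state, property_view, messages = building_file.process(
        org.id, cycle, property_view=None
    )
    if not status:
        print("Import failed: {}".format(messages))
    else:
        print("Created PropertyView {}".format(property_view.id))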