Пример #1
0
def construct_export_filename(entity, extension):
    """
    Build a timestamped export file name for a DB entity.

    :param entity: object with a ``name`` attribute; spaces in the name are
        replaced with underscores so the result is filesystem-friendly
    :param extension: file extension to append (without the leading dot)
    :return: string of the form ``<entity_name>_<timestamp>.<extension>``
    """
    sanitized_name = entity.name.replace(" ", "_")
    return "{db_entity}_{timestamp}.{extension}".format(
        db_entity=sanitized_name,
        timestamp=timestamp(),
        extension=extension)
Пример #2
0
def _export_query_results(job, class_name, unique_id, filename, query, event='queryResultExportCompleted'):

    try:
        job.status = "Exporting"
        job.save()

        # add timestamp to filename to prevent conflicts
        filename = filename + "_" + timestamp()
        result = OGRETL().export(filename, query, 'CSV', geom=None)

        job.data = '/' + filename + ".csv"
        job.save()

        send_message_to_client(job.user.id, dict(event=event,
                                                 class_name=class_name,
                                                 unique_id=unique_id,
                                                 job_id=str(job.hashid)))

        job.status = 'Complete'

    except Exception, e:
        logger.error(e)
        job.status = "Failed"

        exc_type, exc_value, exc_traceback = sys.exc_info()
        readable_exception = traceback.format_exception(exc_type, exc_value, exc_traceback)
        job.data = readable_exception
        job.save()

        send_message_to_client(job.user.id, dict(event=job.type + " failed", trace=readable_exception))
def _export_query_results(job, class_name, unique_id, filename, query, event='queryResultExportCompleted'):

    try:
        job.status = "Exporting"
        job.save()

        # add timestamp to filename to prevent conflicts
        filename = filename + "_" + timestamp()
        result = OGRETL().export(filename, query, 'CSV', geom=None)

        job.data = '/' + filename + ".csv"
        job.save()

        send_message_to_client(job.user.id, dict(event=event,
                                                 class_name=class_name,
                                                 unique_id=unique_id,
                                                 job_id=str(job.hashid)))

        job.status = 'Complete'

    except Exception, e:
        logger.error(e)
        job.status = "Failed"

        exc_type, exc_value, exc_traceback = sys.exc_info()
        readable_exception = traceback.format_exception(exc_type, exc_value, exc_traceback)
        job.data = readable_exception
        job.save()

        send_message_to_client(job.user.id, dict(event=job.type + " failed", trace=readable_exception))
def construct_export_filename(entity, extension):
    """
    Return a timestamped export file name for the given DB entity.

    :param entity: object exposing a ``name`` attribute; embedded spaces
        become underscores in the resulting file name
    :param extension: extension appended after the final dot
    :return: ``<entity_name>_<timestamp>.<extension>``
    """
    underscored = entity.name.replace(" ", "_")
    export_file_name = "{db_entity}_{timestamp}.{extension}".format(
        db_entity=underscored, timestamp=timestamp(), extension=extension)
    return export_file_name
Пример #5
0
def dump_built_forms():
    """
    Exports all of the content of all of the tables that represent BuiltForm to a SQL file that can be most easily
    distributed to other machines and loaded more quickly for tests. Avoids redundantly running the costly calculation
    of the built form relationships.

    :return:
    """
    # Table names (without the "footprint_" prefix) that together hold all
    # BuiltForm-related data.
    # NOTE(review): "builform_tags" looks like a typo for "builtform_tags" --
    # confirm against the actual schema before changing.
    built_form_tables = [
        "building", "buildingattributes", "buildingpercent", "buildingtype",
        "buildingtypecategory", "buildingusedefinition", "buildingusepercent",
        "builtform", "builform_tags", "builtformset",
        "builtformset_built_forms", "flatbuiltform", "infrastructure",
        "infrastructureattributeset", "infrastructurepercent",
        "infrastructuretype", "placetype", "placetypecomponent",
        "placetypecomponentpercent", "medium"
    ]

    # Build the "-t footprint_<table> -t footprint_<table> ..." option string
    # that restricts pg_dump to just these tables.
    formatted_list_of_tables = ""
    for table_name in built_form_tables:
        formatted_list_of_tables += '-t footprint_{table_name} '.format(
            table_name=table_name)

    # Shared substitutions for every command template below.
    dump_args = dict(
        database=settings.DATABASES['default']['NAME'],
        formatted_list_of_tables=formatted_list_of_tables,
        output_file="{sql_path}/built_form_export_{timestamp}.sql".format(
            sql_path=settings.SQL_PATH, timestamp=timestamp()),
        tmp_db_name="urbanfootprint_builtform_dump")

    print dump_args

    # Step 1: dump the selected tables from the main database.
    dump_command = "pg_dump {database} {formatted_list_of_tables} > {output_file}"

    # Step 2: create a scratch database from the PostGIS template.
    create_tmp_db = "createdb {tmp_db_name} --template template_postgis"

    # Step 3: load the dump into the scratch database.
    restore_dump_to_tmp_db = "psql {tmp_db_name} -f {output_file}"

    # Step 4: remove media rows unrelated to built forms from the scratch copy.
    delete_unrelated_media = """psql {tmp_db_name} -c "DELETE from footprint_medium
            where \"key\"::text NOT LIKE 'built_form%'"
            """

    # Step 5: delete the first dump so the same path can be rewritten below.
    delete_dumpfile = "rm {output_file}"

    # Step 6: re-dump the now-pruned tables to the same output file.
    dump_isolated_built_form_relations = "pg_dump {tmp_db_name} {formatted_list_of_tables} > {output_file}"

    # Step 7: drop the scratch database (issued via psql on the main database).
    drop_tmp_db = "psql {database} -c \"drop database {tmp_db_name}\""

    # Run the steps strictly in order; each failure is printed but does not
    # abort the sequence (best effort). shell=True is required because the
    # commands rely on shell redirection (">").
    for command in [
            dump_command, create_tmp_db, restore_dump_to_tmp_db,
            delete_unrelated_media, delete_dumpfile,
            dump_isolated_built_form_relations, drop_tmp_db
    ]:
        try:
            subprocess.call(command.format(**dump_args), shell=True)
        except Exception, E:
            print E
Пример #6
0
def dump_built_forms():
    """
    Exports all of the content of all of the tables that represent BuiltForm to a SQL file that can be most easily
    distributed to other machines and loaded more quickly for tests. Avoids redundantly running the costly calculation
    of the built form relationships.

    :return:
    """
    # Table names (without the "footprint_" prefix) covering all
    # BuiltForm-related data.
    # NOTE(review): "builform_tags" looks like a typo for "builtform_tags" --
    # confirm against the actual schema before changing.
    built_form_tables = [
        "building", "buildingattributes", "buildingpercent", "buildingtype", "buildingtypecategory",
        "buildingusedefinition", "buildingusepercent", "builtform", "builform_tags",
        "builtformset", "builtformset_built_forms", "flatbuiltform", "infrastructure", "infrastructureattributeset",
        "infrastructurepercent", "infrastructuretype", "placetype", "placetypecomponent", "placetypecomponentpercent",
        "medium"
    ]

    # Build the "-t footprint_<table> ..." option string restricting
    # pg_dump to just these tables.
    formatted_list_of_tables = ""
    for table_name in built_form_tables:
        formatted_list_of_tables += '-t footprint_{table_name} '.format(table_name=table_name)

    # Shared substitutions for every command template below.
    dump_args = dict(
        database=settings.DATABASES['default']['NAME'],
        formatted_list_of_tables=formatted_list_of_tables,
        output_file="{sql_path}/built_form_export_{timestamp}.sql".format(
            sql_path=settings.SQL_PATH, timestamp=timestamp()
        ),
        tmp_db_name="urbanfootprint_builtform_dump"
    )

    print dump_args

    # Step 1: dump the selected tables from the main database.
    dump_command = "pg_dump {database} {formatted_list_of_tables} > {output_file}"

    # Step 2: create a scratch database from the PostGIS template.
    create_tmp_db = "createdb {tmp_db_name} --template template_postgis"

    # Step 3: load the dump into the scratch database.
    restore_dump_to_tmp_db = "psql {tmp_db_name} -f {output_file}"

    # Step 4: remove media rows unrelated to built forms from the scratch copy.
    delete_unrelated_media = """psql {tmp_db_name} -c "DELETE from footprint_medium
            where \"key\"::text NOT LIKE 'built_form%'"
            """

    # Step 5: delete the first dump so the same path can be rewritten below.
    delete_dumpfile = "rm {output_file}"

    # Step 6: re-dump the now-pruned tables to the same output file.
    dump_isolated_built_form_relations = "pg_dump {tmp_db_name} {formatted_list_of_tables} > {output_file}"

    # Step 7: drop the scratch database (issued via psql on the main database).
    drop_tmp_db = "psql {database} -c \"drop database {tmp_db_name}\""

    # Run the steps strictly in order; failures are printed but do not abort
    # the sequence. shell=True is required for the ">" redirections.
    for command in [dump_command, create_tmp_db, restore_dump_to_tmp_db, delete_unrelated_media,
                    delete_dumpfile, dump_isolated_built_form_relations, drop_tmp_db]:
        try:
            subprocess.call(command.format(**dump_args), shell=True)
        except Exception, E:
            print E
    def update(self, **kwargs):
        """
        Runs the Agriculture Core analysis over the agriculture canvas of
        this task's config_entity: for each feature, derives per-feature
        agricultural outputs (crop yield, market value, production cost,
        water consumption, labor force, truck trips) from the feature's
        built form, saving each feature and reporting incremental progress.

        :param kwargs: may contain 'ids', an optional list of feature ids
            restricting the analysis; also forwarded to self.progress().
        :raises Exception: if the config entity is neither a BaseScenario
            nor a FutureScenario.
        """
        scenario = self.config_entity.subclassed
        logger.debug('{0}:Starting Agriculture Core Analysis for {1}'.format(timestamp(), self.config_entity))
        # Choose the agriculture canvas matching the scenario type.
        if isinstance(scenario, BaseScenario):
            agriculture_db_entity_key = DbEntityKey.BASE_AGRICULTURE_CANVAS
        elif isinstance(scenario, FutureScenario):
            agriculture_db_entity_key = DbEntityKey.FUTURE_AGRICULTURE_CANVAS
        else:
            raise Exception("Config Entity is not a Future or Base Scenario, cannot run AgricultureCore.")

        ids = kwargs.get('ids', None)
        agriculture_feature_class = self.config_entity.db_entity_feature_class(agriculture_db_entity_key)

        # Restrict to explicit ids when given; otherwise process every
        # feature that has a built form assigned.
        if ids:
            features = agriculture_feature_class.objects.filter(id__in=ids)
        else:
            features = agriculture_feature_class.objects.filter(built_form__isnull=False)

        feature_count = features.count()

        if not feature_count:
            logger.info("No features to process!")
            return

        logger.debug("Processing {0} features...".format(feature_count))
        iterator_start = datetime.datetime.utcnow().replace(tzinfo=utc)
        self.progress(0.05, **kwargs)

        # Progress strategy: for small sets, report a proportional slice on
        # every feature; for larger sets, report a fixed 5% roughly every
        # feature_count/18 features (about 18 updates spanning ~90%).
        if feature_count <= 36:
            increment_portion = (.9 / feature_count) + .001
            equal_portion = 1
        else:
            increment_portion = .05
            equal_portion = int((feature_count - 1) / 18)
        i = 1
        for feature in features.iterator():
            if i % equal_portion == 0:
                self.progress(increment_portion, **kwargs)

            # Features without a built form can appear when explicit ids are
            # passed (the default queryset excludes them); zero all outputs.
            if not feature.built_form:
                feature.built_form_key = None
                feature.crop_yield = 0
                feature.market_value = 0
                feature.production_cost = 0
                feature.water_consumption = 0
                feature.labor_force = 0
                feature.truck_trips = 0
            else:
                # Acres of the feature actually devoted to this built form.
                applied_acres = feature.acres_gross * feature.density_pct * feature.dev_pct
                agriculture_attribute_set = feature.built_form.resolve_built_form(feature.built_form).agriculture_attribute_set
                feature.built_form_key = feature.built_form.key
                # Per-acre coefficients scaled by the applied acreage; market
                # value is price per yield unit times the computed yield.
                feature.crop_yield = agriculture_attribute_set.crop_yield * applied_acres
                feature.market_value = agriculture_attribute_set.unit_price * feature.crop_yield
                feature.production_cost = agriculture_attribute_set.cost * applied_acres
                feature.water_consumption = agriculture_attribute_set.water_consumption * applied_acres
                feature.labor_force = agriculture_attribute_set.labor_input * applied_acres
                feature.truck_trips = agriculture_attribute_set.truck_trips * applied_acres
            feature.save(update_fields=self.ANALYSIS_FIELDS)
            i += 1
        total_time = datetime.datetime.utcnow().replace(tzinfo=utc) - iterator_start

        logger.debug("Processed {0} features in {1}: {2} per feature".format(
            feature_count, total_time, total_time/feature_count
        ))
        self.progress(.9, **kwargs)
        logger.debug('{0}:Finished Agriculture Core Analysis for {1} '.format(timestamp(), self.config_entity))

        self.update_dependent_scenarios(features, scenario)
Пример #8
0
    def update(self, **kwargs):
        """
        Runs the Agriculture Core analysis over the agriculture canvas of
        this task's config_entity: for each feature, derives per-feature
        agricultural outputs (crop yield, market value, production cost,
        water consumption, labor force, truck trips) from the feature's
        built form, saving each feature and reporting incremental progress.

        :param kwargs: may contain 'ids', an optional list of feature ids
            restricting the analysis; also forwarded to self.progress().
        :raises Exception: if the config entity is neither a BaseScenario
            nor a FutureScenario.
        """
        scenario = self.config_entity.subclassed
        logger.debug('{0}:Starting Agriculture Core Analysis for {1}'.format(
            timestamp(), self.config_entity))
        # Choose the agriculture canvas matching the scenario type.
        if isinstance(scenario, BaseScenario):
            agriculture_db_entity_key = DbEntityKey.BASE_AGRICULTURE_CANVAS
        elif isinstance(scenario, FutureScenario):
            agriculture_db_entity_key = DbEntityKey.FUTURE_AGRICULTURE_CANVAS
        else:
            raise Exception(
                "Config Entity is not a Future or Base Scenario, cannot run AgricultureCore."
            )

        ids = kwargs.get('ids', None)
        agriculture_feature_class = self.config_entity.db_entity_feature_class(
            agriculture_db_entity_key)

        # Restrict to explicit ids when given; otherwise process every
        # feature that has a built form assigned.
        if ids:
            features = agriculture_feature_class.objects.filter(id__in=ids)
        else:
            features = agriculture_feature_class.objects.filter(
                built_form__isnull=False)

        feature_count = features.count()

        if not feature_count:
            logger.info("No features to process!")
            return

        logger.debug("Processing {0} features...".format(feature_count))
        iterator_start = datetime.datetime.utcnow().replace(tzinfo=utc)
        self.progress(0.05, **kwargs)

        # Progress strategy: for small sets, report a proportional slice on
        # every feature; for larger sets, report a fixed 5% roughly every
        # feature_count/18 features (about 18 updates spanning ~90%).
        if feature_count <= 36:
            increment_portion = (.9 / feature_count) + .001
            equal_portion = 1
        else:
            increment_portion = .05
            equal_portion = int((feature_count - 1) / 18)
        i = 1
        for feature in features.iterator():
            if i % equal_portion == 0:
                self.progress(increment_portion, **kwargs)

            # Features without a built form can appear when explicit ids are
            # passed (the default queryset excludes them); zero all outputs.
            if not feature.built_form:
                feature.built_form_key = None
                feature.crop_yield = 0
                feature.market_value = 0
                feature.production_cost = 0
                feature.water_consumption = 0
                feature.labor_force = 0
                feature.truck_trips = 0
            else:
                # Acres of the feature actually devoted to this built form.
                applied_acres = feature.acres_gross * feature.density_pct * feature.dev_pct
                agriculture_attribute_set = feature.built_form.resolve_built_form(
                    feature.built_form).agriculture_attribute_set
                feature.built_form_key = feature.built_form.key
                # Per-acre coefficients scaled by the applied acreage; market
                # value is price per yield unit times the computed yield.
                feature.crop_yield = agriculture_attribute_set.crop_yield * applied_acres
                feature.market_value = agriculture_attribute_set.unit_price * feature.crop_yield
                feature.production_cost = agriculture_attribute_set.cost * applied_acres
                feature.water_consumption = agriculture_attribute_set.water_consumption * applied_acres
                feature.labor_force = agriculture_attribute_set.labor_input * applied_acres
                feature.truck_trips = agriculture_attribute_set.truck_trips * applied_acres
            feature.save(update_fields=self.ANALYSIS_FIELDS)
            i += 1
        total_time = datetime.datetime.utcnow().replace(
            tzinfo=utc) - iterator_start

        logger.debug("Processed {0} features in {1}: {2} per feature".format(
            feature_count, total_time, total_time / feature_count))
        self.progress(.9, **kwargs)
        logger.debug('{0}:Finished Agriculture Core Analysis for {1} '.format(
            timestamp(), self.config_entity))

        self.update_dependent_scenarios(features, scenario)