def run_fiscal_calculations(self, **kwargs):
    """
    Compute per-feature fiscal attributes (residential capital costs,
    operations & maintenance costs, and revenue) for this scenario,
    replace the contents of the fiscal feature table with the results,
    and fire the analysis-module post-save signal.

    :param kwargs: must contain 'analysis_module' (sent with the signal);
        the full dict is also forwarded to self.fiscal_progress so the
        client progress bar can be updated.
    """
    start_time = time.time()

    # TODO Use the first PolicySet--this needs to be done better
    fiscal_policy_set = self.config_entity.computed_policy_sets()[0].policy_by_key('fiscal')

    # Flatten each policy category into a dict keyed '<policy_key>_<value_key>'.
    # The original shadowed the outer `policy_set` name as its own loop
    # variable; distinct names are used here to keep the two levels clear.
    # TODO: put a method in PolicySet that flattens the policies to a dict like this does
    policy_assumptions = {}
    for policy_category in fiscal_policy_set.policies.all():
        policy_assumptions[policy_category.key] = {}
        for policy in policy_category.policies.all():
            for key, subpolicy in policy.values.items():
                policy_assumptions[policy_category.key]["_".join([policy.key, key])] = subpolicy

    # Years between the scenario year and the project base year; used to
    # scale the annual operations/maintenance costs over the full period.
    scenario_time_increment = float(
        self.config_entity.scenario.year - self.config_entity.scenario.project.base_year)

    fiscal_feature_class = self.config_entity.db_entity_feature_class(
        DbEntityKey.FISCAL, base_feature_class=True)
    net_increment_class = self.config_entity.db_entity_feature_class(DbEntityKey.INCREMENT)

    # Only increment features with a land development category contribute.
    features = net_increment_class.objects.filter(land_development_category__isnull=False)

    fiscal_outputs = []
    self.fiscal_progress(0.3, **kwargs)
    for feature in features:
        new_feature = fiscal_feature_class(id=feature.id, wkb_geometry=feature.wkb_geometry)
        # NOTE(review): these lookups assume the fiscal PolicySet contains
        # categories keyed 'capital_costs', 'operations_maintenance_costs'
        # and 'revenue' -- a KeyError here means the PolicySet is misconfigured.
        new_feature.residential_capital_costs = self.calculate_feature_fiscal_attributes(
            feature, policy_assumptions['capital_costs'])
        new_feature.residential_operations_maintenance_costs = self.calculate_feature_fiscal_attributes(
            feature, policy_assumptions['operations_maintenance_costs'],
            multiplier=scenario_time_increment)
        new_feature.residential_revenue = self.calculate_feature_fiscal_attributes(
            feature, policy_assumptions['revenue'])
        fiscal_outputs.append(new_feature)

    self.fiscal_progress(0.5, **kwargs)
    # Replace all previous results wholesale.
    fiscal_feature_class.objects.all().delete()
    fiscal_feature_class.objects.bulk_create(fiscal_outputs)
    self.fiscal_progress(0.2, **kwargs)
    # Call form (not the Py2 print statement) so the line is Py2/Py3 compatible.
    print('Finished: ' + str(time.time() - start_time))

    # Imported here, not at module level, presumably to avoid a circular import.
    from footprint.main.publishing.config_entity_publishing import post_save_config_entity_analysis_module
    post_save_config_entity_analysis_module.send(
        sender=self.config_entity.__class__,
        config_entity=self.config_entity,
        analysis_module=kwargs['analysis_module'])
def run_fiscal_calculations(self, **kwargs):
    """
    Compute per-feature fiscal attributes (residential capital costs,
    operations & maintenance costs, and revenue) for this scenario,
    replace the contents of the fiscal feature table with the results,
    and fire the analysis-module post-save signal.

    :param kwargs: must contain 'analysis_module' (sent with the signal);
        the full dict is also forwarded to self.fiscal_progress.
    """
    start_time = time.time()
    policy_assumptions = {}
    # TODO Use the first PolicySet--this needs to be done better
    policy_set = self.config_entity.computed_policy_sets()[0].policy_by_key('fiscal')
    # TODO: put a method in PolicySet that flattens the policies to a dict like this does
    # NOTE(review): the loop variable rebinds (shadows) the outer `policy_set`;
    # each iteration is one policy category within the fiscal PolicySet.
    for policy_set in policy_set.policies.all():
        policy_assumptions[policy_set.key] = {}
        for policy in policy_set.policies.all():
            # Flatten to '<policy_key>_<value_key>' entries per category.
            for key, subpolicy in policy.values.items():
                policy_assumptions[policy_set.key]["_".join([policy.key, key])] = subpolicy
    # Years between scenario year and project base year; scales annual
    # operations/maintenance costs below.
    scenario_time_increment = float(self.config_entity.scenario.year - self.config_entity.scenario.project.base_year)
    fiscal_feature_class = self.config_entity.db_entity_feature_class(DbEntityKey.FISCAL, base_feature_class=True)
    net_increment_class = self.config_entity.db_entity_feature_class(DbEntityKey.INCREMENT)
    # Only increment features with a land development category contribute.
    features = net_increment_class.objects.filter(land_development_category__isnull=False)
    fiscal_outputs = []
    self.fiscal_progress(0.3, **kwargs)
    for feature in features:
        new_feature = fiscal_feature_class(
            id=feature.id,
            wkb_geometry=feature.wkb_geometry
        )
        # NOTE(review): assumes the fiscal PolicySet has categories keyed
        # 'capital_costs', 'operations_maintenance_costs' and 'revenue' --
        # a KeyError here means the PolicySet is misconfigured.
        new_feature.residential_capital_costs = self.calculate_feature_fiscal_attributes(feature, policy_assumptions['capital_costs'])
        new_feature.residential_operations_maintenance_costs = self.calculate_feature_fiscal_attributes(
            feature,
            policy_assumptions['operations_maintenance_costs'],
            multiplier=scenario_time_increment)
        new_feature.residential_revenue = self.calculate_feature_fiscal_attributes(feature, policy_assumptions['revenue'])
        fiscal_outputs.append(new_feature)
    self.fiscal_progress(0.5, **kwargs)
    # Replace all previous results wholesale.
    fiscal_feature_class.objects.all().delete()
    fiscal_feature_class.objects.bulk_create(fiscal_outputs)
    self.fiscal_progress(0.2, **kwargs)
    print 'Finished: ' + str(time.time() - start_time)
    # Imported here, not at module level, presumably to avoid a circular import.
    from footprint.main.publishing.config_entity_publishing import post_save_config_entity_analysis_module
    post_save_config_entity_analysis_module.send(
        sender=self.config_entity.__class__,
        config_entity=self.config_entity,
        analysis_module=kwargs['analysis_module']
    )
def analysis_module_task(job, user, config_entity_id, key, kwargs):
    """
    Job entry point that runs every AnalysisTool of the AnalysisModule
    identified by (config_entity_id, key), notifying the client over the
    message channel on start, completion and failure, and stamping the
    module's completed/failed timestamps.

    :param job: job instance; its hashid is reported to the client
    :param user: requesting user; messages are addressed to user.id
    :param config_entity_id: pk of the ConfigEntity to load (subclassed)
    :param key: key of the AnalysisModule within the ConfigEntity
    :param kwargs: extra keyword args forwarded to each tool's update()
    :raise Exception: re-raised with the readable traceback on any failure
    """
    config_entity = ConfigEntity.objects.get(id=config_entity_id).subclassed
    analysis_module = AnalysisModule.objects.get(config_entity=config_entity, key=key)
    # Set again for new instance
    analysis_module._started = True
    try:
        # TODO progress calls should be moved to each module so the status bar increments on the client
        send_message_to_client(
            user.id,
            dict(
                # NOTE(review): the original appended .format(capitalize(key)),
                # a no-op since the string has no placeholders; the dead call is
                # removed. Presumably an interpolated event name was once
                # intended -- confirm against the client's event handlers.
                event='postSavePublisherStarted',
                job_id=str(job.hashid),
                config_entity_id=config_entity.id,
                ids=[analysis_module.id],
                class_name='AnalysisModule',
                key=analysis_module.key))

        # Run each tool of the module with the module context merged in.
        for analysis_tool in analysis_module.analysis_tools.all().select_subclasses():
            updated_kwargs = deepcopy(kwargs)
            updated_kwargs.update(dict(analysis_module=analysis_module, user=user, job=job, key=key))
            analysis_tool.update(**updated_kwargs)

        # Imported here, not at module level, presumably to avoid a circular import.
        from footprint.main.publishing.config_entity_publishing import post_save_config_entity_analysis_module
        post_save_config_entity_analysis_module.send(
            sender=config_entity.__class__, config_entity=config_entity, analysis_module=analysis_module)

        logger.info('AnalysisModule %s Completed for ConfigEntity %s' % (analysis_module.name, config_entity.name))
        logger.info('Sending message to client postSavePublisherCompleted to user %s for module %s and config entity %s' %
                    (user.username, analysis_module.name, config_entity.name))
        send_message_to_client(
            user.id,
            dict(event='postSavePublisherCompleted',
                 job_id=str(job.hashid),
                 config_entity_id=config_entity.id,
                 ids=[analysis_module.id],
                 class_name='AnalysisModule',
                 key=analysis_module.key))
        analysis_module.completed = datetime.utcnow().replace(tzinfo=utc)
        analysis_module.save()
        analysis_module._started = False
    # Was `except Exception, e:` (Py2-only syntax) with `e` unused.
    except Exception:
        try:
            # Stamp the failure time even if save() itself fails below.
            analysis_module.failed = datetime.utcnow().replace(tzinfo=utc)
            analysis_module.save()
        finally:
            analysis_module._started = False
        exc_type, exc_value, exc_traceback = sys.exc_info()
        # format_exception returns a LIST of lines; join it so the log entry
        # and the re-raised message are a readable string, not a repr'd list.
        readable_exception = ''.join(traceback.format_exception(exc_type, exc_value, exc_traceback))
        logger.error(readable_exception)
        send_message_to_client(
            user.id,
            dict(event='postSavePublisherFailed',
                 job_id=str(job.hashid),
                 config_entity_id=config_entity.id,
                 ids=[analysis_module.id],
                 class_name='AnalysisModule',
                 key=analysis_module.key))
        raise Exception(readable_exception)
def analysis_module_task(job, user, config_entity_id, key, kwargs):
    """
    Job entry point that runs every AnalysisTool of the AnalysisModule
    identified by (config_entity_id, key), notifying the client over the
    message channel on start, completion and failure, and stamping the
    module's completed/failed timestamps.

    :param job: job instance; its hashid is reported to the client
    :param user: requesting user; messages are addressed to user.id
    :param config_entity_id: pk of the ConfigEntity to load (subclassed)
    :param key: key of the AnalysisModule within the ConfigEntity
    :param kwargs: extra keyword args forwarded to each tool's update()
    :raise Exception: re-raised with the readable traceback on any failure
    """
    config_entity = ConfigEntity.objects.get(id=config_entity_id).subclassed
    analysis_module = AnalysisModule.objects.get(config_entity=config_entity, key=key)
    # Set again for new instance
    analysis_module._started = True
    try:
        # TODO progress calls should be moved to each module so the status bar increments on the client
        # logger.info('AnalysisModule %s Started for ConfigEntity %s with kwarg keys' % (analysis_module.name, config_entity.name, ', '.join(kwargs or dict().keys())))
        send_message_to_client(user.id, dict(
            # NOTE(review): .format() on a string with no placeholders is a
            # no-op -- the event sent is always 'postSavePublisherStarted';
            # the capitalize(key) argument is computed and discarded.
            event='postSavePublisherStarted'.format(capitalize(key)),
            job_id=str(job.hashid),
            config_entity_id=config_entity.id,
            ids=[analysis_module.id],
            class_name='AnalysisModule',
            key=analysis_module.key))
        # Call each tool's update method, merging the module context into kwargs.
        for analysis_tool in analysis_module.analysis_tools.all().select_subclasses():
            updated_kwargs = deepcopy(kwargs)
            updated_kwargs.update(dict(analysis_module=analysis_module, user=user, job=job, key=key))
            analysis_tool.update(**updated_kwargs)
        # Call the post save publisher (imported here, presumably to avoid a
        # circular import at module load time).
        from footprint.main.publishing.config_entity_publishing import post_save_config_entity_analysis_module
        post_save_config_entity_analysis_module.send(sender=config_entity.__class__, config_entity=config_entity, analysis_module=analysis_module)
        logger.info('AnalysisModule %s Completed for ConfigEntity %s' % (analysis_module.name, config_entity.name))
        logger.info('Sending message to client postSavePublisherCompleted to user %s for module %s and config entity %s' % \
                    (user.username, analysis_module.name, config_entity.name))
        send_message_to_client(user.id, dict(event='postSavePublisherCompleted',
                                             job_id=str(job.hashid),
                                             config_entity_id=config_entity.id,
                                             ids=[analysis_module.id],
                                             class_name='AnalysisModule',
                                             key=analysis_module.key)
        )
        analysis_module.completed = datetime.utcnow().replace(tzinfo=utc)
        analysis_module.save()
        analysis_module._started = False
    except Exception, e:
        try:
            # Stamp the failure time even if save() itself fails below.
            analysis_module.failed = datetime.utcnow().replace(tzinfo=utc)
            analysis_module.save()
        finally:
            analysis_module._started = False
        exc_type, exc_value, exc_traceback = sys.exc_info()
        # NOTE(review): format_exception returns a LIST of strings; it is
        # logged and re-raised as-is, so the output reads as a repr'd list.
        readable_exception = traceback.format_exception(exc_type, exc_value, exc_traceback)
        logger.error(readable_exception)
        send_message_to_client(user.id, dict(event='postSavePublisherFailed',
                                             job_id=str(job.hashid),
                                             config_entity_id=config_entity.id,
                                             ids=[analysis_module.id],
                                             class_name='AnalysisModule',
                                             key=analysis_module.key
                                             )
        )
        raise Exception(readable_exception)