def update_performance_score(id, dry_run=False): """ Updates department performance scores. """ try: cursor = connections['nfirs'].cursor() fd = FireDepartment.objects.get(id=id) except (ConnectionDoesNotExist, FireDepartment.DoesNotExist): return cursor.execute(RESIDENTIAL_FIRES_BY_FDID_STATE, (fd.fdid, fd.state)) results = dictfetchall(cursor) old_score = fd.dist_model_score counts = dict(object_of_origin=0, room_of_origin=0, floor_of_origin=0, building_of_origin=0, beyond=0) for result in results: if result['fire_sprd'] == '1': counts['object_of_origin'] += result['count'] if result['fire_sprd'] == '2': counts['room_of_origin'] += result['count'] if result['fire_sprd'] == '3': counts['floor_of_origin'] += result['count'] if result['fire_sprd'] == '4': counts['building_of_origin'] += result['count'] if result['fire_sprd'] == '5': counts['beyond'] += result['count'] ahs_building_size = ahs_building_areas(fd.fdid, fd.state) if ahs_building_size is not None: counts['building_area_draw'] = ahs_building_size response_times = response_time_distributions.get('{0}-{1}'.format(fd.fdid, fd.state)) if response_times: counts['arrival_time_draw'] = LogNormalDraw(*response_times, multiplier=60) try: dist = DIST(floor_extent=False, **counts) fd.dist_model_score = dist.gibbs_sample() except (NotEnoughRecords, ZeroDivisionError): fd.dist_model_score = None print 'updating fdid: {2} from: {0} to {1}.'.format(old_score, fd.dist_model_score, fd.id) if dry_run: return fd.save()
def calculate_structure_counts(fd_id):
    """
    Populate per-hazard-level structure counts for a department.

    Counts parcels by risk category within the department's owned census
    tracts and writes one FireDepartmentRiskModels row per hazard level,
    plus an 'All' row holding the total.

    :param fd_id: FireDepartment primary key.
    """
    try:
        fd = FireDepartment.objects.get(id=fd_id)
        cursor = connections['nfirs'].cursor()
    except (FireDepartment.DoesNotExist, ConnectionDoesNotExist):
        # Missing department or NFIRS connection: nothing to do.
        return

    # Skip over existing calculations or missing dept owned tracts
    already_done = fd.firedepartmentriskmodels_set.filter(
        structure_count__isnull=False).count() == 5
    if not fd.owned_tracts_geom or already_done:
        return

    STRUCTURE_COUNTS = """SELECT sum(case when l.risk_category = 'Low' THEN 1 ELSE 0 END) as low, sum(CASE WHEN l.risk_category = 'Medium' THEN 1 ELSE 0 END) as medium, sum(CASE WHEN l.risk_category = 'High' THEN 1 ELSE 0 END) high, sum(CASE WHEN l.risk_category is null THEN 1 ELSE 0 END) as na FROM parcel_risk_category_local l JOIN (SELECT ST_SetSRID(%(owned_geom)s::geometry, 4326) as owned_geom) x ON owned_geom && l.wkb_geometry WHERE ST_Intersects(owned_geom, l.wkb_geometry) """

    cursor.execute(STRUCTURE_COUNTS, {'owned_geom': fd.owned_tracts_geom.wkb})
    row = dictfetchall(cursor)[0]

    # HazardLevels enum value -> SQL result column.
    column_for_level = {1: 'low', 2: 'medium', 4: 'high', 5: 'na'}

    total = 0
    for level in HazardLevels.values_sans_all():
        risk_model, _ = fd.firedepartmentriskmodels_set.get_or_create(level=level)
        level_count = row[column_for_level[level]]
        risk_model.structure_count = level_count
        risk_model.save()
        total += level_count

    # The 'All' hazard level aggregates every category above.
    risk_model, _ = fd.firedepartmentriskmodels_set.get_or_create(
        level=HazardLevels.All.value)
    risk_model.structure_count = total
    risk_model.save()
def update_parcel_effectivefirefighting_table(drivetimegeom, department): """ Intersect with Parcel layer and update parcel_department_hazard_level table """ # drivetimegeomLT15 = None drivetimegeomLT27 = None drivetimegeomLT42 = None drivetimegeomGT38 = None drivetimegeomGT41 = None for responseJSON in drivetimegeom: if arcgis2geojson(responseJSON['geometry'])['type'] == 'MultiPolygon': responsegeom = GEOSGeometry( json.dumps(arcgis2geojson(responseJSON['geometry']))) else: responsegeom = GEOSGeometry( MultiPolygon( fromstr(str(arcgis2geojson(responseJSON['geometry']))), )) # Dissolve/Union Geometry # if(responseJSON['attributes']['SUM_StaffLong'] < 15): # if drivetimegeomLT15 is None: # drivetimegeomLT15 = responsegeom # else: # drivetimegeomLT15 = drivetimegeomLT15.union(responsegeom) if (responseJSON['attributes']['SUM_StaffLong'] > 14): if drivetimegeomLT27 is None: drivetimegeomLT27 = responsegeom else: drivetimegeomLT27 = drivetimegeomLT27.union(responsegeom) if (responseJSON['attributes']['SUM_StaffLong'] > 26): if drivetimegeomLT42 is None: drivetimegeomLT42 = responsegeom else: drivetimegeomLT42 = drivetimegeomLT42.union(responsegeom) if (responseJSON['attributes']['SUM_StaffLong'] > 38): if drivetimegeomGT38 is None: drivetimegeomGT38 = responsegeom else: drivetimegeomGT38 = drivetimegeomGT38.union(responsegeom) if (responseJSON['attributes']['SUM_StaffLong'] > 41): if drivetimegeomGT41 is None: drivetimegeomGT41 = responsegeom else: drivetimegeomGT41 = drivetimegeomGT41.union(responsegeom) cursor = connections['nfirs'].cursor() QUERY_INTERSECT_FOR_PARCEL_DRIVETIME = """SELECT sum(case when l.risk_category = 'Low' THEN 1 ELSE 0 END) as low, sum(CASE WHEN l.risk_category = 'Medium' THEN 1 ELSE 0 END) as medium, sum(CASE WHEN l.risk_category = 'High' THEN 1 ELSE 0 END) high, sum(CASE WHEN l.risk_category is null THEN 1 ELSE 0 END) as unknown FROM parcel_risk_category_local l JOIN (SELECT ST_SetSRID(ST_GeomFromGeoJSON(%(drive_geom)s), 4326) as drive_geom) x 
ON drive_geom && l.wkb_geometry WHERE ST_WITHIN(l.wkb_geometry, drive_geom) """ # option to limit dept track and ST_WITHIN(l.wkb_geometry, %(owned_geom)s) # option to collect info on 0 14 personnel fighting force # saving overhead because under 15 isn't used right now - would have to add database entry below # if drivetimegeomLT15: # cursor.execute(QUERY_INTERSECT_FOR_PARCEL_DRIVETIME, {'drive_geom': drivetimegeomLT15.json, 'owned_geom': department.geom.wkb}) # results0 = dictfetchall(cursor) if drivetimegeomLT27: cursor.execute( QUERY_INTERSECT_FOR_PARCEL_DRIVETIME, { 'drive_geom': drivetimegeomLT27.intersection( department.owned_tracts_geom).json, 'owned_geom': department.owned_tracts_geom.wkb }) results15 = dictfetchall(cursor) if results15: if results15[0]['unknown'] is None: results15[0]['unknown'] = 0 if results15[0]['low'] is None: results15[0]['low'] = 0 if drivetimegeomLT42: cursor.execute( QUERY_INTERSECT_FOR_PARCEL_DRIVETIME, { 'drive_geom': drivetimegeomLT42.intersection( department.owned_tracts_geom).json, 'owned_geom': department.owned_tracts_geom.wkb }) results27 = dictfetchall(cursor) if results27: if results27[0]['medium'] is None: results27[0]['medium'] = 0 if drivetimegeomGT38: cursor.execute( QUERY_INTERSECT_FOR_PARCEL_DRIVETIME, { 'drive_geom': drivetimegeomGT38.intersection( department.owned_tracts_geom).json, 'owned_geom': department.owned_tracts_geom.wkb }) results38 = dictfetchall(cursor) if results38: if results38[0]['high'] is None: results38[0]['high'] = 0 if drivetimegeomGT41: cursor.execute( QUERY_INTERSECT_FOR_PARCEL_DRIVETIME, { 'drive_geom': drivetimegeomGT41.intersection( department.owned_tracts_geom).json, 'owned_geom': department.owned_tracts_geom.wkb }) results42 = dictfetchall(cursor) if results15: if results42[0]['high'] is None: results42[0]['high'] = 0 # Overwrite/Update efff area if already loaded if EffectiveFireFightingForceLevel.objects.filter( department_id=department.id): existingrecord = 
EffectiveFireFightingForceLevel.objects.filter( department_id=department.id) addefffdepartment = existingrecord[0] # there is an 'all' value for future calculations on the total addefffdepartment.perc_covered_low_15_26 = 0 addefffdepartment.perc_covered_unknown_15_26 = 0 addefffdepartment.perc_covered_medium_27_42 = 0 addefffdepartment.perc_covered_high38_plus = 0 addefffdepartment.perc_covered_high_43_plus = 0 if drivetimegeomLT27: addefffdepartment.parcelcount_low_15_26 = results15[0]['low'] addefffdepartment.parcelcount_unknown_15_26 = results15[0][ 'unknown'] addefffdepartment.perc_covered_low_15_26 = round( 100 * (float(results15[0]['low']) / float( department.metrics.structure_counts_by_risk_category.low)), 2) addefffdepartment.perc_covered_unknown_15_26 = round( 100 * (float(results15[0]['unknown']) / float(department.metrics. structure_counts_by_risk_category.unknown)), 2) if isinstance(drivetimegeomLT27, MultiPolygon): addefffdepartment.drivetimegeom_15_26 = drivetimegeomLT27 else: addefffdepartment.drivetimegeom_15_26 = MultiPolygon( drivetimegeomLT27) if drivetimegeomLT42: addefffdepartment.parcelcount_medium_27_42 = results27[0]['medium'] addefffdepartment.perc_covered_medium_27_42 = round( 100 * (float(results27[0]['medium']) / float(department.metrics. 
structure_counts_by_risk_category.medium)), 2) if isinstance(drivetimegeomLT42, MultiPolygon): addefffdepartment.drivetimegeom_27_42 = drivetimegeomLT42 else: addefffdepartment.drivetimegeom_27_42 = MultiPolygon( drivetimegeomLT42) if drivetimegeomGT38: addefffdepartment.parcelcount_high38_plus = results38[0]['high'] addefffdepartment.perc_covered_high38_plus = round( 100 * (float(results38[0]['high']) / float( department.metrics.structure_counts_by_risk_category.high) ), 2) if isinstance(drivetimegeomGT38, MultiPolygon): addefffdepartment.drivetimegeom_38_plus = drivetimegeomGT38 else: addefffdepartment.drivetimegeom_38_plus = MultiPolygon( drivetimegeomGT38) if drivetimegeomGT41: addefffdepartment.parcelcount_high_43_plus = results42[0]['high'] addefffdepartment.perc_covered_high_43_plus = round( 100 * (float(results42[0]['high']) / float( department.metrics.structure_counts_by_risk_category.high) ), 2) if isinstance(drivetimegeomGT41, MultiPolygon): addefffdepartment.drivetimegeom_43_plus = drivetimegeomGT41 else: addefffdepartment.drivetimegeom_43_plus = MultiPolygon( drivetimegeomGT41) print department.name + " EFFF Area Updated" + ' at ' + time.strftime( "%Y-%m-%d %H:%M:%S", time.gmtime()) else: deptefffarea = {} deptefffarea['department'] = department deptefffarea['perc_covered_low_15_26'] = 0 deptefffarea['perc_covered_unknown_15_26'] = 0 deptefffarea['perc_covered_medium_27_42'] = 0 deptefffarea['perc_covered_high38_plus'] = 0 deptefffarea['perc_covered_high_43_plus'] = 0 if drivetimegeomLT27: deptefffarea['parcelcount_low_15_26'] = results15[0]['low'] deptefffarea['parcelcount_unknown_15_26'] = results15[0]['unknown'] deptefffarea['perc_covered_low_15_26'] = round( 100 * (float(results15[0]['low']) / float( department.metrics.structure_counts_by_risk_category.low)), 2) deptefffarea['perc_covered_unknown_15_26'] = round( 100 * (float(results15[0]['unknown']) / float(department.metrics. 
structure_counts_by_risk_category.unknown)), 2) if isinstance(drivetimegeomLT27, MultiPolygon): deptefffarea['drivetimegeom_15_26'] = drivetimegeomLT27 else: deptefffarea['drivetimegeom_15_26'] = MultiPolygon( drivetimegeomLT27) if drivetimegeomLT42: deptefffarea['parcelcount_medium_27_42'] = results27[0]['medium'] deptefffarea['perc_covered_medium_27_42'] = round( 100 * (float(results27[0]['medium']) / float(department.metrics. structure_counts_by_risk_category.medium)), 2) if isinstance(drivetimegeomLT42, MultiPolygon): deptefffarea['drivetimegeom_27_42'] = drivetimegeomLT42 else: deptefffarea['drivetimegeom_27_42'] = MultiPolygon( drivetimegeomLT42) if drivetimegeomGT38: deptefffarea['parcelcount_high38_plus'] = results38[0]['high'] deptefffarea['perc_covered_high38_plus'] = round( 100 * (float(results38[0]['high']) / float( department.metrics.structure_counts_by_risk_category.high) ), 2) if isinstance(drivetimegeomGT38, MultiPolygon): deptefffarea['drivetimegeom_38_plus'] = drivetimegeomGT38 else: deptefffarea['drivetimegeom_38_plus'] = MultiPolygon( drivetimegeomGT38) if drivetimegeomGT41: deptefffarea['parcelcount_high_43_plus'] = results42[0]['high'] deptefffarea['perc_covered_high_43_plus'] = round( 100 * (float(results42[0]['high']) / float( department.metrics.structure_counts_by_risk_category.high) ), 2) if isinstance(drivetimegeomGT41, MultiPolygon): deptefffarea['drivetimegeom_43_plus'] = drivetimegeomGT41 else: deptefffarea['drivetimegeom_43_plus'] = MultiPolygon( drivetimegeomGT41) addefffdepartment = EffectiveFireFightingForceLevel.objects.create( **deptefffarea) print department.name + " EFFF Area Created" + ' at ' + time.strftime( "%Y-%m-%d %H:%M:%S", time.gmtime()) addefffdepartment.save()
def update_parcel_department_hazard_level(drivetimegeom, department): """ Intersect with Parcel layer and update parcel_department_hazard_level table 0-4 minutes 4-6 minutes 6-8 minutes """ drivetimegeom0 = arcgis2geojson(drivetimegeom[2]['geometry']) drivetimegeom4 = arcgis2geojson(drivetimegeom[1]['geometry']) drivetimegeom6 = arcgis2geojson(drivetimegeom[0]['geometry']) cursor = connections['nfirs'].cursor() # TESTGEOM = "SELECT ST_AREA(ST_GeomFromGeoJSON(%(drive_geom)s),false) As area" QUERY_INTERSECT_FOR_PARCEL_DRIVETIME = """SELECT sum(case when l.risk_category = 'Low' THEN 1 ELSE 0 END) as low, sum(CASE WHEN l.risk_category = 'Medium' THEN 1 ELSE 0 END) as medium, sum(CASE WHEN l.risk_category = 'High' THEN 1 ELSE 0 END) high, sum(CASE WHEN l.risk_category is null THEN 1 ELSE 0 END) as unknown FROM parcel_risk_category_local l JOIN (SELECT ST_SetSRID(ST_GeomFromGeoJSON(%(drive_geom)s), 4326) as drive_geom) x ON drive_geom && l.wkb_geometry WHERE ST_WITHIN(l.wkb_geometry, drive_geom) """ print 'Querying Database for parcels' cursor.execute(QUERY_INTERSECT_FOR_PARCEL_DRIVETIME, {'drive_geom': json.dumps(drivetimegeom0)}) results0 = dictfetchall(cursor) cursor.execute(QUERY_INTERSECT_FOR_PARCEL_DRIVETIME, {'drive_geom': json.dumps(drivetimegeom4)}) results4 = dictfetchall(cursor) cursor.execute(QUERY_INTERSECT_FOR_PARCEL_DRIVETIME, {'drive_geom': json.dumps(drivetimegeom6)}) results6 = dictfetchall(cursor) # Overwrite/Update service area is already registered if ParcelDepartmentHazardLevel.objects.filter(department_id=department.id): existingrecord = ParcelDepartmentHazardLevel.objects.filter( department_id=department.id) addhazardlevelfordepartment = existingrecord[0] addhazardlevelfordepartment.parcelcount_low_0_4 = results0[0]['low'] addhazardlevelfordepartment.parcelcount_low_4_6 = results4[0]['low'] addhazardlevelfordepartment.parcelcount_low_6_8 = results6[0]['low'] addhazardlevelfordepartment.parcelcount_medium_0_4 = results0[0][ 'medium'] 
addhazardlevelfordepartment.parcelcount_medium_4_6 = results4[0][ 'medium'] addhazardlevelfordepartment.parcelcount_medium_6_8 = results6[0][ 'medium'] addhazardlevelfordepartment.parcelcount_high_0_4 = results0[0]['high'] addhazardlevelfordepartment.parcelcount_high_4_6 = results4[0]['high'] addhazardlevelfordepartment.parcelcount_high_6_8 = results6[0]['high'] addhazardlevelfordepartment.parcelcount_unknown_0_4 = results0[0][ 'unknown'] addhazardlevelfordepartment.parcelcount_unknown_4_6 = results4[0][ 'unknown'] addhazardlevelfordepartment.parcelcount_unknown_6_8 = results6[0][ 'unknown'] if drivetimegeom0['type'] == 'MultiPolygon': addhazardlevelfordepartment.drivetimegeom_0_4 = GEOSGeometry( json.dumps(drivetimegeom0)) else: addhazardlevelfordepartment.drivetimegeom_0_4 = GEOSGeometry( MultiPolygon(fromstr(str(drivetimegeom0)), )) if drivetimegeom4['type'] == 'MultiPolygon': addhazardlevelfordepartment.drivetimegeom_4_6 = GEOSGeometry( json.dumps(drivetimegeom4)) else: addhazardlevelfordepartment.drivetimegeom_4_6 = GEOSGeometry( MultiPolygon(fromstr(str(drivetimegeom4)), )) if drivetimegeom6['type'] == 'MultiPolygon': addhazardlevelfordepartment.drivetimegeom_6_8 = GEOSGeometry( json.dumps(drivetimegeom6)) else: addhazardlevelfordepartment.drivetimegeom_6_8 = GEOSGeometry( MultiPolygon(fromstr(str(drivetimegeom6)), )) print department.name + " Service Area Updated" else: deptservicearea = {} deptservicearea['department'] = department deptservicearea['parcelcount_low_0_4'] = results0[0]['low'] deptservicearea['parcelcount_low_4_6'] = results4[0]['low'] deptservicearea['parcelcount_low_6_8'] = results6[0]['low'] deptservicearea['parcelcount_medium_0_4'] = results0[0]['medium'] deptservicearea['parcelcount_medium_4_6'] = results4[0]['medium'] deptservicearea['parcelcount_medium_6_8'] = results6[0]['medium'] deptservicearea['parcelcount_high_0_4'] = results0[0]['high'] deptservicearea['parcelcount_high_4_6'] = results4[0]['high'] 
deptservicearea['parcelcount_high_6_8'] = results6[0]['high'] deptservicearea['parcelcount_unknown_0_4'] = results0[0]['unknown'] deptservicearea['parcelcount_unknown_4_6'] = results4[0]['unknown'] deptservicearea['parcelcount_unknown_6_8'] = results6[0]['unknown'] if drivetimegeom0['type'] == 'MultiPolygon': deptservicearea['drivetimegeom_0_4'] = GEOSGeometry( json.dumps(drivetimegeom0)) else: deptservicearea['drivetimegeom_0_4'] = GEOSGeometry( MultiPolygon(fromstr(str(drivetimegeom0)), )) if drivetimegeom4['type'] == 'MultiPolygon': deptservicearea['drivetimegeom_4_6'] = GEOSGeometry( json.dumps(drivetimegeom4)) else: deptservicearea['drivetimegeom_4_6'] = GEOSGeometry( MultiPolygon(fromstr(str(drivetimegeom4)), )) if drivetimegeom6['type'] == 'MultiPolygon': deptservicearea['drivetimegeom_6_8'] = GEOSGeometry( json.dumps(drivetimegeom6)) else: deptservicearea['drivetimegeom_6_8'] = GEOSGeometry( MultiPolygon(fromstr(str(drivetimegeom6)), )) addhazardlevelfordepartment = ParcelDepartmentHazardLevel.objects.create( **deptservicearea) print department.name + " Service Area Created" addhazardlevelfordepartment.save()
def update_performance_score(id, dry_run=False):
    """ Updates department performance scores.

    Aggregates residential building-fire spread counts per risk category
    (via a PostgreSQL crosstab over NFIRS joint_buildingfires), runs the
    hazard-level-specific DIST model for each category, and saves a
    dist_model_score on each FireDepartmentRiskModels row, plus an 'All'
    aggregate row.  With dry_run=True, scores are computed and printed but
    never saved.
    """
    print "updating performance score for {}".format(id)
    try:
        cursor = connections['nfirs'].cursor()
        fd = FireDepartment.objects.get(id=id)
    except (ConnectionDoesNotExist, FireDepartment.DoesNotExist):
        # No NFIRS connection or unknown department: nothing to update.
        return
    # Hack to get around inline SQL string execution and argument escaping in a tuple
    # NOTE(review): fdid values are doubly-quoted and interpolated directly
    # into the crosstab source-SQL string rather than bound as parameters —
    # safe only if fd.fdids is trusted; consider sanitizing upstream.
    fds = ["''{}''".format(x) for x in fd.fdids]
    # The outer %(state)s placeholder is bound by psycopg2 at execute();
    # doubled single quotes ('' -> ') are escapes inside the crosstab's
    # quoted inner query.
    RESIDENTIAL_FIRES_BY_FDID_STATE = """ SELECT * FROM crosstab( 'select COALESCE(y.risk_category, ''N/A'') as risk_category, fire_sprd, count(*) FROM joint_buildingfires a left join ( SELECT state, fdid, inc_date, inc_no, exp_no, geom, x.parcel_id, x.risk_category FROM (select * from joint_incidentaddress a left join parcel_risk_category_local b using (parcel_id) ) AS x ) AS y using (state, inc_date, exp_no, fdid, inc_no) where a.state='%(state)s' and a.fdid in ({fds}) and prop_use in (''419'',''429'',''439'',''449'',''459'',''460'',''462'',''464'',''400'') and fire_sprd is not null and fire_sprd != '''' group by risk_category, fire_sprd order by risk_category, fire_sprd ASC') AS ct(risk_category text, "object_of_origin" bigint, "room_of_origin" bigint, "floor_of_origin" bigint, "building_of_origin" bigint, "beyond" bigint); """.format(fds=','.join(fds))
    cursor.execute(RESIDENTIAL_FIRES_BY_FDID_STATE, {'state': fd.state})
    results = dictfetchall(cursor)
    # Running totals across all risk categories, fed to the 'All' model below.
    all_counts = dict(object_of_origin=0,
                      room_of_origin=0,
                      floor_of_origin=0,
                      building_of_origin=0,
                      beyond=0)
    # Risk category label -> HazardLevels enum value.
    risk_mapping = {'Low': 1, 'Medium': 2, 'High': 4, 'N/A': 5}
    ahs_building_size = ahs_building_areas(fd.fdid, fd.state)
    for result in results:
        # Skip crosstab rows for categories we don't model.
        if result.get('risk_category') not in risk_mapping:
            continue
        dist_model = dist_model_for_hazard_level(result.get('risk_category'))
        # Use floor draws based on the LogNormal of the structure type distribution for med/high risk categories
        # TODO: Detect support for number_of_floors_draw on risk model vs being explicit on hazard levels used :/
        if result.get('risk_category') in ['Medium', 'High']:
            rm, _ = fd.firedepartmentriskmodels_set.get_or_create(
                level=risk_mapping[result['risk_category']])
            if rm.floor_count_coefficients:
                pass
                # TODO
                # dist_model.number_of_floors_draw = LogNormalDraw(*rm.floor_count_coefficients)
        # Crosstab returns NULL for empty cells; coerce to 0.
        counts = dict(object_of_origin=result['object_of_origin'] or 0,
                      room_of_origin=result['room_of_origin'] or 0,
                      floor_of_origin=result['floor_of_origin'] or 0,
                      building_of_origin=result['building_of_origin'] or 0,
                      beyond=result['beyond'] or 0)
        # add current risk category to the all risk category
        for key, value in counts.items():
            all_counts[key] += value
        if ahs_building_size is not None:
            counts['building_area_draw'] = ahs_building_size
        response_times = response_time_distributions.get('{0}-{1}'.format(
            fd.fdid, fd.state))
        if response_times:
            counts['arrival_time_draw'] = LogNormalDraw(*response_times,
                                                        multiplier=60)
        record, _ = fd.firedepartmentriskmodels_set.get_or_create(
            level=risk_mapping[result['risk_category']])
        old_score = record.dist_model_score
        try:
            dist = dist_model(floor_extent=False, **counts)
            record.dist_model_score = dist.gibbs_sample()
            record.dist_model_score_fire_count = dist.total_fires
            print 'updating fdid: {2} - {3} performance score from: {0} to {1}.'.format(
                old_score, record.dist_model_score, fd.id,
                HazardLevels(record.level).name)
        except (NotEnoughRecords, ZeroDivisionError):
            # Not enough incident data to fit the model; clear the score.
            print 'Error updating DIST score: {}.'.format(
                traceback.format_exc())
            record.dist_model_score = None
        if not dry_run:
            record.save()
    # Clear out scores for missing hazard levels
    if not dry_run:
        missing_categories = set(risk_mapping.keys()) - set(
            map(lambda x: x.get('risk_category'), results))
        for r in missing_categories:
            print 'clearing {0} level from {1} due to missing categories in aggregation'.format(
                r, fd.id)
            record, _ = fd.firedepartmentriskmodels_set.get_or_create(
                level=risk_mapping[r])
            record.dist_model_score = None
            record.save()
    # Finally, score the 'All' hazard level from the accumulated totals.
    record, _ = fd.firedepartmentriskmodels_set.get_or_create(
        level=HazardLevels.All.value)
    old_score = record.dist_model_score
    dist_model = dist_model_for_hazard_level('All')
    try:
        if ahs_building_size is not None:
            all_counts['building_area_draw'] = ahs_building_size
        response_times = response_time_distributions.get('{0}-{1}'.format(
            fd.fdid, fd.state))
        if response_times:
            all_counts['arrival_time_draw'] = LogNormalDraw(*response_times,
                                                            multiplier=60)
        dist = dist_model(floor_extent=False, **all_counts)
        record.dist_model_score = dist.gibbs_sample()
        print 'updating fdid: {2} - {3} performance score from: {0} to {1}.'.format(
            old_score, record.dist_model_score, fd.id,
            HazardLevels(record.level).name)
    except (NotEnoughRecords, ZeroDivisionError):
        print 'Error updating DIST score: {}.'.format(traceback.format_exc())
        record.dist_model_score = None
    if not dry_run:
        record.save()
    print "...updated performance score for {}".format(id)