def load_finished(self):
    """Hide the loading progress bar; on the first load, enable the
    dialog buttons and add the two closing actions."""
    clear_progress_message_bar(self.message_bar, self.message_bar_item)
    button_box = self.button_box
    if button_box.isEnabled():
        # buttons were already set up by a previous load
        return
    button_box.setEnabled(True)
    button_box.addButton('Close', QDialogButtonBox.NoRole)
    button_box.addButton(
        'Close and show in browser', QDialogButtonBox.YesRole)
def upload_done(self, result):
    """Handle the outcome of a layer upload.

    :param result: a (layer_url, success) pair; when success is falsy,
        the first element holds the error message instead of a url
    """
    layer_url, success = result
    if not success:
        # on failure, layer_url contains the error message
        clear_progress_message_bar(self.message_bar)
        self.message_bar.pushMessage(
            'Upload error', layer_url, duration=0,
            level=QgsMessageBar.CRITICAL)
        return
    # The style-converter is implemented only for rule-based styles;
    # for any other renderer keep the default basic geonode style.
    renderer = self.iface.activeLayer().rendererV2()
    if isinstance(renderer, QgsRuleBasedRendererV2):
        self._update_layer_style()
    else:
        self.message_bar.pushMessage(
            'Info', 'Using the basic default style',
            level=QgsMessageBar.INFO)
    self.message_bar_item, _ = create_progress_message_bar(
        self.message_bar, 'Loading page......', no_percentage=True)
    self.web_view.load(QUrl(layer_url))
    self.layer_url = layer_url
    self.upload_successful.emit(layer_url)
def on_load_finished(self, ok):
    """Clear the progress bar once the page load attempt is over.

    NOTE: Intuitively, I would have used the value of ok to check if
    loading failed and to display an error message. However, before
    a failing loading finishes, the attempt to set the progress value
    raises a RuntimeError. I am using that to display an error message,
    which seems to be working fine.
    """
    clear_progress_message_bar(
        self.parent.lower_message_bar, self.progress_message_bar)
def get_sv_data(self, sv_variables_ids, load_geometries,
                country_iso_codes, message_bar):
    """
    Get a csv file containing data corresponding to the social
    vulnerability variables which ids are given in input
    If country iso codes are also provided, only the corresponding data
    will be exported

    :param sv_variables_ids: a string of comma-separated ids of social
        vulnerability variables
    :param load_geometries: 'True' or 'False', indicating if also the
        geometries of countries have to be exported
        (optional - default: 'False')
    :param country_iso_codes: a string of comma-separated country iso
        codes (optional - default: all countries)
    :param message_bar: the message bar used to display progress

    :returns: (fname, msg), where fname is the path of the temporary
        csv file the data was saved into
    :raises SvNetworkError: if the platform response is not 200
    """
    msg_bar_item, progress_ = create_progress_message_bar(
        message_bar,
        'Waiting for the OpenQuake Platform to export the data...',
        no_percentage=True)
    page = self.host + PLATFORM_EXPORT_VARIABLES_DATA
    data = dict(sv_variables_ids=sv_variables_ids,
                export_geometries=load_geometries,
                country_iso_codes=country_iso_codes)
    result = self.sess.post(page, data=data, stream=True)
    clear_progress_message_bar(message_bar, msg_bar_item)
    if result.status_code == 200:
        # save csv on a temporary file
        fd, fname = tempfile.mkstemp(suffix='.csv')
        os.close(fd)
        # All the fields of the csv file will be considered as text
        # fields unless a .csvt file with the same name as the .csv
        # file is used to specify the field types.
        # For the type descriptor, use the same name as the csv file.
        # NOTE: use splitext (not fname.split('.')[0]) so a dot in the
        # temporary directory path does not truncate the file name
        fname_types = os.path.splitext(fname)[0] + '.csvt'
        # We expect iso, country_name, v1, v2, ... vn
        # Count variables ids
        sv_variables_count = len(sv_variables_ids.split(','))
        # build the string that describes data types for the csv
        types_string = ('"String","String"'
                        + ',"Real"' * sv_variables_count)
        if load_geometries:
            types_string += ',"String"'
        with open(fname_types, 'w', newline='') as csvt:
            csvt.write(types_string)
        with open(fname, 'w', newline='') as csv_file:
            # NOTE: count the comma-separated codes, not the characters
            # of the string (len(country_iso_codes) was the character
            # count, making the progress percentage meaningless and
            # raising ZeroDivisionError on an empty string)
            n_countries_to_download = len(country_iso_codes.split(','))
            n_downloaded_countries = 0
            msg_bar_item, progress = create_progress_message_bar(
                message_bar, 'Downloading socioeconomic data...')
            for line in result.iter_lines():
                csv_file.write(line + os.linesep)
                n_downloaded_countries += 1
                progress.setValue(
                    n_downloaded_countries
                    / n_countries_to_download * 100)
            clear_progress_message_bar(message_bar, msg_bar_item)
        msg = 'The socioeconomic data has been saved into %s' % fname
        return fname, msg
    else:
        raise SvNetworkError(result.content)
def purge_zones_without_loss_points(zonal_layer, loss_attrs_dict, iface):
    """
    Delete from the zonal layer the zones that contain no loss points

    :param zonal_layer: vector layer whose features are the zones
    :param loss_attrs_dict: mapping of logical names to layer fields;
        its 'count' entry is the per-zone loss-point count field
    :param iface: QGIS interface, used to show progress and messages
    :returns: the zonal layer, purged in place
    """
    n_zones = zonal_layer.featureCount()
    msg = tr("Purging zones containing no loss points...")
    msg_bar_item, progress = create_progress_message_bar(
        iface.messageBar(), msg)
    with edit(zonal_layer):
        for zone_idx, feat in enumerate(zonal_layer.getFeatures()):
            progress.setValue(zone_idx / float(n_zones) * 100)
            # a zone whose loss-point count is zero carries no
            # information: delete it
            if feat[loss_attrs_dict['count']] == 0:
                zonal_layer.deleteFeature(feat.id())
    clear_progress_message_bar(iface.messageBar(), msg_bar_item)
    msg = "Zones containing no loss points were deleted"
    log_msg(msg, level='W', message_bar=iface.messageBar())
    return zonal_layer
def calculate_vector_stats_aggregating_by_zone_id(
        loss_layer, zonal_layer, zone_id_in_losses_attr_name,
        zone_id_in_zones_attr_name, loss_attr_names, loss_attrs_dict,
        iface, old_field_to_new_field=None, extra=True):
    """
    Once we know the zone id of each point in the loss map, we can
    count how many points are in each zone, sum and average their values

    :param loss_layer: point layer carrying the loss values
    :param zonal_layer: polygon layer whose features are the zones
    :param zone_id_in_losses_attr_name: name of the zone-id field in
        the loss layer; if falsy, zone_id_in_zones_attr_name is used
    :param zone_id_in_zones_attr_name: name of the zone-id field in
        the zonal layer
    :param loss_attr_names: names of the loss attributes to aggregate
    :param loss_attrs_dict: maps each loss attribute to its 'sum'/'avg'
        target field names in the zonal layer (plus a 'count' field)
    :param iface: QGIS interface, used for progress reporting
    :param old_field_to_new_field: optional mapping used to read the
        loss values from renamed fields of the loss layer
    :param extra: if True, also write point counts and averages
        (not only sums) into the zonal layer
    :returns: (loss_layer, zonal_layer, loss_attrs_dict)
    """
    tot_points = loss_layer.featureCount()
    msg = tr("Step 2 of 3: aggregating losses by zone id...")
    msg_bar_item, progress = create_progress_message_bar(
        iface.messageBar(), msg)
    # if the user picked an attribute from the loss layer, to be
    # used as zone id, use that; otherwise, use the attribute
    # copied from the zonal layer
    if not zone_id_in_losses_attr_name:
        zone_id_in_losses_attr_name = zone_id_in_zones_attr_name
    with TraceTimeManager(msg, DEBUG):
        # first pass: accumulate per-zone, per-attribute count and sum
        # zone_stats[zone_id][loss_attr_name] -> {'count': n, 'sum': s}
        zone_stats = {}
        for current_point, point_feat in enumerate(
                loss_layer.getFeatures()):
            progress_perc = current_point / float(tot_points) * 100
            progress.setValue(progress_perc)
            zone_id = point_feat[zone_id_in_losses_attr_name]
            if zone_id not in zone_stats:
                zone_stats[zone_id] = {}
            for loss_attr_name in loss_attr_names:
                if loss_attr_name not in zone_stats[zone_id]:
                    zone_stats[zone_id][loss_attr_name] = {
                        'count': 0, 'sum': 0.0}
                if old_field_to_new_field:
                    loss_value = point_feat[
                        old_field_to_new_field[loss_attr_name]]
                else:
                    loss_value = point_feat[loss_attr_name]
                zone_stats[zone_id][loss_attr_name]['count'] += 1
                zone_stats[zone_id][loss_attr_name]['sum'] += loss_value
    clear_progress_message_bar(iface.messageBar(), msg_bar_item)
    if extra:
        msg = tr("Step 3 of 3: writing point counts, loss sums and"
                 " averages into the zonal layer...")
    else:
        msg = tr("Step 3 of 3: writing sums into the zonal layer...")
    with TraceTimeManager(msg, DEBUG):
        tot_zones = zonal_layer.featureCount()
        msg_bar_item, progress = create_progress_message_bar(
            iface.messageBar(), msg)
        with edit(zonal_layer):
            # resolve the target field indexes once, before the loop
            if extra:
                count_idx = zonal_layer.fieldNameIndex(
                    loss_attrs_dict['count'])
                avg_idx = {}
            sum_idx = {}
            for loss_attr_name in loss_attr_names:
                sum_idx[loss_attr_name] = zonal_layer.fieldNameIndex(
                    loss_attrs_dict[loss_attr_name]['sum'])
                if extra:
                    avg_idx[loss_attr_name] = zonal_layer.fieldNameIndex(
                        loss_attrs_dict[loss_attr_name]['avg'])
            # second pass: write the aggregated values into each zone
            for current_zone, zone_feat in enumerate(
                    zonal_layer.getFeatures()):
                progress_perc = current_zone / float(tot_zones) * 100
                progress.setValue(progress_perc)
                # get the id of the current zone
                zone_id = zone_feat[zone_id_in_zones_attr_name]
                # initialize points_count, loss_sum and loss_avg
                # to zero, and update them afterwards only if the zone
                # contains at least one loss point
                points_count = 0
                if extra:
                    loss_avg = {}
                loss_sum = {}
                for loss_attr_name in loss_attr_names:
                    loss_sum[loss_attr_name] = 0.0
                    if extra:
                        loss_avg[loss_attr_name] = 0.0
                # retrieve count and sum from the dictionary, using
                # the zone id as key to get the values from the
                # corresponding dict (otherwise, keep zero values)
                if zone_id in zone_stats:
                    for loss_attr_name in loss_attr_names:
                        loss_sum[loss_attr_name] = \
                            zone_stats[zone_id][loss_attr_name]['sum']
                        # NOTE: points_count is overwritten per attribute;
                        # this assumes the count is the same for every
                        # loss attribute of a zone (true for the first
                        # pass above, which increments them together)
                        points_count = \
                            zone_stats[zone_id][loss_attr_name]['count']
                        if extra:
                            # division by zero should be impossible,
                            # because we are computing this only for
                            # zones containing at least one point
                            # (otherwise we keep all zeros)
                            loss_avg[loss_attr_name] = (
                                loss_sum[loss_attr_name] / points_count)
                            # NOTE: The following line looks redundant
                            zone_stats[zone_id][loss_attr_name]['avg'] = (
                                loss_avg[loss_attr_name])
                # without casting to int and to float, it wouldn't work
                fid = zone_feat.id()
                if extra:
                    zonal_layer.changeAttributeValue(
                        fid, count_idx, int(points_count))
                for loss_attr_name in loss_attr_names:
                    if points_count:
                        zonal_layer.changeAttributeValue(
                            fid, sum_idx[loss_attr_name],
                            float(loss_sum[loss_attr_name]))
                        if extra:
                            zonal_layer.changeAttributeValue(
                                fid, avg_idx[loss_attr_name],
                                float(loss_avg[loss_attr_name]))
                    else:
                        # if no points were found in that region, let both
                        # sum and average be NULL instead of 0
                        zonal_layer.changeAttributeValue(
                            fid, sum_idx[loss_attr_name],
                            QPyNullVariant(float))
                        if extra:
                            zonal_layer.changeAttributeValue(
                                fid, avg_idx[loss_attr_name],
                                QPyNullVariant(float))
    clear_progress_message_bar(iface.messageBar(), msg_bar_item)
    notify_loss_aggregation_by_zone_complete(
        loss_attrs_dict, loss_attr_names, iface, extra=extra)
    return (loss_layer, zonal_layer, loss_attrs_dict)
def _add_zone_id_to_points_internal(iface, loss_layer, zonal_layer,
                                    zone_id_in_zones_attr_name):
    """
    On the hypothesis that we don't know what is the zone in which
    each point was collected we use an alternative implementation of
    what SAGA does, i.e., we add a field to the loss layer, containing
    the id of the zone to which it belongs. In order to achieve that:

    * we create a spatial index of the loss points
    * for each zone (in the layer containing zonally-aggregated SVI)
    * we identify points that are inside the zone's bounding box
    * we check if each of these points is actually inside the
      zone's geometry; if it is:
    * copy the zone id into the new field of the loss point

    Notes:

    * loss_layer contains the not aggregated loss points
    * zonal_layer contains the zone geometries

    :returns: (loss_layer_plus_zones, zone_id_in_losses_attr_name),
        i.e. the labeled copy of the loss layer and the name of the
        field holding the zone id in it
    """
    # make a copy of the loss layer and use that from now on
    add_to_registry = True if DEBUG else False
    loss_layer_plus_zones = \
        ProcessLayer(loss_layer).duplicate_in_memory(
            new_name='Loss plus zone labels',
            add_to_registry=add_to_registry)
    # add to it the new attribute that will contain the zone id
    # and to do that we need to know the type of the zone id field
    zonal_layer_fields = zonal_layer.fields()
    zone_id_field_variant, zone_id_field_type_name = [
        (field.type(), field.typeName())
        for field in zonal_layer_fields
        if field.name() == zone_id_in_zones_attr_name][0]
    zone_id_field = QgsField(
        zone_id_in_zones_attr_name, zone_id_field_variant)
    zone_id_field.setTypeName(zone_id_field_type_name)
    # add_attributes may rename the field on collision; the dict maps
    # requested names to the names actually assigned
    # NOTE(review): dict.values()[0] is a Python-2 idiom (a list in
    # py2, a view in py3) — confirm the target interpreter
    assigned_attr_names_dict = \
        ProcessLayer(loss_layer_plus_zones).add_attributes(
            [zone_id_field])
    zone_id_in_losses_attr_name = assigned_attr_names_dict.values()[0]
    # get the index of the new attribute, to be used to update its values
    zone_id_attr_idx = loss_layer_plus_zones.fieldNameIndex(
        zone_id_in_losses_attr_name)
    # to show the overall progress, cycling through points
    tot_points = loss_layer_plus_zones.featureCount()
    msg = tr("Step 2 of 3: creating spatial index for loss points...")
    msg_bar_item, progress = create_progress_message_bar(
        iface.messageBar(), msg)
    # create spatial index
    with TraceTimeManager(
            tr("Creating spatial index for loss points..."), DEBUG):
        spatial_index = QgsSpatialIndex()
        for current_point, loss_feature in enumerate(
                loss_layer_plus_zones.getFeatures()):
            progress_perc = current_point / float(tot_points) * 100
            progress.setValue(progress_perc)
            spatial_index.insertFeature(loss_feature)
    clear_progress_message_bar(iface.messageBar(), msg_bar_item)
    with edit(loss_layer_plus_zones):
        # to show the overall progress, cycling through zones
        tot_zones = zonal_layer.featureCount()
        msg = tr("Step 3 of 3: labeling points by zone id...")
        msg_bar_item, progress = create_progress_message_bar(
            iface.messageBar(), msg)
        for current_zone, zone_feature in enumerate(
                zonal_layer.getFeatures()):
            progress_perc = current_zone / float(tot_zones) * 100
            progress.setValue(progress_perc)
            msg = "{0}% - Zone: {1} on {2}".format(
                progress_perc, zone_feature.id(), tot_zones)
            with TraceTimeManager(msg, DEBUG):
                zone_geometry = zone_feature.geometry()
                # Find ids of points within the bounding box of the zone
                point_ids = spatial_index.intersects(
                    zone_geometry.boundingBox())
                # check if the points inside the bounding box of the zone
                # are actually inside the zone's geometry
                for point_id in point_ids:
                    msg = "Checking if point {0} is actually inside " \
                          "the zone".format(point_id)
                    with TraceTimeManager(msg, DEBUG):
                        # Get the point feature by the point's id
                        # NOTE(review): .next() is the Python-2 iterator
                        # protocol (py3 would use next(...)) — confirm
                        # the target interpreter
                        request = QgsFeatureRequest().setFilterFid(
                            point_id)
                        point_feature = loss_layer_plus_zones.getFeatures(
                            request).next()
                        point_geometry = QgsGeometry(
                            point_feature.geometry())
                        # check if the point is actually inside the zone
                        # and it is not only contained by its bounding box
                        if zone_geometry.contains(point_geometry):
                            zone_id = zone_feature[
                                zone_id_in_zones_attr_name]
                            loss_layer_plus_zones.changeAttributeValue(
                                point_id, zone_id_attr_idx, zone_id)
        # for consistency with the SAGA algorithm, remove points that
        # don't belong to any zone
        for point_feature in loss_layer_plus_zones.getFeatures():
            if not point_feature[zone_id_in_losses_attr_name]:
                loss_layer_plus_zones.deleteFeature(point_feature.id())
    clear_progress_message_bar(iface.messageBar(), msg_bar_item)
    return loss_layer_plus_zones, zone_id_in_losses_attr_name
def collect_zonal_data(self, probs_field_names, integrate_svi=False,
                       zone_field_name=None):
    """
    Build two dictionaries mapping each zone id to the damage-state
    probabilities and to the asset refs of the features in that zone.

    :param probs_field_names: names of the fields holding the damage
        state probabilities
    :param integrate_svi: if True (and an svi_layer is available),
        group by the zone id read from zone_field_name; otherwise all
        features are grouped under the single key 'ALL'
    :param zone_field_name: name of the zone-id field (used only when
        integrate_svi is True)
    :returns: (zonal_dmg_by_asset_probs, zonal_asset_refs), both
        defaultdict(list) keyed by zone id
    """
    # build dictionary zone_id -> dmg_by_asset_probs
    zonal_dmg_by_asset_probs = defaultdict(list)
    zonal_asset_refs = defaultdict(list)
    try:
        first_feat = self.dmg_by_asset_features[0]
    except IndexError:
        # no features at all: return the empty mappings
        return zonal_dmg_by_asset_probs, zonal_asset_refs
    probs_fields_idxs = sorted([
        first_feat.fieldNameIndex(probs_field_name)
        for probs_field_name in probs_field_names])
    if integrate_svi and self.svi_layer is not None:
        # FIXME self.svi_field_name is temporarily ignored
        # svi_by_zone = dict()
        for zone_feat in self.svi_layer.getFeatures():
            zone_id = str(zone_feat[zone_field_name])
            # FIXME self.svi_field_name is temporarily ignored
            # svi_value = zone_feat[self.svi_field_name]
            # svi_by_zone[zone_id] = svi_value
        msg = 'Reading damage state probabilities...'
        msg_bar_item, progress = create_progress_message_bar(
            self.iface.messageBar(), msg)
        tot_features = len(self.dmg_by_asset_features)
        for feat_idx, dmg_by_asset_feat in enumerate(
                self.dmg_by_asset_features, start=1):
            zone_id = dmg_by_asset_feat[zone_field_name]
            # FIXME: hack to handle case in which the zone id is an
            # integer but it is stored as Real
            try:
                zone_id = str(int(zone_id))
            except Exception:
                zone_id = str(zone_id)
            # FIXME: same hack as above
            asset_ref = dmg_by_asset_feat['asset_ref']
            try:
                asset_ref = str(int(asset_ref))
            except Exception:
                asset_ref = str(asset_ref)
            dmg_by_asset_probs = [dmg_by_asset_feat.attributes()[idx]
                                  for idx in probs_fields_idxs]
            zonal_dmg_by_asset_probs[zone_id].append(dmg_by_asset_probs)
            zonal_asset_refs[zone_id].append(asset_ref)
            progress_perc = feat_idx / float(tot_features) * 100
            progress.setValue(progress_perc)
        clear_progress_message_bar(self.iface.messageBar(), msg_bar_item)
    else:  # ignore svi
        msg = 'Reading damage state probabilities...'
        msg_bar_item, progress = create_progress_message_bar(
            self.iface.messageBar(), msg)
        tot_features = len(self.dmg_by_asset_features)
        # NOTE: the enumerate variable is named feat_idx (as in the
        # branch above); the previous name 'idx' was shadowed by the
        # list-comprehension variable below, which leaks in Python 2
        # and corrupted the progress computation
        for feat_idx, dmg_by_asset_feat in enumerate(
                self.dmg_by_asset_features, start=1):
            dmg_by_asset_probs = [dmg_by_asset_feat.attributes()[idx]
                                  for idx in probs_fields_idxs]
            asset_ref = dmg_by_asset_feat['asset_ref']
            zonal_dmg_by_asset_probs['ALL'].append(dmg_by_asset_probs)
            zonal_asset_refs['ALL'].append(asset_ref)
            progress_perc = feat_idx / float(tot_features) * 100
            progress.setValue(progress_perc)
        clear_progress_message_bar(self.iface.messageBar(), msg_bar_item)
    return zonal_dmg_by_asset_probs, zonal_asset_refs
def generate_simulation_recovery_curve(
        self, timeList, LossBasedDamageStateProbabilities,
        RecoveryBasedDamageStateProbabilities, inspectionTimes,
        recoveryTimes, repairTimes, assessmentTimes, mobilizationTimes,
        zone_id, asset_refs, zone_index, n_zones, simulation,
        n_simulations, seed=None):
    """
    Compute, for one simulation, the community-level recovery curve of
    a zone by summing the building-level recovery functions of all its
    buildings.

    :param timeList: time points at which the recovery level is sampled
    :param LossBasedDamageStateProbabilities: per-building probability
        lists; NOTE: only its length is used here (number of buildings)
    :param RecoveryBasedDamageStateProbabilities: per-building damage
        state probability distributions fed into Building
    :param inspectionTimes, recoveryTimes, repairTimes,
        assessmentTimes, mobilizationTimes: timing data passed to
        Building (semantics defined by the Building class — see there)
    :param zone_id: id of the zone (used in messages and file names)
    :param asset_refs: per-building asset references, indexed like the
        probability lists
    :param zone_index: 1-based position of the zone, for the message
    :param n_zones: total number of zones, for the message
    :param simulation: 0-based index of the current simulation
    :param n_simulations: total number of simulations, for the message
    :param seed: optional seed forwarded to the building-level
        recovery function generation
    :returns: list of summed recovery levels, one per time point
        (not normalized by the number of buildings)
    """
    # Looping over all buildings in community
    # Initialize building level recovery function
    simulationRecoveryFunction = [
        0 for x in range(len(timeList))]
    msg = ('Calculating recovery curve for '
           'zone %s (%s/%s), simulation %s/%s'
           % (zone_id, zone_index, n_zones, simulation + 1,
              n_simulations))
    msg_bar_item, progress = create_progress_message_bar(
        self.iface.messageBar(), msg)
    tot_bldgs = len(LossBasedDamageStateProbabilities)
    # TODO: use enumerate instead
    # TODO: perhaps iterate enumerating by asset_ref
    for bldg_idx in range(tot_bldgs):
        # Generate recovery function for current
        # building/simulation using the given damage state
        # probability distribution
        currentSimulationBuildingLevelDamageStateProbabilities = \
            RecoveryBasedDamageStateProbabilities[bldg_idx]
        # call building class within Napa Data
        # PAOLO: building number is not used. Instead, we need to
        # make available to the building all the imported data
        napa_bldg = Building(
            self.iface, inspectionTimes, recoveryTimes, repairTimes,
            currentSimulationBuildingLevelDamageStateProbabilities,
            timeList, assessmentTimes, mobilizationTimes)
        approach = self.approach
        # approach can be aggregate or disaggregate
        building_level_recovery_function = \
            napa_bldg.generateBldgLevelRecoveryFunction(approach, seed)
        # optionally persist the per-building curve to a text file
        if self.output_data_dir is not None and self.save_bldg_curves:
            output_by_building_dir = os.path.join(
                self.output_data_dir, 'by_building')
            if not os.path.exists(output_by_building_dir):
                os.makedirs(output_by_building_dir)
            asset_ref = asset_refs[bldg_idx]
            output_filename = os.path.join(
                output_by_building_dir,
                "zone_%s_bldg_%s.txt" % (zone_id, asset_ref))
            with open(output_filename, 'w') as f:
                f.write(str(building_level_recovery_function))
        # The following lines plot building level curves
        # fig = plt.figure()
        # plt.plot(timeList, building_level_recovery_function)
        # plt.xlabel('Time (days)')
        # plt.ylabel('Normalized recovery level')
        # plt.title('Building level recovery curve')
        # plt.ylim((0.0, 1.2))
        # plt.show()
        # plt.close(fig)
        # Assign buidling level recovery function
        # TODO: use enumerate instead
        for timePoint in range(len(timeList)):
            simulationRecoveryFunction[timePoint] += \
                building_level_recovery_function[timePoint]
        progress_perc = bldg_idx / float(tot_bldgs) * 100
        progress.setValue(progress_perc)
    clear_progress_message_bar(self.iface.messageBar(), msg_bar_item)
    return simulationRecoveryFunction