def _aggregate_using_geometries(self, extra=True):
    loss_layer_path = os.path.join(
        self.data_dir_name, 'loss_points.shp')
    orig_loss_layer = QgsVectorLayer(loss_layer_path, 'Loss points', 'ogr')
    zonal_layer_path = os.path.join(
        self.data_dir_name, 'svi_zones.shp')
    orig_zonal_layer = QgsVectorLayer(
        zonal_layer_path, 'SVI zones', 'ogr')
    # avoid modifying the original layers
    copied_loss_layer = ProcessLayer(orig_loss_layer).duplicate_in_memory()
    copied_zonal_layer = ProcessLayer(
        orig_zonal_layer).duplicate_in_memory()
    zone_id_in_losses_attr_name = None
    zone_id_in_zones_attr_name = None
    res = calculate_zonal_stats(copied_loss_layer,
                                copied_zonal_layer,
                                self.loss_attr_names,
                                self.loss_layer_is_vector,
                                zone_id_in_losses_attr_name,
                                zone_id_in_zones_attr_name,
                                IFACE,
                                extra=extra)
    (output_loss_layer, output_zonal_layer, output_loss_attrs_dict) = res
    _, output_loss_layer_shp_path = tempfile.mkstemp(suffix='.shp')
    _, output_zonal_layer_shp_path = tempfile.mkstemp(suffix='.shp')
    save_layer_as_shapefile(output_loss_layer, output_loss_layer_shp_path)
    save_layer_as_shapefile(output_zonal_layer, output_zonal_layer_shp_path)
    output_loss_layer = QgsVectorLayer(
        output_loss_layer_shp_path, 'Loss points plus zone ids', 'ogr')
    output_zonal_layer = QgsVectorLayer(
        output_zonal_layer_shp_path, 'Zonal layer', 'ogr')
    expected_loss_layer_path = os.path.join(
        self.data_dir_name, 'loss_points_added_zone_ids.shp')
    expected_loss_layer = QgsVectorLayer(expected_loss_layer_path,
                                         'Loss points plus zone ids',
                                         'ogr')
    if extra:
        # adding also count and avg
        expected_zonal_layer_path = os.path.join(
            self.data_dir_name, 'svi_zones_plus_loss_stats_zone_ids.shp')
    else:
        # sum only
        expected_zonal_layer_path = os.path.join(
            self.data_dir_name,
            'svi_zones_plus_loss_stats_zone_ids_sum_only.shp')
    expected_zonal_layer = QgsVectorLayer(
        expected_zonal_layer_path, 'Expected zonal layer', 'ogr')
    self._check_output_layer(output_loss_layer, expected_loss_layer)
    self._check_output_layer(output_zonal_layer, expected_zonal_layer)

def test_aggregate_using_zone_id(self):
    loss_layer_path = os.path.join(
        self.data_dir_name, 'loss_points_having_zone_ids.shp')
    orig_loss_layer = QgsVectorLayer(
        loss_layer_path, 'Loss points having zone ids', 'ogr')
    zonal_layer_path = os.path.join(
        self.data_dir_name, 'svi_zones.shp')
    orig_zonal_layer = QgsVectorLayer(
        zonal_layer_path, 'SVI zones', 'ogr')
    # avoid modifying the original layers
    copied_loss_layer = ProcessLayer(orig_loss_layer).duplicate_in_memory()
    copied_zonal_layer = ProcessLayer(
        orig_zonal_layer).duplicate_in_memory()
    zone_id_in_zones_attr_name = 'ZONE_NAME'
    zone_id_in_losses_attr_name = 'ZONE_NAME'
    res = calculate_zonal_stats(copied_loss_layer,
                                copied_zonal_layer,
                                self.loss_attr_names,
                                self.loss_layer_is_vector,
                                zone_id_in_losses_attr_name,
                                zone_id_in_zones_attr_name,
                                IFACE)
    (output_loss_layer, output_zonal_layer, output_loss_attrs_dict) = res
    _, output_loss_layer_shp_path = tempfile.mkstemp(suffix='.shp')
    _, output_zonal_layer_shp_path = tempfile.mkstemp(suffix='.shp')
    save_layer_as_shapefile(output_loss_layer, output_loss_layer_shp_path)
    save_layer_as_shapefile(output_zonal_layer, output_zonal_layer_shp_path)
    output_loss_layer = QgsVectorLayer(
        output_loss_layer_shp_path, 'Loss points having zone ids', 'ogr')
    output_zonal_layer = QgsVectorLayer(
        output_zonal_layer_shp_path, 'Zonal layer', 'ogr')
    expected_zonal_layer_path = os.path.join(
        self.data_dir_name, 'svi_zones_plus_loss_stats_zone_names.shp')
    expected_zonal_layer = QgsVectorLayer(
        expected_zonal_layer_path, 'Expected zonal layer', 'ogr')
    self._check_output_layer(output_zonal_layer, expected_zonal_layer)

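# NOTE: the _check_output_layer helper called by the two tests above is not
# part of this excerpt. A minimal sketch of what such a helper could look
# like, assuming it delegates to ProcessLayer.has_same_content_as (the same
# check used explicitly in test_calculate_svi below); the method name and the
# pprint-on-failure behaviour are assumptions, not the actual implementation:
def _check_output_layer(self, output_layer, expected_layer):
    res = ProcessLayer(output_layer).has_same_content_as(expected_layer)
    if not res:
        # print both layers to ease debugging when the comparison fails
        ProcessLayer(output_layer).pprint(usage='testing')
        ProcessLayer(expected_layer).pprint(usage='testing')
    self.assertEqual(res, True)
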
def import_loss_layer_from_csv(self,
                               csv_file_path,
                               dest_shp=None,
                               delete_lon_lat=False):
    # FIXME: hardcoded field names
    longitude_field = 'LON'
    latitude_field = 'LAT'
    lines_to_skip_count = count_heading_commented_lines(csv_file_path)
    url = QUrl.fromLocalFile(csv_file_path)
    url.addQueryItem('type', 'csv')
    url.addQueryItem('xField', longitude_field)
    url.addQueryItem('yField', latitude_field)
    url.addQueryItem('spatialIndex', 'no')
    url.addQueryItem('subsetIndex', 'no')
    url.addQueryItem('watchFile', 'no')
    url.addQueryItem('delimiter', ',')
    url.addQueryItem('crs', 'epsg:4326')
    url.addQueryItem('skipLines', str(lines_to_skip_count))
    url.addQueryItem('trimFields', 'yes')
    layer_uri = str(url.toEncoded())
    csv_layer = QgsVectorLayer(layer_uri, 'Loss', 'delimitedtext')
    dest_filename = dest_shp or QFileDialog.getSaveFileName(
        self,
        'Save loss shapefile as...',
        os.path.expanduser("~"),
        'Shapefiles (*.shp)')
    if dest_filename:
        if dest_filename[-4:] != ".shp":
            dest_filename += ".shp"
    else:
        return
    result = save_layer_as_shapefile(csv_layer, dest_filename)
    if result != QgsVectorFileWriter.NoError:
        raise RuntimeError('Could not save shapefile')
    shp_layer = QgsVectorLayer(
        dest_filename, 'Loss data', 'ogr')
    if delete_lon_lat:
        ProcessLayer(shp_layer).delete_attributes(
            [longitude_field, latitude_field])
    if shp_layer.isValid():
        QgsMapLayerRegistry.instance().addMapLayer(shp_layer)
    else:
        msg = 'Invalid loss map'
        log_msg(msg, level='C', message_bar=self.iface.messageBar())
        return None
    return shp_layer

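# The count_heading_commented_lines helper used above is defined elsewhere in
# the plugin. A minimal sketch of an equivalent function, assuming that header
# comments are the leading lines starting with '#' (the actual implementation
# may differ):
def count_heading_commented_lines(file_path):
    # count how many consecutive lines at the top of the file are comments,
    # so that the delimited-text provider can be told to skip them
    count = 0
    with open(file_path) as csv_file:
        for line in csv_file:
            if line.startswith('#'):
                count += 1
            else:
                break
    return count
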
def work(self):
    # To upload the layer to the platform, we need it to be a shapefile in
    # EPSG:4326. So we check whether the active layer satisfies both
    # conditions and, if it doesn't, we save it as a new shapefile in that
    # projection
    data_file = '%s%s' % (self.file_stem, '.shp')
    projection = self.current_layer.crs().geographicCRSAuthId()
    if (self.current_layer.storageType() == 'ESRI Shapefile'
            and projection == 'EPSG:4326'):
        # copy the shapefile (with all its companion files) into the
        # temporary directory, using self.file_stem as name
        self.set_message.emit(tr('Preparing the files to be uploaded...'))
        layer_source = self.current_layer.publicSource()
        layer_source_stem = layer_source[:-4]  # remove '.shp'
        for ext in ['shp', 'dbf', 'shx', 'prj']:
            src = "%s.%s" % (layer_source_stem, ext)
            dst = "%s.%s" % (self.file_stem, ext)
            shutil.copyfile(src, dst)
    else:
        # if it's not a shapefile or it is in a bad projection,
        # we need to build a shapefile from it
        self.set_message.emit(
            tr('Writing the shapefile to be uploaded...'))
        result = save_layer_as_shapefile(
            self.current_layer,
            data_file,
            crs=QgsCoordinateReferenceSystem(
                4326, QgsCoordinateReferenceSystem.EpsgCrsId))
        if result != QgsVectorFileWriter.NoError:
            raise RuntimeError('Could not save shapefile')
    file_size = os.path.getsize(data_file)
    file_size += os.path.getsize(self.file_stem + '.shx')
    file_size += os.path.getsize(self.file_stem + '.dbf')
    # convert bytes to MB
    file_size_mb = file_size / 1024 / 1024
    self.upload_size_msg = tr('Uploading ~%s MB...' % file_size_mb)
    self.set_message.emit(self.upload_size_msg)
    permissions = {
        "authenticated": "_none",
        "anonymous": "_none",
        "users": [[self.username, "layer_readwrite"],
                  [self.username, "layer_admin"]]
    }
    data = {
        'layer_title': os.path.basename(self.file_stem),
        'base_file': open('%s.shp' % self.file_stem, 'rb'),
        'dbf_file': open('%s.dbf' % self.file_stem, 'rb'),
        'shx_file': open('%s.shx' % self.file_stem, 'rb'),
        'prj_file': open('%s.prj' % self.file_stem, 'rb'),
        'xml_file': open('%s.xml' % self.file_stem, 'rb'),
        'charset': 'UTF-8',
        'permissions': json.dumps(permissions)
    }
    # generate headers and a data generator in a requests-compatible format
    # and provide our progress callback
    data_generator, headers = multipart_encode_for_requests(
        data, cb=self.progress_cb)
    # use the requests lib to issue a post request with our data attached
    r = self.session.post(self.hostname + '/layers/upload',
                          data=data_generator,
                          headers=headers)
    try:
        response = json.loads(r.text)
        return self.hostname + response['url'], True
    except KeyError:
        if 'errors' in response:
            raise KeyError(response['errors'])
        else:
            raise KeyError("The server did not provide error messages")
    except ValueError:
        raise RuntimeError(r.text)

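# The progress_cb passed to multipart_encode_for_requests above is not shown
# in this excerpt. A minimal sketch of what such a callback could look like,
# assuming a poster-style (param, current, total) signature and reusing the
# set_message signal already emitted in work(); this is an illustration, not
# the actual implementation:
def progress_cb(self, param, current, total):
    if total:
        # report the fraction of bytes sent so far, appended to the
        # "Uploading ~N MB..." message built in work()
        done = int(100 * float(current) / total)
        self.set_message.emit('%s (%s%%)' % (self.upload_size_msg, done))
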
def write_output(res_layer, data_dir_name, res_layer_name):
    res_layer_path = os.path.join(data_dir_name, res_layer_name + '.shp')
    write_success = save_layer_as_shapefile(res_layer, res_layer_path)
    if write_success != QgsVectorFileWriter.NoError:
        raise RuntimeError('Could not save shapefile')

def test_calculate_svi(self):
    proj_def = deepcopy(self.project_definition)
    svi_node = proj_def['children'][1]
    added_attrs_ids, discarded_feats, edited_node, any_change = \
        calculate_composite_variable(IFACE, self.layer, svi_node)
    proj_def['children'][1] = edited_node
    self.assertEqual(added_attrs_ids, set([9, 10, 11]))
    expected_discarded_feats = set([
        DiscardedFeature(1, 'Missing value'),
        DiscardedFeature(2, 'Missing value')])
    self.assertEqual(discarded_feats, expected_discarded_feats)
    self.assertEqual(any_change, True)
    self.assertEqual(proj_def, proj_def_svi_calc_first_round)
    # # to rebuild the outputs
    # res_layer_name = 'svi_calculation_first_round'
    # write_output(self.layer, self.data_dir_name, res_layer_name)
    _, out_layer_shp_path = tempfile.mkstemp(suffix='.shp')
    save_layer_as_shapefile(self.layer, out_layer_shp_path)
    out_layer = QgsVectorLayer(out_layer_shp_path,
                               'svi_calculation_first_round', 'ogr')
    expected_layer_path = os.path.join(self.data_dir_name,
                                       'svi_calculation_first_round.shp')
    expected_layer = QgsVectorLayer(expected_layer_path,
                                    'svi_calculation_first_round', 'ogr')
    res = ProcessLayer(out_layer).has_same_content_as(expected_layer)
    if not res:
        ProcessLayer(self.layer).pprint(usage='testing')
        ProcessLayer(expected_layer).pprint(usage='testing')
    self.assertEqual(res, True)

    # If the attributes have already been added to the layer, they should
    # be re-used instead of adding new ones, therefore the project
    # definition should be the same as it was after the first round
    svi_node = proj_def['children'][1]
    added_attrs_ids, discarded_feats, edited_node, any_change = \
        calculate_composite_variable(IFACE, self.layer, svi_node)
    proj_def['children'][1] = edited_node
    self.assertEqual(added_attrs_ids, set([]))
    expected_discarded_feats = set([
        DiscardedFeature(1, 'Missing value'),
        DiscardedFeature(2, 'Missing value')])
    self.assertEqual(discarded_feats, expected_discarded_feats)
    self.assertEqual(any_change, True)
    self.assertEqual(proj_def, proj_def_svi_calc_first_round)
    _, out_layer_shp_path = tempfile.mkstemp(suffix='.shp')
    save_layer_as_shapefile(self.layer, out_layer_shp_path)
    out_layer = QgsVectorLayer(out_layer_shp_path,
                               'svi_calculation_second_round', 'ogr')
    res = ProcessLayer(out_layer).has_same_content_as(expected_layer)
    if not res:
        ProcessLayer(self.layer).pprint(usage='testing')
        ProcessLayer(expected_layer).pprint(usage='testing')
    self.assertEqual(res, True)
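
# DiscardedFeature, used in the expected results above, is imported from the
# plugin code and not defined in this excerpt. A minimal sketch of an
# equivalent structure, assuming it only needs to carry a feature id and the
# reason for discarding, and to compare by value so that instances can be
# collected in a set (the real class may hold more state):
import collections

DiscardedFeature = collections.namedtuple('DiscardedFeature',
                                          ['feature_id', 'reason'])

# e.g. DiscardedFeature(1, 'Missing value') == DiscardedFeature(1, 'Missing value')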