Example #1
 def test__reproduce__not_null_constraint_failed(self):
     geom_wkt = 'POLYGON((1.9952502916543926 43.079137301616434,1.8083614437225106 43.110281163194,1.6280391132319614 43.13999933989308,1.4543047771860391 43.168322409488,1.287178802518546 43.19527992537942,1.126680477150093 43.22090040126175,0.9728280404789855 43.24521129666272,0.8256387132257121 43.26823900332679,0.6851287265540695 43.2900088324148,0.5513133503959514 43.31054500249216,0.4177032107533156 43.3308546554019,0.4177032107533156 43.3308546554019,0.31209072545607186 42.966172534807384,0.2072770834059167 42.60138984322352,0.10324224766647609 42.23651548835664,-3.327518831779395e-05 41.87155835974214,-0.10256845388361147 41.50652732885059,-0.20438175048840848 41.14143124914533,-0.305491137731497 40.776278956093606,-0.4059141156224018 40.4110792671334,-0.5056677274094622 40.045840981598744,-0.6188735003262834 39.62838980058282,-0.6188735003262834 39.62838980058282,-0.4938090620192412 39.60834071737128,-0.36846516623385345 39.58812662484392,-0.2367679070115216 39.566753658618175,-0.09872965092928164 39.54419980869909,0.045636463325510676 39.520442055142105,0.19631650129236156 39.49545637806485,0.35329570832956364 39.469217768317954,0.516558484513175 39.441700238839005,0.686088358898322 39.412876836714965,0.8618679632011015 39.382719655976956,0.8618679632011015 39.382719655976956,0.985334472893415 39.799577386800905,1.0941941665822539 40.164279112775866,1.2038450353475123 40.52892574316672,1.3143064728956748 40.89350812553239,1.425598388091744 41.25801708090206,1.5377412226553768 41.622443403572326,1.6507559695776892 41.98677786085107,1.764664192292795 42.351011192745254,1.8794880446399878 42.71513411159017,1.9952502916543926 43.079137301616434))'
     with self.assertRaises(django.db.utils.IntegrityError):
         gg, created = GeographicLocation.objects.get_or_create(
             geometry=WKTReader().read(geom_wkt))
     geom_wkt = 'POLYGON((1.995 43.079,1.808 43.1102,1.628 43.139,1.454 43.168,1.287 43.195,1.126 43.220,0.972 43.245,0.825 43.268,0.685 43.290,0.551 43.310,0.417 43.330,0.417 43.330,0.312 42.966,0.207 42.601,0.103 42.236,0.0 41.871,-0.102 41.506,-0.204 41.141,-0.305 40.776,-0.405 40.411,-0.505 40.045,-0.618 39.628,-0.618 39.628,-0.493 39.608,-0.368 39.588,-0.236 39.566,-0.098 39.544,0.0456 39.520,0.196 39.495,0.353 39.469,0.516 39.441,0.686 39.412,0.861 39.382,0.861 39.382,0.985 39.799,1.094 40.164,1.203 40.528,1.314 40.893,1.425 41.258,1.537 41.622,1.650 41.986,1.764 42.351,1.879 42.715,1.995 43.079))'
     gg, created = GeographicLocation.objects.get_or_create(
         geometry=WKTReader().read(geom_wkt))
     self.assertTrue(created)
     gg, created = GeographicLocation.objects.get_or_create(
         geometry=WKTReader().read(geom_wkt))
     self.assertFalse(created)
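
Note: the failing and passing WKT above differ only in coordinate precision, so one workaround is to re-serialize geometries at reduced precision before get_or_create. A minimal sketch, assuming a Django version whose WKTWriter supports the precision attribute:

    from django.contrib.gis.geos import WKTReader, WKTWriter

    def read_rounded(wkt, digits=3):
        # Hypothetical helper: write and re-read the geometry so the stored
        # coordinates match the truncated form that passes in the test above.
        writer = WKTWriter()
        writer.precision = digits  # assumes WKTWriter.precision is available
        return WKTReader().read(writer.write(WKTReader().read(wkt)))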
Example #2
def add_maine_data():
    sf = shapefile.Reader("/net/home/cv/iatkin/me/metwp24p.shp")
    wktr = WKTReader()

    for me_index in range(0, 8422):
        m = MaineData()
        m.name = sf.record(me_index)[0]
        m.county = sf.record(me_index)[4]
        m.data_type = sf.record(me_index)[12]

        args = []
        shape = sf.shape(me_index)

        for point_index in range(0, len(shape.points)):
            if point_index in shape.parts:
                args.append([])
            point = shape.points[point_index]
            point = srs_to_latlng(point[0], point[1], 26919)
            args[-1].append([point[1], point[0]])

        try:
            polygon = Polygon(*args)
        except Exception:
            print(me_index)
            continue

        m.geometry = polygon
        m.save()

        if me_index % 1000 == 0:
            print "%04d/8422" % (me_index)
Example #3
    def test01_wktreader(self):
        # Creating a WKTReader instance
        wkt_r = WKTReader()
        wkt = "POINT (5 23)"

        # read() should return a GEOSGeometry
        ref = GEOSGeometry(wkt)
        g1 = wkt_r.read(wkt)
        g2 = wkt_r.read(six.text_type(wkt))

        for geom in (g1, g2):
            self.assertEqual(ref, geom)

        # Should only accept six.string_types objects.
        self.assertRaises(TypeError, wkt_r.read, 1)
        self.assertRaises(TypeError, wkt_r.read, buffer("foo"))
Example #4
    def test01_wktreader(self):
        # Creating a WKTReader instance
        wkt_r = WKTReader()
        wkt = 'POINT (5 23)'

        # read() should return a GEOSGeometry
        ref = GEOSGeometry(wkt)
        g1 = wkt_r.read(wkt)
        g2 = wkt_r.read(unicode(wkt))

        for geom in (g1, g2):
            self.assertEqual(ref, geom)

        # Should only accept basestring objects.
        self.assertRaises(TypeError, wkt_r.read, 1)
        self.assertRaises(TypeError, wkt_r.read, buffer('foo'))
Example #5
    def test01_wktreader(self):
        # Creating a WKTReader instance
        wkt_r = WKTReader()
        wkt = 'POINT (5 23)'

        # read() should return a GEOSGeometry
        ref = GEOSGeometry(wkt)
        g1 = wkt_r.read(wkt.encode())
        g2 = wkt_r.read(wkt)

        for geom in (g1, g2):
            self.assertEqual(ref, geom)

        # Should only accept six.string_types objects.
        self.assertRaises(TypeError, wkt_r.read, 1)
        self.assertRaises(TypeError, wkt_r.read, memoryview(b'foo'))
Example #6
    def test02_wktwriter(self):
        # Creating a WKTWriter instance, testing its ptr property.
        wkt_w = WKTWriter()
        self.assertRaises(TypeError, wkt_w._set_ptr, WKTReader.ptr_type())

        ref = GEOSGeometry('POINT (5 23)')
        ref_wkt = 'POINT (5.0000000000000000 23.0000000000000000)'
        self.assertEqual(ref_wkt, wkt_w.write(ref).decode())
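
Note: the 16-decimal output asserted above is the writer's default; recent Django releases also expose trim and precision attributes on WKTWriter to shorten it. A hedged sketch:

    from django.contrib.gis.geos import GEOSGeometry, WKTWriter

    wkt_w = WKTWriter()
    wkt_w.trim = True  # assumes WKTWriter.trim is available
    print(wkt_w.write(GEOSGeometry('POINT (5 23)')).decode())  # POINT (5 23)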
Example #7
File: misc.py Project: iatkin/place
def get_uncovered_area(name, steps=3):
    wktr = WKTReader()
    
    items = BoundedItem.objects.filter(LayerDisplayName__contains=name).distinct("bounds")
    places = get_item_places(name, steps=steps, return_places=True)

    p = wktr.read("GEOMETRYCOLLECTION EMPTY")

    for item in items:
        p = p.union(item.bounds)

    for place in places:
        if "County" in place.split(",")[0]:
            continue
        p = p.difference(places[place])

    return (p, places)
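
Note: the function above accumulates coverage with union() and carves places out with difference(), starting from an empty collection. The same pattern with plain geometries, as a self-contained sketch:

    from django.contrib.gis.geos import WKTReader

    r = WKTReader()
    covered = r.read('GEOMETRYCOLLECTION EMPTY')
    for wkt in ('POLYGON((0 0,0 2,2 2,2 0,0 0))',
                'POLYGON((1 1,1 3,3 3,3 1,1 1))'):
        covered = covered.union(r.read(wkt))      # accumulate bounds
    covered = covered.difference(r.read('POLYGON((0 0,0 1,1 1,1 0,0 0))'))
    print(covered.area)  # 6.0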
Example #8
    def test02_wktwriter(self):
        # Creating a WKTWriter instance, testing its ptr property.
        wkt_w = WKTWriter()
        with self.assertRaises(TypeError):
            wkt_w.ptr = WKTReader.ptr_type()

        ref = GEOSGeometry("POINT (5 23)")
        ref_wkt = "POINT (5.0000000000000000 23.0000000000000000)"
        self.assertEqual(ref_wkt, wkt_w.write(ref).decode())
Example #9
    def test01_wktreader(self):
        # Creating a WKTReader instance
        wkt_r = WKTReader()
        wkt = "POINT (5 23)"

        # read() should return a GEOSGeometry
        ref = GEOSGeometry(wkt)
        g1 = wkt_r.read(wkt.encode())
        g2 = wkt_r.read(wkt)

        for geom in (g1, g2):
            self.assertEqual(ref, geom)

        # Should only accept string objects.
        with self.assertRaises(TypeError):
            wkt_r.read(1)
        with self.assertRaises(TypeError):
            wkt_r.read(memoryview(b"foo"))
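
Note: taken together, the reader tests above track the Python 2 to 3 transition (six.text_type, unicode, then bytes). In current Django, read() accepts both str and bytes and rejects other types:

    from django.contrib.gis.geos import WKTReader

    wkt_r = WKTReader()
    assert wkt_r.read('POINT (5 23)') == wkt_r.read(b'POINT (5 23)')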
Example #10
    def process(self, uri, *args, **kwargs):
        """ Create data products
        """
        ds, created = self.get_or_create(uri, *args, **kwargs)
        fn = nansat_filename(uri)
        swath_data = {}
        # Read subswaths 
        for i in range(self.N_SUBSWATHS):
            swath_data[i] = Doppler(fn, subswath=i)

        # Get module name
        mm = self.__module__.split('.')
        module = '%s.%s' % (mm[0], mm[1])
        # Set media path (where images will be stored)
        mp = media_path(module, swath_data[i].filename)
        # Set product path (where netcdf products will be stored)
        ppath = product_path(module, swath_data[i].filename)

        # Loop subswaths, process each of them and create figures for display with leaflet
        for i in range(self.N_SUBSWATHS):
            is_corrupted = False
            # Check if the file is corrupted
            try:
                inci = swath_data[i]['incidence_angle']
            #  TODO: What kind of exception ?
            except:
                is_corrupted = True
                continue

            # Add Doppler anomaly
            swath_data[i].add_band(array=swath_data[i].anomaly(), parameters={
                'wkv':
                'anomaly_of_surface_backwards_doppler_centroid_frequency_shift_of_radar_wave'
            })

            # Get band number of DC freq, then DC polarisation
            band_number = swath_data[i]._get_band_number({
                'standard_name': 'surface_backwards_doppler_centroid_frequency_shift_of_radar_wave',
                })
            pol = swath_data[i].get_metadata(bandID=band_number, key='polarization')

            # Calculate total geophysical Doppler shift
            fdg = swath_data[i].geophysical_doppler_shift()
            swath_data[i].add_band(
                array=fdg,
                parameters={
                    'wkv': 'surface_backwards_doppler_frequency_shift_of_radar_wave_due_to_surface_velocity'
                })

            # Set filename of exported netcdf
            fn = os.path.join(ppath,
                              os.path.basename(swath_data[i].filename).split('.')[0]
                              + 'subswath%d.nc' % i)
            # Set filename of original gsar file in metadata
            swath_data[i].set_metadata(key='Originating file',
                                        value=swath_data[i].filename)
            # Export data to netcdf
            print('Exporting %s (subswath %d)' % (swath_data[i].filename, i))
            swath_data[i].export(filename=fn)

            # Add netcdf uri to DatasetURIs
            ncuri = os.path.join('file://localhost', fn)
            new_uri, created = DatasetURI.objects.get_or_create(uri=ncuri,
                                                                dataset=ds)

            # Reproject to leaflet projection
            xlon, xlat = swath_data[i].get_corners()
            d = Domain(NSR(3857),
                       '-lle %f %f %f %f -tr 1000 1000'
                       % (xlon.min(), xlat.min(), xlon.max(), xlat.max()))
            swath_data[i].reproject(d, eResampleAlg=1, tps=True)

            # Check if the reprojection failed
            try:
                inci = swath_data[i]['incidence_angle']
            except:
                is_corrupted = True
                warnings.warn('Could not read incidence angles - reprojection failed')
                continue

            # Create visualizations of the following bands (short_names)
            ingest_creates = ['valid_doppler',
                              'valid_land_doppler',
                              'valid_sea_doppler',
                              'dca',
                              'fdg']
            for band in ingest_creates:
                filename = '%s_subswath_%d.png' % (band, i)
                # check uniqueness of parameter
                param = Parameter.objects.get(short_name=band)
                fig = swath_data[i].write_figure(
                    os.path.join(mp, filename),
                    bands=band,
                    mask_array=swath_data[i]['swathmask'],
                    mask_lut={0: [128, 128, 128]},
                    transparency=[128, 128, 128])

                if isinstance(fig, Figure):
                    print('Created figure of subswath %d, band %s' % (i, band))
                else:
                    warnings.warn('Figure NOT CREATED')

                # Get or create DatasetParameter
                dsp, created = DatasetParameter.objects.get_or_create(dataset=ds,
                                                                      parameter=param)

                # Create GeographicLocation for the visualization object
                geom, created = GeographicLocation.objects.get_or_create(
                        geometry=WKTReader().read(swath_data[i].get_border_wkt()))

                # Create Visualization
                vv, created = Visualization.objects.get_or_create(
                    uri='file://localhost%s/%s' % (mp, filename),
                    title='%s (swath %d)' % (param.standard_name, i + 1),
                    geographic_location=geom
                )

                # Create VisualizationParameter
                vp, created = VisualizationParameter.objects.get_or_create(
                    visualization=vv,
                    ds_parameter=dsp
                )

        # TODO: consider merged figures like Jeong-Won has added in the development branch

        return ds, not is_corrupted
Example #11
    def get_or_create(self, uri, *args, **kwargs):
        """ Ingest gsar file to geo-spaas db
        """

        ds, created = super(DatasetManager, self).get_or_create(uri, *args, **kwargs)

        # TODO: Check if the following is necessary
        if not type(ds) == Dataset:
            return ds, False

        # set Dataset entry_title
        ds.entry_title = 'SAR Doppler'
        ds.save()

        fn = nansat_filename(uri)
        n = Nansat(fn, subswath=0)
        gg = WKTReader().read(n.get_border_wkt())

        if ds.geographic_location.geometry.area>gg.area:
            return ds, False

        # Update dataset border geometry
        # This must be done every time a Doppler file is processed. It is time
        # consuming but apparently the only way to do it. Could be checked
        # though...

        swath_data = {}
        lon = {}
        lat = {}
        astep = {}
        rstep = {}
        az_left_lon = {}
        ra_upper_lon = {}
        az_right_lon = {}
        ra_lower_lon = {}
        az_left_lat = {}
        ra_upper_lat = {}
        az_right_lat = {}
        ra_lower_lat = {}
        num_border_points = 10
        border = 'POLYGON(('

        for i in range(self.N_SUBSWATHS):
            # Read subswaths 
            swath_data[i] = Nansat(fn, subswath=i)

            # Should use nansat.domain.get_border - see nansat issue #166
            # (https://github.com/nansencenter/nansat/issues/166)
            lon[i], lat[i] = swath_data[i].get_geolocation_grids()

            # int() so the values can be used as slice steps under Python 3
            astep[i] = int(max(1, (lon[i].shape[0] / 2 * 2 - 1) / num_border_points))
            rstep[i] = int(max(1, (lon[i].shape[1] / 2 * 2 - 1) / num_border_points))

            az_left_lon[i] = lon[i][0:-1:astep[i], 0]
            az_left_lat[i] = lat[i][0:-1:astep[i], 0]

            az_right_lon[i] = lon[i][0:-1:astep[i], -1]
            az_right_lat[i] = lat[i][0:-1:astep[i], -1]

            ra_upper_lon[i] = lon[i][-1, 0:-1:rstep[i]]
            ra_upper_lat[i] = lat[i][-1, 0:-1:rstep[i]]

            ra_lower_lon[i] = lon[i][0, 0:-1:rstep[i]]
            ra_lower_lat[i] = lat[i][0, 0:-1:rstep[i]]

        lons = np.concatenate((az_left_lon[0],  ra_upper_lon[0],
                               ra_upper_lon[1], ra_upper_lon[2],
                               ra_upper_lon[3], ra_upper_lon[4],
                               np.flipud(az_right_lon[4]), np.flipud(ra_lower_lon[4]),
                               np.flipud(ra_lower_lon[3]), np.flipud(ra_lower_lon[2]),
                               np.flipud(ra_lower_lon[1]), np.flipud(ra_lower_lon[0])))

        # apply 180 degree correction to longitude - code copied from
        # get_border_wkt...
        # TODO: simplify using np.mod?
        for ilon, llo in enumerate(lons):
            lons[ilon] = copysign(acos(cos(llo * pi / 180.)) / pi * 180,
                                  sin(llo * pi / 180.))

        lats = np.concatenate((az_left_lat[0], ra_upper_lat[0],
                               ra_upper_lat[1], ra_upper_lat[2],
                               ra_upper_lat[3], ra_upper_lat[4],
                               np.flipud(az_right_lat[4]), np.flipud(ra_lower_lat[4]),
                               np.flipud(ra_lower_lat[3]), np.flipud(ra_lower_lat[2]),
                               np.flipud(ra_lower_lat[1]), np.flipud(ra_lower_lat[0])))

        poly_border = ','.join(str(llo) + ' ' + str(lla) for llo, lla in zip(lons, lats))
        wkt = 'POLYGON((%s))' % poly_border
        new_geometry = WKTReader().read(wkt)

        # Get geolocation of dataset - this must be updated
        geoloc = ds.geographic_location
        # Check geometry, return if it is the same as the stored one
        created = False
        if geoloc.geometry != new_geometry:
            # Change the dataset geolocation to cover all subswaths
            geoloc.geometry = new_geometry
            geoloc.save()
            created = True
        
        return ds, created
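
Note: Example #11 builds its border polygon by joining "lon lat" pairs into WKT; a valid ring must also end on its starting point. A minimal sketch of the same assembly:

    import numpy as np
    from django.contrib.gis.geos import WKTReader

    lons = np.array([0.0, 1.0, 1.0, 0.0, 0.0])
    lats = np.array([0.0, 0.0, 1.0, 1.0, 0.0])
    wkt = 'POLYGON((%s))' % ','.join('%s %s' % (lo, la) for lo, la in zip(lons, lats))
    border = WKTReader().read(wkt)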
Example #12
    def get_or_create(self,
                      uri,
                      n_points=10,
                      uri_filter_args=None,
                      uri_service_name=FILE_SERVICE_NAME,
                      uri_service_type=LOCAL_FILE_SERVICE,
                      *args,
                      **kwargs):
        """ Create dataset and corresponding metadata

        Parameters:
        ----------
            uri : str
                  URI to file or stream openable by Nansat
            n_points : int
                  Number of border points (default is 10)
            uri_filter_args : dict
                Extra DatasetURI filter arguments if several datasets can refer to the same URI
            uri_service_name : str
                name of the service which is used  ('dapService', 'fileService', 'http' or 'wms')
            uri_service_type : str
                type of the service which is used  ('OPENDAP', 'local', 'HTTPServer' or 'WMS')

        Returns:
        -------
            dataset and flag
        """
        if not uri_filter_args:
            uri_filter_args = {}

        # Validate uri - this should raise an exception if the uri doesn't point to a valid
        # file or stream
        validate_uri(uri)

        # Several datasets can refer to the same uri (e.g., scatterometers and svp drifters), so we
        # need to pass uri_filter_args
        uris = DatasetURI.objects.filter(uri=uri, **uri_filter_args)
        if len(uris) > 0:
            return uris[0].dataset, False

        # Open file with Nansat
        n = Nansat(nansat_filename(uri), **kwargs)

        # get metadata from Nansat and get objects from vocabularies
        n_metadata = n.get_metadata()

        entry_id = n_metadata.get('entry_id', None)
        # set compulsory metadata (source)
        platform, _ = Platform.objects.get_or_create(
            json.loads(n_metadata['platform']))
        instrument, _ = Instrument.objects.get_or_create(
            json.loads(n_metadata['instrument']))
        specs = n_metadata.get('specs', '')
        source, _ = Source.objects.get_or_create(platform=platform,
                                                 instrument=instrument,
                                                 specs=specs)

        default_char_fields = {
            # Adding NERSC_ in front of the id violates the string representation of the uuid
            #'entry_id': lambda: 'NERSC_' + str(uuid.uuid4()),
            'entry_id': lambda: str(uuid.uuid4()),
            'entry_title': lambda: 'NONE',
            'summary': lambda: 'NONE',
        }

        # set optional CharField metadata from Nansat or from default_char_fields
        options = {}
        try:
            existing_ds = Dataset.objects.get(entry_id=entry_id)
        except Dataset.DoesNotExist:
            existing_ds = None
        for name in default_char_fields:
            if name not in n_metadata:
                warnings.warn('%s is not provided in Nansat metadata!' % name)
                # prevent overwriting of existing values by defaults
                if existing_ds:
                    options[name] = existing_ds.__getattribute__(name)
                else:
                    options[name] = default_char_fields[name]()
            else:
                options[name] = n_metadata[name]

        default_foreign_keys = {
            'gcmd_location': {
                'model': Location,
                'value': pti.get_gcmd_location('SEA SURFACE')
            },
            'data_center': {
                'model': DataCenter,
                'value': pti.get_gcmd_provider('NERSC')
            },
            'ISO_topic_category': {
                'model': ISOTopicCategory,
                'value': pti.get_iso19115_topic_category('Oceans')
            },
        }

        # set optional ForeignKey metadata from Nansat or from default_foreign_keys
        for name in default_foreign_keys:
            value = default_foreign_keys[name]['value']
            model = default_foreign_keys[name]['model']
            if name not in n_metadata:
                warnings.warn('%s is not provided in Nansat metadata!' % name)
            else:
                try:
                    value = json.loads(n_metadata[name])
                except:
                    warnings.warn(
                        '%s value of %s  metadata provided in Nansat is wrong!'
                        % (n_metadata[name], name))
            if existing_ds:
                options[name] = existing_ds.__getattribute__(name)
            else:
                options[name], _ = model.objects.get_or_create(value)

        # Find coverage to set number of points in the geolocation
        if len(n.vrt.dataset.GetGCPs()) > 0:
            n.reproject_gcps()
        geolocation = GeographicLocation.objects.get_or_create(
            geometry=WKTReader().read(n.get_border_wkt(nPoints=n_points)))[0]

        # create dataset
        # - the get_or_create method should use get_or_create here as well,
        #   or its name should be changed - see issue #127
        ds, created = Dataset.objects.update_or_create(
            entry_id=options['entry_id'],
            defaults={
                'time_coverage_start': n.get_metadata('time_coverage_start'),
                'time_coverage_end': n.get_metadata('time_coverage_end'),
                'source': source,
                'geographic_location': geolocation,
                'gcmd_location': options["gcmd_location"],
                'ISO_topic_category': options["ISO_topic_category"],
                "data_center": options["data_center"],
                'entry_title': options["entry_title"],
                'summary': options["summary"]
            })

        # create parameter
        all_band_meta = n.bands()
        for band_id in range(1, len(all_band_meta) + 1):
            band_meta = all_band_meta[band_id]
            standard_name = band_meta.get('standard_name', None)
            short_name = band_meta.get('short_name', None)
            units = band_meta.get('units', None)
            if standard_name in ['latitude', 'longitude', None]:
                continue
            params = Parameter.objects.filter(standard_name=standard_name)
            if params.count() > 1 and short_name is not None:
                params = params.filter(short_name=short_name)
            if params.count() > 1 and units is not None:
                params = params.filter(units=units)
            if params.count() >= 1:
                ds.parameters.add(params[0])

        # create dataset URI
        DatasetURI.objects.get_or_create(name=uri_service_name,
                                         service=uri_service_type,
                                         uri=uri,
                                         dataset=ds)

        return ds, created
Example #13
    def get_or_create(self, uri, reprocess=False, *args, **kwargs):
        # ingest file to db
        ds, created = super(DatasetManager,
                            self).get_or_create(uri, *args, **kwargs)

        fn = nansat_filename(uri)

        n = Nansat(fn)

        # Reproject to leaflet projection
        xlon, xlat = n.get_corners()
        d = Domain(
            NSR(3857), '-lle %f %f %f %f -tr 1000 1000' %
            (xlon.min(), xlat.min(), xlon.max(), xlat.max()))
        n.reproject(d)

        # Get band numbers of required bands according to standard names
        speedBandNum = n._get_band_number({'standard_name': 'wind_speed'})
        dirBandNum = n._get_band_number(
            {'standard_name': 'wind_from_direction'})

        # Get numpy arrays of the bands
        speed = n[speedBandNum]
        dir = n[dirBandNum]

        ## It probably wont work with nansatmap...
        #nmap = Nansatmap(n, resolution='l')
        #nmap.pcolormesh(speed, vmax=18)
        #nmap.quiver(-speed*np.sin(dir), speed*np.cos(dir), step=10, scale=300,
        #        width=0.002)

        # Set paths - this code should be inherited but I think there is an
        # issue in generalising the first line that defines the current module
        mm = self.__module__.split('.')
        module = '%s.%s' % (mm[0], mm[1])
        mp = media_path(module, n.fileName)
        ppath = product_path(module, n.fileName)

        filename = os.path.basename(n.fileName).split('.')[0] + '.' + \
                os.path.basename(n.fileName).split('.')[1] + '.png'

        # check uniqueness of parameter
        param1 = Parameter.objects.get(standard_name=n.get_metadata(
            bandID=speedBandNum, key='standard_name'))
        param2 = Parameter.objects.get(standard_name=n.get_metadata(
            bandID=dirBandNum, key='standard_name'))

        n.write_figure(os.path.join(mp, filename),
                       bands=speedBandNum,
                       mask_array=n['swathmask'],
                       mask_lut={0: [128, 128, 128]},
                       transparency=[128, 128, 128])

        # Get DatasetParameter
        dsp1, created = DatasetParameter.objects.get_or_create(
            dataset=ds, parameter=param1)

        # Create Visualization
        geom, created = GeographicLocation.objects.get_or_create(
            geometry=WKTReader().read(n.get_border_wkt()))
        vv, created = Visualization.objects.get_or_create(
            uri='file://localhost%s/%s' % (mp, filename),
            title='%s' % (param1.standard_name),
            geographic_location=geom)

        # Create VisualizationParameter
        vp, created = VisualizationParameter.objects.get_or_create(
            visualization=vv, ds_parameter=dsp1)

        return ds, True
Example #14
def add_vermont():
    vt_names = []
    vt_dict = {}
    with open("/tmp/vt_names") as f:
        vt_names = [l.strip() for l in f.readlines()]

    for name in vt_names:
        vt_name = name
        gnis_name = None
        municipality_name = None

        if " - " in name:
            (vt_name, gnis_name, municipality_name) = name.split(" - ")

        if gnis_name:
            gnis = GNIS.objects.get(feature_class="Civil",
                                    state_alpha="VT",
                                    feature_name=gnis_name)
        else:
            gnis = GNIS.objects.filter(
                feature_class="Civil",
                state_alpha="VT",
                feature_name__endswith="of %s" %
                (vt_name)).exclude(feature_name__startswith="Village of")[0]

        vt_dict[vt_name] = {
            "vt_name": vt_name,
            "gnis_name": gnis_name or vt_name,
            "municipality_name": municipality_name or vt_name,
            "gnis": gnis
        }

    wktr = WKTReader()
    sf = shapefile.Reader("/tmp/Boundary_BNDHASH_region_towns.shp")

    for i in range(0, 255):
        shape = sf.shape(i)
        record = sf.record(i)

        wkt = "POLYGON (("
        for p in shape.points:
            p = srs_to_latlng(p[0], p[1], 2852)
            wkt += "%f %f," % (p[1], p[0])
        wkt = wkt[:-1]
        wkt += "))"

        try:
            polygon = wktr.read(wkt)
        except Exception:
            p = srs_to_latlng(shape.points[0][0], shape.points[0][1], 2852)
            wkt = wkt[:-2]
            wkt += ",%f %f))" % (p[1], p[0])
            polygon = wktr.read(wkt)

        dict_entry = vt_dict[record[6]]
        gnis = dict_entry["gnis"]

        m = Municipality()
        m.name = dict_entry["municipality_name"]
        m.geometry = polygon
        m.state_fips = gnis.state_numeric
        m.county_fips = gnis.county_numeric
        m.gnis = gnis.feature_id
        m.save()
        print "Saved %s" % (m)
Example #15
    def get_or_create(self, uri, *args, **kwargs):
        """ Ingest gsar file to geo-spaas db
        """

        ds, created = super(DatasetManager,
                            self).get_or_create(uri, *args, **kwargs)
        connection.close()

        # TODO: Check if the following is necessary
        if not type(ds) == Dataset:
            return ds, False

        fn = nansat_filename(uri)
        n = Nansat(fn, subswath=0)

        # set Dataset entry_title
        ds.entry_title = n.get_metadata('title')
        ds.save()

        if created:
            from sar_doppler.models import SARDopplerExtraMetadata
            # Store the polarization and associate the dataset
            extra, _ = SARDopplerExtraMetadata.objects.get_or_create(
                dataset=ds, polarization=n.get_metadata('polarization'))
            if not _:
                raise ValueError(
                    'Created new dataset but could not create instance of ExtraMetadata'
                )
            ds.sardopplerextrametadata_set.add(extra)
            connection.close()

        gg = WKTReader().read(n.get_border_wkt())

        #lon, lat = n.get_border()
        #ind_near_range = 0
        #ind_far_range = int(lon.size/4)
        #import pyproj
        #geod = pyproj.Geod(ellps='WGS84')
        #angle1,angle2,img_width = geod.inv(lon[ind_near_range], lat[ind_near_range],
        #                                    lon[ind_far_range], lat[ind_far_range])

        # If the area of the dataset geometry is larger than the area of the subswath border, it means that the dataset
        # has already been created (the area should be the total area of all subswaths)
        if np.floor(ds.geographic_location.geometry.area) > np.round(gg.area):
            return ds, False

        swath_data = {}
        lon = {}
        lat = {}
        astep = {}
        rstep = {}
        az_left_lon = {}
        ra_upper_lon = {}
        az_right_lon = {}
        ra_lower_lon = {}
        az_left_lat = {}
        ra_upper_lat = {}
        az_right_lat = {}
        ra_lower_lat = {}
        num_border_points = 10
        border = 'POLYGON(('

        for i in range(self.N_SUBSWATHS):
            # Read subswaths
            swath_data[i] = Nansat(fn, subswath=i)

            lon[i], lat[i] = swath_data[i].get_geolocation_grids()

            astep[i] = int(
                max(1, (lon[i].shape[0] / 2 * 2 - 1) / num_border_points))
            rstep[i] = int(
                max(1, (lon[i].shape[1] / 2 * 2 - 1) / num_border_points))

            az_left_lon[i] = lon[i][0:-1:astep[i], 0]
            az_left_lat[i] = lat[i][0:-1:astep[i], 0]

            az_right_lon[i] = lon[i][0:-1:astep[i], -1]
            az_right_lat[i] = lat[i][0:-1:astep[i], -1]

            ra_upper_lon[i] = lon[i][-1, 0:-1:rstep[i]]
            ra_upper_lat[i] = lat[i][-1, 0:-1:rstep[i]]

            ra_lower_lon[i] = lon[i][0, 0:-1:rstep[i]]
            ra_lower_lat[i] = lat[i][0, 0:-1:rstep[i]]

        lons = np.concatenate(
            (az_left_lon[0], ra_upper_lon[0], ra_upper_lon[1], ra_upper_lon[2],
             ra_upper_lon[3], ra_upper_lon[4], np.flipud(az_right_lon[4]),
             np.flipud(ra_lower_lon[4]), np.flipud(ra_lower_lon[3]),
             np.flipud(ra_lower_lon[2]), np.flipud(ra_lower_lon[1]),
             np.flipud(ra_lower_lon[0]))).round(decimals=3)

        # apply 180 degree correction to longitude - code copied from
        # get_border_wkt...
        # TODO: simplify using np.mod?
        for ilon, llo in enumerate(lons):
            lons[ilon] = copysign(
                acos(cos(llo * pi / 180.)) / pi * 180, sin(llo * pi / 180.))

        lats = np.concatenate(
            (az_left_lat[0], ra_upper_lat[0], ra_upper_lat[1], ra_upper_lat[2],
             ra_upper_lat[3], ra_upper_lat[4], np.flipud(az_right_lat[4]),
             np.flipud(ra_lower_lat[4]), np.flipud(ra_lower_lat[3]),
             np.flipud(ra_lower_lat[2]), np.flipud(ra_lower_lat[1]),
             np.flipud(ra_lower_lat[0]))).round(decimals=3)

        poly_border = ','.join(
            str(llo) + ' ' + str(lla) for llo, lla in zip(lons, lats))
        wkt = 'POLYGON((%s))' % poly_border
        new_geometry = WKTReader().read(wkt)

        # Get or create new geolocation of dataset
        # Returns False if it is the same as an already created one (this may happen when a lot of data is processed)
        ds.geographic_location, cr = GeographicLocation.objects.get_or_create(
            geometry=new_geometry)
        connection.close()

        return ds, True
Example #16
    def get_or_create(self,
                      uri,
                      n_points=10,
                      uri_filter_args=None,
                      *args,
                      **kwargs):
        ''' Create dataset and corresponding metadata

        Parameters:
        ----------
            uri : str
                  URI to file or stream openable by Nansat
            n_points : int
                  Number of border points (default is 10)
            uri_filter_args : dict
                Extra DatasetURI filter arguments if several datasets can refer to the same URI

        Returns:
        -------
            dataset and flag
        '''
        if not uri_filter_args:
            uri_filter_args = {}

        # Validate uri - this should raise an exception if the uri doesn't point to a valid
        # file or stream
        validate_uri(uri)

        # Several datasets can refer to the same uri (e.g., scatterometers and svp drifters), so we
        # need to pass uri_filter_args
        uris = DatasetURI.objects.filter(uri=uri, **uri_filter_args)
        if len(uris) > 0:
            return uris[0].dataset, False

        # Open file with Nansat
        n = Nansat(nansat_filename(uri), **kwargs)

        # get metadata from Nansat and get objects from vocabularies
        n_metadata = n.get_metadata()

        # set compulsory metadata (source)
        platform, _ = Platform.objects.get_or_create(
            json.loads(n_metadata['platform']))
        instrument, _ = Instrument.objects.get_or_create(
            json.loads(n_metadata['instrument']))
        specs = n_metadata.get('specs', '')
        source, _ = Source.objects.get_or_create(platform=platform,
                                                 instrument=instrument,
                                                 specs=specs)

        default_char_fields = {
            'entry_id': lambda: 'NERSC_' + str(uuid.uuid4()),
            'entry_title': lambda: 'NONE',
            'summary': lambda: 'NONE',
        }

        # set optional CharField metadata from Nansat or from default_char_fields
        options = {}
        for name in default_char_fields:
            if name not in n_metadata:
                warnings.warn('%s is not provided in Nansat metadata!' % name)
                options[name] = default_char_fields[name]()
            else:
                options[name] = n_metadata[name]

        default_foreign_keys = {
            'gcmd_location': {
                'model': Location,
                'value': pti.get_gcmd_location('SEA SURFACE')
            },
            'data_center': {
                'model': DataCenter,
                'value': pti.get_gcmd_provider('NERSC')
            },
            'ISO_topic_category': {
                'model': ISOTopicCategory,
                'value': pti.get_iso19115_topic_category('Oceans')
            },
        }

        # set optional ForeignKey metadata from Nansat or from default_foreign_keys
        for name in default_foreign_keys:
            value = default_foreign_keys[name]['value']
            model = default_foreign_keys[name]['model']
            if name not in n_metadata:
                warnings.warn('%s is not provided in Nansat metadata!' % name)
            else:
                try:
                    value = json.loads(n_metadata[name])
                except:
                    warnings.warn(
                        '%s value of %s  metadata provided in Nansat is wrong!'
                        % (n_metadata[name], name))
            options[name], _ = model.objects.get_or_create(value)

        # Find coverage to set number of points in the geolocation
        if len(n.vrt.dataset.GetGCPs()) > 0:
            n.reproject_gcps()
        geolocation = GeographicLocation.objects.get_or_create(
            geometry=WKTReader().read(n.get_border_wkt(nPoints=n_points)))[0]

        # create dataset
        ds, created = Dataset.objects.get_or_create(
            time_coverage_start=n.get_metadata('time_coverage_start'),
            time_coverage_end=n.get_metadata('time_coverage_end'),
            source=source,
            geographic_location=geolocation,
            **options)
        # create dataset URI
        ds_uri, _ = DatasetURI.objects.get_or_create(uri=uri, dataset=ds)

        return ds, created
Example #17
    def handle(self, *args, **options):
        obj = json.loads(open('rosario-all.json').read())

        lineas = ['101', '102', '103', '106', '107', '110', '112', '113', '115', '116', '120', '121', '122', '123',
                  '125', '126', '127', '128', '129', '130', '131', '132', '133', '134', '135', '136', '137', '138',
                  '139', '140', '141', '142', '143', '144', '145', '146', '153', '35/9', 'Enlace', 'K',
                  'Linea de la Costa', 'Ronda del Centro', 'Expreso', 'Las Rosas', 'M', 'Metropolitana', 'Monticas',
                  'Serodino', 'TIRSA']

        ci = Ciudad.objects.get(slug='rosario')

        ls = []
        for l in lineas:
            li = Linea(nombre=l)
            li.save()
            ls.append(li)

        for o in obj:
            for l in ls:
                if o['linea'].startswith(l.nombre):
                    print("Agregando recorrido " + o['linea'])
                    print("IDA: ")
                    sys.stdout.flush()

                    # IDA
                    recorrido = Recorrido(
                        nombre=o['linea'].replace(l.nombre, ''),
                        sentido='IDA',
                        linea=l,
                        inicio=o['descripcionIDA'].split(',')[0].replace('Desde', '').strip().capitalize(),
                        fin=o['descripcionIDA'].split(',')[-1].replace('Hasta', '').strip().capitalize(),
                        ruta=WKTReader().read(o['ida']),
                        descripcion=o['descripcionIDA'],
                        paradas_completas=True
                    )
                    recorrido.save()
                    for p in o['paradas']:
                        if WKTReader().read(p['point']).distance(recorrido.ruta) < 0.0005555:  # ~50 metros
                            print(p['codigo'])
                            sys.stdout.flush()
                            try:
                                parada = Parada.objects.get(codigo=p['codigo'])
                            except ObjectDoesNotExist:
                                parada = Parada(codigo=p['codigo'], latlng=WKTReader().read(p['point']))
                                parada.save()
                            horario = Horario(recorrido=recorrido, parada=parada)
                            horario.save()

                    print("")
                    print("VUELTA: ")
                    sys.stdout.flush()
                    # VUELTA
                    recorrido = Recorrido(
                        nombre=o['linea'].replace(l.nombre, ''),
                        sentido='VUELTA',
                        linea=l,
                        inicio=o['descripcionVUELTA'].split(',')[-1].replace('Hasta', '').strip().capitalize(),
                        fin=o['descripcionVUELTA'].split(',')[0].replace('Desde', '').strip().capitalize(),
                        ruta=WKTReader().read(o['vuelta']),
                        descripcion=o['descripcionVUELTA'],
                        paradas_completas=True
                    )
                    recorrido.save()
                    for p in o['paradas']:
                        print(p['codigo'])
                        sys.stdout.flush()
                        if WKTReader().read(p['point']).distance(recorrido.ruta) < 0.0005555:  # ~50 metros
                            try:
                                parada = Parada.objects.get(codigo=p['codigo'])
                            except ObjectDoesNotExist:
                                parada = Parada(codigo=p['codigo'], latlng=WKTReader().read(p['point']))
                                parada.save()
                            horario = Horario(recorrido=recorrido, parada=parada)
                            horario.save()
                    print("")
                    print("--------------------------------")
Example #18
    def get_or_create(self, uri, reprocess=False, *args, **kwargs):
        # ingest file to db
        ds, created = super(DatasetManager,
                            self).get_or_create(uri, *args, **kwargs)

        # set Dataset entry_title
        ds.entry_title = 'SAR NRCS'
        ds.save()

        # Unless reprocess==True, we may not need to do the following... (see
        # managers.py in sar doppler processor)
        #visExists = ... # check if visualization(s) already created
        #if visExists and not reprocess:
        #    warnings.warn('NO VISUALISATIONS CREATED - update managers.py')
        #    return ds, created

        n = Nansat(nansat_filename(uri))
        n.reproject_GCPs()
        n.resize(pixelsize=500)
        lon, lat = n.get_corners()
        lat_max = min(lat.max(), 85)
        d = Domain(
            NSR(3857), '-lle %f %f %f %f -ts %d %d' %
            (lon.min(), lat.min(), lon.max(), lat_max, n.shape()[1],
             n.shape()[0]))
        # Get all NRCS bands
        s0bands = []
        pp = []
        for key, value in n.bands().items():
            try:
                if value['standard_name'] == standard_name:  # `standard_name` is assumed from the enclosing module
                    s0bands.append(key)
                    pp.append(value['polarization'])
            except KeyError:
                continue
        # Create data products
        mm = self.__module__.split('.')
        module = '%s.%s' % (mm[0], mm[1])
        mp = media_path(module, n.fileName)
        # ppath = product_path(module, n.fileName)

        # Create png's for each band
        num_products = 0
        for band in s0bands:
            print('Visualize', band)
            s0_tmp = n[band]
            n_tmp = Nansat(domain=n, array=s0_tmp)
            n_tmp.reproject_GCPs()
            n_tmp.reproject(d)

            s0 = n_tmp[1]
            n_tmp = None
            mask = np.ones(s0.shape, np.uint8)
            mask[np.isnan(s0) + (s0 <= 0)] = 0
            s0 = np.log10(s0) * 10.

            meta = n.bands()[band]
            product_filename = '%s_%s.png' % (meta['short_name'],
                                              meta['polarization'])

            nansatFigure(s0, mask, polarization_clims[meta['polarization']][0],
                         polarization_clims[meta['polarization']][1], mp,
                         product_filename)

            # Get DatasetParameter
            param = Parameter.objects.get(short_name=meta['short_name'])
            dsp, created = DatasetParameter.objects.get_or_create(
                dataset=ds, parameter=param)

            # Create Visualization
            geom, created = GeographicLocation.objects.get_or_create(
                geometry=WKTReader().read(n.get_border_wkt()))
            vv, created = Visualization.objects.get_or_create(
                uri='file://localhost%s/%s' % (mp, product_filename),
                title='%s %s polarization' %
                (param.standard_name, meta['polarization']),
                geographic_location=geom)

            # Create VisualizationParameter
            vp, created = VisualizationParameter.objects.get_or_create(
                visualization=vv, ds_parameter=dsp)

        return ds, True
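
Note: Example #18 masks NaN and non-positive backscatter before converting to decibels. That step in isolation (values are illustrative):

    import numpy as np

    s0 = np.array([[0.5, 0.0], [np.nan, 2.0]])
    mask = np.ones(s0.shape, np.uint8)
    mask[np.isnan(s0) | (s0 <= 0)] = 0        # flag invalid pixels
    with np.errstate(divide='ignore', invalid='ignore'):
        s0_db = 10.0 * np.log10(s0)           # -inf/NaN where mask is 0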
Example #19
File: misc.py Project: iatkin/place
def get_item_places(name, zoom=12, steps=3, distinct_bounds=True, return_places=False):
    nurl = "http://open.mapquestapi.com/nominatim/v1"
    f = open("/tmp/%s-places" % (name), "w", encoding="utf-8")
    wktr = WKTReader()

    ps = {}

    items = BoundedItem.objects.filter(LayerDisplayName__contains="%s" % (name))

    if distinct_bounds:
        items = items.distinct("bounds")

    names = [b.LayerDisplayName for b in items]
    names.sort()

    def do_print_name(name):
        print(name)

    def dont_print_name(name):
        pass

    print_name = None

    if items.count() > 1:
        print_name = do_print_name
    else:
        print_name = dont_print_name

    for n in names:
        b = BoundedItem.objects.get(LayerDisplayName=n)
        d_x = (b.MaxX - b.MinX)/(steps)
        d_y = (b.MaxY - b.MinY)/(steps)

        xs = []
        ys = []

        for i in range(0,steps+1):
            xs.append(d_x * i)
            ys.append(d_y * i)

        places = []
        print_name(b.LayerDisplayName)
        f.write(b.LayerDisplayName)
        f.write("\n")
        for lon in xs:
            for lat in ys:
                url = "%s/reverse?lat=%f&lon=%f&zoom=%d&format=json" % (nurl, b.MinY + lat, b.MinX + lon, zoom)
                r = requests.get(url).json()
                name =  r["display_name"]

                if (not (name in places)):
                    url = "%s/search?q=\"%s\"&format=json&polygon_text=1" % (nurl, name)
                    r = requests.get(url).json()
                    p = wktr.read(r[0]["geotext"])

                    if b.bounds.intersects(p):
                        ps[name] = p
                        places.append(name)
        places.sort()
        for place in places:
            f.write(place)
            f.write("\n")
        f.write("\n")
    f.close()

    if return_places:
        return ps
Example #20
def calc_mean_doppler(datetime_start=timezone.datetime(2010, 1, 1, tzinfo=timezone.utc),
                      datetime_end=timezone.datetime(2010, 2, 1, tzinfo=timezone.utc),
                      domain=Domain(NSR().wkt, '-te 10 -44 40 -30 -tr 0.05 0.05')):
    geometry = WKTReader().read(domain.get_border_wkt(nPoints=1000))
    ds = Dataset.objects.filter(entry_title__contains='Doppler',
            time_coverage_start__range=[datetime_start, datetime_end],
            geographic_location__geometry__intersects=geometry)
    Va = np.zeros(domain.shape())
    Vd = np.zeros(domain.shape())
    ca = np.zeros(domain.shape())
    cd = np.zeros(domain.shape())
    sa = np.zeros(domain.shape())
    sd = np.zeros(domain.shape())
    sum_var_inv_a = np.zeros(domain.shape())
    sum_var_inv_d = np.zeros(domain.shape())
    for dd in ds:
        uris = dd.dataseturi_set.filter(uri__endswith='nc')
        for uri in uris:
            dop = Doppler(uri.uri)
            # Consider skipping swath 1 and possibly 2...
            dop.reproject(domain)
            # TODO: HARDCODING - MUST BE IMPROVED
            satpass = dop.get_metadata(key='Originating file').split('/')[6]
            if satpass=='ascending':
                try:
                    v_ai = dop['Ur']
                    v_ai[np.abs(v_ai)>3] = np.nan
                except:
                    # subswath doesn't cover the given domain
                    continue
                # uncertainty:
                # 5 Hz - TODO: estimate this correctly...
                sigma_ai = -np.pi*np.ones(dop.shape())*5./(112*np.sin(dop['incidence_angle']*np.pi/180.)) 
                alpha_i = -dop['sensor_azimuth']*np.pi/180.
                Va = np.nansum(np.append(np.expand_dims(Va, 2),
                    np.expand_dims(v_ai/np.square(sigma_ai), 2), axis=2),
                    axis=2)
                ca = np.nansum(np.append(np.expand_dims(ca, 2),
                    np.expand_dims(np.cos(alpha_i)/np.square(sigma_ai), 2),
                    axis=2), axis=2)
                sa = np.nansum(np.append(np.expand_dims(sa, 2),
                    np.expand_dims(np.sin(alpha_i)/np.square(sigma_ai), 2),
                    axis=2), axis=2)
                sum_var_inv_a =np.nansum(np.append(np.expand_dims(sum_var_inv_a, 2),
                    np.expand_dims(1./np.square(sigma_ai), 2), axis=2),
                    axis=2)
            else:
                try:
                    v_dj = -dop['Ur']
                    v_dj[np.abs(v_dj)>3] = np.nan
                except:
                    # subswath doesn't cover the given domain
                    continue
                # 5 Hz - TODO: estimate this correctly...
                sigma_dj = -np.pi*np.ones(dop.shape())*5./(112*np.sin(dop['incidence_angle']*np.pi/180.)) 
                delta_j = (dop['sensor_azimuth']-180.)*np.pi/180.
                Vd = np.nansum(np.append(np.expand_dims(Vd, 2),
                    np.expand_dims(v_dj/np.square(sigma_dj), 2), axis=2),
                    axis=2)
                cd = np.nansum(np.append(np.expand_dims(cd, 2),
                    np.expand_dims(np.cos(delta_j)/np.square(sigma_dj), 2),
                    axis=2), axis=2)
                sd = np.nansum(np.append(np.expand_dims(sd, 2),
                    np.expand_dims(np.sin(delta_j)/np.square(sigma_dj), 2),
                    axis=2), axis=2)
                sum_var_inv_d = np.nansum(np.append(
                    np.expand_dims(sum_var_inv_d, 2), np.expand_dims(
                        1./np.square(sigma_dj), 2), axis=2), axis=2)  # sigma_dj, not sigma_ai (apparent copy-paste slip)

    u = (Va*sd + Vd*sa)/(sa*cd + sd*ca)
    v = (Va*cd - Vd*ca)/(sa*cd + sd*ca)
    sigma_u = np.sqrt(np.square(sd)*sum_var_inv_a +
            np.square(sa)*sum_var_inv_d) / (sa*cd + sd*ca)
    sigma_v = np.sqrt(np.square(cd)*sum_var_inv_a +
            np.square(ca)*sum_var_inv_d) / (sa*cd + sd*ca)
    nu = Nansat(array=u, domain=domain)
    nmap=Nansatmap(nu, resolution='h')
    nmap.pcolormesh(nu[1], vmin=-1.5, vmax=1.5, cmap='bwr')
    nmap.add_colorbar()
    nmap.draw_continents()
    nmap.fig.savefig('/vagrant/shared/unwasc.png', bbox_inches='tight')
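
Note: the nansum accumulation in calc_mean_doppler implements inverse-variance weighting of the per-pass velocities; stripped to its core, the estimator is:

    import numpy as np

    def ivw_mean(values, sigmas):
        # values, sigmas: arrays stacked with one layer per observation
        w = 1.0 / np.square(sigmas)
        return np.nansum(values * w, axis=0) / np.nansum(w, axis=0)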