Example 1
 def test_within(self):
     """
     Test WithinProcess for vector inputs
     """
     vector1_io = VectorFileIO(
         uri=os.path.join(testfile_path, 'iraq_hospitals.geojson'))
     vector2_io = VectorFileIO(
         uri=os.path.join(testfile_path, 'baghdad_districts.geojson'))
     process = geo.WithinProcess(inputs=[vector1_io, vector2_io])
     try:
         process.compute()
         self.assertEquals(len(process.output.data), 19)
     finally:
         if process:
             process.purge()
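
Example 1 checks that the hospitals falling inside the Baghdad district polygons are found. Outside the process class, the same 'within' relation can be expressed as a geopandas spatial join; the sketch below is an illustration that assumes the two GeoJSON test files are read directly with geopandas rather than through VectorFileIO:

import geopandas as gpd

# the GeoJSON files are the same test fixtures used in Example 1
hospitals = gpd.read_file('iraq_hospitals.geojson')
districts = gpd.read_file('baghdad_districts.geojson')

# keep only the hospitals whose geometry lies within some district polygon
# (op= is the older keyword; newer geopandas versions call it predicate=)
within = gpd.sjoin(hospitals, districts, how='inner', op='within')
print(len(within))
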
Example 2
    def compute(self):
        if not self.output:
            self.output = VectorFileIO(name='result', uri=self.get_outpath())
        first_df = self.inputs[0].read()
        col = self.var_col
        adjust_by_col = self.adjust_by_col

        # filter out null fields or else weight functions won't work
        keep = first_df[col].notnull()
        filtered_df = first_df[keep].reset_index()

        # get Local Moran's I
        f = np.array(filtered_df[col])
        w = wt.gpd_contiguity(filtered_df)
        if adjust_by_col:
            adjust_by = np.array(filtered_df[adjust_by_col])
            lm = pysal.esda.moran.Moran_Local_Rate(e=f,
                                                   b=adjust_by,
                                                   w=w,
                                                   permutations=9999)
        else:
            lm = pysal.Moran_Local(y=f, w=w, permutations=9999)

        sig = lm.p_sim < 0.05
        filtered_df['lm_sig'] = sig
        filtered_df['lm_p_sim'] = lm.p_sim
        filtered_df['lm_q'] = lm.q
        filtered_df['lm_Is'] = lm.Is

        self.output.data = filtered_df
        self.output.write()
        logger.debug(self.output)
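
Example 2 calls the legacy (pre-2.0) PySAL API for Local Moran's I. A minimal, self-contained sketch of that call, using synthetic lattice weights in place of the contiguity weights built from the GeoDataFrame:

import numpy as np
import pysal  # legacy pysal (< 2.0), the API used in the example above

w = pysal.lat2W(5, 5)            # synthetic 5x5 lattice contiguity weights
y = np.random.random(25)         # one attribute value per lattice cell

lm = pysal.Moran_Local(y=y, w=w, permutations=9999)
sig = lm.p_sim < 0.05            # pseudo p-values from the permutations
print(lm.Is[:5], lm.q[:5], sig[:5])  # local statistics, quadrant codes, significance
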
Example 3
    def compute(self):
        if not self.output:
            self.output = VectorFileIO(name='result', uri=self.get_outpath())
        for input in self.inputs:
            if input.name == 'input':
                first_df = input.read()
        col = self.var_col
        adjust_by_col = self.adjust_by_col
        permutations = self.permutations
        if not permutations:
            permutations = 999

        # filter out null fields
        keep = first_df[col].notnull()
        filtered_df = first_df[keep]

        # get Global Moran's I
        f = np.array(filtered_df[col])
        w = wt.gpd_contiguity(filtered_df)
        if adjust_by_col:
            adjust_by = np.array(filtered_df[adjust_by_col])
            mi = pysal.esda.moran.Moran_Rate(e=f,
                                             b=adjust_by,
                                             w=w,
                                             permutations=permutations)
        else:
            mi = pysal.Moran(y=f, w=w, permutations=permutations)

        keep = ['I', 'EI', 'p_norm', 'EI_sim', 'p_sim', 'z_sim', 'p_z_sim']
        mi_dict = {k: getattr(mi, k) for k in keep}

        self.output.data = mi_dict
        self.output.write()
        logger.debug(self.output)
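
Example 3 keeps only a handful of attributes from the global Moran result. The sketch below shows the same attribute extraction against legacy PySAL, again with synthetic lattice weights; the keep list mirrors the one in the example:

import numpy as np
import pysal  # legacy pysal (< 2.0)

w = pysal.lat2W(4, 4)            # synthetic 4x4 lattice contiguity weights
y = np.random.random(16)
mi = pysal.Moran(y=y, w=w, permutations=999)

keep = ['I', 'EI', 'p_norm', 'EI_sim', 'p_sim', 'z_sim', 'p_z_sim']
print({k: getattr(mi, k) for k in keep})
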
Example 4
 def test_intersect(self):
     """
     Test IntersectsProcess for vector inputs
     """
     vector1_io = VectorFileIO(
         uri=os.path.join(testfile_path, 'baghdad_districts.geojson'))
     vector2_io = VectorFileIO(
         uri=os.path.join(testfile_path, 'iraq_roads.geojson'))
     process = geo.IntersectsProcess(inputs=[vector1_io, vector2_io])
     try:
         process.compute()
         with open(
                 os.path.join(testfile_path,
                              'intersects_process_results.json')) as exp:
             expected_json = json.load(exp)
         actual_json = json.loads(process.output.read(format=formats.JSON))
         self.assertEquals(len(expected_json['features']),
                           len(actual_json['features']))
     finally:
         if process:
             process.purge()
Example 5
 def test_union(self):
     """
     Test UnionProcess for vector inputs
     """
     vector1_io = VectorFileIO(uri=os.path.join(
         testfile_path, 'baghdad_districts.geojson'),
                               filters=[('NNAME', 'contains', '^A')])
     vector2_io = VectorFileIO(uri=os.path.join(
         testfile_path, 'baghdad_districts.geojson'),
                               filters=[('NNAME', 'contains', '^B')])
     process = geo.UnionProcess(inputs=[vector1_io, vector2_io])
     try:
         process.compute()
         with open(os.path.join(testfile_path,
                                'union_process_results.json')) as exp:
             expected_json = json.load(exp)
         actual_json = json.loads(process.output.read(format=formats.JSON))
         self.assertEquals(len(expected_json['features']),
                           len(actual_json['features']))
     finally:
         if process:
             process.purge()
Example 6
 def test_weight(self):
     """
     Test WeightProcess for vector inputs
     """
     vector_io = VectorFileIO(name='input',
                              uri=os.path.join(testfile_path,
                                               'baghdad_hospitals.geojson'))
     process = geo.WeightProcess('knnW', inputs=[vector_io])
     try:
         process.compute()
         exp = pysal.open(
             os.path.join(testfile_path, 'weight_process_result.gal'), 'r')
         expected_w = exp.read()
         exp.close()
         actual = process.output.read(format=formats.WEIGHT)
         self.assertEquals(expected_w.n, actual.n)
     finally:
         if process:
             process.purge()
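
Example 6 compares the computed weights against a reference .gal file through the legacy PySAL file API. A short sketch of round-tripping a weights object through that format, with synthetic lattice weights standing in for the real ones:

import pysal  # legacy pysal (< 2.0), matching the pysal.open() call in the test

w = pysal.lat2W(3, 3)                 # synthetic contiguity weights

out = pysal.open('example.gal', 'w')  # GAL writer chosen from the file extension
out.write(w)
out.close()

inp = pysal.open('example.gal', 'r')
w2 = inp.read()                       # comes back as a W object
inp.close()

print(w.n == w2.n)                    # the test above asserts equality of .n
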
Example 7
 def test_cluster(self):
     """
     Test ClusterProcess for vector inputs
     """
     vector_io = VectorFileIO(name='input',
                              uri=os.path.join(testfile_path,
                                               'baghdad_hospitals.geojson'))
     process = geo.ClusterProcess('num_hospitals', inputs=[vector_io])
     try:
         process.compute()
         with open(
                 os.path.join(testfile_path,
                              'cluster_process_results.json')) as exp:
             expected_json = json.load(exp)
         actual_json = json.loads(process.output.read(format=formats.JSON))
         self.assertEquals(len(expected_json['features']),
                           len(actual_json['features']))
     finally:
         if process:
             process.purge()
Example 8
 def compute(self):
     if not self.output:
         self.output = VectorFileIO(name='result', uri=self.get_outpath())
     for input in self.inputs:
         if input.name == 'input':
             first_df = input.read()
     weight_type = self.weight_type
     if weight_type == 'contiguity':
         w = wt.gpd_contiguity(first_df)
     elif weight_type == 'knnW':
         w = wt.gpd_knnW(first_df)
     elif weight_type == 'distanceBandW':
         w = wt.gpd_distanceBandW(first_df)
     elif weight_type == 'kernel':
         w = wt.gpd_kernel(first_df)
     # TODO: add params related to dif weight types
     else:
         # unknown weight type: log an error and return early, otherwise `w`
         # would be unbound when assigned to the output below
         logger.error(u'weight type {0} not available'.format(weight_type))
         return
     self.output.data = w
     self.output.write()
     logger.debug(self.output)
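
The chain of elif branches in Example 8 maps a weight-type name to one of the wt helpers. A dictionary dispatch does the same lookup more compactly; the wt.* helpers below are the ones used in the example, everything else is an illustrative sketch rather than library code:

# assumes `wt` is the same weights helper module imported by the process above
WEIGHT_BUILDERS = {
    'contiguity': wt.gpd_contiguity,
    'knnW': wt.gpd_knnW,
    'distanceBandW': wt.gpd_distanceBandW,
    'kernel': wt.gpd_kernel,
}

def build_weights(weight_type, df):
    try:
        return WEIGHT_BUILDERS[weight_type](df)
    except KeyError:
        raise ValueError('weight type {0} not available'.format(weight_type))
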
Example 9
 def test_autocorrelation(self):
     """
     Test AutocorrelationProcess for vector inputs
     """
     vector_io = VectorFileIO(name='input',
                              uri=os.path.join(testfile_path,
                                               'baghdad_hospitals.geojson'))
     process = geo.AutocorrelationProcess('num_hospitals',
                                          inputs=[vector_io])
     try:
         process.compute()
         with open(
                 os.path.join(
                     testfile_path,
                     'autocorrelation_process_results.json')) as exp:
             expected_json = json.load(exp)
         actual_json = process.output.read(format=formats.JSON)
         self.assertEquals(expected_json['I'], actual_json['I'])
     finally:
         if process:
             process.purge()
Example 10
 def test_length(self):
     """
     Test LengthProcess for vector inputs
     """
     vector_roads = VectorFileIO(uri=os.path.join(testfile_path,
                                                  'iraq_roads.geojson'),
                                 filters=[('type', '=', 'motorway'),
                                          ('bridge', '=', 1)])
     process = geo.LengthProcess(inputs=[vector_roads])
     try:
         process.compute()
         with open(
                 os.path.join(testfile_path,
                              'length_process_results.json')) as exp:
             expected_json = json.load(exp)
         actual_json = json.loads(process.output.read(format=formats.JSON))
         self.assertEquals(len(expected_json['features']),
                           len(actual_json['features']))
     finally:
         if process:
             process.purge()
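
The filters argument in Example 10 restricts the road features before the length computation. On a GeoDataFrame read directly with geopandas, the equivalent selection is a boolean mask; a sketch, assuming the same test GeoJSON file:

import geopandas as gpd

roads = gpd.read_file('iraq_roads.geojson')

# same selection as filters=[('type', '=', 'motorway'), ('bridge', '=', 1)]
motorway_bridges = roads[(roads['type'] == 'motorway') & (roads['bridge'] == 1)]
print(len(motorway_bridges))
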
Example 11
    def test_subset_raster(self):
        """
        Test SubsetProcess for vector & raster inputs
        """
        zipfile = ZipFile(os.path.join(testfile_path, '2states.zip'), 'r')
        zipfile.extract('2states.geojson', testfile_path)

        vector_io = VectorFileIO(
            uri=os.path.join(testfile_path, '2states.geojson'))
        raster_io = RasterFileIO(
            uri=os.path.join(testfile_path, 'globalairtemp.tif'))
        process = geo.SubsetProcess(inputs=[raster_io, vector_io])
        try:
            process.compute()
            self.assertEquals(type(process.output.data).__name__, 'Dataset')
            self.assertTrue(os.path.exists(process.output.uri))
            self.assertIsNotNone(process.id)
            self.assertIn(process.id, process.output.uri)
        finally:
            testfile = os.path.join(testfile_path, '2states.geojson')
            if os.path.exists(testfile):
                os.remove(testfile)
            if process:
                process.purge()
Example 12
 def __init__(self, var_col, **kwargs):
     self.var_col = var_col
     super(ClusterProcess, self).__init__(**kwargs)
     if not self.output:
         self.output = VectorFileIO(name='result', uri=self.get_outpath())
Example 13
 def __init__(self, **kwargs):
     super(ZonalStatsProcess, self).__init__(**kwargs)
     if not self.output:
         self.output = VectorFileIO(name='result', uri=self.get_outpath())
     self.validate()
Example 14
 def __init__(self, inputs=None, buffer_size=None, **kwargs):
     super(BufferProcess, self).__init__(inputs, **kwargs)
     self.buffer_size = buffer_size
     if not self.output:
         self.output = VectorFileIO(name='result', uri=self.get_outpath())
     self.validate()
Example 15
 def __init__(self, distance=None, **kwargs):
     super(NearProcess, self).__init__(**kwargs)
     self.distance = distance
     if not self.output:
         self.output = VectorFileIO(name='result', uri=self.get_outpath())
     self.validate()
Example 16
 def __init__(self, combined=False, **kwargs):
     super(CentroidProcess, self).__init__(**kwargs)
     self.combined = combined
     if not self.output:
         self.output = VectorFileIO(name='result', uri=self.get_outpath())
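
Examples 12 through 16 share one constructor pattern: store the process-specific parameters, delegate to the parent __init__, create a default VectorFileIO output when none was supplied, and validate. A minimal sketch of that pattern for a hypothetical subclass; the class name ThresholdProcess, the threshold parameter, and the GaiaProcess base-class name are assumptions for illustration, and the imports are taken to match the surrounding module:

class ThresholdProcess(GaiaProcess):  # hypothetical subclass, not part of the library
    def __init__(self, threshold=None, **kwargs):
        super(ThresholdProcess, self).__init__(**kwargs)
        self.threshold = threshold
        if not self.output:
            # default the output to a vector file alongside other process outputs
            self.output = VectorFileIO(name='result', uri=self.get_outpath())
        self.validate()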