def createBsveDataset(user, bsveRoot, extraHeaders, params, layer_type):
    typeName = params['type_name']

    try:
        if params['metadata']:
            layer_info = params['metadata']
        else:
            layer_info = BsveWmsStyle(typeName, bsveRoot=bsveRoot,
                                      extraHeaders=extraHeaders).get_layer_info(layer_type)
    except TypeError:
        layer_info = ""

    # TODO: Add the legend url here once it is
    # ready on bsve side
    name = params['name']

    params['layer_info'] = layer_info
    params['adapter'] = 'bsve'

    legend_url = bsveRoot + "/data/v2/sources/geotiles/data/result?"
    legend_qs = quote("$filter=name eq {} and request eq getlegendgraphic and height eq 20 and width eq 20".format(
        typeName), safe='= ').replace(' ', '+')

    r = requests.get(legend_url + legend_qs, headers=extraHeaders)
    legend = b64encode(r.content)
    params['legend'] = legend

    # dataset = self.constructDataset(name, params)
    folder = findDatasetFolder(user, user, create=True)
    dataset = Item().createItem(name, user, folder, '')
    updateMinervaMetadata(dataset, params)
    return dataset
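For reference, a minimal standalone sketch of what the legend query construction above produces, assuming Python 3's urllib.parse.quote and a hypothetical layer name (geonode:flu_cases); the original module imports quote itself, so only the sample value is an assumption.

from urllib.parse import quote

# Hypothetical layer name; in the example above it comes from params['type_name'].
typeName = 'geonode:flu_cases'

# Same transformation as above: '=' and spaces stay unescaped, then spaces
# become '+', yielding an OData-style $filter query string.
legend_qs = quote(
    "$filter=name eq {} and request eq getlegendgraphic and height eq 20 and width eq 20"
    .format(typeName),
    safe='= ').replace(' ', '+')

print(legend_qs)
# %24filter=name+eq+geonode%3Aflu_cases+and+request+eq+getlegendgraphic+and+height+eq+20+and+width+eq+20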
Example #2
 def createGeojsonDataset(self, item, params):
     user = self.getCurrentUser()
     folder = findDatasetFolder(user, user, create=True)
     if folder is None:
         raise RestException('User has no Minerva Dataset folder.')
     if folder['_id'] != item['folderId']:
         raise RestException("Items need to be in user's Minerva Dataset " +
                             "folder.")
     minerva_metadata = {
         'original_type': 'geojson',
         'dataset_type': 'geojson',
     }
     # Use the first geojson or json file found as the dataset.
     for file in self.model('item').childFiles(item=item, limit=0):
         if ('geojson' in file['exts'] or 'json' in file['exts'] or
                 file.get('mimeType') in (
                     'application/json', 'application/vnd.geo+json',
                 )):
             minerva_metadata['original_files'] = [{
                 'name': file['name'], '_id': file['_id']}]
             minerva_metadata['geojson_file'] = {
                 'name': file['name'], '_id': file['_id']}
             minerva_metadata['geo_render'] = {
                 'type': 'geojson', 'file_id': file['_id']}
             minerva_metadata['original_type'] = 'geojson'
             minerva_metadata['source'] = {
                 'layer_source': 'GeoJSON'}
             minerva_metadata['source_type'] = 'item'
             break
     if 'geojson_file' not in minerva_metadata:
         raise RestException('Item contains no geojson file.')
     updateMinervaMetadata(item, minerva_metadata)
     return item
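The file-selection test above accepts either a geojson/json extension or a JSON mime type. A tiny sketch of that predicate applied to a hypothetical Girder file document (only the sample dict is assumed):

# Hypothetical file document carrying just the fields the check above reads.
file = {'name': 'counties.geojson', 'exts': ['geojson'],
        'mimeType': 'application/vnd.geo+json'}

matches = ('geojson' in file['exts'] or 'json' in file['exts'] or
           file.get('mimeType') in (
               'application/json', 'application/vnd.geo+json',
           ))
print(matches)  # True for this sample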
Example #3
    def testCheckDatasetsExistInDatabase(self):

        with HTTMock(wms_mock):
            response = self.request(path=self._path,
                                    method='POST',
                                    params=self._params,
                                    user=self._user)

        self.assertStatusOk(response)
        from girder.plugins.minerva.utility.minerva_utility import findDatasetFolder

        dataset_folder = findDatasetFolder(self._user, self._user)

        items = self.model('folder').childItems(dataset_folder)

        db_datasets = [i['meta']['minerva']['type_name'] for i in items]

        wmsSource = response.json

        response_datasets = [
            d['meta']['minerva']['type_name'] for d in wmsSource
        ]

        self.assertEquals(len(db_datasets), len(response_datasets),
                          'Number of datasets does not match the db')

        self.assertEquals(set(db_datasets), set(response_datasets),
                          'Dataset type_names do not match the db')
Example #5
 def createExternalMongo(self, name, dbConnectionUri, collectionName):
     # assume the item is created in the current user's space
     user = self.getCurrentUser()
     folder = findDatasetFolder(user, user)
     desc = 'external mongo dataset for %s' % name
     item = self.model('item').createItem(name, user, folder, desc)
     minerva_metadata = {
         'dataset_id': item['_id'],
         'original_type': 'mongo',
         'mongo_connection': {
             'db_uri': dbConnectionUri,
             'collection_name': collectionName
         }
     }
     # get the first entry in the collection, set as json_row
     # TODO integrate this with the methods for taking a row from a JSON
     # array in a file
     collection = self.mongoCollection(dbConnectionUri, collectionName)
     collectionList = list(collection.find(limit=1))
     if len(collectionList) > 0:
         minerva_metadata['json_row'] = collectionList[0]
     else:
         minerva_metadata['json_row'] = None
     if 'meta' not in item:
         item['meta'] = {}
     item['meta']['minerva'] = minerva_metadata
     self.model('item').setMetadata(item, item['meta'])
     return item['meta']['minerva']
Example #6
 def unshareDataset(self, item, params):
     currentUser = self.getCurrentUser()
     datasetFolder = findDatasetFolder(currentUser,
                                       currentUser,
                                       create=True)
     self.model('item').move(item, datasetFolder)
     return self.model('item').filter(item, currentUser)
Example #7
 def getGeometryLinkTarget(self, params):
     currentUser = self.getCurrentUser()
     folder = findDatasetFolder(currentUser, currentUser)
     items = list(self.model('item').find(
         query={'folderId': folder['_id'],
                'meta.minerva.dataset_type': 'geojson'},
         fields=['name']))
     return items
Example #8
 def constructDataset(self, name, minerva_metadata, desc=''):
     user = self.getCurrentUser()
     folder = findDatasetFolder(user, user, create=True)
     if folder is None:
         raise Exception('User has no Minerva Dataset folder.')
     dataset = self.model('item').createItem(name, user, folder, desc)
     updateMinervaMetadata(dataset, minerva_metadata)
     return dataset
Example #11
 def listDatasets(self, user, params):
     folder = findDatasetFolder(self.getCurrentUser(), user)
     if folder is None:
         return []
     else:
         limit, offset, sort = \
             self.getPagingParameters(params,
                                      defaultSortDir=pymongo.DESCENDING)
         items = [self.model('item').filter(item, self.getCurrentUser()) for
                  item in self.model('folder').childItems(folder,
                                                          limit=limit, offset=offset, sort=sort)]
     return items
Example #12
 def createGeojsonDataset(self, item, params, postgresGeojson=None):
     user = self.getCurrentUser()
     folder = findDatasetFolder(user, user, create=True)
     if folder is None:
         raise RestException('User has no Minerva Dataset folder.')
     if folder['_id'] != item['folderId']:
         raise RestException("Items need to be in user's Minerva Dataset " +
                             "folder.")
     minerva_metadata = {
         'original_type': 'geojson',
         'dataset_type': 'geojson',
     }
     # Use the first geojson or json file found as the dataset.
     for file in self.model('item').childFiles(item=item, limit=0):
         if ('geojson' in file['exts'] or 'json' in file['exts']
                 or file.get('mimeType') in (
                     'application/json',
                     'application/vnd.geo+json',
                 )):
             minerva_metadata['original_files'] = [{
                 'name': file['name'],
                 '_id': file['_id']
             }]
             minerva_metadata['geojson_file'] = {
                 'name': file['name'],
                 '_id': file['_id']
             }
             minerva_metadata['geo_render'] = {
                 'type': 'geojson',
                 'file_id': file['_id']
             }
             minerva_metadata['original_type'] = 'geojson'
             minerva_metadata['source'] = {'layer_source': 'GeoJSON'}
             minerva_metadata['source_type'] = 'item'
             if postgresGeojson is not None:
                 if postgresGeojson['field'] is not None:
                     minerva_metadata['visProperties'] = {
                         'line': {
                             "fillColorKey": postgresGeojson['field']
                         },
                         'polygon': {
                             "fillColorKey": postgresGeojson['field']
                         },
                         'point': {
                             "fillColorKey": postgresGeojson['field']
                         }
                     }
                 minerva_metadata['postgresGeojson'] = postgresGeojson
             break
     if 'geojson_file' not in minerva_metadata:
         raise RestException('Item contains no geojson file.')
     updateMinervaMetadata(item, minerva_metadata)
     return item
Example #13
    def promoteItemToDataset(self, item, params):
        """
        Take an Item in the user's Minerva Dataset folder, and promote
        it to a Minerva Dataset by adding proper Minerva metadata.
        """

        user = self.getCurrentUser()
        folder = findDatasetFolder(user, user, create=True)
        if folder is None:
            raise RestException('User has no Minerva Dataset folder.')
        if folder['_id'] != item['folderId']:
            raise RestException("Items need to be in user's Minerva Dataset " +
                                "folder.")
        # Don't overwrite if minerva metadata already exists.
        if 'meta' in item and 'minerva' in item['meta']:
            return item

        minerva_metadata = {'source_type': 'item'}
        for file in self.model('item').childFiles(item=item, limit=0):
            # TODO This switching based on which file is found first is
            # fairly brittle and should only be called after first upload.
            if 'geojson' in file['exts']:
                # we found a geojson, assume this is geojson original
                minerva_metadata['original_type'] = 'geojson'
                minerva_metadata['dataset_type'] = 'geojson'
                minerva_metadata['original_files'] = [{
                    'name': file['name'],
                    '_id': file['_id']
                }]
                minerva_metadata['geojson_file'] = {
                    'name': file['name'],
                    '_id': file['_id']
                }
                minerva_metadata['source'] = {'layer_source': 'GeoJSON'}
                break
            elif 'json' in file['exts']:
                minerva_metadata['original_type'] = 'json'
                minerva_metadata['dataset_type'] = 'json'
                minerva_metadata['original_files'] = [{
                    'name': file['name'],
                    '_id': file['_id']
                }]
                break
            elif 'csv' in file['exts']:
                minerva_metadata['original_type'] = 'csv'
                minerva_metadata['dataset_type'] = 'csv'
                minerva_metadata['original_files'] = [{
                    'name': file['name'],
                    '_id': file['_id']
                }]
                break
        updateMinervaMetadata(item, minerva_metadata)
        return item
Example #14
 def listDatasets(self, user, params):
     folder = findDatasetFolder(self.getCurrentUser(), user)
     if folder is None:
         return []
     else:
         limit, offset, sort = \
             self.getPagingParameters(params,
                                      defaultSortDir=pymongo.DESCENDING)
         items = [self.model('item').filter(item, self.getCurrentUser()) for
                  item in self.model('folder').childItems(folder,
                  limit=limit, offset=offset, sort=sort)]
         return items
Example #15
 def createMinervaDataset(self, geojsonString, name):
     """Creates a dataset from a geojson string"""
     output = StringIO.StringIO(json.dumps(geojsonString))
     outputSize = output.len
     user = self.getCurrentUser()
     datasetFolder = findDatasetFolder(user, user, create=True)
     itemModel = ModelImporter.model('item')
     uploadModel = ModelImporter.model('upload')
     item = itemModel.createItem(name, user, datasetFolder)
     geojsonFile = uploadModel.uploadFromFile(output, outputSize, name,
                                              'item', item, user)
     GeojsonDataset().createGeojsonDataset(itemId=geojsonFile['itemId'],
                                           params={})
     return geojsonFile
Example #17
    def promoteItemToDataset(self, item, params):
        """
        Take an Item in the user's Minerva Dataset folder, and promote
        it to a Minerva Dataset by adding proper Minerva metadata.
        """

        user = self.getCurrentUser()
        folder = findDatasetFolder(user, user, create=True)
        if folder is None:
            raise RestException('User has no Minerva Dataset folder.')
        if folder['_id'] != item['folderId']:
            raise RestException("Items need to be in user's Minerva Dataset " +
                                "folder.")
        # Don't overwrite if minerva metadata already exists.
        if 'meta' in item and 'minerva' in item['meta']:
            return item

        minerva_metadata = {
            'source_type': 'item'
        }
        for file in self.model('item').childFiles(item=item, limit=0):
            # TODO This switching based on which file is found first is
            # fairly brittle and should only be called after first upload.
            if 'geojson' in file['exts']:
                # we found a geojson, assume this is geojson original
                minerva_metadata['original_type'] = 'geojson'
                minerva_metadata['dataset_type'] = 'geojson'
                minerva_metadata['original_files'] = [{
                    'name': file['name'], '_id': file['_id']}]
                minerva_metadata['geojson_file'] = {
                    'name': file['name'], '_id': file['_id']}
                break
            elif 'json' in file['exts']:
                minerva_metadata['original_type'] = 'json'
                minerva_metadata['dataset_type'] = 'json'
                minerva_metadata['original_files'] = [{
                    'name': file['name'], '_id': file['_id']}]
                break
            elif 'csv' in file['exts']:
                minerva_metadata['original_type'] = 'csv'
                minerva_metadata['dataset_type'] = 'csv'
                minerva_metadata['original_files'] = [{
                    'name': file['name'], '_id': file['_id']}]
                break
        updateMinervaMetadata(item, minerva_metadata)
        return item
Example #18
    def queryElasticsearch(self, params):
        """
        Creates a local job to run the elasticsearch_worker, the job will store
        the results of the elastic search query in a dataset.
        """
        currentUser = self.getCurrentUser()
        datasetName = params['datasetName']
        elasticsearchParams = params['searchParams']

        datasetFolder = findDatasetFolder(currentUser, currentUser)
        dataset = (self.model('item').createItem(
            datasetName,
            currentUser,
            datasetFolder,
            'created by elasticsearch query'))

        user, token = self.getCurrentUser(returnToken=True)
        kwargs = {
            'params': params,
            'user': currentUser,
            'dataset': dataset,
            'token': token,
            'sourceId': params['sourceId']
        }

        job = self.model('job', 'jobs').createLocalJob(
            title='elasticsearch: %s' % datasetName,
            user=currentUser,
            type='elasticsearch',
            public=False,
            kwargs=kwargs,
            module='girder.plugins.minerva.jobs.elasticsearch_worker',
            async=True)

        minerva_metadata = {
            'dataset_type': 'json',
            'source_id': params['sourceId'],
            'source': 'elasticsearch',
            'elasticsearch_params': elasticsearchParams
        }
        updateMinervaMetadata(dataset, minerva_metadata)

        self.model('job', 'jobs').scheduleJob(job)

        return job
Example #19
def getLayers(user):
    folder = findDatasetFolder(user, user)
    if not folder:
        return []

    items = Folder().childItems(folder)

    layers = {}
    for item in items:
        adapter = item.get('meta', {}).get('minerva', {}).get('adapter')
        name = item.get('meta', {}).get('minerva', {}).get('type_name')
        if adapter == 'bsve':

            # delete duplicates if they exist
            if name in layers:
                Item().remove(item)
            else:
                layers[name] = item

    return layers
Example #21
    def promoteItemToDataset(self, item, params):
        """
        Take an Item in the user's Minerva Dataset folder, and promote
        it to a Minerva Dataset by adding proper Minerva metadata.
        """

        user = self.getCurrentUser()
        folder = findDatasetFolder(user, user, create=True)
        if folder is None:
            raise RestException('User has no Minerva Dataset folder.')
        if folder['_id'] != item['folderId']:
            raise RestException("Items need to be in user's Minerva Dataset " +
                                "folder.")
        # Don't overwrite if minerva metadata already exists.
        if 'meta' in item and 'minerva' in item['meta']:
            return item

        minerva_metadata = self._updateMinervaMetadata(item)
        bounds = self._getBound(item)
        if bounds:
            minerva_metadata['bounds'] = bounds
            updateMinervaMetadata(item, minerva_metadata)
        return item
Example #24
 def createDatasetFolder(self, user, params):
     folder = findDatasetFolder(self.getCurrentUser(), user, create=True)
     return {'folder': folder}
Example #25
 def getDatasetFolder(self, user, params):
     folder = findDatasetFolder(self.getCurrentUser(), user)
     return {'folder': folder}
Example #26
    def promoteItemToDataset(self, item, params):
        """
        Take an Item in the user's Minerva Dataset folder, and promote
        it to a Minerva Dataset by adding proper Minerva metadata.
        """

        user = self.getCurrentUser()
        folder = findDatasetFolder(user, user, create=True)
        if folder is None:
            raise RestException('User has no Minerva Dataset folder.')
        if folder['_id'] != item['folderId']:
            raise RestException("Items need to be in user's Minerva Dataset " +
                                "folder.")
        # Don't overwrite if minerva metadata already exists.
        if 'meta' in item and 'minerva' in item['meta']:
            return item

        minerva_metadata = {'source_type': 'item'}
        for file in self.model('item').childFiles(item=item, limit=0):
            # Check the first few k of a file to see if this might be a
            # geojson timeseries.  Crudely, we expect this to be a json array
            # which contains objects, each of which has at least a geojson
            # element.  This test will fail if there are other elements in the
            # first object that push the geojson element beyond the tested
            # header length.  It could give a false positive, too.  The correct
            # way would be to download and parse the whole file, but that would
            # be more expensive in memory and time.
            headerLen = 2048
            fileHeader = ''
            for headerData in self.model('file').download(file,
                                                          headers=False,
                                                          endByte=headerLen)():
                fileHeader = (fileHeader + headerData)[:headerLen]
                if len(fileHeader) >= headerLen:
                    break
            if (fileHeader.lstrip()[:1] == '['
                    and fileHeader.lstrip()[1:].lstrip()[:1] == '{'
                    and '"geojson"' in fileHeader):
                minerva_metadata['original_type'] = 'geojson-timeseries'
                minerva_metadata['dataset_type'] = 'geojson-timeseries'
                minerva_metadata['original_files'] = [{
                    'name': file['name'],
                    '_id': file['_id']
                }]
                minerva_metadata['geojson_file'] = {
                    'name': file['name'],
                    '_id': file['_id']
                }
                minerva_metadata['source'] = {'layer_source': 'GeoJSON'}
                break
            # TODO This switching based on which file is found first is
            # fairly brittle and should only be called after first upload.
            if 'geojson' in file['exts']:
                # we found a geojson, assume this is geojson original
                minerva_metadata['original_type'] = 'geojson'
                minerva_metadata['dataset_type'] = 'geojson'
                minerva_metadata['original_files'] = [{
                    'name': file['name'],
                    '_id': file['_id']
                }]
                minerva_metadata['geojson_file'] = {
                    'name': file['name'],
                    '_id': file['_id']
                }
                minerva_metadata['source'] = {'layer_source': 'GeoJSON'}
                break
            elif 'json' in file['exts']:
                minerva_metadata['original_type'] = 'json'
                minerva_metadata['dataset_type'] = 'json'
                minerva_metadata['original_files'] = [{
                    'name': file['name'],
                    '_id': file['_id']
                }]
                break
            elif 'csv' in file['exts']:
                minerva_metadata['original_type'] = 'csv'
                minerva_metadata['dataset_type'] = 'csv'
                minerva_metadata['original_files'] = [{
                    'name': file['name'],
                    '_id': file['_id']
                }]
                break
            elif ({'tif', 'tiff'}.intersection(file['exts'])
                  and file['mimeType'] == 'image/tiff'):
                info = getInfo(file)
                if 'srs' in info and info['srs']:
                    minerva_metadata['original_type'] = 'tiff'
                    minerva_metadata['dataset_type'] = 'geotiff'
                    minerva_metadata['original_files'] = [{
                        'name': file['name'],
                        '_id': file['_id']
                    }]
                    minerva_metadata['source'] = {'layer_source': 'Tiff'}
                break
        updateMinervaMetadata(item, minerva_metadata)
        bounds = self._getBound(item)
        if bounds:
            minerva_metadata['bounds'] = bounds
            updateMinervaMetadata(item, minerva_metadata)
        return item
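As the comment above notes, the geojson-timeseries check only inspects the first couple of kilobytes. A minimal standalone sketch of that heuristic, run against a hypothetical in-memory sample instead of a Girder file download:

# Hypothetical start of a geojson-timeseries upload: a JSON array of objects,
# each carrying a "geojson" key.
sample = '[{"time": "2015-01-01", "geojson": {"type": "FeatureCollection", "features": []}}]'

headerLen = 2048
fileHeader = sample[:headerLen]

# Same three-part test as above: starts with '[', then '{', and mentions "geojson".
isTimeseries = (fileHeader.lstrip()[:1] == '['
                and fileHeader.lstrip()[1:].lstrip()[:1] == '{'
                and '"geojson"' in fileHeader)
print(isTimeseries)  # True for this sample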
Example #27
    def bsveSearchAnalysis(self, params):
        currentUser = self.getCurrentUser()
        datasetName = params['datasetName']
        bsveSearchParams = params['bsveSearchParams']
        analysis = findAnalysisByName(currentUser, 'bsve search')
        # TODO in case can't find analysis?

        try:
            bsveSearchParams = json.loads(bsveSearchParams)
        except ValueError:
            raise RestException('bsveSearchParams is invalid JSON.')

        datasetFolder = findDatasetFolder(currentUser, currentUser)
        # TODO
        # try findOne earlier
        # all throughout utility
        # create a new dataset in the dataset folder with this name
        # TODO in case of duplicates?
        dataset = (self.model('item').createItem(datasetName, currentUser,
                                                 datasetFolder,
                                                 'created by bsve search'))

        params = {
            'bsveSearchParams': bsveSearchParams
        }

        # create a local job with bsve search
        # tie in the dataset id with the local job
        # TODO would we rather create the dataset at the end of the bsve search?
        # TODO change token to job token
        user, token = self.getCurrentUser(returnToken=True)
        kwargs = {
            'params': params,
            'user': currentUser,
            'dataset': dataset,
            'analysis': analysis,
            'token': token
        }

        job = self.model('job', 'jobs').createLocalJob(
            title='bsve search: %s' % datasetName,
            user=currentUser,
            type='bsve.search',
            public=False,
            kwargs=kwargs,
            module='girder.plugins.minerva.jobs.bsve_search_worker',
            async=True)

        if 'meta' in dataset:
            metadata = dataset['meta']
        else:
            metadata = {}

        minerva_metadata = {
            'dataset_id': dataset['_id'],
            'source': 'bsve_search',
            'bsve_search_params': bsveSearchParams,
            'original_type': 'json'
        }
        metadata['minerva'] = minerva_metadata
        self.model('item').setMetadata(dataset, metadata)

        self.model('job', 'jobs').scheduleJob(job)

        return minerva_metadata
Example #30
    def createPostgresGeojsonDataset(self, assetstore, params):
        filter = params['filter']
        table = params['table']
        field = params['field']
        aggregateFunction = params['aggregateFunction']
        geometryField = json.loads(params['geometryField'])

        if geometryField['type'] == 'built-in':
            buildInGeomField = geometryField['field']
            properties = [field, {
                'func': aggregateFunction,
                'param': {'field': field}
            }]
            # add string fields with concat aggregate function and in the format for
            # json_build_object
            for i in self._getColumns(assetstore, {'table': params['table']}):
                if i['datatype'] == 'string' and i['name'] != field:
                    properties.extend((i['name'], {
                        'func': 'string_agg',
                        'param': [{
                            'func': 'distinct',
                            'param': {'field': i['name']}
                        }, '|'],
                        'reference': i['name']
                    }))
            fields = [{
                'func': 'json_build_object', 'param': [
                    'type', 'Feature',
                    'geometry', {
                        'func': 'cast', 'param': [{
                            'func': 'st_asgeojson', 'param': [{
                                'func': 'st_transform', 'param': [{'field': buildInGeomField}, 4326]
                            }]}, 'JSON']
                    },
                    'properties', {
                        'func': 'json_build_object', 'param': properties
                    }
                ]
            }]
            group = [buildInGeomField]
        elif geometryField['type'] == 'link':
            fields = [{
                'func': aggregateFunction,
                'param': {'field': field},
                'reference': field
            }]
            group = [x['value'] for x in geometryField['links']]
            # add string fields with concat aggregate function
            for i in self._getColumns(assetstore, {'table': params['table']}):
                if i['datatype'] in ('string', 'number', 'date') and i['name'] != field:
                    if i['datatype'] == 'string':
                        fields.append({
                            'func': 'string_agg',
                            'param': [{
                                'func': 'distinct',
                                'param': {'field': i['name']}
                            }, '|'],
                            'reference': i['name']
                        })

        datasetName = params['datasetName']
        # TODO: schema should be read from the listed table, not set explicitly
        schema = 'public'
        hash = hashlib.md5(filter).hexdigest()
        if datasetName:
            output_name = datasetName
        else:
            output_name = '{0}.{1}.{2}.geojson'.format(
                table, field, hash[-6:])
        currentUser = self.getCurrentUser()
        datasetFolder = findDatasetFolder(currentUser, currentUser)
        adapter = assetstore_utilities.getAssetstoreAdapter(assetstore)
        # Create the item
        dbParams = self._getQueryParams(
            schema, table, fields, group, filter,
            'GeoJSON' if geometryField['type'] == 'built-in' else 'json')
        dbParams['tables'][0]['name'] = output_name
        result = adapter.importData(datasetFolder, 'folder', dbParams,
                                    progress.noProgress, currentUser)
        resItem = result[0]['item']
        GeojsonDataset().createGeojsonDataset(
            itemId=resItem['_id'],
            postgresGeojson={
                'geometryField': geometryField,
                'field': field,
                'aggregateFunction': aggregateFunction
            }, params={})
        return resItem['_id']
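As a side note, the fallback output_name above joins the table, the field, and the last six hex digits of an MD5 of the filter string. A small sketch with hypothetical values (the original code runs under Python 2 and hashes the str directly; here the filter is encoded first):

import hashlib

# Hypothetical inputs; in the example above they come from params.
table, field, filter_ = 'disease_cases', 'count', 'year eq 2015'

digest = hashlib.md5(filter_.encode('utf-8')).hexdigest()
output_name = '{0}.{1}.{2}.geojson'.format(table, field, digest[-6:])
print(output_name)  # disease_cases.count.<last 6 hex digits>.geojson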