def setIsValide(job, dbsession, logger, testing=False):
    """ Marks a georeference process as 'isvalide' and, if no newer process is
        already active for the same map, activates it for that map.

        :type georeference.models.vkdb.adminjobs.AdminJobs: job
        :type sqlalchemy.orm.session.Session: dbsession
        :type logging.Logger: logger
        :type boolean: testing (Default: False) """
    logger.debug('Set georeference process for id %s to isvalide ...'%(job.georefid))

    # resolve the process which should be marked as valid
    georefProcess = Georeferenzierungsprozess.by_id(job.georefid, dbsession)

    # look for a currently active process belonging to the same map
    activeGeorefProcess = Georeferenzierungsprozess.getActualGeoreferenceProcessForMapId(georefProcess.mapid, dbsession)
    mapObj = Map.by_id(georefProcess.mapid, dbsession)

    if activeGeorefProcess is None:
        # no process is active for this map yet, so simply enable this one
        logger.info('Activate georeference process %s for the map object %s ...'%(georefProcess.id, georefProcess.mapid))
        activate(georefProcess, mapObj, dbsession, logger)
    elif activeGeorefProcess.id < georefProcess.id:
        # an older process is active; swap it for the newer validated one
        logger.info('Activate the is valide georeference process and deactive old one ...')
        deactivate(activeGeorefProcess, mapObj, dbsession, logger)
        activate(georefProcess, mapObj, dbsession, logger)
    else:
        # the active process is this one or a newer one - nothing to switch
        logger.info('The georeference process with the id %s or younger process is already active for this map object.'%georefProcess.id)

    # a formerly invalid process becoming valid counts towards the map's georef params again
    if georefProcess.adminvalidation == 'invalide':
        mapObj.hasgeorefparams = mapObj.hasgeorefparams + 1
    georefProcess.adminvalidation = 'isvalide'
def setUp(self):
    # Fixture: one already-active "new" georeference process plus its map object.
    self.process = Georeferenzierungsprozess(
        mapid=10002567,
        nutzerid=TEST_LOGIN,
        clippolygon={
            'source': 'pixel',
            'polygon': [[467, 923], [7281, 999], [7224, 7432], [258, 7471], [467, 923]]},
        georefparams={
            'source': 'pixel',
            'target': 'EPSG:4314',
            'gcps': [
                {'source': [467, 923], 'target': [10.6666660308838, 51.4000015258789]},
                {'source': [7281, 999], 'target': [10.8333339691162, 51.4000015258789]},
                {'source': [7224, 7432], 'target': [10.8333339691162, 51.2999992370605]},
                {'source': [258, 7471], 'target': [10.6666660308838, 51.2999992370605]}],
            "algorithm": "affine"},
        timestamp="2014-08-09 12:20:26",
        type='new',
        algorithm='affine',
        isactive=True,
        processed=False,
        overwrites=0,
        adminvalidation='')
    self.map = Map(
        id=10002567,
        apsobjectid=90015724,
        apsdateiname="df_dk_0010001_4630_1928",
        boundingbox="POLYGON((10.6666660308838 51.2999992370605,10.6666660308838 51.4000015258789,10.8333339691162 51.4000015258789,10.8333339691162 51.2999992370605,10.6666660308838 51.2999992370605))",
        originalimage=os.path.join(TEST_DATA_DIR, "df_dk_0010001_4630_1928.tif"))
def runningNewJobs(dbsession, logger):
    """ Processes all pending georeference jobs of type "new" and activates them.

        :type sqlalchemy.orm.session.Session: dbsession
        :type logging.Logger: logger
        :return: int Number of processed jobs """
    logger.info('Check for unprocessed new georeference jobs ...')
    counter = 0
    for job in Georeferenzierungsprozess.getUnprocessedObjectsOfTypeNew(dbsession):
        logger.info('Start processing of a "new" georeference process with id - %s' % job.id)

        # resolve concurrent submissions for the same map before activating
        georefObj = Georeferenzierungsprozess.clearRaceConditions(job, dbsession)
        mapObj = Map.by_id(georefObj.mapid, dbsession)
        activate(georefObj, mapObj, dbsession, logger)

        logger.info('Finish processing of a "new" georeference process with id - %s' % job.id)
        counter += 1
    return counter
def runningUpdateJobs(dbsession, logger):
    """ Processes all pending georeference jobs of type "update": deactivates the
        currently active process for the map (if any) and activates the update.

        :type sqlalchemy.orm.session.Session: dbsession
        :type logging.Logger: logger
        :return: int Number of processed jobs """
    logger.info('Check for unprocessed update georeference jobs ...')
    counter = 0
    for job in Georeferenzierungsprozess.getUnprocessedObjectsOfTypeUpdate(dbsession):
        logger.info('Start processing of a "update" georeference process with id - %s'%job.id)
        georefObj = Georeferenzierungsprozess.clearRaceConditions(job, dbsession)

        # get active georeference process and deactivate it, if it exists
        activeGeorefProcess = Georeferenzierungsprozess.getActualGeoreferenceProcessForMapId(georefObj.mapid, dbsession)
        mapObj = Map.by_id(georefObj.mapid, dbsession)
        if activeGeorefProcess:
            logger.info('Deactivate georeference processes with id %s ...'%activeGeorefProcess.id)
            deactivate(activeGeorefProcess, mapObj, dbsession, logger)

        logger.info('Activate georeference processes with id %s ...'%georefObj.id)
        activate(georefObj, mapObj, dbsession, logger)

        logger.info('Finish processing of a "update" georeference process with id - %s'%job.id)
        counter += 1
    return counter
def generateGeoreferenceProcessForSpecificGeoreferenceId(georeferenceId, request, log):
    """ Function generates a process process for a given map object id

        :type georeferenceId: int
        :type request: pyramid.request
        :type log: logging.Logger
        :return: dict """
    georefProcessObj = Georeferenzierungsprozess.by_id(georeferenceId, request.db)
    mapObj = Map.by_id(georefProcessObj.mapid, request.db)

    # collect the general metadata of the map object
    log.debug('Get general process process information ...')
    generalMetadata = getGeneralMetadata(mapObj, request)

    # collect the georeference data belonging to the requested process
    log.debug('Get specific process process information ...')
    georeferenceData = getSpecificGeoreferenceData(georefProcessObj, mapObj, 4326, request.db)

    # warn the client if somebody else is currently georeferencing this sheet
    log.debug('Check if there are pending processes in the database')
    warnMsg = {}
    if checkIfPendingProcessesExist(mapObj, request):
        warnMsg["warn"] = 'Right now another users is working on the georeferencing of this map sheet. For preventing information losses please try again in 15 minutes.'

    # merge the partial dictionaries into the final response
    response = {}
    for part in (generalMetadata, georeferenceData, warnMsg):
        response.update(part)
    return response
def updateMTBLayer(dbsession, logger, withOverviews=False):
    """ Updates the vrt databasis (database index and virtual datasets) for the
        aggregated mtb layer.

        :type sqlalchemy.orm.session.Session: dbsession
        :type logging.Logger: logger
        :type boolean: withOverviews (Default: False) Be careful with this parameter,
            because of much more workload todo.
        :return: None """
    logger.info('Update aggregated layer for MTBs ...')

    logger.info('Update database reference of the aggregated layer ...')
    for mapObj in Map.all(dbsession):
        # hoisted: the lowercased maptype was recomputed up to three times per map
        maptype = str(mapObj.maptype).lower()
        if maptype == 'mtb':
            if mapObj.isttransformiert:
                pushMapObjToWmsDatabaseIndex(mapObj, AGGREGATED_LAYERID[maptype], dbsession)
            else:
                removeMapObjFromWmsDatabaseIndex(mapObj, AGGREGATED_LAYERID[maptype], dbsession)

    logger.info('Update vrts for this aggregated layer ... ')
    # one virtual dataset per yearly timestamp of the MTB series
    for value in range(1868, 1946):
        updateVirtualdatasetForTimestamp(
            '%s-01-01 00:00:00' % value,
            os.path.join(GEOREFERENCE_PERSITENT_VRT, 'mtb'),
            TMP_DIR, DBCONFIG_PARAMS, dbsession, logger)

    if withOverviews:
        logger.info('Calculate overviews for vrts ...')
def setUp(self):
    # Fixture: one map with metadata plus an unprocessed "update" georeference process.
    self.testData = [
        Map(id=10000023,
            apsobjectid=90015724,
            apsdateiname="df_dk_0010001_4630_1928",
            boundingbox="POLYGON((16.9999980926514 51.7999992370605,16.9999980926514 51.9000015258789,17.1666679382324 51.9000015258789,17.1666679382324 51.7999992370605,16.9999980926514 51.7999992370605))",
            maptype="M",
            originalimage=os.path.join(self.testDataDir, "df_dk_0010001_4630_1928.tif"),
            georefimage=os.path.join(self.testDataDir, "df_dk_0010001_4630_1928.tif"),
            isttransformiert=True,
            recommendedsrid=4314),
        Metadata(mapid=10000023,
            imagezoomify='http://fotothek.slub-dresden.de/zooms/df/dk/0010000/df_dk_0010001_3352_1918/ImageProperties.xml',
            title='',
            titleshort='',
            scale='1:25000',
            timepublish=datetime.now(),
            imagejpg='http://fotothek.slub-dresden.de/fotos/df/dk/0010000/df_dk_0010001_2655.jpg',
            thumbssmall='http://fotothek.slub-dresden.de/thumbs/df/dk/0010000/df_dk_0010001_6817.jpg',
            description='Ars an der Mosel. - Aufn. 1880, hrsg. 1882, Aufldr. 1916. - 1:25000. - [Berlin]: Kgl. Preuss. Landesaufnahme, 1916. - 1 Kt.',
            technic='Lithografie & Umdruck',
            type='Druckgraphik')
    ]

    # clipparameter and georefparams carry identical gcp dictionaries
    gcpParams = {
        'source': 'pixel',
        'target': 'EPSG:4314',
        'gcps': [
            {'source': [467, 923], 'target': [10.6666660308838, 51.4000015258789]},
            {'source': [7281, 999], 'target': [10.8333339691162, 51.4000015258789]},
            {'source': [7224, 7432], 'target': [10.8333339691162, 51.2999992370605]},
            {'source': [258, 7471], 'target': [10.6666660308838, 51.2999992370605]}]}
    self.georefProcess = Georeferenzierungsprozess(
        mapid=10000023,
        messtischblattid=90015724,
        nutzerid=self.user,
        clipparameter=gcpParams,
        georefparams=gcpParams,
        clippolygon={'source': 'pixel', 'polygon': [[7813, 7517], [1652, 7523], [1677, 1666], [7830, 1661], [7813, 7517]]},
        timestamp="2014-08-09 12:20:26",
        type='update',
        isactive=False,
        processed=False,
        overwrites=0,
        adminvalidation='')

    try:
        for obj in self.testData:
            self.dbsession.add(obj)
            self.dbsession.flush()
    except Exception:
        raise
def setUpClass(cls):
    BaseTestCase.setUpClass()
    # Fixture shared by all tests of this class: one untransformed, inactive map.
    cls.testData = {
        'mapObj': Map(
            id=10000023,
            apsobjectid=90015724,
            apsdateiname="df_dk_0010001_3352_1918",
            originalimage='',
            georefimage='',
            istaktiv=False,
            isttransformiert=False,
            maptype='M',
            hasgeorefparams=0)
    }
def testGetDataRecord(self): mapObj = Map.by_id(10000023, self.dbsession) response = createSearchRecord(mapObj, self.dbsession, self.logger, self.georefProcess) print '=====================' print 'Test if testGetDataRecord ...' print 'Response: %s'%response print '=====================' self.assertTrue('oai:de:slub-dresden:vk:id-10000023' in response, 'Missing key in response') self.assertEqual(response['oai:de:slub-dresden:vk:id-10000023']['dataid'], 'df_dk_0010001_4630_1928', 'Dataid has not expected value')
def setUp(self):
    self.config = testing.setUp()
    self.config.registry.dbmaker = self.Session

    # Fixture: two maps (M and GL type) with matching metadata records.
    self.testData = [
        Map(id=10000023,
            apsobjectid=90015724,
            apsdateiname="df_dk_0010001_3352_1890",
            recommendedsrid=4314,
            boundingbox="POLYGON((16.9999980926514 51.7999992370605,16.9999980926514 51.9000015258789,17.1666679382324 51.9000015258789,17.1666679382324 51.7999992370605,16.9999980926514 51.7999992370605))",
            maptype="M"),
        Map(id=10000024,
            apsobjectid=90015725,
            apsdateiname="df_dk_0010001_3352_18901",
            maptype="GL"),
        Metadata(mapid=10000023,
            imagezoomify='http://fotothek.slub-dresden.de/zooms/df/dk/0010000/df_dk_0010001_3352_1918/ImageProperties.xml',
            title='',
            titleshort=''),
        Metadata(mapid=10000024,
            imagezoomify='http://fotothek.slub-dresden.de/zooms/df/dk/0010000/df_dk_0010001_3352_1918/ImageProperties.xml',
            title='',
            titleshort='')
    ]

    try:
        for obj in self.testData:
            self.dbsession.add(obj)
            self.dbsession.flush()
    except Exception:
        raise
def testSetBoundingBox(self): mapObj = Map(id=1, georefimage=self.file) # check if response is correct dataset = gdal.Open(self.file, GA_ReadOnly) bounds = getBoundsFromDataset(dataset) polygon = "POLYGON((%(lx)s, %(ly)s, %(lx)s, %(uy)s, %(ux)s, %(uy)s, %(ux)s, %(ly)s, %(lx)s, %(ly)s))" % { "lx": bounds[0], "ly": bounds[1], "ux": bounds[2], "uy": bounds[3] } print polygon print self.file print getBoundsFromDataset(dataset)
def setInValide(job, dbsession, logger, testing=False):
    """ This function sets a georeference process as 'invalide'. If the process is
        currently active it is deactivated; if it overwrites an older process, the
        last valid predecessor is activated again for the map.

        (Docstring previously copy-pasted from setIsValide and described the
        opposite behavior.)

        :type georeference.models.vkdb.adminjobs.AdminJobs: job
        :type sqlalchemy.orm.session.Session: dbsession
        :type logging.Logger: logger
        :type boolean: testing (Default: False) """
    # BUGFIX: message said "to isvalide" although this function invalidates
    logger.debug('Set georeference process for id %s to invalide ...' % (job.georefid))

    # resolve the georeference process which should be invalidated
    georefProcess = Georeferenzierungsprozess.by_id(job.georefid, dbsession)

    # update map object and datasource
    mapObj = Map.by_id(georefProcess.mapid, dbsession)
    if georefProcess.isactive == True and georefProcess.overwrites > 0:
        logger.info(
            'Deactive georeference process and activate georeference process with id %s ...'
            % georefProcess.overwrites)

        # deactive the georeference process
        deactivate(georefProcess, mapObj, dbsession, logger)

        # look if there is a valid overwrite id and if yes activate the matching
        # process
        newGeorefProcess = getLastValidGeoreferenceProcess(
            georefProcess.overwrites, dbsession, logger)
        if newGeorefProcess:
            activate(newGeorefProcess, mapObj, dbsession, logger)
    elif georefProcess.isactive == True and georefProcess.overwrites == 0:
        # BUGFIX: log the id of the process itself - overwrites is always 0 in
        # this branch, so the old message always printed "process 0"
        logger.info('Deactive georeference process %s ...' % georefProcess.id)

        # deactive the georeference process
        deactivate(georefProcess, mapObj, dbsession, logger)

    # BUGFIX: same wrong interpolation (overwrites instead of id) as above
    logger.debug('Set georeference process with id %s to inactive ...' % georefProcess.id)
    if georefProcess.adminvalidation != 'invalide':
        georefProcess.adminvalidation = 'invalide'
        # keep the counter of usable georeference params from dropping below zero
        mapObj.hasgeorefparams = 0 if mapObj.hasgeorefparams - 1 < 0 else mapObj.hasgeorefparams - 1
def testPushRecordToEs(self): mapObj = Map.by_id(10000023, self.dbsession) datarecord = createSearchRecord(mapObj, self.dbsession, self.logger) key = pushRecordToEs(datarecord, ELASTICSEARCH_INDEX, self.logger) print '=====================' print 'Test if testPushRecordToEs ...' print 'Response: %s'%key print '=====================' # check if the record was insert correctly response = getRecordFromEsById(key, ELASTICSEARCH_INDEX) self.assertEqual(response['found'], True, 'Could not find expected record') self.assertEqual(response['_id'], key, 'Key is not like expected') # clear up response = deleteRecordFromEsById(key, ELASTICSEARCH_INDEX) if response['found'] is not True: raise Exception("Problems while trying to clean up elasticsearch test index")
def parseMapObjForId(requestParams, name, dbsession):
    """ This functions parses a map objectid from an objectid

        :param requestParams: dict
        :param name: str
        :param dbsession: sqlalchemy.orm.session.Session
        :return: process.models.vkdb.map.Map
        :raise: process.utils.exceptions.ParameterException """
    # guard clause: parameter must be present at all
    if name not in requestParams:
        raise ParameterException('Missing or wrong objectid parameter.')

    validateId(requestParams[name])

    # @deprecated
    # do mapping for support of new name schema
    mapObj = Map.by_id(int(requestParams[name]), dbsession)
    if mapObj is None:
        raise ParameterException('Missing or wrong objectid parameter.')
    return mapObj
def runningNewJobs(dbsession, logger):
    """ Runs the persistent georeference job for new georeference jobs.

        :type sqlalchemy.orm.session.Session: dbsession
        :type logging.Logger: logger
        :return: int Number of processed jobs """
    logger.info('Check for unprocessed new georeference jobs ...')
    pendingJobs = Georeferenzierungsprozess.getUnprocessedObjectsOfTypeNew(dbsession)

    processedCount = 0
    for pendingJob in pendingJobs:
        logger.info('Start processing of a "new" georeference process with id - %s' % pendingJob.id)

        # make sure concurrent submissions for the same map are resolved first
        cleanJob = Georeferenzierungsprozess.clearRaceConditions(pendingJob, dbsession)
        activate(cleanJob, Map.by_id(cleanJob.mapid, dbsession), dbsession, logger)

        logger.info('Finish processing of a "new" georeference process with id - %s' % pendingJob.id)
        processedCount += 1
    return processedCount
def testPushRecordToEs(self): mapObj = Map.by_id(10000023, self.dbsession) datarecord = createSearchRecord(mapObj, self.dbsession, self.logger) key = pushRecordToEs(datarecord, ELASTICSEARCH_INDEX, self.logger) print '=====================' print 'Test if testPushRecordToEs ...' print 'Response: %s' % key print '=====================' # check if the record was insert correctly response = getRecordFromEsById(key, ELASTICSEARCH_INDEX) self.assertEqual(response['found'], True, 'Could not find expected record') self.assertEqual(response['_id'], key, 'Key is not like expected') # clear up response = deleteRecordFromEsById(key, ELASTICSEARCH_INDEX) if response['found'] is not True: raise Exception( "Problems while trying to clean up elasticsearch test index")
def setIsValide(job, dbsession, logger, testing=False):
    """ This function sets a georeference process as 'isvalide' and if
        there is no other newer georeference process for this map it
        activates this georeference process for this map.

        :type georeference.models.vkdb.adminjobs.AdminJobs: job
        :type sqlalchemy.orm.session.Session: dbsession
        :type logging.Logger: logger
        :type boolean: testing (Default: False) """
    logger.debug('Set georeference process for id %s to isvalide ...' % (job.georefid))

    # set georeferenceprocess to isvalide
    georefProcess = Georeferenzierungsprozess.by_id(job.georefid, dbsession)

    # check if there is an other process which is newer and more up to date
    activeGeorefProcess = Georeferenzierungsprozess.getActualGeoreferenceProcessForMapId(
        georefProcess.mapid, dbsession)
    mapObj = Map.by_id(georefProcess.mapid, dbsession)

    if activeGeorefProcess and activeGeorefProcess.id >= georefProcess.id:
        # the active process is at least as new as this one: nothing to activate
        logger.info(
            'The georeference process with the id %s or younger process is already active for this map object.'
            % georefProcess.id)
    elif activeGeorefProcess and activeGeorefProcess.id < georefProcess.id:
        # this process supersedes the currently active one
        logger.info('Activate the is valide georeference process and deactive old one ...')
        deactivate(activeGeorefProcess, mapObj, dbsession, logger)
        activate(georefProcess, mapObj, dbsession, logger)
    else:
        # no active process exists for the map yet
        logger.info('Activate georeference process %s for the map object %s ...'
                    % (georefProcess.id, georefProcess.mapid))
        activate(georefProcess, mapObj, dbsession, logger)

    # a formerly invalid process becoming valid counts towards hasgeorefparams again
    if georefProcess.adminvalidation == 'invalide':
        mapObj.hasgeorefparams = mapObj.hasgeorefparams + 1
    georefProcess.adminvalidation = 'isvalide'
def setInValide(job, dbsession, logger, testing=False):
    """ This function sets a georeference process as 'invalide'. An active process
        is deactivated; when it overwrote an older process, the last valid
        predecessor is reactivated for the map.

        (Docstring previously copy-pasted from setIsValide and described the
        opposite behavior.)

        :type georeference.models.vkdb.adminjobs.AdminJobs: job
        :type sqlalchemy.orm.session.Session: dbsession
        :type logging.Logger: logger
        :type boolean: testing (Default: False) """
    # BUGFIX: message said "to isvalide" although this function invalidates
    logger.debug('Set georeference process for id %s to invalide ...'%(job.georefid))

    # resolve the georeference process which should be invalidated
    georefProcess = Georeferenzierungsprozess.by_id(job.georefid, dbsession)

    # update map object and datasource
    mapObj = Map.by_id(georefProcess.mapid, dbsession)
    if georefProcess.isactive == True and georefProcess.overwrites > 0:
        logger.info('Deactive georeference process and activate georeference process with id %s ...'%georefProcess.overwrites)

        # deactive the georeference process
        deactivate(georefProcess, mapObj, dbsession, logger)

        # look if there is a valid overwrite id and if yes activate the matching
        # process
        newGeorefProcess = getLastValidGeoreferenceProcess(georefProcess.overwrites, dbsession, logger)
        if newGeorefProcess:
            activate(newGeorefProcess, mapObj, dbsession, logger)
    elif georefProcess.isactive == True and georefProcess.overwrites == 0:
        # BUGFIX: overwrites is always 0 in this branch; log the process id instead
        logger.info('Deactive georeference process %s ...'%georefProcess.id)

        # deactive the georeference process
        deactivate(georefProcess, mapObj, dbsession, logger)

    # BUGFIX: same wrong interpolation (overwrites instead of id) as above
    logger.debug('Set georeference process with id %s to inactive ...'%georefProcess.id)
    if georefProcess.adminvalidation != 'invalide':
        georefProcess.adminvalidation = 'invalide'
        # keep the counter of usable georeference params from dropping below zero
        mapObj.hasgeorefparams = 0 if mapObj.hasgeorefparams - 1 < 0 else mapObj.hasgeorefparams - 1
def runningUpdateJobs(dbsession, logger):
    """ Runs the persistent georeference job for update georeference jobs.

        :type sqlalchemy.orm.session.Session: dbsession
        :type logging.Logger: logger
        :return: int Number of processed jobs """
    logger.info('Check for unprocessed update georeference jobs ...')
    pendingJobs = Georeferenzierungsprozess.getUnprocessedObjectsOfTypeUpdate(dbsession)

    processedCount = 0
    for pendingJob in pendingJobs:
        logger.info('Start processing of a "update" georeference process with id - %s' % pendingJob.id)
        cleanJob = Georeferenzierungsprozess.clearRaceConditions(pendingJob, dbsession)

        # an update replaces whatever process is currently active for the map
        currentProcess = Georeferenzierungsprozess.getActualGeoreferenceProcessForMapId(cleanJob.mapid, dbsession)
        mapObj = Map.by_id(cleanJob.mapid, dbsession)
        if currentProcess:
            logger.info('Deactivate georeference processes with id %s ...' % currentProcess.id)
            deactivate(currentProcess, mapObj, dbsession, logger)

        logger.info('Activate georeference processes with id %s ...' % cleanJob.id)
        activate(cleanJob, mapObj, dbsession, logger)

        logger.info('Finish processing of a "update" georeference process with id - %s' % pendingJob.id)
        processedCount += 1
    return processedCount
def setUp(self):
    # Fixture: one transformed map with full metadata record.
    self.testData = [
        Map(id=10000023,
            apsobjectid=90015724,
            apsdateiname="df_dk_0010001_4630_1928",
            boundingbox="POLYGON((16.9999980926514 51.7999992370605,16.9999980926514 51.9000015258789,17.1666679382324 51.9000015258789,17.1666679382324 51.7999992370605,16.9999980926514 51.7999992370605))",
            maptype="M",
            originalimage=os.path.join(self.testDataDir, "df_dk_0010001_4630_1928.tif"),
            georefimage=os.path.join(self.testDataDir, "df_dk_0010001_4630_1928.tif"),
            isttransformiert=True),
        Metadata(mapid=10000023,
            imagezoomify='http://fotothek.slub-dresden.de/zooms/df/dk/0010000/df_dk_0010001_3352_1918/ImageProperties.xml',
            title='',
            titleshort='',
            scale='1:25000',
            timepublish=datetime.now(),
            imagejpg='http://fotothek.slub-dresden.de/fotos/df/dk/0010000/df_dk_0010001_2655.jpg',
            thumbssmall='http://fotothek.slub-dresden.de/thumbs/df/dk/0010000/df_dk_0010001_6817.jpg',
            description='Ars an der Mosel. - Aufn. 1880, hrsg. 1882, Aufldr. 1916. - 1:25000. - [Berlin]: Kgl. Preuss. Landesaufnahme, 1916. - 1 Kt.',
            technic='Lithografie & Umdruck',
            type='Druckgraphik')
    ]

    try:
        for obj in self.testData:
            self.dbsession.add(obj)
            self.dbsession.flush()
    except Exception:
        raise
def updateMTBLayer(dbsession, logger, withOverviews=False):
    """ Functions update the vrt databasis for a mtb layer.

        :type sqlalchemy.orm.session.Session: dbsession
        :type logging.Logger: logger
        :type boolean: withOverviews (Default: False) Be careful with this parameter,
            because of much more workload todo.
        :return: None """
    logger.info('Update aggregated layer for MTBs ...')

    logger.info('Update database reference of the aggregated layer ...')
    mapObjs = Map.all(dbsession)
    for mapObj in mapObjs:
        # hoisted: str(mapObj.maptype).lower() was evaluated up to three times per map
        maptype = str(mapObj.maptype).lower()
        if maptype == 'mtb':
            if mapObj.isttransformiert:
                pushMapObjToWmsDatabaseIndex(mapObj, AGGREGATED_LAYERID[maptype], dbsession)
            else:
                removeMapObjFromWmsDatabaseIndex(mapObj, AGGREGATED_LAYERID[maptype], dbsession)

    logger.info('Update vrts for this aggregated layer ... ')
    # refresh the yearly virtual datasets of the MTB series
    for value in range(1868, 1946):
        updateVirtualdatasetForTimestamp('%s-01-01 00:00:00'%value,
            os.path.join(GEOREFERENCE_PERSITENT_VRT, 'mtb'), TMP_DIR,
            DBCONFIG_PARAMS, dbsession, logger)

    if withOverviews:
        logger.info('Calculate overviews for vrts ...')
def generateGeoreferenceProcessForMapObj(mapObjId, request, log):
    """ Function generates a process process for a given map object id

        :type mapObjId: int
        :type request: pyramid.request
        :type log: logging.Logger
        :return: dict """

    def getMtbGLSpecificGeoreferenceInformation(mapObj, request):
        """ Query the specific process base data for a messtischblatt/geological map

            :type mapObj: georeference.models.vkdb.map.Map
            :type request: pyramid.request
            :return: dict """
        srid = mapObj.getSRID(request.db)
        extent = mapObj.getExtent(request.db, srid)
        return {
            'georeference': {
                'source': 'pixel',
                'target': 'EPSG:%s' % srid,
                'gcps': [
                    {"source": [], "target": [extent[0], extent[1]]},
                    {"source": [], "target": [extent[0], extent[3]]},
                    {"source": [], "target": [extent[2], extent[1]]},
                    {"source": [], "target": [extent[2], extent[3]]}
                ],
                'algorithm': 'affine'
            },
            'extent': extent
        }

    mapObj = Map.by_id(mapObjId, request.db)

    # get general metadata
    log.debug('Get general process process information ...')
    generalMetadata = getGeneralMetadata(mapObj, request)

    # check if there exist already a activate process process for this mapObj
    log.debug('Get specific process process information ...')
    if Georeferenzierungsprozess.isGeoreferenced(mapObj.id, request.db):
        # there does exist a process process for this mapObj
        georefProcessObj = Georeferenzierungsprozess.getActualGeoreferenceProcessForMapId(mapObj.id, request.db)
        georeferenceData = getSpecificGeoreferenceData(georefProcessObj, mapObj, 4326, request.db)
    else:
        # there does not exist a process process for this mapObj
        georeferenceData = {"timestamp": "", "type": "new"}

    # This block could be used for attaching information regarding the boundingbox and
    # the extent to the response. In case no georeference information is registered yet
    # this could be used by the client for doing a matching of corner point coordinates
    # and the extent corners.
    #
    # log.debug('Check if there is special behavior needed in case of messtischblatt')
    # mtbGeorefBaseData = {}
    # if mapObj.maptype == 'M' and 'georeference' not in georeferenceData and mapObj.boundingbox is not None:
    #     mtbGeorefBaseData = getMtbGLSpecificGeoreferenceInformation(mapObj, request)
    #     response.update(mtbGeorefBaseData)

    log.debug('Check if there are pending processes in the database')
    warnMsg = {}
    if checkIfPendingProcessesExist(mapObj, request):
        warnMsg["warn"] = 'Right now another users is working on the georeferencing of this map sheet. For preventing information losses please try again in 15 minutes.'

    # now merge dictionaries and create response
    response = {"recommendedsrid": mapObj.recommendedsrid}
    response.update(generalMetadata)
    response.update(georeferenceData)
    response.update(warnMsg)
    return response
def setUp(self):
    self.config = testing.setUp()
    self.config.registry.dbmaker = self.Session

    # Fixture: a not-yet-referenced map id plus two dummy georeference processes
    # (one active "new" process and one pending "update" process). The clip/georef
    # parameters of the dummies are stored as opaque strings.
    self.notReferencedObjId = 10000023
    self.dummyProcess = Georeferenzierungsprozess(
        mapid=10000023,
        messtischblattid=90015724,
        nutzerid=self.user,
        clipparameter="{'Test':'Test'}",
        georefparams="{'Test':'Test'}",
        timestamp="2014-08-09 12:20:26",
        type='new',
        isactive=True,
        processed=False,
        overwrites=0,
        adminvalidation='')
    updateParams = ("{'new': {'source': 'pixel', 'target': 'EPSG:4314', 'gcps': ["
        "{'source': [467, 923], 'target': [10.6666660308838, 51.4000015258789]}, "
        "{'source': [7281, 999], 'target': [10.8333339691162, 51.4000015258789]}, "
        "{'source': [7224, 7432], 'target': [10.8333339691162, 51.2999992370605]},"
        "{'source': [258, 7471], 'target': [10.6666660308838, 51.2999992370605]}]},"
        "'remove':{'source': 'pixel', 'target': 'EPSG:4314', 'gcps':[]}}")
    self.dummyProcessUpdate = Georeferenzierungsprozess(
        mapid=10000023,
        messtischblattid=90015724,
        nutzerid=self.user,
        clipparameter=updateParams,
        georefparams=updateParams,
        timestamp="2014-08-09 12:20:26",
        type='update',
        isactive=False,
        processed=False,
        overwrites=0,
        adminvalidation='')

    # create and insert test data to database
    self.testData = [
        Map(id=10000023,
            apsobjectid=90015724,
            apsdateiname="df_dk_0010001_3352_1890",
            boundingbox="POLYGON((16.9999980926514 51.7999992370605,16.9999980926514 51.9000015258789,17.1666679382324 51.9000015258789,17.1666679382324 51.7999992370605,16.9999980926514 51.7999992370605))",
            maptype="M"),
        Map(id=10000024,
            apsobjectid=90015725,
            apsdateiname="df_dk_0010001_3352_18901",
            maptype="GL"),
        Metadata(mapid=10000023,
            imagezoomify='http://fotothek.slub-dresden.de/zooms/df/dk/0010000/df_dk_0010001_3352_1918/ImageProperties.xml',
            title='',
            titleshort=''),
        Metadata(mapid=10000024,
            imagezoomify='http://fotothek.slub-dresden.de/zooms/df/dk/0010000/df_dk_0010001_3352_1918/ImageProperties.xml',
            title='',
            titleshort='')
    ]

    try:
        for obj in self.testData:
            self.dbsession.add(obj)
            self.dbsession.flush()
    except Exception:
        raise
Copyright (c) 2015 Jacob Mendt Created on 02.10.15 @author: mendt @description: The following scripts pushs all database records to elasticsearch ''' import logging from georeference.settings import DBCONFIG_PARAMS from georeference.models.meta import getPostgresEngineString from georeference.models.meta import initializeDb from georeference.models.vkdb.georeferenzierungsprozess import Georeferenzierungsprozess from georeference.models.vkdb.map import Map from georeference.persistent.jobs.genericjobs import pushRecordToSearchIndex from georeference.persistent.jobs.genericjobs import removeRecordFromSearchIndex if __name__ == '__main__': logging.basicConfig() logger = logging.getLogger('Push recrords to ES') dbsession = initializeDb(getPostgresEngineString(DBCONFIG_PARAMS), False) maps = Map.all(dbsession) for mapObj in maps: if mapObj.istaktiv == True: print 'Push map record %s to elastic search ...' % mapObj.id georefObj = Georeferenzierungsprozess.getActualGeoreferenceProcessForMapId(mapObj.id, dbsession) pushRecordToSearchIndex(mapObj, dbsession, logger, georefObj) else: print 'Remove map record %s from elastic search ...' % mapObj.id removeRecordFromSearchIndex(mapObj)
@author: mendt ''' import os import shutil from georeference.settings import DBCONFIG_PARAMS from georeference.models.meta import getPostgresEngineString from georeference.models.meta import initializeDb from georeference.models.vkdb.map import Map DATA_DIRECTORY_ORGINAL = '/srv/vk/data/original' DATA_DIRECTORY_GEOREF = '/srv/vk/data/georef' if __name__ == '__main__': dbsession = initializeDb(getPostgresEngineString(DBCONFIG_PARAMS)) mapObjs = Map.all(dbsession) imageDoesNotExist = [] oldPaths = [] for mapObj in mapObjs: print 'Update data dirs for %s ...' % mapObj.originalimage # update orginal paths newPath = os.path.join(DATA_DIRECTORY_ORGINAL, os.path.join(str(mapObj.maptype).lower(), str(mapObj.originalimage).split('/')[-1])) if not os.path.exists(newPath): # check if path exist in old folder and if yes move the file oldPath = os.path.join(DATA_DIRECTORY_ORGINAL, os.path.join('mtb', str(mapObj.originalimage).split('/')[-1])) if os.path.exists(oldPath): oldPaths.append(oldPath) print 'Move file from %s to %s ...' % (oldPath, newPath) print 'Copy file ...'
def testCreateMapObjWithBBox(self):
    # persisting a Map carrying a WKT boundingbox must flush without errors
    wkt = "POLYGON((10.8333330154419 50.5,10.8333330154419 50.6000022888184,11.0000009536743 50.6000022888184,11.0000009536743 50.5,10.8333330154419 50.5))"
    mapObj = Map(id=1, boundingbox=wkt)
    self.dbsession.add(mapObj)
    self.dbsession.flush()
def setUp(self):
    # Fixture: an active "new" georeference process together with its map
    # object and metadata record, all persisted before every test.
    self.process = Georeferenzierungsprozess(
        mapid=10002567,
        nutzerid=TEST_LOGIN,
        clippolygon={
            'source': 'pixel',
            'polygon': [[467, 923], [7281, 999], [7224, 7432], [258, 7471], [467, 923]]},
        georefparams={
            'source': 'pixel',
            'target': 'EPSG:4314',
            'gcps': [
                {'source': [467, 923], 'target': [10.6666660308838, 51.4000015258789]},
                {'source': [7281, 999], 'target': [10.8333339691162, 51.4000015258789]},
                {'source': [7224, 7432], 'target': [10.8333339691162, 51.2999992370605]},
                {'source': [258, 7471], 'target': [10.6666660308838, 51.2999992370605]}],
            "algorithm": "affine"},
        timestamp="2014-08-09 12:20:26",
        type='new',
        algorithm='affine',
        isactive=True,
        processed=False,
        overwrites=0,
        adminvalidation='')
    self.map = Map(
        id=10002567,
        apsobjectid=90015724,
        apsdateiname="df_dk_0010001_4630_1928",
        boundingbox="POLYGON((10.6666660308838 51.2999992370605,10.6666660308838 51.4000015258789,10.8333339691162 51.4000015258789,10.8333339691162 51.2999992370605,10.6666660308838 51.2999992370605))",
        originalimage=os.path.join(TEST_DATA_DIR, "df_dk_0010001_4630_1928.tif"))
    self.metadata = Metadata(
        mapid=10002567,
        imagezoomify='http://fotothek.slub-dresden.de/zooms/df/dk/0010000/df_dk_0010001_3352_1918/ImageProperties.xml',
        title='',
        titleshort='',
        scale='1:25000',
        timepublish=datetime.now(),
        imagejpg='http://fotothek.slub-dresden.de/fotos/df/dk/0010000/df_dk_0010001_2655.jpg',
        thumbssmall='http://fotothek.slub-dresden.de/thumbs/df/dk/0010000/df_dk_0010001_6817.jpg',
        description='Ars an der Mosel. - Aufn. 1880, hrsg. 1882, Aufldr. 1916. - 1:25000. - [Berlin]: Kgl. Preuss. Landesaufnahme, 1916. - 1 Kt.',
        technic='Lithografie & Umdruck',
        type='Druckgraphik')

    try:
        self.dbsession.add(self.metadata)
        self.dbsession.add(self.map)
        self.dbsession.flush()
    except Exception:
        raise
Created on 02.10.15 @author: mendt @description: The following scripts pushs all database records to elasticsearch ''' import logging from georeference.settings import DBCONFIG_PARAMS from georeference.models.meta import getPostgresEngineString from georeference.models.meta import initializeDb from georeference.models.vkdb.georeferenzierungsprozess import Georeferenzierungsprozess from georeference.models.vkdb.map import Map from georeference.persistent.jobs.genericjobs import pushRecordToSearchIndex from georeference.persistent.jobs.genericjobs import removeRecordFromSearchIndex if __name__ == '__main__': logging.basicConfig() logger = logging.getLogger('Push recrords to ES') dbsession = initializeDb(getPostgresEngineString(DBCONFIG_PARAMS), False) maps = Map.all(dbsession) for mapObj in maps: if mapObj.istaktiv == True: print 'Push map record %s to elastic search ...' % mapObj.id georefObj = Georeferenzierungsprozess.getActualGeoreferenceProcessForMapId( mapObj.id, dbsession) pushRecordToSearchIndex(mapObj, dbsession, logger, georefObj) else: print 'Remove map record %s from elastic search ...' % mapObj.id removeRecordFromSearchIndex(mapObj)