def setUpClass(cls):
    """Set up shared fixtures for the georeferencing test suite.

    Initializes the projection string, test-data file paths, a logger,
    the ground control points (GCPs) and the expected bounding-box ring
    as class attributes.
    """
    # print() call form works in both Python 2 and 3; the original bare
    # print statements are a syntax error under Python 3.
    print('==============')
    print('Start georeferencing tests ...')
    print('==============')
    # Bessel ellipsoid / Potsdam datum, geographic (lon/lat) coordinates.
    cls.proj = '+proj=longlat +ellps=bessel +datum=potsdam +no_defs'
    # Test data directory lives next to the test package.
    cls.dir = os.path.join(os.path.dirname(os.path.realpath(__file__)),
                           '../test-data')
    cls.file = os.path.join(cls.dir, 'test.tif')
    cls.file_vrt = os.path.join(cls.dir, 'test.vrt')
    cls.clip_raster = os.path.join(cls.dir, 'test_georef.tif')
    cls.clip_shp = os.path.join(cls.dir, 'test_shp.shp')
    cls.logger = createLogger('GeoreferenceTest', logging.DEBUG)
    # Ground control points: gdal.GCP(x, y, z, pixel, line) — the four
    # map corners; built as a literal instead of repeated append calls.
    cls.gcps = [
        gdal.GCP(21.1666679382324, 55.7999992370605, 0, 7057, 7348),
        gdal.GCP(21.1666679382324, 55.9000015258789, 0, 7043, 879),
        gdal.GCP(20.9999980926514, 55.7999992370605, 0, 985, 7331),
        gdal.GCP(20.9999980926514, 55.9000015258789, 0, 969, 869),
    ]
    # Closed ring (first point repeated last) describing the map extent.
    cls.boundingbox = [[20.9999980926514, 55.7999992370605],
                       [20.9999980926514, 55.9000015258789],
                       [21.1666679382324, 55.9000015258789],
                       [21.1666679382324, 55.7999992370605],
                       [20.9999980926514, 55.7999992370605]]
def loadLogger(debug=True):
    """Initialize and return the logger for the application.

    Arguments:
        debug {Boolean} -- if True, create the dedicated 'vkviewer'
            logger at DEBUG level; otherwise fall back to the plain
            module-level logger.

    Returns:
        logging.Logger
    """
    if debug:
        log = createLogger('vkviewer', logging.DEBUG)
    else:
        log = logging.getLogger(__name__)
    # Bug fix: the original assigned the logger to a local and discarded
    # it, making the function a no-op; return it so callers can use it.
    return log
def setUpClass(cls):
    """Prepare shared fixtures for the georeferencing tests.

    Sets the projection, test-data paths, logger, GCP list and the
    expected bounding-box ring as class attributes.
    """
    # Use the call form of print: valid in Python 2 and required in
    # Python 3 (the original print statements fail to parse under Py3).
    print('==============')
    print('Start georeferencing tests ...')
    print('==============')
    cls.proj = '+proj=longlat +ellps=bessel +datum=potsdam +no_defs'
    # test data lives next to the test package
    cls.dir = os.path.join(os.path.dirname(os.path.realpath(__file__)), '../test-data')
    cls.file = os.path.join(cls.dir, 'test.tif')
    cls.file_vrt = os.path.join(cls.dir, 'test.vrt')
    cls.clip_raster = os.path.join(cls.dir, 'test_georef.tif')
    cls.clip_shp = os.path.join(cls.dir, 'test_shp.shp')
    cls.logger = createLogger('GeoreferenceTest', logging.DEBUG)
    # gdal.GCP(x, y, z, pixel, line) — four corner control points,
    # collected in a single literal instead of repeated appends.
    cls.gcps = [
        gdal.GCP(21.1666679382324, 55.7999992370605, 0, 7057, 7348),
        gdal.GCP(21.1666679382324, 55.9000015258789, 0, 7043, 879),
        gdal.GCP(20.9999980926514, 55.7999992370605, 0, 985, 7331),
        gdal.GCP(20.9999980926514, 55.9000015258789, 0, 969, 869),
    ]
    # closed polygon ring (first coordinate repeated at the end)
    cls.boundingbox = [[20.9999980926514, 55.7999992370605],
                       [20.9999980926514, 55.9000015258789],
                       [21.1666679382324, 55.9000015258789],
                       [21.1666679382324, 55.7999992370605],
                       [20.9999980926514, 55.7999992370605]]
# NOTE(review): this 'for' loop appears to be the tail of an enclosing
# function (its 'def' is outside the visible range); directory_content,
# source_dir, target_dir and logger come from that scope. The loop
# variable also shadows the Python 2 builtin 'file'.
for file in directory_content:
    # 4314: EPSG code forwarded to the cache builder — presumably the
    # source SRS (DHDN); TODO confirm against buildTMSCache.
    buildTMSCache(os.path.join(source_dir, file), target_dir, 4314, logger)


""" Main """
if __name__ == '__main__':
    script_name = 'UpdateTMSCache.py'
    parser = argparse.ArgumentParser(
        description='This scripts create a TMS Cache for all georeferenced maps.',
        prog='Script %s' % script_name)
    parser.add_argument('--log_file', help='define a log file')
    parser.add_argument(
        '--target_dir',
        help='Directory where the TMS directories should be placed.')
    parser.add_argument('--source_dir', help='Source directory')
    arguments = parser.parse_args()

    # create logger (file-based when --log_file is given, else default)
    if arguments.log_file:
        formatter = logging.Formatter(
            '%(asctime)s - %(name)s - %(levelname)s - %(message)s')
        logger = createLogger('UpdateTMSCache',
                              logging.DEBUG,
                              logFile=''.join(arguments.log_file),
                              formatter=formatter)
    else:
        logger = createLogger('UpdateTMSCache', logging.DEBUG)

    updateTMSCache(arguments.source_dir, arguments.target_dir, logger)
# parse command line
parser = argparse.ArgumentParser(
    description='Parse the key/value pairs from the command line!')
parser.add_argument('--mode', type=str, default='testing',
                    help='Run in "production" or "testing" mode. Without mode parameter it run\'s in testing mode.')
parser.add_argument('--host', help='host for messtischblattdb')
parser.add_argument('--user', help='user for messtischblattdb')
parser.add_argument('--password', help='password for messtischblattdb')
parser.add_argument('--db', help='db name for messtischblattdb')
parser.add_argument('--log_file', help='define a log file')
# Fixed: both help texts below were missing their closing parenthesis.
parser.add_argument('--tmp_dir', default='/tmp',
                    help='define directory for temporary files (default: /tmp)')
parser.add_argument('--vrt_dir', default='/tmp',
                    help='define directory for vrt files (default: /tmp)')
arguments = parser.parse_args()

# create logger: log to file when --log_file is given, otherwise console
if arguments.log_file:
    formatter = logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s - %(message)s')
    # arguments.log_file is already a string; the former ''.join() call
    # was a no-op and has been dropped.
    sqlalchemy_logger = createLogger('sqlalchemy.engine', logging.DEBUG,
                                     logFile=arguments.log_file,
                                     formatter=formatter)
    logger = createLogger('UpdateVRT', logging.DEBUG,
                          logFile=arguments.log_file,
                          formatter=formatter)
else:
    # without a log file, keep the sqlalchemy engine logging quiet
    sqlalchemy_logger = createLogger('sqlalchemy.engine', logging.WARN)
    logger = createLogger('UpdateVRT', logging.DEBUG)

# collect the database connection parameters supplied on the CLI
database_params = {}
if arguments.host:
    database_params['host'] = arguments.host
if arguments.user:
    database_params['user'] = arguments.user
if arguments.password:
    database_params['password'] = arguments.password
if arguments.db:
    database_params['db'] = arguments.db
# NOTE(review): this chunk starts mid-call — the opening
# "parser.add_argument(" for '--layerid' is outside the visible range.
    '--layerid',
    default=87,
    help='database layer id, which represents the vrt time layer (default: 87)')
parser.add_argument(
    '--cache_dir',
    # NOTE(review): default=87 looks copy-pasted from --layerid above; a
    # directory path (or None) seems intended for a cache directory —
    # TODO confirm and fix.
    default=87,
    help='directory where the cache files should be placed.')
arguments = parser.parse_args()

# create logger: log to file when --log_file is given, otherwise console
if arguments.log_file:
    formatter = logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s - %(message)s')
    sqlalchemy_logger = createLogger('sqlalchemy.engine',
                                     logging.DEBUG,
                                     logFile=''.join(arguments.log_file),
                                     formatter=formatter)
    logger = createLogger('UpdateGeorefDataSources',
                          logging.DEBUG,
                          logFile=''.join(arguments.log_file),
                          formatter=formatter)
else:
    sqlalchemy_logger = createLogger('sqlalchemy.engine', logging.WARN)
    logger = createLogger('UpdateGeorefDataSources', logging.DEBUG)

# push recognised CLI values into the module-level configuration
if arguments.host:
    PARAMS_DATABASE['host'] = arguments.host
if arguments.user:
    PARAMS_DATABASE['user'] = arguments.user
# NOTE(review): the body of this branch continues beyond the visible range.
if arguments.password:
def setUpClass(cls):
    """Announce and set up the georeference-update test suite.

    Creates the class-level logger shared by all tests.
    """
    # print() call form is valid in both Python 2 and 3; the original
    # print statements are a syntax error under Python 3.
    print('==============')
    print('Start georeferenceupdate tests ...')
    print('==============')
    cls.logger = createLogger(name='GeoreferenceUpdateTest', level=logging.DEBUG)
def setUpClass(cls):
    """Announce and set up the tools test suite.

    Creates the class-level logger (INFO level) shared by all tests.
    """
    # Call form of print works under both Python 2 and 3, unlike the
    # original Python-2-only print statements.
    print('==============')
    print('Start tools tests ...')
    print('==============')
    cls.logger = createLogger(name='Tools', level=logging.INFO)
# NOTE(review): the two statements below appear to be the tail of an
# enclosing function/loop (its 'def' is outside the visible range);
# pendingJob, dbsession, logger, testing and counter come from that scope.
runUpdateGeoreferenceProcess(pendingJob, dbsession, logger, testing)
logger.debug('Processed %s update georeference process.' % counter)


def clearRaceConflicts(overwrite, dbsession):
    """Remove concurrently registered jobs sharing one overwrites id.

    If one of the jobs with the same overwrites id is already active,
    all other such jobs are deleted from the session and True is
    returned; otherwise nothing is deleted and False is returned.
    """
    # double check if there are jobs which should be deleted
    # this clears the case that while there was on job activated in the past
    # there was still because of concurrency another jobs registered with the same
    # overwrites id
    possibleConflictJobs = Georeferenzierungsprozess.getJobsWithSameOverwrites(overwrite, dbsession)
    # scan for an already-active job (last active one found wins)
    alreadyActiveJob = None
    for conflictJob in possibleConflictJobs:
        if conflictJob.isactive == True:
            alreadyActiveJob = conflictJob
    if alreadyActiveJob is not None:
        # delete every sibling job except the active one
        for conflictJob in possibleConflictJobs:
            if conflictJob.id != alreadyActiveJob.id:
                dbsession.delete(conflictJob)
        return True
    return False


""" Main """
if __name__ == '__main__':
    logger = createLogger(name='test', level=logging.DEBUG)
    logger.info('Looking for pending georeference processes ...')
    dbsession = loadDbSession(DBCONFIG_PARAMS, logger)
    lookForUpdateProcess(dbsession, logger, True)
    dbsession.commit()
def setUpClass(cls):
    """Create the shared loggers and database session for this suite."""
    verbosity = logging.DEBUG
    # SQL statement logging straight from the engine.
    cls.sqlalchemy_logger = createLogger('sqlalchemy.engine', verbosity)
    # Test-suite logger.
    cls.logger = createLogger('TestUpdateGeorefDataSources', verbosity)
    # One database session shared by every test in the class.
    cls.dbsession = initializeDb(DBCONFIG)
# NOTE(review): chunk starts mid-call — the opening
# "parser.add_argument(" for '--target_dir' is outside the visible range.
    '--target_dir',
    default='/tmp',
    # NOTE(review): help text is missing its closing parenthesis.
    help='define directory where the target TIFFs should be placed (default: /tmp'
),
# NOTE(review): the trailing comma above makes the whole statement a
# harmless one-element tuple expression; likely an editing leftover.
parser.add_argument('--delete_old', default=False,
                    # NOTE(review): help text missing closing parenthesis;
                    # also, any CLI value (even "False") is a truthy string.
                    help='delete old files (default: False')
arguments = parser.parse_args()

# create logger: file-based when --log_file is given, else default
if arguments.log_file:
    formatter = logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s - %(message)s')
    # ''.join(<str>) is a no-op for strings; log_file is used as a path
    logger = createLogger('ProcessMinifiedTiffs',
                          logging.DEBUG,
                          logFile=''.join(arguments.log_file),
                          formatter=formatter)
else:
    logger = createLogger('ProcessMinifiedTiffs', logging.DEBUG)

# parse parameter parameters
if arguments.source_dir:
    source_dir = arguments.source_dir
if arguments.target_dir:
    target_dir = arguments.target_dir
# normalise delete_old: falsy values collapse to False
if arguments.delete_old:
    delete_old = arguments.delete_old
else:
    delete_old = False
parser = argparse.ArgumentParser(
    description='This scripts simple update the cache',
    prog='Script %s' % script_name)
parser.add_argument('--time_range', type=str, default='1868/1945',
                    help='Should be in the form "1868/1945". Works in collaboration with mode "update_cache" and describe\'s the time range, for which the cache should be updated.')
parser.add_argument('--log_file', help='define a log file')
parser.add_argument('--tmp_dir', default='/tmp',
                    help='define directory for temporary files (default: /tmp)')
# Fixed: help text was missing its closing parenthesis.
# NOTE(review): no type=int here, so a CLI-supplied value arrives as a
# string while the default is an int — confirm downstream handling.
parser.add_argument('--seeder_threads', default=2,
                    help='Number of threads the seeder utility should use (default: 2)')
parser.add_argument('--host', help='host for messtischblattdb')
parser.add_argument('--user', help='user for messtischblattdb')
parser.add_argument('--password', help='password for messtischblattdb')
parser.add_argument('--db', help='db name for messtischblattdb')
parser.add_argument('--with_restricted', default='true',
                    help='Recalculates the cache only for the restricted area, given by the true area of interest (default: true).')
arguments = parser.parse_args()

# create logger: log to file when --log_file is given, otherwise console
if arguments.log_file:
    formatter = logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s - %(message)s')
    # arguments.log_file is already a string; ''.join() was a no-op.
    logger = createLogger('%s' % script_name, logging.DEBUG,
                          logFile=arguments.log_file, formatter=formatter)
else:
    logger = createLogger('%s' % script_name, logging.DEBUG)

# push recognised CLI values into the module-level configuration globals
if arguments.seeder_threads:
    SEEDER_NRTHREADS = arguments.seeder_threads
if arguments.host:
    PARAMS_DATABASE['host'] = arguments.host
if arguments.user:
    PARAMS_DATABASE['user'] = arguments.user
if arguments.password:
    PARAMS_DATABASE['password'] = arguments.password
if arguments.db:
    PARAMS_DATABASE['db'] = arguments.db
if arguments.tmp_dir:
    TMP_DIR = arguments.tmp_dir
def setUpClass(cls):
    """Set up the tools test suite: banner output plus a shared logger."""
    # Use print() calls instead of Python-2-only print statements so the
    # module also parses under Python 3.
    print('==============')
    print('Start tools tests ...')
    print('==============')
    cls.logger = createLogger(name='Tools', level=logging.INFO)
def setUpClass(cls):
    """Set up the georeference-update test suite and its shared logger."""
    # print() call syntax is accepted by Python 2 and Python 3 alike;
    # the original bare print statements break on Python 3.
    print('==============')
    print('Start georeferenceupdate tests ...')
    print('==============')
    cls.logger = createLogger(name='GeoreferenceUpdateTest', level=logging.DEBUG)
# third-party imports
from sqlalchemy import create_engine, engine_from_config
from sqlalchemy.orm import sessionmaker, scoped_session
from zope.sqlalchemy import ZopeTransactionExtension

# import of own python classes
from settings import DBCONFIG, ROUTE_PREFIX, SECRET_KEY
from vkviewer.python.utils.logger import createLogger
from vkviewer.python.security import EntryFactory, groupfinder
from vkviewer.python.proxy import proxy_post
from vkviewer.python.models.Meta import initialize_sql, Base
from vkviewer.python.i18n import custom_locale_negotiator
#from python.models.meta import DBSession, Base, initialize_sql

# load logger
log = createLogger('vkviewer', logging.DEBUG)

# base path
here = os.path.dirname(os.path.abspath(__file__))


def loadLogger(debug=True):
    """ This function initialize the logger for the application.

        Arguments:
            debug {Boolean}
    """
    # NOTE(review): 'log' here is a local variable that is never
    # returned or published, so this function currently has no
    # observable effect — confirm whether it should return 'log'.
    if debug:
        log = createLogger('vkviewer', logging.DEBUG)
    else:
        log = logging.getLogger(__name__)


# NOTE(review): the body of addRoutes continues beyond the visible range.
def addRoutes(config):
parser.add_argument('--password', help='password for messtischblattdb')
parser.add_argument('--db', help='db name for messtischblattdb')
parser.add_argument(
    '--with_restricted',
    default='true',
    help=
    'Recalculates the cache only for the restricted area, given by the true area of interest (default: true).'
)
arguments = parser.parse_args()

# create logger: log to file when --log_file is given, otherwise console
if arguments.log_file:
    formatter = logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s - %(message)s')
    # ''.join(<str>) is a no-op for strings; log_file is used as a path
    logger = createLogger('%s' % script_name,
                          logging.DEBUG,
                          logFile=''.join(arguments.log_file),
                          formatter=formatter)
else:
    logger = createLogger('%s' % script_name, logging.DEBUG)

# push recognised CLI values into the module-level configuration globals
if arguments.seeder_threads:
    SEEDER_NRTHREADS = arguments.seeder_threads
if arguments.host:
    PARAMS_DATABASE['host'] = arguments.host
if arguments.user:
    PARAMS_DATABASE['user'] = arguments.user
if arguments.password:
    PARAMS_DATABASE['password'] = arguments.password
if arguments.db:
    PARAMS_DATABASE['db'] = arguments.db
# NOTE(review): the body of this branch continues beyond the visible range.
if arguments.tmp_dir:
def clearRaceConflicts(overwrite, dbsession):
    """Remove concurrently registered jobs sharing one overwrites id.

    Double check if there are jobs which should be deleted: while one
    job was activated in the past, concurrency may have registered
    further jobs with the same overwrites id.

    Arguments:
        overwrite -- overwrites id whose jobs are checked for conflicts
        dbsession -- active database session

    Returns:
        True if an active job was found and its conflicting siblings
        were deleted from the session, otherwise False.
    """
    possibleConflictJobs = Georeferenzierungsprozess.getJobsWithSameOverwrites(
        overwrite, dbsession)

    # Find an already active job among the candidates; the last active
    # one found wins, matching the original scan order.
    alreadyActiveJob = None
    for conflictJob in possibleConflictJobs:
        if conflictJob.isactive:  # idiomatic truth test instead of '== True'
            alreadyActiveJob = conflictJob

    # Guard clause: nothing to clean up when no job is active.
    if alreadyActiveJob is None:
        return False

    # Delete every job with the same overwrites id except the active one.
    for conflictJob in possibleConflictJobs:
        if conflictJob.id != alreadyActiveJob.id:
            dbsession.delete(conflictJob)
    return True


""" Main """
if __name__ == '__main__':
    logger = createLogger(name='test', level=logging.DEBUG)
    logger.info('Looking for pending georeference processes ...')
    dbsession = loadDbSession(DBCONFIG_PARAMS, logger)
    # NOTE(review): the final True presumably selects testing mode —
    # confirm against the lookForUpdateProcess signature.
    lookForUpdateProcess(dbsession, logger, True)
    dbsession.commit()
# parse command line
parser = argparse.ArgumentParser(description="Parse the key/value pairs from the command line!")
parser.add_argument("--log_file", help="define a log file")
# Fixed: the three help texts below were missing their closing parenthesis.
parser.add_argument("--source_dir", default="/tmp",
                    help="define directory for the source TIFFs (default: /tmp)")
# Fixed: a stray trailing comma after this call previously turned the
# statement into a pointless one-element tuple expression.
parser.add_argument("--target_dir", default="/tmp",
                    help="define directory where the target TIFFs should be placed (default: /tmp)")
# NOTE(review): any CLI-supplied value (even "False") is a truthy string;
# action="store_true" would be cleaner, but is kept as-is so the CLI
# contract does not change.
parser.add_argument("--delete_old", default=False,
                    help="delete old files (default: False)")
arguments = parser.parse_args()

# create logger: log to file when --log_file is given, otherwise console
if arguments.log_file:
    formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")
    # arguments.log_file is already a string; the former "".join() call
    # was a no-op and has been dropped.
    logger = createLogger("ProcessMinifiedTiffs", logging.DEBUG,
                          logFile=arguments.log_file, formatter=formatter)
else:
    logger = createLogger("ProcessMinifiedTiffs", logging.DEBUG)

# parse parameter parameters
if arguments.source_dir:
    source_dir = arguments.source_dir
if arguments.target_dir:
    target_dir = arguments.target_dir
# normalise delete_old: falsy values collapse to False (same behavior as
# the original if/else)
delete_old = arguments.delete_old if arguments.delete_old else False
# NOTE(review): this chunk starts inside the docstring of an enclosing
# function (the 'def' and opening quotes are outside the visible range).
        source_dir {string}
        target_dir (string)
        logger {Logger}
    """
    logger.info(' Get files from source directory ...')
    directory_content = os.listdir(source_dir)
    # NOTE: the loop variable shadows the Python 2 builtin 'file'
    for file in directory_content:
        buildTsmCache(os.path.join(source_dir, file), target_dir, logger)


""" Main """
if __name__ == '__main__':
    script_name = 'UpdateTMSCache.py'
    parser = argparse.ArgumentParser(
        description='This scripts create a TMS Cache for all georeferenced maps.',
        prog='Script %s' % script_name)
    parser.add_argument('--log_file', help='define a log file')
    parser.add_argument('--target_dir',
                        help='Directory where the TMS directories should be placed.')
    parser.add_argument('--source_dir', help='Source directory')
    arguments = parser.parse_args()

    # create logger: file-based when --log_file is given, else default
    if arguments.log_file:
        formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
        logger = createLogger('UpdateTMSCache', logging.DEBUG,
                              logFile=''.join(arguments.log_file),
                              formatter=formatter)
    else:
        logger = createLogger('UpdateTMSCache', logging.DEBUG)

    updateTMSCache(arguments.source_dir, arguments.target_dir, logger)
from vkviewer.python.utils.logger import createLogger
from georeference.settings import DAEMON_SETTINGS, LOGGER_NAME, LOGGER_FILE, LOGGER_LEVEL, LOGGER_FORMATTER, DBCONFIG_PARAMS
from georeference.georeferenceupdate import lookForUpdateProcess
from georeference.utils.tools import loadDbSession

# Initialize the logger: make sure the log file exists, then attach a
# daily-rotating file handler that keeps 14 days of backups.
if not os.path.exists(LOGGER_FILE):
    open(LOGGER_FILE, 'a').close()
formatter = logging.Formatter(LOGGER_FORMATTER)
handler = TimedRotatingFileHandler(LOGGER_FILE, when='d', interval=1, backupCount=14)
handler.setFormatter(formatter)
logger = createLogger(name=LOGGER_NAME, level=LOGGER_LEVEL, handler=handler)


class GeoreferenceDaemonApp():
    """Configuration object for the georeference update daemon.

    Exposes the std-stream paths and pidfile settings read from
    DAEMON_SETTINGS — presumably consumed by a python-daemon style
    runner; confirm against the code that instantiates it.
    """

    def __init__(self):
        self.stdin_path = DAEMON_SETTINGS['stdin']
        # make sure the stdin path exists before the daemon attaches to it
        if not os.path.exists(self.stdin_path):
            open(self.stdin_path, 'a').close()
        self.stdout_path = DAEMON_SETTINGS['stdout']
        self.stderr_path = DAEMON_SETTINGS['stderr']
        self.pidfile_path = DAEMON_SETTINGS['pidfile_path']
        self.pidfile_timeout = DAEMON_SETTINGS['pidfile_timeout']

    def run(self):
        logger.info('Georeference update runner daemon is started!')
        # NOTE(review): run() appears to continue beyond the visible range.
parser.add_argument('--method', type=str,
                    help='What method should be run (add or delete)')
parser.add_argument('--host', help='host for messtischblattdb')
parser.add_argument('--user', help='user for messtischblattdb')
parser.add_argument('--password', help='password for messtischblattdb')
parser.add_argument('--db', help='db name for messtischblattdb')
arguments = parser.parse_args()

# create logger: log to file when --log_file is given, otherwise console
if arguments.log_file:
    formatter = logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s - %(message)s')
    # SQL engine logging goes to the same file as the script logger
    sqlalchemy_logger = createLogger('sqlalchemy.engine',
                                     logging.DEBUG,
                                     logFile=''.join(arguments.log_file),
                                     formatter=formatter)
    logger = createLogger('%s' % script_name,
                          logging.DEBUG,
                          logFile=''.join(arguments.log_file),
                          formatter=formatter)
else:
    sqlalchemy_logger = createLogger('sqlalchemy.engine', logging.WARN)
    logger = createLogger('%s' % script_name, logging.DEBUG)

# push recognised CLI values into the module-level configuration
if arguments.host:
    PARAMS_DATABASE['host'] = arguments.host
if arguments.user:
    PARAMS_DATABASE['user'] = arguments.user
# NOTE(review): the body of this branch continues beyond the visible range.
if arguments.password: