def get_rest_cacher_dir():
    """Return the REST cacher directory configured in [rest]/cacher_dir.

    The directory must both be configured and exist on disk.

    :returns: the cacher directory path.
    :raises Exception: when the option is missing, empty, or the path
        does not exist.
    """
    if config_has_section('rest') and config_has_option('rest', 'cacher_dir'):
        cacher_dir = config_get('rest', 'cacher_dir')
        if cacher_dir and os.path.exists(cacher_dir):
            return cacher_dir
    raise Exception("cacher_dir is not defined or it doesn't exist")
def setup_logging(name):
    """ Setup logging

    Level comes from [common]/loglevel (default INFO). Output goes to
    the file <logdir>/<name> when [common]/logdir is configured,
    otherwise to stdout.
    """
    log_format = '%(asctime)s\t%(name)s\t%(levelname)s\t%(message)s'

    loglevel = logging.INFO
    if config_has_section('common') and config_has_option('common', 'loglevel'):
        # Map the configured name (e.g. 'debug') onto the logging constant.
        loglevel = getattr(logging, config_get('common', 'loglevel').upper())

    if config_has_section('common') and config_has_option('common', 'logdir'):
        logging.basicConfig(filename=os.path.join(config_get('common', 'logdir'), name),
                            level=loglevel,
                            format=log_format)
    else:
        logging.basicConfig(stream=sys.stdout,
                            level=loglevel,
                            format=log_format)
def load_config_agents():
    """Return the agent names listed in the main config section.

    Reads the comma-separated 'agents' option, trimming whitespace
    around each entry.

    :returns: list of agent name strings; empty list when not configured.
    """
    if not (config_has_section(Sections.Main) and config_has_option(Sections.Main, 'agents')):
        return []
    return [agent.strip() for agent in config_get(Sections.Main, 'agents').split(',')]
def load_plugin_sequence(config_section, config_option='plugin_sequence'):
    """ load plugin sequence

    Reads a comma-separated list of plugin names from the given config
    section/option, trimming whitespace around each name.

    :param config_section: config section to read from.
    :param config_option: option name (default 'plugin_sequence').
    :returns: ordered list of plugin names; empty list when unset.
    """
    if config_has_section(config_section) and config_has_option(config_section, config_option):
        raw_value = config_get(config_section, config_option)
        return [plugin.strip() for plugin in raw_value.split(",")]
    return []
def get_rest_url_prefix():
    """Return the normalized REST URL prefix from [rest]/url_prefix.

    A configured, non-empty value is normalized to exactly one leading
    slash and no trailing slash (e.g. '//idds/' -> '/idds').

    :returns: the normalized prefix, or None when the option is not
        configured (a configured falsy value such as '' is returned
        unchanged).
    """
    if config_has_section('rest') and config_has_option('rest', 'url_prefix'):
        url_prefix = config_get('rest', 'url_prefix')
    else:
        url_prefix = None
    if url_prefix:
        # str.strip('/') drops every leading and trailing slash in one
        # pass, replacing the original character-by-character while loops.
        url_prefix = '/' + url_prefix.strip('/')
    return url_prefix
def check_database():
    """ Function to check whether database is defined in config.

    To be used to decide whether to skip some test functions.

    :returns True: if database.default is available. Otherwise False.
    """
    if not config_has_option('database', 'default'):
        return False
    return bool(config_get('database', 'default'))
def check_rest_host():
    """ Function to check whether rest host is defined in config.

    To be used to decide whether to skip some test functions.

    :returns True: if rest host is available. Otherwise False.
    """
    if not config_has_option('rest', 'host'):
        return False
    return bool(config_get('rest', 'host'))
def build_database(echo=True, tests=False):
    """Build the database.

    Creates the configured schema if it does not yet exist, then
    registers all ORM models against the engine.

    :param echo: forwarded to the engine for SQL statement echoing.
    :param tests: unused here; kept for interface compatibility.
    """
    engine = session.get_engine(echo=echo)
    if config_has_option('database', 'schema'):
        schema = config_get('database', 'schema')
        if schema and not engine.dialect.has_schema(engine, schema):
            print('Schema set in config, trying to create schema:', schema)
            try:
                engine.execute(CreateSchema(schema))
            except Exception as exc:
                # Best-effort: the schema may already exist or need
                # manual creation; continue either way.
                print('Cannot create schema, please validate manually if schema creation is needed, continuing:', exc)
                print(traceback.format_exc())
    models.register_models(engine)
def destroy_everything(echo=True):
    """ Using metadata.reflect() to get all constraints and tables.
        metadata.drop_all() as it handles cyclical constraints between tables.
        Ref. http://www.sqlalchemy.org/trac/wiki/UsageRecipes/DropEverything

    :param echo: forwarded to the engine for SQL statement echoing.
    """
    engine = session.get_engine(echo=echo)
    try:
        # the transaction only applies if the DB supports
        # transactional DDL, i.e. Postgresql, MS SQL Server
        with engine.begin() as conn:
            inspector = inspect(conn)  # type: Union[Inspector, PGInspector]
            # Tables arrive sorted by FK dependency; iterating in reverse
            # drops dependents before their referenced tables.
            # schema='*' asks the inspector for every schema.
            for tname, fkcs in reversed(
                    inspector.get_sorted_table_and_fkc_names(schema='*')):
                if tname:
                    drop_table_stmt = DropTable(
                        Table(tname, MetaData(), schema='*'))
                    conn.execute(drop_table_stmt)
                elif fkcs:
                    # A (None, fkcs) entry marks FK constraints that must be
                    # dropped explicitly to break dependency cycles.
                    if not engine.dialect.supports_alter:
                        # Dialects without ALTER (e.g. SQLite) cannot drop
                        # constraints separately; skip them.
                        continue
                    for tname, fkc in fkcs:
                        # Build a placeholder constraint object just to name
                        # the constraint for the DROP statement; attaching it
                        # to a Table gives it the required parent context.
                        fk_constraint = ForeignKeyConstraint((), (), name=fkc)
                        Table(tname, MetaData(), fk_constraint)
                        drop_constraint_stmt = DropConstraint(fk_constraint)
                        conn.execute(drop_constraint_stmt)
            # Drop the configured schema itself, if any, with all remains.
            if config_has_option('database', 'schema'):
                schema = config_get('database', 'schema')
                if schema:
                    conn.execute(DropSchema(schema, cascade=True))
            if engine.dialect.name == 'postgresql':
                assert isinstance(inspector, PGInspector), 'expected a PGInspector'
                # Postgres enum types are not removed by table drops;
                # delete them explicitly.
                for enum in inspector.get_enums(schema='*'):
                    sqlalchemy.Enum(**enum).drop(bind=conn)
    except Exception as e:
        # Best-effort teardown: report and continue rather than raise.
        print('Cannot destroy db:', e)
        print(traceback.format_exc())
from os.path import basename

from sqlalchemy import create_engine, event
from sqlalchemy.exc import DatabaseError, DisconnectionError, OperationalError, TimeoutError
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker, scoped_session

from idds.common.config import config_get, config_has_option
from idds.common.exceptions import IDDSException, DatabaseException

# Config section holding all database-related settings.
DATABASE_SECTION = 'database'

# Declarative base shared by every ORM model in this package.
BASE = declarative_base()

# Optional schema name from [database]/schema; when set, all model tables
# are qualified under that schema.
DEFAULT_SCHEMA_NAME = None
if config_has_option(DATABASE_SECTION, 'schema'):
    DEFAULT_SCHEMA_NAME = config_get(DATABASE_SECTION, 'schema')
    if DEFAULT_SCHEMA_NAME:
        BASE.metadata.schema = DEFAULT_SCHEMA_NAME

# Lazily-initialized session factory and engine, guarded by _LOCK.
# NOTE(review): `Lock` is not imported in this span — presumably
# `from threading import Lock` appears elsewhere in the file; confirm.
_MAKER, _ENGINE, _LOCK = None, None, Lock()


def _fk_pragma_on_connect(dbapi_con, con_record):
    # Hack for previous versions of sqlite3
    # Enable foreign-key enforcement on each new SQLite connection; other
    # DBAPI connection objects lacking .execute are silently skipped.
    try:
        dbapi_con.execute('pragma foreign_keys=ON')
    except AttributeError:
        pass
def get_rest_debug():
    """Return the [rest]/debug flag as a boolean (False when unset)."""
    if not (config_has_section('rest') and config_has_option('rest', 'debug')):
        return False
    return config_get_bool('rest', 'debug')
def get_cache_url():
    """Return the cache URL from [cache]/url (default '127.0.0.1:11211')."""
    configured = config_has_section('cache') and config_has_option('cache', 'url')
    return config_get('cache', 'url') if configured else '127.0.0.1:11211'