    def __init__(self):
        # All of these imports are necessary so that every model is registered on
        # Base.metadata before create_all() runs below.
        from api_gateway.executiondb.returns import ReturnApi
        from api_gateway.executiondb.parameter import Parameter, ParameterApi
        from api_gateway.executiondb.action import Action, ActionApi
        from api_gateway.executiondb.appapi import AppApi
        from api_gateway.executiondb.branch import Branch
        from api_gateway.executiondb.condition import Condition
        from api_gateway.executiondb.transform import Transform
        # from api_gateway.executiondb.trigger import Trigger
        from api_gateway.executiondb.global_variable import GlobalVariable
        from api_gateway.executiondb.workflow_variable import WorkflowVariable
        from api_gateway.executiondb.workflow import Workflow
        from api_gateway.executiondb.workflowresults import WorkflowStatus, NodeStatus

        ExecutionDatabase.db_type = config.DB_TYPE

        if 'sqlite' in config.DB_TYPE:
            # SQLite needs check_same_thread disabled so the scoped_session can be shared across threads
            self.engine = create_engine(
                format_db_path(config.DB_TYPE, config.EXECUTION_DB_NAME),
                connect_args={'check_same_thread': False},
                poolclass=NullPool)
        else:
            self.engine = create_engine(
                format_db_path(config.DB_TYPE, config.EXECUTION_DB_NAME, config.DB_USERNAME,
                               config.get_from_file(config.POSTGRES_KEY_PATH), config.DB_HOST),
                poolclass=NullPool, isolation_level="AUTOCOMMIT")

        if not database_exists(self.engine.url):
            try:
                create_database(self.engine.url)
            except IntegrityError:
                # Another process may have created the database first; safe to ignore
                pass

        self.connection = self.engine.connect()
        self.transaction = self.connection.begin()

        session = sessionmaker()
        session.configure(bind=self.engine)
        self.session = scoped_session(session)

        Base.metadata.bind = self.engine
        Base.metadata.create_all(self.engine)
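# --- Illustrative sketch (assumption, not the project's helper) ---
# format_db_path is WALKOFF's own URL-building helper and its implementation is not
# shown in this file. Judging only from the call sites above, it is assumed to build
# a SQLAlchemy URL roughly like the hypothetical function below.
def format_db_path_sketch(db_type, name, username=None, password=None, host="localhost"):
    if 'sqlite' in db_type:
        # e.g. 'sqlite:///data/execution.db'
        return '{}:///{}'.format(db_type, name)
    # e.g. 'postgresql://walkoff:secret@localhost/execution'
    return '{}://{}:{}@{}/{}'.format(db_type, username, password, host, name)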
    def __init__(self, execution_db_type, execution_db_path, execution_db_host="localhost"):
        # All of these imports are necessary so that every model is registered on
        # Execution_Base.metadata before create_all() runs below.
        from api_gateway.executiondb.parameter import Parameter
        from api_gateway.executiondb.action import Action
        from api_gateway.executiondb.branch import Branch
        from api_gateway.executiondb.condition import Condition
        from api_gateway.executiondb.position import Position
        from api_gateway.executiondb.transform import Transform
        from api_gateway.executiondb.trigger import Trigger
        from api_gateway.executiondb.global_variable import GlobalVariable
        from api_gateway.executiondb.workflow_variable import WorkflowVariable
        from api_gateway.executiondb.workflow import Workflow
        from api_gateway.executiondb.workflowresults import WorkflowStatus, ActionStatus

        ExecutionDatabase.db_type = execution_db_type

        if 'sqlite' in execution_db_type:
            # SQLite needs check_same_thread disabled so the scoped_session can be shared across threads
            self.engine = create_engine(
                format_db_path(execution_db_type, execution_db_path),
                connect_args={'check_same_thread': False},
                poolclass=NullPool)
        else:
            self.engine = create_engine(
                format_db_path(execution_db_type, execution_db_path,
                               'EXECUTION_DB_USERNAME', 'EXECUTION_DB_PASSWORD', execution_db_host),
                poolclass=NullPool)

        if not database_exists(self.engine.url):
            try:
                create_database(self.engine.url)
            except IntegrityError:
                # Another process may have created the database first; safe to ignore
                pass

        self.connection = self.engine.connect()
        self.transaction = self.connection.begin()

        Session = sessionmaker()
        Session.configure(bind=self.engine)
        self.session = scoped_session(Session)

        Execution_Base.metadata.bind = self.engine
        Execution_Base.metadata.create_all(self.engine)
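# --- Illustrative usage (assumption, not part of the module) ---
# One plausible way to construct this older ExecutionDatabase signature with its
# default SQLite backend; the path mirrors the EXECUTION_DB_PATH default shown in
# the Config class further below.
execution_db = ExecutionDatabase('sqlite', 'data/execution.db')
# The scoped_session can then be used for queries, e.g.:
# from api_gateway.executiondb.workflow import Workflow
# workflows = execution_db.session.query(Workflow).all()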
class FlaskConfig(object):
    SQLALCHEMY_TRACK_MODIFICATIONS = False
    SQLALCHEMY_DATABASE_URI = format_db_path(config.DB_TYPE, config.SERVER_DB_NAME,
                                             config.DB_USERNAME,
                                             config.get_from_file(config.POSTGRES_KEY_PATH),
                                             config.DB_HOST)

    JWT_BLACKLIST_ENABLED = True
    JWT_BLACKLIST_TOKEN_CHECKS = ['refresh']
    JWT_TOKEN_LOCATION = 'headers'
    JWT_BLACKLIST_PRUNE_FREQUENCY = 1000
    MAX_STREAM_RESULTS_SIZE_KB = 156

    ITEMS_PER_PAGE = 20

    SECRET_KEY = config.get_from_file(config.ENCRYPTION_KEY_PATH)
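# --- Illustrative usage (assumption, not part of the module) ---
# FlaskConfig is a plain settings object, so it would typically be applied with
# Flask's standard from_object() loader; the app created here is hypothetical.
from flask import Flask

app = Flask(__name__)
app.config.from_object(FlaskConfig)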
# SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
#
# Base = declarative_base()

Base = declarative_base()

# Explicit constraint-naming convention so Alembic migrations get stable names
naming_convention = {
    "ix": 'ix_%(column_0_label)s',
    "uq": "uq_%(table_name)s_%(column_0_name)s",
    "ck": "ck_%(table_name)s_%(column_0_name)s",
    "fk": "fk_%(table_name)s_%(column_0_name)s_%(referred_table_name)s",
    "pk": "pk_%(table_name)s"
}
Base.metadata = MetaData(naming_convention=naming_convention)

if 'sqlite' in config.DB_TYPE:
    engine = create_engine(format_db_path(config.DB_TYPE, config.EXECUTION_DB_NAME),
                           connect_args={'check_same_thread': False},
                           poolclass=NullPool)
else:
    engine = create_engine(
        format_db_path(config.DB_TYPE, config.EXECUTION_DB_NAME, config.DB_USERNAME,
                       config.get_from_file(config.POSTGRES_KEY_PATH), config.DB_HOST),
        poolclass=NullPool, isolation_level="AUTOCOMMIT")

if not database_exists(engine.url):
    try:
        create_database(engine.url)
    except IntegrityError:
        # Another process may have created the database first; safe to ignore
        pass

connection = engine.connect()
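# --- Illustrative sketch (assumption, not part of the module) ---
# Example of how a model declared against this Base picks up the naming convention
# above; the Example table and its columns are hypothetical.
from sqlalchemy import Column, Integer, String
from sqlalchemy.orm import sessionmaker

class Example(Base):
    __tablename__ = 'example'
    id = Column(Integer, primary_key=True)    # primary key constraint named 'pk_example'
    name = Column(String(255), unique=True)   # unique constraint named 'uq_example_name'

Base.metadata.create_all(engine)              # issues CREATE TABLE over the shared engine

Session = sessionmaker(bind=engine)
session = Session()
session.add(Example(name='demo'))
session.commit()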
class Config(object):
    from common.config import config as common_config
    # TODO: Merge triple-play config with this old config and replace the hack below
    common_config = common_config

    # CONFIG VALUES

    # IP and port for the webserver
    HOST = "127.0.0.1"
    PORT = 5000

    # IP addresses and ports for IPC (inter-process communication). Do not change these unless
    # necessary. There must not be conflicts.
    ZMQ_RESULTS_ADDRESS = 'tcp://127.0.0.1:5556'
    ZMQ_COMMUNICATION_ADDRESS = 'tcp://127.0.0.1:5557'

    # Specify the number of worker processes, and the number of threads for each worker process.
    # Multiplying these numbers together specifies the max number of workflows that may be
    # executing at the same time.
    NUMBER_PROCESSES = 4
    NUMBER_THREADS_PER_PROCESS = 3

    # Database types
    WALKOFF_DB_TYPE = 'sqlite'
    EXECUTION_DB_TYPE = 'sqlite'
    WALKOFF_DB_HOST = 'localhost'
    EXECUTION_DB_HOST = 'localhost'

    # PATHS
    DATA_PATH = 'data'
    API_PATH = join("api_gateway", "api")
    CACHE = {'type': 'redis', 'host': 'localhost', 'port': 6379}
    CLIENT_PATH = join("api_gateway", "client")
    CONFIG_PATH = join(DATA_PATH, 'api_gateway.config')
    DB_PATH = abspath(join(DATA_PATH, 'api_gateway.db'))
    DEFAULT_APPDEVICE_EXPORT_PATH = join(DATA_PATH, 'appdevice.json')
    EXECUTION_DB_PATH = abspath(join(DATA_PATH, 'execution.db'))
    LOGGING_CONFIG_PATH = join(DATA_PATH, 'log', 'logging.json')
    WALKOFF_SCHEMA_PATH = join(DATA_PATH, 'walkoff_schema.json')
    WORKFLOWS_PATH = join(DATA_PATH, 'workflows')
    KEYS_PATH = join("api_gateway", '.certificates')
    CERTIFICATE_PATH = join(KEYS_PATH, 'api_gateway.crt')
    PRIVATE_KEY_PATH = join(KEYS_PATH, 'api_gateway.key')

    # AppConfig
    SQLALCHEMY_TRACK_MODIFICATIONS = False
    SQLALCHEMY_DATABASE_URI = format_db_path(WALKOFF_DB_TYPE, DB_PATH,
                                             'WALKOFF_DB_USERNAME', 'WALKOFF_DB_PASSWORD',
                                             WALKOFF_DB_HOST)
    JWT_BLACKLIST_ENABLED = True
    JWT_BLACKLIST_TOKEN_CHECKS = ['refresh']
    JWT_TOKEN_LOCATION = 'headers'
    JWT_BLACKLIST_PRUNE_FREQUENCY = 1000
    MAX_STREAM_RESULTS_SIZE_KB = 156
    SEPARATE_WORKERS = False
    SEPARATE_RECEIVER = False
    SEPARATE_INTERFACES = False
    ITEMS_PER_PAGE = 20
    ACTION_EXECUTION_STRATEGY = 'local'
    EXECUTION_DB_USERNAME = ''
    EXECUTION_DB_PASSWORD = ''
    WALKOFF_DB_USERNAME = ''
    WALKOFF_DB_PASSWORD = ''
    SERVER_PUBLIC_KEY = ''
    SERVER_PRIVATE_KEY = ''
    CLIENT_PUBLIC_KEY = ''
    CLIENT_PRIVATE_KEY = ''
    ACCUMULATOR_TYPE = 'external'
    SECRET_KEY = "SHORTSTOPKEY"

    __passwords = ['EXECUTION_DB_PASSWORD', 'WALKOFF_DB_PASSWORD', 'SERVER_PRIVATE_KEY',
                   'CLIENT_PRIVATE_KEY', 'SERVER_PUBLIC_KEY', 'CLIENT_PUBLIC_KEY', 'SECRET_KEY']

    WORKFLOW_RESULTS_HANDLER = 'zmq'
    WORKFLOW_RESULTS_PROTOCOL = 'protobuf'
    WORKFLOW_RESULTS_KAFKA_CONFIG = {'bootstrap.servers': 'localhost:9092', 'group.id': 'results'}
    WORKFLOW_RESULTS_KAFKA_TOPIC = 'results'

    WORKFLOW_COMMUNICATION_HANDLER = 'zmq'
    WORKFLOW_COMMUNICATION_PROTOCOL = 'protobuf'
    WORKFLOW_COMMUNICATION_KAFKA_CONFIG = {'bootstrap.servers': 'localhost:9092', 'group.id': 'comm'}
    WORKFLOW_COMMUNICATION_KAFKA_TOPIC = 'comm'

    SEPARATE_PROMETHEUS = False

    ALEMBIC_CONFIG = join('.', 'alembic.ini')

    SWAGGER_URL = '/api/docs'

    @classmethod
    def load_config(cls, config_path=None):
        """ Loads Walkoff configuration from a JSON file

        Args:
            config_path (str): Optional path to the config. Defaults to the CONFIG_PATH class variable.
""" if config_path: cls.CONFIG_PATH = config_path if cls.CONFIG_PATH: try: if isfile(cls.CONFIG_PATH): with open(cls.CONFIG_PATH) as config_file: config = json.loads(config_file.read()) for key, value in config.items(): if value: setattr(cls, key.upper(), value) logger.info('Loaded config from {}.'.format( cls.CONFIG_PATH)) else: logger.info('Config path {} is not a file.'.format( cls.CONFIG_PATH)) except (IOError, OSError, ValueError): logger.warning('Could not read config file.', exc_info=True) cls.SQLALCHEMY_DATABASE_URI = format_db_path(cls.WALKOFF_DB_TYPE, cls.DB_PATH, 'WALKOFF_DB_USERNAME', 'WALKOFF_DB_PASSWORD', cls.WALKOFF_DB_HOST) @classmethod def write_values_to_file(cls, keys=None): """ Writes the current walkoff configuration to a file""" if keys is None: keys = [key for key in dir(cls) if not key.startswith('__')] output = {} for key in keys: if key.upper() not in cls.__passwords and hasattr( cls, key.upper()): output[key.lower()] = getattr(cls, key.upper()) with open(cls.CONFIG_PATH, 'w') as config_file: config_file.write( json.dumps(output, sort_keys=True, indent=4, separators=(',', ': '))) logger.info("Wrote config to {}".format(cls.CONFIG_PATH)) @classmethod def load_env_vars(cls): for field in (field for field in dir(cls) if field.isupper()): if field in os.environ: var_type = type(getattr(cls, field)) if var_type == dict: setattr(cls, field, json.loads(os.environ.get(field))) else: setattr(cls, field, var_type(os.environ.get(field))) logger.info( "Loading field {} from environment variables.".format( field)) cls.SQLALCHEMY_DATABASE_URI = format_db_path(cls.WALKOFF_DB_TYPE, cls.DB_PATH, 'WALKOFF_DB_USERNAME', 'WALKOFF_DB_PASSWORD', cls.WALKOFF_DB_HOST)