def set_test_environment():
    """Load test configuration values from ``values.yaml`` next to this module.

    Reads the YAML file, merges its values into the cloudharness
    configuration, then applies the default environment.

    Raises:
        FileNotFoundError: if ``values.yaml`` does not exist in ``HERE``.
    """
    # Compute the path once (the original rebuilt it four times) and raise
    # a specific, descriptive error instead of a bare Exception whose
    # message was only the path.
    values_path = os.path.join(HERE, 'values.yaml')
    if not os.path.exists(values_path):
        raise FileNotFoundError(f"Test values file not found: {values_path}")
    with open(values_path) as f:
        values = yaml.safe_load(f)
    # Removed leftover debug output (pprint of the loaded values).
    conf.get_configuration().update(values)
    set_default_environment()
class Config(object):
    """Flask-style configuration backed by the 'sentry' deployment entry.

    The database URI is derived from the 'sentry' application in the
    cloudharness deployment configuration; if that lookup fails, the error
    is logged and the SENTRY_* attributes stay None.
    """
    DEBUG = False
    TESTING = False
    CSRF_ENABLED = True
    SQLALCHEMY_TRACK_MODIFICATIONS = False
    # NOTE(review): placeholder secret — must be overridden in production.
    SECRET_KEY = 'this-really-needs-to-be-changed'
    SENTRY_POSTGRES_APP = None
    SENTRY_APP = None
    try:
        # Look the application up once (the original filtered twice).
        _sentry = conf.get_application_by_filter(name='sentry')[0]
        SENTRY_POSTGRES_APP = _sentry.postgres
        SENTRY_APP = _sentry.name
        SQLALCHEMY_DATABASE_URI = f'postgresql+psycopg2://{SENTRY_POSTGRES_APP.user}:{SENTRY_POSTGRES_APP.password}@{SENTRY_POSTGRES_APP.name}:{SENTRY_POSTGRES_APP.port}/{SENTRY_POSTGRES_APP.initialdb}'
        del _sentry  # don't leave a temp as a class attribute
    except Exception:
        # Narrowed from a bare `except:` which also swallowed
        # SystemExit/KeyboardInterrupt; include the traceback in the log.
        log.error("Cannot configure SENTRY", exc_info=True)
class Config(object):
    """Workspaces-manager application configuration.

    Falls back to a local sqlite database and the 'osb' namespace when no
    cluster deployment configuration is available.
    """
    # ...
    DATABASE_NAME = "wsmgr"
    SQLALCHEMY_DATABASE_URI = 'postgresql://*****:*****@workspaces-postgres-host:5432/workspaces'
    SQLALCHEMY_TRACK_MODIFICATIONS = False
    SQLALCHEMY_ECHO = False
    BASE_DIR = os.path.dirname(__file__)
    STATIC_DIR = os.path.join(BASE_DIR, "static")
    TEMPLATE_DIR = os.path.join(BASE_DIR, "templates")
    WORKSPACES_DIR = "workspaces"
    OPENAPI_DIR = os.path.join(BASE_DIR, "openapi")
    OPENAPI_FILE = "openapi.yaml"
    LOG_LEVEL = logging.INFO
    APP_NAME = "workspaces"
    WSMGR_HOSTNAME = socket.gethostname()
    WSMGR_IPADDRESS = socket.gethostbyname(WSMGR_HOSTNAME)
    try:
        CH_NAMESPACE = conf.get_configuration()['namespace']
    except Exception:
        # Narrowed from a bare `except:` (which also caught
        # SystemExit/KeyboardInterrupt).
        logging.warning(
            'Cannot get cluster deployment configuration: assuming local deployment',
            exc_info=True)
        # NOTE(review): `basedir` is not defined in this block — presumably
        # a module-level name; confirm it exists (BASE_DIR looks like the
        # likely intent).
        SQLALCHEMY_DATABASE_URI = 'sqlite:///' + os.path.join(
            basedir, DATABASE_NAME + '.db')
        CH_NAMESPACE = 'osb'
    DEBUG = False
    # Keycloak
    SECURITY_CONFIG_FILE_NAME = 'client_secrets.json'
def create_persistent_volume_claim(name, size, logger, **kwargs):
    """
    Create a Persistent Volume Claim in the Kubernetes cluster.

    If a PVC with the given name already exists the function returns
    without doing anything.

    Args:
        name (string): the name of the PVC
        size (string): the size of the PVC, e.g. 2Gi for a 2Gb PVC
        logger (logger): the logger where the information message is sent to
        **kwargs - the dictionary is used to override the default template

    Raises:
        Exception: if ``size`` is falsy.

    Returns:
        -
    """
    if not size:
        raise Exception(f"Size must be set. Got {size!r}.")
    if persistent_volume_claim_exists(name):
        return  # idempotent: nothing to do
    path = os.path.join(os.path.dirname(__file__), 'templates', 'pvc.yaml')
    # Context manager guarantees the template file handle is closed
    # (the original `open(...).read()` leaked it).
    with open(path, 'rt') as template_file:
        tmpl = template_file.read()
    text = tmpl.format(name=name, size=size)
    data = dict_merge(yaml.safe_load(text), kwargs)
    obj = _get_api().create_namespaced_persistent_volume_claim(
        namespace=conf.get_configuration()['namespace'],
        body=data,
    )
    # %s is expanded lazily by the logging framework; the stray f-prefix on
    # the original call did nothing and was removed.
    logger.info("PVC child is created: %s", obj)
def get_configuration(app_name) -> ApplicationConfiguration:
    """Return the configuration of the application named *app_name*.

    Raises:
        ConfigurationCallException: when the application is missing from,
            or appears more than once in, the current deployment.
    """
    matches = CloudharnessConfig.get_application_by_filter(harness__name=app_name)
    if len(matches) > 1:
        raise ConfigurationCallException(
            f'Application {app_name} is not unique inside the current deployment.'
        )
    if not matches:
        raise ConfigurationCallException(
            f'Application {app_name} is not part of the current deployment.')
    return ApplicationConfiguration(matches[0])
def send(self):
    """Send this notification as an HTML email via the configured SMTP host.

    Credentials come from the ``email-user`` / ``email-password`` secrets;
    host, port and TLS settings come from the ``smtp`` section of the
    cloudharness configuration.
    """
    logger.info(f"Sending notification email to {self.email_to}")
    msg = EmailMessage()
    msg['Subject'] = self.subject
    msg['From'] = self.email_from
    msg['To'] = self.email_to
    msg.set_content(self.message, subtype='html')

    email_user = get_secret_or_empty('email-user')
    email_pass = get_secret_or_empty('email-password')
    # Fetch the smtp section once instead of three separate config calls.
    smtp_conf = conf.get_configuration()["smtp"]
    email_host = smtp_conf["host"]
    email_port = smtp_conf["port"]
    email_tls = smtp_conf.get("use_tls")

    # Context manager guarantees QUIT/close even on failure (the original
    # leaked the connection). SECURITY FIX: TLS is negotiated BEFORE login
    # so credentials are not sent in clear text — the original logged in
    # first and only then called starttls().
    with smtplib.SMTP(email_host, email_port) as smtp:
        if email_tls:
            smtp.starttls()
        if email_user or email_pass:
            smtp.login(email_user, email_pass)
        smtp.send_message(msg)
def test_sync_workflow():
    """Build a synchronous distributed operation from one python task and dump its workflow."""
    # Task payload: sleeps briefly then prints. Left exactly as written
    # because PythonTask receives the function object and may serialize it.
    def f():
        import time
        time.sleep(2)
        print('whatever')
    task = tasks.PythonTask('my-task', f)
    # Sanity check: the registry entry must be present in the configuration.
    assert 'registry' in CloudharnessConfig.get_configuration()
    op = operations.DistributedSyncOperation('test-sync-op-', task)
    print('\n', yaml.dump(op.to_workflow()))
    # `execute` is a module-level flag gating actual submission to the cluster.
    if execute:
        print(op.execute())
def get_current_configuration() -> ApplicationConfiguration:
    """
    Get the configuration for the "current" application.

    Returns:
        ApplicationConfiguration

    Raises:
        ConfigurationCallException: when the current application name
            cannot be resolved or its configuration cannot be found.
    """
    try:
        current_app_name = CloudharnessConfig.get_current_app_name()
        return get_configuration(current_app_name)
    except Exception as e:
        raise ConfigurationCallException(
            f'Configuration error: cannot find current app - check env variable CH_CURRENT_APP_NAME'
        ) from e
def send(self, context):
    """Render the notification subject and body, then dispatch via the backend."""
    subject_template = Template(self.notification["subject"])
    subject = subject_template.render(
        domain=conf.get_configuration()["domain"],
        message_type=context.get("message_type")
    )
    # Make the rendered subject available to the content template too.
    context["subject"] = subject
    message = self.render_content(context)
    backend_instance = self.backend(
        email_from=self.email_from,
        email_to=self.email_to,
        subject=subject,
        message=message)
    backend_instance.send()
def get_persistent_volume_claim(name):
    """
    Get the Persistent Volume Claim with the given name from the Kubernetes
    cluster.

    Args:
        name (string): the name of the PVC

    Returns:
        The PVC data (see
        https://kubernetes.io/docs/concepts/storage/persistent-volumes/),
        or None when no PVC with that name exists.
    """
    found_pvcs = _get_api().list_namespaced_persistent_volume_claim(
        namespace=conf.get_configuration()['namespace'],
        field_selector=f'metadata.name={name}')
    # Truthiness check replaces the non-idiomatic `len(...) > 0`.
    if found_pvcs.items:
        return found_pvcs.items[0]
    return None
def get_config():  # noqa: E501
    """
    Gets the config for logging in into accounts

    :rtype: json
    {
        'url': '',
        'realm': '',
        'clientId': ''
    }
    """
    accounts_app = applications.get_configuration('accounts')
    auth_url = urljoin(accounts_app.get_public_address(), 'auth')
    return {
        'url': auth_url,
        'realm': CloudharnessConfig.get_namespace(),
        'clientId': accounts_app['webclient']['id'],
    }
def _init_handlers(self):
    """Register a NotificationHandler for every event declared in the
    'notifications' application configuration, subscribing to each topic
    the first time it is seen.
    """
    # find the notification app configuration
    app = conf.get_application_by_filter(name="notifications")[0]
    # Iterate key and value together instead of re-indexing the dict.
    for event_type, notification_apps in app["harness"]["events"].items():
        for notification_app in notification_apps:
            for notification_type in notification_app["types"]:
                log.info(
                    f"Init handler for event {notification_app['app']}.{notification_type['name']} type {event_type}"
                )
                nss = NotificationHandler(event_type,
                                          notification_app["app"],
                                          notification_type["name"],
                                          notification_type["events"])
                # Idiomatic `not in` replaces the original `not x in ...`.
                # Only subscribe once per topic.
                if nss.topic_id not in (
                        handler.topic_id
                        for handler in NotificationsController._notification_handlers):
                    self._consume_topic(nss.topic_id)
                NotificationsController._notification_handlers.append(nss)
import kubernetes
import yaml
import os

from pathlib import Path

from cloudharness import log

# TODO handle group
version = 'v1alpha1'

# determine the namespace of the current app and run the workflow in that namespace
from cloudharness.utils.config import CloudharnessConfig as conf

namespace = conf.get_namespace()


# --- Api functions ---
# NOTE(review): a stray backtick after this section comment (a syntax
# error in the original) was removed.

def get_api_client():
    """Build a CoreV1Api client from the cluster configuration.

    The original assigned an unused local and called get_configuration()
    a second time for the ApiClient; it is now called exactly once.
    """
    configuration = get_configuration()
    api_instance = kubernetes.client.CoreV1Api(kubernetes.client.ApiClient(configuration))
    return api_instance


def get_configuration():
    """Load the Kubernetes cluster configuration, preferring in-cluster.

    NOTE(review): this function appears truncated in the reviewed chunk —
    the local-configuration fallback after the warning is not visible, so
    the body is kept as found (including the broad except).
    """
    try:
        configuration = kubernetes.config.load_incluster_config()
    except:
        log.warning('Kubernetes cluster configuration not found. Trying local configuration')
def get_auth_realm():
    """Return the Keycloak auth realm, which equals the deployment namespace."""
    realm = conf.get_namespace()
    return realm
from keycloak.exceptions import KeycloakGetError from kafka import KafkaProducer, KafkaConsumer from kafka.admin import KafkaAdminClient, NewTopic from kafka.errors import TopicAlreadyExistsError, UnknownTopicOrPartitionError, KafkaTimeoutError from cloudharness import log from cloudharness import applications as apps from cloudharness.auth.keycloak import AuthClient from cloudharness.errors import * from cloudharness.utils import env from cloudharness.utils.config import CloudharnessConfig as config logging.getLogger('kafka').setLevel(logging.ERROR) AUTH_CLIENT = None CURRENT_APP_NAME = config.get_current_app_name() def get_authclient(): global AUTH_CLIENT if not AUTH_CLIENT: AUTH_CLIENT = AuthClient() return AUTH_CLIENT class EventClient: def __init__(self, topic_id): self.topic_id = topic_id @classmethod def _get_bootstrap_servers(cls):
def delete_persistent_volume_claim(name):
    """Delete the PVC with the given name from the deployment namespace."""
    target_namespace = conf.get_configuration()['namespace']
    _get_api().delete_namespaced_persistent_volume_claim(
        name=name,
        namespace=target_namespace)
def get_configuration():
    """Return the Argo client Configuration.

    In a real deployment the internal service address is used; in test
    mode the public address is used instead.
    """
    argo_app = applications.get_configuration('argo')
    if conf.is_test():
        host = argo_app.get_public_address()
    else:
        host = argo_app.get_service_address()
    return Configuration(host=host)
""" Access workflows using Argo REST API Reference: https://argoproj.github.io/docs/argo/docs/rest-api.html https://github.com/kubernetes-client/python/blob/master/kubernetes/docs/CustomObjectsApi.md """ import yaml from argo.workflows.client import ApiClient, WorkflowServiceApi, Configuration, V1alpha1WorkflowCreateRequest, \ V1alpha1Workflow # determine the namespace of the current app and run the workflow in that namespace from cloudharness.utils.config import CloudharnessConfig as conf from cloudharness import log, applications ch_conf = conf.get_configuration() namespace = conf.get_namespace() class WorkflowException(Exception): def __init__(self, status, message=''): super().__init__(message) self.status = status class WorkflowNotFound(WorkflowException): def __init__(self): super().__init__(404) class BadParam(WorkflowException):
def get_configurations(**kwargs):
    """Return an ApplicationConfiguration for every application matching the filter."""
    matching = CloudharnessConfig.get_application_by_filter(**kwargs)
    # map avoids shadowing the common module alias `conf` that the
    # original used as its comprehension variable.
    return list(map(ApplicationConfiguration, matching))
import yaml

# IMPORTANT: the test environment must be configured BEFORE any other
# cloudharness import reads the configuration, hence the mid-file imports.
from .test_env import set_test_environment

set_test_environment()

from cloudharness.workflows import operations, tasks
from cloudharness import set_debug
from cloudharness.workflows import argo
from cloudharness.utils.config import CloudharnessConfig

set_debug()

# Flag gating actual submission of workflows to the cluster.
execute = False

# Sanity check at import time: the registry entry must be present in the
# test configuration.
assert 'registry' in CloudharnessConfig.get_configuration()


def test_sync_workflow():
    """Build a synchronous distributed operation from one python task and dump its workflow."""
    # Task payload: sleeps briefly then prints. Kept exactly as written
    # because PythonTask receives the function object and may serialize it.
    def f():
        import time
        time.sleep(2)
        print('whatever')
    task = tasks.PythonTask('my-task', f)
    assert 'registry' in CloudharnessConfig.get_configuration()
    op = operations.DistributedSyncOperation('test-sync-op-', task)
    print('\n', yaml.dump(op.to_workflow()))
    if execute:
        print(op.execute())