Example 1
def set_test_environment():
    """Load test configuration values from values.yaml and apply them.

    Reads the ``values.yaml`` file next to this module, merges its contents
    into the global CloudHarness configuration, then applies the default
    environment.

    Raises:
        FileNotFoundError: if ``values.yaml`` does not exist next to this
            file (the original raised a bare ``Exception`` whose message was
            only the path, which was uninformative).
    """
    values_path = os.path.join(HERE, 'values.yaml')
    if not os.path.exists(values_path):
        raise FileNotFoundError(
            f"Test configuration file not found: {values_path}")
    with open(values_path) as f:
        values = yaml.safe_load(f)
    from pprint import pprint

    pprint(values)  # debug aid: show the values being applied to the config
    conf.get_configuration().update(values)
    set_default_environment()
Example 2
class Config(object):
    """Flask/SQLAlchemy configuration for the workspaces manager service."""
    # ...
    DATABASE_NAME = "wsmgr"

    # NOTE(review): credentials look masked/placeholder — confirm they are
    # injected from secrets before this config is used in a deployment.
    SQLALCHEMY_DATABASE_URI = 'postgresql://*****:*****@workspaces-postgres-host:5432/workspaces'

    SQLALCHEMY_TRACK_MODIFICATIONS = False
    SQLALCHEMY_ECHO = False
    BASE_DIR = os.path.dirname(__file__)
    STATIC_DIR = os.path.join(BASE_DIR, "static")
    TEMPLATE_DIR = os.path.join(BASE_DIR, "templates")
    WORKSPACES_DIR = "workspaces"
    OPENAPI_DIR = os.path.join(BASE_DIR, "openapi")
    OPENAPI_FILE = "openapi.yaml"
    LOG_LEVEL = logging.INFO
    APP_NAME = "workspaces"
    WSMGR_HOSTNAME = socket.gethostname()
    WSMGR_IPADDRESS = socket.gethostbyname(WSMGR_HOSTNAME)

    try:
        CH_NAMESPACE = conf.get_configuration()['namespace']
    # BUG FIX: was a bare `except:`, which also swallowed SystemExit and
    # KeyboardInterrupt; narrowed to Exception.
    except Exception:
        logging.warning(
            'Cannot get cluster deployment configuration: assuming local deployment',
            exc_info=True)
        # Fall back to a local SQLite database for local development.
        # NOTE(review): `basedir` is not defined anywhere visible in this
        # file — presumably a module-level variable; confirm it is not
        # meant to be BASE_DIR.
        SQLALCHEMY_DATABASE_URI = 'sqlite:///' + os.path.join(
            basedir, DATABASE_NAME + '.db')
        CH_NAMESPACE = 'osb'

    DEBUG = False

    # Keycloak client-secrets file name
    SECURITY_CONFIG_FILE_NAME = 'client_secrets.json'
Example 3
def create_persistent_volume_claim(name, size, logger, **kwargs):
    """
    Create a Persistent Volume Claim in the Kubernetes cluster.

    If a PVC with the given name already exists then the function just
    returns to the caller.

    Args:
        name (string): the name of the PVC
        size (string): the size of the PVC, e.g. 2Gi for a 2Gb PVC
        logger (logger): the logger where the information message is sent to
        **kwargs: overrides merged on top of the default PVC template

    Raises:
        Exception: if ``size`` is empty/falsy.

    Returns:
        -
    """
    if not size:
        raise Exception(f"Size must be set. Got {size!r}.")

    if persistent_volume_claim_exists(name):
        return

    path = os.path.join(os.path.dirname(__file__), 'templates', 'pvc.yaml')
    # BUG FIX: use a context manager so the template file handle is always
    # closed (previously relied on garbage collection).
    with open(path, 'rt') as template_file:
        tmpl = template_file.read()
    text = tmpl.format(name=name, size=size)
    data = dict_merge(yaml.safe_load(text), kwargs)

    obj = _get_api().create_namespaced_persistent_volume_claim(
        namespace=conf.get_configuration()['namespace'],
        body=data,
    )
    # %s is filled in lazily by logging; the stray f-string prefix removed
    # (the message contained no interpolation fields).
    logger.info("PVC child is created: %s", obj)
Example 4
    def send(self):
        """Send this notification as an HTML email via the configured SMTP host.

        SMTP host/port/TLS come from the cloud configuration; credentials
        come from secrets and login happens only when at least one of them
        is non-empty.
        """
        logger.info(f"Sending notification email to {self.email_to}")
        msg = EmailMessage()
        msg['Subject'] = self.subject
        msg['From'] = self.email_from
        msg['To'] = self.email_to
        msg.set_content(self.message, subtype='html')

        email_user = get_secret_or_empty('email-user')
        email_pass = get_secret_or_empty('email-password')
        email_host = conf.get_configuration()["smtp"]["host"]
        email_port = conf.get_configuration()["smtp"]["port"]
        email_tls = conf.get_configuration()["smtp"].get("use_tls")

        smtp = smtplib.SMTP(email_host, email_port)
        try:
            # BUG FIX: TLS must be negotiated BEFORE logging in; the
            # original called login() first, sending the credentials over
            # the wire in clear text.
            if email_tls:
                smtp.starttls()
            if email_user or email_pass:
                smtp.login(email_user, email_pass)
            smtp.send_message(msg)
        finally:
            # Always close the connection (previously leaked).
            smtp.quit()
Example 5
def test_sync_workflow():
    """Build a distributed sync operation around a trivial Python task."""

    def sleeper():
        import time
        time.sleep(2)
        print('whatever')

    py_task = tasks.PythonTask('my-task', sleeper)
    assert 'registry' in CloudharnessConfig.get_configuration()
    sync_op = operations.DistributedSyncOperation('test-sync-op-', py_task)
    print('\n', yaml.dump(sync_op.to_workflow()))

    if execute:
        print(sync_op.execute())
Example 6
 def send(self, context):
     """Render the notification subject from its template, then dispatch
     the email through the configured backend."""
     rendered_subject = Template(self.notification["subject"]).render(
         domain=conf.get_configuration()["domain"],
         message_type=context.get("message_type")
     )
     # The rendered subject is also made available to the content template.
     context.update({
         "subject": rendered_subject
     })
     body = self.render_content(context)
     backend_instance = self.backend(
         email_from=self.email_from,
         email_to=self.email_to,
         subject=rendered_subject,
         message=body)
     backend_instance.send()
Example 7
def get_persistent_volume_claim(name):
    """
    Get the Persistent Volume Claim with the given name from the Kubernetes
    cluster.

    Args:
        name (string): the name of the PVC

    Returns:
        The PVC data (see https://kubernetes.io/docs/concepts/storage/persistent-volumes/)
        or None when no PVC with that name exists.
    """
    target_namespace = conf.get_configuration()['namespace']
    selector = f'metadata.name={name}'
    matches = _get_api().list_namespaced_persistent_volume_claim(
        namespace=target_namespace,
        field_selector=selector)
    # First match if any, otherwise None.
    return next(iter(matches.items), None)
Example 8
"""
Access workflows using Argo REST API
Reference: https://argoproj.github.io/docs/argo/docs/rest-api.html
https://github.com/kubernetes-client/python/blob/master/kubernetes/docs/CustomObjectsApi.md
"""

import yaml

from argo.workflows.client import ApiClient, WorkflowServiceApi, Configuration, V1alpha1WorkflowCreateRequest, \
    V1alpha1Workflow

# determine the namespace of the current app and run the workflow in that namespace
from cloudharness.utils.config import CloudharnessConfig as conf
from cloudharness import log, applications

# Module-level snapshot of the CloudHarness configuration and the namespace
# every workflow operation in this module runs in.
ch_conf = conf.get_configuration()
namespace = conf.get_namespace()


class WorkflowException(Exception):
    """Base exception for Argo workflow REST operations.

    Attributes:
        status: HTTP-style status code describing the failure.
    """

    def __init__(self, status, message=''):
        self.status = status
        super().__init__(message)


class WorkflowNotFound(WorkflowException):
    # Raised when the requested workflow does not exist (HTTP 404).
    def __init__(self):
        super().__init__(404)


class BadParam(WorkflowException):
Example 9
def delete_persistent_volume_claim(name):
    """Delete the PVC with the given name from the current namespace."""
    target_namespace = conf.get_configuration()['namespace']
    _get_api().delete_namespaced_persistent_volume_claim(
        name=name,
        namespace=target_namespace)
Example 10
import yaml

from .test_env import set_test_environment

# Configure the test environment *before* importing the cloudharness
# modules below, since they read the configuration at import time.
set_test_environment()

from cloudharness.workflows import operations, tasks
from cloudharness import set_debug
from cloudharness.workflows import argo
from cloudharness.utils.config import CloudharnessConfig

set_debug()

# Set to True to actually submit workflows to the cluster when running tests.
execute = False

# Sanity check: the test configuration must define a container registry.
assert 'registry' in CloudharnessConfig.get_configuration()


def test_sync_workflow():
    """Build (and optionally run) a distributed sync workflow with one task."""

    def slow_print():
        import time
        time.sleep(2)
        print('whatever')

    job = tasks.PythonTask('my-task', slow_print)
    assert 'registry' in CloudharnessConfig.get_configuration()
    operation = operations.DistributedSyncOperation('test-sync-op-', job)
    print('\n', yaml.dump(operation.to_workflow()))

    if execute:
        print(operation.execute())