Пример #1
0
def test_wait_for_completion(mocker):
    """A workflow that already succeeded is fetched exactly once while waiting."""
    succeeded_status = MagicMock()
    succeeded_status.phase = 'Succeeded'
    get_mock = mocker.patch('platform_resources.workflow.ArgoWorkflow.get',
                            return_value=succeeded_status)

    workflow_under_test = ArgoWorkflow()
    workflow_under_test.wait_for_completion()

    assert get_mock.call_count == 1
Пример #2
0
def test_wait_for_completion_failure(mocker):
    """wait_for_completion() raises RuntimeError when the workflow has failed."""
    failed_status = MagicMock()
    failed_status.phase = 'Failed'
    get_mock = mocker.patch('platform_resources.workflow.ArgoWorkflow.get',
                            return_value=failed_status)

    workflow_under_test = ArgoWorkflow()
    with pytest.raises(RuntimeError):
        workflow_under_test.wait_for_completion()

    assert get_mock.call_count == 1
Пример #3
0
def test_add_process_with_steps(mocker):
    """Verify that add_process() merges a process workflow into a main workflow.

    After the merge: the entrypoint still points at the step template, the
    process template (with its input parameters) is appended to the spec, and
    the step template contains a step invoking the process template.
    """
    mocker.patch('kubernetes.config.load_kube_config')
    with patch('builtins.open', mock_open(read_data=workflow_template.format('step-template', workflow_steps))):
        main_workflow = ArgoWorkflow.from_yaml('workflow_template')
    with patch('builtins.open', mock_open(read_data=process_template.format(''))):
        process_workflow = ArgoWorkflow.from_yaml('process_template')

    main_workflow.add_process(process_workflow)

    spec = main_workflow.body['spec']
    assert spec['entrypoint'] == 'step-template'

    list_of_templates = spec['templates']

    process_template_exists = False
    flow_template_exists = False
    step_template_exists = False

    for template in list_of_templates:
        if template['name'] == 'step-template':
            step_template_exists = True
            assert template.get('steps')
            assert len(template.get('steps')) == 2

            swt = None
            pwt = None

            for step in template.get('steps'):
                step_name = step[0]['name']
                if step_name == 'step1':
                    swt = step
                elif step_name == 'process-template':
                    pwt = step

            assert swt
            assert pwt

            # BUGFIX: previously this read the loop variable `step` after the
            # loop finished (whichever element happened to be last) and used a
            # list as the default for a dict lookup (`[].get` would raise
            # AttributeError). Check the process step's arguments explicitly.
            parameters = pwt[0].get('arguments', {}).get('parameters', [])
            assert parameters
            check_parameters(parameters)

        elif template['name'] == 'workflow-template':
            flow_template_exists = True

        elif template['name'] == 'process-template':
            process_template_exists = True
            # BUGFIX: dict default instead of list default for the .get chain
            parameters = template.get('inputs', {}).get('parameters')
            assert parameters
            check_parameters(parameters)

    assert process_template_exists
    assert flow_template_exists
    assert step_template_exists
Пример #4
0
def export(path: str, format: str, operation_options: Tuple[str, ...]):
    """Create an export workflow for a saved model.

    :param path: path to the saved model, or FORMATS_OPTION to list the
        available export formats instead of exporting
    :param format: export format name; must match a
        <config>/workflows/exports/<format>.yaml file
    :param operation_options: extra parameters forwarded to the workflow
    """
    if path == FORMATS_OPTION:
        try:
            list_of_workflows = get_list_of_workflows(EXPORT_WORKFLOWS_LOCATION)
        except Exception:
            handle_error(logger, Texts.EXPORT_LIST_ERROR_MSG, Texts.EXPORT_LIST_ERROR_MSG)
            sys.exit(1)

        click.echo(tabulate(list_of_workflows, headers=EXPORT_LIST_HEADERS,
                            tablefmt=TBLT_TABLE_FORMAT))
        sys.exit(0)

    config_path = Config().config_path
    # available formats are derived from the yaml files shipped in config
    formats: List[str] = []
    if os.path.isdir(config_path):
        workflow_exports_files = os.listdir(f'{config_path}/workflows/exports')
        formats = [os.path.splitext(file)[0] for file in workflow_exports_files if file.endswith('.yaml')]

    if not format:
        click.echo(Texts.MISSING_EXPORT_FORMAT.format(formats=formats))
        sys.exit(2)

    format = format.lower()

    if format not in formats:
        click.echo(Texts.WRONG_EXPORT_FORMAT.format(format=format, formats=formats))
        sys.exit(2)

    additional_params_str = " ".join(operation_options)

    try:
        current_namespace = get_kubectl_current_context_namespace()

        # reuse the config_path computed above instead of a second Config() call
        export_workflow = ArgoWorkflow.from_yaml(f'{config_path}/workflows/exports/{format}.yaml')

        export_workflow.parameters = {
            'cluster-registry-address': NAUTAConfigMap().registry,
            'saved-model-dir-path': path,
            'additional-params': additional_params_str
        }

        export_workflow.create(namespace=current_namespace)

        # re-fetch so the displayed representation reflects cluster state
        workflow: ArgoWorkflow = ArgoWorkflow.get(namespace=current_namespace, name=export_workflow.name)
    except Exception:
        error_msg = 'Failed to create export workflow.'
        click.echo(error_msg)
        logger.exception(error_msg)
        sys.exit(1)

    click.echo(tabulate([workflow.cli_representation], headers=MODEL_HEADERS, tablefmt=TBLT_TABLE_FORMAT))
    # plain string: previous version was an f-string with no placeholders (F541)
    click.echo('\nSuccessfully created export workflow')
Пример #5
0
def logs(ctx: click.Context, workflow_name: str):
    """Print Elasticsearch log entries of a single Argo workflow.

    :param ctx: click context
    :param workflow_name: name of the workflow whose logs should be shown
    """
    try:
        namespace = get_kubectl_current_context_namespace()
        workflow: ArgoWorkflow = ArgoWorkflow.get(namespace=namespace,
                                                  name=workflow_name)
        if not workflow:
            click.echo(Texts.NOT_FOUND_MSG.format(workflow_name=workflow_name))
            exit(0)

        # Talk to Elasticsearch through the k8s API-server service proxy,
        # authenticating with the caller's API key.
        es_client = K8sElasticSearchClient(
            host=f'{get_kubectl_host(with_port=True)}'
            f'/api/v1/namespaces/nauta/services/nauta-elasticsearch:nauta/proxy',
            verify_certs=False,
            use_ssl=True,
            headers={'Authorization': get_api_key()})
        # fetch logs from the workflow's start time onwards
        start_date = workflow.started_at
        workflow_logs_generator = es_client.get_argo_workflow_logs_generator(
            workflow=workflow, namespace=namespace, start_date=start_date)
        for log_entry in workflow_logs_generator:
            # skip whitespace-only entries to keep the output compact
            if not log_entry.content.isspace():
                click.echo(
                    f'{log_entry.date} {log_entry.pod_name} {log_entry.content}'
                )
    except Exception:
        handle_error(logger,
                     Texts.OTHER_ERROR_MSG,
                     Texts.OTHER_ERROR_MSG,
                     add_verbosity_msg=True)
        exit(1)
Пример #6
0
def status(state: State, model_name: str, status: PodPhase, username: str):
    """
    Returns status of a model

    :param state: CLI state object
    :param model_name: name of a model data of which should be displayed
    :param status: status of a model step that should be displayed
    :param username: if given - searches for model for a certain user
    """
    try:
        # model workflows live in a user's namespace; default to the caller's
        if not username:
            namespace = get_kubectl_current_context_namespace()
        else:
            namespace = username
        with spinner(text=Texts.LOAD_DATA_MSG):
            workflow: ArgoWorkflow = ArgoWorkflow.get(namespace=namespace, name=model_name)

        if not workflow:
            click.echo(Texts.MODEL_NOT_FOUND.format(model_name=model_name))
            exit(0)
        click.echo('\nOperation details:\n')
        click.echo(tabulate([workflow.cli_representation], headers=MODEL_HEADERS, tablefmt=TBLT_TABLE_FORMAT))
        click.echo('\nOperation steps:\n')
        if workflow.steps:
            # show only steps matching the requested phase (all when no filter)
            click.echo(tabulate([step.cli_representation for step in workflow.steps
                                 if status is None or status == step.phase], headers=STEP_HEADERS,
                                tablefmt=TBLT_TABLE_FORMAT))
        else:
            click.echo(Texts.LACK_OF_STEPS)
    except Exception:
        handle_error(logger, Texts.OTHER_ERROR_MSG, Texts.OTHER_ERROR_MSG, add_verbosity_msg=True)
        exit(1)
Пример #7
0
def logs(state: State, workflow_name: str):
    """Stream Elasticsearch logs of a workflow through a local k8s proxy."""
    try:
        current_namespace = get_kubectl_current_context_namespace()
        target: ArgoWorkflow = ArgoWorkflow.get(namespace=current_namespace, name=workflow_name)
        if not target:
            click.echo(Texts.NOT_FOUND_MSG.format(workflow_name=workflow_name))
            exit(0)

        with K8sProxy(NAUTAAppNames.ELASTICSEARCH) as proxy:
            es_client = K8sElasticSearchClient(host="127.0.0.1", port=proxy.tunnel_port,
                                               verify_certs=False, use_ssl=False)
            begin = target.started_at
            log_entries = es_client.get_argo_workflow_logs_generator(workflow=target,
                                                                     namespace=current_namespace,
                                                                     start_date=begin)
            for entry in log_entries:
                # skip whitespace-only entries
                if entry.content.isspace():
                    continue
                click.echo(f'{entry.date} {entry.pod_name} {entry.content}')
    except K8sProxyCloseError:
        handle_error(logger, Texts.PROXY_CLOSE_LOG_ERROR_MSG, Texts.PROXY_CLOSE_USER_ERROR_MSG)
        exit(1)
    except LocalPortOccupiedError as exe:
        occupied_msg = Texts.LOCAL_PORT_OCCUPIED_ERROR_MSG.format(exception_message=exe.message)
        handle_error(logger, occupied_msg, occupied_msg)
        exit(1)
    except K8sProxyOpenError:
        handle_error(logger, Texts.PROXY_CREATION_ERROR_MSG, Texts.PROXY_CREATION_ERROR_MSG)
        exit(1)
    except Exception:
        handle_error(logger, Texts.OTHER_ERROR_MSG, Texts.OTHER_ERROR_MSG, add_verbosity_msg=True)
        exit(1)
Пример #8
0
def status(ctx: click.Context, username: str):
    """
    Returns status of a model

    :param ctx: click context
    :param username: if given - searches for model for a certain user
    """
    try:
        workflows: List[ArgoWorkflow.ArgoWorkflowCliModel] = []
        # model workflows live in a user's namespace; default to the caller's
        if not username:
            namespace = get_kubectl_current_context_namespace()
        else:
            namespace = username
        with spinner(text=Texts.LOAD_DATA_MSG):
            # filtering out workflows used to build images with training jobs
            workflows = [
                workflow.cli_representation for workflow in ArgoWorkflow.list(
                    namespace=namespace, label_selector="type!=build-workflow")
            ]

        click.echo(
            tabulate(workflows,
                     headers=MODEL_HEADERS,
                     tablefmt=TBLT_TABLE_FORMAT))
    except Exception:
        handle_error(logger,
                     Texts.OTHER_ERROR_MSG,
                     Texts.OTHER_ERROR_MSG,
                     add_verbosity_msg=True)
        exit(1)
Пример #9
0
def export(path: str, format: str, operation_options: Tuple[str, ...]):
    """Create an export workflow for a saved model.

    :param path: path to the saved model directory
    :param format: export format name; must match a
        <config>/workflows/exports/<format>.yaml file
    :param operation_options: extra parameters forwarded to the workflow
    """
    additional_params_str = " ".join(operation_options)
    format = format.lower()

    workflow_exports_files = os.listdir(
        f'{Config().config_path}/workflows/exports')
    # BUGFIX: previously used file.rstrip('.yaml'), which strips any trailing
    # run of the characters {'.','y','a','m','l'} and so mangles format names
    # ending with one of them (e.g. 'savedmodel' -> 'savedmode'). Strip the
    # extension properly instead.
    formats = [
        os.path.splitext(file)[0] for file in workflow_exports_files
        if file.endswith('.yaml')
    ]

    if format not in formats:
        click.echo(f'Format: {format} does not exist. Choose from: {formats}')
        sys.exit(2)

    try:
        current_namespace = get_kubectl_current_context_namespace()

        export_workflow = ArgoWorkflow.from_yaml(
            f'{Config().config_path}/workflows/exports/{format}.yaml')

        export_workflow.parameters = {
            'cluster-registry-address': NAUTAConfigMap().registry,
            'saved-model-dir-path': path,
            'additional-params': additional_params_str
        }

        export_workflow.create(namespace=current_namespace)
    except Exception:
        error_msg = 'Failed to create export workflow.'
        click.echo(error_msg)
        logger.exception(error_msg)
        sys.exit(1)

    click.echo(f'Successfully created export workflow: {export_workflow.name}')
Пример #10
0
def test_set_parameters_error():
    """Assigning parameters raises KeyError when any parameter lacks a value."""
    workflow = ArgoWorkflow()
    workflow.body = {
        'spec': {
            'arguments': {
                'parameters': [
                    {'name': 'test-param-1', 'value': 'test-value-1'},
                    {'name': 'test-param-2'},
                ]
            }
        }
    }

    with pytest.raises(KeyError):
        workflow.parameters = {'test-param-1': 'new-value'}
Пример #11
0
def test_parameters():
    """The parameters property exposes spec arguments as a name->value dict."""
    workflow = ArgoWorkflow()
    workflow.body = {
        'spec': {
            'arguments': {
                'parameters': [
                    {'name': 'test-param-1', 'value': 'test-value-1'},
                    {'name': 'test-param-2', 'value': 'test-value-2'},
                ]
            }
        }
    }

    expected = {
        'test-param-1': 'test-value-1',
        'test-param-2': 'test-value-2',
    }
    assert test_workflow_parameters_equal(workflow, expected) if False else workflow.parameters == expected
Пример #12
0
def view(state: State, workflow_name: str):
    """Show the CLI representation and raw status of one Argo workflow."""
    try:
        current_namespace = get_kubectl_current_context_namespace()
        found: ArgoWorkflow = ArgoWorkflow.get(namespace=current_namespace, name=workflow_name)
        if not found:
            click.echo(Texts.NOT_FOUND_MSG.format(workflow_name=workflow_name))
            exit(0)
        click.echo(tabulate([found.cli_representation], headers=HEADERS, tablefmt="orgtbl"))
        click.echo('\nWorkflow status:\n')
        click.echo(yaml.dump(found.status))
    except Exception:
        handle_error(logger, Texts.OTHER_ERROR_MSG, Texts.OTHER_ERROR_MSG, add_verbosity_msg=True)
        exit(1)
Пример #13
0
def workflow_list(ctx: click.Context):
    """Print a table of all Argo workflows in the current namespace."""
    try:
        current_namespace = get_kubectl_current_context_namespace()
        found: List[ArgoWorkflow] = ArgoWorkflow.list(namespace=current_namespace)
        rows = [wf.cli_representation for wf in found]
        click.echo(tabulate(rows, headers=HEADERS, tablefmt=TBLT_TABLE_FORMAT))
    except Exception:
        handle_error(logger,
                     Texts.OTHER_ERROR_MSG,
                     Texts.OTHER_ERROR_MSG,
                     add_verbosity_msg=True)
        exit(1)
Пример #14
0
def workflow_list(state: State):
    """Print a table of all Argo workflows in the current namespace."""
    try:
        current_namespace = get_kubectl_current_context_namespace()
        found = ArgoWorkflow.list(namespace=current_namespace)
        rows = [wf.cli_representation for wf in found]
        click.echo(tabulate(rows, headers=HEADERS, tablefmt="orgtbl"))
    except Exception:
        handle_error(logger,
                     Texts.OTHER_ERROR_MSG,
                     Texts.OTHER_ERROR_MSG,
                     add_verbosity_msg=True)
        exit(1)
Пример #15
0
def cancel(state: State, workflow_name: str):
    """Delete (cancel) the named Argo workflow, showing a progress spinner."""
    try:
        current_namespace = get_kubectl_current_context_namespace()
        target: ArgoWorkflow = ArgoWorkflow.get(name=workflow_name,
                                                namespace=current_namespace)
        if not target:
            click.echo(Texts.NOT_FOUND_MSG.format(workflow_name=workflow_name))
            exit(0)
        with spinner(text=Texts.PROGRESS_MSG.format(workflow_name=workflow_name)):
            target.delete()
        click.echo(Texts.SUCCESS_MSG.format(workflow_name=workflow_name))
    except Exception:
        handle_error(logger,
                     Texts.OTHER_ERROR_MSG,
                     Texts.OTHER_ERROR_MSG,
                     add_verbosity_msg=True)
        exit(1)
Пример #16
0
def submit(state: State, workflow_path: str):
    """Create a workflow from a yaml spec and print its CLI representation."""
    try:
        parsed_workflow: ArgoWorkflow = ArgoWorkflow.from_yaml(workflow_path)
        current_namespace = get_kubectl_current_context_namespace()
        with spinner(text=Texts.PROGRESS_MSG):
            parsed_workflow.create(namespace=current_namespace)
            # Set namespace, to properly display owner in CLI
            parsed_workflow.namespace = current_namespace
        click.echo(tabulate([parsed_workflow.cli_representation],
                            headers=HEADERS,
                            tablefmt=TBLT_TABLE_FORMAT))
    except IOError as e:
        load_error = Texts.LOAD_SPEC_ERROR_MSG.format(msg=str(e))
        handle_error(logger, load_error, load_error)
        exit(1)
    except Exception:
        handle_error(logger,
                     Texts.OTHER_ERROR_MSG,
                     Texts.OTHER_ERROR_MSG,
                     add_verbosity_msg=True)
        exit(1)
Пример #17
0
def process(path: str, kind: str, options: Tuple[str, ...]):
    """Create a process workflow of the given kind for a saved model.

    :param path: path to the saved model directory
    :param kind: process kind; must match a
        <config>/workflows/processes/<kind>.yaml file
    :param options: extra parameters forwarded to the workflow
    """
    additional_params_str = " ".join(options)
    kind = kind.lower()
    config_path = Config().config_path
    process_path = f'{config_path}/workflows/processes'
    # available kinds are derived from the yaml files shipped in config
    kinds: List[str] = []
    if os.path.isdir(process_path):
        process_kinds = os.listdir(process_path)
        kinds = [
            os.path.splitext(file)[0] for file in process_kinds
            if file.endswith('.yaml')
        ]

    if kind not in kinds:
        click.echo(Texts.WRONG_PROCESS_KIND.format(process=kind, kinds=kinds))
        sys.exit(2)

    try:
        current_namespace = get_kubectl_current_context_namespace()

        # reuse process_path computed above instead of rebuilding it
        process_workflow = ArgoWorkflow.from_yaml(f'{process_path}/{kind}.yaml')

        process_workflow.parameters = {
            'cluster-registry-address': NAUTAConfigMap().registry,
            'saved-model-dir-path': path,
            'additional-params': additional_params_str
        }

        process_workflow.create(namespace=current_namespace)
    except Exception:
        # BUGFIX: message previously said "export workflow" in the process command
        error_msg = 'Failed to create process workflow.'
        click.echo(error_msg)
        logger.exception(error_msg)
        sys.exit(1)

    click.echo(
        f'Successfully created process workflow: {process_workflow.name}')
Пример #18
0
# limitations under the License.
#

from unittest.mock import MagicMock

import pytest
from click.testing import CliRunner
from kubernetes.client import CustomObjectsApi

from commands.workflow.logs import logs
from cli_text_consts import WorkflowLogsTexts as Texts
from logs_aggregator.k8s_log_entry import LogEntry
from platform_resources.workflow import ArgoWorkflow

# Workflow fixture with a mocked k8s custom-object API — no cluster calls.
FAKE_WORKFLOW = ArgoWorkflow(
    name='fake-workflow',
    namespace='fake-namespace',
    k8s_custom_object_api=MagicMock(spec=CustomObjectsApi))

# Sample log entries as yielded by the ES log generator; used to check that
# log content is passed through to the CLI output unchanged.
FAKE_LOGS = [
    LogEntry(date='2018-04-17T09:28:39+00:00',
             content='Warning: Unable to load '
             '/usr/share/zoneinfo/right/Factory as time zone. Skipping it.\n',
             pod_name='understood-gnat-mysql-868b556f8f-lwdr9',
             namespace='default'),
    LogEntry(date='2018-04-17T09:28:49+00:00',
             content='MySQL init process done. Ready for start up.\n',
             pod_name='understood-gnat-mysql-868b556f8f-lwdr9',
             namespace='default')
]

Пример #19
0
def get_logs(operation_name: str, start_date: str, end_date: str, match: str,
             output: bool, pager: bool, follow: bool):
    """
    Show logs for a given model export operation.

    :param operation_name: exact operation name (mutually exclusive with match)
    :param start_date: show logs from this date; defaults to workflow start
    :param end_date: show logs up to this date
    :param match: name filter matching several operations (mutually exclusive
        with operation_name)
    :param output: if True - save logs to files instead of printing them
    :param pager: if True - display printed logs through a pager
    :param follow: if True (and output is False) - stream logs continuously
    """
    # check whether we have operations with a given name
    if operation_name and match:
        handle_error(user_msg=Texts.NAME_M_BOTH_GIVEN_ERROR_MSG)
        exit(1)
    elif not operation_name and not match:
        handle_error(user_msg=Texts.NAME_M_NONE_GIVEN_ERROR_MSG)
        exit(1)

    try:
        with K8sProxy(NAUTAAppNames.ELASTICSEARCH) as proxy:
            es_client = K8sElasticSearchClient(host="127.0.0.1",
                                               port=proxy.tunnel_port,
                                               verify_certs=False,
                                               use_ssl=False)
            namespace = get_kubectl_current_context_namespace()
            if match:
                operation_name = match
                name_filter = match
            else:
                # anchor the filter so only the exact name matches
                name_filter = f'^{operation_name}$'
            workflows = ArgoWorkflow.list(namespace=namespace,
                                          name_filter=name_filter)
            if not workflows:
                raise ValueError(
                    f'Operation with given name: {operation_name} does not '
                    f'exists in namespace {namespace}.')

            # following makes no sense when logs are written to a file
            follow_logs = True if follow and not output else False

            if output and len(workflows) > 1:
                click.echo(Texts.MORE_EXP_LOGS_MESSAGE)

            for workflow in workflows:
                start_date = start_date if start_date else workflow.started_at

                ops_logs_generator = es_client.get_argo_workflow_logs_generator(
                    workflow=workflow,
                    namespace=namespace,
                    start_date=start_date,
                    end_date=end_date,
                    follow=follow_logs)

                if output:
                    save_logs_to_file(logs_generator=ops_logs_generator,
                                      instance_name=workflow.name,
                                      instance_type="operation")
                else:
                    if len(workflows) > 1:
                        click.echo(f'Operation : {workflow.name}')
                    print_logs(run_logs_generator=ops_logs_generator,
                               pager=pager)

    except K8sProxyCloseError:
        handle_error(logger, Texts.PROXY_CLOSE_LOG_ERROR_MSG,
                     Texts.PROXY_CLOSE_LOG_ERROR_MSG)
        exit(1)
    except LocalPortOccupiedError as exe:
        handle_error(
            logger,
            Texts.LOCAL_PORT_OCCUPIED_ERROR_MSG.format(
                exception_message=exe.message),
            Texts.LOCAL_PORT_OCCUPIED_ERROR_MSG.format(
                exception_message=exe.message))
        exit(1)
    except K8sProxyOpenError:
        handle_error(logger, Texts.PROXY_CREATION_ERROR_MSG,
                     Texts.PROXY_CREATION_ERROR_MSG)
        exit(1)
    except ValueError:
        # NOTE(review): the log message formats with operation_name= while the
        # user message formats with experiment_name= — one of these kwargs is
        # likely wrong for the shared template; confirm against Texts.
        handle_error(
            logger,
            Texts.OPERATION_NOT_EXISTS_ERROR_MSG.format(
                operation_name=operation_name),
            Texts.OPERATION_NOT_EXISTS_ERROR_MSG.format(
                experiment_name=operation_name))
        exit(1)
    except Exception:
        handle_error(logger, Texts.LOGS_GET_OTHER_ERROR_MSG,
                     Texts.LOGS_GET_OTHER_ERROR_MSG)
        exit(1)
Пример #20
0
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

import pytest
from unittest.mock import MagicMock, mock_open, patch
from typing import List

from platform_resources.workflow import ArgoWorkflow

# Fixture: workflow whose two spec parameters both carry explicit values.
workflow_w_two_param = ArgoWorkflow()
workflow_w_two_param.body = {'spec': {'arguments': {'parameters': [{'name': 'test-param-1', 'value': 'test-value-1'},
                                                                   {'name': 'test-param-2', 'value': 'test-value-2'}]}}}

# Fixture: workflow whose second parameter has no 'value' key (error case).
workflow_wo_value = ArgoWorkflow()
workflow_wo_value.body = {'spec': {'arguments': {'parameters': [{'name': 'test-param-1', 'value': 'test-value-1'},
                                                                {'name': 'test-param-2'}]}}}

process_template = '''
apiVersion: argoproj.io/v1alpha1
kind: Workflow
metadata:
  generateName: process-template-
spec:
  entrypoint: process-template
  templates:
Пример #21
0
# limitations under the License.
#

from unittest.mock import MagicMock

import pytest
from click.testing import CliRunner
from kubernetes.client import CustomObjectsApi

from commands.workflow.view import view
from cli_text_consts import WorkflowViewTexts as Texts
from platform_resources.workflow import ArgoWorkflow

# Succeeded-workflow fixture with a mocked k8s custom-object API.
FAKE_WORKFLOW = ArgoWorkflow(
    name='fake-workflow',
    namespace='fake-namespace',
    k8s_custom_object_api=MagicMock(spec=CustomObjectsApi),
    status={'phase': 'Succeeded'})

# Placeholder path; the file is never actually opened in these tests.
FAKE_WORKFLOW_PATH = '/bla/workflow.yaml'


class WorkflowViewMocks:
    """Bundle of mocker patches used by the workflow `view` command tests."""
    def __init__(self, mocker):
        # namespace resolution is stubbed to a fixed value
        self.get_namespace = mocker.patch(
            'commands.workflow.view.get_kubectl_current_context_namespace',
            return_value='fake-namespace')
        # ArgoWorkflow.get always returns the succeeded FAKE_WORKFLOW fixture
        self.get_workflow = mocker.patch(
            'commands.workflow.view.ArgoWorkflow.get',
            return_value=FAKE_WORKFLOW)
Пример #22
0
import pytest
from click.testing import CliRunner
from kubernetes.client import CustomObjectsApi

from commands.model.status import status
from cli_text_consts import ModelStatusCmdTexts as Texts
from platform_resources.workflow import ArgoWorkflow, ArgoWorkflowStep

# Two steps in different phases, to exercise the step-phase filter.
MODEL_STEPS = [
    ArgoWorkflowStep(name="model1", phase="Running"),
    ArgoWorkflowStep(name="model2", phase="Failed")
]

# Succeeded model workflow fixture with a mocked k8s custom-object API.
MODEL = ArgoWorkflow(name='fake-workflow',
                     namespace='fake-namespace',
                     k8s_custom_object_api=MagicMock(spec=CustomObjectsApi),
                     phase='Succeeded',
                     steps=MODEL_STEPS)


class ModelStatusMocks:
    """Bundle of mocker patches used by the model `status` command tests."""
    def __init__(self, mocker):
        # namespace resolution is stubbed to a fixed value
        self.get_namespace = mocker.patch(
            'commands.model.status.get_kubectl_current_context_namespace',
            return_value='fake-namespace')
        # ArgoWorkflow.get always returns the MODEL fixture
        self.get_workflow = mocker.patch(
            'commands.model.status.ArgoWorkflow.get', return_value=MODEL)


@pytest.fixture()
def status_mocks(mocker) -> ModelStatusMocks:
Пример #23
0
def purge_experiment(exp_name: str, runs_to_purge: List[Run],
                     k8s_es_client: K8sElasticSearchClient,
                     namespace: str) -> Tuple[List[Run], List[Run]]:
    """
       Purge experiment with a given name by cancelling runs given as a parameter. If given experiment
       contains more runs than is in the list of runs - experiment's state remains intact.

       :param exp_name: name of an experiment to which belong runs passed in run_list parameter
       :param runs_to_purge: list of runs that should be purged, they have to belong to exp_name experiment
       :param k8s_es_client: Kubernetes ElasticSearch client
       :param namespace: namespace where experiment is located
       :return: two lists - first contains runs that were cancelled successfully, second - those which weren't
       """
    logger.debug(f"Purging {exp_name} experiment ...")

    purged_runs: List[Run] = []
    not_purged_runs: List[Run] = []

    experiment = Experiment.get(name=exp_name, namespace=namespace)
    if not experiment:
        raise RuntimeError(Texts.GET_EXPERIMENT_ERROR_MSG)

    experiment_runs = Run.list(namespace=namespace, exp_name_filter=[exp_name])
    # check whether experiment has more runs that should be cancelled
    cancel_whole_experiment = (len(experiment_runs) == len(runs_to_purge))
    if cancel_whole_experiment:
        experiment.state = ExperimentStatus.CANCELLING
        experiment.update()

    try:
        cancelled_runs, not_cancelled_runs = cancel_experiment_runs(
            runs_to_cancel=runs_to_purge, namespace=namespace)
        not_purged_runs = not_cancelled_runs

        if cancel_whole_experiment:
            # Delete associated workflows
            experiment_associated_workflows = [
                wf for wf in ArgoWorkflow.list(namespace=namespace)
                if wf.labels.get('experimentName') == experiment.name
            ]
            for wf in experiment_associated_workflows:
                wf.delete()

            # Remove tags from git repo manager
            try:
                delete_exp_tag_from_git_repo_manager(
                    experiment_name=experiment.name,
                    username=namespace,
                    experiments_workdir=get_run_environment_path(''))
            except Exception:
                handle_error(logger, Texts.GIT_REPO_MANAGER_ERROR_MSG,
                             Texts.GIT_REPO_MANAGER_ERROR_MSG)
                raise

        for run in cancelled_runs:
            logger.debug(f"Purging {run.name} run ...")
            click.echo(Texts.PURGING_START_MSG.format(run_name=run.name))
            try:
                with spinner(text=Texts.PURGING_PROGRESS_MSG.format(
                        run_name=run.name)):
                    # purge helm release
                    delete_helm_release(run.name,
                                        namespace=namespace,
                                        purge=True)
                    # delete run
                    kubectl.delete_k8s_object("run", run.name)
                    purged_runs.append(run)
            except Exception as exe:
                not_purged_runs.append(run)
                logger.exception("Error during purging runs.")
                # occurrence of NotFound error may mean, that run has been removed earlier
                if "NotFound" not in str(exe):
                    # BUGFIX: was `experiment_name` (an undefined name) — the
                    # resulting NameError masked the real purge error.
                    click.echo(
                        Texts.INCOMPLETE_PURGE_ERROR_MSG.format(
                            experiment_name=exp_name))
                    raise exe
            try:
                # clear run logs
                if is_current_user_administrator():
                    logger.debug(f"Clearing logs for {run.name} run.")
                    with spinner(text=Texts.PURGING_LOGS_PROGRESS_MSG.format(
                            run_name=run.name)):
                        k8s_es_client.delete_logs_for_run(run=run.name,
                                                          namespace=namespace)
            except Exception:
                logger.exception("Error during clearing run logs.")

            # CAN-1099 - docker garbage collector has errors that prevent from correct removal of images
            # try:
            # try to remove images from docker registry
            #    delete_images_for_experiment(exp_name=run.name)
            # except Exception:
            #    logger.exception("Error during removing images.")

        if cancel_whole_experiment and not not_purged_runs:
            try:
                kubectl.delete_k8s_object("experiment", exp_name)
            except Exception:
                # problems during deleting experiments are hidden as if runs were
                # cancelled user doesn't have a possibility to remove them
                logger.exception("Error during purging experiment.")

    except Exception:
        logger.exception("Error during purging experiment.")
        return purged_runs, not_purged_runs

    return purged_runs, not_purged_runs
Пример #24
0
import pytest
from click.testing import CliRunner
from kubernetes.client import CustomObjectsApi

from commands.model.status import status
from cli_text_consts import ModelStatusCmdTexts as Texts
from platform_resources.workflow import ArgoWorkflow, ArgoWorkflowStep

# Two steps in different phases, to exercise the step-phase filter.
MODEL_STEPS = [
    ArgoWorkflowStep(name="model1", phase="Running"),
    ArgoWorkflowStep(name="model2", phase="Failed")
]

# Succeeded model workflow fixture with a mocked k8s custom-object API.
MODEL = ArgoWorkflow(name='fake-workflow',
                     namespace='fake-namespace',
                     k8s_custom_object_api=MagicMock(spec=CustomObjectsApi),
                     phase='Succeeded',
                     steps=MODEL_STEPS)

BUILD_MODEL = ArgoWorkflow(
    name='fake-workflow',
    namespace='fake-namespace',
    k8s_custom_object_api=MagicMock(spec=CustomObjectsApi),
    phase='Succeeded',
    steps=MODEL_STEPS,
    body={'spec': {
        'meta-data': [{
            'labels': {
                'type': ['buildctl']
            }
        }]
Пример #25
0
from cli_text_consts import ModelExportCmdTexts as Texts
from commands.model.common import workflow_description
from commands.model.export import export
from platform_resources.workflow import ArgoWorkflow, QUEUED_PHASE

# Names/parameters of two fake export operations used across the tests.
FEM_NAME = "EXPORT_1"
SEM_NAME = "EXPORT_2"
FEM_PARAMETERS = "PARAMS_1"
SEM_PARAMETERS = "PARAMS_2"

FEM_START_DATE = '2000-01-01'
FEM_NAMESPACE = 'test-namespace'

# Running (unfinished, phase=None) workflow fixture for the export command.
TEST_AGROWORKFLOW = ArgoWorkflow(name=FEM_NAME,
                                 started_at=FEM_START_DATE,
                                 finished_at=None,
                                 namespace=FEM_NAMESPACE,
                                 phase=None)

# Expected CLI listing when both fake operations are present.
TWO_MODEL_OUTPUT = [
    workflow_description(name=FEM_NAME, parameters=FEM_PARAMETERS),
    workflow_description(name=SEM_NAME, parameters=SEM_PARAMETERS)
]


def setup_mocks(mocker):
    mocker.patch('commands.model.export.get_kubectl_current_context_namespace',
                 return_value='fake-namespace')
    mocker.patch('platform_resources.workflow.ArgoWorkflow.from_yaml',
                 return_value=mocker.MagicMock())
    mocker.patch('platform_resources.workflow.ArgoWorkflow.get',
Пример #26
0
from commands.model.status import status
from cli_text_consts import ModelStatusCmdTexts as Texts
from platform_resources.workflow import ArgoWorkflow, ArgoWorkflowStep

# Two steps in different phases, to exercise the step-phase filter.
MODEL_STEPS = [
    ArgoWorkflowStep(name="model1", phase="Running"),
    ArgoWorkflowStep(name="model2", phase="Failed")
]

# Non-build model workflow fixture: its template runs a command other than
# buildctl, so it should not be classified as a build workflow.
MODEL = ArgoWorkflow(name='fake-workflow',
                     namespace='fake-namespace',
                     k8s_custom_object_api=MagicMock(spec=CustomObjectsApi),
                     phase='Succeeded',
                     steps=MODEL_STEPS,
                     body={
                         'spec': {
                             'templates': [{
                                 'container': {
                                     'command': ['other command']
                                 }
                             }]
                         }
                     })

BUILD_MODEL = ArgoWorkflow(
    name='fake-workflow',
    namespace='fake-namespace',
    k8s_custom_object_api=MagicMock(spec=CustomObjectsApi),
    phase='Succeeded',
    steps=MODEL_STEPS,
    body={'spec': {
        'templates': [{