Example #1
def reports():
    """Report build/test status to source control tool."""
    payload = get_json()
    logger.info('Received reporting request (payload: %s)', payload)
    report_id = payload.get('id')
    repo = payload.get('repo')
    if not report_id:
        raise AXApiInvalidParam('Missing required parameters',
                                detail='Required parameters (id)')

    try:
        if not repo:
            cache = gateway.redis_client.get(report_id, decoder=json.loads)
            repo = cache['repo']
        vendor = gateway.axops_client.get_tool(repo)['type']
        if vendor not in gateway.scm_clients.keys():
            raise AXApiInvalidParam(
                'Invalid parameter values',
                detail='Unsupported type ({})'.format(vendor))
        result = gateway.scm_clients[vendor].upload_job_result(payload)
        if result == -1:
            logger.info(
                'GitHub does not support status reports for non-SHA commits, skipping.'
            )
    except Exception as e:
        logger.error('Failed to report status: %s', e)
        gateway.event_notification_client.send_message_to_notification_center(
            CODE_JOB_CI_STATUS_REPORTING_FAILURE, detail=payload)
        raise AXApiInternalError('Failed to report status', detail=str(e))
    else:
        logger.info('Successfully reported status')
        return jsonify(result)
Example #2
def _parse_repo_branch(repo, branch, repo_branch):
    """Parse repo / branch / repo_branch."""
    if repo:
        try:
            repo = unquote(repo)
            _, vendor, repo_owner, repo_name = Gateway.parse_repo(repo)
        except Exception as e:
            msg = 'Unable to parse repo: %s' % str(e)
            logger.error(msg)
            raise AXApiInvalidParam('Unable to parse repo', msg)
        else:
            dir = '{}/{}/{}/{}'.format(Gateway.BASE_DIR, vendor, repo_owner, repo_name)
            workspaces = {dir: [branch] if branch else []}
    elif repo_branch:
        try:
            repo_branch = json.loads(repo_branch)
            workspaces = {}
            for repo in repo_branch.keys():
                repo = unquote(repo)
                _, vendor, repo_owner, repo_name = Gateway.parse_repo(repo)
                dir = '{}/{}/{}/{}'.format(Gateway.BASE_DIR, vendor, repo_owner, repo_name)
                if dir not in workspaces:
                    workspaces[dir] = set()
                for branch in repo_branch[repo]:
                    workspaces[dir].add(branch)
        except Exception as e:
            msg = 'Unable to parse repo_branch: %s' % str(e)
            logger.error(msg)
            raise AXApiInvalidParam('Unable to parse repo_branch', msg)
    else:
        dirs = [dir[0] for dir in os.walk(Gateway.BASE_DIR) if dir[0].endswith('/.git')]
        workspaces = list(map(lambda v: v[:-5], dirs))
        workspaces = dict([(k, [branch] if branch else []) for k in workspaces])
    return workspaces
Example #3
def get_create_delete_webhooks():
    """Create / delete a webhook."""
    payload = get_json()

    repo = payload.get('repo')
    vendor = payload.get('type')
    username = payload.get('username')
    password = payload.get('password')
    if not all([repo, vendor]):
        raise AXApiInvalidParam('Missing required parameters',
                                detail='Required parameters (repo, type)')
    if vendor not in gateway.scm_clients.keys():
        raise AXApiInvalidParam('Invalid parameter values',
                                detail='Unsupported type ({})'.format(vendor))

    if username and password:
        gateway.scm_clients[vendor].update_repo_info(repo, vendor, username,
                                                     password)
    if request.method == 'GET':
        result = gateway.get_webhook(vendor, repo)
    elif request.method == 'POST':
        result = gateway.create_webhook(vendor, repo)
    else:
        result = gateway.delete_webhook(vendor, repo)
    return jsonify(result)
Example #4
    def webhooks(self, request):
        """Create / delete a webhook.

        :param request:
        :return:
        """
        repo = request.data.get('repo')
        vendor = request.data.get('type')
        username = request.data.get('username')
        password = request.data.get('password')
        if not all([repo, vendor]):
            raise AXApiInvalidParam('Missing required parameters',
                                    detail='Required parameters (repo, type)')
        if vendor not in self.scm_clients.keys():
            raise AXApiInvalidParam(
                'Invalid parameter values',
                detail='Unsupported type ({})'.format(vendor))

        if username and password:
            self.scm_clients[vendor].update_repo_info(repo, vendor, username,
                                                      password)
        if request.method == 'GET':
            result = self._get_webhook(vendor, repo)
        elif request.method == 'POST':
            result = self._create_webhook(vendor, repo)
        else:
            result = self._delete_webhook(vendor, repo)
        return Response(result)
Example #5
def _put_file(repo, branch, path):
    """Put a file in s3.

    :param repo:
    :param branch:
    :param path:
    :return:
    """
    _, vendor, repo_owner, repo_name = parse_repo(repo)
    workspace = '{}/{}/{}/{}'.format(BASE_DIR, vendor, repo_owner, repo_name)
    if not os.path.isdir(workspace):
        raise AXApiInvalidParam('Invalid repository',
                                'Invalid repository ({})'.format(repo))
    try:
        logger.info(
            'Extracting file content from repository (repo: %s, branch: %s, path: %s) ...',
            repo, branch, path)
        git_client = GitClient(path=workspace, read_only=True)
        files = git_client.get_files(branch=branch,
                                     subdir=path,
                                     binary_mode=True)
    except Exception as e:
        message = 'Failed to extract file content'
        detail = '{}: {}'.format(message, str(e))
        logger.error(detail)
        raise AXApiInternalError(message, detail)
    else:
        if len(files) == 0:
            raise AXApiInvalidParam(
                'Unable to locate file with given information')
        file_content = files[0]['content']
        logger.info('Successfully extracted file content')

    try:
        # Cluster name id always has the form <cluster_name>-<36_bytes_long_cluster_id>
        cluster_name, cluster_id = CLUSTER_NAME_ID[:-37], CLUSTER_NAME_ID[-36:]
        key = '{cluster_name}/{cluster_id}/{vendor}/{repo_owner}/{repo_name}/{branch}/{path}'.format(
            cluster_name=cluster_name,
            cluster_id=cluster_id,
            vendor=vendor,
            repo_owner=repo_owner,
            repo_name=repo_name,
            branch=branch,
            path=path)
        logger.info('Uploading file content to s3 (bucket: %s, key: %s) ...',
                    S3_BUCKET_NAME, key)
        response = s3_bucket.Object(key).put(Body=file_content)
        etag = response.get('ETag')
        if etag:
            etag = json.loads(etag)
    except Exception as e:
        message = 'Failed to upload file content'
        detail = '{}: {}'.format(message, str(e))
        logger.error(detail)
        raise AXApiInternalError(message, detail)
    else:
        logger.info('Successfully uploaded file content')
        return {'bucket': S3_BUCKET_NAME, 'key': key, 'etag': etag}
Example #6
    def delete_webhook(self, vendor, repo):
        """Delete webhook

        :param vendor:
        :param repo:
        :returns:
        """
        # Delete webhook
        try:
            logger.info('Deleting webhook (repo: %s) ...', repo)
            self.scm_clients[vendor].delete_webhook(repo)
        except AXApiAuthFailed as e:
            logger.error('Invalid credential supplied')
            detail = {
                'repo': repo,
                'error': 'Invalid credential supplied:' + str(e)
            }
            self.event_notification_client.send_message_to_notification_center(CODE_JOB_CI_WEBHOOK_DELETION_FAILURE,
                                                                               detail=detail)
            raise AXApiInvalidParam('User authentication failed', detail=str(e))
        except AXApiForbiddenReq as e:
            logger.error('Supplied credential is valid but has insufficient permission')
            detail = {
                'repo': repo,
                'error': 'Supplied credential is valid but has insufficient permission: ' + str(e)
            }
            self.event_notification_client.send_message_to_notification_center(CODE_JOB_CI_WEBHOOK_DELETION_FAILURE,
                                                                               detail=detail)
            raise AXApiInvalidParam('User has insufficient permission', detail=str(e))
        except Exception as e:
            logger.error('Failed to delete webhook: %s', e)
            detail = {
                'repo': repo,
                'error': 'Failed to delete webhook:' + str(e)
            }
            self.event_notification_client.send_message_to_notification_center(CODE_JOB_CI_WEBHOOK_DELETION_FAILURE,
                                                                               detail=detail)
            raise AXApiInternalError('Failed to delete webhook', str(e))
        else:
            logger.info('Successfully deleted webhook (repo: %s)', repo)

        # Delete ELB
        try:
            if not self.has_webhook(repo):
                logger.info('Deleting ELB for webhook ...')
                self.axsys_client.delete_webhook()
        except Exception as e:
            logger.error('Failed to delete ELB for webhook: %s', str(e))
            detail = {'repo': repo,
                      'error': 'Failed to delete ELB for webhook: ' + str(e)
                      }
            self.event_notification_client.send_message_to_notification_center(CODE_JOB_CI_ELB_DELETION_FAILURE,
                                                                               detail=detail)
            raise AXApiInternalError('Failed to delete ELB for webhook', str(e))
        else:
            logger.info('Successfully deleted ELB for webhook')
            return {}
Example #7
    def yamls(self, request):
        """Update YAML contents (i.e. policy, template).

        :param request:
        :return:
        """
        vendor = request.data.get('type')
        repo = request.data.get('repo')
        branch = request.data.get('branch')
        if not all([vendor, repo, branch]):
            raise AXApiInvalidParam(
                'Missing required parameters',
                detail='Required parameters (type, repo, branch)')
        if vendor not in self.scm_clients.keys():
            raise AXApiInvalidParam(
                'Invalid parameter values',
                detail='Unsupported type ({})'.format(vendor))

        try:
            # The arrival of events may not always be in the natural order of commits. For
            # example, the user may resend an old event from the UI of the source control tool. In
            # this case, we may update the YAML contents to an older version. To avoid this,
            # we guarantee that every YAML update will only update the content to the latest
            # version on a branch. More specifically, whenever we receive an event, we extract
            # the repo and branch information, and find the HEAD of the branch. Then, we use
            # the commit of HEAD to retrieve the YAML content, and update policies/templates
            # correspondingly.
            scm_client = self.scm_clients[vendor]
            commit = scm_client.get_branch_head(repo, branch)
            yaml_files = scm_client.get_yamls(repo, commit)
            logger.info('Updating YAML contents (policy/template) ...')
            axops_client.update_yamls(repo, branch, commit, yaml_files)
        except Exception as e:
            if 'Branch not found' in str(e):
                logger.info('No need to update YAML contents')
                return Response()
            else:
                logger.error('Failed to update YAML contents: %s', e)
                event_notification_client.send_message_to_notification_center(
                    CODE_JOB_CI_YAML_UPDATE_FAILURE,
                    detail={
                        'vendor': vendor,
                        'repo': repo,
                        'branch': branch,
                        'error': str(e)
                    })
                raise AXApiInternalError('Failed to update YAML contents',
                                         str(e))
        else:
            logger.info('Successfully updated YAML contents')
            return Response()
Example #8
    def augment_updates(self, updates):
        """Accepts a dictionary of updates and augments the updates to the current instance, also performing validation of instance level fields"""
        updates = copy.deepcopy(updates)
        if 'status' in updates:
            new_status = updates.pop('status')
            if self.status != new_status:
                if new_status == InstanceStatus.ACTIVE:
                    self.transition_state(Event.MARK_ACTIVE, "")
                elif new_status == InstanceStatus.DELETED:
                    self.transition_state(Event.MARK_DELETED, "")
                else:
                    raise AXIllegalOperationException(
                        "Cannot change status to {}".format(new_status))

        new_attributes = updates.pop('attributes', {})

        # Validates the top level fields, but not attributes (we popped the 'attributes' from the updates)
        for field, val in updates.items():
            setattr(self, field, val)
        try:
            instance_schema(self.json())
        except voluptuous.Error as err:
            raise AXApiInvalidParam(humanize_error(str(err)))

        # Set attributes so that instance attributes can be validated later against the class schema
        for attr_name, val in new_attributes.items():
            self.attributes[attr_name] = val
Example #9
def update_fixture_class(class_id):
    updates = get_json()
    template_id = updates.get('template_id')
    if not template_id:
        raise AXApiInvalidParam("Required argument 'template_id' not supplied")
    fix_class = fixmgr.update_fixture_class(class_id, template_id)
    return jsonify(fix_class.json())
Example #10
def create_fixture_class():
    payload = get_json()
    template_id = payload.get('template_id')
    if not template_id:
        raise AXApiInvalidParam("Required argument 'template_id' not supplied")
    fix_class = fixmgr.upsert_fixture_class(template_id)
    return jsonify(fix_class.json())
Example #11
def update_volume(volume_id):
    updates = get_json()
    if 'id' in updates and updates['id'] != volume_id:
        raise AXApiInvalidParam("Volume id cannot be updated")
    updates['id'] = volume_id
    volume = fixmgr.volumemgr.update_volume(updates)
    return jsonify(volume.json())
Example #12
def build_option_validator(data_type, options):
    """Returns a validator which checks if the incoming value is in the set of valid options"""
    valid_options = set()
    # check and normalize the options to agree with the data type
    for option in options:
        try:
            if data_type == 'int':
                valid_options.add(int(option))
            elif data_type == 'string':
                valid_options.add(str(option))
            elif data_type == 'bool':
                valid_options.add(bool_validator(option))
            elif data_type == 'float':
                valid_options.add(float(option))
        except Exception:
            raise AXApiInvalidParam("'{}' is not of data type {}".format(
                option, data_type))

    def option_validator(val):
        if val not in valid_options:
            raise Invalid("'{}' is not a valid option. Expected: {}".format(
                val, ', '.join(valid_options)))
        return val

    return option_validator
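
A minimal usage sketch for the validator factory above, assuming build_option_validator and the Invalid exception from this example are importable; the option values are made up for illustration:

validate_env = build_option_validator('string', ['dev', 'staging', 'prod'])
assert validate_env('prod') == 'prod'   # value is in the option set
try:
    validate_env('qa')                  # not an allowed option
except Invalid as err:
    print(err)                          # "'qa' is not a valid option. Expected: ..."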
Example #13
def api_stop_pod(pod):
    container = request.args.get('container')
    namespace = request.args.get('namespace')
    if not namespace:
        raise AXApiInvalidParam("namespace required")
    console.kill_pod(namespace, pod, container=container)
    return jsonify()
Example #14
    def purge_branches(self, repo, branch=None):
        """Purge branch heads.

        :param repo:
        :param branch:
        :return:
        """
        if not repo:
            raise AXApiInvalidParam('Missing required parameter',
                                    'Missing required parameter (repo)')
        logger.info('Purging branch heads (repo: %s, branch: %s) ...', repo,
                    branch)

        try:
            if not branch:
                self.axdb_client.purge_branch_heads(repo)
            else:
                self.axdb_client.purge_branch_head(repo, branch)
        except Exception as e:
            message = 'Unable to purge branch heads'
            detail = 'Unable to purge branch heads (repo: {}, branch: {}): {}'.format(
                repo, branch, str(e))
            logger.error(detail)
            raise AXApiInternalError(message, detail)
        else:
            logger.info('Successfully purged branch heads')
Example #15
def add_comment(issue_id):
    jira_client = Gateway.init_jira_client(gateway.axops_client)
    payload = get_json()
    comment = payload.get('comment', None)
    user = payload.get('user', None)

    if not comment:
        raise AXApiInvalidParam('Comment message is required')
    if not user:
        raise AXApiInvalidParam('Commenter info is required')

    try:
        jira_client.add_issue_comment(issue_id, comment, commenter=user)
    except Exception as exc:
        raise AXApiInternalError('Failed to add comment', detail=str(exc))
    return jsonify({})
Example #16
def add_policy():
    """Add artifact retention policy"""
    policy_json = request.get_json(force=True)
    if 'name' not in policy_json or 'policy' not in policy_json:
        raise AXApiInvalidParam('Missing required parameters', detail='Missing required parameters ("name", "policy")')
    description = policy_json.get('description')
    result = artifact_manager.add_retention_policy(tag_name=policy_json['name'], policy=policy_json['policy'], description=description)
    return jsonify(result)
Example #17
def update_fixture_instance(fixture_id):
    updates = get_json()
    if 'id' in updates and updates['id'] != fixture_id:
        raise AXApiInvalidParam("Fixture id cannot be updated")
    updates['id'] = fixture_id
    username = _get_user_context()[1]
    fixture = fixmgr.update_fixture_instance(updates, user=username)
    return jsonify(fixture.json())
Example #18
    def create_volume(self, volume_dict, internal=False):
        """Create a volume
        :param volume_dict: dictionary containing the parameters for creating the volume
        :param internal: if internal, bypass validation of create fields (enables creation of anonymous volumes)
        """
        if not internal:
            for key in set(volume_dict.keys()) - Volume.create_fields:
                del volume_dict[key]
        resource_id = volume_dict.get('resource_id')
        if resource_id:
            # We allow the ability to import an existing volume which bypasses the creation logic and sets the volume
            # immediately in an 'active' state. To do so, the incoming volume document will supply 'resource_id'
            volume_dict['status'] = VolumeStatus.ACTIVE

        storage_class_name = volume_dict.get('storage_class')
        if not storage_class_name:
            raise AXApiInvalidParam("Storage class unspecified")
        storage_class = self.get_storage_class_by_name(storage_class_name)
        if not storage_class:
            raise AXApiInvalidParam(
                "Storage class '{}' does not exist".format(storage_class_name))

        # Fill in storage class & provider details into the volume attributes
        volume_dict['storage_class_id'] = storage_class.id
        storage_class_params = copy.deepcopy(
            storage_class.parameters[self.cloud_provider])
        volume_dict['storage_provider'] = storage_class_params.pop(
            'storage_provider_name')
        volume_dict['storage_provider_id'] = storage_class_params.pop(
            'storage_provider_id')
        volume = Volume(volume_dict)
        # override any user-supplied attributes that collide with the storage class' parameters
        volume.attributes.update(storage_class_params)
        with self._axrn_lock:
            existing = self.axdb_client.get_volume_by_axrn(volume.axrn)
            if existing:
                raise AXIllegalOperationException(
                    "Cannot create two volumes with same resource name: {}".
                    format(volume.axrn))
            self.axdb_client.create_volume(volume.axdbdoc())
            logger.info("Initialized %s:\n%s", volume,
                        pprint.pformat(volume.json()))
            if volume.status != VolumeStatus.ACTIVE:
                self.volume_work_q.put((QueuePriority.LOW, volume.id))
        return volume
Example #19
def get_commit(pk):
    """Get a single commit."""
    def get_commits_internal(commit_arg, repo_arg=None):
        """Get commit(s) by commit hash."""
        # Normally, this function should return only 1 commit object. However, if a repo and its forked repo
        # both appear in our workspace, there could be multiple commit objects.

        # If repo is not supplied, we need to scan all workspaces
        if repo_arg:
            _, vendor, repo_owner, repo_name = Gateway.parse_repo(repo_arg)
            workspaces = [
                '{}/{}/{}/{}'.format(Gateway.BASE_DIR, vendor, repo_owner,
                                     repo_name)
            ]
        else:
            dirs = [
                dir[0] for dir in os.walk(Gateway.BASE_DIR)
                if dir[0].endswith('/.git')
            ]
            workspaces = list(map(lambda v: v[:-5], dirs))

        commits = []
        with ThreadPoolExecutor(max_workers=20) as executor:
            futures = []
            for i in range(len(workspaces)):
                futures.append(
                    executor.submit(Gateway._get_commit,
                                    workspaces[i],
                                    commit=commit_arg))
            for future in as_completed(futures):
                try:
                    data = future.result()
                    if data:
                        commits.append(data)
                except Exception as e:
                    logger.warning(
                        'Unexpected exception occurred during processing: %s',
                        e)

        return commits

    repo = request.args.get('repo', "")
    if repo:
        repo = unquote(repo)
    logger.info('Retrieving commit (repo: %s, commit: %s) ...', repo, pk)
    commits_res = get_commits_internal(pk, repo)
    if not commits_res:
        logger.warning('Failed to retrieve commit')
        raise AXApiInvalidParam('Invalid revision',
                                detail='Invalid revision ({})'.format(pk))
    else:
        if len(commits_res) > 1:
            logger.warning(
                'Found multiple commits with given sha, returning the first one ...'
            )
        logger.info('Successfully retrieved commit')
        return jsonify(commits_res[0])
Example #20
    def validate_attribute(self, attribute, val):
        """Validate a single attribute value against the class definition"""
        try:
            validated_doc = self._attribute_schemas[attribute]({
                attribute: val
            })
            return validated_doc[attribute]
        except (KeyError, voluptuous.Error) as err:
            raise AXApiInvalidParam(humanize_error(str(err)))
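
A compact sketch of the per-attribute voluptuous schema lookup used above; the schema contents and attribute names here are hypothetical stand-ins for the real class definition:

import voluptuous

_attribute_schemas = {
    'memory_mb': voluptuous.Schema({'memory_mb': voluptuous.Coerce(int)}),
    'hostname': voluptuous.Schema({'hostname': str}),
}

# Validate a single attribute by wrapping it in a one-key document,
# mirroring validate_attribute() above.
validated_doc = _attribute_schemas['memory_mb']({'memory_mb': '2048'})
assert validated_doc['memory_mb'] == 2048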
Example #21
    def create(self, request):
        """
        :param request:
        :return:
        """
        if self.jira_client is None:
            self.jira_client = init_jira_client()

        logger.info('Received jira issue creation request (%s)', request.data)
        project = request.data.get('project', None)
        summary = request.data.get('summary', None)
        issuetype = request.data.get('issuetype', None)
        reporter = request.data.get('reporter', None)

        description = request.data.get('description', None)  # optional

        if project is None:
            raise AXApiInvalidParam('Missing required parameters: Project',
                                    detail='Missing required parameters, Project')
        if summary is None:
            raise AXApiInvalidParam('Missing required parameters: Summary',
                                    detail='Missing required parameters, Summary')
        if issuetype is None:
            raise AXApiInvalidParam('Missing required parameters: Issuetype',
                                    detail='Missing required parameters, Issuetype')
        if reporter is None:
            raise AXApiInvalidParam('Missing required parameters: Reporter',
                                    detail='Missing required parameters, Reporter')

        try:
            issue_obj = self.jira_client.create_issue(project,
                                                      summary,
                                                      issuetype=issuetype,
                                                      reporter=reporter,
                                                      description=description)
        except jira.exceptions.JIRAError as exc:
            raise AXApiInternalError('Invalid Parameters', detail=str(exc))
        else:
            issue_dict = copy.deepcopy(issue_obj.raw['fields'])
            issue_dict['url'] = issue_obj.self
            issue_dict['id'] = issue_obj.id
            issue_dict['key'] = issue_obj.key
            return Response(issue_dict)
Example #22
def _update_retention(payload):
    """Update retention tag

    :param payload:
    :returns:
    """
    if 'retention_tag' not in payload or 'artifact_id' not in payload:
        raise AXApiInvalidParam('Missing required parameters', 'Must supply "retention_tag" and "artifact_id"')
    artifact_manager.update_artifact_retention_tag(payload['artifact_id'], payload['retention_tag'])
    return jsonify({})
Example #23
def _download_artifact(params):
    """Download an artifact

    :param params:
    :returns:
    """
    if 'artifact_id' in params:
        _params = {'artifact_id': params['artifact_id']}
    elif 'service_instance_id' in params:
        if 'name' in params:
            _params = {
                'service_instance_id': params['service_instance_id'],
                'name': params['name']
            }
        elif 'retention_tags' in params and params['retention_tags'] == RETENTION_TAG_USER_LOG:
            _params = {
                'service_instance_id': params['service_instance_id'],
                'retention_tags': params['retention_tags']
            }
        elif 'retention_tags' in params and params['retention_tags'] != RETENTION_TAG_USER_LOG:
            raise AXApiInvalidParam('Invalid parameter value',
                                    'Can only download {} artifacts when "name" is not supplied'.format(RETENTION_TAG_USER_LOG))
        else:
            raise AXApiInvalidParam('Missing required parameter',
                                    'Must supply either "name" or "retention_tags" when supplying "service_instance_id"')
    elif 'workflow_id' in params:
        if 'full_path' not in params or 'name' not in params:
            raise AXApiInvalidParam('Missing required parameter',
                                    'Must supply both "full_path" and "name" when supplying "workflow_id"')
        _params = {
            'workflow_id': params['workflow_id'],
            'full_path': params['full_path'],
            'name': params['name']
        }
    else:
        raise AXApiInvalidParam('Missing required parameter', 'Must supply "artifact_id", "service_instance_id", or "workflow_id"')
    location, content = artifact_manager.download_artifact_by_query(**_params)
    if location:
        return redirect(location, code=302)
    if content:
        return Response(content)
    else:
        raise AXApiInternalError('Internal Error')
Example #24
def _browse_artifact(params):
    """Browse an artifact

    :param params:
    :returns:
    """
    if 'artifact_id' not in params:
        raise AXApiInvalidParam('Missing required parameter', 'Missing required parameter (artifact_id)')
    structure = artifact_manager.browse_artifact(request.args['artifact_id'])
    return jsonify(structure)
Example #25
def _retrieve_artifact(params):
    """Retrieve an artifact

    :param params:
    :returns:
    """
    if 'artifact_id' not in params:
        raise AXApiInvalidParam('Missing required parameter', 'Missing required parameter (artifact_id)')
    artifact = artifact_manager.get_artifact(request.args['artifact_id'])
    return jsonify(artifact)
Example #26
def get_artifacts():
    """Search/retrieve/browse/download artifact(s)"""
    action = request.args.get('action')
    if not action:
        raise AXApiInvalidParam('Missing required parameter', 'Missing required parameter (action)')
    if action == 'search':
        return _search_artifacts(request.args)
    elif action == 'retrieve':
        return _retrieve_artifact(request.args)
    elif action == 'browse':
        return _browse_artifact(request.args)
    elif action == 'download':
        return _download_artifact(request.args)
    elif action == 'list_tags':
        return _list_tags(request.args)
    elif action == 'get_usage':
        return _get_usage()
    else:
        raise AXApiInvalidParam('Invalid parameter value', 'Unsupported action ({})'.format(action))
Example #27
    def __init__(self, request):
        """
        :param request: fixture request dictionary
        """
        request = copy.deepcopy(request)
        try:
            request = request_schema(request)
        except voluptuous.Error as err:
            raise AXApiInvalidParam(humanize_error(str(err)))
        self.service_id = request['service_id']
        self.application_id = request['application_id'] or None
        self.application_name = request['application_name'] or None
        self.application_generation = request['application_generation'] or None
        self.deployment_name = request['deployment_name'] or None
        self.requester = request['requester']
        self.user = request['user']
        self.root_workflow_id = request['root_workflow_id']
        self.assignment = request['assignment']
        self.vol_assignment = request['vol_assignment']
        self.request_time = int(request['request_time']) if request['request_time'] else int(time.time() * 1e6)
        self.assignment_time = int(request['assignment_time']) if request['assignment_time'] else None
        # Validate/normalize the requirements
        self.requirements = {}
        for req_name, req in request['requirements'].items():
            if 'name' not in req and 'class' not in req and not req.get('attributes'):
                raise AXApiInvalidParam("name, class and/or attributes not specified in request of {}".format(req_name))
            normalized_req = {}
            for attr_name, req_value in req.items():
                normalized_req[attr_name.lower()] = req_value
            self.requirements[req_name] = normalized_req

        for req_name, req in request['vol_requirements'].items():
            if 'axrn' in req:
                # named volume request
                continue
            # anonymous volume request
            if not self.user:
                raise AXApiInvalidParam("Username must be supplied when requesting anonymous volumes")
            if self.requester == FIX_REQUESTER_AXAMM and (not self.application_name or not self.deployment_name):
                raise AXApiInvalidParam("Anonymous volume requests for deployments must supply application and deployment name")
            if not req.get('storage_class'):
                raise AXApiInvalidParam("Anonymous volume request of '{}' did not specify 'storage_class'".format(req_name))
            if not req.get('size_gb'):
                raise AXApiInvalidParam("Anonymous volume request of '{}' did not specify 'size_gb'".format(req_name))
            try:
                int(req['size_gb'])
            except ValueError:
                raise AXApiInvalidParam("Anonymous volume request of '{}' specified non numeric value for 'size_gb'".format(req_name))
        self.vol_requirements = request['vol_requirements']
        if not self.requirements and not self.vol_requirements:
            raise AXApiInvalidParam("Fixture request had no requirements or vol_requirements")

        self.notification_channel = "notification:{}".format(self.service_id)
Example #28
def update_artifacts():
    """Delete/restore/tag/untag artifact(s)"""
    payload = request.get_json(force=True)
    action = payload.get('action')
    if not action:
        raise AXApiInvalidParam('Missing required parameter', 'Missing required parameter (action)')
    if action == 'delete':
        return _delete_artifacts(payload)
    elif action == 'restore':
        return _restore_artifacts(payload)
    elif action == 'tag':
        return _tag_artifacts(payload)
    elif action == 'untag':
        return _untag_artifacts(payload)
    elif action == 'update_retention':
        return _update_retention(payload)
    elif action == 'clean':
        return _clean_artifacts()
    else:
        raise AXApiInvalidParam('Invalid parameter value', 'Unsupported action ({})'.format(action))
Example #29
    def _validate_fixture_request(self, fix_req):
        """Validates a fixture request by checking the current inventory of fixtures and volumes to ensure we can satisfy the request.
        :raises AXApiInvalidParam if request was invalid, or AXApiResourceNotFound if request was valid but could not be satisfied"""
        # Validate that the request (ensure attributes are valid)
        for requirement in fix_req.requirements.values():
            if 'class' not in requirement:
                continue
            try:
                fix_class = self.fixmgr.get_fixture_class(
                    name=requirement['class'])
            except AXApiResourceNotFound as err:
                raise AXApiInvalidParam(err.args[0])
            for attr_name in requirement.get('attributes', {}).keys():
                if attr_name not in fix_class.attributes:
                    raise AXApiInvalidParam(
                        "Fixture class {} does not have attribute {}".format(
                            requirement['class'], attr_name))

        if fix_req.requirements:
            # See if assignment is even possible given current inventory of fixtures.
            # If we cannot satisfy the request, we will reject the request, since it will never be assigned
            # (unless fixtures are added)
            self._find_candidates(fix_req.requirements, validate_request=True)

        # Do the same for volumes
        if fix_req.vol_requirements:
            self.fixmgr.volumemgr.find_named_volume_candidates(
                fix_req, validate_request=True)
            for vol_requirement in fix_req.vol_requirements.values():
                if not vol_requirement.get('axrn'):
                    # anonymous volume request. verify storage_class exists
                    storage_class_name = vol_requirement.get('storage_class')
                    if not storage_class_name:
                        raise AXApiInvalidParam(
                            "Volume request did not supply axrn or storage class"
                        )
                    if not self.fixmgr.volumemgr.get_storage_class_by_name(
                            storage_class_name):
                        raise AXApiInvalidParam(
                            "Storage class '{}' does not exist".format(
                                storage_class_name))
Example #30
    def provision_anonymous_volumes(self, fix_req, assignment):
        """Creates anonymous volumes requested by the fixture request and updates the assignment dictionary with the created/assigned volumes.

        :param fix_req: fixture request whose vol_requirements map requirement_name (in the service template) to a request dictionary
        :param assignment: assignment dictionary to update with the assignment
        :returns: modified assignment dictionary with the created volumes, or None if the requirements could not be satisfied
        """
        for ref_name, requirements in fix_req.vol_requirements.items():
            if requirements.get('axrn'):
                # skip named volume requests
                continue
            # NOTE: we passed validation already, by virtue of having a FixtureRequest object, so can safely access these fields
            size_gb = requirements['size_gb']
            storage_class_name = requirements['storage_class']
            storage_class = self.get_storage_class_by_name(storage_class_name)
            if not storage_class:
                # This should not happen since we should have already validated the storage class in create_fixture_request
                raise AXApiInvalidParam(
                    "Storage class '{}' does not exist".format(
                        storage_class_name))
            axrn = anonymous_volume_axrn(fix_req, ref_name)
            volume = self.get_volume_by_axrn(axrn, verify_exists=False)
            if volume:
                # An existing anonymous volume can happen if:
                # 1) we create an anonymous volume
                # 2) crash before we notify the requestor about the created & assigned anonymous volumes
                # 3) start fixture manager again and process the request
                logger.warning(
                    "Processing a fixture request for anonymous volume which already exists:\n%s",
                    pprint.pformat(volume.json()))
                if not volume.has_referrer(fix_req.service_id):
                    # This should theoretically never happen because we mutate the axrn when we delete volumes
                    # so we would not have found the previous anonymous volume during the call to get_volume_by_axrn().
                    raise AXException(
                        "Anonymous {} already exists without {} referrer".
                        format(volume, fix_req.service_id))
            else:
                # NOTE: make owner/creator ids the same as submitter?
                create_payload = {
                    "name": None,
                    "anonymous": True,
                    "axrn": axrn,
                    "storage_class": storage_class.name,
                    "owner": fix_req.user,
                    "creator": fix_req.user,
                    "attributes": {
                        'size_gb': size_gb
                    },
                    "referrers": [fix_req.referrer()]
                }
                volume = self.create_volume(create_payload, internal=True)
                assignment[ref_name] = volume
        return assignment