Example 1
    def recover_step(self, message: dict):
        ''' given an execution id and a pointer from the persistent storage,
        return the associated process node to continue its execution '''
        try:
            pointer = Pointer.get_or_exception(message['pointer_id'])
            if pointer.status != 'ongoing':
                raise ModelNotFoundError(
                    'Specified pointer never existed, and never will', )

        except ModelNotFoundError:
            raise InconsistentState('Queued dead pointer')

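        # resolve the user that requested this step; the special '__system__' user is created on demand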
        user = User.get_by('identifier', message.get('user_identifier'))

        if user is None:
            if message.get('user_identifier') == '__system__':
                user = User(identifier='__system__', fullname='System').save()
            else:
                raise InconsistentState('sent identifier of nonexistent user')

        return (
            pointer,
            user,
            message['input'],
        )
Example 2
def execution_list():
    collection = mongo.db[app.config['EXECUTION_COLLECTION']]

    dict_args = request.args.to_dict()

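    # keep only the query args that are allowed as filters on the collection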
    query = dict((k, dict_args[k]) for k in dict_args
                 if k not in app.config['INVALID_FILTERS'])

    # filter for user_identifier
    user_identifier = query.pop('user_identifier', None)
    if user_identifier is not None:
        user = User.get_by('identifier', user_identifier)
        if user is not None:
            execution_list = [item.id for item in user.proxy.activities.get()]
        else:
            execution_list = []
        query['id'] = {
            '$in': execution_list,
        }

    return jsonify({
        "data":
        list(
            map(
                json_prepare,
                collection.find(query).sort([
                    ('started_at', pymongo.DESCENDING)
                ]).skip(g.offset).limit(g.limit))),
    })
Example 3
    def wrapper(*args, **kwargs):
        if request.authorization is None:
            raise Unauthorized([{
                'detail': 'You must provide basic authorization headers',
                'where': 'request.authorization',
            }])

        identifier = request.authorization['username']
        token = request.authorization['password']

        user = User.get_by('identifier', identifier)
        token = Token.get_by('token', token)

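        # credentials are valid only if both records exist and the token belongs to that user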
        if (
            user is None or token is None or
            token.proxy.user.get().id != user.id
        ):
            raise Unauthorized([{
                'detail': 'Your credentials are invalid, sorry',
                'where': 'request.authorization',
            }])

        g.user = user

        return view(*args, **kwargs)
Example 4
    def get_invalid_users(self, node_state):
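        ''' return the User objects of every actor whose state in this node
        is marked as invalid '''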
        users = [
            identifier
            for identifier, actor in node_state['actors']['items'].items()
            if actor['state'] == 'invalid'
        ]

        return list(map(lambda u: User.get_by('identifier', u), users))
Example 5
def execution_add_user(id):
    ''' adds the user as a candidate for solving the given node, only if the
    node has an active pointer. '''
    # TODO possible race condition introduced here. How does this code work in
    # case the handler is moving the pointer?

    # get execution
    execution = Execution.get_or_exception(id)

    # validate the members needed
    validate_json(request.json, ['identifier', 'node_id'])

    identifier = request.json['identifier']
    node_id = request.json['node_id']

    # get actual pointer
    try:
        pointer = next(execution.proxy.pointers.q().filter(node_id=node_id))
    except StopIteration:
        raise BadRequest([{
            'detail': f'{node_id} does not have a live pointer',
            'code': 'validation.no_live_pointer',
            'where': 'request.body.node_id',
        }])

    # get user
    user = User.get_by('identifier', identifier)
    if user is None:
        raise InvalidInputError('user_id', 'request.body.identifier')

    # update user
    user.proxy.tasks.add(pointer)

    # update pointer
    collection = mongo.db[app.config['POINTER_COLLECTION']]
    db_pointer = collection.find_one({'id': pointer.id})
    user_json = user.to_json()
    notified_users = db_pointer.get('notified_users', [])

    if user_json not in notified_users:
        notified_users.append(user_json)

    collection.update_one(
        {'id': pointer.id},
        {'$set': {
            'notified_users': notified_users
        }},
    )

    return jsonify(user_json), 200
Example 6
def fetch_user_info(user_identifier):
    user = User.get_by('identifier', user_identifier)

    if user is None:
        flask.abort(404)

    return flask.make_response(
        flask.jsonify({
            'identifier': user.get_contact_info('identifier'),
            'fullname': user.get_contact_info('fullname'),
            'email': user.get_contact_info('email'),
        }),
        http.HTTPStatus.OK,  # 200
    )
Example 7
def whoami():
    identifier = flask.request.authorization['username']
    token = flask.request.authorization['password']

    user = User.get_by('identifier', identifier)
    token = Token.get_by('token', token)

    if user is None or \
       token is None or \
       token.user.get().id != user.id:
        raise Unauthorized([{
            'detail': 'Your credentials are invalid, sorry',
            'where': 'request.authorization',
        }])

    return flask.jsonify({
        'data': user.to_json(),
    })
Example 8
    def authenticate(self, **credentials):
        if 'username' not in credentials:
            raise AuthFieldRequired('username')
        if 'password' not in credentials:
            raise AuthFieldRequired('password')

        # fetch the redis mirror user; credentials are invalid if it does not exist
        user = User.get_by('identifier', credentials['username'])

        if not user:
            raise AuthFieldInvalid('username')

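        # every user is verified against the single configured impersonation password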
        verified = pbkdf2_sha256.verify(
            credentials['password'],
            self.config['IMPERSONATE_PASSWORD'],
        )

        if not verified:
            raise AuthFieldInvalid('password')

        return user.identifier, user.to_json()
Example 9
def all_logs():
    collection = mongo.db[app.config['POINTER_COLLECTION']]

    dict_args = request.args.to_dict()

    query = dict((k, dict_args[k]) for k in dict_args
                 if k not in app.config['INVALID_FILTERS'])

    # filter for user_identifier
    user_identifier = query.pop('user_identifier', None)
    if user_identifier is not None:
        user = User.get_by('identifier', user_identifier)
        if user is not None:
            pointer_list = [item.id for item in user.proxy.tasks.get()]
        else:
            pointer_list = []
        query['id'] = {
            '$in': pointer_list,
        }

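    # keep only the most recent pointer of each execution, then sort and paginate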
    pipeline = [
        {
            '$match': query
        },
        {
            '$sort': {
                'started_at': -1
            }
        },
        {
            '$group': {
                '_id': '$execution.id',
                'latest': {
                    '$first': '$$ROOT'
                },
            }
        },
        {
            '$replaceRoot': {
                'newRoot': '$latest'
            }
        },
        {
            '$sort': {
                'started_at': -1
            }
        },
        {
            '$skip': g.offset
        },
        {
            '$limit': g.limit
        },
    ]

    return jsonify({
        'data':
        list(map(
            json_prepare,
            collection.aggregate(pipeline),
        )),
    })
Example 10
def data_mix():
    dict_args = request.args.to_dict()

    # get queries

    # execution's query
    exe_query = dict((k, dict_args[k]) for k in dict_args
                     if k not in app.config['INVALID_FILTERS'])

    # get pointer's query
    ptr_query = {}
    for item in exe_query.copy():
        if item.startswith('pointer.'):
            group, value = item.split('.', 1)
            ptr_query[value] = exe_query.pop(item)

    # filter for exclude/include
    exclude_fields = exe_query.pop('exclude', '')
    exclude_list = [s.strip() for s in exclude_fields.split(',') if s]
    exclude_map = {item: 0 for item in exclude_list}

    include_fields = exe_query.pop('include', '')
    include_list = [s.strip() for s in include_fields.split(',') if s]
    include_map = {item: 1 for item in include_list}

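    # when both are given, the include list takes precedence over the exclude list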
    prjct = {**include_map} or {**exclude_map}

    # filter for user_identifier
    user_identifier = exe_query.pop('user_identifier', None)
    if user_identifier is not None:
        user = User.get_by('identifier', user_identifier)
        if user is not None:
            execution_list = [item.id for item in user.proxy.activities.get()]

            for item in user.proxy.tasks.get():
                execution_list.append(item.execution)
        else:
            execution_list = []

        exe_query['id'] = {
            '$in': execution_list,
        }

    # pipeline
    # all special cases should be handled before this

    # execution's case
    exe_pipeline = [
        {
            '$match': exe_query
        },
    ]

    exe_collection = mongo.db[app.config['EXECUTION_COLLECTION']]
    exe_cursor = exe_collection.aggregate(exe_pipeline)

    exe_ids = list(map(
        lambda item: item['id'],
        exe_cursor,
    ))

    # pointer's case
    ptr_pipeline = [
        {
            '$match': ptr_query
        },
        {
            '$group': {
                '_id': None,
                'executions': {
                    '$push': '$execution.id'
                },
            }
        },
    ]

    ptr_collection = mongo.db[app.config['POINTER_COLLECTION']]
    ptr_cursor = ptr_collection.aggregate(ptr_pipeline)

    ptr_ids = []
    for item in ptr_cursor:
        ptr_ids += item['executions']

    # mix both lists

    def intersection(lst1, lst2):
        temp = set(lst2)
        lst3 = [value for value in lst1 if value in temp]
        return lst3

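    # intersect only when both filters yielded ids; otherwise keep whichever list is non-empty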
    if exe_ids and ptr_ids:
        execution_ids = intersection(exe_ids, ptr_ids)
    else:
        execution_ids = exe_ids or ptr_ids

    # build results
    ptr_pipeline = [
        {
            '$match': {
                'execution.id': {
                    '$in': execution_ids
                }
            }
        },
        {
            '$sort': {
                'started_at': -1
            }
        },
        {
            '$group': {
                '_id': '$execution.id',
                'latest': {
                    '$first': '$$ROOT'
                },
            }
        },
        {
            '$replaceRoot': {
                'newRoot': '$latest'
            }
        },
        # TODO: DO NOT CREATE COLLECTION
        {
            '$out': 'ptr_aux_collection'
        },
    ]

    ptr_collection.aggregate(ptr_pipeline)

    exe_pipeline = [
        {
            '$match': {
                'id': {
                    '$in': execution_ids
                }
            }
        },
        # TODO: FIND ANOTHER WAY TO ADD POINTER
        {
            '$lookup': {
                'from': 'ptr_aux_collection',
                'localField': 'id',
                'foreignField': 'execution.id',
                'as': 'pointer',
            }
        },
        {
            '$sort': {
                'started_at': -1
            }
        },
        {
            '$skip': g.offset
        },
        {
            '$limit': g.limit
        },
    ]

    if prjct:
        # TODO: THE ABOVE LOOKUP IS REQUIRED TO USE include/exclude=pointer.foo
        exe_pipeline.append({'$project': prjct})

    def data_mix_json_prepare(obj):
        if obj.get('pointer') is not None:
            try:
                obj['pointer'] = obj['pointer'][0]
                obj['pointer'].pop('execution', {})
                obj['pointer'] = json_prepare(obj['pointer'])
            except IndexError:
                obj['pointer'] = None
        return json_prepare(obj)

    return jsonify({
        'data':
        list(
            map(
                data_mix_json_prepare,
                exe_collection.aggregate(exe_pipeline),
            ))
    })
Example 11
def data_mix():
    dict_args = request.args.to_dict()

    # get queries
    def format_query(q):
        try:
            formated_q = json.loads(q)
        except JSONDecodeError:
            formated_q = q
        return formated_q

    # execution's query
    exe_query = dict((k, format_query(v)) for k, v in dict_args.items()
                     if k not in app.config['INVALID_FILTERS'])

    # get pointer's query
    ptr_query = {}
    for item in copy.deepcopy(exe_query):
        if item.startswith('pointer.'):
            ptr_key = item.split('.', 1)[1]

            ptr_query[ptr_key] = exe_query.pop(item)

    # filter for exclude/include
    exclude_fields = exe_query.pop('exclude', '')
    exclude_list = [s.strip() for s in exclude_fields.split(',') if s]
    exclude_map = {item: 0 for item in exclude_list}

    include_fields = exe_query.pop('include', '')
    include_list = [s.strip() for s in include_fields.split(',') if s]
    include_map = {item: 1 for item in include_list}

    prjct = {**include_map} or {**exclude_map}

    # filter for exe_id
    if exe_query.get('id'):
        exe_id = {exe_query.get('id')}
    else:
        exe_id = None

    # filter for user_identifier
    user_identifier = exe_query.pop('user_identifier', None)
    if user_identifier is not None:
        user = User.get_by('identifier', user_identifier)
        # early return
        if user is None:
            return jsonify({'data': []})

        uid_exe_set = {item.id
                       for item in user.proxy.activities.get()
                       } | {item.execution
                            for item in user.proxy.tasks.get()}

        if exe_id is not None:
            exe_id &= uid_exe_set
        else:
            exe_id = uid_exe_set

    # filter for actor_identifier
    actor_identifier = exe_query.pop('actor_identifier', None)
    if actor_identifier is not None:
        collection = mongo.db[app.config['EXECUTION_COLLECTION']]
        cursor = collection.aggregate([
            {
                '$match': {
                    'state.item_order': {
                        '$exists': True,
                        '$nin': [None, {}],
                    },
                    'actors': {
                        '$exists': True,
                    },
                }
            },
            {
                '$project': {
                    '_id': 0,
                    'id': 1,
                    'state.item_order': 1,
                    'actors': 1,
                }
            },
        ])

        aid_exe_set = set()
        for doc in cursor:
            key_list = doc['state']['item_order']
            for key in key_list:
                an_actor = doc['actors'].get(key)
                if an_actor and an_actor == actor_identifier:
                    aid_exe_set.add(doc['id'])

        # early return
        if not aid_exe_set:
            return jsonify({'data': []})

        if exe_id is not None:
            exe_id &= aid_exe_set
        else:
            exe_id = aid_exe_set

    # filter for sorting
    sort_query = exe_query.pop('sort', None)
    if sort_query and sort_query.split(',', 1)[0]:
        try:
            key, order = sort_query.split(',', 1)
        except ValueError:
            key, order = sort_query, 'ASCENDING'

        if order not in ['ASCENDING', 'DESCENDING']:
            order = 'ASCENDING'

        order = getattr(pymongo, order)
        srt = {'$sort': {key: order}}
    else:
        srt = {'$sort': {'started_at': -1}}

    # pipeline
    # all special cases should be handled before this

    # pointer's case
    if ptr_query:
        ptr_pipeline = [
            {
                '$match': ptr_query
            },
            {
                '$group': {
                    '_id': None,
                    'executions': {
                        '$push': '$execution.id'
                    },
                }
            },
        ]

        ptr_collection = mongo.db[app.config['POINTER_COLLECTION']]
        ptr_cursor = ptr_collection.aggregate(ptr_pipeline)

        ptr_exe_ids = set()
        for item in ptr_cursor:
            ptr_exe_ids |= set(item['executions'])

        if exe_id is not None:
            exe_id &= ptr_exe_ids
        else:
            exe_id = ptr_exe_ids

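    # exe_id is a set only if at least one of the id/user/actor/pointer filters above applied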
    if isinstance(exe_id, set):
        exe_query['id'] = {
            '$in': list(exe_id),
        }

    # execution's case
    exe_pipeline = [
        {
            '$match': exe_query
        },
        {
            '$project': {
                '_id': 0,
                'id': 1,
            }
        },
    ]

    exe_collection = mongo.db[app.config['EXECUTION_COLLECTION']]
    exe_cursor = exe_collection.aggregate(exe_pipeline)

    execution_ids = list(map(
        lambda item: item['id'],
        exe_cursor,
    ))

    # build results
    ptr_lookup = {
        'from': app.config['POINTER_COLLECTION'],
        'localField': 'id',
        'foreignField': 'execution.id',
        'as': 'pointer',
    }

    exe_pipeline = [
        {
            '$match': {
                'id': {
                    '$in': execution_ids
                }
            }
        },
        {
            '$lookup': ptr_lookup
        },
        {
            '$project': {
                'pointer.execution': 0
            }
        },
        srt,
        {
            '$skip': g.offset
        },
        {
            '$limit': g.limit
        },
    ]

    if prjct:
        exe_pipeline.append({'$project': prjct})

    def data_mix_json_prepare(obj):
        if 'pointer' in obj and obj['pointer']:
            obj['pointer'] = json_prepare(obj['pointer'][-1])
            obj['pointer'].pop('execution', None)
        else:
            obj.pop('pointer', None)
        return json_prepare(obj)

    return jsonify({
        'data':
        list(
            map(
                data_mix_json_prepare,
                exe_collection.aggregate(exe_pipeline, allowDiskUse=True),
            ))
    })
Example 12
def execution_list():
    dict_args = request.args.to_dict()

    # get queries
    def format_query(q):
        try:
            formated_q = json.loads(q)
        except JSONDecodeError:
            formated_q = q
        return formated_q

    # format query
    exe_query = dict((k, format_query(v)) for k, v in dict_args.items()
                     if k not in app.config['INVALID_FILTERS'])

    # sort
    srt = {'$sort': {'started_at': -1}}
    sort_query = exe_query.pop('sort', None)
    if sort_query and sort_query.split(',', 1)[0]:
        try:
            key, order = sort_query.split(',', 1)
        except ValueError:
            key, order = sort_query, 'ASCENDING'

        if order not in ['ASCENDING', 'DESCENDING']:
            order = 'ASCENDING'

        order = getattr(pymongo, order)
        srt = {'$sort': {key: order}}

    # filter for user_identifier
    user_identifier = exe_query.pop('user_identifier', None)
    if user_identifier is not None:
        user = User.get_by('identifier', user_identifier)
        if user is not None:
            execution_list = [item.id for item in user.proxy.activities.get()]
        else:
            execution_list = []
        exe_query['id'] = {
            '$in': execution_list,
        }

    # filter for exclude/include
    exclude_fields = exe_query.pop('exclude', '')
    exclude_list = [s.strip() for s in exclude_fields.split(',') if s]
    exclude_map = {item: 0 for item in exclude_list}

    include_fields = exe_query.pop('include', '')
    include_list = [s.strip() for s in include_fields.split(',') if s]
    include_map = {item: 1 for item in include_list}

    # store project for future use
    prjct = {**include_map} or {**exclude_map}

    exe_pipeline = [
        {
            '$match': exe_query
        },
        srt,
        {
            '$skip': g.offset
        },
        {
            '$limit': g.limit
        },
    ]

    if prjct:
        exe_pipeline.append({'$project': prjct})

    exe_collection = mongo.db[app.config['EXECUTION_COLLECTION']]
    return jsonify({
        "data":
        list(
            map(
                json_prepare,
                exe_collection.aggregate(exe_pipeline, allowDiskUse=True),
            )),
    })
Example 13
    def patch(self, message):
        execution = Execution.get_or_exception(message['execution_id'])
        if execution.status != 'ongoing':
            raise ModelNotFoundError(
                'Specified execution never existed, and never will', )

        xml = Xml.load(self.config, execution.process_name, direct=True)

        # mark nodes with ongoing pointers as unfilled and cancel those pointers
        updates = {}

        user = User.get_by(
            'identifier',
            message.get('user_identifier'),
        )

        if user is None:
            if message.get('user_identifier') == '__system__':
                user = User(identifier='__system__', fullname='System').save()
            else:
                raise InconsistentState('sent identifier of nonexistent user')

        for pointer in execution.pointers.q().filter(status='ongoing'):
            updates['state.items.{node}.state'.format(
                node=pointer.node_id, )] = 'unfilled'
            pointer.status = 'cancelled'
            pointer.finished_at = datetime.now()
            pointer.save()

            self.pointer_collection().update_one({
                'id': pointer.id,
            }, {
                '$set': {
                    'state': 'cancelled',
                    'finished_at': pointer.finished_at,
                    'patch': {
                        'comment':
                        message['comment'],
                        'inputs':
                        message['inputs'],
                        'actor':
                        user.to_json(include=[
                            '_type',
                            'fullname',
                            'identifier',
                        ]),
                    },
                },
            })

        self.execution_collection().update_one({
            'id': execution.id,
        }, {
            '$set': updates,
        })

        # retrieve updated state
        state = next(self.execution_collection().find({'id': execution.id}))

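        # compute the updates that mark nodes depending on the patched inputs as invalid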
        state_updates, array_filters = cascade_invalidate(
            xml, state, message['inputs'], message['comment'])

        # update state
        self.execution_collection().update_one(
            {'id': state['id']},
            {'$set': state_updates},
            array_filters=array_filters,
        )

        # retrieve updated state
        state = next(self.execution_collection().find({'id': execution.id}))

        first_invalid_node = track_next_node(xml, state, self.get_mongo(),
                                             self.config)

        # wakeup and start execution from the found invalid node
        self.wakeup_and_notify(first_invalid_node, execution, state)