Example No. 1
    def recover_step(self, message: dict):
        ''' given an execution id and a pointer from the persistent storage,
        return the associated process node to continue its execution '''
        try:
            pointer = Pointer.get_or_exception(message['pointer_id'])
            if pointer.status != 'ongoing':
                raise ModelNotFoundError(
                    'Specified pointer never existed, and never will', )

        except ModelNotFoundError:
            raise InconsistentState('Queued dead pointer')

        user = User.get_by('identifier', message.get('user_identifier'))

        if user is None:
            if message.get('user_identifier') == '__system__':
                user = User(identifier='__system__', fullname='System').save()
            else:
                raise InconsistentState('sent identifier of nonexistent user')

        return (
            pointer,
            user,
            message['input'],
        )
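
A minimal sketch of the queue message this method consumes, inferred only from the keys read above; the concrete values are hypothetical:

# Hypothetical message (shape inferred from recover_step above; values are illustrative).
message = {
    'pointer_id': 'some-pointer-id',  # id of a Pointer in 'ongoing' status
    'user_identifier': '__system__',  # or the identifier of an existing User
    'input': [],                      # form input forwarded to the node
}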
Example No. 2
def get_or_create(identifier, data):
    identifier = clear_username(identifier)
    data['identifier'] = identifier

    try:
        return User.get_by_or_exception('identifier', identifier)
    except ModelNotFoundError:
        return User(**data).save()
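
A short usage sketch, assuming the project's User model and storage backend are configured; the identifier and data below are made up:

# Hypothetical call: returns the existing user, or creates one from `data`.
user = get_or_create('jane.doe', {'fullname': 'Jane Doe', 'email': 'jane@example.com'})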
Example No. 3
def execution_list():
    collection = mongo.db[app.config['EXECUTION_COLLECTION']]

    dict_args = request.args.to_dict()

    query = dict((k, dict_args[k]) for k in dict_args
                 if k not in app.config['INVALID_FILTERS'])

    # filter for user_identifier
    user_identifier = query.pop('user_identifier', None)
    if user_identifier is not None:
        user = User.get_by('identifier', user_identifier)
        if user is not None:
            execution_list = [item.id for item in user.proxy.activities.get()]
        else:
            execution_list = []
        query['id'] = {
            '$in': execution_list,
        }

    return jsonify({
        "data":
        list(
            map(
                json_prepare,
                collection.find(query).sort([
                    ('started_at', pymongo.DESCENDING)
                ]).skip(g.offset).limit(g.limit))),
    })
Example No. 4
    def wrapper(*args, **kwargs):
        if request.authorization is None:
            raise Unauthorized([{
                'detail': 'You must provide basic authorization headers',
                'where': 'request.authorization',
            }])

        identifier = request.authorization['username']
        token = request.authorization['password']

        user = User.get_by('identifier', identifier)
        token = Token.get_by('token', token)

        if (
            user is None or token is None or
            token.proxy.user.get().id != user.id
        ):
            raise Unauthorized([{
                'detail': 'Your credentials are invalid, sorry',
                'where': 'request.authorization',
            }])

        g.user = user

        return view(*args, **kwargs)
Example No. 5
    def get_invalid_users(self, node_state):
        users = [
            identifier
            for identifier, actor in node_state['actors']['items'].items()
            if actor['state'] == 'invalid'
        ]

        return list(map(lambda u: User.get_by('identifier', u), users))
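
For reference, a sketch of the node_state shape this method expects, inferred from the comprehension above; the identifiers are illustrative:

# Hypothetical node_state: only actors whose state is 'invalid' are returned.
node_state = {
    'actors': {
        'items': {
            'juan': {'state': 'invalid'},   # would be included
            'maria': {'state': 'valid'},    # would be skipped
        },
    },
}
# self.get_invalid_users(node_state) would then return the User record for 'juan' only.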
Example No. 6
def fetch_user_info(user_identifier):
    user = User.get_by('identifier', user_identifier)

    if user is None:
        flask.abort(404)

    return flask.make_response(
        flask.jsonify({
            'identifier': user.get_contact_info('identifier'),
            'fullname': user.get_contact_info('fullname'),
            'email': user.get_contact_info('email'),
        }),
        http.HTTPStatus.OK,  # 200
    )
Example No. 7
def execution_add_user(id):
    ''' adds the user as a candidate for solving the given node, only if the
    node has an active pointer. '''
    # TODO possible race condition introduced here. How does this code work in
    # case the handler is moving the pointer?

    # get execution
    execution = Execution.get_or_exception(id)

    # validate the members needed
    validate_json(request.json, ['identifier', 'node_id'])

    identifier = request.json['identifier']
    node_id = request.json['node_id']

    # get actual pointer
    try:
        pointer = next(execution.proxy.pointers.q().filter(node_id=node_id))
    except StopIteration:
        raise BadRequest([{
            'detail': f'{node_id} does not have a live pointer',
            'code': 'validation.no_live_pointer',
            'where': 'request.body.node_id',
        }])

    # get user
    user = User.get_by('identifier', identifier)
    if user is None:
        raise InvalidInputError('user_id', 'request.body.identifier')

    # update user
    user.proxy.tasks.add(pointer)

    # update pointer
    collection = mongo.db[app.config['POINTER_COLLECTION']]
    db_pointer = collection.find_one({'id': pointer.id})
    user_json = user.to_json()
    notified_users = db_pointer.get('notified_users', [])

    if user_json not in notified_users:
        notified_users.append(user.to_json())

    collection.update_one(
        {'id': pointer.id},
        {'$set': {
            'notified_users': notified_users
        }},
    )

    return jsonify(user_json), 200
Example No. 8
def test_backref_backend(config):
    user = User(identifier='juan').save()
    br = BackrefHierarchyProvider(config)

    users = br.find_users(identifier='juan')

    assert len(users) == 1

    user = users[0]

    assert user[0] == 'juan'
    assert user[1] == {
        'identifier': 'juan',
        'email': 'juan',
        'fullname': 'juan',
    }
Example No. 9
def whoami():
    identifier = flask.request.authorization['username']
    token = flask.request.authorization['password']

    user = User.get_by('identifier', identifier)
    token = Token.get_by('token', token)

    if user is None or \
       token is None or \
       token.user.get().id != user.id:
        raise Unauthorized([{
            'detail': 'Your credentials are invalid, sorry',
            'where': 'request.authorization',
        }])

    return flask.jsonify({
        'data': user.to_json(),
    })
Example No. 10
    def authenticate(self, **credentials):
        if 'username' not in credentials:
            raise AuthFieldRequired('username')
        if 'password' not in credentials:
            raise AuthFieldRequired('password')

        # fetch the redis mirror user; if it does not exist the credentials are invalid
        user = User.get_by('identifier', credentials['username'])

        if not user:
            raise AuthFieldInvalid('username')

        verified = pbkdf2_sha256.verify(
            credentials['password'],
            self.config['IMPERSONATE_PASSWORD'],
        )

        if not verified:
            raise AuthFieldInvalid('password')

        return user.identifier, user.to_json()
Example No. 11
    def get_contact_channels(self, user: User):
        return [('email', {
            'recipient': user.get_contact_info('email'),
            'subject': '[procesos] Tarea asignada',
            'template': 'assigned-task.html',
        })]
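
A hedged sketch of how a notification dispatcher could consume these channel tuples; `provider` and `send_email` are placeholders, not part of the original code:

# Illustrative dispatch loop over the (medium, params) tuples returned above.
for medium, params in provider.get_contact_channels(user):
    if medium == 'email':
        send_email(
            to=params['recipient'],
            subject=params['subject'],
            template=params['template'],
        )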
Example No. 12
def make_user(identifier, name):
    u = User(identifier=identifier, fullname=name).save()
    token = Token(token=random_string(9)).save()
    token.proxy.user.set(u)

    return u
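
A usage sketch under the same assumptions (the project's User and Token models); the identifier and name are made up:

# Hypothetical usage: creates the user plus a random 9-character Token linked to it.
juan = make_user('juan', 'Juan Pérez')
# 'juan' can then authenticate over basic auth: username 'juan', password <token>.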
Example No. 13
    def patch(self, message):
        execution = Execution.get_or_exception(message['execution_id'])
        if execution.status != 'ongoing':
            raise ModelNotFoundError(
                'Specified execution never existed, and never will', )

        xml = Xml.load(self.config, execution.process_name, direct=True)

        # set nodes with pointers as unfilled, delete pointers
        updates = {}

        user = User.get_by(
            'identifier',
            message.get('user_identifier'),
        )

        if user is None:
            if message.get('user_identifier') == '__system__':
                user = User(identifier='__system__', fullname='System').save()
            else:
                raise InconsistentState('sent identifier of nonexistent user')

        for pointer in execution.pointers.q().filter(status='ongoing'):
            updates['state.items.{node}.state'.format(
                node=pointer.node_id)] = 'unfilled'
            pointer.status = 'cancelled'
            pointer.finished_at = datetime.now()
            pointer.save()

            self.pointer_collection().update_one({
                'id': pointer.id,
            }, {
                '$set': {
                    'state': 'cancelled',
                    'finished_at': pointer.finished_at,
                    'patch': {
                        'comment': message['comment'],
                        'inputs': message['inputs'],
                        'actor': user.to_json(include=[
                            '_type',
                            'fullname',
                            'identifier',
                        ]),
                    },
                },
            })

        self.execution_collection().update_one({
            'id': execution.id,
        }, {
            '$set': updates,
        })

        # retrieve updated state
        state = next(self.execution_collection().find({'id': execution.id}))

        state_updates, array_filters = cascade_invalidate(
            xml, state, message['inputs'], message['comment'])

        # update state
        self.execution_collection().update_one(
            {'id': state['id']},
            {'$set': state_updates},
            array_filters=array_filters,
        )

        # retrieve updated state
        state = next(self.execution_collection().find({'id': execution.id}))

        first_invalid_node = track_next_node(xml, state, self.get_mongo(),
                                             self.config)

        # wakeup and start execution from the found invalid node
        self.wakeup_and_notify(first_invalid_node, execution, state)
Example No. 14
def test_wakeup(config, mongo):
    ''' the first stage in a node's lifecycle '''
    # setup stuff
    handler = Handler(config)

    pointer = make_pointer('simple.2018-02-19.xml', 'start_node')
    execution = pointer.proxy.execution.get()
    juan = User(identifier='juan').save()
    manager = User(
        identifier='juan_manager',
        email='*****@*****.**'
    ).save()

    mongo[config["EXECUTION_COLLECTION"]].insert_one({
        '_type': 'execution',
        'id': execution.id,
        'state': Xml.load(config, execution.process_name).get_state(),
        'actors': {'start_node': 'juan'},
    })

    channel = MagicMock()

    # will wakeup the second node
    handler.call({
        'command': 'step',
        'pointer_id': pointer.id,
        'user_identifier': juan.identifier,
        'input': [],
    }, channel)

    # test manager is notified
    channel.basic_publish.assert_called_once()
    channel.exchange_declare.assert_called_once()

    args = channel.basic_publish.call_args[1]

    assert args['exchange'] == config['RABBIT_NOTIFY_EXCHANGE']
    assert args['routing_key'] == 'email'
    assert json.loads(args['body']) == {
        'recipient': '*****@*****.**',
        'subject': '[procesos] Tarea asignada',
        'template': 'assigned-task.html',
        'data': {
            'pointer': Pointer.get_all()[0].to_json(
                include=['*', 'execution']
            ),
            'cacahuate_url': config['GUI_URL'],
        },
    }

    # pointer collection updated
    reg = next(mongo[config["POINTER_COLLECTION"]].find())

    assert_near_date(reg['started_at'])
    assert reg['finished_at'] is None
    assert reg['execution']['id'] == execution.id
    assert reg['node'] == {
        'id': 'mid_node',
        'type': 'action',
        'description': 'añadir información',
        'name': 'Segundo paso',
    }
    assert reg['actors'] == {
        '_type': ':map',
        'items': {},
    }
    assert reg['notified_users'] == [manager.to_json()]
    assert reg['state'] == 'ongoing'

    # execution collection updated
    reg = next(mongo[config["EXECUTION_COLLECTION"]].find())

    assert reg['state']['items']['mid_node']['state'] == 'ongoing'

    # tasks were assigned
    assert manager.proxy.tasks.count() == 1

    task = manager.proxy.tasks.get()[0]

    assert isinstance(task, Pointer)
    assert task.node_id == 'mid_node'
    assert task.proxy.execution.get().id == execution.id
Example No. 15
def test_teardown(config, mongo):
    ''' second and last stage of a node's lifecycle '''
    # test setup
    handler = Handler(config)

    p_0 = make_pointer('simple.2018-02-19.xml', 'mid_node')
    execution = p_0.proxy.execution.get()

    User(identifier='juan').save()
    manager = User(identifier='manager').save()
    manager2 = User(identifier='manager2').save()

    assert manager not in execution.proxy.actors.get()
    assert execution not in manager.proxy.activities.get()

    manager.proxy.tasks.set([p_0])
    manager2.proxy.tasks.set([p_0])

    state = Xml.load(config, execution.process_name).get_state()
    state['items']['start_node']['state'] = 'valid'

    mongo[config["EXECUTION_COLLECTION"]].insert_one({
        '_type': 'execution',
        'id': execution.id,
        'state': state,
        'values': {
            '_execution': [{
                'name': '',
                'description': '',
            }],
        },
        'actors': {
            'start_node': 'juan',
        },
    })

    mongo[config["POINTER_COLLECTION"]].insert_one({
        'id': p_0.id,
        'started_at': datetime(2018, 4, 1, 21, 45),
        'finished_at': None,
        'execution': {
            'id': execution.id,
        },
        'node': {
            'id': p_0.node_id,
        },
        'actors': {
            '_type': ':map',
            'items': {},
        },
    })

    channel = MagicMock()

    # will teardown mid_node
    handler.call({
        'command': 'step',
        'pointer_id': p_0.id,
        'user_identifier': manager.identifier,
        'input': [Form.state_json('mid_form', [
            {
                '_type': 'field',
                'state': 'valid',
                'value': 'yes',
                'value_caption': 'yes',
                'name': 'data',
            },
        ])],
    }, channel)

    # assertions
    assert Pointer.get(p_0.id) is None

    assert Pointer.count() == 1
    assert Pointer.get_all()[0].node_id == 'final_node'

    # mongo has a registry
    reg = next(mongo[config["POINTER_COLLECTION"]].find())

    assert reg['started_at'] == datetime(2018, 4, 1, 21, 45)
    assert_near_date(reg['finished_at'])
    assert reg['execution']['id'] == execution.id
    assert reg['node']['id'] == p_0.node_id
    assert reg['actors'] == {
        '_type': ':map',
        'items': {
            'manager': {
                '_type': 'actor',
                'state': 'valid',
                'user': {
                    '_type': 'user',
                    'identifier': 'manager',
                    'fullname': None,
                },
                'forms': [Form.state_json('mid_form', [
                    {
                        '_type': 'field',
                        'state': 'valid',
                        'value': 'yes',
                        'value_caption': 'yes',
                        'name': 'data',
                    },
                ])],
            },
        },
    }

    # tasks were deleted from the users
    assert manager.proxy.tasks.count() == 0
    assert manager2.proxy.tasks.count() == 0

    # state
    reg = next(mongo[config["EXECUTION_COLLECTION"]].find())

    assert reg['state'] == {
        '_type': ':sorted_map',
        'items': {
            'start_node': {
                '_type': 'node',
                'type': 'action',
                'id': 'start_node',
                'state': 'valid',
                'comment': '',
                'actors': {
                    '_type': ':map',
                    'items': {},
                },
                'milestone': False,
                'name': 'Primer paso',
                'description': 'Resolver una tarea',
            },

            'mid_node': {
                '_type': 'node',
                'type': 'action',
                'id': 'mid_node',
                'state': 'valid',
                'comment': '',
                'actors': {
                    '_type': ':map',
                    'items': {
                        'manager': {
                            '_type': 'actor',
                            'state': 'valid',
                            'user': {
                                '_type': 'user',
                                'identifier': 'manager',
                                'fullname': None,
                            },
                            'forms': [Form.state_json('mid_form', [
                                {
                                    '_type': 'field',
                                    'state': 'valid',
                                    'value': 'yes',
                                    'value_caption': 'yes',
                                    'name': 'data',
                                },
                            ])],
                        },
                    },
                },
                'milestone': False,
                'name': 'Segundo paso',
                'description': 'añadir información',
            },

            'final_node': {
                '_type': 'node',
                'type': 'action',
                'id': 'final_node',
                'state': 'ongoing',
                'comment': '',
                'actors': {
                    '_type': ':map',
                    'items': {},
                },
                'milestone': False,
                'name': '',
                'description': '',
            },
        },
        'item_order': [
            'start_node',
            'mid_node',
            'final_node',
        ],
    }

    assert reg['values'] == {
        '_execution': [{
            'name': '',
            'description': '',
        }],
        'mid_form': [{
            'data': 'yes',
        }],
    }

    assert reg['actors'] == {
        'start_node': 'juan',
        'mid_node': 'manager',
    }

    assert manager in execution.proxy.actors
    assert execution in manager.proxy.activities
Example No. 16
def all_logs():
    collection = mongo.db[app.config['POINTER_COLLECTION']]

    dict_args = request.args.to_dict()

    query = dict((k, dict_args[k]) for k in dict_args
                 if k not in app.config['INVALID_FILTERS'])

    # filter for user_identifier
    user_identifier = query.pop('user_identifier', None)
    if user_identifier is not None:
        user = User.get_by('identifier', user_identifier)
        if user is not None:
            pointer_list = [item.id for item in user.proxy.tasks.get()]
        else:
            pointer_list = []
        query['id'] = {
            '$in': pointer_list,
        }

    pipeline = [
        {
            '$match': query
        },
        {
            '$sort': {
                'started_at': -1
            }
        },
        {
            '$group': {
                '_id': '$execution.id',
                'latest': {
                    '$first': '$$ROOT'
                },
            }
        },
        {
            '$replaceRoot': {
                'newRoot': '$latest'
            }
        },
        {
            '$sort': {
                'started_at': -1
            }
        },
        {
            '$skip': g.offset
        },
        {
            '$limit': g.limit
        },
    ]

    return jsonify({
        'data':
        list(map(
            json_prepare,
            collection.aggregate(pipeline),
        )),
    })
Example No. 17
def data_mix():
    dict_args = request.args.to_dict()

    # get queries

    # execution's query
    exe_query = dict((k, dict_args[k]) for k in dict_args
                     if k not in app.config['INVALID_FILTERS'])

    # get pointer's query
    ptr_query = {}
    for item in exe_query.copy():
        if item.startswith('pointer.'):
            group, value = item.split('.', 1)
            ptr_query[value] = exe_query.pop(item)

    # filter for exclude/include
    exclude_fields = exe_query.pop('exclude', '')
    exclude_list = [s.strip() for s in exclude_fields.split(',') if s]
    exclude_map = {item: 0 for item in exclude_list}

    include_fields = exe_query.pop('include', '')
    include_list = [s.strip() for s in include_fields.split(',') if s]
    include_map = {item: 1 for item in include_list}

    prjct = {**include_map} or {**exclude_map}

    # filter for user_identifier
    user_identifier = exe_query.pop('user_identifier', None)
    if user_identifier is not None:
        user = User.get_by('identifier', user_identifier)
        if user is not None:
            execution_list = [item.id for item in user.proxy.activities.get()]

            for item in user.proxy.tasks.get():
                execution_list.append(item.execution)
        else:
            execution_list = []

        exe_query['id'] = {
            '$in': execution_list,
        }

    # pipeline
    # all special cases should be handled before this

    # execution's case
    exe_pipeline = [
        {
            '$match': exe_query
        },
    ]

    exe_collection = mongo.db[app.config['EXECUTION_COLLECTION']]
    exe_cursor = exe_collection.aggregate(exe_pipeline)

    exe_ids = list(map(
        lambda item: item['id'],
        exe_cursor,
    ))

    # pointer's case
    ptr_pipeline = [
        {
            '$match': ptr_query
        },
        {
            '$group': {
                '_id': None,
                'executions': {
                    '$push': '$execution.id'
                },
            }
        },
    ]

    ptr_collection = mongo.db[app.config['POINTER_COLLECTION']]
    ptr_cursor = ptr_collection.aggregate(ptr_pipeline)

    ptr_ids = []
    for item in ptr_cursor:
        ptr_ids += item['executions']

    # mix both lists

    def intersection(lst1, lst2):
        temp = set(lst2)
        lst3 = [value for value in lst1 if value in temp]
        return lst3

    if exe_ids and ptr_ids:
        execution_ids = intersection(exe_ids, ptr_ids)
    else:
        execution_ids = exe_ids or ptr_ids

    # build results
    ptr_pipeline = [
        {
            '$match': {
                'execution.id': {
                    '$in': execution_ids
                }
            }
        },
        {
            '$sort': {
                'started_at': -1
            }
        },
        {
            '$group': {
                '_id': '$execution.id',
                'latest': {
                    '$first': '$$ROOT'
                },
            }
        },
        {
            '$replaceRoot': {
                'newRoot': '$latest'
            }
        },
        # TODO: DO NOT CREATE COLLECTION
        {
            '$out': 'ptr_aux_collection'
        },
    ]

    ptr_collection.aggregate(ptr_pipeline)

    exe_pipeline = [
        {
            '$match': {
                'id': {
                    '$in': execution_ids
                }
            }
        },
        # TODO: FIND ANOTHER WAY TO ADD POINTER
        {
            '$lookup': {
                'from': 'ptr_aux_collection',
                'localField': 'id',
                'foreignField': 'execution.id',
                'as': 'pointer',
            }
        },
        {
            '$sort': {
                'started_at': -1
            }
        },
        {
            '$skip': g.offset
        },
        {
            '$limit': g.limit
        },
    ]

    if prjct:
        # TODO: THE ABOVE LOOKUP IS REQUIRED TO USE include/exclude=pointer.foo
        exe_pipeline.append({'$project': prjct})

    def data_mix_json_prepare(obj):
        if obj.get('pointer') is not None:
            try:
                obj['pointer'] = obj['pointer'][0]
                obj['pointer'].pop('execution', {})
                obj['pointer'] = json_prepare(obj['pointer'])
            except IndexError:
                obj['pointer'] = None
        return json_prepare(obj)

    return jsonify({
        'data':
        list(
            map(
                data_mix_json_prepare,
                exe_collection.aggregate(exe_pipeline),
            ))
    })
Example No. 18
def execution_list():
    dict_args = request.args.to_dict()

    # get queries
    def format_query(q):
        try:
            formatted_q = json.loads(q)
        except JSONDecodeError:
            formatted_q = q
        return formatted_q

    # format query
    exe_query = dict((k, format_query(v)) for k, v in dict_args.items()
                     if k not in app.config['INVALID_FILTERS'])

    # sort
    srt = {'$sort': {'started_at': -1}}
    sort_query = exe_query.pop('sort', None)
    if sort_query and sort_query.split(',', 1)[0]:
        try:
            key, order = sort_query.split(',', 1)
        except ValueError:
            key, order = sort_query, 'ASCENDING'

        if order not in ['ASCENDING', 'DESCENDING']:
            order = 'ASCENDING'

        order = getattr(pymongo, order)
        srt = {'$sort': {key: order}}

    # filter for user_identifier
    user_identifier = exe_query.pop('user_identifier', None)
    if user_identifier is not None:
        user = User.get_by('identifier', user_identifier)
        if user is not None:
            execution_list = [item.id for item in user.proxy.activities.get()]
        else:
            execution_list = []
        exe_query['id'] = {
            '$in': execution_list,
        }

    # filter for exclude/include
    exclude_fields = exe_query.pop('exclude', '')
    exclude_list = [s.strip() for s in exclude_fields.split(',') if s]
    exclude_map = {item: 0 for item in exclude_list}

    include_fields = exe_query.pop('include', '')
    include_list = [s.strip() for s in include_fields.split(',') if s]
    include_map = {item: 1 for item in include_list}

    # store project for future use
    prjct = {**include_map} or {**exclude_map}

    exe_pipeline = [
        {
            '$match': exe_query
        },
        srt,
        {
            '$skip': g.offset
        },
        {
            '$limit': g.limit
        },
    ]

    if prjct:
        exe_pipeline.append({'$project': prjct})

    exe_collection = mongo.db[app.config['EXECUTION_COLLECTION']]
    return jsonify({
        "data":
        list(
            map(
                json_prepare,
                exe_collection.aggregate(exe_pipeline, allowDiskUse=True),
            )),
    })
Example No. 19
def test_teardown(config, mongo):
    ''' second and last stage of a node's lifecycle '''
    # test setup
    handler = Handler(config)

    p_0 = make_pointer('simple.2018-02-19.xml', 'mid_node')
    execution = p_0.execution.get()
    execution.started_at = datetime(2018, 4, 1, 21, 45)
    execution.save()

    User(identifier='juan').save()
    manager = User(identifier='manager').save()
    manager2 = User(identifier='manager2').save()

    assert manager not in execution.actors.all()
    assert execution not in manager.activities.all()

    manager.tasks.set([p_0])
    manager2.tasks.set([p_0])

    state = Xml.load(config, execution.process_name).get_state()
    state['items']['start_node']['state'] = 'valid'

    mongo[config["EXECUTION_COLLECTION"]].insert_one({
        '_type': 'execution',
        'id': execution.id,
        'state': state,
        'values': [
            {
                '_type': 'fgroup',
                'ref': '_execution',
                'forms': [{
                    'ref': '_execution',
                    'fields': [
                        {
                            '_type': 'field',
                            'name': 'name',
                            'value': '',
                            'value_caption': '',
                            'state': 'valid',
                            'actor': {
                                'identifier': '__system__',
                            },
                            'set_at': execution.started_at,
                        },
                        {
                            '_type': 'field',
                            'name': 'description',
                            'value': '',
                            'value_caption': '',
                            'state': 'valid',
                            'actor': {
                                'identifier': '__system__',
                            },
                            'set_at': execution.started_at,
                        },
                    ],
                }],
            },
        ],
        'actors': {
            'start_node': 'juan',
        },
    })

    mongo[config["POINTER_COLLECTION"]].insert_one({
        'id': p_0.id,
        'started_at': datetime(2018, 4, 1, 21, 45),
        'finished_at': None,
        'execution': {
            'id': execution.id,
        },
        'node': {
            'id': p_0.node_id,
        },
        'actors': {
            '_type': ':map',
            'items': {},
        },
        'actor_list': [],
    })

    # will teardown mid_node
    handler.step({
        'command': 'step',
        'pointer_id': p_0.id,
        'user_identifier': manager.identifier,
        'input': [
            Form.state_json('mid_form', [
                {
                    '_type': 'field',
                    'state': 'valid',
                    'value': 'yes',
                    'value_caption': 'yes',
                    'name': 'data',
                },
            ])
        ],
    })

    # assertions
    assert Pointer.get(p_0.id).status == 'finished'
    ptrs = list(Pointer.q().filter(status='ongoing'))
    assert len(ptrs) == 1
    assert ptrs[0].node_id == 'final_node'

    # mongo has a registry
    reg = next(mongo[config["POINTER_COLLECTION"]].find())

    assert reg['started_at'] == datetime(2018, 4, 1, 21, 45)
    assert_near_date(reg['finished_at'])
    assert reg['execution']['id'] == execution.id
    assert reg['node']['id'] == p_0.node_id
    assert reg['actors'] == {
        '_type': ':map',
        'items': {
            'manager': {
                '_type': 'actor',
                'state': 'valid',
                'user': {
                    '_type': 'user',
                    'identifier': 'manager',
                    'fullname': None,
                    'email': None,
                },
                'forms': [
                    Form.state_json('mid_form', [
                        {
                            '_type': 'field',
                            'state': 'valid',
                            'value': 'yes',
                            'value_caption': 'yes',
                            'name': 'data',
                        },
                    ])
                ],
            },
        },
    }
    assert reg['actor_list'] == [
        {
            'form': 'mid_form',
            'actor': {
                '_type': 'user',
                'fullname': None,
                'identifier': 'manager',
                'email': None,
            },
        },
    ]

    # tasks were deleted from the users
    assert list(manager.tasks.q().filter(status='ongoing')) == []
    assert list(manager2.tasks.q().filter(status='ongoing')) == []

    # state
    reg = next(mongo[config["EXECUTION_COLLECTION"]].find())

    assert reg['state'] == {
        '_type': ':sorted_map',
        'items': {
            'start_node': {
                '_type': 'node',
                'type': 'action',
                'id': 'start_node',
                'state': 'valid',
                'comment': '',
                'actors': {
                    '_type': ':map',
                    'items': {},
                },
                'milestone': False,
                'name': 'Primer paso',
                'description': 'Resolver una tarea',
            },
            'mid_node': {
                '_type': 'node',
                'type': 'action',
                'id': 'mid_node',
                'state': 'valid',
                'comment': '',
                'actors': {
                    '_type': ':map',
                    'items': {
                        'manager': {
                            '_type': 'actor',
                            'state': 'valid',
                            'user': {
                                '_type': 'user',
                                'identifier': 'manager',
                                'fullname': None,
                                'email': None,
                            },
                            'forms': [
                                Form.state_json('mid_form', [
                                    {
                                        '_type': 'field',
                                        'state': 'valid',
                                        'value': 'yes',
                                        'value_caption': 'yes',
                                        'name': 'data',
                                    },
                                ])
                            ],
                        },
                    },
                },
                'milestone': False,
                'name': 'Segundo paso',
                'description': 'añadir información',
            },
            'final_node': {
                '_type': 'node',
                'type': 'action',
                'id': 'final_node',
                'state': 'ongoing',
                'comment': '',
                'actors': {
                    '_type': ':map',
                    'items': {},
                },
                'milestone': False,
                'name': '',
                'description': '',
            },
        },
        'item_order': [
            'start_node',
            'mid_node',
            'final_node',
        ],
    }

    values = reg['values']

    eval_context = make_context({'values': values}, {})
    eval_actor_map = make_actor_map({'values': values})

    expected_context = {
        '_env': [{}],
        '_execution': [{
            'name': '',
            'description': '',
            'get_name_display': '',
            'get_description_display': '',
        }],
        'mid_form': [{
            'data': 'yes',
            'get_data_display': 'yes'
        }],
    }

    assert {k: list(v.all())
            for k, v in eval_context.items()} == expected_context

    expected_actor_map = {
        '_execution': [{
            'name': {
                'actor': '__system__',
            },
            'description': {
                'actor': '__system__',
            },
        }],
        'mid_form': [{
            'data': {
                'actor': 'manager',
            },
        }],
    }

    for frms in eval_actor_map.values():
        for frm in frms:
            for fld in frm.values():
                assert fld.pop('set_at')

    assert eval_actor_map == expected_actor_map

    assert reg['actors'] == {
        'start_node': 'juan',
        'mid_node': 'manager',
    }

    assert manager in execution.actors
    assert execution in manager.activities
Example No. 20
def make_user(identifier, name, email=None):
    u = User(identifier=identifier, fullname=name, email=email).save()
    token = Token(token=random_string(9)).save()
    token.user.set(u)

    return u
Example No. 21
def data_mix():
    dict_args = request.args.to_dict()

    # get queries
    def format_query(q):
        try:
            formatted_q = json.loads(q)
        except JSONDecodeError:
            formatted_q = q
        return formatted_q

    # execution's query
    exe_query = dict((k, format_query(v)) for k, v in dict_args.items()
                     if k not in app.config['INVALID_FILTERS'])

    # get pointer's query
    ptr_query = {}
    for item in copy.deepcopy(exe_query):
        if item.startswith('pointer.'):
            ptr_key = item.split('.', 1)[1]

            ptr_query[ptr_key] = exe_query.pop(item)

    # filter for exclude/include
    exclude_fields = exe_query.pop('exclude', '')
    exclude_list = [s.strip() for s in exclude_fields.split(',') if s]
    exclude_map = {item: 0 for item in exclude_list}

    include_fields = exe_query.pop('include', '')
    include_list = [s.strip() for s in include_fields.split(',') if s]
    include_map = {item: 1 for item in include_list}

    prjct = {**include_map} or {**exclude_map}

    # filter for exe_id
    if exe_query.get('id'):
        exe_id = {exe_query.get('id')}
    else:
        exe_id = None

    # filter for user_identifier
    user_identifier = exe_query.pop('user_identifier', None)
    if user_identifier is not None:
        user = User.get_by('identifier', user_identifier)
        # early return
        if user is None:
            return jsonify({'data': []})

        uid_exe_set = {item.id
                       for item in user.proxy.activities.get()
                       } | {item.execution
                            for item in user.proxy.tasks.get()}

        if exe_id is not None:
            exe_id &= uid_exe_set
        else:
            exe_id = uid_exe_set

    # filter for actor_identifier
    actor_identifier = exe_query.pop('actor_identifier', None)
    if actor_identifier is not None:
        collection = mongo.db[app.config['EXECUTION_COLLECTION']]
        cursor = collection.aggregate([
            {
                '$match': {
                    'state.item_order': {
                        '$exists': True,
                        '$nin': [None, {}],
                    },
                    'actors': {
                        '$exists': True,
                    },
                }
            },
            {
                '$project': {
                    '_id': 0,
                    'id': 1,
                    'state.item_order': 1,
                    'actors': 1,
                }
            },
        ])

        aid_exe_set = set()
        for doc in cursor:
            key_list = doc['state']['item_order']
            for key in key_list:
                an_actor = doc['actors'].get(key)
                if an_actor and an_actor == actor_identifier:
                    aid_exe_set.add(doc['id'])

        # early return
        if not aid_exe_set:
            return jsonify({'data': []})

        if exe_id is not None:
            exe_id &= aid_exe_set
        else:
            exe_id = aid_exe_set

    # filter for sorting
    sort_query = exe_query.pop('sort', None)
    if sort_query and sort_query.split(',', 1)[0]:
        try:
            key, order = sort_query.split(',', 1)
        except ValueError:
            key, order = sort_query, 'ASCENDING'

        if order not in ['ASCENDING', 'DESCENDING']:
            order = 'ASCENDING'

        order = getattr(pymongo, order)
        srt = {'$sort': {key: order}}
    else:
        srt = {'$sort': {'started_at': -1}}

    # pipeline
    # all special cases should be handled before this

    # pointer's case
    if ptr_query:
        ptr_pipeline = [
            {
                '$match': ptr_query
            },
            {
                '$group': {
                    '_id': None,
                    'executions': {
                        '$push': '$execution.id'
                    },
                }
            },
        ]

        ptr_collection = mongo.db[app.config['POINTER_COLLECTION']]
        ptr_cursor = ptr_collection.aggregate(ptr_pipeline)

        ptr_exe_ids = set()
        for item in ptr_cursor:
            ptr_exe_ids |= set(item['executions'])

        if exe_id is not None:
            exe_id &= ptr_exe_ids
        else:
            exe_id = ptr_exe_ids

    if isinstance(exe_id, set):
        exe_query['id'] = {
            '$in': list(exe_id),
        }

    # execution's case
    exe_pipeline = [
        {
            '$match': exe_query
        },
        {
            '$project': {
                '_id': 0,
                'id': 1,
            }
        },
    ]

    exe_collection = mongo.db[app.config['EXECUTION_COLLECTION']]
    exe_cursor = exe_collection.aggregate(exe_pipeline)

    execution_ids = list(map(
        lambda item: item['id'],
        exe_cursor,
    ))

    # build results
    ptr_lookup = {
        'from': app.config['POINTER_COLLECTION'],
        'localField': 'id',
        'foreignField': 'execution.id',
        'as': 'pointer',
    }

    exe_pipeline = [
        {
            '$match': {
                'id': {
                    '$in': execution_ids
                }
            }
        },
        {
            '$lookup': ptr_lookup
        },
        {
            '$project': {
                'pointer.execution': 0
            }
        },
        srt,
        {
            '$skip': g.offset
        },
        {
            '$limit': g.limit
        },
    ]

    if prjct:
        exe_pipeline.append({'$project': prjct})

    def data_mix_json_prepare(obj):
        if 'pointer' in obj and obj['pointer']:
            obj['pointer'] = json_prepare(obj['pointer'][-1])
            obj['pointer'].pop('execution', None)
        else:
            obj.pop('pointer', None)
        return json_prepare(obj)

    return jsonify({
        'data':
        list(
            map(
                data_mix_json_prepare,
                exe_collection.aggregate(exe_pipeline, allowDiskUse=True),
            ))
    })