Example #1
    def __init__(
        self,
        columns,
        add_row=None,
        remove_row=None,
        edit_row_at_index=None,
        clear=None,
        initial_rows=None,
    ):
        self.add_row = Subject() if add_row is None else add_row
        self.remove_row = Subject() if remove_row is None else remove_row
        self.clear = Subject() if clear is None else clear
        self.edit_row_at_index = Subject(
        ) if edit_row_at_index is None else edit_row_at_index

        self.initial_rows = pr.v() if initial_rows is None else initial_rows
        self.initial_columns = pr.v(*columns)

        self.history = rx.merge(self.add_row, self.remove_row, self.clear,
                                self.edit_row_at_index)
        self._values_with_columns = self.history.pipe(
            ops.scan(
                self.reduce_table_state,
                (self.initial_rows, self.initial_columns),
            ))

        self.values = self._values_with_columns.pipe(ops.map(lambda x: x[0]))
        self.columns = self._values_with_columns.pipe(ops.map(lambda x: x[1]))
        self.values_with_history = self.values.pipe(ops.zip(self.history))
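The core of this example is ops.scan folding an event stream into a persistent vector. A self-contained sketch of that pattern with RxPY and pyrsistent alone (not the class's actual reducer, which is not shown above):

import rx
from rx import operators as ops
from pyrsistent import v

# Each event is folded into a fresh pvector; earlier snapshots stay untouched.
rows = rx.of('a', 'b', 'c').pipe(ops.scan(lambda acc, row: acc.append(row), v()))
rows.subscribe(print)  # pvector(['a']), pvector(['a', 'b']), pvector(['a', 'b', 'c'])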
Example #2
    def test_it_cannot_move_backwards(self, data, empty):
        start = data.draw(square(board=empty[v(0, 1):]))
        end = v(start[0], start[1] - 1)

        board = empty.set(start, self.white())
        with self.assertRaises(core.IllegalMove):
            board.move(start=start, end=end)
Example #3
def _conform_interceptor(interceptor, name, nesting=False):
    """
    Conform one or more things to interceptors.

    :param interceptor: `Interceptor`, ``callable`` or flat iterable of
    either to conform.
    :param unicode name: Potential route name, one will be derived if ``None``.
    :rtype: Tuple[pvector, unicode]
    :return: Pair of interceptors and a route name.
    """
    if isinstance(interceptor, Interceptor):
        return v(interceptor), name or interceptor.name
    elif callable(interceptor):
        name = name or callable_name(interceptor)
        return v(handler(interceptor, name=name)), name
    elif isinstance(interceptor, collections.Iterable):
        if nesting is True:
            raise TypeError('Interceptors must not be nested', interceptor)
        results = zip(
            *[_conform_interceptor(i, name, True) for i in interceptor])
        if not results:
            raise ValueError('No interceptors specified')
        interceptors, names = results
        return freeze([i[0] for i in interceptors]), names[-1]
    else:
        raise TypeError('Cannot be adapted to an interceptor', interceptor)
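A minimal usage sketch, assuming the module above is importable; log_interceptor stands in for a hypothetical Interceptor instance:

# A bare callable is wrapped in a handler interceptor, with a name derived from the callable.
interceptors, name = _conform_interceptor(lambda context: context, None)
# A flat iterable conforms each element; nested iterables raise TypeError.
interceptors, name = _conform_interceptor([log_interceptor, lambda context: context], 'my_route')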
Example #4
    def init(self, agents):
        players = v(*[
            m(type="player",
              index=i,
              agent=agent,
              team=Team.RED if i % 2 == 0 else Team.BLUE,
              x=0,
              y=0,
              has_ball=False,
              stance=0) for i, agent in enumerate(agents)
        ])
        teams = m(red=v(*[p.index for p in players if p.team == Team.RED]),
                  blue=v(*[p.index for p in players if p.team == Team.BLUE]))
        ball = m(type='ball',
                 on_field=True,
                 x=int(self.field_width / 2) + 1,
                 y=int(self.field_height / 2) + 1)
        pitch = m(width=self.field_width,
                  height=self.field_height,
                  goal_height=self.goal_height)
        state = SoccerState(current_player_id=0,
                            players=players,
                            teams=teams,
                            ball=ball,
                            pitch=pitch,
                            winner=None)
        state = state._update_reset(random_pos=self.random_pos)
        return state
Example #5
def test_freeze_nonstrict_no_recurse_in_pvectors():
    input = [1, v(2, [3])]
    result = freeze(input, strict=False)
    # PMap and PVector are == to their mutable equivalents
    assert result == input
    assert type(result) is type(v())
    assert type(result[1][1]) is list
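For contrast, a quick sketch of the default strict behaviour, assuming pyrsistent 0.18+ where freeze recurses into pvectors unless strict=False:

from pyrsistent import freeze, v

# With the default strict=True, the plain list nested inside the pvector is frozen too.
assert freeze([1, v(2, [3])]) == v(1, v(2, v(3)))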
Example #6
class ExtraData(object):
    """
    Metadata about commands
    """
    parser = attr.ib(default=caparg.command(''))
    dependencies = attr.ib(default=pyrsistent.v())
    aliases = attr.ib(default=pyrsistent.v())
    regular = attr.ib(default=False)
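Because pvectors are immutable, sharing one empty pyrsistent.v() as an attrs default is safe, unlike a mutable [] default. A small sketch (the Demo class is hypothetical, not from the source):

import attr
import pyrsistent


@attr.s
class Demo(object):
    dependencies = attr.ib(default=pyrsistent.v())


a, b = Demo(), Demo()
assert a.dependencies is b.dependencies              # one shared, immutable default
assert a.dependencies.append(1) == pyrsistent.v(1)   # "updates" return new vectors
assert b.dependencies == pyrsistent.v()               # the shared default is untouched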
Example #7
    def test_it_can_move_forward(self, data, empty):
        start = data.draw(square(board=empty[:v(0, empty.height - 1)]))
        end = v(start[0], start[1] + 1)

        board = empty.set(start, self.white())
        moved = board.move(start=start, end=end)

        self.assertEqual(pmap(moved.pieces), pmap({end: self.white()}))
Example #8
def test_get_offer_resources(fake_offer, role):
    assert get_offer_resources(fake_offer, role) == ResourceSet(
        cpus=10 if role != 'none' else 0,
        mem=1024 if role != 'none' else 0,
        disk=1000 if role != 'none' else 0,
        gpus=1 if role != 'none' else 0,
        ports=v(m(begin=31200, end=31500)) if role != 'none' else v(),
    )
Example #9
def benchmarks(size):
    pys_list = make_list(*range(size))
    pyr_list = v(*range(size))
    mut_list = list(range(size))

    pys_vector_append = partial(pys_list.concat, make_list(1))
    pyr_vector_append = partial(pyr_list.append, 1)
    mut_vector_append = partial(mut_list.append, 1)

    mut_list = list(range(size))

    pys_vector_push = partial(pys_list.cons, 1)
    pyr_vector_push = partial(v(1).extend, pyr_list)
    mut_vector_push = partial(mut_list.insert, 0, 1)

    mut_list = list(range(size))

    pys_vector_mutate_beginning = partial(_pys_set, 0, 'new-value', pys_list)
    pyr_vector_mutate_beginning = partial(pyr_list.assoc, 0, 'new-value')
    mut_vector_mutate_beginning = partial(mut_list.__setitem__, 0, 'new-value')

    mut_list = list(range(size))

    middle_index = size // 2
    pys_vector_mutate_middle = partial(_pys_set, middle_index, 'new-value', pys_list)
    pyr_vector_mutate_middle = partial(pyr_list.assoc, middle_index, 'new-value')
    mut_vector_mutate_middle = partial(mut_list.__setitem__, middle_index, 'new-value')

    mut_list = list(range(size))

    pys_vector_mutate_end = partial(_pys_set, size-1, 'new-value', pys_list)
    pyr_vector_mutate_end = partial(pyr_list.assoc, size-1, 'new-value')
    mut_vector_mutate_end = partial(mut_list.__setitem__, size-1, 'new-value')

    benchmarks = [
        (MUTABLE, VECTOR_APPEND, mut_vector_append),
        (PYRSISTENT, VECTOR_APPEND, pyr_vector_append),
        (PYSISTENCE, VECTOR_APPEND, pys_vector_append),

        (MUTABLE, VECTOR_PUSH, mut_vector_push),
        (PYRSISTENT, VECTOR_PUSH, pyr_vector_push),
        (PYSISTENCE, VECTOR_PUSH, pys_vector_push),

        (MUTABLE, VECTOR_MUTATE_BEG, mut_vector_mutate_beginning),
        (PYRSISTENT, VECTOR_MUTATE_BEG, pyr_vector_mutate_beginning),
        (PYSISTENCE, VECTOR_MUTATE_BEG, pys_vector_mutate_beginning),

        (MUTABLE, VECTOR_MUTATE_MID, mut_vector_mutate_middle),
        (PYRSISTENT, VECTOR_MUTATE_MID, pyr_vector_mutate_middle),
        (PYSISTENCE, VECTOR_MUTATE_MID, pys_vector_mutate_middle),

        (MUTABLE, VECTOR_MUTATE_END, mut_vector_mutate_end),
        (PYRSISTENT, VECTOR_MUTATE_END, pyr_vector_mutate_end),
        (PYSISTENCE, VECTOR_MUTATE_END, pys_vector_mutate_end),
    ]
    return benchmarks
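A sketch of how the returned partials might be driven; the timing loop below is hypothetical and not part of the source:

import timeit

# Each entry is (library label, operation label, zero-argument callable).
for library, operation, fn in benchmarks(1000):
    print(library, operation, timeit.timeit(fn, number=1000))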
Example #10
    def __init__(self, timefunc=time.time):
        self.__data = m(products=m(), transactions=v(), summaries=v())

        self.__event_queue = Queue()    # sequential query execution
        self.__lock = Condition()       # passive waiting only
        self.__stop = Event()

        self.time = timefunc

        Thread(target=self.__run).start()
Example #11
    def init(self, agents):
        if len(agents) != 2:
            raise ValueError("Connect 4 only accepts games with 2 agents.")
        board = v(*[v() for _ in range(self.width)])
        state = Connect4State(current_player_id=0,
                              width=self.width,
                              height=self.height,
                              connect_length=self.connect_length,
                              board=board,
                              winner=None)
        return state
Example #12
    def test_it_captures_diagonally(self, data, empty):
        middle = empty[v(1, 0):v(empty.width - 1, empty.height - 1)]
        start = data.draw(square(board=middle))
        end = data.draw(
            strategies.sampled_from([
                v(start[0] + 1, start[1] + 1),
                v(start[0] - 1, start[1] + 1),
            ], ), )

        board = empty.set(start, self.white()).set(end, self.black())
        moved = board.move(start=start, end=end)

        self.assertEqual(pmap(moved.pieces), pmap({end: self.white()}))
Example #13
def test_results_writer():
    """Does the results writer output what it should?"""
    results = m(episode=v(0, 1, 2), step_count=v(12, 22, 11))
    output_path = os.path.join(os.getcwd(), 'results.txt')
    results_descriptor = ResultsDescriptor(2, output_path, ['episode', 'step_count'])
    initialize_results(results_descriptor)
    write_results(results, results_descriptor)
    results_check = read_results(output_path)
    assert numpy.array_equal(results_check, numpy.array([[0., 12.], [1., 22.], [2., 11.]]))
    new_results = m(episode=v(3), step_count=v(12.33))
    write_results(new_results, results_descriptor)
    new_results_check = read_results(output_path)
    assert numpy.array_equal(new_results_check, numpy.array([[0., 12.], [1., 22.], [2., 11.], [3., 12.33]]))
Example #14
    def dependency(self,
                   name=None,
                   dependencies=pyrsistent.v(),
                   possible_dependencies=pyrsistent.v(),
                   regular=False):
        """
        Register as a dependency.

        """
        glue = (dependencies, possible_dependencies, regular)
        transform = gather.Wrapper.glue(glue)
        ret = self._collector.register(name, transform=transform)
        return ret
Example #15
def test_last_drop_take():
    l = list([1, 2, 3])
    assert_that(pvector(drop(2, l))).is_equal_to(v(3))
    assert_that(pvector(take(2, l))).is_equal_to(v(1, 2))
    assert_that(pmap(groupby(first,
                             ['ABC', 'ABA', 'BAB', 'BAA']))).is_equal_to(
                                 m(A=['ABC', 'ABA'], B=['BAB', 'BAA']))
    assert_that(pmap(groupby(identity,
                             ['ABC', 'ABA', 'BAB', 'BAA']))).is_equal_to(
                                 m(ABC=['ABC'],
                                   ABA=['ABA'],
                                   BAB=['BAB'],
                                   BAA=['BAA']))
Example #16
def test_allocate_task_resources(fake_task, offer_resources, available_ports):
    offer_resources = offer_resources.set('ports', available_ports)
    expected_port = available_ports[0].begin
    consumed, remaining = allocate_task_resources(fake_task, offer_resources)
    assert consumed == fake_task.set(
        ports=v(m(begin=expected_port, end=expected_port)))
    assert remaining == {
        'cpus': 0,
        'mem': 0,
        'disk': 0,
        'gpus': 0,
        'ports': v(m(begin=6, end=10)),
    }
Example #17
    def make_border(
        points: np.ndarray, edges: Complex
    ) -> Tuple[np.ndarray, Complex, Complex, PSet[Cycle], PSet[Cycle]]:
        def first_index(array: np.ndarray, value: np.ndarray) -> float:
            return next(i for i, _ in enumerate(array)
                        if np.linalg.norm(value - _) < EPSILON)

        first_index_points = partial(first_index, points)

        corners = v(v(-0.5, 0.5), v(-0.5, -0.5), v(0.5, -0.5), v(0.5, 0.5))

        ul, dl, dr, ur = pipe(corners, map(np.array), map(first_index_points))

        max_ind = len(points)

        cul = max_ind
        cdl = max_ind + 1
        cdr = max_ind + 2
        cur = max_ind + 3

        left_c = v(ul, cul, cdl, dl)
        right_c = v(dr, cdr, cur, ur)
        down_c = v(dl, cdl, cdr, dr)
        up_c = v(ur, cur, cul, ul)

        red_base_cs = s(left_c, right_c)
        blue_base_cs = s(up_c, down_c)

        def border_edges(pts: np.ndarray, es: Complex, coord: int,
                         side: float) -> Complex:
            return pset(edge for edge in es if all(
                np.linalg.norm(pts[vert][coord] - side) < EPSILON
                for vert in edge))

        border_edges_from_square_side = partial(border_edges, points, edges)

        left_faces = faces_from_edges(
            border_edges_from_square_side(0, -0.5)
            | outer_edges_from_cycle(left_c))
        right_faces = faces_from_edges(
            border_edges_from_square_side(0, 0.5)
            | outer_edges_from_cycle(right_c))
        down_faces = faces_from_edges(
            border_edges_from_square_side(1, -0.5)
            | outer_edges_from_cycle(down_c))
        up_faces = faces_from_edges(
            border_edges_from_square_side(1, 0.5)
            | outer_edges_from_cycle(up_c))

        red_base = closure(left_faces | right_faces)
        blue_base = closure(down_faces | up_faces)

        border_points = np.array(corners) * BORDER_SCALE
        aug_points = np.concatenate((points, border_points))

        return aug_points, blue_base, red_base, blue_base_cs, red_base_cs
Example #18
def gen_grammar_visitor(node: ParseTreeNode, string: str, left_offset: int,
                        run_data: ObjectParserRun,
                        result: ObjectParserResult) -> VisitorReturnType:
    """A custom visitor on the PEG grammer which does the delegation as necessary.
    It is a corroutine which yields delegations as needed. It returns metadata about
    whether or not it succeeded at each visit as well as a pvec of metadata which
    would need to be accepted as a valid parses assuming everything above it succeeds.

    This isn't a great description I know. If you are reading this and it is unclear
    nag @DNGros to clean it up.
    """
    #print("visiting", node, node.rule_name, "  :: ", string)
    if node.rule_name == "arg_identifier":
        slice_to_parse = (left_offset, left_offset + len(string))
        arg = run_data.get_arg_by_name(node.value)
        if arg is None:
            raise ValueError(
                f"Trying to parse grammar with arg {node.value} but not found")
        delegation = run_data.left_fill_arg(arg, slice_to_parse)
        parse_return = yield delegation
        if not parse_return.parse_success:
            parse_return = parse_return.add_fail(
                f"Stack Message: Fail on arg '{node.value}'")
        return parse_return, v(parse_return)
    elif node.rule_name == "str_match":
        return _visit_str_match(node, string, left_offset, result), v()
    elif node.rule_name == "sufix":
        out_return, things_to_accept = yield from _visit_sufix(
            node, string, left_offset, run_data, result)
        return out_return, things_to_accept
    else:
        remaining_string = string
        new_left_offset = left_offset
        acceptables = []
        if isinstance(node, arpeggio.NonTerminal):
            for child in node:
                visitv = yield from gen_grammar_visitor(
                    child, remaining_string, new_left_offset, run_data, result)
                parse_return, new_acceptables = visitv
                if not parse_return.parse_success:
                    #print("FAIL on", child, string)
                    return parse_return, v()
                acceptables.extend(new_acceptables)
                remaining_string = parse_return.remaining_string
                new_left_offset += parse_return.remaining_right_starti
        # TODO (DNGros): Figure out what to put in as what_parsed here
        new_return = ParseDelegationReturnMetadata.create_from_substring(
            None, string, remaining_string, left_offset)
        return new_return, acceptables
Example #19
    def __init__(self):
        """
        Initialization of the Slot instance

        Attributes
        ----------
        expectations = pvector
        slot_names = pvector
        History = pvector
        value_classes = pmap
        """
        self.expectations = v()
        self.slot_names = v()
        self.History = v()
        self.value_classes = m()
Example #20
def tracing(stage, marker):
    """
    Trace factory.
    """
    return lambda context: context.transform([TRACE], lambda xs:
                                             (xs or v()).append(
                                                 (stage, marker)))
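The transform call above appends a (stage, marker) pair to a vector stored under a key without mutating the original context. The same pattern with pyrsistent alone, sketched with a plain pmap and a string key:

from pyrsistent import m, v

context = m(trace=v())
traced = context.transform(['trace'], lambda xs: (xs or v()).append(('stage', 'marker')))
assert traced['trace'] == v(('stage', 'marker'))
assert context['trace'] == v()  # the original context is unchanged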
Example #21
    def __init__(
        self,
        downstream_executor,
        handler=standard_handler,
        format_string=DEFAULT_FORMAT,
    ):
        self.downstream_executor = downstream_executor
        self.TASK_CONFIG_INTERFACE = downstream_executor.TASK_CONFIG_INTERFACE
        self.handler = handler
        self.format_string = format_string

        self.src_queue = downstream_executor.get_event_queue()
        self.dest_queue = Queue()
        self.stopping = False

        self.staging_tasks = m()
        self.running_tasks = m()
        self.done_tasks = v()

        # A lock is needed to synchronize logging and event processing
        self.task_lock = Lock()

        self.event_thread = Thread(target=self.event_loop)
        self.event_thread.daemon = True
        self.event_thread.start()

        self.logging_thread = Thread(target=self.logging_loop)
        self.logging_thread.daemon = True
        self.logging_thread.start()
Example #22
def _flatten_sentence(sentence: ETree.Element) \
        -> List[Tuple[Text, Sequence[LevelAnnotation]]]:
    # TODO docs
    """
    Convert a `sentence` XML Element object into normal text.
    :param sentence: a sentence XML node
    :return: a list of strings with corresponding annotations
    """

    def isterminal(element: ETree.Element):
        return next(iter(element), None) is None

    def getanno(element: ETree.Element):
        return element.get(ANNO_TAG, None)

    stack = [(sentence, iter(sentence), v())]
    texts = [sentence.text]
    annotations = [stack[0][2]]
    while stack:
        node, children, anno = stack[-1]
        child = next(children, None)
        if child is None:
            stack.pop()
            texts.append(node.tail)
            annotations.append(anno[:-1])
            continue
        child_anno = anno.append(
            LevelAnnotation(len(anno), getanno(child), isterminal(child)))
        texts.append(child.text)
        annotations.append(child_anno)
        stack.append((child, iter(child), child_anno))

    return list(zip(texts, annotations))
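The stack logic relies on a key property of persistent vectors: anno.append(...) gives each child its own extended annotation vector while the parent's (and every sibling's) vector stays untouched. A minimal sketch of that property:

from pyrsistent import v

parent_anno = v('sentence')
child_anno = parent_anno.append('entity')
assert parent_anno == v('sentence')           # parent is unchanged
assert child_anno == v('sentence', 'entity')  # child got an extended copy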
Example #23
    def __init__(
        self,
        add=None,
        remove=None,
        reindex=None,
        clear=None,
        edit_at_index=None,
        initial_state=None,
    ):
        self.add = Subject() if add is None else add
        self.remove = Subject() if remove is None else remove
        self.reindex = Subject() if reindex is None else reindex
        self.clear = Subject() if clear is None else clear
        self.edit_at_index = Subject(
        ) if edit_at_index is None else edit_at_index

        self.initial_state = pr.v() if initial_state is None else initial_state

        self.history = rx.merge(self.add, self.remove, self.reindex,
                                self.clear, self.edit_at_index)

        self._values = self.history.pipe(
            ops.scan(self.reduce_list_state, self.initial_state))
        self.values = self._values
        self.values_with_history = self._values.pipe(
            ops.zip(self.history),
            # ops.replay(buffer_size=1)
        )
Example #24
    def renderHTTP(self, nevow_ctx):
        context = pmap({
            NEVOW_REQUEST: IRequest(nevow_ctx),
        })
        d = execute(context, v(nevow()) + self._interceptors)
        d.addCallback(lambda _: b'')
        return d
Example #25
    def test_empty(self):
        """
        Adding the first terminator creates the vector if necessary.
        """
        context = empty_context
        self.assertThat(terminate_when(context, self.always),
                        ContainsDict({TERMINATORS: Equals(v(self.always))}))
Example #26
def test_create_new_docker_task(
    ef,
    fake_offer,
    fake_task,
    gpus_count,
    containerizer,
    container,
):
    available_ports = [31200]
    task_id = fake_task.task_id
    task_metadata = ef_mdl.TaskMetadata(
        task_config=fake_task,
        task_state='fake_state',
        task_state_history=m(fake_state=time.time()))
    fake_task = fake_task.set(
        volumes=v(
            Dict(mode='RO',
                 container_path='fake_container_path',
                 host_path='fake_host_path')),
        gpus=gpus_count,
        containerizer=containerizer,
    )

    ef.task_metadata = ef.task_metadata.set(task_id, task_metadata)
    docker_task = ef.create_new_docker_task(fake_offer, fake_task,
                                            available_ports)

    new_docker_task = Dict(
        task_id=Dict(value=task_id),
        agent_id=Dict(value='fake_agent_id'),
        name='executor-{id}'.format(id=task_id),
        resources=[
            Dict(name='cpus',
                 type='SCALAR',
                 role='fake_role',
                 scalar=Dict(value=10.0)),
            Dict(name='mem',
                 type='SCALAR',
                 role='fake_role',
                 scalar=Dict(value=1024.0)),
            Dict(name='disk',
                 type='SCALAR',
                 role='fake_role',
                 scalar=Dict(value=1000.0)),
            Dict(name='gpus',
                 type='SCALAR',
                 role='fake_role',
                 scalar=Dict(value=gpus_count)),
            Dict(name='ports',
                 type='RANGES',
                 role='fake_role',
                 ranges=Dict(range=[Dict(begin=31200, end=31200)]))
        ],
        command=Dict(value='echo "fake"',
                     uris=[],
                     environment=Dict(variables=[])),
        container=container,
    )
    assert ef.task_metadata[task_id].agent_id == 'fake_agent_id'
    assert docker_task == new_docker_task
Example #27
        def get_value(self, namespace):
            """
            Get value out of a namespace

            Args:
                namespace (argparse.Namespace): the namespace

            Returns:
                a value
            """
            value = getattr(namespace, self._name, self._MISSING)
            ret = pyrsistent.m()
            if value is None and self._type == typing.List[str]:
                value = self._MISSING
            if value is not self._MISSING:
                ret = ret.set(self._name, value)
            elif self._have_default is True:
                if self._type == str:
                    ret = ret.set(self._name, '')
                elif self._type == typing.List[str]:
                    ret = ret.set(self._name, pyrsistent.v())
                else:  # pragma: no cover
                    raise NotImplementedError("cannot default value",
                                              self._name, self._type)
            return ret
Example #28
def merge_results(results):
    """
    Given a list of dictionary results from episodes and the interesting keys, merge them into a single dictionary.
    Example: [{episode_id: 1, steps: 22}, {episode_id: 2, steps: 30}] -> {episode_id: [1, 2], steps: [22, 30]}
    """
    seed_dictionary = pmap({key: v() for key, _ in results[0].items()})
    return pmap(reduce(lambda result1, y: {key: value.append(y[key]) for key, value in result1.items()}, [seed_dictionary] + results))
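A minimal usage sketch of merge_results, assuming the module above (with its reduce import) plus pyrsistent's pmap and v:

from pyrsistent import pmap, v

merged = merge_results([{'episode_id': 1, 'steps': 22}, {'episode_id': 2, 'steps': 30}])
assert merged == pmap({'episode_id': v(1, 2), 'steps': v(22, 30)})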
Example #29
def test_process_event_enqueues_task_processing_events_pending_to_running(
        k8s_executor):
    mock_pod = mock.Mock(spec=V1Pod)
    mock_pod.metadata.name = "test.1234"
    mock_pod.status.phase = "Running"
    mock_pod.spec.node_name = "node-1-2-3-4"
    mock_event = PodEvent(
        type="MODIFIED",
        object=mock_pod,
        raw_object=mock.Mock(),
    )
    k8s_executor.task_metadata = pmap({
        mock_pod.metadata.name:
        KubernetesTaskMetadata(
            task_config=mock.Mock(spec=KubernetesTaskConfig),
            task_state=KubernetesTaskState.TASK_PENDING,
            task_state_history=v(),
        )
    })

    k8s_executor._process_pod_event(mock_event)

    assert k8s_executor.event_queue.qsize() == 1
    # in normal usage this would actually have 2 items, but we're obviating the initial PENDING
    # state for this test
    assert len(k8s_executor.task_metadata[
        mock_pod.metadata.name].task_state_history) == 1
Example #30
def test_process_event_enqueues_task_processing_events_no_state_transition(
    k8s_executor,
    phase,
    task_state,
):
    mock_pod = mock.Mock(spec=V1Pod)
    mock_pod.metadata.name = "test.1234"
    mock_pod.status.phase = phase
    mock_pod.status.host_ip = "1.2.3.4"
    mock_pod.spec.node_name = 'kubenode'
    mock_event = PodEvent(
        type="MODIFIED",
        object=mock_pod,
        raw_object=mock.Mock(),
    )
    k8s_executor.task_metadata = pmap({
        mock_pod.metadata.name:
        KubernetesTaskMetadata(
            task_config=mock.Mock(spec=KubernetesTaskConfig),
            task_state=task_state,
            task_state_history=v(),
        )
    })

    k8s_executor._process_pod_event(mock_event)

    assert k8s_executor.event_queue.qsize() == 0
    assert len(k8s_executor.task_metadata) == 1
    assert k8s_executor.task_metadata[
        mock_pod.metadata.name].task_state == task_state
    # in reality, this would have some entries, but we're not filling out task_state_history
    # for tests, so checking that the size is 0 is the same as checking that we didn't transition
    # to a new state
    assert len(k8s_executor.task_metadata[
        mock_pod.metadata.name].task_state_history) == 0
Example #31
def all_equivalence_classes(n, k):
    """ Generate all possible mappings of n elements into at most k equivalence 
    classes. There are $\sum_{k'=1}^{k} S_2(n, k')$ such mappings, where $S_2$ 
    is Stirling numbers of the second kind.
    https://en.wikipedia.org/wiki/Stirling_numbers_of_the_second_kind
    You could also call these truncated Bell numbers; Bell numbers go up to k=n.
    """
    # Ugly 2x faster version of _all_equivalence_classes
    # As optimized as it's going to get unless I figure out a new algorithm
    assignments = [(1, pyr.v(0))]
    range_k = range(k)
    for _ in range(1, n):
        new_assignments = []
        new_assignments_append = new_assignments.append
        for next_symbol, assignment in assignments:
            assignment_append = assignment.append
            if next_symbol < k:
                for symbol in range(next_symbol):
                    new_assignments_append(
                        (next_symbol, assignment_append(symbol)))
                new_assignments_append(
                    (next_symbol + 1, assignment_append(next_symbol)))
            else:
                for symbol in range_k:
                    new_assignments_append(
                        (next_symbol, assignment_append(symbol)))
        assignments = new_assignments
    return map(operator.itemgetter(1), assignments)
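A quick sanity check of the count, assuming the function above is importable: for n=3 and k=2 the formula gives S_2(3,1) + S_2(3,2) = 1 + 3 = 4 mappings.

classes = [tuple(assignment) for assignment in all_equivalence_classes(3, 2)]
assert len(classes) == 4
assert (0, 0, 0) in classes      # everything in one class
assert (0, 0, 1) in classes      # the three two-class partitions
assert (0, 1, 0) in classes
assert (0, 1, 1) in classes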
Example #32
def allocate_task_resources(
    task_config: MesosTaskConfig,
    offer_resources: ResourceSet,
) -> Tuple[MesosTaskConfig, ResourceSet]:
    """ Allocate a task's resources to a Mesos offer

    :param task_config: the specification for the task to allocate
    :param offer_resources: a mapping of resource name -> available resources
        (should come from :func:`get_offer_resources`)
    :returns: a pair of (`prepared_task_config`, `remaining_resources`), where
        `prepared_task_config` is the task_config object modified with the
        actual resources consumed
    """
    for res, val in offer_resources.items():
        if res not in _NUMERIC_RESOURCES:
            continue
        offer_resources = offer_resources.set(res, val - task_config[res])

    port = offer_resources.ports[0]['begin']
    if offer_resources.ports[0]['begin'] == offer_resources.ports[0]['end']:
        avail_ports = offer_resources.ports[1:]
    else:
        new_port_range = offer_resources.ports[0].set('begin', port + 1)
        avail_ports = offer_resources.ports.set(0, new_port_range)
    offer_resources = offer_resources.set('ports', avail_ports)
    task_config = task_config.set('ports', v(m(begin=port, end=port)))
    return task_config, offer_resources
Example #33
    def __init__(
        self,
        name,
        role,
        task_staging_timeout_s=240,
        pool=None,
        max_task_queue_size=1000,
        translator=mesos_status_to_event,
        slave_blacklist_timeout_s=900,
        offer_backoff=10,
        suppress_delay=10,
        initial_decline_delay=1,
        task_reconciliation_delay=300,
    ):
        self.name = name
        # wait this long for a task to launch.
        self.task_staging_timeout_s = task_staging_timeout_s
        self.pool = pool
        self.role = role
        self.translator = translator
        self.slave_blacklist_timeout_s = slave_blacklist_timeout_s
        self.offer_backoff = offer_backoff

        # TODO: why does this need to be root, can it be "mesos plz figure out"
        self.framework_info = Dict(user='******',
                                   name=self.name,
                                   checkpoint=True,
                                   role=self.role)

        self.task_queue = Queue(max_task_queue_size)
        self.event_queue = Queue(max_task_queue_size)
        self.driver = None
        self.are_offers_suppressed = False
        self.suppress_after = int(time.time()) + suppress_delay
        self.decline_after = time.time() + initial_decline_delay
        self._task_reconciliation_delay = task_reconciliation_delay
        self._reconcile_tasks_at = time.time() + \
            self._task_reconciliation_delay

        self.offer_decline_filter = Dict(refuse_seconds=self.offer_backoff)
        self._lock = threading.RLock()
        self.blacklisted_slaves = v()
        self.task_metadata = m()

        self._initialize_metrics()
        self._last_offer_time = None
        self._terminal_task_counts = {
            'TASK_FINISHED': TASK_FINISHED_COUNT,
            'TASK_LOST': TASK_LOST_COUNT,
            'TASK_KILLED': TASK_KILLED_COUNT,
            'TASK_FAILED': TASK_FAILED_COUNT,
            'TASK_ERROR': TASK_ERROR_COUNT,
        }

        self.stopping = False
        task_kill_thread = threading.Thread(target=self._background_check,
                                            args=())
        task_kill_thread.daemon = True
        task_kill_thread.start()
Example #34
def test_results_writer():
    """Does the results writer output what it should?"""
    results = m(episode=v(0, 1, 2), step_count=v(12, 22, 11))
    output_path = os.path.join(os.getcwd(), 'results.txt')
    results_descriptor = ResultsDescriptor(2, output_path,
                                           ['episode', 'step_count'])
    initialize_results(results_descriptor)
    write_results(results, results_descriptor)
    results_check = read_results(output_path)
    assert numpy.array_equal(results_check,
                             numpy.array([[0., 12.], [1., 22.], [2., 11.]]))
    new_results = m(episode=v(3), step_count=v(12.33))
    write_results(new_results, results_descriptor)
    new_results_check = read_results(output_path)
    assert numpy.array_equal(
        new_results_check,
        numpy.array([[0., 12.], [1., 22.], [2., 11.], [3., 12.33]]))
Example #35
def test_pvector_field_create_from_nested_serialized_data():
    class Foo(PRecord):
        foo = field(type=str)

    class Bar(PRecord):
        bar = pvector_field(Foo)

    data = Bar(bar=v(Foo(foo="foo")))
    assert Bar.create(data.serialize()) == data
Example #36
    def __init__(self, pipe = None, new_stack = None):
        if not pipe:
            stack = v(m())
            dump = m()
        else:
            stack = pipe.stack
            dump = pipe.dump

        if new_stack:
            stack = new_stack

        self.stack = stack
        self.dump = dump
Example #37
def test_mutant_decorator():
    @mutant
    def fn(a_list, a_dict):
        assert a_list == v(1, 2, 3)
        assert isinstance(a_dict, type(m()))
        assert a_dict == {'a': 5}

        return [1, 2, 3], {'a': 3}

    pv, pm = fn([1, 2, 3], a_dict={'a': 5})

    assert pv == v(1, 2, 3)
    assert pm == m(a=3)
    assert isinstance(pm, type(m()))
Example #38
    def test_one_day(self):
        summary_time = datetime(year=1970, month=1, day=2).timestamp()
        self.scheduler = SummaryScheduler(self.db, first_summary_time=summary_time)

        buy(self.db, [0, 1, 2])
        buy(self.db, [0, 3, 4])
        self.change_time(hours=23, minutes=59)

        sleep(1)
        self.assertEqual(self.db.snapshot().summaries, v())

        self.change_time(minutes=1)
        sleep(1)
        self.assertEqual(self.db.snapshot().summaries[0], Summary(timestamp=summary_time, money_spent=1000))
Example #39
    def test_Recorder(self):
        result = Recorder()
        runner.run(
            tests=["virtue.tests.samples.one_successful_test"],
            reporter=result,
        )
        import virtue.tests.samples.one_successful_test
        self.assertEqual(
            result, Recorder(
                successes=v(
                    virtue.tests.samples.one_successful_test.Foo("test_foo"),
                ),
            ),
        )
Example #40
def test_freeze_recurse_in_dictionary_values():
    result = freeze({'a': [1]})
    assert result == m(a=v(1))
    assert type(result['a']) is type(v())
Example #41
def test_group_with_prefix():
    g = group(Ta)
    given = Pair(v(Nb), v(Na))
    after = Pair([Nb, Node(g, [Na])], [])
    assert g(given) == after
Example #42
def test_freeze_list():
    assert freeze([1, 2]) == v(1, 2)
Example #43
def stackful(computation, initial=v()):
    return run(computation, initial)
Example #44
def test_substitution_when_module_registered_via_wild_card_match_and_function_imported_via_wildcard():
    x = []
    assert type(x) is type(v())
Example #45
def test_literalish_works():
    from pyrsistent import pvector, v
    assert v() is pvector()
    assert v(1, 2) == pvector([1, 2])
Example #46
def test_substitution_when_module_registered_via_wild_card_match():
    x = []
    assert type(x) is type(v())
Example #47
def test_list_comprehension_becomes_a_pvector():
    x = [i for i in range(2)]

    assert type(x) is type(v())
    assert x == v(0, 1)
Example #48
def test_literal_list_becomes_a_pvector():
    x = [1, 2]

    assert type(x) is type(v())
    assert x == v(1, 2)
Example #49
def test_literal_list_with_function_call_becomes_a_pvector():
    x = [1, 2].append(3)

    assert type(x) is type(v())
    assert x == v(1, 2, 3)
Example #50
def test_function_list_becomes_a_list():
    x = list()

    assert type(x) is not type(v())
Example #51
    def fn(a_list, a_dict):
        assert a_list == v(1, 2, 3)
        assert isinstance(a_dict, type(m()))
        assert a_dict == {'a': 5}

        return [1, 2, 3], {'a': 3}
Example #52
    def __init__(self, timefunc=time.time):
        self.__data = m(products=m(), transactions=v(), summaries=v())
        self.time = timefunc
Example #53
def test_thaw_recurse_in_mapping_values():
    result = thaw(m(a=v(1)))
    assert result == {'a': [1]}
    assert type(result['a']) is list
Example #54
def test_freeze_recurse_in_lists():
    result = freeze(['a', {'b': 3}])
    assert result == v('a', m(b=3))
    assert type(result[1]) is type(m())
Example #55
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""
Information about how to play the game.
"""

from pyrsistent import m, v


CARDS = v(
    'Soldier',
    'Clown',
    'Knight',
    'Priestess',
    'Wizard',
    'General',
    'Minister',
    'Prince',
)


def iter_valid_plays_for_card(card, myself, others):
    """Iterate through a list of valid plays for the given card.

    The plays are only valid within the rules of the game. They don't pay
    attention to the current state. Thus, they might guess that a player has
    the Minister even though it has already been played.

    The order of iteration is irrelevant and not guaranteed to be stable.
    """
Example #56
def test_thaw_list():
    result = thaw(v(1, 2))
    assert result == [1, 2]
    assert type(result) is list
Example #57
def test_get_in():
    # This is not an extensive test. The doctest covers that fairly well, though.
    get_in(m(a=v(1, 2, 3)), ['m', 1]) == 2
Example #58
def test_thaw_recurse_in_vectors():
    result = thaw(v('a', m(b=3)))
    assert result == ['a', {'b': 3}]
    assert type(result[1]) is dict
Example #59
def test_substitution_when_module_registered_via_custom_matcher():
    x = []
    assert type(x) is type(v())