def __init__(self, p_game_state):
        super().__init__(p_game_state)

        their_goal = self.game_state.field.their_goal_pose

        node_pass_to_second_attack = self.create_node(Role.FIRST_ATTACK,
                                                      PassToPlayer(self.game_state,
                                                                   self.assigned_roles[Role.FIRST_ATTACK],
                                                                   args=[self.assigned_roles[Role.SECOND_ATTACK].id]))

        node_pass_to_middle = self.create_node(Role.FIRST_ATTACK,
                                               PassToPlayer(self.game_state,
                                                            self.assigned_roles[Role.FIRST_ATTACK],
                                                            args=[self.assigned_roles[Role.MIDDLE].id]))

        node_go_kick = self.create_node(Role.FIRST_ATTACK, GoKick(self.game_state,
                                                                  self.assigned_roles[Role.FIRST_ATTACK],
                                                                  their_goal))

        second_attack_is_best_receiver = partial(self.is_best_receiver, Role.SECOND_ATTACK)
        middle_is_best_receiver = partial(self.is_best_receiver, Role.MIDDLE)
        current_tactic_succeeded = partial(self.current_tactic_succeed, Role.FIRST_ATTACK)

        node_pass_to_second_attack.connect_to(node_pass_to_middle, when=second_attack_is_best_receiver)
        node_pass_to_second_attack.connect_to(node_go_kick, when=middle_is_best_receiver)
        node_pass_to_second_attack.connect_to(node_pass_to_second_attack, when=current_tactic_succeeded)
        node_pass_to_middle.connect_to(node_pass_to_second_attack, when=current_tactic_succeeded)
        node_go_kick.connect_to(node_pass_to_second_attack, when=current_tactic_succeeded)

        self.create_node(Role.GOALKEEPER, GoalKeeper(self.game_state, self.assigned_roles[Role.GOALKEEPER]))
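
The `partial(self.is_best_receiver, Role.SECOND_ATTACK)` calls above pre-bind the role argument so the decision graph can later evaluate each condition without knowing which role it applies to. A minimal standalone sketch of that pattern (the names and the candidates dict are illustrative, not from the strategy code):

from functools import partial

def is_best_receiver(role, candidates):
    # toy stand-in for the real game-state check
    return candidates.get(role, 0) == max(candidates.values())

check_second = partial(is_best_receiver, 'SECOND_ATTACK')
print(check_second({'SECOND_ATTACK': 3, 'MIDDLE': 1}))  # True
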
Example #2
    def test_returns(self):
        @V.returns(int)
        def f(a):
            return a

        @V.returns(V.Type(type(None)))
        def g(a=True):
            if a:
                return a
            else:
                pass

        valid = [
            partial(f, 1),
            partial(g, False),
        ]

        invalid = [
            partial(f, 1.0),
            partial(f, 'x'),
            partial(g, True),
        ]

        for fcall in valid:
            fcall()
        for fcall in invalid:
            self.assertRaises(V.ValidationError, fcall)
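
The tests use `partial` to build zero-argument callables, which is exactly the shape `assertRaises(exc, callable)` expects. The same construct-now, call-later behaviour in isolation:

from functools import partial

def divide(a, b):
    return a / b

deferred = partial(divide, 1, 0)   # nothing raises yet
try:
    deferred()                     # the error fires only on the call
except ZeroDivisionError:
    print('raised on call, not on construction')
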
Example #3
 def widget_from_django_field(cls, f, default=widgets.Widget):
     """
     Returns the widget that would likely be associated with each
     Django type.
     """
     result = default
     internal_type = f.get_internal_type()
     if internal_type in ('ManyToManyField', ):
         result = functools.partial(widgets.ManyToManyWidget,
                 model=f.rel.to)
     if internal_type in ('ForeignKey', 'OneToOneField', ):
         result = functools.partial(widgets.ForeignKeyWidget,
                 model=f.rel.to)
     if internal_type in ('DecimalField', ):
         result = widgets.DecimalWidget
     if internal_type in ('DateTimeField', ):
         result = widgets.DateTimeWidget
     elif internal_type in ('DateField', ):
         result = widgets.DateWidget
     elif internal_type in ('IntegerField', 'PositiveIntegerField',
             'PositiveSmallIntegerField', 'SmallIntegerField', 'AutoField'):
         result = widgets.IntegerWidget
     elif internal_type in ('BooleanField', 'NullBooleanField'):
         result = widgets.BooleanWidget
     return result
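
Note how the ManyToMany and ForeignKey branches return a `functools.partial` instead of a class: since `partial(cls, model=...)` is called exactly like `cls()`, the caller can instantiate whatever comes back without caring which branch ran. A generic sketch of that class-factory trick (this `Widget` class is a stand-in, not the django-import-export one):

import functools

class Widget:
    def __init__(self, model=None):
        self.model = model

factory = functools.partial(Widget, model='Book')
w = factory()       # same call shape as plain Widget()
print(w.model)      # Book
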
Example #4
    def test_outlier_filtering(self):
        l = [11171.0, 119425.0, 270.5, 250.0, 258.5]
        df = pd.DataFrame(l, columns=["x"])
        filtering_f = partial(utils._sd_based_outlier_filtering,
                              factor=1.2)
        df2 = utils.outlier_filtering(df, filtering_col="x",
                                      filtering_f=filtering_f,
                                      is_recursive=True)
        expected = sorted(l)[:-2]
        self.assertEqual(len(df2), len(expected))
        self.assertEqual(expected[0], df2.x.min())
        self.assertEqual(expected[-1], df2.x.max())

        filtering_f = partial(utils._sd_based_outlier_filtering,
                              factor=2)
        df2 = utils.outlier_filtering(df, filtering_col="x",
                                      filtering_f=filtering_f,
                                      is_recursive=True)
        self.assertEqual(len(df2), len(l))
        self.assertEqual(np.min(l), df2.x.min())
        self.assertEqual(np.max(l), df2.x.max())

        filtering_f = lambda z: z <= 250.0
        df2 = utils.outlier_filtering(df, filtering_col="x",
                                      filtering_f=filtering_f,
                                      is_recursive=True)
        self.assertEqual(len(df2), 1)
        self.assertEqual(df2.x.min(), 250.0)
Example #5
    def _gst_init(self):
        # self._videosink will receive the buffers so we can upload them to GPU
        if PY2:
            self._videosink = gst.element_factory_make('appsink', 'videosink')
            self._videosink.set_property('caps', gst.Caps(_VIDEO_CAPS))
        else:
            self._videosink = gst.ElementFactory.make('appsink', 'videosink')
            self._videosink.set_property('caps',
                 gst.caps_from_string(_VIDEO_CAPS))

        self._videosink.set_property('async', True)
        self._videosink.set_property('drop', True)
        self._videosink.set_property('qos', True)
        self._videosink.set_property('emit-signals', True)
        self._videosink.connect('new-' + BUF_SAMPLE, partial(
            _gst_new_buffer, ref(self)))

        # playbin, takes care of all, loading, playing, etc.
        # XXX playbin2 have some issue when playing some video or streaming :/
        #self._playbin = gst.element_factory_make('playbin2', 'playbin')
        if PY2:
            self._playbin = gst.element_factory_make('playbin', 'playbin')
        else:
            self._playbin = gst.ElementFactory.make('playbin', 'playbin')
        self._playbin.set_property('video-sink', self._videosink)

        # gstreamer bus, to attach and listen to gst messages
        self._bus = self._playbin.get_bus()
        self._bus.add_signal_watch()
        self._bus.connect('message', _on_gst_message)
        self._bus.connect('message::eos', partial(
            _on_gst_eos, ref(self)))
Example #6
def exercise_handler(request, exercise, prev=None, next=None, **related_videos):
    """
    Display an exercise
    """
    lang = request.session[settings.LANGUAGE_COOKIE_NAME]
    exercise_root = os.path.join(settings.STATIC_ROOT, "js", "khan-exercises", "exercises")
    exercise_file = exercise["slug"] + ".html"
    exercise_template = exercise_file
    exercise_localized_template = os.path.join(lang, exercise_file)

    # Get the language codes for exercise templates that exist
    exercise_path = partial(lambda lang, slug, eroot: os.path.join(eroot, lang, slug + ".html"), slug=exercise["slug"], eroot=exercise_root)
    code_filter = partial(lambda lang, eroot, epath: os.path.isdir(os.path.join(eroot, lang)) and os.path.exists(epath(lang)), eroot=exercise_root, epath=exercise_path)
    available_langs = set(["en"] + [lang_code for lang_code in os.listdir(exercise_root) if code_filter(lang_code)])

    # Return the best available exercise template
    exercise_lang = select_best_available_language(request.language, available_codes=available_langs)
    if exercise_lang == "en":
        exercise_template = exercise_file
    else:
        exercise_template = exercise_path(exercise_lang)[(len(exercise_root) + 1):]

    context = {
        "exercise": exercise,
        "title": exercise["title"],
        "exercise_template": exercise_template,
        "exercise_lang": exercise_lang,
        "related_videos": [v for v in related_videos.values() if v["available"]],
        "prev": prev,
        "next": next,
    }
    return context
Example #7
    def __init__(self, *args, **kwargs):
        if not args:
            raise TypeError('__init__() takes at least 2 arguments (1 given)')
        func, args = args[0], args[1:]
        if not callable(func):
            raise TypeError("Input must be callable")

        # curry- or functools.partial-like object?  Unpack and merge arguments
        if (
            hasattr(func, 'func')
            and hasattr(func, 'args')
            and hasattr(func, 'keywords')
            and isinstance(func.args, tuple)
        ):
            _kwargs = {}
            if func.keywords:
                _kwargs.update(func.keywords)
            _kwargs.update(kwargs)
            kwargs = _kwargs
            args = func.args + args
            func = func.func

        if kwargs:
            self._partial = partial(func, *args, **kwargs)
        else:
            self._partial = partial(func, *args)

        self.__doc__ = getattr(func, '__doc__', None)
        self.__name__ = getattr(func, '__name__', '<curry>')
        self._sigspec = None
        self._has_unknown_args = None
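
The `hasattr(func, 'func')` unpacking branch mirrors what `functools.partial` itself does: in CPython (and the pure-Python fallback in functools.py), wrapping a partial in another partial merges the arguments instead of nesting wrappers. A quick check of that flattening:

from functools import partial

def f(a, b, c):
    return (a, b, c)

inner = partial(f, 1)
outer = partial(inner, 2)
print(outer.func is f)   # True: the wrappers were merged
print(outer.args)        # (1, 2)
print(outer(3))          # (1, 2, 3)
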
Example #8
 def testAgnosticUsage(self):
   """Graph/eager agnostic usage."""
   # Does create garbage when executing eagerly due to ops.Graph() creation.
   num_training_steps = 10
   checkpoint_directory = self.get_temp_dir()
   checkpoint_prefix = os.path.join(checkpoint_directory, "ckpt")
   for training_continuation in range(3):
     with ops.Graph().as_default(), self.test_session(
         graph=ops.get_default_graph()), test_util.device(use_gpu=True):
       model = MyModel()
       optimizer = adam.AdamOptimizer(0.001)
       root = util.Checkpoint(
           optimizer=optimizer, model=model,
           global_step=training_util.get_or_create_global_step())
       checkpoint_path = checkpoint_management.latest_checkpoint(
           checkpoint_directory)
       status = root.restore(save_path=checkpoint_path)
       input_value = constant_op.constant([[3.]])
       train_fn = functools.partial(
           optimizer.minimize,
           functools.partial(model, input_value),
           global_step=root.global_step)
       if not context.executing_eagerly():
         train_fn = functools.partial(self.evaluate, train_fn())
       status.initialize_or_restore()
       for _ in range(num_training_steps):
         train_fn()
       root.save(file_prefix=checkpoint_prefix)
       self.assertEqual((training_continuation + 1) * num_training_steps,
                        self.evaluate(root.global_step))
       self.assertEqual(training_continuation + 1,
                        self.evaluate(root.save_counter))
Example #9
    def test_scheduling(self):
        backend = RedisBackend()

        waiting_set_key = make_schedule_key(backend.namespace, SCHEDULE_STATE_WAITING)
        ready_set_key = make_schedule_key(backend.namespace, SCHEDULE_STATE_READY)

        n = 10

        for i in range(n):
            with backend.cluster.map() as client:
                client.zadd(waiting_set_key, i, 'timelines:{0}'.format(i))

        for i in range(n, n * 2):
            with backend.cluster.map() as client:
                client.zadd(ready_set_key, i, 'timelines:{0}'.format(i))

        get_waiting_set_size = functools.partial(get_set_size, backend.cluster, waiting_set_key)
        get_ready_set_size = functools.partial(get_set_size, backend.cluster, ready_set_key)

        with self.assertChanges(get_waiting_set_size, before=n, after=0), \
                self.assertChanges(get_ready_set_size, before=n, after=n * 2):
            results = list(zip(range(n), list(backend.schedule(n, chunk=5))))
            assert len(results) == n

            # Ensure scheduled entries are returned earliest first.
            for i, entry in results:
                assert entry.key == 'timelines:{0}'.format(i)
                assert entry.timestamp == float(i)
Example #10
  def _create_gumbel_control_variate_quadratic(self, logQHard, temperature=None):
    '''Calculate gumbel control variate.
    '''
    if temperature is None:
      temperature = self.hparams.temperature

    h = 0
    extra = []
    for layer in xrange(self.hparams.n_layer):
      logQ, softSamples = self._recognition_network(sampler=functools.partial(
          self._random_sample_switch, switch_layer=layer, temperature=temperature))
      softELBO, _ = self._generator_network(softSamples, logQ)

      # Generate the softELBO_v (should be the same value but different grads)
      logQ_v, softSamples_v = self._recognition_network(sampler=functools.partial(
          self._random_sample_switch_v, switch_layer=layer, temperature=temperature))
      softELBO_v, _ = self._generator_network(softSamples_v, logQ_v)

      # Compute losses
      learning_signal = tf.stop_gradient(softELBO_v)

      # Control variate
      h += (tf.stop_gradient(learning_signal) * logQHard[layer]
            - softELBO + softELBO_v)

      extra.append((softELBO_v, -softELBO + softELBO_v))

    return h, extra
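
Using `functools.partial(..., switch_layer=layer)` inside the loop freezes the current value of `layer` per iteration; a plain lambda would close over the loop variable and every sampler would see the final value. The difference in miniature:

from functools import partial

def sample(x, layer):
    return x + layer

late = [lambda x: sample(x, layer) for layer in range(3)]
bound = [partial(sample, layer=layer) for layer in range(3)]

print([f(0) for f in late])    # [2, 2, 2]: every lambda sees the last layer
print([f(0) for f in bound])   # [0, 1, 2]: partial froze each value
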
Example #11
def areReceivers(s, strictRfc=True, nicklen=None, chantypes='#&+!',
        channellen=50):
    """Like 'isNick(x) or isChannel(x)' but for comma-separated list."""
    nick = functools.partial(isNick, strictRfc=strictRfc, nicklen=nicklen)
    chan = functools.partial(isChannel, chantypes=chantypes,
            channellen=channellen)
    return all([nick(x) or chan(x) for x in s.split(',')])
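
`partial` freezes the keyword configuration once, so the comprehension can treat `nick` and `chan` as plain one-argument predicates. The same shape with a stand-in predicate (not supybot's real `isChannel`):

import functools

def is_channel(s, chantypes='#&+!'):
    return bool(s) and s[0] in chantypes

chan = functools.partial(is_channel, chantypes='#&')
print(all(chan(x) for x in '#dev,&ops'.split(',')))   # True
print(chan('+help'))                                  # False: '+' not in '#&'
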
Example #12
    def __init__(cls, name, bases, dct):
        super(MetaBaseReader, cls).__init__(name, bases, dct)

        format = dct.get('_format_name')
        if format is None:
            return

        fast = dct.get('_fast')
        if fast is not None:
            FAST_CLASSES[format] = cls

        FORMAT_CLASSES[format] = cls

        io_formats = ['ascii.' + format] + dct.get('_io_registry_format_aliases', [])

        if dct.get('_io_registry_suffix'):
            func = functools.partial(connect.io_identify, dct['_io_registry_suffix'])
            connect.io_registry.register_identifier(io_formats[0], Table, func)

        for io_format in io_formats:
            func = functools.partial(connect.io_read, io_format)
            connect.io_registry.register_reader(io_format, Table, func)

            if dct.get('_io_registry_can_write', True):
                func = functools.partial(connect.io_write, io_format)
                connect.io_registry.register_writer(io_format, Table, func)
Example #13
  def _create_gumbel_control_variate(self, logQHard, temperature=None):
    '''Calculate gumbel control variate.
    '''
    if temperature is None:
      temperature = self.hparams.temperature

    logQ, softSamples = self._recognition_network(sampler=functools.partial(
        self._random_sample_soft, temperature=temperature))
    softELBO, _ = self._generator_network(softSamples, logQ)
    logQ = tf.add_n(logQ)

    # Generate the softELBO_v (should be the same value but different grads)
    logQ_v, softSamples_v = self._recognition_network(sampler=functools.partial(
        self._random_sample_soft_v, temperature=temperature))
    softELBO_v, _ = self._generator_network(softSamples_v, logQ_v)
    logQ_v = tf.add_n(logQ_v)

    # Compute losses
    learning_signal = tf.stop_gradient(softELBO_v)

    # Control variate
    h = (tf.stop_gradient(learning_signal) * tf.add_n(logQHard)
          - softELBO + softELBO_v)

    extra = (softELBO_v, -softELBO + softELBO_v)

    return h, extra
Example #14
 def add_mode(self, name, mode_or_component, toggle_value=False, groups=set(), behaviour=None):
     """
     Adds a mode of the given name into the component.  The mode
     object should be a Mode or ControlSurfaceComponent instance.

     The 'toggle_value' is the light value the toggle_button will
     be set to when the component is on this mode.

     If 'groups' is not empty, the mode will be put in the groups
     identified by the passed objects.  When several modes are grouped:

       * All the buttons in the group will light up when any of the
         modes within the group is selected.

       * Any of the group buttons will cancel the current mode when
         the current mode belongs to the group.
     """
     assert name not in self._mode_map, 'mode %r is already registered' % name
     if not isinstance(groups, set):
         groups = set(groups)
     mode = tomode(mode_or_component)
     task = self._tasks.add(Task.sequence(Task.wait(Defaults.MOMENTARY_DELAY), Task.run(lambda: self._get_mode_behaviour(name).press_delayed(self, name))))
     task.kill()
     slot = self.register_slot(listener=partial(self._on_mode_button_value, name), event='value', extra_kws=dict(identify_sender=True))
     self._mode_list.append(name)
     self._mode_map[name] = _ModeEntry(mode=mode, toggle_value=toggle_value, behaviour=behaviour, subject_slot=slot, momentary_task=task, groups=groups)
     button_setter = 'set_' + name + '_button'
     if not hasattr(self, button_setter):
         setattr(self, button_setter, partial(self.set_mode_button, name))
Example #15
    def __init__(self, filename, processes=None, engine=None, cleanup=True):
        cfg = ConfigParser()
        if not os.path.exists(filename):
            raise ValueError('file not found: %r' % filename)
        parsed = cfg.read([self._defaults, filename])
        assert len(parsed) == 2

        self.config_dir = os.path.dirname(filename)

        for section in self._sections:
            getters = {
                'string': partial(self._get_string, cfg, section),
                'quoted_string': partial(self._get_quoted_string, cfg, section),
                'lst': partial(self._get_list, cfg, section),
                'boolean': partial(cfg.getboolean, section),
                'items': partial(cfg.items, section),
            }
            getattr(self, '_parse_%s' % section)(**getters)

        if processes is None:
            processes = self._get_int(cfg, 'compile', 'processes', optional=True)
        if engine is None:
            engine = self._get_string(cfg, 'compile', 'engine', optional=True)

        self.processes = processes
        self.engine = engine
        self.cleanup = cleanup
Example #16
 def load_from_string(self, string):
     if glo_passphrase_callback:
         self.key = crypto.load_privatekey(crypto.FILETYPE_PEM, string, functools.partial(glo_passphrase_callback, self, string) )
         self.m2key = M2Crypto.EVP.load_key_string(string, functools.partial(glo_passphrase_callback, self, string) )
     else:
         self.key = crypto.load_privatekey(crypto.FILETYPE_PEM, string)
         self.m2key = M2Crypto.EVP.load_key_string(string)
Example #17
    def test_moved_object(self):
        obj_a = EventPageSpeaker(
            first_name="Father",
            last_name="Christmas",
            sort_order=1,
        )

        obj_b = EventPageSpeaker(
            first_name="Father",
            last_name="Christmas",
            sort_order=5,
        )

        comparison = self.comparison_class(
            EventPageSpeaker,
            [
                partial(self.field_comparison_class, EventPageSpeaker._meta.get_field('first_name')),
                partial(self.field_comparison_class, EventPageSpeaker._meta.get_field('last_name')),
            ],
            obj_a,
            obj_b,
        )

        self.assertFalse(comparison.is_addition())
        self.assertFalse(comparison.is_deletion())
        self.assertFalse(comparison.has_changed())
        self.assertEqual(comparison.get_position_change(), 4)
        self.assertEqual(comparison.get_num_differences(), 0)
Example #18
    def request(self, *args, **kwargs):
        """Maintains the existing api for Session.request.

        Used by all of the higher level methods, e.g. Session.get.

        The background_callback param allows you to do some processing on the
        response in the background, e.g. call resp.json() so that json parsing
        happens in the background thread.
        """
        if self.session:
            func = self.session.request
        else:
            # avoid calling super to not break pickled method
            func = partial(Session.request, self)

        background_callback = kwargs.pop('background_callback', None)
        if background_callback:
            func = partial(wrap, self, func, background_callback)

        if isinstance(self.executor, ProcessPoolExecutor):
            # verify function can be pickled
            try:
                dumps(func)
            except (TypeError, PickleError):
                raise RuntimeError(PICKLE_ERROR)

        return self.executor.submit(func, *args, **kwargs)
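
The `dumps(func)` probe matters because a `ProcessPoolExecutor` has to pickle the callable it is given: a `partial` of a module-level function pickles fine, while a lambda never does. A quick demonstration:

from functools import partial
from pickle import dumps

def add(a, b):
    return a + b

dumps(partial(add, 1))        # fine: partial of a module-level function
try:
    dumps(lambda: add(1, 2))  # lambdas cannot be pickled by reference
except Exception as exc:
    print(type(exc).__name__) # PicklingError
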
Example #19
    def connect(self, channel_id, bot_id, silent=False):
        """Connect to a Beam chat through a websocket."""

        self.connection_information = {
            "channel_id": channel_id,
            "bot_id": bot_id,
            "silent": silent
        }

        chat = self.get_chat(channel_id)

        self.servers = chat["endpoints"]
        self.server_offset = 0

        authkey = chat["authkey"]

        self.logger.debug("Connecting to: {server}.".format(
            server=self.servers[self.server_offset]))

        websocket_connection = websocket_connect(
            self.servers[self.server_offset])

        if silent:
            websocket_connection.add_done_callback(
                partial(self.authenticate, channel_id))
        else:
            websocket_connection.add_done_callback(
                partial(self.authenticate, channel_id, bot_id, authkey))
Example #20
 def post(self):
     recipe = self.backend.get_recipe(int(self.get_argument("recipe")))
     if not recipe:
         raise tornado.web.HTTPError(404)
     if recipe["photo"] and recipe["author_id"] != self.current_user["id"]:
         raise tornado.web.HTTPError(403)
     full = images.resize_image(self.request.files.values()[0][0]["body"], max_width=800, max_height=800, quality=85)
     thumb = images.resize_image(
         self.request.files.values()[0][0]["body"], max_width=300, max_height=800, quality=85
     )
     resized = {"full": full, "thumb": thumb}
     if full["width"] < 300 or full["height"] < 300:
         self.set_error_message("Recipe images must be at least 300 pixels wide and " "300 pixels tall.")
         self.redirect(self.reverse_url("recipe", recipe["slug"]))
         return
     thumb["uploaded"] = False
     full["uploaded"] = False
     self.backend.s3.put_cdn_content(
         data=thumb["data"],
         mime_type=thumb["mime_type"],
         callback=functools.partial(self.on_upload, "thumb", recipe, resized),
     )
     self.backend.s3.put_cdn_content(
         data=full["data"],
         mime_type=full["mime_type"],
         callback=functools.partial(self.on_upload, "full", recipe, resized),
     )
Example #21
def MakeHandlerClassExpr():

    # The expressions are checked in the given order, so the order must not be changed.
    allParameters = StringParm | ArrayParm | SimpleParm

    body = MakeListExpr(allParameters)
    handlerParam = KeywordHandlerParams + body("handlerBody") + Semicolon
    handlerParam.setFailAction(functools.partial(FailFunc, expected=StringHandlerParams))

    body = MakeListExpr(allParameters)
    addParam = KeywordAddHandlerParams + body("addBody") + Semicolon
    addParam.setFailAction(functools.partial(FailFunc, expected=StringAddHandlerParams))

    # The expressions can be given in any order; in addition, addParam is optional, if there are
    # no parameters to this function.
    allBody =  pyparsing.Optional(handlerParam) & pyparsing.Optional(addParam)

    all = ( pyparsing.Optional(pyparsing.cStyleComment)("comment")
            + KeywordHandler
            + Identifier("handlerType")
            + "{"
            + allBody
            + "}"
            + Semicolon )
    all.setParseAction(ProcessHandlerClassFunc)

    all.setFailAction(functools.partial(FailFunc, expected=StringHandler))
    return all
Example #22
def get_stream_type(env, args):
    """Pick the right stream type based on `env` and `args`.
    Wrap it in a partial with the type-specific args so that
    we don't need to think what stream we are dealing with.

    """
    if not env.stdout_isatty and not args.prettify:
        Stream = partial(
            RawStream,
            chunk_size=RawStream.CHUNK_SIZE_BY_LINE
            if args.stream
            else RawStream.CHUNK_SIZE
        )
    elif args.prettify:
        Stream = partial(
            PrettyStream if args.stream else BufferedPrettyStream,
            env=env,
            conversion=Conversion(),
            formatting=Formatting(env=env, groups=args.prettify,
                                  color_scheme=args.style),
        )
    else:
        Stream = partial(EncodedStream, env=env)

    return Stream
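
Because every branch returns a `partial` with its type-specific options pre-bound, the caller can instantiate whichever stream type came back with one uniform call. A self-contained sketch of that call shape (this `EncodedStream` is a stand-in, not HTTPie's):

from functools import partial

class EncodedStream:
    def __init__(self, msg, env=None):
        self.msg, self.env = msg, env

Stream = partial(EncodedStream, env='tty')  # the branch froze the config
stream = Stream('hello')                    # only the per-call arg remains
print(stream.msg, stream.env)               # hello tty
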
Example #23
def preview_module_system(request, preview_id, descriptor):
    """
    Returns a ModuleSystem for the specified descriptor that is specialized for
    rendering module previews.

    request: The active django request
    preview_id (str): An identifier specifying which preview this module is used for
    descriptor: An XModuleDescriptor
    """

    def preview_model_data(descriptor):
        "Helper method to create a DbModel from a descriptor"
        return DbModel(
            SessionKeyValueStore(request, descriptor._model_data),
            descriptor.module_class,
            preview_id,
            MongoUsage(preview_id, descriptor.location.url()),
        )

    course_id = get_course_for_item(descriptor.location).location.course_id

    return ModuleSystem(
        ajax_url=reverse('preview_dispatch', args=[preview_id, descriptor.location.url(), '']).rstrip('/'),
        # TODO (cpennington): Do we want to track how instructors are using the preview problems?
        track_function=lambda event_type, event: None,
        filestore=descriptor.system.resources_fs,
        get_module=partial(load_preview_module, request, preview_id),
        render_template=render_from_lms,
        debug=True,
        replace_urls=partial(static_replace.replace_static_urls, data_directory=None, course_id=course_id),
        user=request.user,
        xblock_model_data=preview_model_data,
        can_execute_unsafe_code=(lambda: can_execute_unsafe_code(course_id)),
    )
Example #24
    def get_serializer_class(self):
        """
        Return the class to use for the serializer.
        Defaults to using `self.serializer_class`.

        You may want to override this if you need to provide different
        serializations depending on the incoming request.

        (Eg. admins get full serialization, others get basic serialization)
        """

        serializer_options = getattr(self, 'serializer_options', {})
        serializer_class = self.serializer_class
        if serializer_class is not None:
            return partial(serializer_class, **serializer_options)

        assert self.model is not None, \
            "'%s' should either include a 'serializer_class' attribute, " \
            "or use the 'model' attribute as a shortcut for " \
            "automatically generating a serializer class." \
            % self.__class__.__name__

        class DefaultSerializer(self.model_serializer_class):
            class Meta:
                model = self.model
        return partial(DefaultSerializer, **serializer_options)
Example #25
def Prepare(benchmark_spec):
  """Prepare the virtual machines to run YCSB against Aerospike.

  Args:
    benchmark_spec: The benchmark specification. Contains all data that is
        required to run the benchmark.
  """
  vms = benchmark_spec.vms
  by_role = _GetVMsByRole(benchmark_spec.vms)

  loaders = by_role['loaders']
  assert loaders, vms

  # Aerospike cluster
  aerospike_vms = by_role['aerospike_vms']
  assert aerospike_vms, 'No aerospike VMs: {0}'.format(by_role)

  seed_ips = [vm.internal_ip for vm in aerospike_vms]
  aerospike_install_fns = [functools.partial(aerospike_server.ConfigureAndStart,
                                             vm, seed_node_ips=seed_ips)
                           for vm in aerospike_vms]
  ycsb_install_fns = [functools.partial(vm.Install, 'ycsb')
                      for vm in loaders]

  vm_util.RunThreaded(lambda f: f(), aerospike_install_fns + ycsb_install_fns)
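
Packaging each install step as a fully bound, zero-argument `partial` lets a generic runner such as `RunThreaded(lambda f: f(), ...)` execute them without knowing their signatures. The same idea in miniature (names are illustrative):

import functools

def configure(vm, seed_node_ips=None):
    print('configuring %s with seeds %s' % (vm, seed_node_ips))

fns = [functools.partial(configure, vm, seed_node_ips=['10.0.0.1'])
       for vm in ('vm-a', 'vm-b')]
for f in fns:   # sequential stand-in for vm_util.RunThreaded(lambda f: f(), fns)
    f()
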
Example #26
def run(world,title,heuristics,timeout=7000):
    solutions = []
    queue = []
    world0 = copy.deepcopy(world)
    #print world0
    world0.title=(title + '_DFS')
    heappush(queue,(0,world0))
    
    print("Doing Depth First Search  on {}:".format(title))
    solutions.append(graphsearch(queue, depthFirst, timeout))

    queue = []
    world1 = copy.deepcopy(world)
    world1.title=(title + '_BFS')
    heappush(queue,(0,world1))
    print("Doing Breadth First Search on {}:".format(title))
    solutions.append(graphsearch(queue, breadthFirst, timeout))
    for h in heuristics:
        queue, hname = [], str(h).split(' ')[1]
        world2 = copy.deepcopy(world)
        world2.title=(title + hname + '_BestFirst')
        heappush(queue,(0,world2))
        bestFirst_h = functools.partial(bestFirst,heuristic=h)
        print("Doing Best First with heuristic {} on {}:".format(hname,title))
        solutions.append(graphsearch(queue, bestFirst_h, timeout))

        queue = []
        world4 = copy.deepcopy(world)
        world4.title=(title + hname + '_Astar')
        heappush(queue,(0,world4))
        aStar_h = functools.partial(aStar,heuristic=h)
        print("Doing A* with heuristic {} on {}:".format(hname,title))
        solutions.append(graphsearch(queue, aStar_h, timeout))
Example #27
    def handle(self, *args, **options):
        _fields_to_update = ["Joomla Paid Count"]
        ereceipts = EReceiptsInterface(settings.ERECEIPTS_USER, settings.ERECEIPTS_PASSWORD)
        ereceipts.authenticate()

        if options['start_date']:
            start_date = DateDeux.fromisodate(options['start_date'])
            end_date = DateDeux.fromisodate(options['end_date'])
        else:
            start_date = end_date = DateDeux.today() - 1

        print("Using dates %s and %s" % (start_date, end_date))

        field_list = get_configuration("SYNC_ERECEIPTS_FIELDS")
        tally_code_key = get_configuration("SYNC_ERECEIPTS_TALLYCODE_JSONSTRING")
        program_master_abbreviations = get_program_master_abbreviations()
        _validator = partial(validate_tally_code, tally_code_key=tally_code_key, 
                            abbreviations=program_master_abbreviations)
        _translator = partial(translate_receipt, tally_code_key=tally_code_key)
        _updator = partial(update_schedule, tally_code_key=tally_code_key)                          

        for each_collection, each_report in [x.split("|") for x in get_configuration("SYNC_ERECEIPTS_COLLECTIONS").split("\r\n")]:
            receipts_list = ereceipts.get_receipts(start_date, end_date, each_collection, each_report)
            receipts_list_parsed = get_receipts_collection(receipts_list, field_list, tally_code_key)
            final_receipts_list = tuple(map(_translator, filter(_validator, receipts_list_parsed)))
            print(final_receipts_list)
            
            for receipt in final_receipts_list:
                #print(receipt)
                _result = _updator(receipt)
Example #28
def parse(functions, stack, snippet, path='', template=None):
    recurse = functools.partial(parse, functions, stack, template=template)

    if isinstance(snippet, collections.Mapping):
        def mkpath(key):
            return '.'.join([path, six.text_type(key)])

        if len(snippet) == 1:
            fn_name, args = next(six.iteritems(snippet))
            Func = functions.get(fn_name)
            if Func is not None:
                try:
                    path = '.'.join([path, fn_name])
                    if issubclass(Func, function.Macro):
                        return Func(stack, fn_name, args,
                                    functools.partial(recurse, path=path),
                                    template)
                    else:
                        return Func(stack, fn_name, recurse(args, path))
                except (ValueError, TypeError, KeyError) as e:
                    raise exception.StackValidationFailed(
                        path=path,
                        message=six.text_type(e))

        return dict((k, recurse(v, mkpath(k)))
                    for k, v in six.iteritems(snippet))
    elif (not isinstance(snippet, six.string_types) and
          isinstance(snippet, collections.Iterable)):

        def mkpath(idx):
            return ''.join([path, '[%d]' % idx])

        return [recurse(v, mkpath(i)) for i, v in enumerate(snippet)]
    else:
        return snippet
Example #29
    def do_prepare (self, args):
        super (JagsatApp, self).do_prepare (args)

        args.add ('m',  'map',        self._arg_map)
        args.add ('s',  'state',      self._arg_state)
        args.add (None, 'ratio-hack', self._arg_rhack)
        args.add (None, 'music-on', OptionConfFlag (
            GlobalConf ().child ('global-music'), True))
        args.add (None, 'music-off', OptionConfFlag (
            GlobalConf ().child ('global-music'), False))
        
        self.add_state ('sandbox',          Sandbox)
        self.add_state ('root',             RootState)
        self.add_state ('game',             GameState)
        self.add_state ('init_game',        InitGameState)
        self.add_state ('game_round',       GameRoundState)
        self.add_state ('ingame_menu',      IngameMenuState)
        self.add_state ('main_menu',        MainMenuState)
        self.add_state ('reinforce',        ReinforcementState)
        self.add_state ('attack',           AttackState)
        self.add_state ('move',             MovementState)
        self.add_state ('risk_attack',      RiskAttackState)
        self.add_state ('message',          RootMessageState)
        self.add_state ('dialog',           RootDialogState)
        self.add_state ('yes_no_dialog',    RootYesNoDialogState)
        self.add_state ('input_dialog',     RootInputDialogState)

        self.add_state ('test_reinforce', partial (GameState,
                                                   test_phase='reinforce'))
        self.add_state ('test_attack',    partial (GameState,
                                                   test_phase='attack'))
        self.add_state ('test_move',      partial (GameState,
                                                   test_phase='move'))
Example #30
 def send_message(self, message, callback, checking_master=False):
     """ send a message over the wire; callback=None indicates a safe=False call where we write and forget about it"""
     
     self.usage_count += 1
     # TODO: handle reconnect
     if self.__callback is not None:
         raise ProgrammingError('connection already in use')
     
     if not self.__alive:
         if self.__autoreconnect:
             logging.warn('connection lost, reconnecting')
             self.__connect(functools.partial(Connection.send_message,
                 message=message, callback=callback))
             return
         else:
             raise InterfaceError('connection invalid. autoreconnect=False')
     
     self.__callback=callback
     # __request_id used by get_more()
     (self.__request_id, data) = message
     # logging.info('request id %d writing %r' % (self.__request_id, data))
     try:
         self.__stream.write(data)
         if callback:
             self.__stream.read_bytes(16, callback=functools.partial(self._parse_header, checking_master))
         else:
             self.__request_id = None
             self.__pool.cache(self)
             
     except IOError:
         self.__alive = False
         raise
Example #31
import requests
from functools import partial
from pytest_bdd import scenario, given, when, then, parsers, scenarios
from qa.settings import PAGES_DICT
from qa.tests.environment import context, client


# Set file path here in case you have multiple scenarios.
# Part of path set in pytest.ini.
scenario = partial(scenario, 'features/requests_example.feature')


@scenario('Requests goes to expected page')
def test_requests():
    pass


@given(parsers.parse("I get {page_name} using requests"))
def get(context, client, page_name):
    context.current_url = context.human_readable_pages(page_name, context.host)
    context.response = client.get(context.current_url)
    return context.response


@then('the response should be successful')
def foo_is_foo(context, page_name):
    assert context.response.status_code == requests.codes.ok, \
        'Unexpectedly got a %d response code' % context.response.status_code
Example #32
def run(schematization):
    """Console script for sandbox_fm"""
    click.echo("Make sure you start the SARndbox first")

    # calibration info
    data = {}
    with open('calibration.json') as f:
        calibration = json.load(f)
    data.update(calibration)
    with open('config.json') as f:
        configuration = json.load(f)

    data.update(configuration)

    # model
    model = bmi.wrapper.BMIWrapper('dflowfm')
    # initialize model schematization, changes directory
    model.initialize(str(pathlib.Path(schematization.name).absolute()))
    update_delft3d_initial_vars(data, model)
    dt = model.get_time_step()

    # compute the model bounding box that is shown on the screen
    model_bbox = matplotlib.path.Path(data['model_points'])
    # create an index to see which points/cells are visualized
    data['node_in_box'] = model_bbox.contains_points(np.c_[data['xk'],
                                                           data['yk']])
    data['cell_in_box'] = model_bbox.contains_points(np.c_[data['xzw'],
                                                           data['yzw']])

    img_bbox = matplotlib.path.Path([(40, 40), (40, 480), (600, 480),
                                     (600, 40)])
    xzw_box, yzw_box = transform(data['xzw'], data['yzw'], data['model2box'])
    xk_box, yk_box = transform(data['xk'], data['yk'], data['model2box'])
    print(xzw_box.min(), xzw_box.max())

    # for transformed coordinates see if they are on the screen
    data['cell_in_img_bbox'] = img_bbox.contains_points(np.c_[xzw_box,
                                                              yzw_box])
    data['node_in_img_bbox'] = img_bbox.contains_points(np.c_[xk_box, yk_box])

    if data.get('debug'):
        plt.scatter(data['xzw'],
                    data['yzw'],
                    c=data['cell_in_img_bbox'],
                    edgecolor='none')
        plt.show()
        plt.scatter(data['xzw'],
                    data['yzw'],
                    c=data['cell_in_box'],
                    edgecolor='none')
        plt.show()

    # images
    heights = calibrated_height_images(calibration["z_values"],
                                       calibration["z"])
    videos = video_images()
    # load model library
    height = next(heights)
    video = next(videos)

    data['height'] = height
    data['video'] = video

    vis = Visualization()
    update_delft3d_vars(data, model)
    vis.initialize(data)

    vis.subscribers.append(
        # fill in the data parameter and subscribe to events
        functools.partial(process_events, data=data, model=model, vis=vis))

    # start model and run for a bit
    for i in range(10):
        model.update(dt)

    for i, (video, height) in enumerate(tqdm.tqdm(zip(videos, heights))):
        update_delft3d_vars(data, model)
        # update kinect
        data['height'] = height
        data['video'] = video

        # update visualization
        vis.update(data)
        # update model
        tic = time.time()
        model.update(dt)
        toc = time.time()
        print(toc - tic)
Example #33
import sys
import importlib
import time
from functools import partial

import click
import redis
from redis import Redis
from redis.sentinel import Sentinel
from rq.defaults import (DEFAULT_CONNECTION_CLASS, DEFAULT_JOB_CLASS,
                         DEFAULT_QUEUE_CLASS, DEFAULT_WORKER_CLASS)
from rq.logutils import setup_loghandlers
from rq.utils import import_attribute
from rq.worker import WorkerStatus

red = partial(click.style, fg='red')
green = partial(click.style, fg='green')
yellow = partial(click.style, fg='yellow')


def read_config_file(module):
    """Reads all UPPERCASE variables defined in the given module file."""
    settings = importlib.import_module(module)
    return dict([(k, v) for k, v in settings.__dict__.items()
                 if k.upper() == k])


def get_redis_from_config(settings, connection_class=Redis):
    """Returns a StrictRedis instance from a dictionary of settings.
       To use redis sentinel, you must specify a dictionary in the configuration file.
       Example of a dictionary with keys without values:
Example #34
 def start_supervisor(self):
     self.supervisor_thread = threading.Thread(target=self.supervisor.run)
     self.supervisor_thread.start()
     self.wait_for(
         functools.partial(self.supervisor.serving_process.started,
                           count=1))
Example #35
 def scan_text_edit(self, parent):
     parent.addButton(':icons/microphone.png', partial(self._recv, parent),
                      _("Read from microphone"))
Example #36
 def settings_widget(self, window):
     return EnterButton(_('Settings'), partial(self.settings_dialog,
                                               window))
Example #37
    def setupUi(self, Dialog3b, num_dimensions):
        _translate = QtCore.QCoreApplication.translate
        Dialog3b.setObjectName("Dialog3b")
        Dialog3b.resize(300,50*num_dimensions)
        Dialog3b.setWindowTitle(_translate("Dialog3b", "Coordinate and Bounds Initialization"))

        self.gridLayout = QtWidgets.QGridLayout(Dialog3b)
        self.gridLayout.setObjectName("gridLayout")
        self.num_dimensions = num_dimensions
        self.bounds_flag = True

        self.coords = []
        self.min = []
        self.max = []

        self.init_label = QtWidgets.QLabel(Dialog3b)
        self.init_label.setObjectName("init_label")
        self.gridLayout.addWidget(self.init_label, 0, 1, 1, 1)
        self.init_label.setText(_translate("Dialog3b", "Initial Value"))

        self.min_label = QtWidgets.QLabel(Dialog3b)
        self.min_label.setObjectName("min_label")
        self.gridLayout.addWidget(self.min_label, 0, 2, 1, 1)
        self.min_label.setText(_translate("Dialog3b", "Minimum Value"))

        self.max_label = QtWidgets.QLabel(Dialog3b)
        self.max_label.setObjectName("max_label")
        self.gridLayout.addWidget(self.max_label, 0, 3, 1, 1)
        self.max_label.setText(_translate("Dialog3b", "Maximum Value"))

        for i in range(num_dimensions):
            gen_label = QtWidgets.QLabel(self)
            gen_label.setText("Coordinate %s"%i)
            gen_init_input = QtWidgets.QLineEdit(self)
            gen_init_input.setText("0.0") # Default value
            gen_min_input = QtWidgets.QLineEdit(self)
            gen_min_input.setText("-10.0") # Default value
            gen_max_input = QtWidgets.QLineEdit(self)
            gen_max_input.setText("10.0") # Default value

            self.coords.append({"coord_value":gen_init_input})
            self.min.append({"min_value":gen_min_input})
            self.max.append({"max_value":gen_max_input})

            self.gridLayout.addWidget(gen_label, 1+i, 0)
            self.gridLayout.addWidget(gen_init_input, 1+i, 1)
            self.gridLayout.addWidget(gen_min_input, 1+i, 2)
            self.gridLayout.addWidget(gen_max_input, 1+i, 3)

        self.OK_button = QtWidgets.QPushButton(Dialog3b)
        self.OK_button.setObjectName("OK_button")
        self.OK_button.clicked.connect(partial(report_init_coords,self))
        self.gridLayout.addWidget(self.OK_button, num_dimensions+1, 1, 1, 1) # fix positioning
        self.OK_button.setText(_translate("Dialog3b", "OK"))

        self.exit_button = QtWidgets.QPushButton(Dialog3b)
        self.exit_button.setObjectName("exit_button")
        self.exit_button.clicked.connect(Dialog3b.close)
        self.gridLayout.addWidget(self.exit_button, num_dimensions+1, 2, 1, 1) # fix positioning
        self.exit_button.setText(_translate("Dialog3b", "Exit"))
        self.exit_button.setEnabled(False)
Example #38
def _likelihood_partial(num_pos, num_neg, CyberNet, s0, T, truenet=None,  directsamps=1000):
    return partial(get_likelihoods, num_pos=num_pos, num_neg=num_neg, CyberNet=CyberNet, s0=s0,
                   T=T, truenet=truenet, directsamps=directsamps)
Example #39
    def setup_left_panel(self):
        """Setup the UI for left panel.

        Generate all exposure, combobox, and edit button.
        """
        hazard = self.parent.step_kw_subcategory.selected_subcategory()
        left_panel_heading = QLabel(tr('Classifications'))
        left_panel_heading.setFont(big_font)
        self.left_layout.addWidget(left_panel_heading)

        inner_left_layout = QGridLayout()

        row = 0
        for exposure in exposure_all:
            special_case = False
            if not setting('developer_mode'):
                # Filter out unsupported exposure for the hazard
                if exposure in hazard['disabled_exposures']:
                    # Remove from the storage if the exposure is disabled
                    if self.layer_mode == layer_mode_continuous:
                        if exposure['key'] in self.thresholds:
                            self.thresholds.pop(exposure['key'])
                    else:
                        if exposure['key'] in self.value_maps:
                            self.value_maps.pop(exposure['key'])
                    continue
            # Trick for EQ raster for population #3853
            if exposure == exposure_population and hazard == hazard_earthquake:
                if is_raster_layer(self.parent.layer):
                    if self.layer_mode == layer_mode_continuous:
                        self.use_default_thresholds = True
                        special_case = True
                        # Set classification for EQ Raster for Population
                        self.thresholds[exposure_population['key']] = {
                            earthquake_mmi_scale['key']: {
                                'classes':
                                default_classification_thresholds(
                                    earthquake_mmi_scale),
                                'active':
                                True
                            }
                        }

            # Add label
            # Hazard on Exposure Classifications
            label = tr(
                '{hazard_name} on {exposure_name} Classifications').format(
                    hazard_name=hazard['name'], exposure_name=exposure['name'])
            exposure_label = QLabel(label)

            # Add combo box
            exposure_combo_box = QComboBox()
            hazard_classifications = hazard.get('classifications')
            exposure_combo_box.addItem(tr('No classifications'))
            exposure_combo_box.setItemData(0, None, Qt.UserRole)

            current_index = 0
            i = 0
            # Iterate through all available hazard classifications
            for hazard_classification in hazard_classifications:
                # Skip if the classification is not for the exposure
                if 'exposures' in hazard_classification:
                    if exposure not in hazard_classification['exposures']:
                        continue
                exposure_combo_box.addItem(hazard_classification['name'])
                exposure_combo_box.setItemData(i + 1, hazard_classification,
                                               Qt.UserRole)
                if self.layer_mode == layer_mode_continuous:
                    current_hazard_classifications = self.thresholds.get(
                        exposure['key'])
                else:
                    current_hazard_classifications = self.value_maps.get(
                        exposure['key'])
                if current_hazard_classifications:
                    current_hazard_classification = \
                        current_hazard_classifications.get(
                            hazard_classification['key'])
                    if current_hazard_classification:
                        is_active = current_hazard_classification.get('active')
                        if is_active:
                            current_index = i + 1
                i += 1
            # Set current classification
            exposure_combo_box.setCurrentIndex(current_index)

            # Add edit button
            exposure_edit_button = QPushButton(tr('Edit'))

            # For special case. Raster EQ on Population.
            if special_case:
                mmi_index = exposure_combo_box.findText(
                    earthquake_mmi_scale['name'])
                exposure_combo_box.setCurrentIndex(mmi_index)
                exposure_combo_box.setEnabled(False)
                exposure_edit_button.setEnabled(False)
                tool_tip_message = tr(
                    'InaSAFE use default classification for Raster Earthquake '
                    'hazard on population.')
                exposure_label.setToolTip(tool_tip_message)
                exposure_combo_box.setToolTip(tool_tip_message)
                exposure_edit_button.setToolTip(tool_tip_message)

            else:
                if current_index == 0:
                    # Disable if there is no classification chosen.
                    exposure_edit_button.setEnabled(False)
                exposure_edit_button.clicked.connect(
                    partial(self.edit_button_clicked,
                            edit_button=exposure_edit_button,
                            exposure_combo_box=exposure_combo_box,
                            exposure=exposure))
                exposure_combo_box.currentIndexChanged.connect(
                    partial(self.classifications_combo_box_changed,
                            exposure=exposure,
                            exposure_combo_box=exposure_combo_box,
                            edit_button=exposure_edit_button))

            # Arrange in layout
            inner_left_layout.addWidget(exposure_label, row, 0)
            inner_left_layout.addWidget(exposure_combo_box, row, 1)
            inner_left_layout.addWidget(exposure_edit_button, row, 2)

            # Adding to step's attribute
            self.exposures.append(exposure)
            self.exposure_combo_boxes.append(exposure_combo_box)
            self.exposure_edit_buttons.append(exposure_edit_button)
            self.exposure_labels.append(label)
            if special_case:
                self.special_case_index = len(self.exposures) - 1

            row += 1

        self.left_layout.addLayout(inner_left_layout)
        # To push the inner_left_layout up
        self.left_layout.addStretch(1)
Example #40
def main():
    hamiltonian0 = functools.partial(hamiltonian, N=20, M=0, t1=1, t2=0.03, phi=pi/2)  # use a partial function to fix some of the parameters
    k = np.linspace(0, 2*pi, 300)
    plot_bands_one_dimension(k, hamiltonian0)
Example #41

import pytest
import functools
from io import BytesIO
from datetime import date, time
from azure.core.exceptions import ClientAuthenticationError, ServiceRequestError, HttpResponseError
from azure.core.credentials import AzureKeyCredential
from azure.ai.formrecognizer._generated.models import AnalyzeOperationResult
from azure.ai.formrecognizer._response_handlers import prepare_prebuilt_models
from azure.ai.formrecognizer import FormRecognizerClient, FormContentType, FormRecognizerApiVersion
from testcase import FormRecognizerTest
from preparers import GlobalClientPreparer as _GlobalClientPreparer
from preparers import FormRecognizerPreparer

GlobalClientPreparer = functools.partial(_GlobalClientPreparer,
                                         FormRecognizerClient)


class TestIdDocumentsFromUrl(FormRecognizerTest):
    @FormRecognizerPreparer()
    def test_polling_interval(self, formrecognizer_test_endpoint,
                              formrecognizer_test_api_key):
        client = FormRecognizerClient(
            formrecognizer_test_endpoint,
            AzureKeyCredential(formrecognizer_test_api_key),
            polling_interval=7)
        self.assertEqual(client._client._config.polling_interval, 7)

        poller = client.begin_recognize_id_documents_from_url(
            self.id_document_url_jpg, polling_interval=6)
        poller.wait()
Example #42
    def setup_thresholds_panel(self, classification):
        """Setup threshold panel in the right panel.

        :param classification: Classification definition.
        :type classification: dict
        """
        # Set text in the label
        layer_purpose = self.parent.step_kw_purpose.selected_purpose()
        layer_subcategory = self.parent.step_kw_subcategory.\
            selected_subcategory()

        if is_raster_layer(self.parent.layer):
            active_band = self.parent.step_kw_band_selector.selected_band()
            layer_extent = self.parent.layer.extent()
            statistics = self.parent.layer.dataProvider().bandStatistics(
                active_band, QgsRasterBandStats.All, layer_extent, 0)
            description_text = continuous_raster_question % (
                layer_purpose['name'], layer_subcategory['name'],
                classification['name'], statistics.minimumValue,
                statistics.maximumValue)
        else:
            field_name = self.parent.step_kw_field.selected_fields()
            field_index = self.parent.layer.fields().lookupField(field_name)
            min_value_layer = self.parent.layer.minimumValue(field_index)
            max_value_layer = self.parent.layer.maximumValue(field_index)
            description_text = continuous_vector_question % (
                layer_purpose['name'], layer_subcategory['name'], field_name,
                classification['name'], min_value_layer, max_value_layer)

        # Set description
        description_label = QLabel(description_text)
        description_label.setWordWrap(True)
        self.right_layout.addWidget(description_label)

        if self.thresholds:
            thresholds = self.thresholds
        else:
            thresholds = self.parent.get_existing_keyword('thresholds')
        selected_unit = self.parent.step_kw_unit.selected_unit()['key']

        self.threshold_classes = OrderedDict()
        classes = classification.get('classes')
        # Sort by value, put the lowest first
        classes = sorted(classes, key=lambda the_key: the_key['value'])

        grid_layout_thresholds = QGridLayout()

        for i, the_class in enumerate(classes):
            class_layout = QHBoxLayout()

            # Class label
            class_label = QLabel(the_class['name'])

            # Min label
            min_label = QLabel(tr('Min >'))

            # Min value as double spin
            min_value_input = QDoubleSpinBox()
            # TODO(IS) We can set the min and max depends on the unit, later
            min_value_input.setMinimum(0)
            min_value_input.setMaximum(999999)

            if thresholds.get(self.active_exposure['key']):
                exposure_thresholds = thresholds.get(
                    self.active_exposure['key'])
                if exposure_thresholds.get(classification['key']):
                    exposure_thresholds_classifications = exposure_thresholds\
                        .get(classification['key'])
                    min_value_input.setValue(
                        exposure_thresholds_classifications['classes'][
                            the_class['key']][0])
                else:
                    default_min = the_class['numeric_default_min']
                    if isinstance(default_min, dict):
                        default_min = the_class['numeric_default_min'][
                            selected_unit]
                    min_value_input.setValue(default_min)
            else:
                default_min = the_class['numeric_default_min']
                if isinstance(default_min, dict):
                    default_min = the_class['numeric_default_min'][
                        selected_unit]
                min_value_input.setValue(default_min)
            min_value_input.setSingleStep(0.1)

            # Max label
            max_label = QLabel(tr('Max <='))

            # Max value as double spin
            max_value_input = QDoubleSpinBox()
            # TODO(IS) We could set the min and max depending on the unit later
            max_value_input.setMinimum(0)
            max_value_input.setMaximum(999999)
            exposure_thresholds = thresholds.get(
                self.active_exposure['key']) or {}
            classification_thresholds = exposure_thresholds.get(
                classification['key'])
            if classification_thresholds:
                max_value_input.setValue(
                    classification_thresholds['classes'][
                        the_class['key']][1])
            else:
                default_max = the_class['numeric_default_max']
                if isinstance(default_max, dict):
                    default_max = default_max[selected_unit]
                max_value_input.setValue(default_max)
            max_value_input.setSingleStep(0.1)

            # Add to class_layout
            class_layout.addWidget(min_label)
            class_layout.addWidget(min_value_input)
            class_layout.addWidget(max_label)
            class_layout.addWidget(max_value_input)

            class_layout.setStretch(0, 1)
            class_layout.setStretch(1, 2)
            class_layout.setStretch(2, 1)
            class_layout.setStretch(3, 2)

            # Add to grid_layout
            grid_layout_thresholds.addWidget(class_label, i, 0)
            grid_layout_thresholds.addLayout(class_layout, i, 1)

            self.threshold_classes[the_class['key']] = [
                min_value_input, max_value_input
            ]

        grid_layout_thresholds.setColumnStretch(0, 1)
        grid_layout_thresholds.setColumnStretch(1, 2)

        def min_max_changed(double_spin_index, mode):
            """Slot when min or max value change.

            :param double_spin_index: The index of the double spin.
            :type double_spin_index: int

            :param mode: The flag to indicate the min or max value.
            :type mode: int
            """
            values = list(self.threshold_classes.values())
            if mode == MAX_VALUE_MODE:
                current_max_value = values[double_spin_index][1]
                target_min_value = values[double_spin_index + 1][0]
                if current_max_value.value() != target_min_value.value():
                    target_min_value.setValue(current_max_value.value())
            elif mode == MIN_VALUE_MODE:
                current_min_value = values[double_spin_index][0]
                target_max_value = values[double_spin_index - 1][1]
                if current_min_value.value() != target_max_value.value():
                    target_max_value.setValue(current_min_value.value())

        # Set behaviour
        for k, v in list(self.threshold_classes.items()):
            index = list(self.threshold_classes.keys()).index(k)
            if index < len(self.threshold_classes) - 1:
                # Max value changed
                v[1].valueChanged.connect(
                    partial(min_max_changed,
                            double_spin_index=index,
                            mode=MAX_VALUE_MODE))
            if index > 0:
                # Min value changed
                v[0].valueChanged.connect(
                    partial(min_max_changed,
                            double_spin_index=index,
                            mode=MIN_VALUE_MODE))

        grid_layout_thresholds.setSpacing(0)

        self.right_layout.addLayout(grid_layout_thresholds)
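
# A minimal Qt-free sketch of the keyword-binding idiom used above: partial
# freezes double_spin_index and mode so a single slot can serve every spin
# box pair. The on_changed function is illustrative, not from the wizard.
from functools import partial

def on_changed(double_spin_index, mode):
    print('row', double_spin_index, 'mode', mode)

callbacks = [partial(on_changed, double_spin_index=i, mode=1)
             for i in range(3)]
callbacks[2]()  # prints: row 2 mode 1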
Example #43
    def initAppletDrawerUi(self):
        """
        Overridden from base class (LayerViewerGui)
        """
        op = self.topLevelOperatorView
        
        def configure_update_handlers( qt_signal, op_slot ):
            qt_signal.connect( self.configure_operator_from_gui )
            op_slot.notifyDirty( self.configure_gui_from_operator )
            self.__cleanup_fns.append( partial( op_slot.unregisterDirty, self.configure_gui_from_operator ) )

        def control_layout( label_text, widget ):
            row_layout = QHBoxLayout()
            row_layout.addWidget( QLabel(label_text) )
            row_layout.addSpacerItem( QSpacerItem(10, 0, QSizePolicy.Expanding) )
            row_layout.addWidget(widget)
            return row_layout

        drawer_layout = QVBoxLayout()

        channel_button = QPushButton()
        self.channel_menu = QMenu(self) # Must retain menus (in self) or else they get deleted.
        channel_button.setMenu(self.channel_menu)
        channel_button.clicked.connect(channel_button.showMenu)
        def populate_channel_menu(*args):
            if sip.isdeleted(channel_button):
                return
            self.channel_menu.clear()
            self.channel_actions = []
            for ch in range(op.Input.meta.getTaggedShape()['c']):
                action = QAction("Channel {}".format(ch), self.channel_menu)
                action.setCheckable(True)
                self.channel_menu.addAction(action)
                self.channel_actions.append(action)
                configure_update_handlers( action.toggled, op.ChannelSelections )
        populate_channel_menu()
        op.Input.notifyMetaChanged( populate_channel_menu )
        self.__cleanup_fns.append( partial( op.Input.unregisterMetaChanged, populate_channel_menu ) )
        channel_button.setToolTip("Boundary channel index in the probability map")
        drawer_layout.addLayout( control_layout( "Input Channel", channel_button ) )
        self.channel_button = channel_button

        threshold_box = QDoubleSpinBox()
        threshold_box.setDecimals(2)
        threshold_box.setMinimum(0.00)
        threshold_box.setMaximum(1.0)
        threshold_box.setSingleStep(0.1)
        configure_update_handlers( threshold_box.valueChanged, op.Pmin )
        threshold_box.setToolTip("Boundary probability threshold")
        drawer_layout.addLayout( control_layout( "Threshold", threshold_box ) )
        self.threshold_box = threshold_box

        membrane_size_box = QSpinBox()
        membrane_size_box.setMinimum(0)
        membrane_size_box.setMaximum(1000000)
        configure_update_handlers( membrane_size_box.valueChanged, op.MinMembraneSize )
        membrane_size_box.setToolTip("Size filter for boundary pieces, in pixels")
        drawer_layout.addLayout( control_layout( "Min Boundary Size", membrane_size_box ) )
        self.membrane_size_box = membrane_size_box

        seed_presmoothing_box = QDoubleSpinBox()
        seed_presmoothing_box.setDecimals(1)
        seed_presmoothing_box.setMinimum(0.0)
        seed_presmoothing_box.setMaximum(10.0)
        seed_presmoothing_box.setSingleStep(0.1)
        configure_update_handlers( seed_presmoothing_box.valueChanged, op.SigmaMinima )
        seed_presmoothing_box.setToolTip("Smooth the distance transform map with this sigma")
        drawer_layout.addLayout( control_layout( "Presmooth before Seeds", seed_presmoothing_box ) )
        self.seed_presmoothing_box = seed_presmoothing_box

        seed_method_combo = QComboBox()
        seed_method_combo.addItem("Connected")
        seed_method_combo.addItem("Clustered")
        configure_update_handlers( seed_method_combo.currentIndexChanged, op.GroupSeeds )
        seed_method_combo.setToolTip("Connected: combine directly adjacent pixels into seeds (more superpixels). Clustered: group pixels into seeds by a distance heuristic (fewer superpixels)")
        drawer_layout.addLayout( control_layout( "Seed Labeling", seed_method_combo ) )
        self.seed_method_combo = seed_method_combo

        superpixel_size_box = QSpinBox()
        superpixel_size_box.setMinimum(0)
        superpixel_size_box.setMaximum(1000000)
        configure_update_handlers( superpixel_size_box.valueChanged, op.MinSegmentSize )
        superpixel_size_box.setToolTip("Minimum size of a superpixel")
        drawer_layout.addLayout( control_layout( "Min Superpixel Size", superpixel_size_box ) )
        self.superpixel_size_box = superpixel_size_box

        preserve_pmaps_box = QCheckBox()
        configure_update_handlers( preserve_pmaps_box.toggled, op.PreserveMembranePmaps )
        preserve_pmaps_box.setToolTip("Preserve thin structures. Use this option when some of your foreground objects have long, thin parts")
        drawer_layout.addLayout( control_layout( "Preserve Thin Structures", preserve_pmaps_box ) )
        self.preserve_pmaps_box = preserve_pmaps_box

        enable_debug_box = QCheckBox()
        configure_update_handlers( enable_debug_box.toggled, op.EnableDebugOutputs )
        drawer_layout.addLayout( control_layout( "Show Debug Layers", enable_debug_box ) )
        self.enable_debug_box = enable_debug_box

        op.Superpixels.notifyReady(self.configure_gui_from_operator)
        op.Superpixels.notifyUnready(self.configure_gui_from_operator)
        self.__cleanup_fns.append( partial( op.Superpixels.unregisterReady, self.configure_gui_from_operator ) )
        self.__cleanup_fns.append( partial( op.Superpixels.unregisterUnready, self.configure_gui_from_operator ) )

        self.update_ws_button = QPushButton("Update Watershed", clicked=self.onUpdateWatershedsButton)
        drawer_layout.addWidget( self.update_ws_button )

        drawer_layout.setSpacing(0)
        drawer_layout.addSpacerItem( QSpacerItem(0, 10, QSizePolicy.Minimum, QSizePolicy.Expanding) )
        
        # Finally, the whole drawer widget
        drawer = QWidget(parent=self)
        drawer.setLayout(drawer_layout)

        # Save these members for later use
        self._drawer = drawer

        # Initialize everything with the operator's initial values
        self.configure_gui_from_operator()
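
# A hedged, Qt-free sketch of the register/unregister idiom above: partial
# freezes the slot/handler pair so teardown can replay the exact unregister
# calls. The Slot class is invented for illustration, not the ilastik API.
from functools import partial

class Slot:
    def __init__(self):
        self._handlers = []

    def notifyDirty(self, fn):
        self._handlers.append(fn)

    def unregisterDirty(self, fn):
        self._handlers.remove(fn)

cleanup_fns = []
slot = Slot()
handler = lambda *args: None
slot.notifyDirty(handler)
cleanup_fns.append(partial(slot.unregisterDirty, handler))

for fn in cleanup_fns:
    fn()          # deregisters exactly the handler that was registered
assert not slot._handlers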
Example #44
        for team in game.teams:
            try:
                hero = team.get_all_heroes()[i]
            except IndexError:
                heroes_info.extend(6 * ['-'])
            else:
                heroes_info.extend([
                    hero.name, hero.hp, hero.defence,
                    hero.shield, hero.speed, hero.area
                ])
        msg += _wrap_content(heroes_info, line_separator=False)
    msg += _get_line_separator()
    print(msg)


print_error = partial(cprint, color='red', attrs=['bold'])
print_yellow = partial(cprint, color='yellow', attrs=['bold'])


def get_row_str(proportions, data):
    pairs = list(zip(proportions, data))
    content = [str(value).center(width) for width, value in pairs]
    return "|" + "|".join(content) + "|"


def get_line_str(proportions):
    chars = ['-' * p for p in proportions]
    line = '+' + '+'.join(chars) + '+'
    return line
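
# Quick usage sketch for the two helpers above (widths are arbitrary):
proportions = [12, 8]
print(get_line_str(proportions))               # +------------+--------+
print(get_row_str(proportions, ['hero', 42]))  # |    hero    |   42   |
print(get_line_str(proportions))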

Example #45
def get_optimizer_class(self):
    if self._hp.optimizer == 'adam':
        optim = partial(Adam, betas=(self._hp.adam_beta, 0.999))
    else:
        raise ValueError(
            "Optimizer '{}' not supported!".format(self._hp.optimizer))
    return optim
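
# A self-contained sketch of using the returned partial: betas are already
# baked in, only params and lr are supplied at call time (values illustrative).
import torch
from functools import partial
from torch.optim import Adam

optim_class = partial(Adam, betas=(0.9, 0.999))
params = [torch.zeros(3, requires_grad=True)]
optimizer = optim_class(params, lr=1e-3)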
Example #46
def configure_update_handlers(qt_signal, op_slot):
    qt_signal.connect(self.configure_operator_from_gui)
    op_slot.notifyDirty(self.configure_gui_from_operator)
    self.__cleanup_fns.append(
        partial(op_slot.unregisterDirty, self.configure_gui_from_operator))
Example #47
    def open(self, path_info, mode="r", encoding=None):
        if hasattr(self, "_generate_download_url"):
            get_url = partial(self._generate_download_url, path_info)
            return open_url(get_url, mode=mode, encoding=encoding)

        raise RemoteActionNotImplemented("open", self.scheme)
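
# The partial above defers URL generation until open_url first needs it.
# A minimal illustration of that laziness; generate_download_url is
# hypothetical, not the real remote implementation:
from functools import partial

def generate_download_url(path_info):
    print('signing URL for', path_info)   # side effect happens lazily
    return 'https://example.invalid/' + path_info

get_url = partial(generate_download_url, 'data/file.csv')
url = get_url()   # nothing was signed until this call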
Example #48
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.

import functools
import torch
import torch.nn as nn
import torch.nn.functional as F
import numpy as np

import multiviews.cameras as cameras
from utils.transforms import get_affine_transform, affine_transform, affine_transform_pts

tv1, tv2, _ = torch.__version__.split('.')
tv = int(tv1) * 10 + int(tv2) * 1
if tv >= 13:  # api change since 1.3.0 for grid_sample
    grid_sample = functools.partial(F.grid_sample, align_corners=True)
else:
    grid_sample = F.grid_sample


def infer(unary, pairwise, body, config, **kwargs):
    """
    Args:
        unary: a list of unary terms for all JOINTS
        pairwise: a list of pairwise terms of all EDGES
        body: tree structure human body
    Returns:
        pose3d_as_cube_idx: 3d pose as cube index
    """
    # current_device = torch.device('cuda:{}'.format(pairwise.items()[0].get_device()))
    current_device = kwargs['current_device']
Example #49
def confidence_split_graph(project_id, skeleton_ids, confidence_threshold,
        relations=None, source_rel="presynaptic_to", target_rel="postsynaptic_to"):
    """ Assumes 0 < confidence_threshold <= 5. """
    if not skeleton_ids:
        raise ValueError("No skeleton IDs provided")

    cursor = connection.cursor()
    skids = ",".join(str(int(skid)) for skid in skeleton_ids)

    if not relations:
        relations = get_relation_to_id_map(project_id, (source_rel, target_rel), cursor)
    source_rel_id, target_rel_id = relations[source_rel], relations[target_rel]

    # Fetch synapses of all skeletons
    cursor.execute('''
    SELECT skeleton_id, treenode_id, connector_id, relation_id, confidence
    FROM treenode_connector
    WHERE project_id = %s
      AND skeleton_id IN (%s)
      AND (relation_id = %s OR relation_id = %s)
    ''' % (int(project_id), skids, source_rel_id, target_rel_id))

    stc = defaultdict(list)
    for row in cursor.fetchall():
        stc[row[0]].append(row[1:]) # skeleton_id vs (treenode_id, connector_id, relation_id, confidence)

    # Fetch all treenodes of all skeletons
    cursor.execute('''
    SELECT skeleton_id, id, parent_id, confidence
    FROM treenode
    WHERE project_id = %s
      AND skeleton_id IN (%s)
    ORDER BY skeleton_id
    ''' % (project_id, skids))

    # Dictionary of connector_id vs relation_id vs list of sub-skeleton ID
    connectors = defaultdict(partial(defaultdict, list))

    # All nodes of the graph
    nodeIDs = []

    # Read out into memory only one skeleton at a time
    current_skid = None
    tree = None
    for row in cursor.fetchall():
        if row[0] == current_skid:
            # Build the tree, breaking it at the low-confidence edges
            if row[2] and row[3] >= confidence_threshold:
                tree.add_edge(row[2], row[1])
            continue

        if tree:
            nodeIDs.extend(split_by_confidence(current_skid, tree, stc[current_skid], connectors))

        # Start the next tree
        current_skid = row[0]
        tree = nx.DiGraph()
        if row[2] and row[3] >= confidence_threshold:  # keep >= consistent with the check above
            tree.add_edge(row[2], row[1])

    if tree:
        nodeIDs.extend(split_by_confidence(current_skid, tree, stc[current_skid], connectors))

    # Create the edges of the graph from the connectors, which was populated as a side effect of 'split_by_confidence'
    edges = defaultdict(partial(defaultdict, make_new_synapse_count_array)) # pre vs post vs count
    for c in six.itervalues(connectors):
        for pre in c[source_rel_id]:
            for post in c[target_rel_id]:
                edges[pre[0]][post[0]][min(pre[1], post[1]) - 1] += 1

    return {
        'nodes': nodeIDs,
        'edges': [(s, t, count)
                for s, edge in six.iteritems(edges)
                for t, count in six.iteritems(edge)]
    }
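
# defaultdict(partial(defaultdict, list)) builds a two-level autovivifying
# mapping: the inner factory must be a zero-argument callable, which is
# exactly what partial provides. A short standalone sketch:
from collections import defaultdict
from functools import partial

nested = defaultdict(partial(defaultdict, list))
nested[123][7].append(('sub-skeleton', 5))   # both levels auto-create
print(nested[123][7])                        # [('sub-skeleton', 5)]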
Example #50
def load_devices(*devices, pprint=False, namespace=None, use_cache=True,
                 threaded=False, post_load=None, **kwargs):
    """
    Load a series of devices into a namespace

    Parameters
    ----------
    *devices :
        List of happi containers to load

    pprint: bool, optional
        Print results of device loads

    namespace : object, optional
        Namespace to collect loaded devices in. By default this will be a
        ``types.SimpleNamespace``

    use_cache : bool, optional
        If set to ``False``, we'll ignore the cache and always make new
        devices.

    threaded : bool, optional
        Set to True to create each device in a background thread.  Note that
        this assumes that no two devices provided are the same. You are not
        guaranteed to load from the cache correctly if you ask for the same
        device to be loaded twice in the same threaded load.

    post_load : function, optional
        Function of one argument to run on each device after instantiation.
        This is your opportunity to check for good device health during the
        threaded load.

    kwargs:
        Are passed to :func:`.from_container`
    """
    # Create our namespace if we were not given one
    namespace = namespace or types.SimpleNamespace()
    name_list = [container.name for container in devices]
    if threaded:
        # Pre-import because imports in threads have race conditions
        for device in devices:
            try:
                import_class(device.device_class)
            except Exception:
                # Just wait for the normal error handling later
                pass
        global main_event_loop
        if main_event_loop is None:
            main_event_loop = asyncio.get_event_loop()
        pool = ThreadPool(len(devices))
        opt_load = partial(load_device, pprint=pprint, use_cache=use_cache,
                           threaded=True, post_load=post_load, **kwargs)
        loaded_list = pool.map(opt_load, devices)
    else:
        loaded_list = []
        for device in devices:
            loaded = load_device(device, pprint=pprint, use_cache=use_cache,
                                 threaded=False, post_load=post_load, **kwargs)
            loaded_list.append(loaded)
    for dev, name in zip(loaded_list, name_list):
        attr = create_alias(name)
        setattr(namespace, attr, dev)
    return namespace
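
# partial freezes every keyword except the device itself, turning the loader
# into a one-argument function that ThreadPool.map can fan out. The same
# shape in miniature; the load function here is invented for illustration:
from functools import partial
from multiprocessing.pool import ThreadPool

def load(device, pprint=False, use_cache=True):
    return (device, pprint, use_cache)

opt_load = partial(load, pprint=True, use_cache=False)
with ThreadPool(2) as pool:
    print(pool.map(opt_load, ['motor_a', 'motor_b']))
# [('motor_a', True, False), ('motor_b', True, False)]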
Example #51
def dual_split_graph(project_id, skeleton_ids, confidence_threshold, bandwidth,
        expand, relations=None, source_link="presynaptic_to",
        target_link="postsynaptic_to"):
    """ Assumes bandwidth > 0 and some skeleton_id in expand. """
    cursor = connection.cursor()
    skeleton_ids = set(skeleton_ids)
    expand = set(expand)

    if not skeleton_ids:
        raise ValueError("No skeleton IDs provided")

    # assumes all skeleton_id in expand are also present in skeleton_ids

    skids = ",".join(str(int(skid)) for skid in skeleton_ids)

    if not relations:
        relations = get_relation_to_id_map(project_id, (source_link, target_link), cursor)
    source_rel_id, target_rel_id = relations[source_link], relations[target_link]

    # Fetch synapses of all skeletons
    cursor.execute('''
    SELECT skeleton_id, treenode_id, connector_id, relation_id, confidence
    FROM treenode_connector
    WHERE project_id = %s
      AND skeleton_id IN (%s)
      AND relation_id IN (%s,%s)
    ''' % (int(project_id), ",".join(str(int(skid)) for skid in skeleton_ids),
           source_rel_id, target_rel_id))

    stc = defaultdict(list)
    for row in cursor.fetchall():
        stc[row[0]].append(row[1:]) # skeleton_id vs (treenode_id, connector_id, relation_id, confidence)

    # Dictionary of connector_id vs relation_id vs list of sub-skeleton ID
    connectors = defaultdict(partial(defaultdict, list))

    # All nodes of the graph (with or without edges. Includes those representing synapse domains)
    nodeIDs = []

    not_to_expand = skeleton_ids - expand

    if confidence_threshold > 0 and not_to_expand:
        # Now fetch all treenodes of only skeletons in skeleton_ids (the ones not to expand)
        cursor.execute('''
        SELECT skeleton_id, id, parent_id, confidence
        FROM treenode
        WHERE project_id = %s
          AND skeleton_id IN (%s)
        ORDER BY skeleton_id
        ''' % (project_id, ",".join(str(int(skid)) for skid in not_to_expand)))

        # Read out into memory only one skeleton at a time
        current_skid = None
        tree = None
        for row in cursor.fetchall():
            if row[0] == current_skid:
                # Build the tree, breaking it at the low-confidence edges
                if row[2] and row[3] >= confidence_threshold:
                    tree.add_edge(row[2], row[1])
                continue

            if tree:
                nodeIDs.extend(split_by_confidence(current_skid, tree, stc[current_skid], connectors))

            # Start the next tree
            current_skid = row[0]
            tree = nx.DiGraph()
            if row[2] and row[3] >= confidence_threshold:  # keep >= consistent with the check above
                tree.add_edge(row[2], row[1])

        if tree:
            nodeIDs.extend(split_by_confidence(current_skid, tree, stc[current_skid], connectors))
    else:
        # No need to split.
        # Populate connectors from the connections among them
        for skid in not_to_expand:
            nodeIDs.append(skid)
            for c in stc[skid]:
                connectors[c[1]][c[2]].append((skid, c[3]))


    # Now fetch all treenodes of all skeletons to expand
    cursor.execute('''
    SELECT skeleton_id, id, parent_id, confidence, location_x, location_y, location_z
    FROM treenode
    WHERE project_id = %s
      AND skeleton_id IN (%s)
    ORDER BY skeleton_id
    ''' % (project_id, ",".join(str(int(skid)) for skid in expand)))

    # list of edges among synapse domains
    intraedges = []

    # list of branch nodes, merely structural
    branch_nodeIDs = []

    # reset
    current_skid = None
    tree = None
    locations = None
    for row in cursor.fetchall():
        if row[0] == current_skid:
            # Build the tree, breaking it at the low-confidence edges
            locations[row[1]] = row[4:]
            if row[2] and row[3] >= confidence_threshold:
                tree.add_edge(row[2], row[1])
            continue

        if tree:
            ns, bs = split_by_both(current_skid, tree, locations, bandwidth, stc[current_skid], connectors, intraedges)
            nodeIDs.extend(ns)
            branch_nodeIDs.extend(bs)

        # Start the next tree
        current_skid = row[0]
        tree = nx.DiGraph()
        locations = {}
        locations[row[1]] = row[4:]
        if row[2] and row[3] >= confidence_threshold:  # keep >= consistent with the check above
            tree.add_edge(row[2], row[1])

    if tree:
        ns, bs = split_by_both(current_skid, tree, locations, bandwidth, stc[current_skid], connectors, intraedges)
        nodeIDs.extend(ns)
        branch_nodeIDs.extend(bs)


    # Create the edges of the graph
    edges = defaultdict(partial(defaultdict, make_new_synapse_count_array)) # pre vs post vs count
    for c in six.itervalues(connectors):
        for pre in c[source_rel_id]:
            for post in c[target_rel_id]:
                edges[pre[0]][post[0]][min(pre[1], post[1]) - 1] += 1

    return {
        'nodes': nodeIDs,
        'edges': [(s, t, count)
                for s, edge in six.iteritems(edges)
                for t, count in six.iteritems(edge)],
        'branch_nodes': branch_nodeIDs,
        'intraedges': intraedges
    }
Example #52
def setUp(self):
    self.message = Message(
        0, User(0, "Testuser"), datetime.now(), Chat(0, 'private'))
    self.e = functools.partial(MessageEntity, offset=0, length=0)
Example #53
def nth(iterable, n, default=None):
    """Returns the nth item of *iterable*, or *default* if there is none.
    http://docs.python.org/3.4/library/itertools.html#itertools-recipes
    """
    return next(islice(iterable, n, None), default)


def first_true(iterable, default=False, pred=None):
    """Returns the first true value in the iterable.
    If no true value is found, returns *default*
    http://docs.python.org/3.4/library/itertools.html#itertools-recipes
    """
    return next(filter(pred, iterable), default)


# widely used shortcuts to get the first item, all but the first item,
# the second item, and the first item of the first item from an iterator
head = first = partial(flip(nth), 0)
tail = rest = partial(drop, 1)
second = F(rest) >> first
ffirst = F(first) >> first

# shortcut to remove all falsey items from iterable
compact = partial(filter, None)

# filterfalse under alias 'reject'
reject = filterfalse

# shortcuts to 1. return True if f(x) is logical true for every x in
# iterable (False otherwise), and 2. return the first logical true
# value of f(x) for any x in iterable (None otherwise) respectively
every = F(partial(map)) >> all
some = F(partial(map)) >> compact >> first
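
# Two of these shortcuts rebuilt with only the standard library, as a quick
# illustration (the originals rely on this module's F and flip combinators):
from functools import partial

compact_demo = partial(filter, None)
print(list(compact_demo([0, 1, '', 'x', None])))   # [1, 'x']

every_demo = lambda pred, seq: all(map(pred, seq))
print(every_demo(lambda x: x > 0, [1, 2, 3]))      # True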
Example #54
def _skeleton_graph(project_id, skeleton_ids, confidence_threshold, bandwidth,
        expand, compute_risk, cable_spread, path_confluence,
        with_overall_counts=False, relation_map=None, link_types=None):

    by_link_type = bool(link_types)
    if not by_link_type:
        link_types = ['synaptic-connector']

    if not expand:
        # Prevent expensive operations that will do nothing
        bandwidth = 0

    cursor = connection.cursor()
    relation_map = get_relation_to_id_map(project_id, cursor=cursor)

    result = None
    for link_type in link_types:
        pair = KNOWN_LINK_PAIRS.get(link_type)
        if not pair:
            raise ValueError("Unknown link type: " + link_type)

        source_rel = pair['source']
        target_rel = pair['target']

        if 0 == bandwidth:
            if 0 == confidence_threshold:
                graph = basic_graph(project_id, skeleton_ids, relation_map,
                        source_rel, target_rel)
            else:
                graph = confidence_split_graph(project_id, skeleton_ids,
                        confidence_threshold, relation_map, source_rel, target_rel)
        else:
            graph = dual_split_graph(project_id, skeleton_ids, confidence_threshold,
                    bandwidth, expand, relation_map)

        if with_overall_counts:
            preId = relation_map[source_rel]
            postId = relation_map[target_rel]
            query_params = list(skeleton_ids) + [preId, postId, preId, postId]

            skeleton_id_template = ','.join('(%s)' for _ in skeleton_ids)
            cursor.execute('''
            SELECT tc1.skeleton_id, tc2.skeleton_id,
                tc1.relation_id, tc2.relation_id,
                LEAST(tc1.confidence, tc2.confidence)
            FROM treenode_connector tc1
            JOIN (VALUES {}) skeleton(id)
                ON tc1.skeleton_id = skeleton.id
            JOIN treenode_connector tc2
                ON tc1.connector_id = tc2.connector_id
            WHERE tc1.id != tc2.id
                AND tc1.relation_id IN (%s, %s)
                AND tc2.relation_id IN (%s, %s)
            '''.format(skeleton_id_template), query_params)

            query_skeleton_ids = set(skeleton_ids)
            overall_counts = defaultdict(partial(defaultdict, make_new_synapse_count_array))
            # Iterate through each pre/post connection
            for skid1, skid2, rel1, rel2, conf in cursor.fetchall():
                # Increment number of links to/from skid1 with relation rel1.
                overall_counts[skid1][rel1][conf - 1] += 1

            # Attach counts and a map of relation names to their IDs.
            graph['overall_counts'] = overall_counts
            graph['relation_map'] = {
                source_rel: preId,
                target_rel: postId
            }

        if by_link_type:
            if not result:
                result = {}
            result[link_type] = graph
        else:
            result = graph

    return result
Example #55
import numpy as np
from matplotlib import pyplot as plt
from matplotlib import gridspec

from skimage.data import chelsea, hubble_deep_field
from skimage.metrics import mean_squared_error as mse
from skimage.metrics import peak_signal_noise_ratio as psnr
from skimage.restoration import (calibrate_denoiser,
                                 denoise_wavelet,
                                 denoise_tv_chambolle, denoise_nl_means,
                                 estimate_sigma)
from skimage.util import img_as_float, random_noise
from skimage.color import rgb2gray
from functools import partial

_denoise_wavelet = partial(denoise_wavelet, rescale_sigma=True)

image = img_as_float(chelsea())
sigma = 0.3
noisy = random_noise(image, var=sigma ** 2)

# Parameters to test when calibrating the denoising algorithm
sigma_range = np.arange(0.05, 0.5, 0.05)

parameter_ranges = {'sigma': np.arange(0.1, 0.3, 0.02),
                    'wavelet': ['db1', 'db2'],
                    'convert2ycbcr': [True, False],
                    'multichannel': [True]}

# Denoised image using default parameters of `denoise_wavelet`
default_output = denoise_wavelet(noisy, multichannel=True, rescale_sigma=True)
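
# Presumably the next step feeds these into scikit-image's calibrate_denoiser;
# a hedged sketch against the skimage.restoration.calibrate_denoiser API:
calibrated_denoiser = calibrate_denoiser(noisy, _denoise_wavelet,
                                         denoise_parameters=parameter_ranges)
calibrated_output = calibrated_denoiser(noisy)
print('default PSNR:   ', psnr(image, default_output))
print('calibrated PSNR: ', psnr(image, calibrated_output))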
Example #56
async def main():
    task = asyncio.create_task(a())
    task.add_done_callback(callback)
    task.add_done_callback(partial(callback2, n='input'))
    await task
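
# The snippet assumes a, callback and callback2 exist elsewhere. A
# self-contained sketch of the same pattern: add_done_callback passes the
# finished task positionally, and partial supplies the extra keyword.
import asyncio
from functools import partial

async def a():
    return 42

def callback(task):
    print('plain callback:', task.result())

def callback2(task, n):
    print('partial callback:', n, task.result())

async def main():
    task = asyncio.create_task(a())
    task.add_done_callback(callback)
    task.add_done_callback(partial(callback2, n='input'))
    await task

asyncio.run(main())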
Example #57
def generate_robust_all_seg_for_train():
    max_seq_length = 512
    encoder = AllSegmentAsDoc(max_seq_length)
    worker_factory = partial(RobustWorker, RobustPairwiseTrainGen(encoder, max_seq_length))
    runner = JobRunner(sydney_working_dir, 4, "robust_all_passage", worker_factory)
    runner.start()
Example #58
def main(argv):
  tf.compat.v2.enable_v2_behavior()
  if len(argv) > 1:
    raise app.UsageError('Expected no command-line arguments, '
                         'got: {}'.format(argv))

  vocab_size = 10000
  tag_size = 500
  batch_size = 128
  shuffle_buffer_size = 1000
  max_element_per_client = 1000
  logging.info('experiment_name: %s', FLAGS.experiment_name)
  steps_per_epoch = 10 if 'debug' in FLAGS.experiment_name else 500000
  max_validation_client = 10 if 'debug' in FLAGS.experiment_name else 1000

  # Avoid a long line
  client_datasets = tff.simulation.datasets.stackoverflow.load_data()
  (train_clientdata, valid_clientdata, test_clientdata) = client_datasets

  word_vocab = so_preprocessing.create_word_vocab(vocab_size)
  tag_vocab = so_preprocessing.create_tag_vocab(tag_size)

  ids = np.arange(
      len(train_clientdata.client_ids) + len(valid_clientdata.client_ids),
      dtype=np.int64) + 1
  str_ids = train_clientdata.client_ids + valid_clientdata.client_ids
  client_id_encodings = tf.lookup.StaticVocabularyTable(
      tf.lookup.KeyValueTensorInitializer(str_ids, ids),
      num_oov_buckets=1)

  preprocess_fn = so_preprocessing.create_preprocess_fn(
      word_vocab, tag_vocab, shuffle_buffer_size=1000)

  def to_embedding_id(client_id):
    return client_id_encodings.lookup(client_id)

  def parse_dataset(clientdata):
    def _create_dataset_with_id(client_id):
      client_number_id = to_embedding_id(client_id)
      def add_id(x):
        x['client_id'] = client_number_id
        return x

      # pylint: disable=protected-access
      return clientdata._create_dataset(client_id).take(
          max_element_per_client).map(add_id)

    client_ids = clientdata.client_ids
    nested_dataset = tf.data.Dataset.from_tensor_slices(client_ids)
    centralized_train = nested_dataset.flat_map(_create_dataset_with_id)

    centralized_train = preprocess_fn(centralized_train)
    print(centralized_train.element_spec)
    return centralized_train

  train_dataset = parse_dataset(train_clientdata).shuffle(
      shuffle_buffer_size).batch(batch_size)
  test_dataset = parse_dataset(test_clientdata).batch(batch_size).take(1000)

  model_builder = functools.partial(
      so_tagging_model.create_logistic_basis_model,
      vocab_tokens_size=vocab_size,
      vocab_tags_size=tag_size,
      num_basis=FLAGS.num_basis,
      )

  loss_builder = functools.partial(
      tf.keras.losses.BinaryCrossentropy,
      from_logits=False,
      reduction=tf.keras.losses.Reduction.SUM)

  metrics = [tf.keras.metrics.Precision(name='precision'),
             tf.keras.metrics.Recall(top_k=5, name='recall_at_5')]

  basisnet = model_builder()
  basisnet.summary()
  basisnet.compile(optimizer='adam', loss=loss_builder(), metrics=metrics)

  def _create_split_dataset_with_id(client_id):
    def add_id(x):
      x['client_id'] = to_embedding_id(client_id)
      return x

    # pylint: disable=protected-access
    client_ds = valid_clientdata._create_dataset(client_id).map(add_id)
    total_size = client_ds.reduce(0, lambda x, _: x + 1)

    num_elements_ten_percent = tf.cast((total_size - 1) / 10, dtype=tf.int64)
    num_elements_half = tf.cast((total_size - 1) / 2, dtype=tf.int64)

    train_set = client_ds.take(num_elements_half)
    train_split_set = client_ds.take(num_elements_ten_percent)
    test_set = client_ds.skip(num_elements_half)

    train_dataset = preprocess_fn(train_set).batch(batch_size)
    train_split_dataset = preprocess_fn(train_split_set).batch(batch_size)
    test_dataset = preprocess_fn(test_set).batch(batch_size)

    return train_dataset, train_split_dataset, test_dataset

  sample_client_ids = np.random.choice(valid_clientdata.client_ids,
                                       max_validation_client)

  def per_evaluation(basisnet, model_builder, fix_basis=True):
    all_clients_acc_before = []
    all_clients_acc = []
    all_clients_split_acc = []

    for clnt_id in sample_client_ids[:100]:
      local_basisnet = model_builder()
      local_basisnet.compile(
          optimizer='adam', loss=loss_builder(), metrics=metrics)
      local_basisnet.set_weights(basisnet.get_weights())

      if fix_basis:
        # only fine-tune the embedding
        logging.info('Fix basis')
        for layer in local_basisnet.layers:
          if layer.name != 'client_embedding':
            layer.trainable = False

      # Build the datasets at loop level so test_dataset is defined even
      # when fix_basis is False.
      tf_clnt_id = tf.constant(clnt_id)
      datasets = _create_split_dataset_with_id(tf_clnt_id)
      train_dataset, train_split_dataset, test_dataset = datasets

      bf_acc = local_basisnet.evaluate(test_dataset)[-1]
      all_clients_acc_before.append(bf_acc)
      logging.info(bf_acc)

      local_basisnet.fit(
          train_dataset, epochs=10, verbose=0)
      all_clients_acc.append(local_basisnet.evaluate(test_dataset)[-1])

      local_basisnet.set_weights(basisnet.get_weights())
      # Fine-tune again with a smaller (10%) split of the training data.
      local_basisnet.fit(
          train_split_dataset, epochs=10, verbose=0)
      all_clients_split_acc.append(local_basisnet.evaluate(test_dataset)[-1])

    logging.info(all_clients_acc)
    logging.info(np.mean(all_clients_acc))

    logging.info(all_clients_split_acc)
    logging.info(np.mean(all_clients_split_acc))


  for ep in range(2):
    basisnet.fit(
        train_dataset,
        epochs=1,
        validation_data=test_dataset,
        steps_per_epoch=steps_per_epoch
    )
    basisnet.save_weights(
        FLAGS.modeldir+'/so_tagging%s_basis_%d_ep%d.ckpt' %
        (FLAGS.experiment_name, FLAGS.num_basis, ep))
    per_evaluation(basisnet, model_builder, fix_basis=True)
Example #59
    def __init__(self, name, address, reset=False, **kwargs):
        super().__init__(name, address, terminator='\n', **kwargs)

        self._trigger_sent = False

        # Unfortunately the strings have to contain quotation marks and a
        # newline character, as this is how the instrument returns it.
        self._mode_map = {
            'ac current': '"CURR:AC"',
            'dc current': '"CURR:DC"',
            'ac voltage': '"VOLT:AC"',
            'dc voltage': '"VOLT:DC"',
            '2w resistance': '"RES"',
            '4w resistance': '"FRES"',
            'temperature': '"TEMP"',
            'frequency': '"FREQ"',
        }

        self.add_parameter('mode',
                           get_cmd='SENS:FUNC?',
                           set_cmd="SENS:FUNC {}",
                           val_mapping=self._mode_map)

        # Mode specific parameters
        self.add_parameter('nplc',
                           get_cmd=partial(self._get_mode_param, 'NPLC',
                                           float),
                           set_cmd=partial(self._set_mode_param, 'NPLC'),
                           vals=Numbers(min_value=0.01, max_value=10))

        # TODO: validator, this one is more difficult since different modes
        # require different validation ranges
        self.add_parameter('range',
                           get_cmd=partial(self._get_mode_param, 'RANG',
                                           float),
                           set_cmd=partial(self._set_mode_param, 'RANG'),
                           vals=Numbers())

        self.add_parameter('auto_range_enabled',
                           get_cmd=partial(self._get_mode_param, 'RANG:AUTO',
                                           parse_output_bool),
                           set_cmd=partial(self._set_mode_param, 'RANG:AUTO'),
                           vals=Bool())

        self.add_parameter('digits',
                           get_cmd=partial(self._get_mode_param, 'DIG', int),
                           set_cmd=partial(self._set_mode_param, 'DIG'),
                           vals=Ints(min_value=4, max_value=7))

        self.add_parameter('averaging_type',
                           get_cmd=partial(self._get_mode_param, 'AVER:TCON',
                                           parse_output_string),
                           set_cmd=partial(self._set_mode_param, 'AVER:TCON'),
                           vals=Enum('moving', 'repeat'))

        self.add_parameter('averaging_count',
                           get_cmd=partial(self._get_mode_param, 'AVER:COUN',
                                           int),
                           set_cmd=partial(self._set_mode_param, 'AVER:COUN'),
                           vals=Ints(min_value=1, max_value=100))

        self.add_parameter('averaging_enabled',
                           get_cmd=partial(self._get_mode_param, 'AVER:STAT',
                                           parse_output_bool),
                           set_cmd=partial(self._set_mode_param, 'AVER:STAT'),
                           vals=Bool())

        # Global parameters
        self.add_parameter('display_enabled',
                           get_cmd='DISP:ENAB?',
                           get_parser=parse_output_bool,
                           set_cmd='DISP:ENAB {}',
                           set_parser=int,
                           vals=Bool())

        self.add_parameter('trigger_continuous',
                           get_cmd='INIT:CONT?',
                           get_parser=parse_output_bool,
                           set_cmd='INIT:CONT {}',
                           set_parser=int,
                           vals=Bool())

        self.add_parameter('trigger_count',
                           get_cmd='TRIG:COUN?',
                           get_parser=int,
                           set_cmd='TRIG:COUN {}',
                           vals=MultiType(Ints(min_value=1, max_value=9999),
                                          Enum('inf',
                                               'default',
                                               'minimum',
                                               'maximum')))

        self.add_parameter('trigger_delay',
                           get_cmd='TRIG:DEL?',
                           get_parser=float,
                           set_cmd='TRIG:DEL {}',
                           unit='s',
                           vals=Numbers(min_value=0, max_value=999999.999))

        self.add_parameter('trigger_source',
                           get_cmd='TRIG:SOUR?',
                           set_cmd='TRIG:SOUR {}',
                           val_mapping={
                               'immediate': 'IMM',
                               'timer': 'TIM',
                               'manual': 'MAN',
                               'bus': 'BUS',
                               'external': 'EXT',
                           })

        self.add_parameter('trigger_timer',
                           get_cmd='TRIG:TIM?',
                           get_parser=float,
                           set_cmd='TRIG:TIM {}',
                           unit='s',
                           vals=Numbers(min_value=0.001, max_value=999999.999))

        self.add_parameter('amplitude',
                           unit='arb.unit',
                           get_cmd=self._read_next_value)

        self.add_function('reset', call_cmd='*RST')

        if reset:
            self.reset()

        # Set the data format to have only ascii data without units and channels
        self.write('FORM:DATA ASCII')
        self.write('FORM:ELEM READ')

        self.connect_message()
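
# Nearly every parameter above reuses two helpers through partial, binding a
# mode-specific SCPI fragment and a parser. A toy version of that factory
# pattern; the ToyInstrument class is invented, not the QCoDeS API:
from functools import partial

class ToyInstrument:
    def _query(self, cmd):
        return '0.02'               # pretend instrument reply

    def _get_mode_param(self, parameter, parser):
        return parser(self._query('SENS:{}?'.format(parameter)))

inst = ToyInstrument()
get_nplc = partial(inst._get_mode_param, 'NPLC', float)
print(get_nplc())                   # 0.02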
Example #60
class NotSupportedCliException(Exception):
    pass


def process_range(xpu, inp):
    start, end = map(int, inp)
    if start > end:
        end, start = start, end
    return map(lambda x: '{}{}'.format(xpu, x), range(start, end + 1))


REGEX = [
    (re.compile(r'^gpu(\d+)$'), lambda x: ['gpu%s' % x[0]]),
    (re.compile(r'^(\d+)$'), lambda x: ['gpu%s' % x[0]]),
    (re.compile(r'^gpu(\d+)-(?:gpu)?(\d+)$'),
     functools.partial(process_range, 'gpu')),
    (re.compile(r'^(\d+)-(\d+)$'), functools.partial(process_range, 'gpu')),
]


def parse_devices(input_devices):
    """Parse user's devices input str to standard format.
    e.g. [gpu0, gpu1, ...]

    """
    ret = []
    for d in input_devices.split(','):
        for regex, func in REGEX:
            m = regex.match(d.lower().strip())
            if m:
                tmp = func(m.groups())