def from_config(cls, config_dict, type_str, uuid, merge_default=True):
    """
    Instantiate a new instance of this class given the desired type, uuid,
    and JSON-compliant configuration dictionary.

    :param config_dict: JSON compliant dictionary encapsulating
        a configuration.
    :type config_dict: dict

    :param type_str: Type of descriptor. This is usually the name of the
        content descriptor that generated this vector.
    :type type_str: str

    :param uuid: Unique ID reference of the descriptor.
    :type uuid: collections.Hashable

    :param merge_default: Merge the given configuration on top of the
        default provided by ``get_default_config``.
    :type merge_default: bool

    :return: Constructed instance from the provided config.
    :rtype: DescriptorElement
    """
    # Work on a merged copy so the caller's dictionary is left untouched.
    merged = {}
    merge_dict(merged, config_dict)
    # Inject the required construction values before deferring to the
    # parent implementation.
    merged['type_str'] = type_str
    merged['uuid'] = uuid
    return super(DescriptorElement, cls).from_config(merged, merge_default)
def test_get_config(self):
    """Config emitted should reflect the cache element provided, if any."""
    # Default construction (no argument, then explicit None) should match
    # the class default configuration exactly.
    self.assertEqual(
        MemoryDescriptorIndex().get_config(),
        MemoryDescriptorIndex.get_default_config()
    )
    self.assertEqual(
        MemoryDescriptorIndex(None).get_config(),
        MemoryDescriptorIndex.get_default_config()
    )

    # An empty cache element should only flip the plugin "type" field.
    elem_empty = DataMemoryElement()
    expected_empty = merge_dict(
        MemoryDescriptorIndex.get_default_config(),
        {'cache_element': {'type': 'DataMemoryElement'}}
    )
    self.assertEqual(MemoryDescriptorIndex(elem_empty).get_config(),
                     expected_empty)

    # A populated cache element should additionally report its bytes.
    pickled_map = pickle.dumps({1: 1, 2: 2, 3: 3}, -1)
    elem_full = DataMemoryElement(bytes=pickled_map)
    expected_full = merge_dict(
        MemoryDescriptorIndex.get_default_config(),
        {'cache_element': {
            'DataMemoryElement': {'bytes': pickled_map},
            'type': 'DataMemoryElement'
        }}
    )
    self.assertEqual(MemoryDescriptorIndex(elem_full).get_config(),
                     expected_full)
def from_config(cls, config_dict, type_name, uuid, merge_default=True):
    """
    Instantiate a new instance of this class given the configuration
    JSON-compliant dictionary encapsulating initialization arguments.

    This method should not be called via super unless an instance of the
    class is desired.

    :param config_dict: JSON compliant dictionary encapsulating
        a configuration.
    :type config_dict: dict

    :param type_name: Name of the type of classifier this classification
        was generated by.
    :type type_name: str

    :param uuid: Unique ID reference of the classification
    :type uuid: collections.Hashable

    :param merge_default: Merge the given configuration on top of the
        default provided by ``get_default_config``.
    :type merge_default: bool

    :return: Constructed instance from the provided config.
    :rtype: ClassificationElement
    """
    # Copy the input config so the caller's dictionary is not mutated.
    c = {}
    merge_dict(c, config_dict)
    # Inject required construction values before deferring to the parent.
    c['type_name'] = type_name
    c['uuid'] = uuid
    return super(ClassificationElement, cls).from_config(c, merge_default)
def load_config(config_path, defaults=None):
    """
    Load the JSON configuration dictionary from the specified filepath.

    If the given path does not point to a valid file, we return an empty
    dictionary or the default dictionary if one was provided, returning
    False as our second return argument.

    :param config_path: Path to the (valid) JSON configuration file.
    :type config_path: str
    :param defaults: Optional default configuration dictionary to merge
        loaded configuration into. If provided, it will be modified in
        place.
    :type defaults: dict | None

    :return: The result configuration dictionary and if we successfully
        loaded a JSON dictionary from the given filepath.
    :rtype: (dict, bool)
    """
    result = {} if defaults is None else defaults
    success = False
    # Only attempt a read when a non-empty path points to an actual file.
    if config_path and os.path.isfile(config_path):
        with open(config_path) as f:
            merge_dict(result, json.load(f))
        success = True
    return result, success
def test_nested(self):
    """Nested dictionaries are merged recursively, not replaced wholesale."""
    base = {
        'a': 1,
        'b': {'c': 2, 'd': {'e': 3}},
        'f': {'g': 4, 'h': {'i': 5}},
    }
    update = {'b': {'c': 6}, 'f': {'h': {'i': 7}}, 'j': 8}
    merge_dict(base, update)
    ntools.assert_equal(base, {
        'a': 1,
        'b': {'c': 6, 'd': {'e': 3}},
        'f': {'g': 4, 'h': {'i': 7}},
        'j': 8,
    })
def from_config(cls, config_dict, merge_default=True):
    """
    Instantiate a new instance of this class given the configuration
    JSON-compliant dictionary encapsulating initialization arguments.

    This method should not be called via super unless and instance of the
    class is desired.

    :param config_dict: JSON compliant dictionary encapsulating
        a configuration.
    :type config_dict: dict

    :param merge_default: Merge the given configuration on top of the
        default provided by ``get_default_config``.
    :type merge_default: bool

    :return: Constructed instance from the provided config.
    :rtype: MRPTNearestNeighborsIndex
    """
    if merge_default:
        base = cls.get_default_config()
        merge_dict(base, config_dict)
        config_dict = base
    # Turn the nested descriptor-set plugin configuration into a live
    # instance before construction.
    config_dict['descriptor_set'] = plugin.from_plugin_config(
        config_dict['descriptor_set'], get_descriptor_index_impls())
    return super(MRPTNearestNeighborsIndex, cls).from_config(
        config_dict, False)
def make_response_json(message, return_code=200, **params):
    """
    Basic message constructor for returning JSON from a flask routing
    function

    :param message: String descriptive message to send back.
    :type message: str

    :param return_code: HTTP return code for this message. Default is 200.
    :type return_code: int

    :param params: Other key-value data to include in response JSON.
    :type params: JSON-compliant

    :return: Flask response and HTTP status code pair.
    :rtype: (flask.Response, int)
    """
    # Timestamp the response in both unix and human-readable UTC forms.
    timestamp = {
        "unix": time.time(),
        "utc": time.asctime(time.gmtime()),
    }
    payload = {"message": message, "time": timestamp}
    merge_dict(payload, params)
    return flask.jsonify(**payload), return_code
def get_default_config(cls):
    """
    Generate and return a default configuration dictionary for this class.
    This will be primarily used for generating what the configuration
    dictionary would look like for this class without instantiating it.

    :return: Default configuration dictionary for the class.
    :rtype: dict
    """
    c = super(NearestNeighborServiceServer, cls).get_default_config()
    # Layer server-specific plugin defaults on top of the base app config.
    merge_dict(
        c,
        {
            "descriptor_factory":
                DescriptorElementFactory.get_default_config(),
            "descriptor_generator":
                plugin.make_config(get_descriptor_generator_impls()),
            "nn_index": plugin.make_config(get_nn_index_impls()),
            "descriptor_index":
                plugin.make_config(get_descriptor_index_impls()),
            "update_descriptor_index": False,
        })
    return c
def from_config(cls, config_dict, merge_default=True):
    """
    Instantiate a new instance of this class given the configuration
    JSON-compliant dictionary encapsulating initialization arguments.

    This method should not be called via super unless and instance of the
    class is desired.

    :param config_dict: JSON compliant dictionary encapsulating
        a configuration.
    :type config_dict: dict

    :param merge_default: Merge the given configuration on top of the
        default provided by ``get_default_config``.
    :type merge_default: bool

    :return: Constructed instance from the provided config.
    :rtype: Configurable
    """
    # The simple case is that the class doesn't require any special
    # parameters other than those that can be provided via the JSON
    # specification, which we cover here. If an implementation needs
    # something more special, they can override this function.
    if merge_default:
        base = cls.get_default_config()
        merge_dict(base, config_dict)
        config_dict = base
    return cls(**config_dict)
def test_get_config(self):
    """Config emitted should reflect the cache element provided, if any."""
    # No cache element: both implicit and explicit None yield the default.
    ntools.assert_equal(MemoryDescriptorIndex().get_config(),
                        MemoryDescriptorIndex.get_default_config())
    ntools.assert_equal(MemoryDescriptorIndex(None).get_config(),
                        MemoryDescriptorIndex.get_default_config())

    # Empty cache element: only the plugin "type" field changes.
    elem_empty = DataMemoryElement()
    expected_empty = merge_dict(
        MemoryDescriptorIndex.get_default_config(),
        {'cache_element': {'type': 'DataMemoryElement'}})
    ntools.assert_equal(MemoryDescriptorIndex(elem_empty).get_config(),
                        expected_empty)

    # Populated cache element: its bytes are reported too.
    pickled_map = pickle.dumps({1: 1, 2: 2, 3: 3}, -1)
    elem_full = DataMemoryElement(bytes=pickled_map)
    expected_full = merge_dict(
        MemoryDescriptorIndex.get_default_config(),
        {'cache_element': {
            'DataMemoryElement': {'bytes': pickled_map},
            'type': 'DataMemoryElement'
        }})
    ntools.assert_equal(MemoryDescriptorIndex(elem_full).get_config(),
                        expected_full)
def from_config(cls, config_dict, merge_default=True):
    """
    Instantiate a new instance of this class given the configuration
    JSON-compliant dictionary encapsulating initialization arguments.

    This method should not be called via super unless and instance of the
    class is desired.

    :param config_dict: JSON compliant dictionary encapsulating
        a configuration.
    :type config_dict: dict

    :param merge_default: Merge the given configuration on top of the
        default provided by ``get_default_config``.
    :type merge_default: bool

    :return: Constructed instance from the provided config.
    :rtype: ClassificationElementFactory
    """
    if merge_default:
        merged = cls.get_default_config()
        merge_dict(merged, config_dict)
        config_dict = merged
    # The "type" key selects which element implementation to use; its
    # sub-dictionary holds that implementation's own configuration.
    elem_type = config_dict['type']
    return ClassificationElementFactory(
        get_classification_element_impls()[elem_type],
        config_dict[elem_type]
    )
def from_config(cls, config_dict, merge_default=True):
    """
    Instantiate a new instance of this class given the configuration
    JSON-compliant dictionary encapsulating initialization arguments.

    This method should not be called via super unless and instance of the
    class is desired.

    :param config_dict: JSON compliant dictionary encapsulating
        a configuration.
    :type config_dict: dict

    :param merge_default: Merge the given configuration on top of the
        default provided by ``get_default_config``.
    :type merge_default: bool

    :return: Constructed instance from the provided config.
    :rtype: DescriptorElementFactory
    """
    if merge_default:
        merged = cls.get_default_config()
        merge_dict(merged, config_dict)
        config_dict = merged
    # The "type" key selects the descriptor element implementation; its
    # sub-dictionary is that implementation's own configuration.
    elem_type = config_dict['type']
    return DescriptorElementFactory(
        get_descriptor_element_impls()[elem_type],
        config_dict[elem_type])
def get_config(self):
    """
    Return the JSON-compliant configuration dictionary for this instance.

    :return: JSON type compliant configuration dictionary.
    :rtype: dict
    """
    c = merge_dict(self.get_default_config(), {
        "pickle_protocol": self.pickle_protocol,
    })
    # Only serialize the cache element's plugin config when one is set.
    if self.cache_element:
        merge_dict(c['cache_element'],
                   plugin.to_plugin_config(self.cache_element))
    return c
def get_default_config(cls):
    """
    Generate and return a default configuration dictionary for this class.

    :return: Default configuration dictionary for the class.
    :rtype: dict
    """
    c = super(IqrService, cls).get_default_config()
    # Relevancy-index default, seeded with the IQR session's default
    # relevancy-index parameters.
    c_rel_index = plugin.make_config(
        get_relevancy_index_impls()
    )
    merge_dict(c_rel_index, iqr_session.DFLT_REL_INDEX_CONFIG)
    merge_dict(c, {
        "iqr_service": {
            "positive_seed_neighbors": 500,
            # Human-readable notes describing each plugin slot below.
            "plugin_notes": {
                "relevancy_index_config":
                    "The relevancy index config provided should not have "
                    "persistent storage configured as it will be used in "
                    "such a way that instances are created, built and "
                    "destroyed often.",
                "descriptor_index":
                    "This is the index from which given positive and "
                    "negative example descriptors are retrieved from. "
                    "Not used for nearest neighbor querying. "
                    "This index must contain all descriptors that could "
                    "possibly be used as positive/negative examples and "
                    "updated accordingly.",
                "neighbor_index":
                    "This is the neighbor index to pull initial near-"
                    "positive descriptors from.",
                "classifier_config":
                    "The configuration to use for training and using "
                    "classifiers for the /classifier endpoint. "
                    "When configuring a classifier for use, don't fill "
                    "out model persistence values as many classifiers "
                    "may be created and thrown away during this service's "
                    "operation.",
                "classification_factory":
                    "Selection of the backend in which classifications "
                    "are stored. The in-memory version is recommended "
                    "because normal caching mechanisms will not account "
                    "for the variety of classifiers that can potentially "
                    "be created via this utility.",
            },
            "plugins": {
                "relevancy_index_config": c_rel_index,
                "descriptor_index": plugin.make_config(
                    get_descriptor_index_impls()
                ),
                "neighbor_index":
                    plugin.make_config(get_nn_index_impls()),
                "classifier_config":
                    plugin.make_config(get_classifier_impls()),
                "classification_factory":
                    ClassificationElementFactory.get_default_config(),
            }
        }
    })
    return c
def get_config(self):
    """
    Return the JSON-compliant configuration dictionary for this instance.

    :return: JSON type compliant configuration dictionary.
    :rtype: dict
    """
    c = merge_dict(self.get_default_config(), {
        'leaf_size': self.leaf_size,
        'random_seed': self.random_seed,
    })
    # Only serialize the cache element's plugin config when one is set.
    if self.cache_element:
        c['cache_element'] = merge_dict(
            c['cache_element'],
            plugin.to_plugin_config(self.cache_element))
    return c
def make_response_json(message, **params):
    """
    Construct a basic JSON flask response containing a message, a
    timestamp, and any extra key-value data provided.

    :param message: String descriptive message to send back.
    :type message: str

    :param params: Other key-value data to include in response JSON.

    :return: Flask JSON response.
    :rtype: flask.Response
    """
    # Timestamp the response in both unix and human-readable UTC forms.
    timestamp = {
        "unix": time.time(),
        "utc": time.asctime(time.gmtime()),
    }
    payload = {"message": message, "time": timestamp}
    merge_dict(payload, params)
    return flask.jsonify(**payload)
def from_config(cls, config_dict, merge_default=True):
    """
    Override to just pass the configuration dictionary to constructor
    """
    # Mirrors the base implementation, except the constructor receives the
    # dictionary itself instead of expanded keyword arguments.
    if merge_default:
        base = cls.get_default_config()
        merge_dict(base, config_dict)
        config_dict = base
    return cls(config_dict)
def get_default_config(cls):
    """
    Generate and return a default configuration dictionary for this class.

    :return: Default configuration dictionary for the class.
    :rtype: dict
    """
    c = super(IqrService, cls).get_default_config()
    # Relevancy-index default, seeded with the IQR session's default
    # relevancy-index parameters.
    c_rel_index = plugin.make_config(get_relevancy_index_impls())
    merge_dict(c_rel_index, iqr_session.DFLT_REL_INDEX_CONFIG)
    merge_dict(
        c,
        {
            "iqr_service": {
                "positive_seed_neighbors": 500,
                # Human-readable notes describing each plugin slot below.
                "plugin_notes": {
                    "relevancy_index_config":
                        "The relevancy index config provided should not have "
                        "persistent storage configured as it will be used in "
                        "such a way that instances are created, built and "
                        "destroyed often.",
                    "descriptor_index":
                        "This is the index from which given positive and "
                        "negative example descriptors are retrieved from. "
                        "Not used for nearest neighbor querying. "
                        "This index must contain all descriptors that could "
                        "possibly be used as positive/negative examples and "
                        "updated accordingly.",
                    "neighbor_index":
                        "This is the neighbor index to pull initial near-"
                        "positive descriptors from.",
                    "classifier_config":
                        "The configuration to use for training and using "
                        "classifiers for the /classifier endpoint. "
                        "When configuring a classifier for use, don't fill "
                        "out model persistence values as many classifiers "
                        "may be created and thrown away during this service's "
                        "operation.",
                    "classification_factory":
                        "Selection of the backend in which classifications "
                        "are stored. The in-memory version is recommended "
                        "because normal caching mechanisms will not account "
                        "for the variety of classifiers that can potentially "
                        "be created via this utility.",
                },
                "plugins": {
                    "relevancy_index_config": c_rel_index,
                    "descriptor_index":
                        plugin.make_config(get_descriptor_index_impls()),
                    "neighbor_index":
                        plugin.make_config(get_nn_index_impls()),
                    "classifier_config":
                        plugin.make_config(get_classifier_impls()),
                    "classification_factory":
                        ClassificationElementFactory.get_default_config(),
                }
            }
        })
    return c
def train(self, class_examples=None, **kwds):
    """
    Train the supervised classifier model.

    If a model is already loaded, we will raise an exception in order to
    prevent accidental overwrite.

    If the same label is provided to both ``class_examples`` and ``kwds``,
    the examples given to the reference in ``kwds`` will prevail.

    *NOTE:* **This abstract method provides generalized error checking and
    combines input mappings into a single dictionary which we return. Thus,
    this should be called via ``super`` in implementing methods.**

    :param class_examples: Dictionary mapping class labels to iterables of
        DescriptorElement training examples.
    :type class_examples: dict[collections.Hashable,
        collections.Iterable[smqtk.representation.DescriptorElement]]

    :param kwds: Keyword assignment of labels to iterables of
        DescriptorElement training examples. Keyword provided iterables
        are used in place of class iterables provided in
        ``class_examples`` when there are conflicting keys.
    :type kwds: dict[str,
        collections.Iterable[smqtk.representation.DescriptorElement]]

    :raises ValueError: There were no class examples provided.
    :raises ValueError: Less than 2 classes were given.
    :raises RuntimeError: A model already exists in this instance.
        Following through with training would overwrite this model.
        Throwing an exception for information protection.
    """
    if self.has_model():
        raise RuntimeError("Instance currently has a model. Halting "
                           "training to prevent overwrite of existing "
                           "trained model.")

    if class_examples is None:
        class_examples = {}

    # Combine positional and keyword example maps; kwds entries win on
    # conflicting labels because they are merged second.
    merged = {}
    merge_dict(merged, class_examples)
    merge_dict(merged, kwds)

    if not merged:
        raise ValueError("No class examples were provided.")
    elif len(merged) < 2:
        raise ValueError("Need 2 or more classes for training. Given %d."
                         % len(merged))

    # TODO(paul.tunison): Check that the same values/descriptors are not
    #   assigned to multiple labels?

    return merged
def from_config(cls, config_dict, merge_default=True): """ Instantiate a new instance of this class given the configuration JSON-compliant dictionary encapsulating initialization arguments. This method should not be called via super unless and instance of the class is desired. :param config_dict: JSON compliant dictionary encapsulating a configuration. :type config_dict: dict :param merge_default: Merge the given configuration on top of the default provided by ``get_default_config``. :type merge_default: bool :return: Constructed instance from the provided config. :rtype: LSHNearestNeighborIndex """ # Controlling merge here so we can control known comment stripping from # default config. if merge_default: merged = cls.get_default_config() merge_dict(merged, config_dict) else: merged = config_dict merged['lsh_functor'] = \ plugin.from_plugin_config(merged['lsh_functor'], get_lsh_functor_impls()) merged['descriptor_index'] = \ plugin.from_plugin_config(merged['descriptor_index'], get_descriptor_index_impls()) # Hash index may be None for a default at-query-time linear indexing if merged['hash_index'] and merged['hash_index']['type']: merged['hash_index'] = \ plugin.from_plugin_config(merged['hash_index'], get_hash_index_impls()) else: cls.get_logger().debug( "No HashIndex impl given. Passing ``None``.") merged['hash_index'] = None # remove possible comment added by default generator if 'hash_index_comment' in merged: del merged['hash_index_comment'] merged['hash2uuids_kvstore'] = \ plugin.from_plugin_config(merged['hash2uuids_kvstore'], get_key_value_store_impls()) return super(LSHNearestNeighborIndex, cls).from_config(merged, False)
def from_config(cls, config_dict, merge_default=True):
    """
    Instantiate a new instance of this class given the configuration
    JSON-compliant dictionary encapsulating initialization arguments.

    This method should not be called via super unless and instance of the
    class is desired.

    :param config_dict: JSON compliant dictionary encapsulating
        a configuration.
    :type config_dict: dict

    :param merge_default: Merge the given configuration on top of the
        default provided by ``get_default_config``.
    :type merge_default: bool

    :return: Constructed instance from the provided config.
    :rtype: FaissNearestNeighborsIndex
    """
    if merge_default:
        cfg = cls.get_default_config()
        merge_dict(cfg, config_dict)
    else:
        cfg = config_dict

    # Turn nested plugin configurations into live instances.
    cfg['descriptor_set'] = plugin.from_plugin_config(
        cfg['descriptor_set'], get_descriptor_index_impls()
    )
    cfg['uid2idx_kvs'] = plugin.from_plugin_config(
        cfg['uid2idx_kvs'], get_key_value_store_impls()
    )
    cfg['idx2uid_kvs'] = plugin.from_plugin_config(
        cfg['idx2uid_kvs'], get_key_value_store_impls()
    )

    # Index (and parameter) cache elements are optional; a missing or
    # type-less configuration means no persistent cache.
    if (cfg['index_element'] and
            cfg['index_element']['type']):
        index_element = plugin.from_plugin_config(
            cfg['index_element'], get_data_element_impls())
        cfg['index_element'] = index_element
    else:
        cfg['index_element'] = None

    if (cfg['index_param_element'] and
            cfg['index_param_element']['type']):
        index_param_element = plugin.from_plugin_config(
            cfg['index_param_element'], get_data_element_impls())
        cfg['index_param_element'] = index_param_element
    else:
        cfg['index_param_element'] = None

    return super(FaissNearestNeighborsIndex, cls).from_config(cfg, False)
def from_config(cls, config_dict, merge_default=True): """ Instantiate a new instance of this class given the configuration JSON-compliant dictionary encapsulating initialization arguments. This method should not be called via super unless and instance of the class is desired. :param config_dict: JSON compliant dictionary encapsulating a configuration. :type config_dict: dict :param merge_default: Merge the given configuration on top of the default provided by ``get_default_config``. :type merge_default: bool :return: Constructed instance from the provided config. :rtype: LSHNearestNeighborIndex """ # Controlling merge here so we can control known comment stripping from # default config. if merge_default: merged = cls.get_default_config() merge_dict(merged, config_dict) else: merged = config_dict merged['lsh_functor'] = \ plugin.from_plugin_config(merged['lsh_functor'], get_lsh_functor_impls()) merged['descriptor_index'] = \ plugin.from_plugin_config(merged['descriptor_index'], get_descriptor_index_impls()) # Hash index may be None for a default at-query-time linear indexing if merged['hash_index'] and merged['hash_index']['type']: merged['hash_index'] = \ plugin.from_plugin_config(merged['hash_index'], get_hash_index_impls()) else: cls.get_logger().debug("No HashIndex impl given. Passing ``None``.") merged['hash_index'] = None # remove possible comment added by default generator if 'hash_index_comment' in merged: del merged['hash_index_comment'] merged['hash2uuids_kvstore'] = \ plugin.from_plugin_config(merged['hash2uuids_kvstore'], get_key_value_store_impls()) return super(LSHNearestNeighborIndex, cls).from_config(merged, False)
def test_subset_merge(self):
    """Keys present in both dicts take the value from the second dict."""
    target = {'a': 1, 'b': 2}
    overrides = {'a': 3}
    merge_dict(target, overrides)
    ntools.assert_equal(target, {'a': 3, 'b': 2})
def test_disjoint_update(self):
    """Merging dicts with disjoint keys is a plain union."""
    target = {'a': 1, 'b': 2}
    addition = {'c': 3}
    merge_dict(target, addition)
    ntools.assert_equal(target, {'a': 1, 'b': 2, 'c': 3})
def get_default_config(cls):
    """
    Generate and return a default configuration dictionary for this class.

    :return: Default configuration dictionary for the class.
    :rtype: dict
    """
    c = super(IqrSearchApp, cls).get_default_config()
    merge_dict(c, {
        "mongo": {
            "server": "127.0.0.1:27017",
            "database": "smqtk",
        },
        # Each entry in this mapping generates a new tab in the GUI
        "iqr_tabs": [
            IqrSearch.get_default_config(),
        ]
    })
    return c
def get_default_config(cls):
    """
    Generate and return a default configuration dictionary for this class.

    :return: Default configuration dictionary for the class.
    :rtype: dict
    """
    c = super(IqrSearchDispatcher, cls).get_default_config()
    merge_dict(c, {
        "mongo": {
            "server": "127.0.0.1:27017",
            "database": "smqtk",
        },
        # Each entry in this mapping generates a new tab in the GUI
        "iqr_tabs": {
            "__default__": IqrSearch.get_default_config(),
        },
    })
    return c
def test_merge_dict_deepcopy(self):
    # dict merger with deepcopy
    merge_dict(self.a, self.b, deep_copy=True)
    nose.tools.assert_equal(self.a, self.expected)
    # Merged mutable values should be equal in value to what's in ``b``
    # but be DISTINCT instances, since ``deep_copy=True`` copies them.
    nose.tools.assert_equal(self.a['nested']['l'], self.b['nested']['l'])
    nose.tools.assert_is_not(self.a['nested']['l'], self.b['nested']['l'])
    nose.tools.assert_equal(self.a['nested']['even_deeper']['j'],
                            self.b['nested']['even_deeper']['j'])
    nose.tools.assert_is_not(self.a['nested']['even_deeper']['j'],
                             self.b['nested']['even_deeper']['j'])
def test_subset_merge(self):
    """Overlapping keys take the value from the second dictionary."""
    base = {'a': 1, 'b': 2}
    update = {'a': 3}
    merge_dict(base, update)
    ntools.assert_equal(base, {'a': 3, 'b': 2})
def train(self, class_examples=None, **kwds):
    """
    Train the supervised classifier model.

    If a model is already loaded, we will raise an exception in order to
    prevent accidental overwrite.

    If the same label is provided to both ``class_examples`` and ``kwds``,
    the examples given to the reference in ``kwds`` will prevail.

    *NOTE:* **This abstract method provides generalized error checking and
    combines input mappings into a single dictionary which we return. Thus,
    this should be called via ``super`` in implementing methods.**

    :param class_examples: Dictionary mapping class labels to iterables of
        DescriptorElement training examples.
    :type class_examples: dict[collections.Hashable,
        collections.Iterable[smqtk.representation.DescriptorElement]]

    :param kwds: Keyword assignment of labels to iterables of
        DescriptorElement training examples.
    :type kwds: dict[str,
        collections.Iterable[smqtk.representation.DescriptorElement]]

    :raises ValueError: There were no class examples provided.
    :raises ValueError: Less than 2 classes were given.
    :raises RuntimeError: A model already exists in this instance.
        Following through with training would overwrite this model.
        Throwing an exception for information protection.
    """
    if self.has_model():
        raise RuntimeError("Instance currently has a model. Halting "
                           "training to prevent overwrite of existing "
                           "trained model.")

    if class_examples is None:
        class_examples = {}

    # Combine positional and keyword example maps; kwds entries win on
    # conflicting labels because they are merged second.
    merged = {}
    merge_dict(merged, class_examples)
    merge_dict(merged, kwds)

    if not merged:
        raise ValueError("No class examples were provided.")
    elif len(merged) < 2:
        # BUG FIX: the count was previously passed as a second exception
        # argument (logging-style), so "%d" was never interpolated. Use
        # the %-operator as in the sibling implementation.
        raise ValueError("Need 2 or more classes for training. Given %d."
                         % len(merged))

    return merged
def test_merge_dict_shallow(self):
    # basic dict merger
    merge_dict(self.a, self.b)
    nose.tools.assert_equal(self.a, self.expected)
    # Without deep_copy, merged mutable values should be the very same
    # instances as what's in ``b`` (identity, not just equality).
    nose.tools.assert_equal(self.a['nested']['l'], self.b['nested']['l'])
    nose.tools.assert_is(self.a['nested']['l'], self.b['nested']['l'])
    nose.tools.assert_equal(self.a['nested']['even_deeper']['j'],
                            self.b['nested']['even_deeper']['j'])
    nose.tools.assert_is(self.a['nested']['even_deeper']['j'],
                         self.b['nested']['even_deeper']['j'])
def test_disjoint_update(self):
    """Disjoint-key merge is a plain union of the two dictionaries."""
    base = {'a': 1, 'b': 2}
    update = {'c': 3}
    merge_dict(base, update)
    ntools.assert_equal(base, {'a': 1, 'b': 2, 'c': 3})
def from_config(cls, config_dict, merge_default=True):
    """
    Instantiate a new instance of this class given the configuration
    JSON-compliant dictionary encapsulating initialization arguments.

    :param config_dict: JSON compliant dictionary encapsulating
        a configuration.
    :type config_dict: dict

    :param merge_default: Merge the given configuration on top of the
        default provided by ``get_default_config``.
    :type merge_default: bool

    :return: Constructed instance from the provided config.
    :rtype: KVSDataSet
    """
    if merge_default:
        config_dict = merge_dict(cls.get_default_config(),
                                 config_dict)
    # Convert KVStore config to instance for constructor.
    kvs_inst = plugin.from_plugin_config(config_dict['kvstore'],
                                         get_key_value_store_impls())
    config_dict['kvstore'] = kvs_inst
    return super(KVSDataSet, cls).from_config(config_dict, False)
def test_partial_update(self):
    """Overlapping keys are overridden; new keys are added."""
    base = {'a': 1, 'b': 2}
    update = {'a': 3, 'c': 4}
    merge_dict(base, update)
    ntools.assert_equal(base, {'a': 3, 'b': 2, 'c': 4})
def from_config(cls, config_dict, merge_default=True):
    """
    Instantiate a new instance of this class given the configuration
    JSON-compliant dictionary encapsulating initialization arguments.

    :param config_dict: JSON compliant dictionary encapsulating
        a configuration.
    :type config_dict: dict

    :param merge_default: Merge the given configuration on top of the
        default provided by ``get_default_config``.
    :type merge_default: bool

    :return: Constructed instance from the provided config.
    :rtype: MemoryDescriptorIndex
    """
    if merge_default:
        config_dict = merge_dict(cls.get_default_config(),
                                 config_dict)

    # Optionally construct cache element from sub-config.
    if config_dict['cache_element'] \
            and config_dict['cache_element']['type']:
        e = plugin.from_plugin_config(config_dict['cache_element'],
                                      get_data_element_impls())
        config_dict['cache_element'] = e
    else:
        config_dict['cache_element'] = None

    return super(MemoryDescriptorIndex, cls).from_config(config_dict, False)
def from_config(cls, c, merge_default=True):
    """
    Instantiate a new instance of this class given the configuration
    JSON-compliant dictionary encapsulating initialization arguments.

    This method should not be called via super unless an instance of the
    class is desired.

    :param c: JSON compliant dictionary encapsulating a configuration.
    :type c: dict

    :param merge_default: Merge the given configuration on top of the
        default provided by ``get_default_config``.
    :type merge_default: bool

    :return: Constructed instance from the provided config.
    :rtype: DataMemorySet
    """
    if merge_default:
        c = merge_dict(cls.get_default_config(), c)

    # Optionally construct a cache element; absent or type-less config
    # means no caching.
    cache_element = None
    if c['cache_element'] and c['cache_element']['type']:
        cache_element = plugin.from_plugin_config(c['cache_element'],
                                                  get_data_element_impls())
    c['cache_element'] = cache_element

    return super(DataMemorySet, cls).from_config(c, False)
def get_config(self):
    """
    Return the JSON-compliant configuration dictionary for this instance.

    :return: JSON type compliant configuration dictionary.
    :rtype: dict
    """
    c = merge_dict(self.get_default_config(), {
        "pickle_protocol": self.pickle_protocol,
    })
    # Only serialize the cache element's plugin config when one is set.
    if self.cache_element:
        c['cache_element'] = merge_dict(
            c['cache_element'],
            plugin.to_plugin_config(self.cache_element))
    return c
def test_merge_dict_deepcopy(self):
    # dict merger with deepcopy
    merge_dict(self.a, self.b, deep_copy=True)
    self.assertEqual(self.a, self.expected)
    # Merged mutable values should be equal in value to what's in ``b``
    # but be DISTINCT instances, since ``deep_copy=True`` copies them.
    self.assertEqual(self.a['nested']['l'], self.b['nested']['l'])
    self.assertIsNot(self.a['nested']['l'], self.b['nested']['l'])
    self.assertEqual(self.a['nested']['even_deeper']['j'],
                     self.b['nested']['even_deeper']['j'])
    self.assertIsNot(self.a['nested']['even_deeper']['j'],
                     self.b['nested']['even_deeper']['j'])
def get_config(self):
    """
    Return the JSON-compliant configuration dictionary for this instance.

    :return: JSON type compliant configuration dictionary.
    :rtype: dict
    """
    c = self.get_default_config()
    # Only serialize the cache element's plugin config when one is set.
    if self.cache_element:
        c['cache_element'] = merge_dict(c['cache_element'],
                                        plugin.to_plugin_config(
                                            self.cache_element))
    return c
def test_merge_dict_shallow(self):
    # basic dict merger
    merge_dict(self.a, self.b)
    self.assertEqual(self.a, self.expected)
    # Without deep_copy, merged mutable values should be the very same
    # instances as what's in ``b`` (identity, not just equality).
    self.assertEqual(self.a['nested']['l'], self.b['nested']['l'])
    self.assertIs(self.a['nested']['l'], self.b['nested']['l'])
    self.assertEqual(self.a['nested']['even_deeper']['j'],
                     self.b['nested']['even_deeper']['j'])
    self.assertIs(self.a['nested']['even_deeper']['j'],
                  self.b['nested']['even_deeper']['j'])
def from_config(cls, config_dict, merge_default=True):
    """
    Instantiate a new instance of this class given the configuration
    JSON-compliant dictionary encapsulating initialization arguments.

    This method should not be called via super unless an instance of the
    class is desired.

    :param config_dict: JSON compliant dictionary encapsulating
        a configuration.
    :type config_dict: dict

    :param merge_default: Merge the given configuration on top of the
        default provided by ``get_default_config``.
    :type merge_default: bool

    :return: Constructed instance from the provided config.
    :rtype: LinearHashIndex
    """
    if merge_default:
        config_dict = merge_dict(cls.get_default_config(),
                                 config_dict)
    # Optionally construct a cache element; absent or type-less config
    # means no caching.
    cache_element = None
    if config_dict['cache_element'] \
            and config_dict['cache_element']['type']:
        cache_element = \
            plugin.from_plugin_config(config_dict['cache_element'],
                                      get_data_element_impls())
    config_dict['cache_element'] = cache_element
    return super(LinearHashIndex, cls).from_config(config_dict, False)
def from_config(cls, config_dict, merge_default=True):
    """
    Instantiate a new instance of this class given the configuration
    JSON-compliant dictionary encapsulating initialization arguments.

    This method should not be called via super unless an instance of the
    class is desired.

    :param config_dict: JSON compliant dictionary encapsulating
        a configuration.
    :type config_dict: dict

    :param merge_default: Merge the given configuration on top of the
        default provided by ``get_default_config``.
    :type merge_default: bool

    :return: Constructed instance from the provided config.
    :rtype: SkLearnBallTreeHashIndex
    """
    if merge_default:
        config_dict = merge_dict(cls.get_default_config(),
                                 config_dict)
    # Parse ``cache_element`` configuration if set.
    cache_element = None
    if config_dict['cache_element'] and \
            config_dict['cache_element']['type']:
        cache_element = \
            plugin.from_plugin_config(config_dict['cache_element'],
                                      get_data_element_impls())
    config_dict['cache_element'] = cache_element
    return super(SkLearnBallTreeHashIndex, cls).from_config(config_dict,
                                                            False)
def get_default_config(cls):
    """
    Generate and return a default configuration dictionary for this class.
    This will be primarily used for generating what the configuration
    dictionary would look like for this class without instantiating it.

    :return: Default configuration dictionary for the class.
    :rtype: dict
    """
    c = super(DescriptorServiceServer, cls).get_default_config()
    # Layer service-specific plugin defaults on top of the base app config.
    merge_dict(c, {
        "descriptor_factory": DescriptorElementFactory.get_default_config(),
        "descriptor_generators": {
            "example": plugin.make_config(get_descriptor_generator_impls())
        }
    })
    return c
def test_overrides(self):
    """A dict value in the second dict replaces a scalar in the first."""
    base = {'a': 1, 'b': 2}
    override = {'b': {'c': 3}}
    merge_dict(base, override)
    ntools.assert_equal(base, {'a': 1, 'b': {'c': 3}})
def test_nested(self):
    """Recursive merge: only leaf values present in the update change."""
    d1 = {
        'a': 1,
        'b': {'c': 2, 'd': {'e': 3}},
        'f': {'g': 4, 'h': {'i': 5}},
    }
    d2 = {
        'b': {'c': 6},
        'f': {'h': {'i': 7}},
        'j': 8,
    }
    merge_dict(d1, d2)
    ntools.assert_equal(d1, {
        'a': 1,
        'b': {'c': 6, 'd': {'e': 3}},
        'f': {'g': 4, 'h': {'i': 7}},
        'j': 8,
    })
def get_config(self):
    """
    Return the JSON-compliant configuration dictionary for this instance.

    :return: JSON type compliant configuration dictionary.
    :rtype: dict
    """
    # If no cache elements (set to None), return default plugin configs.
    c = merge_dict(self.get_default_config(), {
        "bit_length": self.bit_length,
        "itq_iterations": self.itq_iterations,
        "normalize": self.normalize,
        "random_seed": self.random_seed,
    })
    if self.mean_vec_cache_elem:
        c['mean_vec_cache'] = \
            plugin.to_plugin_config(self.mean_vec_cache_elem)
    if self.rotation_cache_elem:
        c['rotation_cache'] = \
            plugin.to_plugin_config(self.rotation_cache_elem)
    return c
def get_default_config(cls):
    """
    Generate and return a default configuration dictionary for this class.
    It is not be guaranteed that the configuration dictionary returned
    from this method is valid for construction of an instance of this
    class.

    :return: Default configuration dictionary for the class.
    :rtype: dict
    """
    c = super(KVSDataSet, cls).get_default_config()
    # Expand the parent's kvstore instance default into a full plugin
    # configuration block.
    c['kvstore'] = merge_dict(
        plugin.make_config(get_key_value_store_impls()),
        plugin.to_plugin_config(c['kvstore'])
    )
    return c
def test_configuration_with_caches(self):
    # Construct an ItqFunctor from a config whose cache elements carry
    # serialized numpy arrays, and verify everything round-trips.
    expected_mean_vec = numpy.array([1, 2, 3])
    expected_rotation = numpy.eye(3)

    # Serialize the arrays the same way the functor's caches would.
    expected_mean_vec_bytes = BytesIO()
    # noinspection PyTypeChecker
    numpy.save(expected_mean_vec_bytes, expected_mean_vec)
    expected_mean_vec_bytes = expected_mean_vec_bytes.getvalue()

    expected_rotation_bytes = BytesIO()
    # noinspection PyTypeChecker
    numpy.save(expected_rotation_bytes, expected_rotation)
    expected_rotation_bytes = expected_rotation_bytes.getvalue()

    new_parts = {
        'mean_vec_cache': {
            'DataMemoryElement': {
                'bytes': expected_mean_vec_bytes
            },
            'type': 'DataMemoryElement'
        },
        'rotation_cache': {
            'DataMemoryElement': {
                'bytes': expected_rotation_bytes
            },
            'type': 'DataMemoryElement'
        },
        'bit_length': 153,
        'itq_iterations': 7,
        'normalize': 2,
        'random_seed': 58,
    }
    c = merge_dict(ItqFunctor.get_default_config(), new_parts)
    itq = ItqFunctor.from_config(c)

    # Checking that loaded parameters were correctly set and cache elements
    # correctly return intended vector/matrix.
    numpy.testing.assert_equal(itq.mean_vec, [1, 2, 3])
    numpy.testing.assert_equal(itq.rotation, [[1, 0, 0],
                                              [0, 1, 0],
                                              [0, 0, 1]])
    self.assertEqual(itq.bit_length, 153)
    self.assertEqual(itq.itq_iterations, 7)
    self.assertEqual(itq.normalize, 2)
    self.assertEqual(itq.random_seed, 58)