def store(cls, env, context_stack, chunk_size=1024):
    """Serialize the given application context into the environment dict for later
    retrieval by a soon-to-be-started process.

    @param env the environment dict to be used for the soon-to-be-started process
    @param context_stack a ContextStack instance from which to store all data
    @param chunk_size the size of each chunk to be stored within the environment"""
    payload = cls._encode(context_stack.settings().data())
    if payload:
        # Environment values are size-limited, so the encoded settings are split
        # into chunks; the chunk keys are recorded under a single well-known variable
        chunker = StringChunker()
        chunk_keys = chunker.split(payload, chunk_size, env)
        env[cls.storage_environment_variable] = cls.key_sep.join(chunk_keys)
    # end handle source too big to be stored

    # store process data as well
    process_data = context_stack.settings().value_by_schema(process_schema)
    cls._store_yaml_data(cls.process_information_environment_variable, env, process_data)

    # Store ConfigHierarchy hashmap for restoring it later:
    # merge the maps of every stack-aware context on the stack into one
    merged_hashes = OrderedDict()
    for ctx in context_stack.stack():
        if isinstance(ctx, StackAwareHierarchicalContext):
            merged_hashes.update(ctx.hash_map())
    # end for each env on stack

    # Always store it, even if empty
    env[cls.config_file_hash_map_environment_variable] = cls._encode(merged_hashes)
def test_diff(self):
    """Diff a brand-new value tree against nothing and verify the delegate records all of it"""
    log = logging.getLogger("bkvstore")
    existing_value = None
    new_value = OrderedDict([
        ("project_root", OrderedDict([("fs_path", "/mnt/projects")])),
        ("python_libs", "/foo/bar"),
    ])

    delegate = KeyValueStoreModifierDiffDelegate("", log)
    TwoWayDiff().diff(delegate, existing_value, new_value)

    # NOTE: These can loose their ordering !
    # For some reason, this only really happens in py3, which as an entirely different yaml/ordereddict
    # implmementation
    def first_item(pair):
        return pair[0]
    assert sorted(delegate.result().items(), key=first_item) == sorted(new_value.items(), key=first_item)
class StackAwareHierarchicalContext(HierarchicalContext):
    """A context which will assure a configuration file is never loaded twice.

    This can happen if paths have common roots, which is the case almost always.
    To prevent duplicate loads, which in turn may yield somewhat unexpected application settings,
    this implementation uses the current applications stack to find other Contexts of our type.
    """
    __slots__ = (
        '_hash_map',    # OrderedDict: md5 hexdigest of file contents -> file path
        '_app',         # Application instance to query, or None for the global one
    )

    def __init__(self, directory, application=None, **kwargs):
        """Initialize this instance.

        @param directory passed on to the HierarchicalContext base constructor
        @param application the application whose context stack is searched for duplicate
        loads. If unspecified, the global one will be used instead
        @param kwargs passed on to the base class"""
        super(StackAwareHierarchicalContext, self).__init__(directory, **kwargs)
        self._hash_map = OrderedDict()
        self._app = application

    def _iter_application_contexts(self):
        """@return iterator yielding environments of our type on the stack, which are not us"""
        for ctx in (self._app or bapp.main()).context().stack():
            # we should be last, but lets not assume that
            if ctx is self or not isinstance(ctx, StackAwareHierarchicalContext):
                continue
            yield ctx
        # end for each environment

    def _filter_files(self, files):
        """@note our implementation will compare file hashes in our own hash map with ones of other
        instances of this type on the stack to assure we don't accidentally load the same file
        @note This method will update our _hash_map member"""
        # NOTE: it's important to stay within the ascii range (thus hexdigest()), as this map at some
        # point gets encoded. In py2, there's just bytes, in py3, it will be tempted to interpret these
        # as strings, without having a chance to find a suitable encoding
        for config_file in files:
            # use a context manager so the handle is closed deterministically
            # (previously the file object was left for the garbage collector)
            with open(config_file, 'rb') as fobj:
                self._hash_map[hashlib.md5(fobj.read()).hexdigest()] = config_file
        # end for each file

        # subtract all hashes already claimed by other contexts on the stack
        our_hashes = set(self._hash_map.keys())
        for env in self._iter_application_contexts():
            our_hashes -= set(env._hash_map.keys())
        # end for each environment

        # return all remaining ones
        # Make sure we don't change the sorting order !
        return list(self._hash_map[key] for key in self._hash_map if key in our_hashes)

    # -------------------------
    # @name Interface
    # @{

    def hash_map(self):
        """@return a dictionary mapping md5 hexdigest (ascii) strings to the path of the loaded file"""
        return self._hash_map
def construct_yaml_map(self, node):
    """Construct a yaml mapping node into an OrderedDict, preserving key order.

    The (initially empty) mapping is yielded first, so that anchors and recursive
    references can be resolved before its contents are filled in."""
    mapping = OrderedDict()
    yield mapping
    mapping.update(self.construct_mapping(node))
def test_settings(self, rw_dir):
    """Tests for settings and JSon serializer in the same moment"""
    target = rw_dir / 'foo' + PersistentSettings.StreamSerializerType.file_extension
    assert not target.isfile()
    schema = KeyValueStoreSchema(RootKey, {'foo' : dict(baz = int,
                                                        foz = float,),
                                           'bar' : str,
                                           'ordered' : list,})

    data = OrderedDict({'foo' : OrderedDict(dict(baz = 5, foz = 4.0)),
                        'bar' : 'hello',
                        'ordered' : [1, 2]})

    # Data could come from an actual kvstore if we like
    settings = PersistentSettings(data, target, take_ownership = False)
    data.bar = 5
    assert settings.data().bar != data.bar, 'should have made a copy'

    settings_data = settings.value_by_schema(schema)
    data.bar = 'hello'
    assert settings_data.bar == data.bar
    assert settings_data.ordered == data.ordered
    settings_data.bar = 5
    settings_data.foo.baz = 10
    # note the incorrect type ! Its tranformed to a string
    settings_data.ordered = [3, 4, 5]
    settings.set_value_by_schema(schema, settings_data)
    # only the values we actually changed should show up as changes
    assert settings.changes()
    assert 'foz' not in settings.changes().foo

    assert settings.save_changes(open(target, 'w')) is settings, "It should be easy to save changes"
    assert settings.settings_data(open(target)), "should have something to work with"
    settings_data = settings.value_by_schema(schema)
    prev_size = target.stat().st_size
    assert target.isfile() and prev_size, "Should have created file by now"

    # changes should still be there, and file should be identical
    assert settings.changes() and settings.save_changes(open(target, 'w'))
    assert target.stat().st_size == prev_size

    # Now we pretend to be a in a new session and want to reload our settings
    new_settings = PersistentSettings(data, target, take_ownership = False)
    new_settings_data = new_settings.value_by_schema(schema)
    # NOTE: order is lost when diffing ! we use sets there ... . For now no problem
    # assert new_settings_data == settings_data
    assert new_settings_data.foo.baz == settings_data.foo.baz
    assert new_settings_data.bar == settings_data.bar
    assert new_settings_data.ordered == settings_data.ordered

    # Changes should be equivalent
    nsc = new_settings.changes()
    osc = settings.changes()
    assert set(nsc.keys()) == set(osc.keys()), "Order seems to change, but not the contents"
    assert nsc.foo == osc.foo
    assert nsc.bar == osc.bar
    assert nsc.ordered == osc.ordered

    # Now, instead of a space save, do a full one, which will cause all data to be written unconditionally.
    # This will forever override every base value
    settings.save_changes(open(target, 'w'), sparse=False)
    assert target.stat().st_size > prev_size

    # When reading it back, it should still be exactly the same, except that we will never see changes coming
    # through from the base
    new_settings = PersistentSettings(data, target, take_ownership = False)
    # we faithfully assume the other fields match as well
    assert new_settings.changes().ordered == settings.changes().ordered
def __init__(self, directory, application=None, **kwargs):
    """Initialize this instance.

    @param directory handed to the base class constructor
    @param application the application to use. If unspecified, the global one
    will be used instead
    @param kwargs forwarded to the base class"""
    super(StackAwareHierarchicalContext, self).__init__(directory, **kwargs)
    # which application's context stack to consult (None means: the global one)
    self._app = application
    # file-content hash -> file path; filled once files are filtered
    self._hash_map = OrderedDict()