def load_and_merge_safely(path_or_stream):
    """Load a file and merge the result using our delegate.

    Accepts either a path-like object or an already-open stream (anything
    with a ``read`` attribute).  The deserialized data is diffed against the
    delegate's current result via the two-way diff algorithm.

    NOTE(review): this appears to be a closure — ``self``, ``streamer``,
    ``delegate`` and ``cache_base`` are free variables presumably bound by an
    enclosing scope; confirm against the surrounding definition.
    On any load/parse failure the error is logged and the function returns
    early without merging.
    """
    try:
        # YES: THEY RETURN NONE IF THERE WAS NOTHING, INSTEAD OF DICT. GOD DAMNED ! Interface change !
        stream = path_or_stream
        stream_path = None
        # Only open the stream ourselves if we were handed a path, not a
        # file-like object; remember the path for the anchor logic below.
        if not hasattr(path_or_stream, 'read'):
            stream_path = path_or_stream
            stream = open(path_or_stream, 'rb')
        # end open stream as needed
        data = stream.read()
        use_cache = self._use_cache()
        if use_cache:
            # Cache key is the md5 of the raw contents, so any change to the
            # source file invalidates the cached pickle.
            cache_file = cache_base / \
                hashlib.md5(isinstance(data, str) and data.encode(DEFAULT_ENCODING) or data).hexdigest()
        # end
        if isinstance(data, bytes):
            # usually, this would be the case, but we don't always open the stream ourselves
            data = data.decode(DEFAULT_ENCODING)
        # end
        try:
            # Deliberate control flow: raising OSError forces the except
            # branch below, which performs the actual (uncached) deserialize.
            if not use_cache:
                raise OSError
            # end
            # NOTE(review): pickle.load on a cache file trusts that cache_base
            # is not writable by untrusted parties — confirm.
            data = pickle.load(open(cache_file, 'rb'))
        except (OSError, IOError):
            # Cache disabled or cache miss: parse the real contents, then
            # (best effort) populate the cache for next time.
            data = streamer.deserialize(PyStringIO(data))
            if use_cache:
                open(cache_file, 'wb').write(pickle.dumps(data))
        # end handle minimal IO caches
        if hasattr(stream, 'close'):
            stream.close()
        # end handle stream close
        # Add the path of the loaded configuration to allow referencing it in configuration.
        # This allows configuration to be relative to the configuration file !
        if stream_path and self.store_settings_paths:
            kvpath = KVPath(stream_path.realpath())
            # Anchored under data[<extension-without-dot>][<basename>]
            data.setdefault(stream_path.ext()[1:], dict())[stream_path.namebase()] = kvpath
        # end place anchor
    except (OSError, IOError):
        # IO problems: file missing, unreadable, etc. — log and bail out.
        self.log.error("Could not load %s file at '%s'", streamer.file_extension, path_or_stream, exc_info=True)
        return
    except Exception:
        # Anything else is treated as a malformed file — log and bail out.
        self.log.error("Invalid %s file at '%s'", streamer.file_extension, path_or_stream, exc_info=True)
        return
    # end handle exceptions
    # only in the first run, we have no result as basis yet
    self.log.debug("loaded and merged %s file '%s'", streamer.file_extension, path_or_stream)
    base = delegate.result()
    if base is NoValue:
        # First run: start the merge from an empty dict of the delegate's type.
        base = self.KeyValueStoreModifierDiffDelegateType.DictType()
    # end set base
    self.TwoWayDiffAlgorithmType().diff(delegate, base, data)
def graphite_submit(carbon_host, sample_list, port=CARBON_PORT):
    """Send the given sample_list to the given carbon_host.

    @param carbon_host sufficiently qualified host name or ip quadruple as string
    @param sample_list a list in the following format: [(path, (unix_timestamp, numeric))]
    @param port to connect to, with a suitable default
    @note one TCP connection is opened (and closed) per chunk of samples"""
    # Keep each payload small by submitting in chunks of 1000 samples —
    # carbon's pickle receiver rejects overly large messages.
    cs = 1000
    # range (not xrange) so this works under Python 3 as well; for this
    # bounded loop the behavior is identical on Python 2.
    for cursor in range(0, len(sample_list), cs):
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        sock.connect((socket.gethostbyname(carbon_host), port))
        try:
            payload = pickle.dumps(sample_list[cursor:cursor + cs])
            # Carbon's pickle protocol: network-byte-order unsigned-long
            # length prefix, followed by the pickled sample list.
            message = pack('!L', len(payload)) + payload
            sock.sendall(message)
        finally:
            # Always release the socket, even if the send fails.
            sock.close()
def _encode(cls, data): """@return encoded version of data, suitable to be stored in the environment""" # make sure we pickle with protocol 2, to allow running python3 for bootstrap, # which launches python2 # We also have to be sure it's a string object, in order to be working in an environment dict return binascii.b2a_base64(zlib.compress(pickle.dumps(data, 2), 9)).decode()