def process_chunks(self, chunks):
    """Process summary chunks, keeping only the most recent payload.

    Returns a dict with offset 0 and the latest chunk's data wrapped in a
    list, or False when the payload exceeds the upload size limit.
    """
    latest = chunks[-1].data
    if len(latest) <= util.MAX_LINE_SIZE:
        return {"offset": 0, "content": [latest]}
    # Payload too large to upload: warn once, report to sentry, drop it.
    message = "Summary data exceeds maximum size of {}. Dropping it.".format(
        util.to_human_size(util.MAX_LINE_SIZE))
    wandb.termerror(message, repeat=False)
    util.sentry_message(message)
    return False
def process_chunks(self, chunks):
    """Process metric chunks, filtering out any whose payload is oversized.

    Returns a dict with the starting chunk offset and the list of accepted
    chunk payloads. Oversized chunks are reported and skipped.
    """
    # TODO: chunk_id is getting reset on each request...
    start_offset = self._chunk_id
    self._chunk_id += len(chunks)

    accepted = []
    for chunk in chunks:
        payload = chunk.data
        if len(payload) <= util.MAX_LINE_SIZE:
            accepted.append(payload)
            continue
        # Too big to send: warn once and report, but keep processing the rest.
        msg = "Metric data exceeds maximum size of {} ({})".format(
            util.to_human_size(util.MAX_LINE_SIZE),
            util.to_human_size(len(payload)),
        )
        wandb.termerror(msg, repeat=False)
        util.sentry_message(msg)

    return {
        "offset": start_offset,
        "content": accepted,
    }
def test_to_human_size():
    """to_human_size formats byte counts in binary (KiB) and SI (KB) units."""
    # Binary (power-of-two) unit formatting.
    binary_cases = [
        (1000, "1000.0B"),
        (1000000, "976.6KiB"),
        (5000000, "4.8MiB"),
    ]
    for size, expected in binary_cases:
        assert util.to_human_size(size, units=util.POW_2_BYTES) == expected

    # Default SI (power-of-ten) unit formatting.
    si_cases = [
        (1000, "1000.0B"),
        (1000000, "1000.0KB"),
        (5000000, "5.0MB"),
    ]
    for size, expected in si_cases:
        assert util.to_human_size(size) == expected