Example #1
    def process_chunks(self, chunks):
        # The summary file is rewritten wholesale, so only the latest chunk matters.
        data = chunks[-1].data
        if len(data) > util.MAX_LINE_SIZE:
            msg = "Summary data exceeds maximum size of {}. Dropping it.".format(
                util.to_human_size(util.MAX_LINE_SIZE))
            wandb.termerror(msg, repeat=False)
            util.sentry_message(msg)
            return False
        return {"offset": 0, "content": [data]}
Example #2
File: file_stream.py Project: gampx/client
    def process_chunks(self, chunks):
        data = chunks[-1].data
        if len(data) > MAX_LINE_SIZE:
            msg = 'Summary data exceeds maximum size of {} bytes. Dropping it.'.format(
                MAX_LINE_SIZE)
            wandb.termerror(msg, repeat=False)
            util.sentry_message(msg)
            return False
        return {'offset': 0, 'content': [data]}
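Examples #1 and #2 are near-identical variants of the summary handler: the summary file is rewritten wholesale, so only the last chunk is kept, and an oversized payload is dropped with a warning instead of being truncated. The sketch below shows how such a handler behaves, assuming a simple Chunk container and a stand-in size limit; all names here are illustrative, not wandb's actual API:

    from collections import namedtuple

    # Illustrative stand-ins; the real limit and helpers live in wandb's modules.
    MAX_LINE_SIZE = 4 * 1024 * 1024  # hypothetical 4 MiB cap for this sketch
    Chunk = namedtuple("Chunk", ["filename", "data"])

    class SummaryFileHandler:
        """Keeps only the most recent summary line; drops oversized payloads."""

        def process_chunks(self, chunks):
            data = chunks[-1].data
            if len(data) > MAX_LINE_SIZE:
                print("Summary data exceeds maximum size of {} bytes. "
                      "Dropping it.".format(MAX_LINE_SIZE))
                return False
            return {"offset": 0, "content": [data]}

    handler = SummaryFileHandler()
    chunks = [Chunk("wandb-summary.json", '{"loss": 0.3}'),
              Chunk("wandb-summary.json", '{"loss": 0.1}')]
    print(handler.process_chunks(chunks))
    # -> {'offset': 0, 'content': ['{"loss": 0.1}']}: only the last chunk survives

The False return takes the place of the payload dict; presumably the caller treats it as a signal to skip the upload for this batch.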
Example #3
    def process_chunks(self, chunks):
        chunk_id = self._chunk_id
        self._chunk_id += len(chunks)
        chunk_data = []
        for chunk in chunks:
            if len(chunk.data) > MAX_LINE_SIZE:
                msg = 'Metric data exceeds maximum size of {} bytes. Dropping it.'.format(MAX_LINE_SIZE)
                wandb.termerror(msg, repeat=False)
                util.sentry_message(msg)
            else:
                chunk_data.append(chunk.data)

        return {
            'offset': chunk_id,
            'content': chunk_data,
        }
Example #4
    def process_chunks(self, chunks):
        chunk_id = self._chunk_id
        # TODO: chunk_id is getting reset on each request...
        self._chunk_id += len(chunks)
        chunk_data = []
        for chunk in chunks:
            if len(chunk.data) > util.MAX_LINE_SIZE:
                msg = "Metric data exceeds maximum size of {} ({})".format(
                    util.to_human_size(util.MAX_LINE_SIZE),
                    util.to_human_size(len(chunk.data)),
                )
                wandb.termerror(msg, repeat=False)
                util.sentry_message(msg)
            else:
                chunk_data.append(chunk.data)

        return {
            "offset": chunk_id,
            "content": chunk_data,
        }
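Examples #3 and #4 show the line-oriented variant used for metric data: chunks arrive in batches, the handler reports the offset of the first line in each batch, and oversized lines are filtered out individually so the rest of the batch still uploads (note the TODO in example #4 about _chunk_id being reset per request). Here is a minimal sketch of that offset bookkeeping, again with illustrative stand-ins rather than wandb's real modules:

    from collections import namedtuple

    MAX_LINE_SIZE = 4 * 1024 * 1024  # hypothetical cap, as above
    Chunk = namedtuple("Chunk", ["filename", "data"])

    class JsonlFileHandler:
        """Batches metric lines, tracking the offset of the first line sent."""

        def __init__(self):
            self._chunk_id = 0  # running count of lines processed so far

        def process_chunks(self, chunks):
            chunk_id = self._chunk_id
            self._chunk_id += len(chunks)
            # Drop individual oversized lines but keep the rest of the batch.
            chunk_data = [c.data for c in chunks
                          if len(c.data) <= MAX_LINE_SIZE]
            return {"offset": chunk_id, "content": chunk_data}

    handler = JsonlFileHandler()
    print(handler.process_chunks([Chunk("wandb-history.jsonl", '{"step": 0}')]))
    # -> {'offset': 0, 'content': ['{"step": 0}']}
    print(handler.process_chunks([Chunk("wandb-history.jsonl", '{"step": 1}')]))
    # -> {'offset': 1, 'content': ['{"step": 1}']}: offsets advance per batch

As in the originals, _chunk_id advances by the full batch length even when a line is dropped, so the reported offset can drift from the number of lines actually sent; whether that matters depends on how the server interprets offsets.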