def _handle_archive_key_entire(state, message, data):
    """Archive a key whose entire content arrived in a single message.

    Validates ``data`` against the declared segment size and md5 digest.
    On a mismatch, logs the error, pushes an event, and replies with a
    failure result. On success, writes the segment as one sequence
    (start / store sequence 0 / finish) through ``state["writer"]`` and
    replies with ``result == "success"``.
    """
    logger = logging.getLogger("_handle_archive_key_entire")
    logger.info(
        "%s %s %s %s",
        message["collection-id"],
        message["key"],
        message["timestamp-repr"],
        message["segment-num"],
    )

    # A single-message archive carries exactly one sequence: number 0.
    first_sequence_num = 0

    reply = {
        "message-type": "archive-key-final-reply",
        "client-tag": message["client-tag"],
        "message-id": message["message-id"],
        "result": None,
        "error-message": None,
    }

    # Reject a payload whose length disagrees with the declared size.
    if len(data) != message["segment-size"]:
        error_text = "size mismatch (%s != %s) %s %s %s %s" % (
            len(data),
            message["segment-size"],
            message["collection-id"],
            message["key"],
            message["timestamp-repr"],
            message["segment-num"],
        )
        logger.error(error_text)
        state["event-push-client"].error("size-mismatch", error_text)
        reply["result"] = "size-mismatch"
        reply["error-message"] = "segment size does not match expected value"
        state["resilient-server"].send_reply(reply)
        return

    # Reject a payload whose md5 digest disagrees with the declared digest.
    expected_md5 = b64decode(message["segment-md5-digest"])
    if hashlib.md5(data).digest() != expected_md5:
        error_text = "md5 mismatch %s %s %s %s" % (
            message["collection-id"],
            message["key"],
            message["timestamp-repr"],
            message["segment-num"],
        )
        logger.error(error_text)
        state["event-push-client"].error("md5-mismatch", error_text)
        reply["result"] = "md5-mismatch"
        reply["error-message"] = "segment md5 does not match expected value"
        state["resilient-server"].send_reply(reply)
        return

    # Translate node names to ids; the handoff node is optional.
    source_node_id = state["node-id-dict"][message["source-node-name"]]
    if message["handoff-node-name"] is None:
        handoff_node_id = None
    else:
        handoff_node_id = state["node-id-dict"][message["handoff-node-name"]]

    state["writer"].start_new_segment(
        message["collection-id"],
        message["key"],
        message["unified-id"],
        message["timestamp-repr"],
        message["conjoined-part"],
        message["segment-num"],
        source_node_id,
        handoff_node_id,
    )
    state["writer"].store_sequence(
        message["collection-id"],
        message["key"],
        message["unified-id"],
        message["timestamp-repr"],
        message["conjoined-part"],
        message["segment-num"],
        message["segment-size"],
        message["zfec-padding-size"],
        expected_md5,
        message["segment-adler32"],
        first_sequence_num,
        data,
    )

    Statgrabber.accumulate("nimbusio_write_requests", 1)
    Statgrabber.accumulate("nimbusio_write_bytes", len(data))

    state["writer"].finish_new_segment(
        message["collection-id"],
        message["unified-id"],
        message["timestamp-repr"],
        message["conjoined-part"],
        message["segment-num"],
        message["file-size"],
        message["file-adler32"],
        b64decode(message["file-hash"]),
        _extract_meta(message),
    )

    reply["result"] = "success"
    state["resilient-server"].send_reply(reply)
# NOTE(review): mid-function fragment -- the enclosing "def" (a retrieve-key
# handler, judging by the nimbusio_read_* counters below) is outside this
# view.  Statements restored at function-body indentation; the first three
# look like the tail of an exception handler ("instance" being the caught
# exception), so later statements are NOT dead code in the full function.
    reply["error-message"] = str(instance)
    state["resilient-server"].send_reply(reply)
    return

    # Verify the retrieved content against the md5 recorded for this sequence.
    segment_md5 = hashlib.md5()
    segment_md5.update(data_content)
    # NOTE(review): comparing a bytes digest to str(sequence_row.hash) only
    # works where str() of the stored hash yields the raw digest (a Python 2
    # idiom) -- confirm against the database layer before porting.
    if segment_md5.digest() != str(sequence_row.hash):
        error_message = "md5 mismatch %s" % (state_key, )
        log.error(error_message)
        state["event-push-client"].error("md5-mismatch", error_message)
        reply["result"] = "md5-mismatch"
        reply["error-message"] = "segment md5 does not match expected value"
        state["resilient-server"].send_reply(reply)
        return

    Statgrabber.accumulate('nimbusio_read_requests', 1)
    Statgrabber.accumulate('nimbusio_read_bytes', len(data_content))

    # Record progress through the sequence generator; timeout is an absolute
    # deadline bounding how long this retrieve may stay active.
    state_entry = _retrieve_state_tuple(
        generator=sequence_generator,
        sequence_row_count=sequence_row_count,
        sequence_read_count=1,
        timeout=time.time() + _retrieve_timeout
    )

    # save stuff we need to recall in state
    if state_entry.sequence_read_count == state_entry.sequence_row_count:
        reply["completed"] = True
    else:
        reply["completed"] = False

    state["active-requests"][state_key] = state_entry
def _handle_archive_key_next(state, message, data):
    """Store one follow-on sequence of a multi-message key archive.

    Validates ``data`` against the declared segment size and md5 digest;
    on a mismatch, logs, pushes an event, and replies with a failure
    result.  On success, appends the sequence through
    ``state["writer"].store_sequence`` and replies with
    ``result == "success"``.
    """
    log = logging.getLogger("_handle_archive_key_next")
    identity = (
        message["collection-id"],
        message["key"],
        message["timestamp-repr"],
        message["segment-num"],
    )
    log.info("%s %s %s %s" % identity)

    reply = {
        "message-type": "archive-key-next-reply",
        "client-tag": message["client-tag"],
        "message-id": message["message-id"],
        "result": None,
        "error-message": None,
    }

    def _reject(result, error_message, reply_text):
        # Shared failure path: log, raise an event, reply with the error.
        log.error(error_message)
        state["event-push-client"].error(result, error_message)
        reply["result"] = result
        reply["error-message"] = reply_text
        state["resilient-server"].send_reply(reply)

    if len(data) != message["segment-size"]:
        _reject(
            "size-mismatch",
            "size mismatch (%s != %s) %s %s %s %s" % (
                (len(data), message["segment-size"]) + identity
            ),
            "segment size does not match expected value",
        )
        return

    expected_segment_md5_digest = b64decode(message["segment-md5-digest"])
    if hashlib.md5(data).digest() != expected_segment_md5_digest:
        _reject(
            "md5-mismatch",
            "md5 mismatch %s %s %s %s" % identity,
            "segment md5 does not match expected value",
        )
        return

    state["writer"].store_sequence(
        message["collection-id"],
        message["key"],
        message["unified-id"],
        message["timestamp-repr"],
        message["conjoined-part"],
        message["segment-num"],
        message["segment-size"],
        message["zfec-padding-size"],
        expected_segment_md5_digest,
        message["segment-adler32"],
        message["sequence-num"],
        data,
    )

    Statgrabber.accumulate("nimbusio_write_requests", 1)
    Statgrabber.accumulate("nimbusio_write_bytes", len(data))

    reply["result"] = "success"
    state["resilient-server"].send_reply(reply)
#!/usr/bin/python
"""Smoke-test the Statgrabber client API: timer, count, average, accumulate."""
import Statgrabber
import time

# Time the whole run of this script.
timer = Statgrabber.start('simpleclient elapsed time')

for _ in range(5):
    Statgrabber.count('foo')

for sample in range(5):
    Statgrabber.average('bar', sample)

# Metric names containing spaces must work too.
for sample in range(5):
    Statgrabber.accumulate('b a z', sample)

time.sleep(1.0)
timer.finish()