def get_timestamp(self, commitment, timeout=None):
    """Get a timestamp for a given commitment

    Raises KeyError if the calendar doesn't have that commitment
    """
    req = urllib.request.Request(
        self.url + '/timestamp/' + binascii.hexlify(commitment).decode('utf8'),
        headers=self.request_headers)
    try:
        with urllib.request.urlopen(req, timeout=timeout) as resp:
            if resp.status == 200:
                # FIXME: Not a particularly nice way of handling this, but it'll do
                # the job for now.
                #
                # Read one byte past the limit so an oversized response is detected
                # rather than silently truncated at exactly 10000 bytes.
                resp_bytes = resp.read(10001)
                if len(resp_bytes) > 10000:
                    raise Exception("Calendar response exceeded size limit")

                ctx = BytesDeserializationContext(resp_bytes)
                return Timestamp.deserialize(ctx, commitment)
            else:
                raise Exception("Unknown response from calendar: %d" % resp.status)
    except urllib.error.HTTPError as exp:
        if exp.code == 404:
            raise CommitmentNotFoundError(get_sanitised_resp_msg(exp))
        else:
            raise exp

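
# Usage sketch for get_timestamp(). It assumes the method belongs to a
# RemoteCalendar-style client constructed from a calendar URL (as in
# python-opentimestamps' opentimestamps.calendar module); the URL and the
# digest below are placeholders.
import hashlib

from opentimestamps.calendar import RemoteCalendar, CommitmentNotFoundError

calendar = RemoteCalendar('https://alice.btc.calendar.opentimestamps.org')
commitment = hashlib.sha256(b'example data').digest()
try:
    timestamp = calendar.get_timestamp(commitment, timeout=10)
    print("attestations:", timestamp.attestations, "ops:", len(timestamp.ops))
except CommitmentNotFoundError:
    print("calendar does not know this commitment yet")
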
def deserialize_ascii_armored_timestamp(git_commit, gpg_sig):
    stamp_start = gpg_sig.find(ASCII_ARMOR_HEADER)
    if stamp_start == -1:
        return (None, None, None)

    stamp_end = gpg_sig.find(b'\n' + ASCII_ARMOR_FOOTER)
    if stamp_end == -1:
        return (None, None, None)

    base64_encoded_stamp = gpg_sig[stamp_start + len(ASCII_ARMOR_HEADER):stamp_end]

    initial_msg = hash_signed_commit(git_commit, gpg_sig[0:stamp_start])

    try:
        serialized_stamp = base64.standard_b64decode(base64_encoded_stamp)

        major_version = serialized_stamp[0]
        minor_version = serialized_stamp[1]

        if major_version != 1:
            logging.error("Can't verify timestamp; major version %d not known" % major_version)
            sys.exit(1)

        logging.debug("Git timestamp is version %d.%d" % (major_version, minor_version))

        ctx = BytesDeserializationContext(serialized_stamp[2:])
        timestamp = Timestamp.deserialize(ctx, initial_msg)

        return (major_version, minor_version, timestamp)
    except Exception as err:
        logging.error("Bad timestamp: %r" % err)
        return (None, None, None)

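
# Sketch of the framing the function above expects: one major and one minor
# version byte followed by the serialized timestamp, base64-encoded between an
# armor header and footer. The armor constants, version numbers, and the dummy
# attestation below are illustrative placeholders, not the values used by the
# real git timestamper; BytesSerializationContext is assumed to come from
# python-opentimestamps.
import base64

from opentimestamps.core.serialize import BytesSerializationContext
from opentimestamps.core.timestamp import Timestamp
from opentimestamps.core.notary import BitcoinBlockHeaderAttestation

ASCII_ARMOR_HEADER = b'-----BEGIN EXAMPLE TIMESTAMP-----\n'  # placeholder
ASCII_ARMOR_FOOTER = b'-----END EXAMPLE TIMESTAMP-----\n'    # placeholder

def armor_timestamp(timestamp, major=1, minor=1):
    sctx = BytesSerializationContext()
    timestamp.serialize(sctx)
    blob = bytes([major, minor]) + sctx.getbytes()
    return ASCII_ARMOR_HEADER + base64.standard_b64encode(blob) + b'\n' + ASCII_ARMOR_FOOTER

stamp = Timestamp(b'\x00' * 32)
stamp.attestations.add(BitcoinBlockHeaderAttestation(123456))  # dummy attestation so serialization succeeds
print(armor_timestamp(stamp))
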
def __get_timestamp(self, msg):
    """Get a timestamp, non-recursively"""
    serialized_timestamp = self.db.Get(msg)

    ctx = BytesDeserializationContext(serialized_timestamp)

    timestamp = Timestamp(msg)

    for i in range(ctx.read_varuint()):
        attestation = TimeAttestation.deserialize(ctx)
        assert attestation not in timestamp.attestations
        timestamp.attestations.add(attestation)

    for i in range(ctx.read_varuint()):
        op = Op.deserialize(ctx)
        assert op not in timestamp.ops
        timestamp.ops.add(op)

    return timestamp

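
# Sketch of the flat, non-recursive layout the method above reads back:
# varuint(number of attestations), each attestation, varuint(number of ops),
# each op. Sub-timestamps are not nested here; they live under their own
# database keys. Assumes python-opentimestamps' BytesSerializationContext;
# this is an illustration, not the server's own writer.
from opentimestamps.core.serialize import BytesSerializationContext

def serialize_timestamp_flat(timestamp):
    ctx = BytesSerializationContext()

    ctx.write_varuint(len(timestamp.attestations))
    for attestation in sorted(timestamp.attestations):
        attestation.serialize(ctx)

    ctx.write_varuint(len(timestamp.ops))
    for op in timestamp.ops:
        op.serialize(ctx)

    return ctx.getbytes()
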
def from_db(self, d):
    self.path = d["path"]
    self.status = d["status"]
    self.agt = bytes.fromhex(d["agt"]) if d["agt"] else d["agt"]
    self.txid = d["txid"]
    self.block = d["block"]
    self.date = d["date"]
    self.detached_timestamp = DetachedTimestampFile.deserialize(
        BytesDeserializationContext(b64string_to_bytes(d["detached_timestamp"])))
    post_deserialize(self.detached_timestamp.timestamp)

def bin_to_ots(self, proof_binary, filename):
    try:
        ctx = BytesDeserializationContext(proof_binary)
        self.detached_timestamp = DetachedTimestampFile.deserialize(ctx)
    except BadMagicError:
        self.dialog("Error! %r is not a timestamp file." % filename, "critical")
    except DeserializationError as exp:
        self.dialog("Invalid timestamp file %r: %s" % (filename, exp), "critical")
    except Exception as exp:  # which errors occur here?
        self.dialog("Invalid file %r: %s" % (filename, exp), "critical")

def submit(self, digest, timeout=None):
    """Submit a digest to the calendar

    Returns a Timestamp committing to that digest
    """
    req = urllib.request.Request(self.url + '/digest', data=digest, headers=self.request_headers)
    with urllib.request.urlopen(req, timeout=timeout) as resp:
        if resp.status != 200:
            raise Exception("Unknown response from calendar: %d" % resp.status)

        # FIXME: Not a particularly nice way of handling this, but it'll do
        # the job for now.
        #
        # As in get_timestamp(), read one byte past the limit so an oversized
        # response is detected rather than silently truncated.
        resp_bytes = resp.read(10001)
        if len(resp_bytes) > 10000:
            raise Exception("Calendar response exceeded size limit")

        ctx = BytesDeserializationContext(resp_bytes)
        return Timestamp.deserialize(ctx, digest)

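
# Usage sketch for submit(), under the same RemoteCalendar assumption as above;
# the calendar URL is a placeholder. The digest must be raw bytes, not a hex
# string, and the returned timestamp initially commits the digest only to the
# calendar (a pending attestation), not yet to Bitcoin.
import hashlib

from opentimestamps.calendar import RemoteCalendar

calendar = RemoteCalendar('https://alice.btc.calendar.opentimestamps.org')
digest = hashlib.sha256(b'file contents to timestamp').digest()
pending_stamp = calendar.submit(digest, timeout=10)
for msg, attestation in pending_stamp.all_attestations():
    print(attestation)
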
def bytes_to_kv_map(kv_bytes):
    ctx = BytesDeserializationContext(kv_bytes)
    new_kv_map = {}

    while True:
        try:
            key_len = ctx.read_varuint()
            key = ctx.read_bytes(key_len)
            value_len = ctx.read_varuint()
            value = ctx.read_bytes(value_len)
            new_kv_map[key] = value
        except TruncationError:
            break

    return new_kv_map

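
# The inverse direction, sketched for clarity: each entry is written as
# varuint(len(key)) + key + varuint(len(value)) + value, which is exactly what
# the parser above consumes until a TruncationError signals end of input.
# Assumes python-opentimestamps' BytesSerializationContext; not necessarily
# identical to the server's own helper.
from opentimestamps.core.serialize import BytesSerializationContext

def kv_map_to_bytes_sketch(kv_map):
    ctx = BytesSerializationContext()
    for key, value in kv_map.items():
        ctx.write_varuint(len(key))
        ctx.write_bytes(key)
        ctx.write_varuint(len(value))
        ctx.write_bytes(value)
    return ctx.getbytes()
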
def loop(self):
    logging.info("Starting loop for %s" % self.calendar_url)

    try:
        logging.debug("Opening %s" % self.up_to_path)
        with open(self.up_to_path, 'r') as up_to_fd:
            last_known = int(up_to_fd.read().strip())
    except FileNotFoundError as exp:
        last_known = -1
    logging.info("Checking calendar " + str(self.calendar_url) + ", last_known commitment: " + str(last_known))

    if self.btc_net == 'testnet':
        bitcoin.SelectParams('testnet')
    elif self.btc_net == 'regtest':
        bitcoin.SelectParams('regtest')

    while True:
        start_time = time.time()
        backup_url = urljoin(self.calendar_url, "/experimental/backup/%d" % (last_known + 1))
        logging.debug("Asking " + str(backup_url))
        try:
            r = requests.get(backup_url)
        except Exception as err:
            logging.error("Exception asking %s, error message: %s, sleeping for %d seconds"
                          % (str(backup_url), str(err), SLEEP_SECS))
            time.sleep(SLEEP_SECS)
            continue

        if r.status_code != 200:
            logging.info("%s not found, sleeping for %d seconds" % (backup_url, SLEEP_SECS))
            time.sleep(SLEEP_SECS)
            continue

        kv_map = Backup.bytes_to_kv_map(r.content)
        attestations = {}
        ops = {}
        for key, value in kv_map.items():
            # print("--- key=" + b2x(key) + " value=" + b2x(value))
            ctx = BytesDeserializationContext(value)

            for _a in range(ctx.read_varuint()):
                attestation = TimeAttestation.deserialize(ctx)
                attestations[key] = attestation

            for _b in range(ctx.read_varuint()):
                op = Op.deserialize(ctx)
                ops[key] = op

        proxy = bitcoin.rpc.Proxy()

        # Verify that all bitcoin attestations are valid
        logging.debug("Total attestations: " + str(len(attestations)))
        for key, attestation in attestations.items():
            if attestation.__class__ == BitcoinBlockHeaderAttestation:
                blockhash = proxy.getblockhash(attestation.height)
                block_header = proxy.getblockheader(blockhash)
                # the following raises an exception, stopping the loop, if the attestation does not verify
                attested_time = attestation.verify_against_blockheader(key, block_header)
                logging.debug("Verifying " + b2x(key) + " result " + str(attested_time))

        # Verify that every chain of ops connects to an attestation
        logging.debug("Total ops: " + str(len(ops)))
        for key, op in ops.items():
            current_key = key
            current_op = op
            while True:
                next_key = current_op(current_key)
                if next_key in ops:
                    current_key = next_key
                    current_op = ops[next_key]
                else:
                    break
            assert next_key in attestations

        batch = leveldb.WriteBatch()
        for key, value in kv_map.items():
            batch.Put(key, value)
        self.db.db.Write(batch, sync=True)

        last_known = last_known + 1
        try:
            with open(self.up_to_path, 'w') as up_to_fd:
                up_to_fd.write('%d\n' % last_known)
        except FileNotFoundError as exp:
            logging.error(str(exp))
            break

        elapsed_time = time.time() - start_time
        logging.info("Took %ds for %s" % (elapsed_time, str(backup_url)))

def deserialize(file_timestamp_bytes):
    ctx = BytesDeserializationContext(file_timestamp_bytes)
    return DetachedTimestampFile.deserialize(ctx)

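
# Usage sketch: read a detached .ots proof from disk and hand its bytes to the
# helper above. The path is a placeholder; DetachedTimestampFile and the
# exception types are assumed to come from python-opentimestamps, as in the
# snippets above.
from opentimestamps.core.serialize import BadMagicError, DeserializationError

try:
    with open('example.txt.ots', 'rb') as fd:  # placeholder path
        detached = deserialize(fd.read())
    print(detached.file_hash_op, detached.timestamp.msg.hex())
except (BadMagicError, DeserializationError) as exp:
    print("not a valid timestamp proof: %s" % exp)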