def open(cls, fd):
    """Open an existing timestamp log.

    fd must be positioned at the start of the log; the header will be
    immediately read and DeserializationError raised if incorrect.
    """
    ctx = StreamDeserializationContext(fd)

    # Validate the fixed header magic before trusting anything else in the stream.
    magic_read = ctx.read_bytes(len(cls.HEADER_MAGIC))
    if magic_read != cls.HEADER_MAGIC:
        raise opentimestamps.core.serialize.BadMagicError(cls.HEADER_MAGIC, magic_read)

    # The per-file hash operation immediately follows the magic.
    file_hash_op = CryptOp.deserialize(ctx)
    return cls(fd, file_hash_op)
def open(cls, fd):
    """Open an existing timestamp log.

    fd must be positioned at the start of the log; the header will be
    immediately read and DeserializationError raised if incorrect.
    """
    ctx = StreamDeserializationContext(fd)

    expected_magic = cls.HEADER_MAGIC
    actual_magic = ctx.read_bytes(len(expected_magic))
    # Reject anything that does not start with the expected header magic.
    if expected_magic != actual_magic:
        raise BadMagicError(expected_magic, actual_magic)

    # Read the hash operation used for files recorded in this log.
    file_hash_op = CryptOp.deserialize(ctx)
    return cls(fd, file_hash_op)
def __iter__(self):
    """Iterate through all timestamps in the timestamp log.

    Yields (length, Timestamp) tuples, one per readable packet; packets
    that fail to deserialize are skipped.
    """
    while True:
        # Each entry lives in its own packet; a missing packet means
        # we've reached the end of the log.
        try:
            packet = PacketReader(self.fd)
        except PacketMissingError:
            return

        ctx = StreamDeserializationContext(packet)
        try:
            length = ctx.read_varuint()
            file_hash = ctx.read_bytes(self.file_hash_op.DIGEST_LENGTH)
            timestamp = Timestamp.deserialize(ctx, file_hash)
        except DeserializationError:
            # FIXME: should provide a way to get insight into these errors
            continue

        yield (length, timestamp)
def __getitem__(self, commitment):
    """Look up the stored Timestamp for a commitment.

    Raises KeyError when the store has no path, the commitment is too
    long to map to a filename, or no stamp file exists for it.
    """
    if self.path is None:
        raise KeyError
    if len(commitment) > 64:
        # FIXME: hack to avoid filename-too-long errors
        raise KeyError

    try:
        with open(self.__commitment_to_filename(commitment), 'rb') as stamp_fd:
            ctx = StreamDeserializationContext(stamp_fd)
            return Timestamp.deserialize(ctx, commitment)
    except FileNotFoundError:
        # Map a missing stamp file onto the mapping protocol's KeyError.
        raise KeyError
def ots_verify(filename_ots):
    """Verify an .ots timestamp proof file against its target file.

    The target filename is assumed to be filename_ots minus its '.ots'
    extension.

    Returns one of:
        ("CORRUPTED", None)    -- target file digest does not match the proof
        ("UPGRADED", results)  -- timestamp verified; results from verify_timestamp()
        ("PENDING", None)      -- timestamp not verifiable yet

    Raises BadMagicError / DeserializationError for an invalid proof file,
    IOError if the target cannot be opened, and Exception if the proof
    filename lacks the .ots extension.
    """
    try:
        with open(filename_ots, 'rb') as ots_fd:
            ctx = StreamDeserializationContext(ots_fd)
            detached_timestamp = DetachedTimestampFile.deserialize(ctx)
    except BadMagicError:
        msg = "Error! %r is not a timestamp file." % filename_ots
        logging.error(msg)
        raise
    except DeserializationError as exp:
        msg = "Invalid timestamp file %r: %s" % (filename_ots, exp)
        logging.error(msg)
        raise

    if not filename_ots.endswith('.ots'):
        logging.error('Timestamp filename does not end in .ots')
        # Was a bare `raise Exception`; give the caller a usable message.
        # Still an Exception subclass, so existing handlers keep working.
        raise Exception('Timestamp filename %r does not end in .ots' % filename_ots)

    target_filename = filename_ots[:-4]
    msg = "Assuming target filename is %r" % target_filename
    logging.debug(msg)

    try:
        target_fd = open(target_filename, 'rb')
    except IOError as exp:
        msg = 'Could not open target: %s' % exp
        logging.error(msg)
        raise

    # Context-manage the target so it is closed even if hashing raises
    # (the previous close() was skipped on the exception path).
    with target_fd:
        msg = "Hashing file, algorithm %s" % detached_timestamp.file_hash_op.TAG_NAME
        logging.debug(msg)
        actual_file_digest = detached_timestamp.file_hash_op.hash_fd(target_fd)

    msg = "Got digest %s" % b2x(actual_file_digest)
    logging.debug(msg)

    if actual_file_digest != detached_timestamp.file_digest:
        msg = "Expected digest %s" % b2x(detached_timestamp.file_digest)
        logging.debug(msg)
        logging.error("File does not match original!")
        return ("CORRUPTED", None)

    good, results = verify_timestamp(detached_timestamp.timestamp)
    if good:
        return ("UPGRADED", results)
    return ("PENDING", None)
def ots_upgrade(filename):
    """Upgrade the timestamp proof stored in filename, rewriting it in
    place when the upgrade changed it.

    Returns ('UPGRADED', attestations) when the timestamp is complete,
    ('PENDING', None) otherwise. Raises IOError / BadMagicError /
    DeserializationError when the file cannot be read or parsed, and
    IOError when the upgraded proof cannot be written back.
    """
    logging.debug("Upgrading %s" % filename)

    try:
        with open(filename, 'rb') as old_stamp_fd:
            ctx = StreamDeserializationContext(old_stamp_fd)
            detached_timestamp = DetachedTimestampFile.deserialize(ctx)
    except IOError as exp:
        logging.error("Could not read file %s: %s" % (filename, exp))
        raise
    except BadMagicError:
        logging.error("Error! %r is not a timestamp file" % filename)
        raise
    except DeserializationError as exp:
        logging.error("Invalid timestamp file %r: %s" % (filename, exp))
        raise

    # Only rewrite the file when the upgrade actually changed the proof.
    if upgrade_timestamp(detached_timestamp.timestamp):
        try:
            with open(old_stamp_fd.name, 'wb') as new_stamp_fd:
                ctx = StreamSerializationContext(new_stamp_fd)
                detached_timestamp.serialize(ctx)
        except IOError as exp:
            logging.error("Could not upgrade timestamp %s: %s" % (old_stamp_fd.name, exp))
            raise

    if is_timestamp_complete(detached_timestamp.timestamp):
        logging.info("Success! Timestamp complete")
        return ('UPGRADED', get_attestations_list(detached_timestamp.timestamp))

    logging.warning("Failed! Timestamp not complete")
    return ('PENDING', None)
def verify_command(fd, target_fd, args):
    """Verify the timestamp proof read from fd against target_fd.

    When target_fd is None the target filename is derived from fd.name by
    stripping the '.ots' extension, and the file is opened (and closed)
    here; a caller-supplied target_fd is left open for the caller.

    Returns False on any failure to read, parse, open or match; otherwise
    returns the result of verify_timestamp().
    """
    ctx = StreamDeserializationContext(fd)
    try:
        detached_timestamp = DetachedTimestampFile.deserialize(ctx)
    except BadMagicError:
        logging.error("Error! %r is not a timestamp file." % fd.name)
        return False
    except DeserializationError as exp:
        logging.error("Invalid timestamp file %r: %s" % (fd.name, exp))
        return False

    opened_target = False
    if target_fd is None:
        # Target not specified, so assume it's the same name as the
        # timestamp file minus the .ots extension.
        if not fd.name.endswith('.ots'):
            logging.error('Timestamp filename does not end in .ots')
            return False
        target_filename = fd.name[:-4]
        logging.info("Assuming target filename is %r" % target_filename)

        try:
            target_fd = open(target_filename, 'rb')
        except IOError as exp:
            logging.error('Could not open target: %s' % exp)
            return False
        opened_target = True

    try:
        logging.debug("Hashing file, algorithm %s" % detached_timestamp.file_hash_op.TAG_NAME)
        actual_file_digest = detached_timestamp.file_hash_op.hash_fd(target_fd)
        logging.debug("Got digest %s" % b2x(actual_file_digest))
    finally:
        # Fix a resource leak: the handle opened above was never closed.
        # Only close what we opened ourselves.
        if opened_target:
            target_fd.close()

    if actual_file_digest != detached_timestamp.file_digest:
        logging.debug("Expected digest %s" % b2x(detached_timestamp.file_digest))
        logging.error("File does not match original!")
        return False

    return verify_timestamp(detached_timestamp.timestamp, parse_ots_args(args))