    @classmethod
    def create(cls, fd, file_hash_op):
        """Create a new timestamp log

        Writes the header appropriately.
        """
        ctx = StreamSerializationContext(fd)

        ctx.write_bytes(cls.HEADER_MAGIC)
        file_hash_op.serialize(ctx)

        return cls(fd, file_hash_op)
Example #2
    @classmethod
    def create(cls, fd, file_hash_op):
        """Create a new timestamp log

        Writes the header appropriately.
        """
        ctx = StreamSerializationContext(fd)

        ctx.write_bytes(cls.HEADER_MAGIC)
        file_hash_op.serialize(ctx)

        return cls(fd, file_hash_op)

    def append(self, length, timestamp):
        """Add a new timestamp to the log"""
        if len(timestamp.msg) != self.file_hash_op.DIGEST_LENGTH:
            raise ValueError(
                "Timestamp msg length does not match expected digest length; %d != %d"
                % (len(timestamp.msg), self.file_hash_op.DIGEST_LENGTH)
            )

        with PacketWriter(self.fd) as packet_fd:
            ctx = StreamSerializationContext(packet_fd)
            ctx.write_varuint(length)
            ctx.write_bytes(timestamp.msg)
            timestamp.serialize(ctx)
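For orientation, here is a minimal round-trip sketch of the record layout that append() writes, using an in-memory buffer and a placeholder digest instead of a real log file; the serialized timestamp proof itself is left out to keep the sketch self-contained.

import io
from opentimestamps.core.serialize import (StreamSerializationContext,
                                           StreamDeserializationContext)

fake_digest = b'\x00' * 32          # stand-in for a 32-byte SHA256 file digest
buf = io.BytesIO()                  # stand-in for the timestamp log file

# Write one record the way append() does: the length as a varuint, then the digest.
ctx = StreamSerializationContext(buf)
ctx.write_varuint(1024)             # file length
ctx.write_bytes(fake_digest)        # timestamp.msg

# Read it back with the matching deserialization context.
buf.seek(0)
rctx = StreamDeserializationContext(buf)
assert rctx.read_varuint() == 1024
assert rctx.read_bytes(32) == fake_digest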
Example #4
    def get_timestamp(self):
        commitment = self.path[len('/timestamp/'):]

        try:
            commitment = binascii.unhexlify(commitment)
        except binascii.Error:
            self.send_response(400)
            self.send_header('Content-type', 'text/plain')
            self.send_header('Cache-Control', 'public, max-age=31536000') # this will never not be an error!
            self.end_headers()
            self.wfile.write(b'commitment must be hex-encoded bytes')
            return

        try:
            timestamp = self.calendar[commitment]
        except KeyError:
            self.send_response(404)
            self.send_header('Content-type', 'text/plain')

            # Pending?
            reason = self.calendar.stamper.is_pending(commitment)
            if reason:
                reason = reason.encode()

                # The commitment is pending, so its status will change soonish
                # as blocks are found.
                self.send_header('Cache-Control', 'public, max-age=60')

            else:
                # The commitment isn't in this calendar at all. Clients only
                # get specific commitments from servers, so in the current
                # implementation there's no reason why this response would ever
                # change.
                #
                # FIXME: unfortunately, this isn't actually true, as the
                # stamper may return `Not Found` for a commitment that was just
                # added, as commitments aren't actually added directly to the
                # pending data structure, but rather, added to the journal and
                # only then added to pending. So for now, set a reasonably
                # short cache control header.
                #
                # See https://github.com/opentimestamps/opentimestamps-server/issues/10
                # for more info.
                self.send_header('Cache-Control', 'public, max-age=60')
                reason = b'Not found'

            self.end_headers()
            self.wfile.write(reason)
            return

        self.send_response(200)

        # Since only Bitcoin attestations are currently made, once a commitment
        # is timestamped by Bitcoin this response will never change.
        self.send_header('Cache-Control', 'public, max-age=3600')

        self.send_header('Content-type', 'application/octet-stream')
        self.end_headers()

        timestamp.serialize(StreamSerializationContext(self.wfile))
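For context, a minimal client-side sketch of the same endpoint: the handler streams back a bare serialized Timestamp, which a caller can parse with Timestamp.deserialize(ctx, commitment). The calendar URL and the all-zero commitment below are placeholders; an unknown digest simply hits the 404 branch handled above.

import urllib.error
import urllib.request
from opentimestamps.core.serialize import StreamDeserializationContext
from opentimestamps.core.timestamp import Timestamp

CALENDAR = 'https://alice.btc.calendar.opentimestamps.org'  # example calendar URL
commitment = b'\x00' * 32                                   # placeholder digest

url = '%s/timestamp/%s' % (CALENDAR, commitment.hex())
try:
    with urllib.request.urlopen(url) as resp:
        ctx = StreamDeserializationContext(resp)
        timestamp = Timestamp.deserialize(ctx, commitment)
        print('deserialized timestamp for %s' % timestamp.msg.hex())
except urllib.error.HTTPError as exp:
    # 404 means pending or unknown, exactly as the handler above replies.
    print('calendar replied %d: %s' % (exp.code, exp.read().decode()))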
Example #5
    def get_timestamp(self):
        commitment = self.path[len('/timestamp/'):]

        try:
            commitment = binascii.unhexlify(commitment)
        except binascii.Error:
            self.send_response(400)
            self.send_header('Content-type', 'text/plain')
            self.send_header('Cache-Control', 'public, max-age=31536000') # this will never not be an error!
            self.end_headers()
            self.wfile.write(b'commitment must be hex-encoded bytes')
            return

        try:
            timestamp = self.calendar[commitment]
        except KeyError:
            self.send_response(404)
            self.send_header('Content-type', 'text/plain')
            self.send_header('Cache-Control', 'public, max-age=60')
            reason = b'Not found'

            self.end_headers()
            self.wfile.write(reason)
            return

        self.send_response(200)

        # Since only Bitcoin attestations are currently made, once a commitment
        # is timestamped by Bitcoin this response will never change.
        self.send_header('Cache-Control', 'public, max-age=3600')

        self.send_header('Content-type', 'application/octet-stream')
        self.end_headers()

        timestamp.serialize(StreamSerializationContext(self.wfile))
Example #6
def stamp_command(fd, args):
    # Create initial commitment ops for all files
    merkle_roots = []

    try:
        file_timestamp = DetachedTimestampFile.from_fd(OpSHA256(), fd)
    except OSError as exp:
        logging.error("Could not read %r: %s" % (fd.name, exp))
        return

    # Add nonce
    nonce_appended_stamp = file_timestamp.timestamp.ops.add(
        OpAppend(os.urandom(16)))
    merkle_root = nonce_appended_stamp.ops.add(OpSHA256())
    merkle_roots.append(merkle_root)
    merkle_tip = make_merkle_tree(merkle_roots)

    create_timestamp(merkle_tip, CALENDAR_URLS, parse_ots_args(args))

    try:
        with open("%s.ots" % fd.name, "wb") as timestamp_fd:
            ctx = StreamSerializationContext(timestamp_fd)
            file_timestamp.serialize(ctx)
    except IOError as exp:
        logging.error("Failed to create timestamp: %s" % exp)
        return
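The .ots file written above can be read back with the matching deserialization context; a minimal sketch, assuming a previously stamped file exists (the filename is a placeholder).

from opentimestamps.core.detached import DetachedTimestampFile
from opentimestamps.core.serialize import StreamDeserializationContext

# 'example.txt.ots' stands in for a file produced by stamp_command().
with open('example.txt.ots', 'rb') as fd:
    ctx = StreamDeserializationContext(fd)
    detached = DetachedTimestampFile.deserialize(ctx)

print('committed digest: %s' % detached.timestamp.msg.hex())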
Example #7
    def __save(self, timestamp):
        if self.path is None:
            return

        # FIXME: should do this atomically
        path = self.__commitment_to_filename(timestamp.msg)
        os.makedirs(os.path.dirname(path), exist_ok=True)
        with open(path, 'wb') as stamp_fd:
            ctx = StreamSerializationContext(stamp_fd)
            timestamp.serialize(ctx)
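The files written by __save() hold a bare serialized Timestamp, so reading one back requires the commitment itself as the initial message. A minimal sketch; the path scheme and commitment are placeholders, since __commitment_to_filename() is not shown here.

from opentimestamps.core.serialize import StreamDeserializationContext
from opentimestamps.core.timestamp import Timestamp

commitment = b'\x00' * 32                        # placeholder: the saved timestamp's msg
path = '/var/lib/calendar/' + commitment.hex()   # placeholder path scheme

with open(path, 'rb') as stamp_fd:
    ctx = StreamDeserializationContext(stamp_fd)
    timestamp = Timestamp.deserialize(ctx, commitment)

assert timestamp.msg == commitment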
Example #8
    def get_timestamp(self):
        commitment = self.path[len('/timestamp/'):]

        try:
            commitment = binascii.unhexlify(commitment)
        except binascii.Error:
            self.send_response(400)
            self.send_header('Content-type', 'text/plain')
            self.send_header(
                'Cache-Control',
                'public, max-age=31536000')  # this will never not be an error!
            self.end_headers()
            self.wfile.write(b'commitment must be hex-encoded bytes')
            return

        try:
            timestamp = self.calendar[commitment]
        except KeyError:
            self.send_response(404)
            self.send_header('Content-type', 'text/plain')

            # Pending?
            reason = self.calendar.stamper.is_pending(commitment)
            if reason:
                reason = reason.encode()

                # The commitment is pending, so its status will change soonish
                # as blocks are found.
                self.send_header('Cache-Control', 'public, max-age=60')

            else:
                # The commitment isn't in this calendar at all. Clients only
                # get specific commitments from servers, so in the current
                # implementation there's no reason why this response would ever
                # change.
                self.send_header('Cache-Control', 'public, max-age=3600')
                reason = b'Not found'

            self.end_headers()
            self.wfile.write(reason)
            return

        self.send_response(200)

        # Since only Bitcoin attestations are currently made, once a commitment
        # is timestamped by Bitcoin this response will never change.
        self.send_header('Cache-Control', 'public, max-age=3600')

        self.send_header('Content-type', 'application/octet-stream')
        self.end_headers()

        timestamp.serialize(StreamSerializationContext(self.wfile))
Example #9
    def append(self, length, timestamp):
        """Add a new timestamp to the log"""
        if len(timestamp.msg) != self.file_hash_op.DIGEST_LENGTH:
            raise ValueError(
                "Timestamp msg length does not match expected digest length; %d != %d"
                % (len(timestamp.msg), self.file_hash_op.DIGEST_LENGTH))

        with PacketWriter(self.fd) as packet_fd:
            ctx = StreamSerializationContext(packet_fd)
            ctx.write_varuint(length)
            ctx.write_bytes(timestamp.msg)
            timestamp.serialize(ctx)
Example #10
File: ots.py Project: TimeBags/timebags
def ots_stamp(file_list, min_resp=DEF_MIN_RESP, timeout=DEF_TIMEOUT):
    ''' stamp function '''

    merkle_roots = []
    file_timestamps = []

    for file_name in file_list:
        with open(file_name, 'rb') as file_handler:
            try:
                file_timestamp = DetachedTimestampFile.from_fd(OpSHA256(), file_handler)
            except OSError as exp:
                msg = "Could not read %r: %s" % (file_name, exp)
                logging.error(msg)
                raise

        # nonce
        nonce_appended_stamp = file_timestamp.timestamp.ops.add(OpAppend(os.urandom(16)))
        merkle_root = nonce_appended_stamp.ops.add(OpSHA256())

        merkle_roots.append(merkle_root)
        file_timestamps.append(file_timestamp)

    merkle_tip = make_merkle_tree(merkle_roots)

    calendar_urls = []
    calendar_urls.append('https://a.pool.opentimestamps.org')
    calendar_urls.append('https://b.pool.opentimestamps.org')
    calendar_urls.append('https://a.pool.eternitywall.com')
    calendar_urls.append('https://ots.btc.catallaxy.com')

    if not create_timestamp(merkle_tip, calendar_urls, min_resp, timeout):
        return False

    for (file_name, file_timestamp) in zip(file_list, file_timestamps):
        timestamp_file_path = file_name + '.ots'
        try:
            with open(timestamp_file_path, 'xb') as timestamp_fd:
                ctx = StreamSerializationContext(timestamp_fd)
                file_timestamp.serialize(ctx)
        except IOError as exp:
            msg = "Failed to create timestamp %r: %s" % (timestamp_file_path, exp)
            logging.error(msg)
            raise

    return True
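A hypothetical caller, assuming document.pdf exists and no document.pdf.ots is present yet (the 'xb' mode above refuses to overwrite an existing timestamp):

if ots_stamp(['document.pdf']):
    print('created document.pdf.ots')
else:
    print('stamping failed')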
Example #11
File: ots.py Project: TimeBags/timebags
def ots_upgrade(filename):
    ''' upgrade function '''

    msg = "Upgrading %s" % filename
    logging.debug(msg)

    try:
        with open(filename, 'rb') as old_stamp_fd:
            ctx = StreamDeserializationContext(old_stamp_fd)
            detached_timestamp = DetachedTimestampFile.deserialize(ctx)

    except IOError as exp:
        msg = "Could not read file %s: %s" % (filename, exp)
        logging.error(msg)
        raise
    except BadMagicError:
        msg = "Error! %r is not a timestamp file" % filename
        logging.error(msg)
        raise
    except DeserializationError as exp:
        msg = "Invalid timestamp file %r: %s" % (filename, exp)
        logging.error(msg)
        raise

    changed = upgrade_timestamp(detached_timestamp.timestamp)

    if changed:
        try:
            with open(old_stamp_fd.name, 'wb') as new_stamp_fd:
                ctx = StreamSerializationContext(new_stamp_fd)
                detached_timestamp.serialize(ctx)
        except IOError as exp:
            msg = "Could not upgrade timestamp %s: %s" % (old_stamp_fd.name, exp)
            logging.error(msg)
            raise

    if is_timestamp_complete(detached_timestamp.timestamp):
        logging.info("Success! Timestamp complete")
        return ('UPGRADED', get_attestations_list(detached_timestamp.timestamp))

    logging.warning("Failed! Timestamp not complete")
    return ('PENDING', None)
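A hypothetical caller of ots_upgrade(); the path is a placeholder:

status, attestations = ots_upgrade('document.pdf.ots')
if status == 'UPGRADED':
    print('timestamp complete, attestations: %r' % attestations)
else:
    print('timestamp still pending; try again after the next Bitcoin confirmation')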
Example #12
    def post_digest(self):
        content_length = int(self.headers['Content-Length'])

        if content_length > self.MAX_DIGEST_LENGTH:
            self.send_response(400)
            self.send_header('Content-type', 'text/plain')
            self.end_headers()
            self.wfile.write(b'digest too long')
            return

        digest = self.rfile.read(content_length)

        timestamp = self.aggregator.submit(digest)

        self.send_response(200)
        self.send_header('Content-type', 'application/octet-stream')
        self.end_headers()

        ctx = StreamSerializationContext(self.wfile)
        timestamp.serialize(ctx)
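The matching client side of this handler POSTs a raw digest and reads back a serialized Timestamp whose message is that digest; a sketch assuming a public aggregator at the URL below.

import hashlib
import urllib.request
from opentimestamps.core.serialize import StreamDeserializationContext
from opentimestamps.core.timestamp import Timestamp

AGGREGATOR = 'https://alice.btc.calendar.opentimestamps.org'  # assumed aggregator URL
digest = hashlib.sha256(b'hello world').digest()

req = urllib.request.Request('%s/digest' % AGGREGATOR, data=digest)
with urllib.request.urlopen(req) as resp:
    ctx = StreamDeserializationContext(resp)
    timestamp = Timestamp.deserialize(ctx, digest)

print('aggregator committed to our digest: %s' % (timestamp.msg == digest))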