Example 1
    def _calculate_hash(self, filename, **kwargs):
        """
        Calculates the hash of the file and the hash of the file + metadata (passed in the kwargs).
        """

        # hash to address
        with open(filename, 'rb') as f:
            file_hash = hashlib.md5(f.read()).hexdigest()

        if kwargs:
            data = str([unicode(value) for value in kwargs.itervalues()] + [file_hash])
        else:
            data = file_hash
        address_piece_with_metadata = unicode(pybitcointools.bin_to_b58check(pybitcointools.bin_hash160(data),
                                                                             magicbyte=self._magicbyte))
        address_piece = unicode(pybitcointools.bin_to_b58check(pybitcointools.bin_hash160(file_hash),
                                                               magicbyte=self._magicbyte))
        return address_piece, address_piece_with_metadata
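For readers who want to try the derivation outside the class, here is a minimal standalone sketch of the same two-address computation. It assumes the pybitcointools functions used above behave as in the public library (bin_hash160 = RIPEMD-160 of the SHA-256 digest, bin_to_b58check = Base58Check encoding with magicbyte as the version prefix); the function name and the magicbyte default are illustrative, not part of the original project.

import hashlib
import pybitcointools   # same import name as the examples; assumed installed

def file_to_addresses(filename, magicbyte=0, **metadata):
    # Fingerprint the raw file contents with MD5, as the example above does.
    with open(filename, 'rb') as f:
        file_hash = hashlib.md5(f.read()).hexdigest()

    # Fold metadata values into a second string: the str() of a list of the
    # metadata values plus the file hash, mirroring the kwargs branch above.
    if metadata:
        data = str([str(value) for value in metadata.values()] + [file_hash])
    else:
        data = file_hash

    address = pybitcointools.bin_to_b58check(
        pybitcointools.bin_hash160(file_hash), magicbyte=magicbyte)
    address_with_metadata = pybitcointools.bin_to_b58check(
        pybitcointools.bin_hash160(data), magicbyte=magicbyte)
    return address, address_with_metadata

Note that because the metadata branch hashes the string form of a Python list, the resulting address depends on the order in which the metadata values are iterated; with more than one keyword argument the derivation is only reproducible if that order is fixed.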
Example 2
 def hash_as_address(self):
     data = str([
         unicode(self.title),
         unicode(self.artist_name),
         unicode(self.date_created),
         unicode(self.bitcoin_path),
         unicode(self.digital_work.hash),
     ])
     address = unicode(
         pybitcointools.bin_to_b58check(pybitcointools.bin_hash160(data)))
     return address
Example 3
    def _calculate_hash(self, filename, **kwargs):
        """
        Calculates the hash of the file and the hash of the file + metadata (passed in the kwargs).
        """

        # hash to address
        with open(filename, 'rb') as f:
            file_hash = hashlib.md5(f.read()).hexdigest()

        if kwargs:
            data = str([unicode(value)
                        for value in kwargs.itervalues()] + [file_hash])
        else:
            data = file_hash
        address_piece_with_metadata = unicode(
            pybitcointools.bin_to_b58check(pybitcointools.bin_hash160(data),
                                           magicbyte=self._magicbyte))
        address_piece = unicode(
            pybitcointools.bin_to_b58check(
                pybitcointools.bin_hash160(file_hash),
                magicbyte=self._magicbyte))
        return address_piece, address_piece_with_metadata
Example 4
    def hash_as_address(self):
        """
        @return
          btc_address -- address that has a hash of all the info about the piece.

        @notes

        Steps:
        1. Hash the data using sha256.
        2. The hash is hashed again using ripemd160*.
        3. The version byte (0x00 for main net) is added to the beginning.
        4. The checksum, which is the first 4 bytes of sha256(sha256(versioned_hash))*,
           is added to the end.
        5. Finally, everything is converted to base58.

          * Note that when hashing is applied on hashes, the actual hash is being hashed,
            and not the hexadecimal representation of the hash.

        -The steps are from https://www.btproof.com/technical.html
        -Below, the steps are implemented with the help of pybitcointools.

        -The hash is *not* unique per edition, so that a single bitcoin
         transaction can register more than one piece, all with the same hash.
        """
        assert 'placeholder' not in self.bitcoin_path
        data = str([
            unicode(self.title),
            unicode(self.artist_name),
            unicode(self.date_created),
            unicode(self.num_editions),
            unicode(self.bitcoin_path),
            unicode(self.digital_work.hash),
        ])
        address = unicode(
            pybitcointools.bin_to_b58check(pybitcointools.bin_hash160(data)))
        return address
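The five steps in the docstring can be reproduced with nothing more than hashlib and a small Base58 encoder. The sketch below illustrates those steps under stated assumptions (main-net version byte 0x00, and a hashlib build whose OpenSSL backend exposes ripemd160); it is not the project's code, but for the same input it should match pybitcointools.bin_to_b58check(bin_hash160(data)) with the default magic byte.

import binascii
import hashlib

B58_ALPHABET = '123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz'

def base58check_address(data, version=b'\x00'):
    # Steps 1-2: SHA-256 the data, then RIPEMD-160 the binary SHA-256 digest
    # (the digest itself is hashed, not its hexadecimal representation).
    h160 = hashlib.new('ripemd160', hashlib.sha256(data).digest()).digest()
    # Step 3: prepend the version byte (0x00 for main net).
    versioned = version + h160
    # Step 4: checksum = first 4 bytes of sha256(sha256(versioned_hash)).
    checksum = hashlib.sha256(hashlib.sha256(versioned).digest()).digest()[:4]
    payload = versioned + checksum
    # Step 5: Base58-encode, keeping one '1' per leading zero byte.
    number = int(binascii.hexlify(payload), 16)
    encoded = ''
    while number > 0:
        number, remainder = divmod(number, 58)
        encoded = B58_ALPHABET[remainder] + encoded
    pad = len(payload) - len(payload.lstrip(b'\x00'))
    return '1' * pad + encoded

# e.g. base58check_address(b'some piece data') -> a main-net address starting with '1'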
Example 5
 def hash_as_address_no_metada(self):
     address = unicode(
         pybitcointools.bin_to_b58check(
             pybitcointools.bin_hash160(self.digital_work.hash)))
     return address
Example 6
def create_address_db(dbfilename, blockdir, update=False, progress_bar=True):
    """Creates an AddressSet database and saves it to a file

    :param dbfilename: the file name where the database is saved (overwriting it)
    :type dbfilename: str
    :param blockdir: the data directory where the Bitcoin block files reside
    :type blockdir: str
    :param update: if True, the existing database file is updated from new txs
    :type update: bool
    :param progress_bar: True to enable the progress bar
    :type progress_bar: bool
    """

    for filename in glob.iglob(path.join(blockdir, "blk*.dat")):
        if path.isfile(filename): break
    else:
        raise ValueError("no block files exist in blocks directory '{}'".format(blockdir))

    if update:
        print("Loading address database ...")
        address_set   = AddressSet.fromfile(open(dbfilename, "r+b"), mmap_access=mmap.ACCESS_WRITE)
        first_filenum = address_set.last_filenum
        print()
    else:
        first_filenum = 0

    filename = "blk{:05}.dat".format(first_filenum)
    if not path.isfile(path.join(blockdir, filename)):
        raise ValueError("first block file '{}' doesn't exist in blocks directory '{}'".format(filename, blockdir))

    if not update:
        # Open the file early to make sure we can, but don't overwrite it yet
        # (see AddressSet.tofile() for why io.open() instead of open() is used)
        try:
            dbfile = io.open(dbfilename, "r+b")
        except IOError:
            dbfile = io.open(dbfilename, "wb")
        # With the default bytes_per_addr and max_load, this allocates
        # about 4 GiB which is room for a little over 400 million addresses
        address_set = AddressSet(1 << 29)

    if progress_bar:
        try:
            import progressbar
        except ImportError:
            progress_bar = False

    if progress_bar:
        print("Parsing block files ...")
        for filenum in itertools.count(first_filenum):
            filename = path.join(blockdir, "blk{:05}.dat".format(filenum))
            if not path.isfile(filename):
                break
        progress_label = progressbar.FormatLabel(" {:11,} addrs. %(elapsed)s, ".format(len(address_set)))
        progress_bar = progressbar.ProgressBar(maxval=filenum-first_filenum, widgets=[
            progressbar.SimpleProgress(), " ",
            progressbar.Bar(left="[", fill="-", right="]"),
            progress_label,
            progressbar.ETA()
        ])
        progress_bar.start()
    else:
        print("Block file   Address count")
        print("------------ -------------")
        # e.g. blk00943.dat   255,212,706

    for filenum in itertools.count(first_filenum):
        filename = path.join(blockdir, "blk{:05}.dat".format(filenum))
        if not path.isfile(filename):
            break
        address_set.last_filenum = filenum

        with open(filename, "rb") as blockfile:
            if not progress_bar:
                print(path.basename(filename), end=" ")

            header = blockfile.read(8)  # read in the magic and remaining (after these 8 bytes) block length
            while len(header) == 8 and header[4:] != b"\0\0\0\0":
                assert header[:4] == b"\xf9\xbe\xb4\xd9"                        # magic

                block = blockfile.read(struct.unpack_from("<I", header, 4)[0])  # read in the rest of the block
                tx_count, offset = varint(block, 80)                            # skips 80 bytes of header
                for tx_num in xrange(tx_count):
                    offset += 4                                                 # skips 4-byte tx version
                    is_bip144 = block[offset] == b"\0"                          # bip-144 marker
                    if is_bip144:
                        offset += 2                                             # skips 1-byte marker & 1-byte flag
                    txin_count, offset = varint(block, offset)
                    for txin_num in xrange(txin_count):
                        sigscript_len, offset = varint(block, offset + 36)      # skips 32-byte tx id & 4-byte tx index
                        offset += sigscript_len + 4                             # skips sequence number & sigscript
                    txout_count, offset = varint(block, offset)
                    for txout_num in xrange(txout_count):
                        pkscript_len, offset = varint(block, offset + 8)        # skips 8-byte satoshi count

                        # If this is a P2PKH script (OP_DUP OP_HASH160 PUSH(20) <20 address bytes> OP_EQUALVERIFY OP_CHECKSIG)
                        if pkscript_len == 25 and block[offset:offset+3] == b"\x76\xa9\x14" and block[offset+23:offset+25] == b"\x88\xac":
                            # Add the discovered address to the address set
                            address_set.add(block[offset+3:offset+23])

                        elif pkscript_len == 67 and block[offset] == b"\x41" and block[offset + 66] == b"\xac":
                            address_set.add(bin_hash160(block[offset+1:offset+66]))

                        elif pkscript_len == 35 and block[offset] == b"\x21" and block[offset+34] == b"\xac":
                            address_set.add(bin_hash160(decompress(block[offset+1:offset+34])))

                        offset += pkscript_len                                  # advances past the pubkey script
                    if is_bip144:
                        for txin_num in xrange(txin_count):
                            stackitem_count, offset = varint(block, offset)
                            for stackitem_num in xrange(stackitem_count):
                                stackitem_len, offset = varint(block, offset)
                                offset += stackitem_len                         # skips this stack item
                    offset += 4                                                 # skips the 4-byte locktime
                header = blockfile.read(8)  # read in the next magic and remaining block length

        if progress_bar:
            progress_label.format = " {:11,} addrs. %(elapsed)s, ".format(len(address_set))  # updates address count
            nextval = progress_bar.currval + 1
            if nextval > progress_bar.maxval:  # can happen if the bitcoin client is left running
                progress_bar.maxval = nextval
            progress_bar.update(nextval)
        else:
            print("{:13,}".format(len(address_set)))

    if progress_bar:
        progress_bar.widgets.pop()  # remove the ETA
        progress_bar.finish()
    if update:
        print("\nSaving changes to address database ...")
        address_set.close()
    else:
        print("\nSaving address database ...")
        dbfile.truncate(0)
        address_set.tofile(dbfile)
        dbfile.close()

    print("\nDone.")