async def __register_trade_ticket(self, imagedata_hash_hex, tradetype,
                                      copies, price, expiration):
        imagedata_hash = bytes_from_hex(imagedata_hash_hex)

        # We do this here to prevent creating a ticket we already know is invalid. However, anything
        # might happen before this ticket makes it to the network, so this check can't be put in validate()
        if tradetype == "ask":
            # make sure we have enough remaining copies left if we are asking
            require_true(
                self.__artregistry.enough_copies_left(imagedata_hash,
                                                      self.__pubkey, copies))
        else:
            # not a very thorough check, as we might have funds locked in collateral addresses
            # if this is the case we will fail later when trying to move the funds
            if self.__blockchain.getbalance() < price:
                raise ValueError("Not enough money in wallet!")

        # watched address is the address we are using to receive the funds in asks and send the collateral to in bids
        watched_address = self.__blockchain.getnewaddress()

        transreg = TradeRegistrationClient(self.__privkey, self.__pubkey,
                                           self.__blockchain,
                                           self.__chainwrapper,
                                           self.__artregistry)
        await transreg.register_trade(imagedata_hash, tradetype,
                                      watched_address, copies, price,
                                      expiration)
    def validate(self, blockchain, chainwrapper, artregistry):
        # make sure artwork is properly registered
        artregistry.get_ticket_for_artwork(self.imagedata_hash)

        # if this is a bid, validate collateral
        if self.type == "bid":
            # does the collateral utxo exist?
            transaction = blockchain.getrawtransaction(self.collateral_txid, 1)

            # is the utxo one we are not already watching? - this prevents a user from reusing collateral
            _, listen_utxos = artregistry.get_listen_addresses_and_utxos()
            require_true(self.collateral_txid not in listen_utxos)

            # validate collateral
            valid = False
            for vout in transaction["vout"]:
                if len(vout["scriptPubKey"]["addresses"]) > 1:
                    continue

                # validate address and amount of collateral
                value = vout["value"]
                address = vout["scriptPubKey"]["addresses"][0]
                if address == self.watched_address and value == self.copies * self.price:
                    valid = True
                    break

            if not valid:
                raise ValueError("UTXO does not contain a valid collateral vout (address/amount mismatch)")
    def validate(self, chainwrapper):
        # validate that the author is correct and pubkeys match MNs
        if self.signature_author.pubkey != self.ticket.author:
            raise ValueError("Signature pubkey does not match regticket.author!")

        # prevent nonce reuse
        require_true(chainwrapper.valid_nonce(self.nonce))

        if NetWorkSettings.VALIDATE_MN_SIGNATURES:
            # validate masternode order that's in the ticket
            masternode_ordering = chainwrapper.masternode_workers(self.ticket.blocknum)

            # make sure we got 3 MNs
            if len(masternode_ordering) != 3:
                raise ValueError("Incorrect masternode list returned by get_masternode_order: %s" % masternode_ordering)

            # make sure they're unique
            if len(set([x['IP:port'] for x in masternode_ordering])) != 3:
                raise ValueError(
                    "Masternodes are not unique as returned by get_masternode_order: %s" % masternode_ordering)

            if (self.signature_1.pubkey != base64.b64decode(masternode_ordering[0]['extKey']) or
                    self.signature_2.pubkey != base64.b64decode(masternode_ordering[1]['extKey']) or
                    self.signature_3.pubkey != base64.b64decode(masternode_ordering[2]['extKey'])):
                raise ValueError("Invalid pubkey for masternode ordering")

            # validate signatures
            self.signature_author.validate(self.ticket)
            self.signature_1.validate(self.ticket)
            self.signature_2.validate(self.ticket)
            self.signature_3.validate(self.ticket)
        else:
            # we are running in debug mode, do not check signatures
            pass
    def validate(self, chainwrapper):
        # validate that the author is correct
        if self.signature.pubkey != self.ticket.public_key:
            raise ValueError("Signature pubkey does not match the ticket's public_key!")

        # prevent nonce reuse
        require_true(chainwrapper.valid_nonce(self.nonce))
Example #5
def calculate_spearmans_rho(candidate_fingerprint, fingerprint_table,
                            registered_fingerprints, strictness, threshold):
    with Timer():
        spearman_vector = []
        for i in range(len(fingerprint_table)):
            part = registered_fingerprints[:, i]
            correlation = scipy.stats.spearmanr(candidate_fingerprint,
                                                part).correlation
            spearman_vector.append(correlation)

        spearman_max = np.array(spearman_vector).max()

        above_threshold = np.nonzero(
            np.array(spearman_vector) >= strictness * threshold)[0].tolist()

        percentage = len(above_threshold) / len(spearman_vector)

    print(('Selected %s fingerprints for further testing ' +
           '(%.2f%% of the total registered fingerprints).') %
          (len(above_threshold), round(100 * percentage, 2)))

    further_testing_needed = [
        registered_fingerprints[:, current_index].tolist()
        for current_index in above_threshold
    ]

    spearman_scores = [
        scipy.stats.spearmanr(candidate_fingerprint, x).correlation
        for x in further_testing_needed
    ]
    require_true(all(np.array(spearman_scores) >= strictness * threshold))

    return spearman_vector, spearman_max, further_testing_needed
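# A minimal usage sketch with synthetic data (hedged): `registered_fingerprints` is
# assumed to hold one fingerprint per column, `fingerprint_table` is only used for its
# length, and `Timer`/`require_true` are helpers from the surrounding codebase.
import numpy as np

fingerprint_length, num_registered = 128, 50
registered = np.random.rand(fingerprint_length, num_registered)  # one fingerprint per column
candidate = np.random.rand(fingerprint_length)                   # fingerprint being checked

vector, top_score, further = calculate_spearmans_rho(
    candidate, list(range(num_registered)), registered,
    strictness=0.99, threshold=0.8)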
Example #6
    def masternode_sign_activation_ticket(self, data, *args, **kwargs):
        # parse inputs
        signature_serialized, activationticket_serialized, image_serialized = data
        signed_actticket = Signature(serialized=signature_serialized)
        image = ImageData(serialized=image_serialized)
        activation_ticket = ActivationTicket(
            serialized=activationticket_serialized)

        # test image data for validity in a jailed environment
        converter = JailedImageParser(self.__nodenum, image.image)
        converter.parse()

        # validate client's signature on the ticket - so only original client can activate
        require_true(signed_actticket.pubkey == activation_ticket.author)
        signed_actticket.validate(activation_ticket)

        # validate activation ticket
        activation_ticket.validate(self.__chainwrapper, image)

        # sign activation ticket
        ticket_signed_by_mn = Signature(
            dictionary={
                "signature":
                pastel_id_write_signature_on_data_func(
                    activationticket_serialized, self.__priv, self.__pub),
                "pubkey":
                self.__pub,
            })
        return ticket_signed_by_mn.serialize()
def masternode_validate_registration_ticket(data, *args, **kwargs):
    # parse inputs
    artist_pk = kwargs.get('sender_id')
    mn_ticket_logger.info(
        'Masternode validate regticket, data: {}'.format(data))
    regticket_serialized, regticket_signature_serialized = data
    regticket = RegistrationTicket(serialized=regticket_serialized)
    signed_regticket = Signature(serialized=regticket_signature_serialized)
    require_true(signed_regticket.pastelid == regticket.author)
    signed_regticket.validate(regticket)

    # validate registration ticket
    regticket.validate()
    upload_code = uuid.uuid4().bytes

    # TODO: clean upload code and regticket from local db when ticket was placed on the blockchain
    # TODO: clean upload code and regticket from local db if they're old enough
    MASTERNODE_DB.connect(reuse_if_open=True)
    Regticket.create(regticket=regticket_serialized,
                     upload_code=upload_code,
                     created=datetime.now(),
                     artists_signature_ticket=regticket_signature_serialized,
                     artist_pk=artist_pk,
                     image_hash=regticket.imagedata_hash)
    return upload_code
Example #8
    def expired(self, current_block_height):
        require_true(self.tickettype == "trade")

        if self.ticket.expiration != 0:
            blocks_elapsed = current_block_height - self.created
            if blocks_elapsed > self.ticket.expiration:
                return True
        return False
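    # Worked example (hedged, assuming `created` holds the block height at which the
    # ticket was stored): with created == 100 and ticket.expiration == 10,
    # expired(111) returns True (11 blocks elapsed > 10) while expired(110) returns False.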
Example #9
def retrieve_data_from_utxo(jsonrpc, blockchain_transaction_id):
    # raw = jsonrpc.getrawtransaction(blockchain_transaction_id)
    # outputs = raw.split('0100000000000000')
    raw = jsonrpc.getrawtransaction(blockchain_transaction_id)
    outputs = raw.split('01000000000000')  # ANIME: two zeros fewer than BTC
    # for idx, output in enumerate(outputs):
    #     print(idx, output)
    encoded_hex_data = ''
    for output in outputs[
            1:-2]:  # there are 3 65-byte parts in this that we need
        cur = 6
        encoded_hex_data += output[cur:cur + 130]
        cur += 132
        encoded_hex_data += output[cur:cur + 130]
        cur += 132
        encoded_hex_data += output[cur:cur + 130]
    encoded_hex_data += outputs[-2][6:-4]
    reconstructed_combined_data = binascii.a2b_hex(encoded_hex_data).decode(
        'utf-8')
    reconstructed_length_of_compressed_data_hex_string = reconstructed_combined_data[
        0:
        30]  # len(hexlify('{0:015}'.format(len(encoded_zstd_compressed_data)).encode('utf-8'))) is 30

    reconstructed_length_of_compressed_data_hex_string = int(
        unhexstr(reconstructed_length_of_compressed_data_hex_string).decode(
            'utf-8').lstrip('0'))
    reconstructed_combined_data__remainder_1 = reconstructed_combined_data[30:]
    length_of_standard_hash_string = NetWorkSettings.CNODE_HEX_DIGEST_SIZE
    reconstructed_compression_dictionary_file_hash = reconstructed_combined_data__remainder_1[
        0:length_of_standard_hash_string]
    reconstructed_combined_data__remainder_2 = reconstructed_combined_data__remainder_1[
        length_of_standard_hash_string:]
    reconstructed_uncompressed_data_file_hash = reconstructed_combined_data__remainder_2[
        0:length_of_standard_hash_string]
    reconstructed_combined_data__remainder_3 = reconstructed_combined_data__remainder_2[
        length_of_standard_hash_string:]
    input_data_hash = reconstructed_combined_data__remainder_3[
        0:length_of_standard_hash_string]
    reconstructed_combined_data__remainder_4 = reconstructed_combined_data__remainder_3[
        length_of_standard_hash_string:]
    reconstructed_encoded_zstd_compressed_data_padded = reconstructed_combined_data__remainder_4.replace(
        'A', ''
    )  # Not sure where this comes from; somehow it is introduced into the data (note this is "A", not "a").
    calculated_padding_length = len(
        reconstructed_encoded_zstd_compressed_data_padded
    ) - reconstructed_length_of_compressed_data_hex_string
    reconstructed_encoded_zstd_compressed_data = reconstructed_encoded_zstd_compressed_data_padded[
        0:-calculated_padding_length]
    output_data = unhexstr(reconstructed_encoded_zstd_compressed_data)
    hash_of_output_data = get_cnode_digest_hex(output_data)
    require_true(hash_of_output_data == input_data_hash)
    # print('Successfully reconstructed and decompressed data!')
    return output_data
def verify_and_unpack(raw_message_contents, expected_receiver_id):
    # validate raw_message_contents
    ensure_type(raw_message_contents, bytes)
    if len(raw_message_contents) > NetWorkSettings.RPC_MSG_SIZELIMIT:
        raise ValueError("raw_message_contents is too large: %s > %s" % (len(raw_message_contents),
                                                                         NetWorkSettings.RPC_MSG_SIZELIMIT))

    # raw=False makes this unpack to utf-8 strings
    container = msgpack.unpackb(raw_message_contents, ext_hook=ext_hook, raw=False)
    ensure_type(container, dict)

    if container.get("version") is None:
        raise ValueError("version field must be present in all containers!")

    version = ensure_type_of_field(container, "version", int)

    if version > MAX_SUPPORTED_VERSION:
        raise NotImplementedError("version %s not implemented, is larger than %s" % (version, MAX_SUPPORTED_VERSION))

    if version == 1:
        # validate all keys for this version
        a, b = set(container.keys()), VALID_CONTAINER_KEYS_v1
        if len(a - b) + len(b - a) > 0:
            raise KeyError("Keys don't match %s != %s" % (a, b))

        # typecheck all the fields
        sender_id, receiver_id, data, nonce, timestamp, signature = ensure_types_for_v1(container)

        if receiver_id != expected_receiver_id:
            raise ValueError("receiver_id is not us (%s != %s)" % (receiver_id, expected_receiver_id))

        # TODO: validate timestamp - is this enough?
        require_true(timestamp > time.time() - 60)
        require_true(timestamp < time.time() + 60)

        # validate signature:
        #  since signature can't be put into the dict we have to recreate it without the signature field
        #  this validates that the message was indeed signed by the sender_id public key
        tmp = container.copy()
        tmp["signature"] = b''
        sleep_rand()
        raw_hash = get_pynode_digest_bytes(msgpack.packb(tmp, default=default, use_bin_type=True))
        verified = pastel_id_verify_signature_with_public_key_func(raw_hash, signature, sender_id)
        sleep_rand()

        if not verified:
            raise ValueError("Verification failed!")
        # end

        return sender_id, data
    else:
        raise NotImplementedError("version %s not implemented" % version)
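# Hedged sketch of the sender-side counterpart implied by the verification above: the
# container is serialized with an empty signature field, digested, and that digest is
# what gets signed, so the verifier can recreate exactly the same bytes.
# `privkey`/`pubkey` are placeholder names, not identifiers from the source.
#   container["signature"] = b''
#   raw_hash = get_pynode_digest_bytes(msgpack.packb(container, default=default, use_bin_type=True))
#   container["signature"] = pastel_id_write_signature_on_data_func(raw_hash, privkey, pubkey)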
    def validate(self):
        # verify luby chunks

        luby.verify_blocks(self.lubychunks)

        # assemble image from chunks and check if it matches
        reconstructed = luby.decode(self.lubychunks)
        require_true(reconstructed == self.image)

        # validate that thumbnail is the same image
        # TODO: we should not regenerate the thumbnail, just look for similarities as this might not be deterministic
        new_thumbnail = self.generate_thumbnail(self.image)
        require_true(self.thumbnail == new_thumbnail)
Example #12
    def add_artwork(self, txid, finalactticket, regticket):
        artid = regticket.imagedata_hash
        self.__artworks[artid] = ArtWork(artid, txid, finalactticket, regticket)
        self.__logger.debug("FinalActivationTicket added to artregistry: %s" % finalactticket)

        # update owner DB
        if self.__owners.get(artid) is None:
            self.__owners[artid] = {}
            self.__tickets[artid] = []
        artdb = self.__owners[artid]

        # assert that this author does not already have an entry for this artwork
        require_true(artdb.get(regticket.author) is None)

        artdb[regticket.author] = regticket.total_copies
        self.__logger.debug("Author %s granted %s copies" % (regticket.author, regticket.total_copies))
Example #13
    async def issue_random_tests_forever(self, waittime, number_of_chunks=1):
        while True:
            await asyncio.sleep(waittime)

            chunks = self.__chunkmanager.select_random_chunks_we_have(
                number_of_chunks)
            for chunk in chunks:
                self.__logger.debug("Selected chunk %s for random check" %
                                    chunkid_to_hex(chunk.chunkid))

                # get chunk
                data = self.__chunkmanager.get_chunk(chunk)

                # pick a random range
                require_true(len(data) > 1024)
                start = random.randint(0, len(data) - 1024)
                end = start + 1024

                # calculate digest
                digest = get_pynode_digest_hex(data[start:end])
                self.__logger.debug("Digest for range %s - %s is: %s" %
                                    (start, end, digest))

                # find owners for all the alt keys who are not us
                owners = self.__aliasmanager.find_other_owners_for_chunk(
                    chunk.chunkid)

                # call RPC on all other MNs
                for owner in owners:
                    mn = self.__mn_manager.get(owner)

                    try:
                        response_digest = await mn.send_rpc_spotcheck(
                            chunk.chunkid, start, end)
                    except RPCException as exc:
                        self.__logger.info(
                            "SPOTCHECK RPC FAILED for node %s with exception %s"
                            % (owner, exc))
                    else:
                        if response_digest != digest:
                            self.__logger.warning(
                                "SPOTCHECK FAILED for node %s (%s != %s)" %
                                (owner, digest, response_digest))
                        else:
                            self.__logger.debug(
                                "SPOTCHECK SUCCESS for node %s for chunk: %s" %
                                (owner, digest))
Example #14
    async def __collect_mn_regticket_signatures(self, signature, ticket, masternode_ordering):
        signatures = []
        for mn in masternode_ordering:
            data_from_mn = await mn.call_masternode("SIGNREGTICKET_REQ", "SIGNREGTICKET_RESP",
                                                    [signature.serialize(), ticket.serialize()])

            # client parses the signed ticket and validates the signature
            mn_signature = Signature(serialized=data_from_mn)

            # is the data the same and the signature valid?
            if NetWorkSettings.VALIDATE_MN_SIGNATURES:
                require_true(mn_signature.pubkey == mn.pubkey)
            mn_signature.validate(ticket)

            # add signature to collected signatures
            signatures.append(mn_signature)
        return signatures
    def validate(self, chainwrapper):
        # we have no way to check these but will do so on activation:
        #  o fingerprints
        #  o lubyhashes
        #  o thumbnailhash
        #
        # after these checks are done we know that fingerprints are not dupes and there is no race

        # validate that lubyhashes and lubychunks are the same length
        require_true(len(self.lubyhashes) == len(self.lubyseeds))

        # validate that order txid is not too old
        block_distance = chainwrapper.get_block_distance(chainwrapper.get_last_block_hash(), self.order_block_txid)
        if block_distance > NetWorkSettings.MAX_REGISTRATION_BLOCK_DISTANCE:
            raise ValueError("Block distance between order_block_height and current block is too large!")
        # validate that art hash doesn't exist:
        # TODO: move this artwork index logic into chainwrapper
        fingerprint_db = {}
        if NetWorkSettings.LONG_REGTICKET_VALIDATION_ENABLED:
            for txid, ticket in chainwrapper.all_ticket_iterator():
                if type(ticket) == FinalRegistrationTicket:
                    ticket.validate(chainwrapper)
                else:
                    continue

                regticket = ticket.ticket

                # collect fingerprints
                # TODO: only collect this for activated regtickets and tickets not older than X blocks
                fingerprint_db[regticket.imagedata_hash] = ("DUMMY_PATH", regticket.fingerprints)  # TODO: do we need this?

                # validate that this art hash does not yet exist on the blockchain
                # TODO: only prohibit registration when this was registered in the past X blocks
                # TODO: if regticket is activated: prohibit registration forever
                require_true(regticket.imagedata_hash != self.imagedata_hash)

        # validate that fingerprints are not dupes
        if len(fingerprint_db) > 0:
            # TODO: check for fingerprint dupes
            if NetWorkSettings.DUPE_DETECTION_ENABLED:
                pandas_table = assemble_fingerprints_for_pandas([(k, v) for k, v in fingerprint_db.items()])
                is_duplicate, params_df = measure_similarity(self.fingerprints, pandas_table)
                if is_duplicate:
                    raise ValueError("Image failed fingerprint check!")
Example #16
    def reconstruct(serialized: bytes) -> 'RPCMessage':
        if len(serialized) > Settings.RPC_MSG_SIZELIMIT:
            raise ValueError("Message is too large: %s > %s" %
                             (len(serialized), Settings.RPC_MSG_SIZELIMIT))

        container = msgpack.unpackb(serialized, ext_hook=ext_hook, raw=False)
        if not validate_container_format(container):
            raise ValueError('Invalid container format')

        # validate receiver id is us
        if container['receiver_id'] != get_blockchain_connection().pastelid:
            raise ValueError("receiver_id is not us (%s != %s)" %
                             (container['receiver_id'],
                              get_blockchain_connection().pastelid))

        require_true(container['timestamp'] > time.time() - 60)
        require_true(container['timestamp'] < time.time() + 60)

        return RPCMessage(container['data'],
                          container['receiver_id'],
                          container=container)
Example #17
    def masternode_sign_registration_ticket(self, data, *args, **kwargs):
        # parse inputs
        signature_serialized, regticket_serialized = data
        signed_regticket = Signature(serialized=signature_serialized)
        regticket = RegistrationTicket(serialized=regticket_serialized)

        # validate client's signature on the ticket
        require_true(signed_regticket.pubkey == regticket.author)
        signed_regticket.validate(regticket)

        # validate registration ticket
        regticket.validate(self.__chainwrapper)

        # sign regticket
        ticket_signed_by_mn = Signature(
            dictionary={
                "signature":
                pastel_id_write_signature_on_data_func(
                    regticket_serialized, self.__priv, self.__pub),
                "pubkey":
                self.__pub,
            })
        return ticket_signed_by_mn.serialize()
Example #18
    def register_transfer(self, recipient_pubkey, imagedata_hash, copies):
        transferticket = TransferTicket(
            dictionary={
                "public_key": self.__pubkey,
                "recipient": recipient_pubkey,
                "imagedata_hash": imagedata_hash,
                "copies": copies,
            })
        transferticket.validate(self.__chainwrapper, self.__artregistry)

        # Make sure enough remaining copies are left on our key
        # We do this here to prevent creating a ticket we already know is invalid. However, anything
        # might happen before this ticket makes it to the network, so this check can't be put in validate()
        require_true(
            self.__artregistry.enough_copies_left(
                transferticket.imagedata_hash, transferticket.public_key,
                transferticket.copies))

        signature = Signature(
            dictionary={
                "signature":
                pastel_id_write_signature_on_data_func(
                    transferticket.serialize(), self.__privkey, self.__pubkey),
                "pubkey":
                self.__pubkey,
            })
        signature.validate(transferticket)

        finalticket = FinalTransferTicket(
            dictionary={
                "ticket": transferticket.to_dict(),
                "signature": signature.to_dict(),
                "nonce": str(uuid.uuid4()),
            })
        finalticket.validate(self.__chainwrapper)

        self.__chainwrapper.store_ticket(finalticket)
Example #19
    def validate(self):
        from core_modules.blackbox_modules.dupe_detection_utils import measure_similarity, \
            assemble_fingerprints_for_pandas

        # we have no way to check these but will do so on activation:
        #  o fingerprints
        #  o lubyhashes
        #  o thumbnailhash
        #
        # after these checks are done we know that fingerprints are not dupes and there is no race

        # validate that lubyhashes and lubychunks are the same length
        require_true(len(self.lubyhashes) == len(self.lubyseeds))

        # validate that order txid is not too old
        block_distance = get_block_distance(
            get_blockchain_connection().getbestblockhash(),
            self.order_block_txid)
        if block_distance > Settings.MAX_REGISTRATION_BLOCK_DISTANCE:
            raise ValueError(
                "Block distance between order_block_height and current block is too large!"
            )
        # validate that art hash doesn't exist:
        # TODO: move this artwork index logic into chainwrapper
        fingerprint_db = {}

        # validate that fingerprints are not dupes
        if len(fingerprint_db) > 0:
            # TODO: check for fingerprint dupes
            if Settings.DUPE_DETECTION_ENABLED:
                pandas_table = assemble_fingerprints_for_pandas([
                    (k, v) for k, v in fingerprint_db.items()
                ])
                is_duplicate, params_df = measure_similarity(
                    self.fingerprints, pandas_table)
                if is_duplicate:
                    raise ValueError("Image failed fingerprint check!")
Example #20
0
def __compute_bootstrapped_hoeffdings_d_func(x, y, sample_size):
    x = np.array(x)
    y = np.array(y)
    require_true(x.size == y.size)
    original_length_of_input = x.size
    bootstrap_sample_indices = __generate_bootstrap_sample_func(
        original_length_of_input - 1, sample_size)
    N = sample_size
    x_bootstrap_sample = x[bootstrap_sample_indices]
    y_bootstrap_sample = y[bootstrap_sample_indices]
    R_bootstrap = scipy.stats.rankdata(x_bootstrap_sample)
    S_bootstrap = scipy.stats.rankdata(y_bootstrap_sample)
    hoeffdingd = functools.partial(__hoeffd_inner_loop_func,
                                   R=R_bootstrap,
                                   S=S_bootstrap)
    Q_bootstrap = [hoeffdingd(x) for x in range(sample_size)]
    Q = np.array(Q_bootstrap)
    D1 = sum(((Q - 1) * (Q - 2)))
    D2 = sum((R_bootstrap - 1) * (R_bootstrap - 2) * (S_bootstrap - 1) *
             (S_bootstrap - 2))
    D3 = sum((R_bootstrap - 2) * (S_bootstrap - 2) * (Q - 1))
    D = 30 * ((N - 2) * (N - 3) * D1 + D2 - 2 *
              (N - 2) * D3) / (N * (N - 1) * (N - 2) * (N - 3) * (N - 4))
    return D
Example #21
def pushdata(data):
    require_true(len(data) < OP_PUSHDATA1[0])
    return struct.pack('<B', len(data)) + data
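# Hedged illustration, assuming OP_PUSHDATA1 is the single-byte opcode 0x4c (76) as in
# Bitcoin script: for payloads under 76 bytes the length byte itself acts as the push
# opcode, so a 20-byte hash is encoded as that length byte followed by the data.
#   pushdata(b'\x00' * 20)  ->  b'\x14' + b'\x00' * 20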
Example #22
    def validate_image(self, image_data, chainwrapper):
        # TODO: we should validate image only after the 10% burn fee is paid by the wallet
        # NOTE (assumption): `image_data` is treated as the already-parsed ImageData object,
        # and the chainwrapper is passed in explicitly, mirroring the fuller
        # validate(self, chainwrapper, image) later in this listing
        image = image_data
        # validate image
        image.validate()

        # get registration ticket
        final_regticket = chainwrapper.retrieve_ticket(
            self.registration_ticket_txid)

        # validate final ticket
        final_regticket.validate(chainwrapper)

        # validate registration ticket
        regticket = final_regticket.ticket

        # validate that the authors match
        require_true(regticket.author == self.author)

        # validate that imagehash, fingerprints, lubyhashes and thumbnailhash indeed belong to the image
        require_true(
            regticket.fingerprints ==
            image.generate_fingerprints())  # TODO: is this deterministic?
        require_true(regticket.lubyhashes == image.get_luby_hashes())
        require_true(regticket.lubyseeds == image.get_luby_seeds())
        require_true(regticket.thumbnailhash == image.get_thumbnail_hash())

        # validate that MN order matches between registration ticket and activation ticket
        require_true(regticket.order_block_txid == self.order_block_txid)

        # image hash matches regticket hash
        require_true(regticket.imagedata_hash == image.get_artwork_hash())

        # run nsfw check
        if NSFWDetector.is_nsfw(image.image):
            raise ValueError("Image is NSFW, score: %s" %
                             NSFWDetector.get_score(image.image))
Example #23
def store_data_in_utxo(jsonrpc, input_data):
    uncompressed_file_size_in_bytes = sys.getsizeof(input_data)
    # print('Now storing preparing file for storage in blockchain. Original uncompressed file size in bytes: ' + str(
    #     uncompressed_file_size_in_bytes) + ' bytes')

    input_data_hash = get_cnode_digest_bytes(input_data)

    # TODO: remove unnecessary hashes
    compression_dictionary_file_hash = input_data_hash
    uncompressed_data_file_hash = input_data_hash
    compressed_data_file_hash = input_data_hash

    unspent = list(jsonrpc.listunspent())
    # TODO: figure out what this number should be
    (txins, change) = select_txins(1, unspent)
    txouts = []
    encoded_zstd_compressed_data = hexlify(input_data)
    length_of_compressed_data_string = '{0:015}'.format(
        len(encoded_zstd_compressed_data)).encode('utf-8')
    combined_data_hex = hexlify(length_of_compressed_data_string) + hexlify(
        compression_dictionary_file_hash
    ) + hexlify(uncompressed_data_file_hash) + hexlify(
        compressed_data_file_hash) + encoded_zstd_compressed_data + hexlify(
            ('0' * 100).encode('utf-8'))
    fd = io.BytesIO(combined_data_hex)
    while True:
        scriptPubKey = checkmultisig_scriptPubKey_dump(fd)
        if scriptPubKey is None:
            break
        value = NetWorkSettings.BASE_TRANSACTION_AMOUNT
        txouts.append((value, scriptPubKey))
        change -= value
    out_value = Decimal(NetWorkSettings.BASE_TRANSACTION_AMOUNT)  # dest output
    change -= out_value
    receiving_blockchain_address = jsonrpc.getnewaddress()
    txouts.append((out_value, OP_DUP + OP_HASH160 +
                   pushdata(addr2bytes(receiving_blockchain_address)) +
                   OP_EQUALVERIFY + OP_CHECKSIG))
    change_address = jsonrpc.getnewaddress()  # change output
    txouts.append([
        change, OP_DUP + OP_HASH160 + pushdata(addr2bytes(change_address)) +
        OP_EQUALVERIFY + OP_CHECKSIG
    ])
    tx = packtx(txins, txouts)
    signed_tx = jsonrpc.signrawtransaction(hexlify(tx).decode('utf-8'))
    fee = (Decimal(len(signed_tx['hex'])) / 2 / 1024) * FEEPERKB
    change -= fee
    txouts[-1][0] = change
    final_tx = packtx(txins, txouts)
    signed_tx = jsonrpc.signrawtransaction(hexlify(final_tx).decode('utf-8'))
    require_true(signed_tx['complete'])
    hex_signed_transaction = signed_tx['hex']
    print('Sending data transaction to address: ' +
          receiving_blockchain_address)
    print('Size: %d  Fee: %2.8f' % (len(hex_signed_transaction) / 2, fee))
    print('store_data_in_utxo input_data: {}'.format(input_data))
    print('store_data_in_utxo hex_signed_transaction: {}'.format(
        hex_signed_transaction))

    send_raw_transaction_result = jsonrpc.sendrawtransaction(
        hex_signed_transaction)
    blockchain_transaction_id = send_raw_transaction_result
    # print('Transaction ID: ' + blockchain_transaction_id)
    return blockchain_transaction_id
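# Hedged round-trip sketch: store_data_in_utxo() hexlifies the payload into fake multisig
# outputs, and retrieve_data_from_utxo() (Example #9 above) parses it back out and checks
# the embedded digest. `jsonrpc` is assumed to be an RPC proxy to a node with spendable funds.
#   txid = store_data_in_utxo(jsonrpc, b"some payload")
#   assert retrieve_data_from_utxo(jsonrpc, txid) == b"some payload"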
    def validate(self, chainwrapper, image):
        # TODO:
        # X validate that this ticket references a valid regticket
        #   X regticket is on chain
        #   X regticket is not yet activated
        #   X regticket signatures are valid
        # X validate metadata:
        #   X fingerprints matches image
        #   X lubyhashes matches image
        #   X thumbnailhash matches image
        # X validate image
        #   X image actually hashes to imagedata_hash in the regticket
        #   X image is sfw
        #   X luby chunks generate the image

        # TODO: check that final_regticket ticket is not activated yet

        # validate image
        image.validate()

        # get registration ticket
        final_regticket = chainwrapper.retrieve_ticket(self.registration_ticket_txid)

        # validate final ticket
        final_regticket.validate(chainwrapper)

        # validate registration ticket
        regticket = final_regticket.ticket

        # validate that the authors match
        require_true(regticket.author == self.author)

        # validate that imagehash, fingerprints, lubyhashes and thumbnailhash indeed belong to the image
        require_true(regticket.fingerprints == image.generate_fingerprints())  # TODO: is this deterministic?
        require_true(regticket.lubyhashes == image.get_luby_hashes())
        require_true(regticket.lubyseeds == image.get_luby_seeds())
        require_true(regticket.thumbnailhash == image.get_thumbnail_hash())

        # validate that MN order matches between registration ticket and activation ticket
        require_true(regticket.order_block_txid == self.order_block_txid)

        # image hash matches regticket hash
        require_true(regticket.imagedata_hash == image.get_artwork_hash())

        # run nsfw check
        if NSFWDetector.is_nsfw(image.image):
            raise ValueError("Image is NSFW, score: %s" % NSFWDetector.get_score(image.image))
Example #25
def pushint(n):
    require_true(0 < n <= 16)
    return struct.pack('<B', 0x51 + n - 1)
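# Illustration: n in 1..16 maps onto the Bitcoin-script OP_1..OP_16 opcodes (0x51..0x60),
# so pushint(1) returns b'\x51' and pushint(16) returns b'\x60'.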