Example #1
def verify_singlesig(address, hash_hex, scriptSig):
    sighex, pubkey_hex = virtualchain.btc_script_deserialize(scriptSig)
    # verify pubkey_hex corresponds to address
    if keylib.ECPublicKey(pubkey_hex).address() != address:
        log.warn(("Address {} does not match the public key in the" +
                  " provided scriptSig: provided pubkey = {}").format(
                      address, pubkey_hex))
        return False

    sig64 = base64.b64encode(binascii.unhexlify(sighex))

    return virtualchain.ecdsalib.verify_digest(hash_hex, pubkey_hex, sig64)
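A hypothetical invocation of verify_singlesig; the address, sighash, and scriptSig values below are placeholders, and the check only succeeds with real transaction data (verify_singlesig and its imports are assumed to be in scope):

address = '1ExampleAddressXXXXXXXXXXXXXXXXXXX'   # hypothetical sender address
sighash_hex = 'aa' * 32                          # hypothetical transaction sighash (hex)
scriptSig_hex = '00'                             # placeholder; use the spending input's scriptSig hex

if not verify_singlesig(address, sighash_hex, scriptSig_hex):
    log.warn("scriptSig does not match {}".format(address))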
Example #2
def get_profile(name,
                zonefile_storage_drivers=None,
                profile_storage_drivers=None,
                proxy=None,
                user_zonefile=None,
                name_record=None,
                include_name_record=False,
                include_raw_zonefile=False,
                use_zonefile_urls=True,
                use_legacy=False,
                use_legacy_zonefile=True,
                decode_profile=True):
    """
    Given a name, look up an associated profile.
    Do so by first looking up the zonefile the name points to,
    and then fetching and verifying the profile with the data public key
    (or owner address) listed in that zonefile.

    Notes on backwards compatibility (activated if use_legacy=True and use_legacy_zonefile=True):

    * (use_legacy=True) If the user's zonefile is really a legacy profile from Onename, then
    the profile returned will be the converted legacy profile.  The returned zonefile will still
    be a legacy profile, however.
    The caller can check this and perform the conversion automatically.

    * (use_legacy_zonefile=True) If the name points to a current zonefile that does not have a
    data public key, then the owner address of the name will be used to verify
    the profile's authenticity.

    Returns {'status': True, 'profile': profile, 'zonefile': zonefile, 'public_key': ...} on success.
    * If include_name_record is True, then include 'name_record': name_record with the user's blockchain information
    * If include_raw_zonefile is True, then include 'raw_zonefile': raw_zonefile with unparsed zone file

    Returns {'error': ...} on error
    """

    proxy = get_default_proxy() if proxy is None else proxy
    user_profile_pubkey = None

    res = subdomains.is_address_subdomain(str(name))
    if res:
        subdomain, domain = res[1]
        try:
            return subdomains.resolve_subdomain(subdomain, domain)
        except subdomains.SubdomainNotFound as e:
            log.exception(e)
            return {
                'error': "Failed to find name {}.{}".format(subdomain, domain)
            }

    raw_zonefile = None
    if user_zonefile is None:
        user_zonefile = get_name_zonefile(
            name,
            proxy=proxy,
            name_record=name_record,
            include_name_record=True,
            storage_drivers=zonefile_storage_drivers,
            include_raw_zonefile=include_raw_zonefile,
            allow_legacy=True)

        if 'error' in user_zonefile:
            return user_zonefile

        raw_zonefile = None
        if include_raw_zonefile:
            raw_zonefile = user_zonefile.pop('raw_zonefile')

        user_zonefile = user_zonefile['zonefile']

    # is this really a legacy profile?
    if blockstack_profiles.is_profile_in_legacy_format(user_zonefile):
        if not use_legacy:
            return {'error': 'Profile is in legacy format'}

        # convert it
        log.debug('Converting legacy profile to modern profile')
        user_profile = blockstack_profiles.get_person_from_legacy_format(
            user_zonefile)

    elif not user_db.is_user_zonefile(user_zonefile):
        if not use_legacy:
            return {'error': 'Name zonefile is non-standard'}

        # not a legacy profile, but a custom profile
        log.debug('Using custom legacy profile')
        user_profile = copy.deepcopy(user_zonefile)

    else:
        # get user's data public key
        data_address, owner_address = None, None

        try:
            user_data_pubkey = user_db.user_zonefile_data_pubkey(user_zonefile)
            if user_data_pubkey is not None:
                user_data_pubkey = str(user_data_pubkey)
                data_address = keylib.ECPublicKey(user_data_pubkey).address()

        except ValueError:
            # multiple keys defined; we don't know which one to use
            user_data_pubkey = None

        if not use_legacy_zonefile and user_data_pubkey is None:
            # the zonefile has no data public key (legacy style), but legacy zonefiles are not allowed
            return {'error': 'Name zonefile is missing a public key'}

        # find owner address
        if name_record is None:
            name_record = get_name_blockchain_record(name, proxy=proxy)
            if name_record is None or 'error' in name_record:
                log.error(
                    'Failed to look up name record for "{}"'.format(name))
                return {'error': 'Failed to look up name record'}

        assert 'address' in name_record.keys(), json.dumps(name_record,
                                                           indent=4,
                                                           sort_keys=True)
        owner_address = name_record['address']

        # get the profile URLs from the zonefile
        urls = None
        if use_zonefile_urls and user_zonefile is not None:
            urls = user_db.user_zonefile_urls(user_zonefile)

        user_profile = None
        user_profile_pubkey = None

        try:
            user_profile_res = storage.get_mutable_data(
                name,
                user_data_pubkey,
                blockchain_id=name,
                data_address=data_address,
                owner_address=owner_address,
                urls=urls,
                drivers=profile_storage_drivers,
                decode=decode_profile,
                return_public_key=True)

            user_profile = user_profile_res['data']
            user_profile_pubkey = user_profile_res['public_key']

        except Exception as e:
            log.exception(e)
            return {'error': 'Failure in parsing and fetching profile'}

        if user_profile is None or json_is_error(user_profile):
            if user_profile is None:
                log.error('no user profile for {}'.format(name))
            else:
                log.error('failed to load profile for {}: {}'.format(
                    name, user_profile['error']))

            return {'error': 'Failed to load user profile'}

    # finally, if the caller asked for the name record, and we didn't get a chance to look it up,
    # then go get it.
    ret = {
        'status': True,
        'profile': user_profile,
        'zonefile': user_zonefile,
        'public_key': user_profile_pubkey
    }

    if include_name_record:
        if name_record is None:
            name_record = get_name_blockchain_record(name, proxy=proxy)

        if name_record is None or 'error' in name_record:
            log.error('Failed to look up name record for "{}"'.format(name))
            return {'error': 'Failed to look up name record'}

        ret['name_record'] = name_record

    if include_raw_zonefile:
        if raw_zonefile is not None:
            ret['raw_zonefile'] = raw_zonefile

    return ret
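A minimal sketch of how a caller might consume get_profile's return value; the name below is a placeholder, and get_profile plus log are assumed to be importable from this module:

res = get_profile('someone.id', include_name_record=True)
if 'error' in res:
    log.error("Could not load profile: {}".format(res['error']))
else:
    profile = res['profile']
    zonefile = res['zonefile']
    # present because include_name_record=True was passed
    owner_address = res['name_record']['address']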
Example #3
def fast_sync_import(working_dir, import_url, public_keys=config.FAST_SYNC_PUBLIC_KEYS, num_required=len(config.FAST_SYNC_PUBLIC_KEYS), verbose=False):
    """
    Fast sync import.
    Verify the given fast-sync file fetched from @import_url using @public_keys, and then
    uncompress it into @working_dir.

    Verify that at least `num_required` public keys in `public_keys` signed.
    NOTE: `public_keys` needs to be in the same order as the private keys that signed.
    """

    def logmsg(s):
        if verbose:
            print s
        else:
            log.debug(s)

    def logerr(s):
        if verbose:
            print >> sys.stderr, s
        else:
            log.error(s)

    if working_dir is None or not os.path.exists(working_dir):
        logerr("No such directory {}".format(working_dir))
        return False

    # go get it 
    import_path = fast_sync_fetch(import_url)
    if import_path is None:
        logerr("Failed to fetch {}".format(import_url))
        return False

    # format: <signed bz2 payload> <sigb64> <sigb64 length (8 bytes hex)> ... <num signatures>
    file_size = 0
    try:
        sb = os.stat(import_path)
        file_size = sb.st_size
    except Exception as e:
        log.exception(e)
        return False

    num_signatures = 0
    ptr = file_size
    signatures = []

    with open(import_path, 'rb') as f:
        info = fast_sync_inspect( f )
        if 'error' in info:
            logerr("Failed to inspect snapshot {}: {}".format(import_path, info['error']))
            return False

        signatures = info['signatures']
        ptr = info['payload_size']

        # get the hash of the file 
        hash_hex = get_file_hash(f, hashlib.sha256, fd_len=ptr)
        
        # validate signatures over the hash
        logmsg("Verify {} bytes".format(ptr))
        key_idx = 0
        num_match = 0
        for next_pubkey in public_keys:
            for sigb64 in signatures:
                valid = verify_digest( hash_hex, keylib.ECPublicKey(next_pubkey).to_hex(), sigb64, hashfunc=hashlib.sha256 ) 
                if valid:
                    num_match += 1
                    if num_match >= num_required:
                        break
                    
                    logmsg("Public key {} matches {} ({})".format(next_pubkey, sigb64, hash_hex))
                    signatures.remove(sigb64)
                
                else:
                    logmsg("Public key {} does NOT match {} ({})".format(next_pubkey, sigb64, hash_hex))

        # enough signatures?
        if num_match < num_required:
            logerr("Not enough signatures match (required {}, found {})".format(num_required, num_match))
            return False

    # decompress
    import_path = os.path.abspath(import_path)
    res = fast_sync_snapshot_decompress(import_path, working_dir)
    if 'error' in res:
        logerr("Failed to decompress {} to {}: {}".format(import_path, working_dir, res['error']))
        return False

    # restore from backup
    rc = blockstack_backup_restore(working_dir, None)
    if not rc:
        logerr("Failed to instantiate blockstack name database")
        return False

    # success!
    logmsg("Restored to {}".format(working_dir))
    return True
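The signature check above is an M-of-N threshold test over one digest. A condensed sketch of that logic, assuming the same verify_digest, keylib, and hashlib helpers that the function above uses:

def enough_signatures(hash_hex, signatures, public_keys, num_required):
    # count how many of the trusted public keys produced a valid signature over hash_hex
    remaining = list(signatures)
    num_match = 0
    for pubkey in public_keys:
        for sigb64 in remaining:
            if verify_digest(hash_hex, keylib.ECPublicKey(pubkey).to_hex(), sigb64, hashfunc=hashlib.sha256):
                num_match += 1
                remaining.remove(sigb64)
                break

    return num_match >= num_required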
Example #4
def configure_zonefile(name, zonefile, data_pubkey):
    """
    Given a name and zonefile, help the user configure the
    zonefile information to store (just URLs for now).

    @zonefile must be parsed and must be a dict.

    Return the new zonefile on success
    Return None if the zonefile did not change.
    """

    from .zonefile import make_empty_zonefile
    from .user import user_zonefile_data_pubkey, user_zonefile_set_data_pubkey, user_zonefile_remove_data_pubkey, \
            user_zonefile_urls, add_user_zonefile_url, remove_user_zonefile_url, swap_user_zonefile_urls, \
            add_user_zonefile_txt, remove_user_zonefile_txt, user_zonefile_txts

    from .storage import get_drivers_for_url

    if zonefile is None:
        print('WARNING: No zonefile could be found.')
        print('WARNING: Creating an empty zonefile.')
        zonefile = make_empty_zonefile(name, data_pubkey)

    running = True
    do_update = True
    old_zonefile = {}
    old_zonefile.update(zonefile)

    while running:
        public_key = None
        try:
            public_key = user_zonefile_data_pubkey(zonefile)
        except ValueError:
            # multiple keys
            public_key = None

        urls = user_zonefile_urls(zonefile)
        if urls is None:
            urls = []

        txts = user_zonefile_txts(zonefile)
        if txts is None:
            txts = []

        url_drivers = {}

        # which drivers?
        for url in urls:
            drivers = get_drivers_for_url(url)
            url_drivers[url] = drivers

        print('-' * 80)

        if public_key is not None:
            print('Data public key: {}'.format(public_key))
        else:
            print('Data public key: (not set)')

        print('')
        print('Profile replicas ({}):'.format(len(urls)))
        if len(urls) > 0:
            for i in xrange(0, len(urls)):
                url = urls[i]
                drivers = get_drivers_for_url(url)
                print('({}) {}\n    Handled by drivers: [{}]'.format(
                    i + 1, url, ','.join([d.__name__ for d in drivers])))

        else:
            print('(none)')

        print('')

        # don't count the public key...
        print("TXT records ({}):".format(len(txts) - (1 if public_key else 0)))
        if len(txts) > 0:
            for i in xrange(0, len(txts)):
                # skip public key
                if txts[i]['name'] == 'pubkey':
                    continue

                print('{} "{}"'.format(txts[i]['name'], txts[i]['txt']))

        else:
            print("(none)")

        print('')
        print('What would you like to do?')
        print('(a) Add profile URL')
        print('(b) Remove profile URL')
        print('(c) Swap URL order')
        print('(d) Add TXT record')
        print('(e) Remove TXT record')
        print('(f) Set or change public key')
        print('(g) Save zonefile')
        print('(h) Do not save zonefile')
        print('')

        selection = raw_input('Selection: ').lower()

        if selection == 'h':
            do_update = False
            break

        elif selection == 'a':
            # add a url
            while True:
                try:
                    new_url = raw_input('Enter the new profile URL: ')
                except KeyboardInterrupt:
                    print('Keyboard interrupt')
                    return None

                new_url = new_url.strip()

                # do any drivers accept this URL?
                drivers = get_drivers_for_url(new_url)
                if len(drivers) == 0:
                    print('No drivers can handle "{}"'.format(new_url))
                    continue

                else:
                    # add to the zonefile
                    new_zonefile = add_user_zonefile_url(zonefile, new_url)
                    if new_zonefile is None:
                        print('Duplicate URL')
                        continue

                    else:
                        zonefile = new_zonefile
                        break

        elif selection == 'b':
            # remove a URL
            url_to_remove = None
            while True:
                try:
                    url_to_remove = raw_input(
                        'Which URL do you want to remove? ({}-{}): '.format(
                            1, len(urls)))
                    try:
                        url_to_remove = int(url_to_remove)
                        assert 1 <= url_to_remove and url_to_remove <= len(
                            urls)
                    except:
                        print('Bad selection')
                        continue

                except KeyboardInterrupt:
                    running = False
                    print('Keyboard interrupt')
                    return None

                if url_to_remove is not None:
                    # remove this URL
                    url = urls[url_to_remove - 1]

                    log.debug("Remove '{}'".format(url))

                    new_zonefile = remove_user_zonefile_url(zonefile, url)
                    if new_zonefile is None:
                        print(
                            'BUG: failed to remove url "{}" from zonefile\n{}\n'
                            .format(
                                url,
                                json.dumps(zonefile, indent=4,
                                           sort_keys=True)))
                        os.abort()

                    else:
                        zonefile = new_zonefile
                        break

                else:
                    print("Bad selection")

        elif selection == 'c':
            while True:
                # swap order
                try:
                    url_1 = raw_input(
                        'Which URL do you want to move? ({}-{}): '.format(
                            1, len(urls)))
                    url_2 = raw_input(
                        'Where do you want to move it?  ({}-{}): '.format(
                            1, len(urls)))
                except KeyboardInterrupt:
                    running = False
                    print('Keyboard interrupt')
                    return None

                try:
                    url_1 = int(url_1)
                    url_2 = int(url_2)

                    assert 1 <= url_1 <= len(urls)
                    assert 1 <= url_2 <= len(urls)
                    assert url_1 != url_2

                except:
                    print("Bad selection")
                    continue

                new_zonefile = swap_user_zonefile_urls(zonefile, url_1 - 1,
                                                       url_2 - 1)
                if new_zonefile is None:
                    print('BUG: failed to swap urls {} and {} in zonefile\n{}\n'
                          .format(
                              url_1, url_2,
                              json.dumps(zonefile, indent=4, sort_keys=True)))
                    os.abort()

                else:
                    zonefile = new_zonefile
                    break

        elif selection == 'd':
            # add txt record
            while True:
                try:
                    txtrec_name = raw_input("New TXT record name: ")
                    txtrec_txt = raw_input("New TXT record data: ")
                except KeyboardInterrupt:
                    running = False
                    print("Keyboard interrupt")
                    return None

                if txtrec_name == 'pubkey':
                    print("Change the ECDSA key explicitly")
                    break

                new_zonefile = add_user_zonefile_txt(zonefile, txtrec_name,
                                                     txtrec_txt)
                if new_zonefile is None:
                    print("Duplicate TXT record")
                    break

                else:
                    zonefile = new_zonefile
                    break

        elif selection == 'e':
            # remove txt record
            while True:
                try:
                    txtrec_name = raw_input('Name of TXT record to remove: ')
                except KeyboardInterrupt:
                    running = False
                    print("Keyboard interrupt")
                    return None

                if txtrec_name == 'pubkey':
                    print("Change the ECDSA key explicitly")
                    break

                new_zonefile = remove_user_zonefile_txt(zonefile, txtrec_name)
                if new_zonefile is None:
                    print("No such TXT record")
                    break

                else:
                    zonefile = new_zonefile
                    break

        elif selection == 'f':
            # change public key
            while True:
                try:
                    pubkey = raw_input(
                        "New ECDSA public key (empty for None): ")

                    if len(pubkey) > 0:
                        pubkey = keylib.ECPublicKey(pubkey).to_hex()

                except KeyboardInterrupt:
                    running = False
                    print("Keyboard interrupt")
                    return None

                except:
                    print("Invalid public key")
                    continue

                new_zonefile = None

                if len(pubkey) == 0:
                    # delete public key
                    new_zonefile = user_zonefile_remove_data_pubkey(zonefile)

                else:
                    # set public key
                    new_zonefile = user_zonefile_set_data_pubkey(
                        zonefile, pubkey)

                zonefile = new_zonefile
                break

        elif selection == 'g':
            # save zonefile
            break

        elif selection == 'h':
            # do not save zonefile
            return None

        log.debug("zonefile is now:\n{}".format(
            json.dumps(zonefile, indent=4, sort_keys=True)))

    if not do_update:
        # the user chose not to save; report the zonefile as unchanged
        return None

    return zonefile
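A hypothetical caller of configure_zonefile; zonefile_dict and data_pubkey_hex are placeholder names for a parsed zone file dict and a hex-encoded data public key:

new_zonefile = configure_zonefile(name, zonefile_dict, data_pubkey_hex)
if new_zonefile is None:
    # the user made no changes or chose not to save
    log.debug("Zone file for {} left unchanged".format(name))
else:
    # hand the updated zone file to whatever update/broadcast path the caller uses
    zonefile_dict = new_zonefile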
Example #5
def fast_sync_import(working_dir,
                     import_url,
                     public_keys=config.FAST_SYNC_PUBLIC_KEYS,
                     num_required=len(config.FAST_SYNC_PUBLIC_KEYS)):
    """
    Fast sync import.
    Verify the given fast-sync file fetched from @import_url using @public_keys, and then
    uncompress it into @working_dir.

    Verify that at least `num_required` public keys in `public_keys` signed.
    NOTE: `public_keys` needs to be in the same order as the private keys that signed.
    """

    # make sure we have the appropriate tools
    tools = ['tar', 'bzip2', 'mv']
    for tool in tools:
        rc = os.system("which {} > /dev/null".format(tool))
        if rc != 0:
            log.error("'{}' command not found".format(tool))
            return False

    if working_dir is None:
        working_dir = virtualchain.get_working_dir()

    if not os.path.exists(working_dir):
        log.error("No such directory {}".format(working_dir))
        return False

    # go get it
    import_path = fast_sync_fetch(import_url)
    if import_path is None:
        log.error("Failed to fetch {}".format(import_url))
        return False

    # format: <signed bz2 payload> <sigb64> <sigb64 length (8 bytes hex)> ... <num signatures>
    file_size = 0
    try:
        sb = os.stat(import_path)
        file_size = sb.st_size
    except Exception as e:
        log.exception(e)
        return False

    num_signatures = 0
    ptr = file_size
    signatures = []

    with open(import_path, 'rb') as f:
        info = fast_sync_inspect(f)
        if 'error' in info:
            log.error("Failed to inspect snapshot {}: {}".format(
                import_path, info['error']))
            return False

        signatures = info['signatures']
        ptr = info['payload_size']

        # get the hash of the file
        hash_hex = blockstack_client.storage.get_file_hash(f,
                                                           hashlib.sha256,
                                                           fd_len=ptr)

        # validate signatures over the hash
        log.debug("Verify {} bytes".format(ptr))
        key_idx = 0
        num_match = 0
        for next_pubkey in public_keys:
            for sigb64 in signatures:
                valid = blockstack_client.keys.verify_digest(
                    hash_hex,
                    keylib.ECPublicKey(next_pubkey).to_hex(),
                    sigb64,
                    hashfunc=hashlib.sha256)
                if valid:
                    num_match += 1
                    if num_match >= num_required:
                        break

                    log.debug("Public key {} matches {} ({})".format(
                        next_pubkey, sigb64, hash_hex))
                    signatures.remove(sigb64)

                elif os.environ.get("BLOCKSTACK_TEST") == "1":
                    log.debug("Public key {} does NOT match {} ({})".format(
                        next_pubkey, sigb64, hash_hex))

        # enough signatures?
        if num_match < num_required:
            log.error(
                "Not enough signatures match (required {}, found {})".format(
                    num_required, num_match))
            return False

    # decompress
    import_path = os.path.abspath(import_path)
    cmd = "cd '{}' && tar xf '{}'".format(working_dir, import_path)
    log.debug(cmd)
    rc = os.system(cmd)
    if rc != 0:
        log.error("Failed to decompress. Exit code {}. Command: {}".format(
            rc, cmd))
        return False

    # restore from backup
    rc = blockstack_backup_restore(working_dir, None)
    if not rc:
        log.error("Failed to instantiate blockstack name database")
        return False

    # success!
    log.debug("Restored to {}".format(working_dir))
    return True
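A hypothetical invocation of the importer above; the snapshot URL and working directory are placeholders:

snapshot_url = 'http://fast-sync.example.com/snapshot.bsk'   # placeholder URL
if not fast_sync_import('/var/blockstack', snapshot_url,
                        public_keys=config.FAST_SYNC_PUBLIC_KEYS,
                        num_required=1):
    log.error("Fast-sync import from {} failed".format(snapshot_url))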
Example #6
try:
    privkey = sys.argv[1]
    recipient_addr = sys.argv[2]

    if len(sys.argv) > 3:
        amount = int(sys.argv[3])

except Exception as e:
    traceback.print_exc()
    print >> sys.stderr, "Usage: {} privkey recipient_addr [amount]".format(
        sys.argv[0])
    sys.exit(1)

pubkey = keylib.ECPrivateKey(privkey, compressed=False).public_key().to_hex()
payment_addr = keylib.ECPublicKey(pubkey).address()

utxos = blockstack_client.get_utxos(payment_addr)
if len(utxos) == 0:
    print >> sys.stderr, "No UTXOS for {} ({})".format(payment_addr, pubkey)
    sys.exit(1)


def mktx(satoshis, fee):

    outputs = None
    if satoshis is None:
        # send all
        satoshis = sum([u['value'] for u in utxos])

        print 'WARN: sending all of {} ({}) to {}'.format(
    def test_high_volume_derivation(self):
        number_of_keys = 10
        public_keychain = PublicKeychain.from_public_key(self.public_key_hex)
        private_keychain = PrivateKeychain.from_private_key(self.private_key_hex)
        keypairs = []
        print ""
        for i in range(number_of_keys):
            print "making key %i of %i" % (i+1, number_of_keys)
            public_key = public_keychain.child(i).public_key()
            private_key = private_keychain.child(i).private_key()
            keypairs.append({ 'public': public_key, 'private': private_key })

        for i in range(len(keypairs)):
            keypair = keypairs[i]
            print "checking key %i of %i" % (i+1, number_of_keys)
            # self.assertEqual(privkey_to_pubkey(keypair['private']), keypair['public'])
            self.assertEqual(keylib.ECPrivateKey(keypair['private']).public_key().to_hex(), keylib.ECPublicKey(keypair['public']).to_hex())
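The test above checks that child keys derived from a public keychain line up with those derived from the matching private keychain. A single-key sketch of the same parity check, assuming the keychain package also exposes a no-argument PrivateKeychain() constructor and a public_keychain() method (both assumptions; adjust to the installed API):

import keylib
from keychain import PrivateKeychain

private_keychain = PrivateKeychain()                  # assumed: generates a fresh random keychain
public_keychain = private_keychain.public_keychain()  # assumed method name
child_priv = private_keychain.child(0).private_key()
child_pub = public_keychain.child(0).public_key()

# the public key derived from the child private key should equal the child public key
assert keylib.ECPrivateKey(child_priv).public_key().to_hex() == keylib.ECPublicKey(child_pub).to_hex()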