Example 1
def site_file(name: str, file: str)->Response:
    """Accept a site 'name', if pubkey then show multi-page site, if hash show single page site"""
    resp: str = 'Not Found'
    mime_type = mimetypes.MimeTypes().guess_type(file)[0]

    # If necessary convert the name to base32 from mnemonic
    if mnemonickeys.DELIMITER in name:
        name = mnemonickeys.get_base32(name)

    # Now make sure the name is a valid base32-format ed25519 key
    # (re-adding padding if necessary)
    if stringvalidators.validate_pub_key(name):
        name = unpaddedbase32.repad(name)
        resp = sitefiles.get_file(name, file)

    elif stringvalidators.validate_hash(name):
        try:
            resp = onionrblockapi.Block(name).bcontent
        except onionrexceptions.NoDataAvailable:
            abort(404)
        except TypeError:
            pass
        try:
            resp = base64.b64decode(resp)
        except binascii.Error:
            pass
    if resp == 'Not Found' or not resp:
        abort(404)
    return Response(resp, mimetype=mime_type)
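Usage note (illustration, not from the Onionr source): a handler like site_file() is meant to be mounted on a Flask route so that 'name' and 'file' come from the URL. The blueprint name, route patterns, and index.html default below are assumptions.

from flask import Blueprint

site_api = Blueprint('site_api', __name__)

# Hypothetical wiring; route patterns and the index.html default are assumed.
@site_api.route('/site/<name>/', defaults={'file': 'index.html'})
@site_api.route('/site/<name>/<path:file>')
def handle_site_file(name, file):
    # Delegates to the site_file() handler defined in the example above.
    return site_file(name, file)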
Example 2
    def get_block_data(self, bHash, decrypt=False, raw=False, headerOnly=False):
        if not stringvalidators.validate_hash(bHash):
            raise onionrexceptions.InvalidHexHash(
                "block hash not valid hash format")
        bl = onionrblockapi.Block(bHash)
        if decrypt:
            bl.decrypt()
            if bl.isEncrypted and not bl.decrypted:
                raise ValueError("Could not decrypt block")

        if not raw:
            if not headerOnly:
                retData = {'meta': bl.bheader,
                           'metadata': bl.bmetadata,
                           'content': bl.bcontent}
                for x in list(retData.keys()):
                    try:
                        retData[x] = retData[x].decode()
                    except AttributeError:
                        pass
            else:
                validSig = False
                signer = bytesconverter.bytes_to_str(bl.signer)
                if (bl.isSigned()
                        and stringvalidators.validate_pub_key(signer)
                        and bl.isSigner(signer)):
                    validSig = True
                bl.bheader['validSig'] = validSig
                bl.bheader['meta'] = ''
                retData = {'meta': bl.bheader, 'metadata': bl.bmetadata}
            return json.dumps(retData)
        else:
            return bl.raw
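A note on the decode loop above: block fields may arrive as bytes or as already-decoded str values, so the AttributeError raised by calling .decode() on a str is deliberately ignored. A standalone sketch of the same pattern, using made-up sample values:

# Illustration only; the sample values are invented, not real Onionr block data.
ret_data = {'meta': b'{"type": "txt"}', 'metadata': b'{}', 'content': 'hello'}
for key in list(ret_data.keys()):
    try:
        ret_data[key] = ret_data[key].decode()
    except AttributeError:
        pass  # value was already a str
print(ret_data)  # every value is now a str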
Example 3
def getBlockBodyData(name):
    """Return a Response with the decrypted body content of block 'name'."""
    resp = ''
    if stringvalidators.validate_hash(name):
        try:
            resp = onionrblockapi.Block(name, decrypt=True).bcontent
        except TypeError:
            pass
    else:
        abort(404)
    return Response(resp)
Example 4
def process_block_metadata(blockHash: str):
    '''
        Read metadata from a block and cache it to the block database

        blockHash -> sha3_256 hex formatted hash of Onionr block
    '''
    curTime = epoch.get_rounded_epoch(roundS=60)
    myBlock = onionrblockapi.Block(blockHash)
    if myBlock.isEncrypted:
        myBlock.decrypt()
    if (myBlock.isEncrypted
            and myBlock.decrypted) or (not myBlock.isEncrypted):
        blockType = myBlock.getMetadata(
            'type'
        )  # we would use myBlock.getType() here, but it is bugged with encrypted blocks

        signer = bytesconverter.bytes_to_str(myBlock.signer)
        valid = myBlock.verifySig()
        if valid:
            if myBlock.getMetadata('newFSKey') is not None:
                try:
                    onionrusers.OnionrUser(signer).addForwardKey(
                        myBlock.getMetadata('newFSKey'))
                except onionrexceptions.InvalidPubkey:
                    logger.warn(
                        '%s has invalid forward secrecy key to add: %s' %
                        (signer, myBlock.getMetadata('newFSKey')))

        try:
            if len(blockType) <= onionrvalues.MAX_BLOCK_TYPE_LENGTH:
                blockmetadb.update_block_info(blockHash, 'dataType', blockType)
        except TypeError:
            logger.warn("Missing block information")
        # Set block expire time if specified
        try:
            expireTime = int(myBlock.getHeader('expire'))
            # test that expire time is an integer of sane length (for epoch)
            # doesn't matter if it's too large because of the min() call below
            if not len(str(expireTime)) < 20:
                raise ValueError('timestamp invalid')
        except (ValueError, TypeError):
            expireTime = onionrvalues.DEFAULT_EXPIRE + curTime
        finally:
            expireTime = min(expireTime, curTime + onionrvalues.DEFAULT_EXPIRE)
            blockmetadb.update_block_info(blockHash, 'expire', expireTime)

        if blockType == 'update':
            updater.update_event(myBlock)
        onionrevents.event('processblocks',
                           data={
                               'block': myBlock,
                               'type': blockType,
                               'signer': signer,
                               'validSig': valid
                           })
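The expire handling above accepts whatever 'expire' header a block carries but always clamps it to curTime + DEFAULT_EXPIRE. A minimal standalone sketch of that clamp; the constant and timestamps below are stand-ins, not the real onionrvalues.DEFAULT_EXPIRE:

# Minimal sketch of the expire clamp; DEFAULT_EXPIRE is an assumed value here.
DEFAULT_EXPIRE = 2592000  # hypothetical 30 days in seconds
cur_time = 1700000000

requested_expire = cur_time + 99999999            # block asks for the far future
expire_time = min(requested_expire, cur_time + DEFAULT_EXPIRE)
assert expire_time == cur_time + DEFAULT_EXPIRE   # the request was capped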
Example 5
def find_site_gzip(user_id: str) -> tarfile.TarFile:
    """Return verified site tar object"""
    sites = blockmetadb.get_blocks_by_type('osite')
    user_site = None
    user_id = unpaddedbase32.repad(user_id)
    for site in sites:
        block = onionrblockapi.Block(site)
        if block.isSigner(user_id):
            user_site = block
    if user_site is not None:
        return tarfile.open(fileobj=io.BytesIO(user_site.bcontent), mode='r')
    return None
Example 6
def block_mixer(upload_list: List[onionrtypes.BlockHash],
                block_to_mix: onionrtypes.BlockHash):
    """Delay and mix block inserts.

    Take a block list and a received/created block and add the block
    to that list.
    """
    bl = onionrblockapi.Block(block_to_mix)

    if time.time() - bl.claimedTime > onionrvalues.BLOCK_POOL_MAX_AGE:
        raise ValueError("Block is too old to mix")
    if block_to_mix:
        upload_list.append(block_to_mix)
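block_mixer() rejects a block whose claimed time is older than BLOCK_POOL_MAX_AGE before it ever reaches the upload list. A standalone sketch of that age gate; the constant and the example timestamp are assumptions:

# Sketch of the age gate in block_mixer(); BLOCK_POOL_MAX_AGE is assumed here.
import time

BLOCK_POOL_MAX_AGE = 300                  # hypothetical five-minute pool limit
claimed_time = time.time() - 600          # block claims to be ten minutes old

if time.time() - claimed_time > BLOCK_POOL_MAX_AGE:
    print('rejecting block: claimed time exceeds the pool age limit')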
Example 7
def load_inbox():
    """Return hashes of decrypted 'pm' blocks that are not marked deleted."""
    inbox_list = []
    deleted = simplekv.DeadSimpleKV(identifyhome.identify_home() +
                                    '/mailcache.dat').get('deleted_mail')
    if deleted is None:
        deleted = []

    for blockHash in blockmetadb.get_blocks_by_type('pm'):
        block = onionrblockapi.Block(blockHash)
        block.decrypt()
        if block.decrypted and reconstructhash.deconstruct_hash(
                blockHash) not in deleted:
            inbox_list.append(blockHash)
    return inbox_list
Example 8
def test_lan_server(testmanager):
    if onionrvalues.IS_QUBES:
        logger.warn("Cannot test LAN on QubesOS", terminal=True)
        return
    start_time = get_epoch()
    for i in range(1337, 1340):
        try:
            if not best_ip or not best_ip.startswith('192.168'):
                logger.warn(
                    "lanservertest not running, not in a standard 192.168 LAN. " +
                    "Run this test on a LAN before release",
                    terminal=True)
                return
            if requests.get(f"http://{best_ip}:{i}/ping").text == 'onionr!':
                bl = insert('test data')
                sleep(10)
                bl2 = insert('test data2')
                sleep(30)
                bl3 = insert('test data3')
                l = requests.get(f"http://{best_ip}:{i}/blist/0").text.split(
                    '\n')
                if bl not in l or bl2 not in l or bl3 not in l:
                    logger.error('blocks not in blist ' + '-'.join(l))
                    raise ValueError
                time = blockmetadb.get_block_date(bl3) - 1
                l = requests.get(
                    f"http://{best_ip}:{i}/blist/{time}").text.split('\n')

                if (bl in l and bl2 in l and bl3 in l) or len(l) == 0:
                    logger.error('Failed to get appropriate time ' + '-'.join(l))
                    raise ValueError
                if onionrblockapi.Block(bl).raw != requests.get(
                        f"http://{best_ip}:{i}/get/{bl}",
                        stream=True).raw.read(6000000):
                    logger.error('Block doesn\'t match')
                    raise ValueError

                break

        except requests.exceptions.ConnectionError:
            pass
    else:
        raise ValueError
Example 9
def service_creator(daemon):
    assert isinstance(daemon, communicator.OnionrCommunicatorDaemon)
    
    # Find socket connection blocks
    # TODO cache blocks and only look at recently received ones
    con_blocks = blockmetadb.get_blocks_by_type('con')
    for b in con_blocks:
        if b not in daemon.active_services:
            bl = onionrblockapi.Block(b, decrypt=True)
            bs = bytesconverter.bytes_to_str(bl.bcontent) + '.onion'
            if server_exists(bl.signer):
                continue
            if stringvalidators.validate_pub_key(bl.signer) \
                    and stringvalidators.validate_transport(bs):
                signer = bytesconverter.bytes_to_str(bl.signer)
                daemon.active_services.append(b)
                daemon.active_services.append(signer)
                if not daemon.services.create_server(signer, bs, daemon):
                    daemon.active_services.remove(b)
                    daemon.active_services.remove(signer)
    daemon.decrementThreadCount('service_creator')
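A small design note on service_creator(): daemon.active_services is treated as a list, so each 'b not in daemon.active_services' check is a linear scan. If many services accumulate, a set gives the same de-duplication with constant-time lookups; a minimal sketch with a made-up identifier:

# Illustration only, not Onionr code: de-duplicated tracking with a set.
active_services = set()

block_hash = 'abc123'                 # hypothetical block identifier
if block_hash not in active_services:
    active_services.add(block_hash)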
Example 10
def upload_blocks_from_communicator(comm_inst: 'OnionrCommunicatorDaemon'):
    """Accept a communicator instance + upload blocks from its upload queue."""
    """when inserting a block, we try to upload
     it to a few peers to add some deniability & increase functionality"""
    kv: "DeadSimpleKV" = comm_inst.shared_state.get_by_string("DeadSimpleKV")
    TIMER_NAME = "upload_blocks_from_communicator"

    session_manager: sessionmanager.BlockUploadSessionManager
    session_manager = comm_inst.shared_state.get(
        sessionmanager.BlockUploadSessionManager)
    tried_peers: List[UserID] = []
    finishedUploads = []
    kv.put('blocksToUpload',
           onionrcrypto.cryptoutils.random_shuffle(kv.get('blocksToUpload')))

    def remove_from_hidden(bl):
        sleep(60)
        try:
            comm_inst.shared_state.get_by_string(
                'PublicAPI').hideBlocks.remove(bl)
        except ValueError:
            pass

    if len(kv.get('blocksToUpload')) != 0:
        for bl in kv.get('blocksToUpload'):
            if not stringvalidators.validate_hash(bl):
                logger.warn('Requested to upload invalid block', terminal=True)
                comm_inst.decrementThreadCount(TIMER_NAME)
                return
            session = session_manager.add_session(bl)
            for _ in range(min(len(kv.get('onlinePeers')), 6)):
                try:
                    peer = onlinepeers.pick_online_peer(comm_inst)
                except onionrexceptions.OnlinePeerNeeded:
                    continue
                try:
                    session.peer_exists[peer]
                    continue
                except KeyError:
                    pass
                try:
                    if session.peer_fails[peer] > 3:
                        continue
                except KeyError:
                    pass
                if peer in tried_peers:
                    continue
                tried_peers.append(peer)
                url = f'http://{peer}/upload'
                try:
                    data = block.Block(bl).getRaw()
                except onionrexceptions.NoDataAvailable:
                    finishedUploads.append(bl)
                    break
                proxy_type = proxypicker.pick_proxy(peer)
                logger.info(f"Uploading block {bl[:8]} to {peer}",
                            terminal=True)
                resp = basicrequests.do_post_request(
                    url,
                    data=data,
                    proxyType=proxy_type,
                    content_type='application/octet-stream')
                if resp is not False:
                    if resp == 'success':
                        Thread(target=remove_from_hidden,
                               args=[bl],
                               daemon=True).start()
                        session.success()
                        session.peer_exists[peer] = True
                    elif resp == 'exists':
                        session.success()
                        session.peer_exists[peer] = True
                    else:
                        session.fail()
                        session.fail_peer(peer)
                        comm_inst.getPeerProfileInstance(peer).addScore(-5)
                        logger.warn(
                            f'Failed to upload {bl[:8]}, reason: {resp}',
                            terminal=True)
                else:
                    session.fail()
        session_manager.clean_session()
    for x in finishedUploads:
        try:
            kv.get('blocksToUpload').remove(x)

            comm_inst.shared_state.get_by_string(
                'PublicAPI').hideBlocks.remove(x)

        except ValueError:
            pass
    comm_inst.decrementThreadCount(TIMER_NAME)
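The peer_exists / peer_fails lookups above use try/except KeyError as a membership test. The same checks can be written with 'in' and dict.get(), which some readers find clearer; a standalone sketch with invented data:

# Equivalent membership checks with dict.get(); the data here is made up.
peer_fails = {'peer-a': 4}
peer_exists = {}
peer = 'peer-a'

if peer in peer_exists:
    print('skip: already uploaded to this peer')
elif peer_fails.get(peer, 0) > 3:
    print('skip: peer has failed too many times')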
Example 11
def upload_blocks_from_communicator(comm_inst: OnionrCommunicatorDaemon):
    """Accepts a communicator instance and uploads blocks from its upload queue"""
    """when inserting a block, we try to upload
     it to a few peers to add some deniability & increase functionality"""
    TIMER_NAME = "upload_blocks_from_communicator"

    session_manager: sessionmanager.BlockUploadSessionManager
    session_manager = comm_inst.shared_state.get(
        sessionmanager.BlockUploadSessionManager)
    triedPeers = []
    finishedUploads = []
    comm_inst.blocksToUpload = onionrcrypto.cryptoutils.random_shuffle(
        comm_inst.blocksToUpload)
    if len(comm_inst.blocksToUpload) != 0:
        for bl in comm_inst.blocksToUpload:
            if not stringvalidators.validate_hash(bl):
                logger.warn('Requested to upload invalid block', terminal=True)
                comm_inst.decrementThreadCount(TIMER_NAME)
                return
            session = session_manager.add_session(bl)
            for i in range(min(len(comm_inst.onlinePeers), 6)):
                peer = onlinepeers.pick_online_peer(comm_inst)
                try:
                    session.peer_exists[peer]
                    continue
                except KeyError:
                    pass
                try:
                    if session.peer_fails[peer] > 3:
                        continue
                except KeyError:
                    pass
                if peer in triedPeers:
                    continue
                triedPeers.append(peer)
                url = f'http://{peer}/upload'
                try:
                    data = block.Block(bl).getRaw()
                except onionrexceptions.NoDataAvailable:
                    finishedUploads.append(bl)
                    break
                proxyType = proxypicker.pick_proxy(peer)
                logger.info(f"Uploading block {bl[:8]} to {peer}",
                            terminal=True)
                resp = basicrequests.do_post_request(
                    url,
                    data=data,
                    proxyType=proxyType,
                    content_type='application/octet-stream')
                if resp is not False:
                    if resp == 'success':
                        session.success()
                        session.peer_exists[peer] = True
                    elif resp == 'exists':
                        session.success()
                        session.peer_exists[peer] = True
                    else:
                        session.fail()
                        session.fail_peer(peer)
                        comm_inst.getPeerProfileInstance(peer).addScore(-5)
                        logger.warn(
                            f'Failed to upload {bl[:8]}, reason: {resp[:15]}',
                            terminal=True)
                else:
                    session.fail()
        session_manager.clean_session()
    for x in finishedUploads:
        try:
            comm_inst.blocksToUpload.remove(x)
        except ValueError:
            pass
    comm_inst.decrementThreadCount(TIMER_NAME)
Example 12
 def test_encrypted_insert(self):
     setup_test()
     message = 'hello world2'
     bl = onionrblocks.insert(message, asymPeer=onionrcrypto.pub_key)
     self.assertIn(bytesconverter.str_to_bytes(message),
                   onionrblockapi.Block(bl, decrypt=True).bcontent)
Example 13
 def test_time_insert_10(self):
     bl = time_insert('test', delay=10)
     self.assertTrue(bl)
     bl = onionrblockapi.Block(bl)
     self.assertEqual(bl.bmetadata['dly'], 10)
Example 14
 def test_time_insert_none(self):
     bl = time_insert('test')
     self.assertTrue(bl)
     bl = onionrblockapi.Block(bl)
     self.assertEqual(bl.bmetadata['dly'], 0)
Example 15
def upload_blocks_from_communicator(shared_state: 'OnionrCommunicatorDaemon'):
    """Accept a communicator instance + upload blocks from its upload queue."""
    """when inserting a block, we try to upload
     it to a few peers to add some deniability & increase functionality"""
    kv: "DeadSimpleKV" = shared_state.get_by_string("DeadSimpleKV")

    session_manager: sessionmanager.BlockUploadSessionManager
    session_manager = shared_state.get(
        sessionmanager.BlockUploadSessionManager)
    tried_peers: List[UserID] = []
    finishedUploads = []

    SystemRandom().shuffle(kv.get('blocksToUpload'))

    def remove_from_hidden(bl):
        sleep(60)
        try:
            shared_state.get_by_string(
                'PublicAPI').hideBlocks.remove(bl)
        except ValueError:
            pass

    if len(kv.get('blocksToUpload')) != 0:
        for bl in kv.get('blocksToUpload'):
            if not stringvalidators.validate_hash(bl):
                logger.warn('Requested to upload invalid block', terminal=True)
                return
            session = session_manager.add_session(bl)
            for _ in range(min(len(kv.get('onlinePeers')), 6)):
                try:
                    peer = onlinepeers.pick_online_peer(kv)
                    if not block.Block(bl).isEncrypted:
                        if peer in kv.get('plaintextDisabledPeers'):
                            logger.info(f"Cannot upload plaintext block to peer that denies it {peer}")  # noqa
                            continue
                except onionrexceptions.OnlinePeerNeeded:
                    continue
                try:
                    session.peer_exists[peer]
                    continue
                except KeyError:
                    pass
                try:
                    if session.peer_fails[peer] > 3:
                        continue
                except KeyError:
                    pass
                if peer in tried_peers:
                    continue
                tried_peers.append(peer)
                url = f'http://{peer}/upload'
                try:
                    data = block.Block(bl).getRaw()
                    if not data:
                        logger.warn(
                            f"Couldn't get data for block in upload list {bl}",
                            terminal=True)
                        raise onionrexceptions.NoDataAvailable
                    try:
                        def __check_metadata():
                            metadata = get_block_metadata_from_data(data)[0]
                            if not validate_metadata(metadata, data):
                                logger.warn(
                                    f"Metadata for uploading block not valid {bl}")
                                raise onionrexceptions.InvalidMetadata
                        __check_metadata()
                    except onionrexceptions.DataExists:
                        pass
                except(  # noqa
                        onionrexceptions.NoDataAvailable,
                        onionrexceptions.InvalidMetadata) as _:
                    finishedUploads.append(bl)
                    break
                proxy_type = proxypicker.pick_proxy(peer)
                logger.info(
                    f"Uploading block {bl[:8]} to {peer}", terminal=True)
                resp = basicrequests.do_post_request(
                    url, data=data, proxyType=proxy_type,
                    content_type='application/octet-stream')
                if resp is not False:
                    if resp == 'success':
                        Thread(target=remove_from_hidden,
                               args=[bl], daemon=True).start()
                        session.success()
                        session.peer_exists[peer] = True
                    elif resp == 'exists':
                        session.success()
                        session.peer_exists[peer] = True
                    else:
                        session.fail()
                        session.fail_peer(peer)
                        shared_state.get_by_string(
                            'OnionrCommunicatorDaemon').getPeerProfileInstance(
                                peer).addScore(-5)
                        logger.warn(
                            f'Failed to upload {bl[:8]}, reason: {resp}',
                            terminal=True)
                else:
                    session.fail()
        session_manager.clean_session()
    for x in finishedUploads:
        try:
            kv.get('blocksToUpload').remove(x)

            shared_state.get_by_string(
                'PublicAPI').hideBlocks.remove(x)

        except ValueError:
            pass