Ejemplo n.º 1
0
 def _on_extract_done(self, retcode, backupID, source_filename,
                      output_location, backup_index):
     """
     Collect archived messages from an extracted snapshot folder.

     Every file found in `output_location` is deserialized and the
     messages in its 'items' list are appended to
     `self.extracted_messages`, skipping messages that fall outside the
     configured [start_sequence_id, end_sequence_id] window.  Finally the
     restore of the next backup in the chain is triggered.
     """
     tmpfile.throw_out(source_filename, 'file extracted')
     for snapshot_name in os.listdir(output_location):
         snapshot_data = serialization.BytesToDict(
             local_fs.ReadBinaryFile(os.path.join(output_location, snapshot_name)),
             values_to_text=True,
         )
         for msg in snapshot_data.get('items', []):
             # drop messages before the requested window start
             if self.start_sequence_id is not None and msg['sequence_id'] < self.start_sequence_id:
                 continue
             # drop messages after the requested window end
             if self.end_sequence_id is not None and msg['sequence_id'] > self.end_sequence_id:
                 continue
             self.extracted_messages.append(msg)
     if _Debug:
         lg.dbg(
             _DebugLevel,
             'archive snapshot %r extracted successfully to %r, extracted %d archive messages so far'
             % (
                 source_filename,
                 output_location,
                 len(self.extracted_messages),
             ))
     self._do_restore_next_backup(backup_index + 1)
     return retcode
Ejemplo n.º 2
0
def default_nodes():
    """
    Return the list of DHT seed nodes currently maintained for the active
    network, as (host, UDP port number) tuples read from networks.json.
    """
    from system import bpio
    from system import local_fs
    from lib import serialization
    from main import settings
    from logs import lg
    networks_json = serialization.BytesToDict(
        local_fs.ReadBinaryFile(
            os.path.join(bpio.getExecutableDir(), 'networks.json')))
    my_network = local_fs.ReadTextFile(settings.NetworkFileName()).strip()
    # fall back to the "main" network when unset or unknown
    if not my_network or my_network not in networks_json:
        my_network = 'main'
    network_info = networks_json[my_network]
    dht_seeds = [(seed['host'], seed['udp_port'], ) for seed in network_info['dht-seeds']]
    lg.info('Active network is [%s]   dht_seeds=%s' % (
        my_network,
        dht_seeds,
    ))
    return dht_seeds
Ejemplo n.º 3
0
def eraseLocalIdentity(do_backup=True):
    """
    Delete my local identity file from disk.

    When `do_backup` is True and the identity file exists and is non-empty,
    a recoverable copy is first written to a unique temp file inside the
    metadata directory.

    Returns True when the file was removed or did not exist at all,
    False when the path is not a regular file or removal failed.
    """
    if do_backup:
        if os.path.isfile(settings.LocalIdentityFilename()):
            current_identity_xmlsrc = local_fs.ReadBinaryFile(settings.LocalIdentityFilename())
            if current_identity_xmlsrc:
                # keep a recoverable copy before erasing the original
                fd, fname = tempfile.mkstemp(prefix='localidentity_', dir=settings.MetaDataDir())
                os.write(fd, current_identity_xmlsrc)
                os.close(fd)
                lg.info('created backup copy of my local identity in the file : %r' % fname)
    filename = bpio.portablePath(settings.LocalIdentityFilename())
    if not os.path.exists(filename):
        if _Debug:
            lg.out(_DebugLevel, "my_id.eraseLocalIdentity SKIP file %s not exist" % filename)
        return True
    if not os.path.isfile(filename):
        if _Debug:
            lg.out(_DebugLevel, "my_id.eraseLocalIdentity ERROR path %s is not a file" % filename)
        return False
    try:
        os.remove(filename)
    except Exception:
        # narrowed from a bare `except:` so SystemExit/KeyboardInterrupt propagate
        lg.exc()
        return False
    events.send('local-identity-erased', data=dict())
    if _Debug:
        lg.out(_DebugLevel, "my_id.eraseLocalIdentity file %s was deleted" % filename)
    return True
Ejemplo n.º 4
0
def get_latest_ident(pub_key):
    """
    Return the identity object with the highest revision stored in the
    local identity history for the given public key, or None when the
    public key is unknown or no valid identity file is found.

    Broken history entries (not correct, not valid, or with a mismatched
    public key) are logged and skipped.
    """
    global _KnownUsers
    from userid import identity
    user_path = _KnownUsers.get(pub_key)
    if not user_path:
        return None
    # history files are named by integer sequence number
    user_identity_files = sorted(map(int, os.listdir(user_path)))
    if len(user_identity_files) == 0:
        lg.warn('identity history is broken, public key is known, but no identity files found')
    latest_revision = -1
    latest_ident = None
    # NOTE: removed dead accumulators `known_revisions` and `for_cleanup`,
    # they were populated but never used in the original implementation
    for id_file in user_identity_files:
        identity_file_path = os.path.join(user_path, strng.to_text(id_file))
        xmlsrc = local_fs.ReadBinaryFile(identity_file_path)
        one_id_obj = identity.identity(xmlsrc=xmlsrc)
        if not one_id_obj.isCorrect():
            lg.warn('identity history is broken, identity in the file %r is not correct' % identity_file_path)
            continue
        if not one_id_obj.Valid():
            lg.warn('identity history is broken, identity in the file %r is not valid' % identity_file_path)
            continue
        if pub_key != one_id_obj.getPublicKey():
            lg.err('identity history is broken, public key not matching in the file %r' % identity_file_path)
            continue
        if one_id_obj.getRevisionValue() > latest_revision:
            latest_revision = one_id_obj.getRevisionValue()
            latest_ident = one_id_obj
    return latest_ident
Ejemplo n.º 5
0
def default_nodes():
    """
    Return the identity servers currently maintained for the active
    network, see file networks.json in the root folder.

    The result maps binary host name to a (http_port, tcp_port) tuple.
    """
    from system import bpio
    from system import local_fs
    from lib import serialization
    from lib import strng
    from main import settings
    networks_json = serialization.BytesToDict(
        local_fs.ReadBinaryFile(
            os.path.join(bpio.getExecutableDir(), 'networks.json')),
        keys_to_text=True,
        values_to_text=True,
    )
    my_network = local_fs.ReadTextFile(settings.NetworkFileName()).strip()
    # fall back to the "main" network when unset or unknown
    if not my_network or my_network not in networks_json:
        my_network = 'main'
    network_info = networks_json[my_network]
    identity_servers = {
        strng.to_bin(srv['host']): (srv['http_port'], srv['tcp_port'], )
        for srv in network_info['identity-servers']
    }
    return identity_servers
Ejemplo n.º 6
0
def ForgetMyKey(keyfilename=None, erase_file=False, do_backup=False):
    """
    Remove Private Key from memory.

    With `erase_file` the key file is also deleted from disk; with
    `do_backup` a copy is first stored in a temp file inside the
    metadata directory.
    """
    global _MyKeyObject
    if _MyKeyObject:
        _MyKeyObject.forget()
    _MyKeyObject = None
    if not erase_file:
        return
    if keyfilename is None:
        keyfilename = settings.KeyFileName()
    if do_backup and os.path.isfile(keyfilename):
        current_pk_src = local_fs.ReadBinaryFile(keyfilename)
        if current_pk_src:
            # keep a recoverable copy before erasing the key file
            fd, fname = tempfile.mkstemp(prefix='mykeyfile_',
                                         dir=settings.MetaDataDir())
            os.write(fd, current_pk_src)
            os.close(fd)
            lg.info(
                'created backup copy of my private key in the file : %r'
                % fname)
    if os.path.isfile(keyfilename):
        os.remove(keyfilename)
        lg.info('local private key erased, deleted file : %r' %
                keyfilename)
Ejemplo n.º 7
0
def read_network_config_file():
    """
    Locate, load and deserialize the network configuration JSON file.
    """
    raw = local_fs.ReadBinaryFile(find_network_config_file())
    return serialization.BytesToDict(
        raw,
        keys_to_text=True,
        values_to_text=True,
    )
Ejemplo n.º 8
0
def serve_https(port):
    """
    Start the REST API server over HTTPS on the given local port.

    Loads (or, on first run, generates and persists) a server key and
    certificate plus a client key and certificate, then binds a Twisted
    SSL listener on 127.0.0.1 only.  On any listener setup failure the
    whole process is terminated via os._exit(1).
    """
    global _APIListener
    from crypt import certificate

    # server private key
    if os.path.exists(settings.APIServerCertificateKeyFile()):
        server_key_pem = local_fs.ReadBinaryFile(settings.APIServerCertificateKeyFile())
        server_key = certificate.load_private_key(server_key_pem)
    else:
        # first run: generate a new server key and store it for next time
        server_key, server_key_pem = certificate.generate_private_key()
        local_fs.WriteBinaryFile(settings.APIServerCertificateKeyFile(), server_key_pem)
    # server certificate
    if os.path.exists(settings.APIServerCertificateFile()):
        server_cert_pem = local_fs.ReadBinaryFile(settings.APIServerCertificateFile())
    else:
        server_cert_pem = certificate.generate_self_signed_cert(
            hostname=u'localhost',
            ip_addresses=[u'127.0.0.1', ],
            server_key=server_key,
        )
        local_fs.WriteBinaryFile(settings.APIServerCertificateFile(), server_cert_pem)
    # client private key
    if os.path.exists(settings.APIClientCertificateKeyFile()):
        client_key_pem = local_fs.ReadBinaryFile(settings.APIClientCertificateKeyFile())
        client_key = certificate.load_private_key(client_key_pem)
    else:
        client_key, client_key_pem = certificate.generate_private_key()
        local_fs.WriteBinaryFile(settings.APIClientCertificateKeyFile(), client_key_pem)
    # client certificate
    if os.path.exists(settings.APIClientCertificateFile()):
        client_cert_pem = local_fs.ReadBinaryFile(settings.APIClientCertificateFile())
        ca_cert_pem = local_fs.ReadBinaryFile(settings.APIServerCertificateFile())
    else:
        # client cert is issued against the server certificate acting as CA
        ca_cert_pem = local_fs.ReadBinaryFile(settings.APIServerCertificateFile())
        ca_cert = certificate.load_certificate(ca_cert_pem)
        client_cert_pem = certificate.generate_csr_client_cert(
            hostname=u'localhost',
            server_ca_cert=ca_cert,
            server_key=server_key,
            client_key=client_key,
        )
        local_fs.WriteBinaryFile(settings.APIClientCertificateFile(), client_cert_pem)

    try:
        from twisted.internet import ssl
        api_resource = BitDustRESTHTTPServer()
        site = BitDustAPISite(api_resource, timeout=None)
        # the server certificate is also the trust root given to clients
        auth = ssl.Certificate.loadPEM(server_cert_pem)
        # loadPEM takes certificate and private key concatenated into one PEM blob
        cert = ssl.PrivateCertificate.loadPEM(server_cert_pem + server_key_pem)
        _APIListener = reactor.listenSSL(port, site, cert.options(auth), interface='127.0.0.1')  # @UndefinedVariable
    except:
        # NOTE(review): bare except + os._exit(1) aborts the process on any startup error
        lg.exc()
        os._exit(1)
Ejemplo n.º 9
0
def identity_cached(new_id_obj):
    """
    After receiving identity file of another user we need to check his identity sources.
    I can be file from identity server or Identity() packet received directly from remote peer.
    Also it can be my own identity that was changed locally.
    In any case we need to take certain actions if those identity sources changed.
    First identity source forms IDURL of that identity and act as unique global ID of that BitDust node.
    When first identity source changed (because identity server went down) identity is "rotated":
    second identity source will be placed on the first position and IDURL will change.
    In that case we need to remember new IDURL and keep track of old IDURL of that user - this way we can
    match and merge different IDURL's for one owner.
    """
    global _IdentityHistoryDir
    global _KnownUsers
    global _KnownIDURLs
    global _MergedIDURLs
    from userid import identity
    pub_key = new_id_obj.getPublicKey()
    user_name = new_id_obj.getIDName()
    if _Debug:
        lg.args(_DebugLevel, user_name=user_name)
    is_identity_rotated = False
    latest_id_obj = None
    latest_sources = []
    if pub_key not in _KnownUsers:
        # first time we see this public key: create a per-user history folder
        # and store this identity as history item "0"
        user_path = tempfile.mkdtemp(prefix=user_name+'@', dir=_IdentityHistoryDir)
        _KnownUsers[pub_key] = user_path
        first_identity_file_path = os.path.join(user_path, '0')
        local_fs.WriteBinaryFile(first_identity_file_path, new_id_obj.serialize())
        if _Debug:
            lg.out(_DebugLevel, 'id_url.identity_cached wrote first item for user %r in identity history: %r' % (
                user_name, first_identity_file_path))
    else:
        # known user: scan the existing history files (named by integer
        # sequence number) to find the latest stored revision
        user_path = _KnownUsers[pub_key]
        user_identity_files = sorted(map(int, os.listdir(user_path)))
        if len(user_identity_files) == 0:
            raise Exception('identity history for user %r is broken, public key is known, but no identity files found' % user_name)
        latest_identity_file_path = ''
        latest_pub_key = None
        latest_revision = -1
        known_revisions = set()
        for id_file in user_identity_files:
            identity_file_path = os.path.join(user_path, strng.to_text(id_file))
            xmlsrc = local_fs.ReadBinaryFile(identity_file_path)
            one_id_obj = identity.identity(xmlsrc=xmlsrc)
            # broken history entries are logged and skipped, not fatal
            if not one_id_obj.isCorrect():
                lg.err('identity history for user %r is broken, identity in the file %r is not correct' % (user_name, identity_file_path))
                continue
            if not one_id_obj.Valid():
                lg.err('identity history for user %r is broken, identity in the file %r is not valid' % (user_name, identity_file_path))
                continue
            if not latest_pub_key:
                latest_pub_key = one_id_obj.getPublicKey()
            if latest_pub_key != one_id_obj.getPublicKey():
                lg.err('identity history for user %r is broken, public key not matching in the file %r' % (user_name, identity_file_path))
                continue
            known_revisions.add(one_id_obj.getRevisionValue())
            if one_id_obj.getRevisionValue() > latest_revision:
                latest_revision = one_id_obj.getRevisionValue()
                latest_identity_file_path = identity_file_path
        # re-read the latest stored identity and sanity-check it against the new one
        xmlsrc = local_fs.ReadBinaryFile(latest_identity_file_path)
        latest_id_obj = identity.identity(xmlsrc=xmlsrc)
        if latest_id_obj.getPublicKey() != new_id_obj.getPublicKey():
            raise Exception('identity history for user %r is broken, public key not matching' % user_name)
        if latest_id_obj.getIDName() != new_id_obj.getIDName():
            lg.warn('found another user name in identity history for user %r : %r' % (user_name, latest_id_obj.getIDName()))
        if new_id_obj.getRevisionValue() in known_revisions:
            if _Debug:
                lg.out(_DebugLevel, 'id_url.identity_cached revision %d already known for user %r' % (new_id_obj.getRevisionValue(), user_name))
        else:
            # new revision: if the identity sources are unchanged, overwrite the
            # latest history file; if they changed, append a new history item and
            # mark the identity as rotated
            latest_sources = latest_id_obj.getSources(as_originals=True)
            new_sources = new_id_obj.getSources(as_originals=True)
            if latest_sources == new_sources:
                local_fs.WriteBinaryFile(latest_identity_file_path, new_id_obj.serialize())
                if _Debug:
                    lg.out(_DebugLevel, 'id_url.identity_cached latest identity sources for user %r did not changed, updated file %r' % (
                        user_name, latest_identity_file_path))
            else:
                next_identity_file = user_identity_files[-1] + 1
                next_identity_file_path = os.path.join(user_path, strng.to_text(next_identity_file))
                local_fs.WriteBinaryFile(next_identity_file_path, new_id_obj.serialize())
                is_identity_rotated = True
                if _Debug:
                    lg.out(_DebugLevel, 'id_url.identity_cached identity sources for user %r changed, wrote new item in the history: %r' % (
                        user_name, next_identity_file_path))
    new_revision = new_id_obj.getRevisionValue()
    new_sources = new_id_obj.getSources(as_originals=True)
    # register every identity source in the IDURL and merged-IDURL indexes
    for new_idurl in reversed(new_sources):
        if new_idurl not in _KnownIDURLs:
            _KnownIDURLs[new_idurl] = new_id_obj.getPublicKey()
            if _Debug:
                lg.out(_DebugLevel, 'id_url.identity_cached new IDURL added: %r' % new_idurl)
        else:
            if _KnownIDURLs[new_idurl] != new_id_obj.getPublicKey():
                lg.warn('another user had same identity source: %r' % new_idurl)
                _KnownIDURLs[new_idurl] = new_id_obj.getPublicKey()
        if pub_key not in _MergedIDURLs:
            _MergedIDURLs[pub_key] = {}
            if _Debug:
                lg.out(_DebugLevel, 'id_url.identity_cached new Public Key added: %s...' % pub_key[-10:])
        prev_idurl = _MergedIDURLs[pub_key].get(new_revision, None)
        if new_revision in _MergedIDURLs[pub_key]:
            if _MergedIDURLs[pub_key][new_revision] != new_idurl:
                if nameurl.GetName(_MergedIDURLs[pub_key][new_revision]) == nameurl.GetName(new_idurl):
                    if _MergedIDURLs[pub_key][new_revision] not in new_sources:
                        lg.warn('rewriting existing identity revision %d : %r -> %r' % (
                        new_revision, _MergedIDURLs[pub_key][new_revision], new_idurl))
            _MergedIDURLs[pub_key][new_revision] = new_idurl
        else:
            _MergedIDURLs[pub_key][new_revision] = new_idurl
            if _Debug:
                lg.out(_DebugLevel, 'id_url.identity_cached added new revision %d for user %r, total revisions %d: %r -> %r' % (
                    new_revision, user_name, len(_MergedIDURLs[pub_key]), prev_idurl, new_idurl))
    if _Debug:
        lg.args(_DebugLevel, is_identity_rotated=is_identity_rotated, latest_id_obj=bool(latest_id_obj))
    if is_identity_rotated and latest_id_obj is not None:
        latest_revision = latest_id_obj.getRevisionValue()
        if _Debug:
            lg.args(_DebugLevel, new_revision=new_revision, latest_revision=latest_revision)
        if new_revision > latest_revision:
            # identity sources changed AND revision increased: announce the rotation
            lg.info('found rotated identity after caching %r -> %r' % (
                latest_id_obj.getSources(as_originals=True)[0], new_sources[0]))
            from main import events
            events.send('identity-rotated', data=dict(
                old_idurls=latest_id_obj.getSources(as_originals=True),
                new_idurls=new_id_obj.getSources(as_originals=True),
                old_revision=latest_id_obj.getRevisionValue(),
                new_revision=new_revision,
            ))
            if latest_id_obj.getIDURL(as_original=True) != new_id_obj.getIDURL(as_original=True):
                events.send('identity-url-changed', data=dict(
                    old_idurl=latest_id_obj.getIDURL(as_original=True),
                    new_idurl=new_id_obj.getIDURL(as_original=True),
                    old_revision=latest_id_obj.getRevisionValue(),
                    new_revision=new_revision,
                ))
            from userid import my_id
            # extra events when the rotated identity is my own
            if my_id.isLocalIdentityReady():
                if my_id.getLocalID() == new_id_obj.getIDURL():
                    events.send('my-identity-rotated', data=dict(
                        old_idurls=latest_id_obj.getSources(as_originals=True),
                        new_idurls=new_id_obj.getSources(as_originals=True),
                        old_revision=latest_id_obj.getRevisionValue(),
                        new_revision=new_revision,
                    ))
                    if latest_id_obj.getIDURL(as_original=True) != new_id_obj.getIDURL(as_original=True):
                        events.send('my-identity-url-changed', data=dict(
                            old_idurl=latest_id_obj.getIDURL(as_original=True),
                            new_idurl=new_id_obj.getIDURL(as_original=True),
                            old_revision=latest_id_obj.getRevisionValue(),
                            new_revision=new_revision,
                        ))
        else:
            lg.warn('cached out-dated revision %d for %r' % (new_revision, new_sources[0]))
    else:
        if _Debug:
            lg.out(_DebugLevel, 'id_url.identity_cached revision %d for %r' % (new_revision, new_sources[0]))
    return True
Ejemplo n.º 10
0
def ReadBinaryFile(filename, decode_encoding=None):
    """
    Thin proxy around local_fs.ReadBinaryFile().
    """
    return local_fs.ReadBinaryFile(
        filename=filename,
        decode_encoding=decode_encoding,
    )