Example #1
0
 def _do_open_known_shares(self):
     """
     Find all locally known "share_" keys that currently have no active
     SharedAccessCoordinator and restart a coordinator for every such key
     that still owns at least one item in the backup file system catalog.
     """
     from crypt import my_keys
     from access import shared_access_coordinator
     from storage import backup_fs
     # collect share keys that are known locally but not currently active
     known_offline_shares = []
     for key_id in my_keys.known_keys():
         if not key_id.startswith('share_'):
             continue
         if shared_access_coordinator.get_active_share(key_id):
             continue
         known_offline_shares.append(key_id)
     # keep only offline shares that actually own files in the catalog
     to_be_opened = []
     for _, _, itemInfo in backup_fs.IterateIDs():
         if not itemInfo.key_id:
             continue
         if itemInfo.key_id in to_be_opened:
             continue
         if itemInfo.key_id not in known_offline_shares:
             continue
         to_be_opened.append(itemInfo.key_id)
     for key_id in to_be_opened:
         active_share = shared_access_coordinator.SharedAccessCoordinator(
             key_id,
             log_events=True,
             publish_events=False,
         )
         active_share.automat('restart')
Example #2
0
 def doBackupKeys(self, *args, **kwargs):
     """
     Action method.
     """
     # schedule uploads for renamed keys and mark obsolete copies for erasing
     for old_key_id, (new_key_id, is_private) in list(self.keys_to_rename.items()):
         old_stored = old_key_id in self.stored_keys
         new_stored = new_key_id in self.stored_keys
         if old_stored and not new_stored:
             self.keys_to_upload.add(new_key_id)
         if new_stored and old_stored:
             self.keys_to_erase[old_key_id] = is_private
     # every known key that is missing remotely (or marked not stored) must be uploaded
     for known_key_id in my_keys.known_keys().keys():
         if known_key_id not in self.stored_keys or known_key_id in self.not_stored_keys:
             self.keys_to_upload.add(known_key_id)
     pending_backups = []
     for upload_key_id in self.keys_to_upload:
         pending_backups.append(key_ring.do_backup_key(upload_key_id))
         self.saved_count += 1
     if _Debug:
         lg.args(_DebugLevel, keys_saved=len(pending_backups))
     wait_all_saved = DeferredList(pending_backups, fireOnOneErrback=False, consumeErrors=True)
     wait_all_saved.addCallback(lambda ok: self.automat('backup-ok', ok))
     wait_all_saved.addErrback(lambda err: self.automat('error', err))
Example #3
0
 def doCleanKeys(self, *args, **kwargs):
     """
     Action method.
     """
     deletions = []
     # a stored key is obsolete when neither its original id nor its
     # latest (possibly rotated) id is known anymore
     for stored_key_id, is_private in self.stored_keys.items():
         known = my_keys.known_keys()
         if stored_key_id not in known and my_keys.latest_key_id(stored_key_id) not in known:
             self.keys_to_erase[stored_key_id] = is_private
     for erase_key_id, erase_is_private in self.keys_to_erase.items():
         deletions.append(key_ring.do_delete_key(erase_key_id, erase_is_private))
         self.deleted_count += 1
     if _Debug:
         lg.args(_DebugLevel, restored=self.restored_count, saved=self.saved_count, deleted=self.deleted_count)
     self.automat('clean-ok')
Example #4
0
 def _do_open_known_shares(self):
     """
     Restart a SharedAccessCoordinator for every known but inactive "share_"
     key that still owns items in the backup file system, and populate the
     corresponding shared files for listeners when requested.
     """
     from crypt import my_keys
     from main import listeners
     from access import shared_access_coordinator
     from storage import backup_fs
     # share keys known locally that have no running coordinator yet
     known_offline_shares = []
     for key_id in my_keys.known_keys():
         if not key_id.startswith('share_'):
             continue
         active_share = shared_access_coordinator.get_active_share(key_id)
         if active_share:
             continue
         known_offline_shares.append(key_id)
     # keep only offline shares that own at least one catalog item
     to_be_opened = []
     for _, _, itemInfo in backup_fs.IterateIDs():
         if not itemInfo.key_id:
             continue
         if itemInfo.key_id in to_be_opened:
             continue
         if itemInfo.key_id not in known_offline_shares:
             continue
         to_be_opened.append(itemInfo.key_id)
     for key_id in to_be_opened:
         active_share = shared_access_coordinator.SharedAccessCoordinator(key_id, log_events=True, publish_events=False, )
         active_share.automat('restart')
         # NOTE(review): 'shared_file' is removed from the populate-later set
         # only on the first share that finds it requested, so only that share
         # triggers populate_shared_files — presumably intentional; confirm
         if listeners.is_populate_requered('shared_file'):
             listeners.populate_later().remove('shared_file')
             backup_fs.populate_shared_files(key_id=key_id)
Example #5
0
def transfer_private_key(key_id, idurl):
    """
    Encrypt my private key `key_id` with the public key of the identity
    cached for `idurl` and send it to that node.

    Returns a Deferred fired with the remote response packet on Ack, or
    errback-ed on Fail or on any local validation error.
    """
    if _Debug:
        lg.out(_DebugLevel,
               'key_ring.transfer_private_key  %s -> %s' % (key_id, idurl))
    result = Deferred()

    def _fail(message):
        # log and report a local validation failure through the Deferred
        lg.warn(message)
        result.errback(Exception(message))
        return result

    recipient_id_obj = identitycache.FromCache(idurl)
    if not recipient_id_obj:
        return _fail('not found "%s" in identity cache' % idurl)
    key_alias, creator_idurl = my_keys.split_key_id(key_id)
    if not key_alias or not creator_idurl:
        return _fail('wrong key_id')
    key_object = my_keys.known_keys().get(key_id)
    if key_object is None:
        return _fail('unknown key: "%s"' % key_id)
    key_json = {
        'key_id': key_id,
        'alias': key_alias,
        'creator': creator_idurl,
        'fingerprint': str(key_object.fingerprint()),
        'type': str(key_object.type()),
        'ssh_type': str(key_object.sshType()),
        'size': str(key_object.size()),
        'public': str(key_object.public().toString('openssh')),
        'private': str(key_object.toString('openssh')),
    }
    encrypted_key_data = encrypted.Block(
        BackupID=key_id,
        Data=json.dumps(key_json),
        SessionKey=key.NewSessionKey(),
        # encrypt data using public key of recipient
        EncryptKey=lambda inp: recipient_id_obj.encrypt(inp),
    ).Serialize()
    p2p_service.SendKey(
        remote_idurl=recipient_id_obj.getIDURL(),
        encrypted_key_data=encrypted_key_data,
        packet_id=key_id,
        callbacks={
            commands.Ack(): lambda response, info: result.callback(response),
            commands.Fail(): lambda response, info: result.errback(Exception(response)),
        },
    )
    return result
Example #6
0
def transfer_key(key_id, trusted_idurl, include_private=False, timeout=10, result=None):
    """
    Send the key `key_id` (public part only, unless `include_private` is True)
    to the node identified by `trusted_idurl`.

    The key info is serialized, encrypted with the recipient's public key and
    delivered in a `key` packet. Returns `result` (a new Deferred when not
    supplied), which is fired by `_on_transfer_key_response` on Ack, Fail or
    when the outgoing packet times out after `timeout` seconds.
    """
    if _Debug:
        lg.out(_DebugLevel, 'key_ring.transfer_key  %s -> %s' % (key_id, trusted_idurl))
    if result is None:
        result = Deferred()
    recipient_id_obj = identitycache.FromCache(trusted_idurl)
    if not recipient_id_obj:
        lg.warn('not found "%s" in identity cache' % trusted_idurl)
        result.errback(Exception('not found "%s" in identity cache' % trusted_idurl))
        return result
    key_alias, creator_idurl = my_keys.split_key_id(key_id)
    if not key_alias or not creator_idurl:
        lg.warn('wrong key_id')
        result.errback(Exception('wrong key_id'))
        return result
    key_object = my_keys.known_keys().get(key_id)
    if key_object is None:
        lg.warn('unknown key: "%s"' % key_id)
        result.errback(Exception('unknown key: "%s"' % key_id))
        return result
    try:
        key_json = my_keys.make_key_info(key_object, key_id=key_id, include_private=include_private)
    except Exception as exc:
        lg.exc()
        result.errback(exc)
        return result
    key_data = serialization.DictToBytes(key_json, values_to_text=True)
    block = encrypted.Block(
        BackupID=key_id,
        Data=key_data,
        SessionKey=key.NewSessionKey(),
        # encrypt data using public key of recipient
        EncryptKey=lambda inp: recipient_id_obj.encrypt(inp),
    )
    encrypted_key_data = block.Serialize()
    p2p_service.SendKey(
        remote_idurl=recipient_id_obj.getIDURL(),
        encrypted_key_data=encrypted_key_data,
        packet_id=key_id,
        callbacks={
            commands.Ack(): lambda response, info: _on_transfer_key_response(response, info, key_id, result),
            commands.Fail(): lambda response, info: _on_transfer_key_response(response, info, key_id, result),
            # the None key is the outgoing-packet timeout/cancel callback
            None: lambda pkt_out: _on_transfer_key_response(None, None, key_id, result),
        },
        timeout=timeout,
    )
    return result
Example #7
0
def check_rename_my_keys():
    """
    Make sure all my keys have correct names according to known latest identities I have cached.
    For every key checks corresponding IDURL info and decides to rename it if key owner's identity was rotated.
    """
    renames = {}
    for current_key_id in list(my_keys.known_keys().keys()):
        parsed = global_id.ParseGlobalID(current_key_id)
        owner_idurl = parsed['idurl']
        if owner_idurl.is_latest():
            continue
        # owner's identity was rotated: build the new global id for this key
        renames[current_key_id] = global_id.MakeGlobalID(
            idurl=owner_idurl.to_bin(),
            key_alias=parsed['key_alias'],
        )
    for old_key_id, new_key_id in renames.items():
        my_keys.rename_key(old_key_id, new_key_id)
Example #8
0
 def _on_supplier_modified(self, evt):
     """
     When one of my suppliers was replaced, re-send the public parts of my
     own "share_" keys to the new supplier so it can serve the shared data.
     """
     from logs import lg
     from access import key_ring
     from crypt import my_keys
     from userid import global_id
     from userid import my_id
     if evt.data['new_idurl']:
         my_keys_to_be_republished = []
         for key_id in my_keys.known_keys():
             if not key_id.startswith('share_'):
                 continue
             _glob_id = global_id.ParseGlobalID(key_id)
             if _glob_id['idurl'] == my_id.getLocalIDURL():
                 # only republish share keys created by me
                 my_keys_to_be_republished.append(key_id)
         for key_id in my_keys_to_be_republished:
             d = key_ring.transfer_key(key_id,
                                       trusted_idurl=evt.data['new_idurl'],
                                       include_private=False)
             # do not leave the Deferred without an errback: log failures
             d.addErrback(lambda *a: lg.err('transfer key failed: %s' % str(*a)))
Example #9
0
 def _on_supplier_modified(self, evt):
     """
     Re-send public keys of my own "group_" keys to a newly assigned supplier.
     """
     from logs import lg
     from access import key_ring
     from crypt import my_keys
     from userid import global_id
     from userid import my_id
     new_idurl = evt.data['new_idurl']
     if not new_idurl:
         return
     keys_to_push = []
     for known_key_id in my_keys.known_keys():
         if not known_key_id.startswith('group_'):
             continue
         parsed = global_id.ParseGlobalID(known_key_id)
         # only send public keys of my own groups
         if parsed['idurl'] == my_id.getIDURL():
             keys_to_push.append(known_key_id)
     for group_key_id in keys_to_push:
         d = key_ring.transfer_key(group_key_id, trusted_idurl=new_idurl, include_private=False, include_signature=False)
         d.addErrback(lambda *a: lg.err('transfer key failed: %s' % str(*a)))
Example #10
0
def start_message_producers():
    """
    Ensure a connected MessageProducer automat exists for every known key.

    Creates a producer for any key that does not have one yet and triggers
    'connect' on every producer that is not already connecting or connected.
    Returns the number of producers that were asked to connect.
    """
    started = 0
    for group_key_id in my_keys.known_keys():
        message_producer = get_active_message_producer(group_key_id)
        if not message_producer:
            message_producer = MessageProducer(group_key_id)
            message_producer.automat('init')
        # skip producers already connecting or connected
        if message_producer.state in ('DHT_READ?', 'BROKER?', 'CONNECTED'):
            continue
        message_producer.automat('connect')
        started += 1
    return started