Example no. 1
 def OnDataReceived(self, newpacket, result):
     # we requested some data from a supplier, and just received it
     if self.shutdown:
         lg.warn('skip, supplier queue is shutting down')
         self.StopAllRequests()
         return
     if _Debug:
         lg.args(_DebugLevel, newpacket=newpacket, result=result, queue=len(self.fileRequestQueue), remoteName=self.remoteName)
     packetID = global_id.CanonicalID(newpacket.PacketID)
     if (packetID not in self.fileRequestQueue) or (packetID not in self.fileRequestDict):
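         # the incoming packet ID may reference an outdated IDURL of a rotated identity, so try to match it against the latest one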
         latest_idurl = global_id.NormalizeGlobalID(packetID, as_field=True)['idurl'].latest
         another_packetID = global_id.SubstitutePacketID(packetID, idurl=latest_idurl)
         if (another_packetID in self.fileRequestQueue) and (another_packetID in self.fileRequestDict):
             packetID = another_packetID
             lg.warn('found incoming %r with outdated packet id, corrected: %r' % (newpacket, another_packetID, ))
     if (packetID not in self.fileRequestQueue) or (packetID not in self.fileRequestDict):
         lg.err('unexpected %r received which is not in the downloading queue' % newpacket)
     else:
         f_down = self.fileRequestDict[packetID]
         if newpacket.Command == commands.Data():
             wrapped_packet = signed.Unserialize(newpacket.Payload)
             if not wrapped_packet or not wrapped_packet.Valid():
                 lg.err('incoming Data() packet is not valid')
                 f_down.event('fail-received', newpacket)
                 return
             f_down.event('valid-data-received', wrapped_packet)
         elif newpacket.Command == commands.Fail():
             f_down.event('fail-received', newpacket)
         else:
             lg.err('incorrect response command: %r' % newpacket)
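Across these snippets, `global_id.NormalizeGlobalID()` is read as a mapping with keys such as 'customer', 'idurl', 'path', 'key_alias', 'user' and 'version'. A minimal exploratory sketch, assuming the package layout imported in Example no. 7 and a made-up global path (the exact input format and the full set of returned fields are assumptions, not taken from the project docs):

from userid import global_id

# hypothetical input, following the <key_alias>$<user>@<id_host>:<remote_path> form
# suggested by these snippets
parts = global_id.NormalizeGlobalID('master$alice@id-server.net:0/1/2')
print(parts['customer'], parts['idurl'], parts['path'], parts['key_alias'])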
Example no. 2
 def __init__(self,
              parent, callOnReceived, creatorID, packetID, ownerID, remoteID,
              debug_level=_DebugLevel, log_events=_Debug, log_transitions=_Debug, publish_events=False, **kwargs):
     """
     Builds `file_down()` state machine.
     """
     self.parent = parent
     self.callOnReceived = []
     self.callOnReceived.append(callOnReceived)
     self.creatorID = creatorID
     self.packetID = global_id.CanonicalID(packetID)
     parts = global_id.NormalizeGlobalID(packetID)
     self.customerID = parts['customer']
     self.remotePath = parts['path']
     self.customerIDURL = parts['idurl']
     customerGlobalID, remotePath, versionName, fileName = packetid.SplitVersionFilename(packetID)
     self.backupID = packetid.MakeBackupID(customerGlobalID, remotePath, versionName)
     self.fileName = fileName
     self.ownerID = ownerID
     self.remoteID = remoteID
     self.requestTime = None
     self.fileReceivedTime = None
     self.requestTimeout = max(30, 2 * int(settings.getBackupBlockSize() / settings.SendingSpeedLimit()))
     self.result = ''
     self.created = utime.get_sec1970()
     super(FileDown, self).__init__(
         name="file_down_%s_%s/%s/%s" % (nameurl.GetName(self.remoteID), remotePath, versionName, fileName),
         state="AT_STARTUP",
         debug_level=debug_level,
         log_events=log_events,
         log_transitions=log_transitions,
         publish_events=publish_events,
         **kwargs
     )
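For reference, a worked example of the `requestTimeout` formula above, with hypothetical numbers (a 256 KiB backup block size and a 100 KiB/s sending speed limit; the real defaults come from `settings` and are not shown here):

backup_block_size = 256 * 1024       # hypothetical settings.getBackupBlockSize()
sending_speed_limit = 100 * 1024     # hypothetical settings.SendingSpeedLimit()
request_timeout = max(30, 2 * int(backup_block_size / sending_speed_limit))
print(request_timeout)  # 30 -- the floor wins, since 2 * int(2.56) == 4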
Example no. 3
 def __init__(self,
              key_id,
              debug_level=_DebugLevel,
              log_events=_Debug,
              log_transitions=_Debug,
              publish_events=False,
              **kwargs):
     """
     Create shared_access_coordinator() state machine.
     Use this method if you need to call Automat.__init__() in a special way.
     """
     self.key_id = key_id
     self.glob_id = global_id.NormalizeGlobalID(self.key_id)
     self.key_alias = self.glob_id['key_alias']
     self.customer_idurl = self.glob_id['idurl']
     self.known_suppliers_list = []
     self.known_ecc_map = None
     self.dht_lookup_use_cache = True
     self.outgoing_list_files_packets_ids = []
     super(SharedAccessCoordinator, self).__init__(
         name="%s$%s" % (self.key_alias[:10], self.glob_id['customer']),
         state='AT_STARTUP',
         debug_level=debug_level,
         log_events=log_events,
         log_transitions=log_transitions,
         publish_events=publish_events,
         **kwargs)
Example no. 4
 def encrypt(self, message_body, encrypt_session_func=None):
     if _Debug:
         lg.args(_DebugLevel, encrypt_session_func=encrypt_session_func, recipient=self.recipient)
     new_sessionkey = key.NewSessionKey(session_key_type=key.SessionKeyType())
     if not encrypt_session_func:
         if my_keys.is_key_registered(self.recipient):
             if _Debug:
                 lg.dbg(_DebugLevel, 'with registered key %r' % self.recipient)
             encrypt_session_func = lambda inp: my_keys.encrypt(self.recipient, inp)
     if not encrypt_session_func:
         glob_id = global_id.NormalizeGlobalID(self.recipient)
         if glob_id['key_alias'] == 'master':
             if glob_id['idurl'] == my_id.getIDURL():
                 lg.warn('making an encrypted message addressed to myself?')
                 encrypt_session_func = lambda inp: my_keys.encrypt('master', inp)
             else:
                 remote_identity = identitycache.FromCache(glob_id['idurl'])
                 if not remote_identity:
                     raise Exception('remote identity is not cached yet, not able to encrypt the message')
                 if _Debug:
                     lg.dbg(_DebugLevel, 'with remote identity public key %r' % glob_id['idurl'])
                 encrypt_session_func = remote_identity.encrypt
         else:
             own_key = global_id.MakeGlobalID(idurl=my_id.getIDURL(), key_alias=glob_id['key_alias'])
             if my_keys.is_key_registered(own_key):
                 if _Debug:
                     lg.dbg(_DebugLevel, 'with registered key (found by alias) %r' % own_key)
                 encrypt_session_func = lambda inp: my_keys.encrypt(own_key, inp)
     if not encrypt_session_func:
         raise Exception('can not find key for given recipient')
     self.encrypted_session = encrypt_session_func(new_sessionkey)
     self.encrypted_body = key.EncryptWithSessionKey(new_sessionkey, message_body, session_key_type=key.SessionKeyType())
     return self.encrypted_session, self.encrypted_body
Example no. 5
 def set_path_id(self, pathID):
     parts = global_id.NormalizeGlobalID(pathID)
     self.pathID = pathID  # source path to backup
     self.customerGlobID = parts['customer']
     self.customerIDURL = parts['idurl']
     self.remotePath = parts['path']  # here it must be in 0/1/2 form
     if parts['key_alias']:
         self.set_key_id(my_keys.make_key_id(alias=parts['key_alias'], creator_glob_id=self.customerGlobID))
     return parts
Example no. 6
 def __init__(self,
              parent,
              fileName,
              packetID,
              remoteID,
              ownerID,
              callOnAck=None,
              callOnFail=None,
              debug_level=_DebugLevel,
              log_events=_Debug,
              log_transitions=_Debug,
              publish_events=False,
              **kwargs):
     """
     Builds `file_up()` state machine.
     """
     self.parent = parent
     self.fileName = fileName
     try:
         self.fileSize = os.path.getsize(os.path.abspath(fileName))
     except:
         lg.exc()
         self.fileSize = 0
     self.packetID = global_id.CanonicalID(packetID)
     parts = global_id.NormalizeGlobalID(packetID)
     self.customerID = parts['customer']
     self.remotePath = parts['path']
     self.customerIDURL = parts['idurl']
     customerGlobalID, remotePath, versionName, fileName = packetid.SplitVersionFilename(packetID)
     self.backupID = packetid.MakeBackupID(customerGlobalID, remotePath, versionName)
     self.remoteID = remoteID
     self.ownerID = ownerID
     self.callOnAck = callOnAck
     self.callOnFail = callOnFail
     self.sendTime = None
     self.ackTime = None
     # the timeout grows with the file size and never drops below a fixed floor
     self.sendTimeout = 10 * 2 * (max(int(self.fileSize / settings.SendingSpeedLimit()), 5) + 5)
     self.result = ''
     self.created = utime.get_sec1970()
     super(FileUp, self).__init__(
         name="file_up_%s_%s/%s/%s" % (nameurl.GetName(self.remoteID), remotePath, versionName, fileName),
         state="AT_STARTUP",
         debug_level=debug_level,
         log_events=log_events,
         log_transitions=log_transitions,
         publish_events=publish_events,
         **kwargs
     )
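Similarly, a worked example of the `sendTimeout` formula above, with hypothetical numbers (a 10 MiB file and a 100 KiB/s sending speed limit):

file_size = 10 * 1024 * 1024         # hypothetical self.fileSize
sending_speed_limit = 100 * 1024     # hypothetical settings.SendingSpeedLimit()
send_timeout = 10 * 2 * (max(int(file_size / sending_speed_limit), 5) + 5)
print(send_timeout)  # 2140 seconds: 10 * 2 * (102 + 5)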
Example no. 7
 def _on_supplier_modified(self, evt):
     from access import key_ring
     from crypt import my_keys
     from userid import global_id
     from userid import my_id
     from logs import lg
     if evt.data['new_idurl']:
         my_keys_to_be_republished = []
         for key_id in my_keys.known_keys():
             if not key_id.startswith('share_'):
                 continue
             _glob_id = global_id.NormalizeGlobalID(key_id)
             if _glob_id['idurl'].to_bin() == my_id.getIDURL().to_bin():
                 # only send public keys of my own shares
                 my_keys_to_be_republished.append(key_id)
         for key_id in my_keys_to_be_republished:
             d = key_ring.transfer_key(key_id, trusted_idurl=evt.data['new_idurl'], include_private=False, include_signature=False)
             d.addErrback(lambda *a: lg.err('transfer key failed: %s' % str(*a)))
Example no. 8
 def decrypt(self, decrypt_session_func=None):
     if _Debug:
         lg.args(_DebugLevel, decrypt_session_func=decrypt_session_func, recipient=self.recipient)
     if not decrypt_session_func:
         if my_keys.is_key_registered(self.recipient):
             if _Debug:
                 lg.dbg(_DebugLevel, 'decrypt with registered key %r' % self.recipient)
             decrypt_session_func = lambda inp: my_keys.decrypt(self.recipient, inp)
     if not decrypt_session_func:
         glob_id = global_id.NormalizeGlobalID(self.recipient)
         if glob_id['idurl'] == my_id.getIDURL():
             if glob_id['key_alias'] == 'master':
                 if _Debug:
                     lg.dbg(_DebugLevel, 'decrypt with my master key %r' % self.recipient)
                 decrypt_session_func = lambda inp: my_keys.decrypt('master', inp)
     if not decrypt_session_func:
         raise Exception('can not find key for given recipient: %s' % self.recipient)
     decrypted_sessionkey = decrypt_session_func(self.encrypted_session)
     return key.DecryptWithSessionKey(decrypted_sessionkey, self.encrypted_body, session_key_type=key.SessionKeyType())
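Examples no. 4 and no. 8 are two halves of the same scheme: the session key is encrypted with the recipient's key, and the body with the session key itself. A minimal round-trip sketch of just the session-key part, assuming a `crypt.key` module path (only calls already used in these snippets appear here; the symmetric round trip is an assumption):

from crypt import key  # assumed module path, mirroring 'from crypt import my_keys' in Example no. 7

session_key = key.NewSessionKey(session_key_type=key.SessionKeyType())
encrypted_body = key.EncryptWithSessionKey(session_key, b'hello', session_key_type=key.SessionKeyType())
assert key.DecryptWithSessionKey(session_key, encrypted_body, session_key_type=key.SessionKeyType()) == b'hello'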
Example no. 9
 def on_my_message(self, message):
     if message.startswith('!add '):
         idurl = message[5:]
         if global_id.IsValidGlobalUser(idurl):
             gid = global_id.NormalizeGlobalID(idurl)
             idurl = gid['idurl']
         if idurl.strip() and idurl not in self.users:
             self.users.append(idurl)
             name = nameurl.GetName(idurl)
             self.history.append({
                 'text': 'user "%s" was added to the channel' % name,
                 'name': '',
                 'time': time.time(),
             })
         return
     if message.startswith('!find ') or message.startswith('!search '):
         _, _, inp = message.partition(' ')
         if not self.search_user_func:
             self.history.append({
                 'text': 'search failed, method not defined',
                 'name': '',
                 'time': time.time(),
             })
             return
         self.search_user_func(inp).addBoth(self.on_nickname_search_result)
         self.history.append({
             'text': 'looking for "%s" ...' % inp,
             'name': '',
             'time': time.time(),
         })
         return
     self.history.append({
         'text': message,
         'name': 'you',
         'time': time.time(),
     })
     if self.send_message_func is not None:
         for to in self.users:
             reactor.callFromThread(self.send_message_func, to, message)
Example no. 10
 def doSavePacket(self, *args, **kwargs):
     """
     Action method.
     """
     if not args or not args[0]:
         raise Exception('no input found')
     NewPacket, PacketID = args[0]
     glob_path = global_id.NormalizeGlobalID(PacketID, detect_version=True)
     packetID = global_id.CanonicalID(PacketID)
     customer_id, _, _, _, SupplierNumber, dataORparity = packetid.SplitFull(packetID)
     if dataORparity == 'Data':
         self.OnHandData[SupplierNumber] = True
     elif dataORparity == 'Parity':
         self.OnHandParity[SupplierNumber] = True
     if not NewPacket:
         lg.warn('packet %r already exists locally' % packetID)
         return
     filename = os.path.join(settings.getLocalBackupsDir(), customer_id, glob_path['path'])
     dirpath = os.path.dirname(filename)
     if not os.path.exists(dirpath):
         try:
             bpio._dirs_make(dirpath)
         except:
             lg.exc()
     # either way, the packet payload is saved to the local file
     if not bpio.WriteBinaryFile(filename, NewPacket.Payload):
         lg.err("unable to write to %s" % filename)
         return
     if self.packetInCallback is not None:
         self.packetInCallback(self.backup_id, NewPacket)
     if _Debug:
         lg.out(_DebugLevel, "restore_worker.doSavePacket %s saved to %s" % (packetID, filename))
Example no. 11
 def _on_packet_request_result(self, NewPacketOrPacketID, result):
     if self.block_requests is None:
         return
     if _Debug:
         lg.args(_DebugLevel, packet=NewPacketOrPacketID, result=result)
     packet_id = None
     if strng.is_string(NewPacketOrPacketID):
         packet_id = NewPacketOrPacketID
     else:
         packet_id = getattr(NewPacketOrPacketID, 'PacketID', None)
     if not packet_id:
         raise Exception('packet ID is unknown from %r' % NewPacketOrPacketID)
     if packet_id not in self.block_requests:
         resp = global_id.NormalizeGlobalID(packet_id)
         for req_packet_id in self.block_requests:
             req = global_id.NormalizeGlobalID(req_packet_id)
             if resp['version'] == req['version'] and resp['path'] == req['path']:
                 if resp['key_alias'] == req['key_alias'] and resp['user'] == req['user']:
                     if id_url.is_the_same(resp['idurl'], req['idurl']):
                         packet_id = req_packet_id
                         lg.warn('found matching packet request %r for rotated idurl %r' % (packet_id, resp['idurl'], ))
                         break
     if packet_id not in self.block_requests:
         if _Debug:
             lg.args(_DebugLevel, block_requests=self.block_requests)
         raise Exception('packet ID not registered')
     if result == 'in queue':
         if self.block_requests[packet_id] is not None:
             raise Exception('packet is still in IO queue, but already unregistered')
         lg.warn('packet already in the request queue: %r' % packet_id)
         return
     if result in ['received', 'exist']:
         self.block_requests[packet_id] = True
         if result == 'exist':
             # reactor.callLater(0, self.automat, 'data-received', (None, packet_id, ))  # @UndefinedVariable
             self.event('data-received', (None, packet_id, ))
         else:
             # reactor.callLater(0, self.automat, 'data-received', (NewPacketOrPacketID, packet_id, ))  # @UndefinedVariable
             self.event('data-received', (NewPacketOrPacketID, packet_id, ))
     else:
         self.block_requests[packet_id] = False
         self.RequestFails.append(packet_id)
         # reactor.callLater(0, self.automat, 'request-failed', packet_id)  # @UndefinedVariable
         self.event('request-failed', packet_id)
Example no. 12
def send(customer_idurl,
         packet_id,
         format_type,
         key_id,
         remote_idurl,
         query_items=None):
    if not query_items:
        query_items = ['*']
    key_id = my_keys.latest_key_id(key_id)
    parts = global_id.NormalizeGlobalID(key_id)
    if parts['key_alias'] == 'master' and parts['idurl'] != my_id.getIDURL():
        # lg.warn('incoming ListFiles() request with customer "master" key: %r' % key_id)
        if not my_keys.is_key_registered(key_id) and identitycache.HasKey(parts['idurl']):
            lg.info('customer public key %r to be registered locally for the first time' % key_id)
            known_ident = identitycache.FromCache(parts['idurl'])
            if not my_keys.register_key(key_id, known_ident.getPublicKey()):
                lg.err('failed to register known public key of the customer: %r' % key_id)
    if not my_keys.is_key_registered(key_id):
        lg.warn('not able to return Files() for customer %s, key %s not registered' % (customer_idurl, key_id, ))
        return p2p_service.SendFailNoRequest(customer_idurl, packet_id, response='key not registered')
    if _Debug:
        lg.out(_DebugLevel, "list_files.send to %s, customer_idurl=%s, key_id=%s, query_items=%r" % (remote_idurl, customer_idurl, key_id, query_items, ))
    ownerdir = settings.getCustomerFilesDir(customer_idurl)
    plaintext = ''
    if os.path.isdir(ownerdir):
        try:
            for query_path in query_items:
                plaintext += process_query_item(query_path, parts['key_alias'], ownerdir)
        except:
            lg.exc()
            return p2p_service.SendFailNoRequest(customer_idurl, packet_id, response='list files query processing error')
    else:
        lg.warn('customer folder not found: %s' % ownerdir)
    if _Debug:
        lg.out(_DebugLevel, '\n%s' % plaintext)
    raw_list_files = PackListFiles(plaintext, format_type)
    block = encrypted.Block(
        CreatorID=my_id.getIDURL(),
        BackupID=key_id,
        Data=raw_list_files,
        SessionKey=key.NewSessionKey(session_key_type=key.SessionKeyType()),
        SessionKeyType=key.SessionKeyType(),
        EncryptKey=key_id,
    )
    encrypted_list_files = block.Serialize()
    newpacket = p2p_service.SendFiles(
        idurl=remote_idurl,
        raw_list_files_info=encrypted_list_files,
        packet_id=packet_id,
        callbacks={
            commands.Ack(): on_acked,
            commands.Fail(): on_failed,
            None: on_timeout,
        },
    )
    return newpacket