Example #1
 def doFindNewSupplier(self, *args, **kwargs):
     """
     Action method.
     """
     if _Debug:
         lg.out(_DebugLevel, 'fire_hire.doFindNewSupplier')
     position_for_new_supplier = None
     for pos in range(settings.getSuppliersNumberDesired()):
         if pos in self.hire_list:
             continue
         supplier_idurl = contactsdb.supplier(pos)
         if not supplier_idurl:
             lg.info('found empty supplier at position %d and going to find new supplier on that position' % pos)
             position_for_new_supplier = pos
             break
         if supplier_idurl in self.dismiss_list:
             lg.info('going to find new supplier on existing position %d to replace supplier %s' % (
                 pos, supplier_idurl, ))
             position_for_new_supplier = pos
             break
     if position_for_new_supplier is None:
         lg.err('did not found position for new supplier')
         self.automat('search-failed')
         return
     self.hire_list.append(position_for_new_supplier)
     supplier_finder.A(
         'start',
         family_position=position_for_new_supplier,
         ecc_map=eccmap.Current().name,
         family_snapshot=contactsdb.suppliers(),
     )
Example #2
def write_state(customer_id, broker_id, json_value):
    service_dir = settings.ServiceDir('service_message_broker')
    keepers_dir = os.path.join(service_dir, 'keepers')
    broker_dir = os.path.join(keepers_dir, broker_id)
    keeper_state_file_path = os.path.join(broker_dir, customer_id)
    if json_value is None:
        if os.path.isfile(keeper_state_file_path):
            try:
                os.remove(keeper_state_file_path)
            except:
                lg.exc()
        if _Debug:
            lg.args(_DebugLevel, customer_id=customer_id, broker_id=broker_id)
        return None
    if not os.path.isdir(broker_dir):
        try:
            os.makedirs(broker_dir)
        except:
            lg.exc()
            return None
    if not local_fs.WriteTextFile(keeper_state_file_path, jsn.dumps(json_value)):
        lg.err('failed writing queue_keeper state for customer %r of broker %r to %r' % (
            customer_id, broker_id, keeper_state_file_path, ))
        return None
    if _Debug:
        lg.args(_DebugLevel, customer_id=customer_id, broker_id=broker_id, json_value=json_value)
    return json_value
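A minimal usage sketch for write_state(), inferred only from the code above; the customer and broker IDs and the dict contents are made-up placeholders. Passing a dict stores it as the queue_keeper state, passing None removes the stored file.

customer_id = 'alice@id-server.net'   # hypothetical customer global ID
broker_id = 'carol@id-server.net'     # hypothetical broker global ID

# store (or overwrite) the keeper state for this customer/broker pair
write_state(customer_id, broker_id, {'state': 'CONNECTED', 'position': 0})

# passing None deletes the stored state file, if it exists
write_state(customer_id, broker_id, None)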
Example #3
def do_restore_key(key_id, is_private, keys_folder=None, wait_result=False):
    """
    Restore given key from my suppliers if I do not have it locally.
    """
    if _Debug:
        lg.out(_DebugLevel, 'key_ring.do_restore_key     key_id=%r    is_private=%r' % (key_id, is_private, ))
    if my_keys.is_key_registered(key_id):
        lg.err('local key already exist: "%s"' % key_id)
        if wait_result:
            return fail(Exception('local key already exist: "%s"' % key_id))
        return False
    if not keys_folder:
        keys_folder = settings.KeyStoreDir()
    if is_private:
        remote_path_for_key = '.keys/%s.private' % key_id
    else:
        remote_path_for_key = '.keys/%s.public' % key_id
    global_key_path = global_id.MakeGlobalID(
        key_alias='master', customer=my_id.getGlobalID(), path=remote_path_for_key)
    ret = api.file_download_start(
        remote_path=global_key_path,
        destination_path=keys_folder,
        wait_result=True,
        open_share=False,
    )
    if not isinstance(ret, Deferred):
        lg.err('failed to download key "%s": %s' % (key_id, ret))
        if wait_result:
            return fail(Exception('failed to download key "%s": %s' % (key_id, ret)))
        return False

    result = Deferred()

    def _on_result(res):
        if not isinstance(res, dict):
            lg.err('failed to download key "%s": %s' % (key_id, res))
            if wait_result:
                result.errback(Exception('failed to download key "%s": %s' % (key_id, res)))
            return None
        if res['status'] != 'OK':
            lg.err('failed to download key "%s": %r' % (key_id, res))
            if wait_result:
                result.errback(Exception('failed to download key "%s": %r' % (key_id, res)))
            return None
        if not my_keys.load_key(key_id, keys_folder):
            lg.err('failed to read key "%s" from local folder "%s"' % (key_id, keys_folder))
            if wait_result:
                result.errback(Exception('failed to read key "%s" from local folder "%s"' % (key_id, keys_folder)))
            return None
        if _Debug:
            lg.out(_DebugLevel, 'key_ring.do_restore_key._on_result key_id=%s  is_private=%r : %r' % (key_id, is_private, res))
        if wait_result:
            result.callback(res)
        return None

    ret.addBoth(_on_result)

    if not wait_result:
        return True
    return result
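A short call sketch, based only on the code above: with wait_result=True the function returns a Deferred that fires with the result dict from api.file_download_start() on success and errbacks otherwise. The key_id value here is hypothetical.

d = do_restore_key('share_abc$alice@id-server.net', is_private=False, wait_result=True)
d.addCallback(lambda res: lg.info('key restored: %r' % res))
d.addErrback(lambda err: lg.err('key restore failed: %r' % err))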
Example #4
 def _on_broker_connected(self, idurl, broker_pos):
     if _Debug:
         lg.args(_DebugLevel,
                 idurl=idurl,
                 broker_pos=broker_pos,
                 connecting_brokers=self.connecting_brokers)
     if idurl:
         self.connected_brokers[broker_pos] = idurl
     self.connecting_brokers.discard(broker_pos)
     if _Debug:
         lg.args(_DebugLevel,
                 idurl=idurl,
                 broker_pos=broker_pos,
                 connecting_brokers=self.connecting_brokers,
                 connected_brokers=self.connected_brokers)
     if self.connecting_brokers:
         return
     if not self.connected_brokers:
         lg.err('failed to connect with any brokers')
         self.automat('brokers-failed')
         return
     if 0 not in self.connected_brokers:
         lg.warn(
             'some brokers connected, but broker at position 0 is still empty'
         )
         self.automat('brokers-failed')
         return
     if self.rotated_brokers:
         self.automat('brokers-rotated')
     else:
         self.automat('brokers-connected')
Example #5
 def doSendHisFiles(self, *args, **kwargs):
     """
     Action method.
     """
     customer_key_id = my_keys.make_key_id(
         alias='customer', creator_idurl=self.customer_idurl)
     if my_keys.is_key_registered(customer_key_id):
         list_files.send(
             customer_idurl=self.customer_idurl,
             packet_id='%s:%s' % (
                 customer_key_id,
                 packetid.UniqueID(),
             ),
             format_type=settings.ListFilesFormat(),
             key_id=customer_key_id,
             remote_idurl=self.customer_idurl,  # send to the customer
         )
     else:
         # if "customer" key is not delivered to me yet, use his "master" key
         list_files.send(
             customer_idurl=self.customer_idurl,
             packet_id='%s:%s' % (
                 customer_key_id,
                 packetid.UniqueID(),
             ),
             format_type=settings.ListFilesFormat(),
             key_id=my_keys.make_key_id(alias='master',
                                        creator_idurl=self.customer_idurl),
             remote_idurl=self.customer_idurl,  # send to the customer
         )
         lg.err('key %s is not registered, not able to send his files' %
                customer_key_id)
Example #6
 def _do_retry_one_time(self, fail_info):
     to_idurl = id_url.field(fail_info['to']).to_bin()
     from_idurl = id_url.field(fail_info['from']).to_bin()
     _key = (fail_info['command'], fail_info['packet_id'], from_idurl,
             to_idurl)
     current_retries = self.packets_retries.get(_key, 0)
     if _Debug:
         lg.args(_DebugLevel, key=_key, retries=current_retries)
     if fail_info.get('error') != 'route already closed':
         lg.err('failed sending routed packet : %r' % fail_info)
         self._do_clean_sent_packet(fail_info)
         self._do_cancel_outbox_packets(fail_info)
         self.packets_retries.pop(_key, None)
         return
     if current_retries >= 1:
         lg.err('failed sending routed packet after few attempts : %r' %
                fail_info)
         self.automat('retry-failed', fail_info)
         self._do_clean_sent_packet(fail_info)
         self._do_cancel_outbox_packets(fail_info)
         self.packets_retries.pop(_key, None)
         return
     self.packets_retries[_key] = current_retries + 1
     d = identitycache.immediatelyCaching(fail_info['to'])
     d.addCallback(self._on_cache_retry_success, fail_info)
     d.addErrback(self._on_cache_retry_failed, fail_info)
Example #7
 def _do_request(self, x=None):
     from raid import eccmap
     self.received_lf_counter = 0
     self.requested_lf_packet_ids.clear()
     known_suppliers = contactsdb.suppliers(customer_idurl=self.target_customer_idurl)
     try:
         self.critical_suppliers_number = eccmap.GetCorrectableErrors(len(known_suppliers))
     except:
         lg.warn('number of known suppliers for customer %r is not standard' % self.target_customer_idurl)
         self.critical_suppliers_number = int(float(len(known_suppliers)) * 0.75)
     for idurl in known_suppliers:
         if idurl:
             if online_status.isOnline(idurl):
                 if _Debug:
                     lg.out(_DebugLevel, 'list_files_orator._do_request  ListFiles() from my supplier %s' % idurl)
                 outpacket = p2p_service.SendListFiles(
                     target_supplier=idurl,
                     customer_idurl=self.target_customer_idurl,
                     timeout=30,
                 )
                 if outpacket:
                     self.requested_lf_packet_ids.add(outpacket.PacketID)
                 else:
                     lg.err('failed sending ListFiles() to %r' % idurl)
             else:
                 lg.warn('skip sending ListFiles() because %s is not online' % idurl)
Example #8
def restore_done(result, backupID, outfd, tarfilename, outputlocation,
                 callback_method):
    global _WorkingBackupIDs
    global _WorkingRestoreProgress
    global OnRestoreDoneFunc
    if result == 'done':
        lg.info('restore success of %s with result=%s' % (backupID, result))
    else:
        lg.err('restore failed of %s with result=%s' % (backupID, result))
    try:
        os.close(outfd)
    except:
        lg.exc()
    if result == 'done':
        d = backup_tar.extracttar_thread(tarfilename, outputlocation)
        d.addCallback(extract_done, backupID, tarfilename, outputlocation,
                      callback_method)
        d.addErrback(extract_failed, backupID, tarfilename, outputlocation,
                     callback_method)
        return d
    _WorkingBackupIDs.pop(backupID, None)
    _WorkingRestoreProgress.pop(backupID, None)
    tmpfile.throw_out(tarfilename, 'restore ' + result)
    if OnRestoreDoneFunc is not None:
        OnRestoreDoneFunc(backupID, result)
    if callback_method:
        try:
            callback_method(backupID, result)
        except:
            lg.exc()
    return result
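restore_done() is shaped like a Twisted callback: the 'done'/'failed' result string arrives first and the remaining arguments are supplied positionally through addCallback(). A hedged wiring sketch, where restore_deferred stands in for whatever fires with that result string:

restore_deferred.addCallback(
    restore_done, backupID, outfd, tarfilename, outputlocation, callback_method)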
Example #9
def OnTaskFailed(pathID, result):
    """
    Called when the backup process fails somehow.
    """
    lg.err('task failed [%s] with result "%s", %d more tasks' % (pathID, result, len(tasks())))
    reactor.callLater(0, RunTask)  # @UndefinedVariable
    reactor.callLater(0, FireTaskFinishedCallbacks, pathID, None, result)  # @UndefinedVariable
Example #10
 def _on_supplier_response(self, newpacket, info):
     wrapped_packet = signed.Unserialize(newpacket.Payload)
     if _Debug:
         lg.args(_DebugLevel,
                 newpacket=newpacket,
                 wrapped_packet=wrapped_packet)
     if not wrapped_packet or not wrapped_packet.Valid():
         lg.err('incoming Data() is not valid')
         return
     supplier_idurl = wrapped_packet.RemoteID
     from storage import backup_control
     supplier_revision = backup_control.IncomingSupplierBackupIndex(
         wrapped_packet)
     self.requesting_suppliers.discard(supplier_idurl)
     if supplier_revision is not None:
         reactor.callLater(0, self.automat, 'index-file-received', (
             newpacket,
             supplier_revision,
         ))  # @UndefinedVariable
     if _Debug:
         lg.out(
             _DebugLevel,
             'index_synchronizer._on_supplier_response %s from %r, pending: %d, total: %d'
             % (newpacket, supplier_idurl, len(self.requesting_suppliers),
                self.requested_suppliers_number))
     if len(self.requesting_suppliers) == 0:
         reactor.callLater(0, self.automat,
                           'all-responded')  # @UndefinedVariable
Example #11
def Load(filepath=None):
    """
    Load the data from the local file and call the ``ReadIndex()`` method.
    """
    global _LoadingFlag
    if _LoadingFlag:
        return False
    if filepath is None:
        filepath = settings.BackupIndexFilePath()
    if not os.path.isfile(filepath):
        lg.warn('file %s not exist' % filepath)
        WriteIndex(filepath)
    src = bpio.ReadTextFile(filepath)
    if not src:
        lg.err('failed reading file %s' % filepath)
        return False
    inpt = StringIO(src)
    try:
        known_revision = int(inpt.readline().rstrip('\n'))
    except:
        lg.exc()
        return False
    raw_data = inpt.read()
    inpt.close()
    ret = ReadIndex(raw_data)
    if ret:
        commit(known_revision)
        backup_fs.Scan()
        backup_fs.Calculate()
    else:
        lg.warn('catalog index reading failed')
    return ret
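From the reading logic above, the index file evidently starts with an integer revision on its own line, followed by the raw catalog data passed to ReadIndex(). A rough sketch of producing a compatible file; save_index_sketch() is a hypothetical helper, and filepath would normally come from settings.BackupIndexFilePath() as in Load():

def save_index_sketch(filepath, revision, raw_data):
    # first line: integer revision, remaining content: serialized catalog for ReadIndex()
    with open(filepath, 'w') as fout:
        fout.write('%d\n' % revision)
        fout.write(raw_data)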
Example #12
 def _on_existing_customer_accepted(self, evt):
     from twisted.internet import reactor  # @UnresolvedImport
     from logs import lg
     from supplier import family_member
     from userid import id_url
     from userid import my_id
     customer_idurl = evt.data['idurl']
     if customer_idurl == my_id.getLocalID():
         lg.warn('skipping my own identity')
         return
     if evt.data.get('position') is None:
         lg.warn('position of supplier in the family is still unclear')
         return
     fm = family_member.by_customer_idurl(customer_idurl)
     if not fm:
         lg.err(
             'family_member() instance was not found for existing customer %s'
             % customer_idurl)
         return
     reactor.callLater(0, fm.automat, 'family-join', {  # @UndefinedVariable
         'supplier_idurl': my_id.getLocalID().to_bin(),
         'ecc_map': evt.data.get('ecc_map'),
         'position': evt.data.get('position'),
         'family_snapshot': id_url.to_bin_list(evt.data.get('family_snapshot')),
     })
Example #13
 def doSavePacket(self, *args, **kwargs):
     """
     Action method.
     """
     if not args or not args[0]:
         raise Exception('no input found')
     NewPacket, PacketID = args[0]
     glob_path = global_id.ParseGlobalID(PacketID, detect_version=True)
     packetID = global_id.CanonicalID(PacketID)
     customer_id, _, _, _, SupplierNumber, dataORparity = packetid.SplitFull(packetID)
     if dataORparity == 'Data':
         self.OnHandData[SupplierNumber] = True
     elif dataORparity == 'Parity':
         self.OnHandParity[SupplierNumber] = True
     if not NewPacket:
         lg.warn('packet %r already exists locally' % packetID)
         return
     filename = os.path.join(settings.getLocalBackupsDir(), customer_id, glob_path['path'])
     dirpath = os.path.dirname(filename)
     if not os.path.exists(dirpath):
         try:
             bpio._dirs_make(dirpath)
         except:
             lg.exc()
     # either way the payload of packet is saved
     if not bpio.WriteBinaryFile(filename, NewPacket.Payload):
         lg.err("unable to write to %s" % filename)
         return
     if self.packetInCallback is not None:
         self.packetInCallback(self.backup_id, NewPacket)
     if _Debug:
         lg.out(_DebugLevel, "restore_worker.doSavePacket %s saved to %s" % (packetID, filename))
 def _callMethod(self, request_dict):
     if _Debug:
         lg.out(
             _DebugLevel, 'api_jsonrpc_server._callMethod:\n%s' %
             pprint.pformat(request_dict))
     request_dict['_executed'] = time.time()
     try:
         fm_result = self._catch_filemanager_methods(request_dict)
         if fm_result is None:
             result = JSONRPCServer._callMethod(self, request_dict)
         else:
             result = fm_result
     except JSONRPCError as exc:
         lg.err(exc.strerror)
         result = api.ERROR(exc.strerror)
     except Exception as exc:
         lg.exc()
         result = api.ERROR(str(traceback.format_exc()),
                            message=exc.message)
     if isinstance(result, Deferred):
         result.addCallback(
             lambda result: self._register_execution(request_dict, result))
     else:
         result = self._register_execution(request_dict, result)
     return result
Example #15
 def _on_failed_one(err, pos, key_id):
     lg.err('failed to restore key %r : %r' % (
         key_id,
         err,
     ))
     _do_restore_one(pos + 1)
     return None
Example #16
 def _on_extract_failed(self, err, backupID, source_filename,
                        output_location):
     lg.err('archive %r extract failed from %r to %r with: %r' %
            (backupID, source_filename, output_location, err))
     tmpfile.throw_out(source_filename, 'file extract failed')
     self.automat('extract-failed', err)
     return None
Example #17
 def _on_restore_done(self, result, backup_id, outfd, tarfilename,
                      backup_index):
     try:
         os.close(outfd)
     except:
         lg.exc()
     if result == 'done':
         lg.info('archive %r restore success from %r' % (
             backup_id,
             tarfilename,
         ))
     else:
         lg.err('archive %r restore failed from %r with : %r' % (
             backup_id,
             tarfilename,
             result,
         ))
     if result != 'done':
         tmpfile.throw_out(tarfilename, 'restore ' + result)
         self.automat('restore-failed',
                      backup_id=backup_id,
                      tarfilename=tarfilename)
         return None
     self.automat('restore-done',
                  backup_id=backup_id,
                  tarfilename=tarfilename,
                  backup_index=backup_index)
     return
Example #18
def cache_suppliers(path=None):
    """
    Make sure identities of all suppliers we know are cached.
    """
    dl = []
    list_local_customers = list(os.listdir(settings.SuppliersDir()))
    for customer_id in list_local_customers:
        if not global_id.IsValidGlobalUser(customer_id):
            lg.warn('invalid customer record %s found in %s' % (customer_id, settings.SuppliersDir()))
            continue
        try:
            one_customer_idurl = global_id.GlobalUserToIDURL(customer_id)
        except Exception as exc:
            lg.err('idurl caching failed: %r' % exc)
            continue
        if not id_url.is_cached(one_customer_idurl):
            dl.append(identitycache.immediatelyCaching(one_customer_idurl))
        path = os.path.join(settings.SuppliersDir(), customer_id, 'supplierids')
        lst = bpio._read_list(path)
        if lst is None:
            lg.warn('did not found suppliers ids at %s' % path)
            continue
        for one_supplier_idurl in lst:
            if one_supplier_idurl:
                if not id_url.is_cached(one_supplier_idurl):
                    dl.append(identitycache.immediatelyCaching(one_supplier_idurl))
    if _Debug:
        lg.out(_DebugLevel, 'contactsdb.cache_suppliers prepared %d idurls to be cached' % len(dl))
    return DeferredList(dl, consumeErrors=True)
Example #19
 def _on_list_files_failed(self, response, info, customer_idurl,
                           supplier_idurl, key_id):
     if strng.to_text(response.Payload) == 'key not registered':
         if _Debug:
             lg.dbg(
                 _DebugLevel,
                 'supplier %r of customer %r do not possess public key %r yet, sending it now'
                 % (
                     supplier_idurl,
                     customer_idurl,
                     key_id,
                 ))
         result = key_ring.transfer_key(key_id,
                                        supplier_idurl,
                                        include_private=False,
                                        include_signature=False)
         result.addCallback(lambda r: self._on_key_transfer_success(
             customer_idurl, supplier_idurl, key_id))
         result.addErrback(lambda err: lg.err('failed sending key %r : %r' %
                                              (
                                                  key_id,
                                                  err,
                                              )))
     else:
         lg.err(
             'failed requesting ListFiles() with %r for customer %r from supplier %r'
             % (
                 key_id,
                 customer_idurl,
                 supplier_idurl,
             ))
     return None
Example #20
 def doCheckFinished(self, *args, **kwargs):
     """
     Action method.
     """
     if _Debug:
         lg.args(_DebugLevel,
                 backup_job=self.backup_job,
                 backup_max_block_num=self.backup_max_block_num,
                 packets_out=list(self.packets_out.values()))
     if self.backup_job:
         # backup is not finished yet
         return
     if self.backup_max_block_num not in self.packets_out:
         # packets of the last block not sent yet
         return
     packets_in_progress = 0
     for block_num in self.packets_out.keys():
         packets_in_progress += list(
             self.packets_out[block_num].values()).count(None)
     if packets_in_progress:
         # some packets are still in progress
         return
     for block_num in self.packets_out.keys():
         block_packets_failed = list(
             self.packets_out[block_num].values()).count(False)
         if block_packets_failed > self.correctable_errors * 2:  # because each packet also has a Parity()
             lg.err(
                 'all packets for block %d are sent, but too many errors: %d'
                 % (
                     block_num,
                     block_packets_failed,
                 ))
             self.automat('sending-failed')
             return
     self.automat('packets-delivered')
Example #21
def get_latest_ident(pub_key):
    global _KnownUsers
    from userid import identity
    user_path = _KnownUsers.get(pub_key)
    if not user_path:
        return None
    user_identity_files = sorted(map(int, os.listdir(user_path)))
    if len(user_identity_files) == 0:
        lg.warn('identity history is broken, public key is known, but no identity files found')
    latest_revision = -1
    latest_ident = None
    known_revisions = set()
    for_cleanup = []
    for id_file in user_identity_files:
        identity_file_path = os.path.join(user_path, strng.to_text(id_file))
        xmlsrc = local_fs.ReadBinaryFile(identity_file_path)
        one_id_obj = identity.identity(xmlsrc=xmlsrc)
        if not one_id_obj.isCorrect():
            lg.warn('identity history is broken, identity in the file %r is not correct' % identity_file_path)
            for_cleanup.append(identity_file_path)
            continue
        if not one_id_obj.Valid():
            lg.warn('identity history is broken, identity in the file %r is not valid' % identity_file_path)
            for_cleanup.append(identity_file_path)
            continue
        if pub_key != one_id_obj.getPublicKey():
            lg.err('identity history is broken, public key not matching in the file %r' % identity_file_path)
            for_cleanup.append(identity_file_path)
            continue
        known_revisions.add(one_id_obj.getRevisionValue())
        if one_id_obj.getRevisionValue() > latest_revision:
            latest_revision = one_id_obj.getRevisionValue()
            latest_ident = one_id_obj
    return latest_ident
Example #22
def SendContacts(remote_idurl, json_payload={}, wide=False, callbacks={}):
    """
    """
    MyID = my_id.getLocalID()
    if _Debug:
        lg.out(
            _DebugLevel,
            "p2p_service.SendContacts to %s" % nameurl.GetName(remote_idurl))
    PacketID = packetid.UniqueID()
    try:
        json_payload['type']
        json_payload['space']
    except:
        lg.err()
        return None
    Payload = serialization.DictToBytes(json_payload)
    result = signed.Packet(
        Command=commands.Contacts(),
        OwnerID=MyID,
        CreatorID=MyID,
        PacketID=PacketID,
        Payload=Payload,
        RemoteID=remote_idurl,
    )
    gateway.outbox(result, wide=wide, callbacks=callbacks)
    return result
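The try block above implies json_payload must carry at least the 'type' and 'space' keys, otherwise SendContacts() logs an error and returns None. A hedged call sketch; the remote_idurl and payload values are placeholders for illustration only:

result = SendContacts(
    remote_idurl='http://id-server.net/alice.xml',  # hypothetical identity URL
    json_payload={
        'type': 'contacts_list',    # placeholder value
        'space': 'family_member',   # placeholder value
        'items': [],
    },
)
# result is the outgoing signed.Packet() instance, or None if the payload was rejected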
Example #23
 def QueueSendFile(self,
                   fileName,
                   packetID,
                   remoteID,
                   ownerID,
                   callOnAck=None,
                   callOnFail=None):
     #out(10, "io_throttle.QueueSendFile %s to %s" % (packetID, nameurl.GetName(remoteID)))
     remoteID = id_url.field(remoteID)
     ownerID = id_url.field(ownerID)
     if not os.path.exists(fileName):
         lg.err("%s not exist" % fileName)
         if callOnFail is not None:
             reactor.callLater(.01, callOnFail, remoteID, packetID,
                               'not exist')  # @UndefinedVariable
         return False
     if remoteID not in list(self.supplierQueues.keys()):
         self.supplierQueues[remoteID] = SupplierQueue(
             remoteID, self.creatorID)
         lg.info("made a new sending queue for %s" %
                 nameurl.GetName(remoteID))
     return self.supplierQueues[remoteID].SupplierSendFile(
         fileName,
         packetID,
         ownerID,
         callOnAck,
         callOnFail,
     )
Example #24
def push_contact(idurl):
    global _Contacts
    global _PingDelayDict
    global _CurrentDelay
    ident = identitycache.FromCache(idurl)
    if ident is None:
        lg.err('"%s" not in the cache' % idurl)
        return None
    http_contact = ident.getProtoContact('http')
    if http_contact is None:
        if _Debug:
            lg.out(
                _DebugLevel * 2,
                'http_node.add_contact SKIP "%s" : no http contacts found in identity'
                % idurl)
        return None
    _, host, port, _ = nameurl.UrlParse(http_contact)
    new_item = False
    if idurl not in _Contacts:
        # this idurl was not known before, so it is a newly added contact
        new_item = True
    _Contacts[idurl] = (host, port)
    _PingDelayDict[idurl] = _CurrentDelay
    if new_item:
        if _Debug:
            lg.out(
                _DebugLevel, 'http_node.add_contact ADDED "%s" on %s:%s' %
                (idurl, host, port))
    else:
        if _Debug:
            lg.out(
                _DebugLevel, 'http_node.add_contact UPDATED "%s" on %s:%s' %
                (idurl, host, port))
    return idurl
Example #25
def on_delete_file(newpacket):
    # TODO: call verify_packet_ownership()
    if not newpacket.Payload:
        ids = [
            newpacket.PacketID,
        ]
    else:
        ids = strng.to_text(newpacket.Payload).split('\n')
    filescount = 0
    dirscount = 0
    lg.warn('going to erase files: %s' % ids)
    customer_id = global_id.UrlToGlobalID(newpacket.OwnerID)
    for pcktID in ids:
        glob_path = global_id.ParseGlobalID(pcktID)
        if not glob_path['customer']:
            glob_path = global_id.ParseGlobalID(customer_id + ':' + pcktID)
        if not glob_path['path']:
            lg.err("got incorrect PacketID")
            p2p_service.SendFail(newpacket, 'incorrect path')
            return False
        if customer_id != glob_path['customer']:
            lg.warn('trying to delete file stored for another customer')
            continue
        # TODO: add validation of customerGlobID
        # TODO: process requests from another customer
        filename = make_valid_filename(newpacket.OwnerID, glob_path)
        if not filename:
            lg.warn("got empty filename, bad customer or wrong packetID?")
            p2p_service.SendFail(newpacket,
                                 'not a customer, or file not found')
            return False
        if os.path.isfile(filename):
            try:
                os.remove(filename)
                filescount += 1
            except:
                lg.exc()
        elif os.path.isdir(filename):
            try:
                bpio._dir_remove(filename)
                dirscount += 1
            except:
                lg.exc()
        else:
            lg.warn("path not found %s" % filename)


#         if self.publish_event_supplier_file_modified:
#             events.send('supplier-file-modified', data=dict(
#                 action='delete',
#                 glob_path=glob_path['path'],
#                 owner_id=newpacket.OwnerID,
#             ))
    if _Debug:
        lg.dbg(
            _DebugLevel,
            "from [%s] with %d IDs, %d files and %d folders were removed" %
            (newpacket.OwnerID, len(ids), filescount, dirscount))
    p2p_service.SendAck(newpacket)
    return True
Example #26
def register_customer_key(customer_public_key_id, customer_public_key):
    """
    Check/refresh/store customer public key locally.
    """
    if not customer_public_key_id or not customer_public_key:
        lg.warn('customer public key was not provided in the request')
        return False
    customer_public_key_id = my_keys.latest_key_id(customer_public_key_id)
    if my_keys.is_key_registered(customer_public_key_id):
        known_customer_public_key = my_keys.get_public_key_raw(
            customer_public_key_id)
        if known_customer_public_key == customer_public_key:
            lg.info(
                'customer public key %r already known and public key is matching'
                % customer_public_key_id)
        else:
            lg.warn('rewriting customer public key %r' %
                    customer_public_key_id)
            my_keys.erase_key(customer_public_key_id)
    key_id, key_object = my_keys.read_key_info(customer_public_key)
    if not my_keys.register_key(key_id, key_object):
        lg.err('failed to register customer public key: %r' %
               customer_public_key_id)
        return False
    lg.info('new customer public key registered: %r' % customer_public_key_id)
    return True
Example #27
def saveLocalIdentity():
    """
    Save the identity object from memory into a local file.

    Sign the identity, then serialize it and write it to the file.
    """
    global _LocalIdentity
    if not isLocalIdentityReady():
        lg.warn("ERROR local identity not exist!")
        return False
    if not _LocalIdentity.isCorrect():
        lg.warn('local identity is not correct')
        return False
    _LocalIdentity.sign()
    if not _LocalIdentity.Valid():
        lg.err('local identity is not valid')
        return False
    xmlid = _LocalIdentity.serialize(as_text=True)
    filename = bpio.portablePath(settings.LocalIdentityFilename())
    bpio.WriteTextFile(filename, xmlid)
    setTransportOrder(getOrderFromContacts(_LocalIdentity))
    events.send('local-identity-written', data=dict(idurl=_LocalIdentity.getIDURL(), filename=filename))
    if _Debug:
        lg.out(_DebugLevel, "my_id.saveLocalIdentity %d bytes wrote to %s" % (len(xmlid), filename))
    return True
Example #28
def backuptardir_thread(directorypath, arcname=None, recursive_subfolders=True, compress=None):
    """
    Makes a tar archive of a single folder inside a thread.
    Returns a `BytesLoop` instance which can be used to read the produced data in parallel.
    """
    if not bpio.pathIsDir(directorypath):
        lg.err('folder %s not found' % directorypath)
        return None
    if arcname is None:
        arcname = os.path.basename(directorypath)
    p = BytesLoop()

    def _run():
        from storage import tar_file
        ret = tar_file.writetar(
            sourcepath=directorypath,
            arcname=arcname,
            subdirs=recursive_subfolders,
            compression=compress or 'none',
            encoding='utf-8',
            fileobj=p,
        )
        p.mark_finished()
        if _Debug:
            lg.out(_DebugLevel, 'backup_tar.backuptardir_thread writetar() finished')
        return ret

    reactor.callInThread(_run)  # @UndefinedVariable
    return p
Example #29
 def _do_check_sync_keys(self, result):
     from logs import lg
     from interface import api
     from storage import keys_synchronizer
     from userid import global_id
     from userid import my_id
     self.sync_keys_requested = False
     global_keys_folder_path = global_id.MakeGlobalID(
         key_alias='master', customer=my_id.getGlobalID(), path='.keys')
     res = api.file_exists(global_keys_folder_path)
     if res['status'] != 'OK' or not res['result'] or not res['result'].get(
             'exist'):
         res = api.file_create(global_keys_folder_path, as_folder=True)
         if res['status'] != 'OK':
             lg.err(
                 'failed to create ".keys" folder "%s" in the catalog: %r' %
                 (global_keys_folder_path, res))
             result.errback(
                 Exception(
                     'failed to create keys folder "%s" in the catalog: %r'
                     % (global_keys_folder_path, res)))
             return
         lg.info('created new remote folder ".keys" in the catalog: %r' %
                 global_keys_folder_path)
     keys_synchronizer.A('sync', result)
Example #30
 def OnDataReceived(self, newpacket, result):
     #         if result == 'timeout':
     #             packetID = global_id.CanonicalID(newpacket)
     #             if packetID in self.fileRequestDict:
     #                 self.fileRequestDict[packetID].fileReceivedTime = time.time()
     #                 self.fileRequestDict[packetID].result = 'timeout'
     #                 for callBack in self.fileRequestDict[packetID].callOnReceived:
     #                     callBack(None, 'timeout')
     #             return
     # we requested some data from a supplier, just received it
     packetID = global_id.CanonicalID(newpacket.PacketID)
     if self.shutdown:
         # if we're closing down this queue (supplier replaced, don't do anything new)
         if packetID in self.fileRequestDict:
             for callBack in self.fileRequestDict[packetID].callOnReceived:
                 callBack(newpacket, 'shutdown')
         if packetID in self.fileRequestDict:
             del self.fileRequestDict[packetID]
         lg.warn('supplier queue is shutting down')
         return
     if _Debug:
         lg.out(
             _DebugLevel,
             "io_throttle.OnDataReceived  %s with result=[%s]" % (
                 newpacket,
                 result,
             ))
     if packetID in self.fileRequestQueue:
         self.fileRequestQueue.remove(packetID)
         if _Debug:
             lg.out(
                 _DebugLevel,
                 "    removed %s from %s receiving queue, %d more items" %
                 (packetID, self.remoteName, len(self.fileRequestQueue)))
     if newpacket.Command == commands.Data():
         wrapped_packet = signed.Unserialize(newpacket.Payload)
         if not wrapped_packet or not wrapped_packet.Valid():
             lg.err('incoming Data() is not valid')
             return
         if packetID in self.fileRequestDict:
             self.fileRequestDict[packetID].fileReceivedTime = time.time()
             self.fileRequestDict[packetID].result = 'received'
             for callBack in self.fileRequestDict[packetID].callOnReceived:
                 callBack(wrapped_packet, 'received')
     elif newpacket.Command == commands.Fail():
         if packetID in self.fileRequestDict:
             self.fileRequestDict[packetID].fileReceivedTime = time.time()
             self.fileRequestDict[packetID].result = 'failed'
             for callBack in self.fileRequestDict[packetID].callOnReceived:
                 callBack(newpacket, 'failed')
     else:
         lg.err('incorrect response command')
     if packetID in self.fileRequestDict:
         del self.fileRequestDict[packetID]
     if _Debug:
         lg.out(
             _DebugLevel,
             "io_throttle.OnDataReceived %s from %s, queue=%d" %
             (newpacket, self.remoteName, len(self.fileRequestQueue)))
     self.DoRequest()
Example #31
def cancel_outbox_file(proto, host, filename, why=None):
    pkt_out, work_item = packet_out.search(proto, host, filename)
    if pkt_out is None:
        lg.err("gateway.cancel_outbox_file ERROR packet_out not found: %r" % ((proto, host, filename),))
        return None
    pkt_out.automat("cancel", why)
Example #32
 def call(self, method_name, *args):
     method = getattr(self.interface, method_name, None)
     if method is None:
         lg.err('method %s not found in proto %s' % (method_name, self.proto))
         return fail(Exception('Method %s not found in the transport %s interface' % (method_name, self.proto)))
     return method(*args)
Example #33
def inbox(info):
    """
    1) The protocol modules write to temporary files and give us the filename.
    2) We unserialize the data.
    3) We check that the packet is for us.
    4) We check that it is from one of our contacts.
    5) We use signed.validate() to check the signature and that number fields are numbers.
    6) We do any other sanity checks we can, and if anything looks funny we toss out the packet.
    7) We change the filename to the PacketID it should have and call the right function(s)
       for this new packet (encryptedblock, scrubber, remotetester, customerservice, ...)
       to dispatch it to the right place(s).
    8) We keep track of bandwidth to/from everyone and make a report every 24 hours,
       which we send to BitDust sometime in the following 24 hours.
    """
    global _DoingShutdown
    global _LastInboxPacketTime
    if _DoingShutdown:
        if _Debug:
            lg.out(_DebugLevel - 4, "gateway.inbox ignoring input since _DoingShutdown ")
        return None
    if info.filename == "" or not os.path.exists(info.filename):
        lg.err("bad filename=" + info.filename)
        return None
    try:
        data = bpio.ReadBinaryFile(info.filename)
    except:
        lg.err("gateway.inbox ERROR reading file " + info.filename)
        return None
    if len(data) == 0:
        lg.err("gateway.inbox ERROR zero byte file from %s://%s" % (info.proto, info.host))
        return None
    try:
        newpacket = signed.Unserialize(data)
    except:
        lg.err("gateway.inbox ERROR during Unserialize data from %s://%s" % (info.proto, info.host))
        lg.exc()
        return None
    if newpacket is None:
        lg.warn("newpacket from %s://%s is None" % (info.proto, info.host))
        return None
    try:
        Command = newpacket.Command
        OwnerID = newpacket.OwnerID
        CreatorID = newpacket.CreatorID
        PacketID = newpacket.PacketID
        Date = newpacket.Date
        Payload = newpacket.Payload
        RemoteID = newpacket.RemoteID
        Signature = newpacket.Signature
        packet_sz = len(data)
    except:
        lg.err("gateway.inbox ERROR during Unserialize data from %s://%s" % (info.proto, info.host))
        lg.err("data length=" + str(len(data)))
        lg.exc()
        fd, filename = tmpfile.make("other", ".bad")
        os.write(fd, data)
        os.close(fd)
        return None
    _LastInboxPacketTime = time.time()
    if _Debug:
        lg.out(
            _DebugLevel - 8,
            "gateway.inbox [%s] signed by %s|%s (for %s) from %s://%s"
            % (
                Command,
                nameurl.GetName(OwnerID),
                nameurl.GetName(CreatorID),
                nameurl.GetName(RemoteID),
                info.proto,
                info.host,
            ),
        )
    if _Debug and lg.is_debug(_DebugLevel):
        monitoring()
    control.request_update([("packet", newpacket.PacketID)])
    return newpacket