Example #1
    def _go(nodes):
#         lg.out(_DebugLevel, 'Connected nodes: %r' % nodes)
#         lg.out(_DebugLevel, 'DHT node is active, ID=[%s]' % base64.b64encode(node().id))
        try:
            if len(args) == 0:
                pass

            elif len(args) > 0:

                def _r(x):
                    lg.info(x)
                    # reactor.stop()  #@UndefinedVariable

                cmd = args[0]  # the first command-line argument selects the DHT operation
                if cmd == 'get':
                    get_value(args[1]).addBoth(_r)
                elif cmd == 'set':
                    set_value(args[1], args[2], expire=int(args[3])).addBoth(_r)
                elif cmd == 'get_json':
                    get_json_value(args[1]).addBoth(_r)
                elif cmd == 'set_json':
                    set_json_value(args[1], jsn.loads(args[2]),
                                   expire=(int(args[3]) if len(args) >= 4 else 9999)).addBoth(_r)
                elif cmd == 'get_valid_data':
                    get_valid_data(args[1], rules=jsn.loads(args[2]), return_details=True).addBoth(_r)
                elif cmd == 'set_valid_data':
                    set_valid_data(args[1], jsn.loads(args[2]),
                                   expire=(int(args[3]) if len(args) >= 4 else 9999),
                                   rules=jsn.loads(args[4])).addBoth(_r)
                elif cmd == 'read_customer_suppliers':
                    dht_relations.read_customer_suppliers(args[1]).addBoth(_r)
                elif cmd == 'write_customer_suppliers':
                    dht_relations.write_customer_suppliers(args[1], args[2].split(',')).addBoth(_r)
                elif cmd == 'write_verify_republish':
                    write_verify_republish_data(args[1], args[2], expire=int(args[3])).addBoth(_r)
                elif cmd == 'find':
                    find_node(key_to_hash(args[1])).addBoth(_r)
                elif cmd == 'ping':
                    find_node(random_key()).addBoth(_r)
                elif cmd == 'get_node_data':
                    pprint.pprint(get_node_data(args[1]))
                elif cmd == 'observe_data':
                    def _p(val, n):
                        print('observed', n, val)
                    def _o(result):
                        for n in result:
                            d = n.request(args[2])
                            d.addCallback(_p, n)
                    d = find_node(key_to_hash(args[1]))
                    d.addErrback(_r)
                    d.addCallback(_o)
                elif cmd == 'discover':
                    def _l(x):
                        lg.info(x)
                        find_node(random_key()).addBoth(_l)
                    _l('')
                elif cmd == 'dump_db':
                    pprint.pprint(dump_local_db(value_as_json=True))
        except:
            lg.exc()
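
For context, a minimal sketch of how a dispatcher like _go() might be driven, assuming a connect() helper that returns a Twisted Deferred which fires once the DHT layer is up (the helper name and wiring are assumptions, not shown in the example):

import sys
from twisted.internet import reactor  # @UndefinedVariable

args = sys.argv[1:]         # e.g. ['get', 'some_key'], read by _go() above
connect().addCallback(_go)  # assumed helper: fires with the connected DHT nodes
reactor.run()               # _go() dispatches the command and logs the result via _r()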
Example #2
def BytesToDict(inp,
                encoding='latin1',
                errors='strict',
                keys_to_text=False,
                values_to_text=False,
                unpack_types=False):
    """
    A smart way to extract input bytes into python dictionary object.
    All input bytes will be decoded into text and then loaded via `json.loads()` method.
    Finally every text key and value in result dict will be encoded back to bytes if `values_to_text` is False.
    Smart feature `unpack_types` can be used to "extract" real types of keys and values from input bytes.
    Can be used to extract dictionaries of mixed types - binary and text values.   
    """
    if not inp:
        return {}
    _t = strng.to_text(inp, encoding=encoding)
    if values_to_text:
        return jsn.loads_text(_t, encoding=encoding)
    if unpack_types:
        return jsn.unpack_dict(jsn.loads(_t, encoding=encoding),
                               encoding=encoding,
                               errors=errors)
    if keys_to_text:
        return jsn.dict_keys_to_text(jsn.loads(_t, encoding=encoding))
    return jsn.loads(_t, encoding=encoding)
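
A usage sketch; the expected results follow from the docstring above and the round-trip test in Example #7 (values come back as bytes by default, and as text when values_to_text=True):

raw = b'{"user": "alice"}'
BytesToDict(raw)                       # -> {'user': b'alice'}  values restored as bytes
BytesToDict(raw, values_to_text=True)  # -> {'user': 'alice'}   plain text dict
BytesToDict(b'')                       # -> {}                  empty input short-circuits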
Example #3
 def _on_rotate_broker_connected(self, response_info, broker_pos, event,
                                 *args, **kwargs):
     try:
         # skip leading "accepted:" marker
         cooperated_brokers = jsn.loads(
             strng.to_text(response_info[0].Payload)[9:])
         cooperated_brokers.pop('archive_folder_path', None)
         cooperated_brokers = {
             int(k): id_url.field(v)
             for k, v in cooperated_brokers.items()
         }
     except:
         lg.exc()
         self.automat('broker-rotate-failed')
         return
     if _Debug:
         lg.args(_DebugLevel,
                 cooperated=cooperated_brokers,
                 pos=broker_pos,
                 e=event)
     if id_url.is_the_same(cooperated_brokers.get(broker_pos),
                           self.my_broker_idurl):
         self.automat('broker-rotate-accepted',
                      cooperated_brokers=cooperated_brokers)
         return
     self.automat('broker-rotate-rejected',
                  cooperated_brokers=cooperated_brokers)
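
The payload handled above is expected to be the text "accepted:" followed by a JSON object keyed by broker position (inferred from the [9:] slice and the int(k) conversion). The same parsing in isolation, with stdlib json standing in for jsn.loads and plain strings in place of id_url.field():

import json

payload = 'accepted:{"0": "http://somehost/alice.xml", "archive_folder_path": "/tmp/a"}'
cooperated = json.loads(payload[9:])         # drop the 9-character 'accepted:' prefix
cooperated.pop('archive_folder_path', None)  # metadata, not a broker position
cooperated = {int(pos): idurl for pos, idurl in cooperated.items()}
print(cooperated)                            # {0: 'http://somehost/alice.xml'}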
Example #4
def ReadIndex(text_data, encoding='utf-8'):
    """
    Read index data base, ``input`` is a ``StringIO.StringIO`` object which
    keeps the data.

    This is a simple text format, see ``p2p.backup_fs.Serialize()``
    method. The first line keeps revision number.
    """
    global _LoadingFlag
    if _LoadingFlag:
        return False
    _LoadingFlag = True
    backup_fs.Clear()
    count = 0
    try:
        json_data = jsn.loads(
            text_data,
            encoding=encoding,
        )
    except:
        lg.exc()
        _LoadingFlag = False
        return False
    if _Debug:
        lg.args(_DebugLevel, json_data=json_data)
    for customer_id in json_data.keys():
        if customer_id == 'items':
            try:
                count = backup_fs.Unserialize(json_data,
                                              from_json=True,
                                              decoding=encoding)
            except:
                lg.exc()
                _LoadingFlag = False
                return False
        else:
            customer_idurl = global_id.GlobalUserToIDURL(customer_id)
            if not id_url.is_cached(customer_idurl):
                lg.warn(
                    'identity %r is not yet cached, skip reading related catalog items'
                    % customer_idurl)
                identitycache.immediatelyCaching(customer_idurl,
                                                 try_other_sources=False,
                                                 ignore_errors=True)
                continue
            try:
                count = backup_fs.Unserialize(
                    json_data[customer_id],
                    iter=backup_fs.fs(customer_idurl),
                    iterID=backup_fs.fsID(customer_idurl),
                    from_json=True,
                    decoding=encoding,
                )
            except:
                lg.exc()
                _LoadingFlag = False
                return False
    if _Debug:
        lg.out(_DebugLevel, 'backup_control.ReadIndex %d items loaded' % count)
    # local_site.update_backup_fs(backup_fs.ListAllBackupIDsSQL())
    # commit(new_revision)
    _LoadingFlag = False
    return True
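
A hypothetical call site for ReadIndex(); the file path helper and the '{}' fallback are assumptions for illustration only:

raw = local_fs.ReadTextFile(settings.BackupIndexFileName())  # assumed helper names
if not ReadIndex(raw or '{}'):
    lg.warn('failed to load the backup index')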
Example #5
def read_json_response(response, key, result_defer=None, as_bytes=False):
    if _Debug:
        lg.out(_DebugLevel + 6,
               'dht_service.read_json_response [%r] : %r' % (key, response))
    value = None
    if isinstance(response, list):
        if _Debug:
            lg.out(_DebugLevel, '        response is a list, value not found')
        if result_defer:
            result_defer.callback(response)
        return None
    if isinstance(response, dict):
        if response.get('values'):
            try:
                latest = 0
                if as_bytes:
                    value = jsn.loads(response['values'][0][0])
                else:
                    value = jsn.loads_text(response['values'][0][0])
                for v in response['values']:
                    if v[1] > latest:
                        latest = v[1]
                        # respect as_bytes for the newest value as well
                        if as_bytes:
                            value = jsn.loads(v[0])
                        else:
                            value = jsn.loads_text(v[0])
            except:
                lg.exc()
                if _Debug:
                    lg.out(
                        _DebugLevel,
                        '        invalid json value found in DHT, return None')
                if result_defer:
                    result_defer.errback(
                        Exception('invalid json value found in DHT'))
                return None
        else:
            if _Debug:
                lg.out(
                    _DebugLevel,
                    '        response is a dict, "values" field is empty, value not found'
                )
            if result_defer:
                result_defer.callback(response.get('activeContacts', []))
            return None
    if _Debug:
        lg.out(_DebugLevel, '        response is a dict, value is OK')
    if result_defer:
        result_defer.callback(value)
    return value
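
Each entry in response['values'] is a (serialized_value, timestamp, ...) tuple, as inferred from the indexing above; the newest-timestamp-wins selection in isolation, with stdlib json standing in for jsn.loads_text and made-up data:

import json

values = [('{"rev": 1}', 100), ('{"rev": 3}', 300), ('{"rev": 2}', 200)]
latest, value = 0, None
for v in values:
    if v[1] > latest:             # keep the entry with the newest timestamp
        latest = v[1]
        value = json.loads(v[0])
print(value)                      # {'rev': 3}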
Example #6
def BytesToDict(inp, encoding='latin1'):
    """
    """
    return jsn.loads(
        strng.to_text(
            inp,
            encoding=encoding,
        ),
        encoding=encoding,
    )
Example #7
 def test_jsn(self):
     data1 = os.urandom(1024)
     dct1 = {
         'd': {
             'data': data1,
         },
     }
     raw = jsn.dumps(dct1, encoding='latin1')
     dct2 = jsn.loads(raw, encoding='latin1')
     data2 = dct2['d']['data']
     self.assertEqual(data1, data2)
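
The test works because latin1 maps every byte value 0-255 to a distinct code point, so arbitrary binary data survives the decode/encode round trip hidden inside jsn.dumps()/jsn.loads(); a standalone illustration:

blob = bytes(range(256))              # every possible byte value
text = blob.decode('latin1')          # lossless: one code point per byte
assert text.encode('latin1') == blob  # the round trip restores the exact bytes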
Example #8
def ReadIndex(text_data, encoding='utf-8'):
    """
    Read index data base, ``input`` is a ``StringIO.StringIO`` object which
    keeps the data.

    This is a simple text format, see ``p2p.backup_fs.Serialize()``
    method. The first line keeps revision number.
    """
    global _LoadingFlag
    if _LoadingFlag:
        return False
    _LoadingFlag = True
    backup_fs.Clear()
    count = 0
    try:
        json_data = jsn.loads(
            text_data,
            encoding=encoding,
        )
    except:
        lg.exc()
        _LoadingFlag = False
        return False
    if _Debug:
        import pprint
        lg.out(_DebugLevel, pprint.pformat(json_data))
    for customer_id in json_data.keys():
        if customer_id == 'items':
            try:
                count = backup_fs.Unserialize(json_data,
                                              from_json=True,
                                              decoding=encoding)
            except:
                lg.exc()
                _LoadingFlag = False
                return False
        else:
            customer_idurl = global_id.GlobalUserToIDURL(customer_id)
            try:
                count = backup_fs.Unserialize(
                    json_data[customer_id],
                    iter=backup_fs.fs(customer_idurl),
                    iterID=backup_fs.fsID(customer_idurl),
                    from_json=True,
                    decoding=encoding,
                )
            except:
                lg.exc()
                _LoadingFlag = False
                return False
    if _Debug:
        lg.out(_DebugLevel, 'backup_control.ReadIndex %d items loaded' % count)
    # local_site.update_backup_fs(backup_fs.ListAllBackupIDsSQL())
    # commit(new_revision)
    _LoadingFlag = False
    return True
Example #9
def BytesToDict(inp, encoding='latin1'):
    """
    A smart way to extract input bytes into python dictionary object.
    bytes will be encoded into text and then loaded via `json.loads()` method.
    Finally every text value in result dict will be encoded back to bytes.
    """
    return jsn.loads(
        strng.to_text(
            inp,
            encoding=encoding,
        ),
        encoding=encoding,
    )
Example #10
def load_customers(path=None):
    """
    Load customers list from disk.
    """
    global _CustomersMetaInfo
    if path is None:
        path = settings.CustomerIDsFilename()
    lst = bpio._read_list(path)
    if lst is None:
        lst = list()
    lst = list(map(strng.to_bin, lst))
    set_customers(lst)
    _CustomersMetaInfo = jsn.loads(
        local_fs.ReadTextFile(settings.CustomersMetaInfoFilename()) or '{}')
    lg.out(4, 'contactsdb.load_customers %d items' % len(lst))
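
The loader expects two files: the customer IDs list read by bpio._read_list() (presumably one IDURL per line) and a meta-info file holding a single JSON object, with '{}' as the fallback when the file is empty. A made-up illustration of the expected contents:

import json

customer_ids_text = 'http://somehost/alice.xml\nhttp://somehost/bob.xml\n'
meta_info_text = ''                                      # an absent or empty file is tolerated
lst = [s for s in customer_ids_text.splitlines() if s.strip()]
meta = json.loads(meta_info_text or '{}')                # same fallback as in load_customers()
print(len(lst), 'customers,', len(meta), 'meta records') # 2 customers, 0 meta records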
Example #11
def read_json_response(response, key, result_defer=None):
    value = None
    if isinstance(response, list):
        if result_defer:
            result_defer.callback(response)
        return None
    if isinstance(response, dict):
        try:
            value = jsn.loads(response[key])
        except:
            lg.exc()
            if result_defer:
                result_defer.errback(
                    Exception('invalid json value found in DHT'))
            return None
    if result_defer:
        result_defer.callback(value)
    return value
Example #12
def load_customers(path=None):
    """
    Load customers list from disk.
    """
    global _CustomersMetaInfo
    if path is None:
        path = settings.CustomerIDsFilename()
    lst = bpio._read_list(path)
    if lst is None:
        lst = list()
    lst = list(filter(id_url.is_cached, lst))
    set_customers(lst)
    _CustomersMetaInfo = jsn.loads(
        local_fs.ReadTextFile(settings.CustomersMetaInfoFilename()) or '{}',
        keys_to_bin=True,
    )
    _CustomersMetaInfo = id_url.to_bin_dict(_CustomersMetaInfo)
    _CustomersMetaInfo = jsn.dict_values_to_text(_CustomersMetaInfo)
    if _Debug:
        lg.out(_DebugLevel, 'contactsdb.load_customers %d items' % len(lst))
Example #13
 def _on_new_broker_hired(self, response_info, broker_pos, my_pos,
                          desired_pos, *args, **kwargs):
     try:
         # skip leading "accepted:" marker
         cooperated_brokers = jsn.loads(
             strng.to_text(response_info[0].Payload)[9:])
         cooperated_brokers.pop('archive_folder_path', None)
         cooperated_brokers = {
             int(k): id_url.field(v)
             for k, v in cooperated_brokers.items()
         }
     except:
         lg.exc()
         self.automat('hire-broker-failed')
         return
     if _Debug:
         lg.args(_DebugLevel,
                 cooperated=cooperated_brokers,
                 target=broker_pos,
                 my=my_pos,
                 desired=desired_pos,
                 args=args,
                 kwargs=kwargs)
     if my_pos >= 0:
         if id_url.is_the_same(cooperated_brokers.get(my_pos),
                               self.my_broker_idurl):
             self.automat('hire-broker-ok',
                          cooperated_brokers=cooperated_brokers)
             return
     if desired_pos >= 0:
         if id_url.is_the_same(cooperated_brokers.get(desired_pos),
                               self.my_broker_idurl):
             self.automat('hire-broker-ok',
                          cooperated_brokers=cooperated_brokers)
             return
     lg.warn(
         'new broker is not cooperative, my idurl was not found at the expected position in the cooperation'
     )
     self.automat('new-broker-rejected',
                  cooperated_brokers=cooperated_brokers)
Example #14
def validate_before_store(key, value, originalPublisherID, age, expireSeconds, **kwargs):
    """
    Will be executed on receiver side for each (key,value) set request on DHT
    """
    try:
        json_new_value = jsn.loads(value)
    except:
        # not a json data to be written - this is not valid
        lg.exc()
        raise ValueError('input data is not a json value: %r' % value)
    if _Debug:
        lg.out(_DebugLevel + 8, 'dht_service.validate_before_store key=[%s] json=%r' % (key, json_new_value, ))
    new_record_type = json_new_value.get('type')
    if not new_record_type:
        if _Debug:
            lg.out(_DebugLevel, '        new json data does not have the "type" field present, store operation FAILED')
        raise ValueError('input data does not have the "type" field present: %r' % json_new_value)
    if key not in node()._dataStore:
        if _Debug:
            lg.out(_DebugLevel, '        previous value does not exist yet, store OK')
        return True
    prev_value = node()._dataStore[key]
    try:
        json_prev_value = jsn.loads(prev_value)
    except:
        if _Debug:
            lg.out(_DebugLevel, '        current value in DHT is not a json data, will be overwritten, store OK')
        return True
    prev_record_type = json_prev_value.get('type')
    if prev_record_type and prev_record_type != new_record_type:
        if _Debug:
            lg.out(_DebugLevel, '        new json data type does not match the existing record type, store operation FAILED')
        raise ValueError('new json data type does not match the existing record type: %r' % json_prev_value)
    # TODO: need to include the "key" field into the DHT record and validate it as well
    # new_record_key = json_new_value.get('key')
    # if not new_record_key:
    #     if _Debug:
    #         lg.out(_DebugLevel, '        new json data do not have "key" field present, store operation FAILED')
    #     return False
    # if new_record_key != key:
    #     if _Debug:
    #         lg.out(_DebugLevel, '        new json data do not have "key" field set properly, store operation FAILED')
    #     return False
    # prev_record_key = json_prev_value.get('key')
    # if prev_record_key and prev_record_key != new_record_key:
    #     if _Debug:
    #         lg.out(_DebugLevel, '        new json data "key" field do not match to existing record "key", store operation FAILED')
    #     return False
    try:
        prev_revision = int(json_prev_value['revision'])
    except:
        prev_revision = -1
    try:
        new_revision = int(json_new_value['revision'])
    except:
        new_revision = -1
    if prev_revision >= 0:
        if new_revision < 0:
            if _Debug:
                lg.out(_DebugLevel, '        new json data must have a revision, store operation FAILED')
            raise ValueError('new json data must have a revision')
        if new_revision < prev_revision:
            if _Debug:
                lg.out(_DebugLevel, '        new json data must increment the revision number, store operation FAILED')
            raise ValueError('new json data must increment the revision number, current revision is %d' % prev_revision)
        if new_revision == prev_revision:
            if prev_record_type == 'suppliers':
                prev_ecc_map = json_prev_value.get('ecc_map')
                new_ecc_map = json_new_value.get('ecc_map')
                if prev_ecc_map and new_ecc_map != prev_ecc_map:
                    if _Debug:
                        lg.out(_DebugLevel, '        new json data has the same revision but a different ecc_map, store operation FAILED')
                    raise ValueError('new json data has the same revision but a different ecc_map, current revision is %d' % prev_revision)
                prev_suppliers = [strng.to_bin(idurl.strip()) for idurl in json_prev_value.get('suppliers', [])]
                new_suppliers = [strng.to_bin(idurl.strip()) for idurl in json_new_value.get('suppliers', [])]
                if prev_suppliers != new_suppliers:
                    if _Debug:
                        lg.out(_DebugLevel, '        new json data has the same revision but a different suppliers list, store operation FAILED')
                    raise ValueError('new json data has the same revision but a different suppliers list, current revision is %d' % prev_revision)
    if _Debug:
        lg.out(_DebugLevel, '        new json data is valid and matching existing DHT record, store OK')
    return True
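
A standalone re-expression of the revision rules enforced above, with stdlib json and made-up records (this is an illustration of the checks, not the validator itself):

import json

prev = json.loads('{"type": "suppliers", "revision": 5, "ecc_map": "ecc/18x18"}')

def check(new):
    if new.get('type') != prev.get('type'):
        return 'FAILED: record type changed'
    if int(new.get('revision', -1)) < int(prev['revision']):
        return 'FAILED: revision may never go down'
    if int(new['revision']) == int(prev['revision']) and new.get('ecc_map') != prev.get('ecc_map'):
        return 'FAILED: same revision but different ecc_map'
    return 'store OK'

print(check({'type': 'suppliers', 'revision': 6, 'ecc_map': 'ecc/18x18'}))  # store OK
print(check({'type': 'suppliers', 'revision': 4, 'ecc_map': 'ecc/18x18'}))  # FAILED: revision may never go down
print(check({'type': 'relations', 'revision': 6}))                          # FAILED: record type changed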