Example #1
class ToolsLib(object):
    
    log = None
    artifactPath = "install"
    
    def __init__(self):
        
        self.log = Logger()
        
    def findFile(self, needle, extension):

        # Find the zip file (We have to find like this because the date changes)
        found = False
        for file in os.listdir(self.artifactPath):            
            if needle in file and re.search(extension + "$", file, re.IGNORECASE):
                found = True
                break
        
        if not found:
            self.log.debug("files=%s" % ','.join(os.listdir(self.artifactPath)))
            raise Exception("Unable to find a zip file that has [%s] in the name with [%s] extension" % (needle, extension))
        
        # Get the name of the file minus the extension
        reobj = re.compile(r"(?P<filename>.*?)(?P<ext>\.[0-9a-z]*$)", re.IGNORECASE)
        return reobj.search(file).group("filename")
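
A quick usage sketch (the artifact file name below is made up): with install/ containing "myapp-2015-06-01.zip", the call returns the file name with the date still in it but the extension stripped.

    tools = ToolsLib()
    base = tools.findFile("myapp", "zip")
    # base == "myapp-2015-06-01"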
Example #2
    def run(self, options):
        """
        Runs the application. 'options' holds the parsed command-line options.
        """
        
        # Setup logging
        if options.debug:
            Logger.set_log_level(Logger.DEBUG)
        
        # Load configuration
        self.config = ConfigParser.RawConfigParser()
        self.config.read(options.config)

        MidiEngine.initialize()
        
        Logger.debug("Command line options: %s" % (options))

        self.setup()
        
        if options.list_devices:
            self.list_devices()
        elif options.interactive:
            self.interactive_mode()
        else:
            self.loop()
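
A minimal sketch of driving run() from a command line. The parser below is an assumption that only mirrors the attributes the method reads (config, debug, list_devices, interactive), and `app` stands in for whatever class defines run().

    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument("--config", default="app.cfg")
    parser.add_argument("--debug", action="store_true")
    parser.add_argument("--list-devices", dest="list_devices", action="store_true")
    parser.add_argument("--interactive", action="store_true")

    app.run(parser.parse_args())  # `app` is a hypothetical instance of the class above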
Example #3
    def __set_state(self, state):
        self.__state = state
        
        Logger.debug("Switching to state %s" % (self.state_names[self.__state]))

        if self.__state == self.STATE_NORMAL:
            self.__update_levels(127, 0, 0)
            self.send_ctrl('master', 127)
            time.sleep(self.SWITCH_INTERVAL)
            self.send_ctrl('send1', 0)
            self.send_ctrl('send2', 0)
        elif self.__state == self.STATE_MUTE:
            self.__update_levels(0, 0, 0)
            self.send_ctrl('master', 0)
            self.send_ctrl('send1', 0)
            self.send_ctrl('send2', 0)
        elif self.__state == self.STATE_FX1:
            self.__update_levels(0, 127, 0)
            self.send_ctrl('send1', 127)
            time.sleep(self.SWITCH_INTERVAL)
            self.send_ctrl('master', 0)
            self.send_ctrl('send2', 0)
        elif self.__state == self.STATE_FX2:
            self.__update_levels(0, 0, 127)
            time.sleep(self.SWITCH_INTERVAL)
            self.send_ctrl('send2', 127)
            self.send_ctrl('master', 0)
            self.send_ctrl('send1', 0)
            
        self.send_ctrl('selector', (self.__state / 3.0) * 127.0)
Example #4
class MarketProtocol(RPCProtocol):
    implements(MessageProcessor)

    def __init__(self, node_proto, router):
        self.router = router
        RPCProtocol.__init__(self, node_proto, router)
        self.log = Logger(system=self)
        self.handled_commands = [GET_CONTRACT, GET_IMAGE]
        self.multiplexer = None
        self.hashmap = HashMap()

    def connect_multiplexer(self, multiplexer):
        self.multiplexer = multiplexer

    def rpc_get_contract(self, sender, contract_hash):
        self.log.info("Looking up contract ID %s" % contract_hash.encode('hex'))
        self.router.addContact(sender)
        try:
            with open(self.hashmap.get_file(contract_hash), "r") as file:
                contract = file.read()
            return [contract]
        except:
            return ["None"]

    def rpc_get_image(self, sender, image_hash):
        self.log.info("Looking up image with hash %s" % image_hash.encode('hex'))
        self.router.addContact(sender)
        try:
            with open(self.hashmap.get_file(image_hash), "r") as file:
                image = file.read()
            return [image]
        except:
            return ["None"]

    def callGetContract(self, nodeToAsk, contract_hash):
        address = (nodeToAsk.ip, nodeToAsk.port)
        d = self.get_contract(address, contract_hash)
        return d.addCallback(self.handleCallResponse, nodeToAsk)

    def callGetImage(self, nodeToAsk, image_hash):
        address = (nodeToAsk.ip, nodeToAsk.port)
        d = self.get_image(address, image_hash)
        return d.addCallback(self.handleCallResponse, nodeToAsk)

    def handleCallResponse(self, result, node):
        """
        If we get a response, add the node to the routing table.  If
        we get no response, make sure it's removed from the routing table.
        """
        if result[0]:
            self.log.info("got response from %s, adding to router" % node)
            self.router.addContact(node)
        else:
            self.log.debug("no response from %s, removing from router" % node)
            self.router.removeContact(node)
        return result

    def __iter__(self):
        return iter(self.handled_commands)
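
A hedged sketch of asking a peer for a contract with the protocol above. `proto`, `node` and `contract_hash` are placeholders, and the (responded, value) layout of the Deferred result is an assumption based on how handleCallResponse inspects result[0].

    def on_contract(result):
        responded, value = result  # assumed layout: (bool, [payload])
        if responded:
            print("contract: %s" % value[0])
        else:
            print("peer did not respond")

    proto.callGetContract(node, contract_hash).addCallback(on_contract)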
Example #5
File: rating.py  Project: pscn/ads
    def by_key(key):
        if key in RatingFactory.__lib:
            return RatingFactory.__lib[key]
        Logger.debug(u"Rating.by_key((%s, %s, %s, %s, %s, %s, %s, %s, %s)): create from key" % (
            key[0], key[1], key[2], key[3], key[4], key[5], key[6], key[7],
            key[8]))
        return RatingFactory.get(key[0], key[1], key[2], key[3], key[4], key[5],
                                 key[6], key[7], key[8])
Example #6
    def __receive(self, input, msg):
        # Filter message if it does not belong to the configured MIDI channel
        if msg.getChannel() != self.channel:
            return
        Logger.debug("[%s] Received message: %s" % (self.name, MidiEngine.dump_msg(msg)))
        # Dispatch received message
        if self.receive:
            self.receive(self, msg)
Example #7
    def load(input):
        for i in range(len(Factories.__lib)):
            Logger.debug(u"Factories.load(): loading %s" % i)
            k = pickle.load(input)
            if k != i:
                # FIXME: we should die here
                Logger.error(u"Factories.load(): key mismatch %d vs %d" % (i, k))
            Factories.__lib[k].setstate(pickle.load(input))
Example #8
    def __init__(self, name, input, output, channel):
        Logger.debug("[%s] Initializing channel: input='%s', output='%s, channel=%d" % (name, input, output, channel))
        self.name = name
        self.input = MidiEngine.get_input_by_name(input)
        self.output = MidiEngine.get_output_by_name(output)
        self.channel = channel
        self.receive = None

        if self.input:
            self.input.receive = self.__receive
Example #9
    def __switch(self, fx):
        self.__state[fx] = not self.__state[fx]
        Logger.debug("Switching state of fx%d to %d" % (fx, self.__state[fx]))

        if self.__state[fx]:
            value = 127
        else:
            value = 0

        self.send_ctrl(['fx1', 'fx2', 'fx3', 'fx4'][fx], value)
        self.set_led(['led1', 'led2', 'led3', 'led4'][fx], self.__state[fx])
Example #10
class Server(object):
    def __init__(self, kserver, signing_key, database):
        """
        A high level class for sending direct, market messages to other nodes.
        A node will need one of these to participate in buying and selling.
        Should be initialized after the Kademlia server.
        """
        self.kserver = kserver
        self.signing_key = signing_key
        self.router = kserver.protocol.router
        self.db = database
        self.log = Logger(system=self)
        self.protocol = MarketProtocol(kserver.node, self.router, signing_key, database)

        # TODO: we need a loop here that republishes keywords when they are about to expire

        # TODO: we also need a loop here to delete expiring contract (if they are set to expire)

    def querySeed(self, list_seed_pubkey):
        """
        Query an HTTP seed for known vendors and save the vendors to the db.

        Args:
            list_seed_pubkey: a list of one or more (seed, pubkey) tuples, e.g. [(seed, pubkey)], where
                seed: a `string` of the form "ip:port" or "hostname:port"
                pubkey: the hex encoded public key used to verify the signature on the response
        """

        for sp in list_seed_pubkey:
            seed, pubkey = sp
            try:
                self.log.debug("querying %s for vendors" % seed)
                c = httplib.HTTPConnection(seed)
                c.request("GET", "/?type=vendors")
                response = c.getresponse()
                self.log.debug("Http response from %s: %s, %s" % (seed, response.status, response.reason))
                data = response.read()
                reread_data = data.decode("zlib")
                proto = peers.PeerSeeds()
                proto.ParseFromString(reread_data)
                verify_key = nacl.signing.VerifyKey(pubkey, encoder=nacl.encoding.HexEncoder)
                verify_key.verify("".join(proto.serializedNode), proto.signature)
                v = self.db.VendorStore()
                for peer in proto.serializedNode:
                    try:
                        n = objects.Node()
                        n.ParseFromString(peer)
                        v.save_vendor(n.guid.encode("hex"), peer)
                    except Exception:
                        pass
            except Exception as e:
                self.log.error("failed to query seed: %s" % str(e))
Example #11
class HeartbeatProtocol(protocol.Protocol):
    """
    For listening on the libbitcoin server heartbeat port
    """
    def __init__(self, libbitcoin_client):
        self.libbitcoin_client = libbitcoin_client
        self.timeout = reactor.callLater(7, self.call_timeout)
        self.log = Logger(system=self)

    def call_timeout(self):
        self.log.critical("Libbitcoin server offline")
        self.libbitcoin_client.connected = False

    def dataReceived(self, data):
        self.log.debug("libbitcoin heartbeat")
        self.timeout.cancel()
        self.libbitcoin_client.connected = True
        self.transport.loseConnection()
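
A hedged sketch of wiring this protocol up with Twisted endpoints; the host, port and the client stub are assumptions made so the snippet stands alone.

    from twisted.internet import reactor
    from twisted.internet.endpoints import TCP4ClientEndpoint, connectProtocol

    class ClientStub(object):      # stand-in for the real libbitcoin client object
        connected = False

    endpoint = TCP4ClientEndpoint(reactor, "127.0.0.1", 9092)  # heartbeat host/port assumed
    connectProtocol(endpoint, HeartbeatProtocol(ClientStub()))
    reactor.run()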
Example #12
class MarketProtocol(RPCProtocol):
    implements(MessageProcessor)

    def __init__(self, node_proto, router):
        self.router = router
        RPCProtocol.__init__(self, node_proto, router)
        self.log = Logger(system=self)
        self.handled_commands = [GET_CONTRACT]

    def connect_multiplexer(self, multiplexer):
        self.multiplexer = multiplexer

    def rpc_get_contract(self, sender, contract_hash):
        self.log.info("Looking up contract ID" % long(contract_hash.encode('hex'), 16))
        self.router.addContact(sender)
        try:
            with open(get_data_folder() + "/Store/Listings/Contracts/" + str(long(contract_hash.encode('hex'), 16)) + '.json') as contract_file:
                data = json.load(contract_file)
                return str(data)
        except:
            return "None"

    def call_get_contract(self, nodeToAsk, contract_hash):
        address = (nodeToAsk.ip, nodeToAsk.port)
        d = self.get_contract(address, contract_hash)
        return d.addCallback(self.handleCallResponse, nodeToAsk)

    def handleCallResponse(self, result, node):
        """
        If we get a response, add the node to the routing table.  If
        we get no response, make sure it's removed from the routing table.
        """
        if result[0]:
            self.log.info("got response from %s, adding to router" % node)
            self.router.addContact(node)
        else:
            self.log.debug("no response from %s, removing from router" % node)
            self.router.removeContact(node)
        return result

    def __iter__(self):
        return iter(self.handled_commands)
Example #13
class Crawler(threading.Thread):
    def __init__(self, cookie, manager):
        threading.Thread.__init__(self)
        cookie_handler = urllib2.HTTPCookieProcessor(cookie)
        self._opener = urllib2.build_opener(cookie_handler)
        self._manager = manager
        self._logger = Logger(u"crawler.log")

    def run(self):
        while True:
            try:
                # fetch url
                url = self._manager.get_url()
                self._logger.debug("Fetch a url " + url)
                # fetch page
                request = urllib2.Request(url)
                page = self._opener.open(request)
                self._logger.debug("Fetch the page for url " + url)
                # insert page
                while True:
                    try:
                        self._manager.insert_page(url, page)
                        self._logger.debug("Insert the page")
                        break
                    except PageQueueFullForNowError:
                        time.sleep(0.1)
            except urllib2.URLError:
                self._manager.insert_page(url, None)
                self._logger.error("URLError for " + url)
            except UrlQueueEmptyForNowError:
                time.sleep(0.1)
            except NoUrlToCrawlError:
                break
Example #14
    def validate_letter_target(params):
        try:
            target = params['target']
            if target not in ['public', 'friend', 'subscriber', 'me', 'users', 'email']:
                raise MultipleInvalid('hello')

            elif target == 'public':
                assert isinstance(params['category_id'], int)
                assert params['category_id'] > 0
            elif target == 'users':
                assert isinstance(params['user_ids'], list)
                assert len(params['user_ids']) > 0
                for id in params['user_ids']:
                    assert isinstance(id, int)
            elif target == 'email':
                Logger.debug('x')
                assert isinstance(params['email'], (unicode, str))

            return True
        except Exception as e:
            Logger.debug('y')
            raise MultipleInvalid(str(e))
Example #15
    def handle(self):
        while True:
            self.data = self.request.recv(1024).strip()
            # cur_thread = threading.currentThread()
            info = "RECV from ", self.client_address[0], self.client_address[1]
            Logger.info(info)
            if self.data == None or len(self.data) == 0:
                Logger.error("empty command")
                break
            Logger.debug(self.data)
            # business logic here
            try:
                cmd = command.decode(self.data)
            except:
                Logger.error("format error")
                break

            #            try:
            response = application.dispatch(cmd)
            Logger.debug(response)
            self.send(response)
            if response["code"] == -1:
                # server.shutdown()
                break
Example #16
class SpiderCrawl(object):
    """
    Crawl the network and look for given 160-bit keys.
    """

    def __init__(self, protocol, node, peers, ksize, alpha):
        """
        Create a new C{SpiderCrawl}er.

        Args:
            protocol: A :class:`~kademlia.protocol.KademliaProtocol` instance.
            node: A :class:`~kademlia.node.Node` representing the key we're looking for
            peers: A list of :class:`~kademlia.node.Node` instances that provide the entry point for the network
            ksize: The value for k based on the paper
            alpha: The value for alpha based on the paper
        """
        self.protocol = protocol
        self.ksize = ksize
        self.alpha = alpha
        self.node = node
        self.nearest = NodeHeap(self.node, self.ksize)
        self.lastIDsCrawled = []
        self.log = Logger(system=self)
        self.log.debug("creating spider with peers: %s" % peers)
        self.nearest.push(peers)

    def _find(self, rpcmethod):
        """
        Get either a value or list of nodes.

        Args:
            rpcmethod: The protocol's callFindValue or callFindNode.

        The process:
          1. call find_* on the current ALPHA nearest nodes not already queried,
             adding the results to the current nearest list of k nodes.
          2. the current nearest list needs to keep track of who has been queried
             already; sort by nearest, keep KSIZE.
          3. if the list is the same as last time, the next call should go to
             everyone not yet queried.
          4. repeat until everyone in the nearest list has been queried; then you're done.
        """
        self.log.debug("crawling with nearest: %s" % str(tuple(self.nearest)))
        count = self.alpha
        if self.nearest.getIDs() == self.lastIDsCrawled:
            self.log.debug("last iteration same as current - checking all in list now")
            count = len(self.nearest)
        self.lastIDsCrawled = self.nearest.getIDs()

        ds = {}
        for peer in self.nearest.getUncontacted()[:count]:
            ds[peer.id] = rpcmethod(peer, self.node)
            self.nearest.markContacted(peer)
        return deferredDict(ds).addCallback(self._nodesFound)
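
The _find() docstring above spells out the crawl bookkeeping; the snippet below is a self-contained sketch of that loop using plain integers as node IDs and XOR as the distance, which is an assumption made for illustration only. query(peer, target) is a hypothetical stand-in for the callFindNode/callFindValue RPC.

    ALPHA, KSIZE = 3, 8

    def crawl(target, peers, query):
        distance = lambda p: p ^ target          # XOR metric over integer IDs
        nearest = sorted(set(peers), key=distance)[:KSIZE]
        contacted = set()
        last_snapshot = None
        while True:
            uncontacted = [p for p in nearest if p not in contacted]
            if not uncontacted:
                return nearest                   # everyone queried: done
            # same nearest list as last round -> widen the fan-out to everyone left
            count = len(nearest) if nearest == last_snapshot else ALPHA
            last_snapshot = list(nearest)
            found = []
            for peer in uncontacted[:count]:
                contacted.add(peer)
                found.extend(query(peer, target))  # the peer's own closest nodes
            nearest = sorted(set(nearest) | set(found), key=distance)[:KSIZE]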
Example #17
class User(object):
    def __init__(self):
        self.httpRequest = HttpRequest()
        self.httpParser = HttpParser()
        self.redisConn = RedisConnect()
        self.logger = Logger()
        self.user_file = open("/opt_c/dianping/file/user_urls_4.txt", "a")
        #self.mysqlConn = MysqlClient("127.0.0.1","root","homelink",'dianping',3306)
        self.mysqlConn = MysqlPool()

    def saveHtml(self, url, param, html, page):
        id = re.findall('[0-9]+', url)[0]
        print(id)
        #	path = '/Users/homelink/dianping/html/'+param+'/'+id[0:3]+'/'+id[3:6]+'/'
        path = '/opt_c/dianping/html/' + param + '/' + id[0:3] + '/' + id[
            3:6] + '/'
        if not os.path.exists(path):
            os.makedirs(path)
        html_path = path + id + '_' + param + '_' + str(page) + '.txt'
        f = open(html_path, "a")
        f.write(html)
        f.flush()

    def run(self):
        while self.redisConn.scard("dianping::store") > 0:
            #		while self.redisConn.scard("test")>0:
            store = self.redisConn.pop("dianping::store")
            #			store = self.redisConn.pop("test")
            url = "http://www.dianping.com" + store + '/review_more'
            print(url)
            self.logger.info(url)
            dic_list = [
                "user_url", "user_name", "user_image", "user_level",
                "create_time", "update_time"
            ]
            postDic = {}

            page = 0
            count = 20

            while count == 20:
                page = page + 1
                count = 0
                try:
                    print(url + '?pageno=' + str(page))
                    html, code = self.httpRequest.get(url + '?pageno=' +
                                                      str(page))
                    print(code)
                    if code == 404 or code == 403 or code == 429:
                        print("match error stop !!!")
                        self.redisConn.sadd("failed::store::user_1",
                                            url + '?pageno=' + str(page))
                        self.redisConn.sadd("failed::store", store)
                        time.sleep(60 * 20)
                    else:
                        #	time.sleep(random.randint(3,6))
                        time.sleep(2)
                        self.saveHtml(store, "user", html, page)
                        print(url + '?pageno=' + str(page))
                        self.logger.info(url + '?pageno=' + str(page))
                        sites = self.httpParser.parseNode(
                            html, '//div[@class="comment-list"]/ul/li')
                        print(sites[0])
                        for site in sites:
                            user_url = site.xpath('div/a/@href')
                            print(user_url[0])
                            self.redisConn.sadd("dianping::review::user",
                                                *user_url)
                            self.redisConn.sadd("dianping::wish::user",
                                                *user_url)
                            self.redisConn.sadd("dianping::checkin::user",
                                                *user_url)
                            user_name = site.xpath('div/p/a/text()')
                            user_image = site.xpath('div/a/img/@src')
                            user_level = site.xpath('div/p[2]/span/@class')
                            postDic["user_url"] = user_url[0]
                            postDic["user_name"] = user_name[0].replace(
                                "'", "")
                            postDic["user_image"] = user_image[0]
                            postDic["store"] = store
                            if user_level:
                                postDic["user_level"] = user_level[0]
                            else:
                                postDic["user_level"] = ''
                            postDic["create_time"] = time.strftime(
                                '%Y-%m-%d %H:%M:%S',
                                time.localtime(time.time()))
                            postDic["update_time"] = time.strftime(
                                '%Y-%m-%d %H:%M:%S',
                                time.localtime(time.time()))
                            count = len(sites)
                            #		self.mysqlConn.insert(dic_list,"user",**postDic)
                            line = json.dumps(dict(postDic),
                                              ensure_ascii=False)
                            self.user_file.write(line + '\n')
                            self.user_file.flush()
                        self.redisConn.sadd("success::store::user",
                                            url + '?pageno=' + str(page))
                        self.redisConn.sadd("success::store", store)

                except:
                    #	self.redisConn.sadd("failed::store::user",url+'?pageno='+str(page))
                    print(sys.exc_info())
                    self.redisConn.sadd("failed::store::user_1",
                                        url + '?pageno=' + str(page))
                    self.redisConn.sadd("failed::store", store)
                    self.logger.debug("start UserUrl:" + url + ' error :' +
                                      str(sys.exc_info()[0]) + ',' +
                                      str(sys.exc_info()[1]) + ',' +
                                      str(sys.exc_info()[2]))
                time.sleep(10)

    def UserReviewTrade(self):
        while self.redisConn.scard("dianping::review::user") > 0:
            #	while self.redisConn.scard("test")>0:
            user = self.redisConn.pop("dianping::review::user")
            #	user = self.redisConn.pop("test")
            url = "http://www.dianping.com" + user
            print(url)
            postDic = {}
            dic_list = [
                "user_url", "user_name", "store_name", "store_url",
                "store_score", "store_location", "review_time", "crawl_time",
                "create_time", "update_time"
            ]

            count = 15
            page = 1

            while count == 15:
                try:
                    review_html, code = self.httpRequest.get(
                        url + '/reviews' + '?pg=' + str(page) + '&reviewCityId=2')
                    print(url + '/reviews' + '?pg=' + str(page) +
                          '&reviewCityId=2')
                    sites = self.httpParser.parseHref(
                        review_html,
                        '//div[@id="J_review"]/div[@class="pic-txt"]/ul/li')
                    for site in sites:
                        store_url = site.xpath('div/div[1]/h6/a/@href')
                        print(store_url)
                        store_name = site.xpath('div/div[1]/h6/a/text()')
                        print(store_name[0])
                        store_score = site.xpath(
                            'div/div[2]/div[2]/span/@class')
                        store_location = site.xpath(
                            'div/div[2]/div[1]/p/text()')
                        review_time = site.xpath(
                            'div/div[2]/div[@class="mode-tc info"]/span[1]/text()'
                        )
                        review_crawl_time = time.strftime(
                            '%Y-%m-%d %H:%M:%S', time.localtime(time.time()))
                        postDic["user_url"] = user
                        postDic["user_name"] = ''
                        postDic["store_name"] = store_name[0]
                        postDic["store_url"] = store_url[0]
                        postDic["store_score"] = store_score[0]
                        postDic["store_location"] = store_location[0]
                        postDic["review_time"] = review_time[0]
                        postDic["crawl_time"] = review_crawl_time[0]
                        postDic["create_time"] = time.strftime(
                            '%Y-%m-%d %H:%M:%S', time.localtime(time.time()))
                        postDic["update_time"] = time.strftime(
                            '%Y-%m-%d %H:%M:%S', time.localtime(time.time()))
                        #self.mysqlConn.insert(dic_list,"user_review",**postDic)

                    self.redisConn.sadd("success::review", user)
                    page = page + 1
                    count = len(sites)

                except:
                    print(sys.exc_info())
                    self.redisConn.sadd("failed::review", user)

    def UserWishTrade(self):
        #	while self.redisConn.scard("dianping::wish::user")>0:
        while self.redisConn.scard("test") > 0:
            #	user = self.redisConn.pop("dianping::wish::user")
            user = self.redisConn.pop("test")
            url = "http://www.dianping.com" + user

            dic_list = [
                "user_url", "user_name", "store_name", "store_url",
                "store_score", "store_location", "wish_time", "crawl_time",
                "create_time", "update_time"
            ]
            postDic = {}

            count = 30
            page = 1

            while count == 30:
                try:
                    wish_html, code = self.httpRequest.get(
                        url + '/wishlists?pg=' + str(page) + '&favorTag=s-1_c-1_t-1')
                    sites = self.httpParser.parseNode(
                        wish_html, '//div[@class="pic-txt favor-list"]/ul/li')

                    for site in sites:
                        wish_store_url = site.xpath('div/div[1]/h6/a/@href')
                        wish_store_name = site.xpath('div/div[1]/h6/a/text()')
                        wish_store_score = site.xpath(
                            'div/div[2]/div/p/span[2]/@class')
                        wish_store_location = site.xpath(
                            'div/div[2]/div[1]/p/text()')
                        wish_time = site.xpath(
                            'div/div[2]/div[2]/span/i/text()')
                        wish_crawl_time = time.strftime(
                            '%Y-%m-%d %H:%M:%S', time.localtime(time.time()))
                        postDic["user_url"] = user
                        postDic["user_name"] = ''
                        postDic["store_name"] = wish_store_name[0]
                        postDic["store_url"] = wish_store_url[0]
                        postDic["store_score"] = wish_store_score[0]
                        postDic["store_location"] = wish_store_location[0]
                        postDic["wish_time"] = wish_time[0]
                        postDic["crawl_time"] = wish_crawl_time[0]
                        postDic["create_time"] = time.strftime(
                            '%Y-%m-%d %H:%M:%S', time.localtime(time.time()))
                        postDic["update_time"] = time.strftime(
                            '%Y-%m-%d %H:%M:%S', time.localtime(time.time()))
                        #self.mysqlConn.insert(dic_list,"user_wish",**postDic)

                    self.redisConn.sadd("success::wish", user)
                    page = page + 1
                    count = len(sites)

                except:
                    self.redisConn.sadd("failed::wish", user)
                    print(sys.exc_info())

    def UserCheckinTrade(self):

        #	while self.redisConn.scard("dianping::checkin::user")>0:
        while self.redisConn.scard("test") > 0:
            #	member = self.redisConn.pop("dianping::checkin::user")
            member = self.redisConn.pop("test")
            url = "http://www.dianping.com" + member

            memberId = member.split("/")[2]
            dic_list = [
                "user_url", "user_name", "store_name", "store_url",
                "store_location", "checkin_time", "crawl_time", "create_time",
                "update_time"
            ]
            postDic = {}

            try:
                checkin_html, code = self.httpRequest.get(url + '/checkin')
                print(url + '/checkin')
                total_count = self.httpParser.parseText(
                    checkin_html,
                    '//div[@class="pic-txt head-user"]/div[2]/div[3]/ul/li[4]/a/text()'
                )
                total = re.findall("[1-9]+", total_count[0].encode("utf-8"))[0]
                page = int(total) / 20

                sites = self.httpParser.parseNode(checkin_html,
                                                  '//ul[@id="J_list"]/li')
                for site in sites:
                    checkin_store_url = site.xpath('h6/a/@href')
                    checkin_store_name = site.xpath('h6/a/text()')
                    checkin_store_location = site.xpath('p/text()')
                    checkin_time = site.xpath('h6/span/text()')
                    checkin_crawl_time = time.strftime(
                        '%Y-%m-%d %H:%M:%S', time.localtime(time.time()))
                    postDic["user_url"] = member
                    postDic["user_name"] = ''
                    postDic["store_name"] = checkin_store_name[0].replace(
                        "'", "")
                    print(checkin_store_name[0])
                    postDic["store_url"] = checkin_store_url[0]
                    postDic["store_location"] = checkin_store_location[0]
                    postDic["checkin_time"] = checkin_time[0]
                    postDic["crawl_time"] = chechin_crawl_time[0]
                    postDic["create_time"] = time.strftime(
                        '%Y-%m-%d %H:%M:%S', time.localtime(time.time()))
                    postDic["update_time"] = time.strftime(
                        '%Y-%m-%d %H:%M:%S', time.localtime(time.time()))
                    #self.mysqlConn.insert(dic_list,"user_checkin",**postDic)

                if page:
                    for i in range(page):
                        time.sleep(5)
                        data_form = {
                            "memberId": str(memberId),
                            "page": str(i + 2)
                        }
                        url = "http://www.dianping.com/ajax/member/checkin/checkinList"
                        result = self.httpRequest.post(url, data_form)
                        result_list = json.loads(result)["msg"]["checkinList"]
                        for checkin in result_list:
                            postDic["user_url"] = member
                            postDic["user_name"] = ''
                            postDic["store_name"] = checkin[
                                "shopName"].replace("'", "")
                            print(checkin["shopName"])
                            postDic["store_url"] = ''
                            postDic["store_location"] = checkin["shopAddress"]
                            postDic["checkin_time"] = checkin["time"]
                            postDic["crawl_time"] = time.strftime(
                                '%Y-%m-%d %H:%M:%S',
                                time.localtime(time.time()))
                            postDic["create_time"] = time.strftime(
                                '%Y-%m-%d %H:%M:%S',
                                time.localtime(time.time()))
                            postDic["update_time"] = time.strftime(
                                '%Y-%m-%d %H:%M:%S',
                                time.localtime(time.time()))
                            #self.mysqlConn.insert(dic_list,"user_checkin",**postDic)
                self.redisConn.sadd("success::checkin", member)

            except:
                self.redisConn.sadd("failed::checkin", member)
                print(sys.exc_info())
Example #18
    def send(self, msg):
        # Override MIDI channel
        msg.setChannel(self.channel)
        Logger.debug("[%s] Sending message: %s" % (self.name, MidiEngine.dump_msg(msg)))
        if self.output:
            self.output.send(msg)
Example #19
class UrlRunnable:
    def __init__(self):
        self.httpRequest = HttpRequest()
        self.httpParser = HttpParser()
        self.redisConn = RedisConnect()
        self.start_url = "http://www.dianping.com/shopall/2/0"
        self.logger = Logger()
        #	self.mysqlConn = MysqlClient("127.0.0.1","root","homelink",'dianping',3306)
        self.mysqlConn = MysqlPool()
        self.store_file = open("/opt_c/dianping/file/store_urls.txt", "a")

    #	self.store_file = open("/homelink/dianping/file/store_urls.txt","a")

    def saveHtml(self, url, param, html):
        id = re.findall('[0-9]+', url)[0]
        #	path = '/Users/homelink/dianping/html/'+param+'/'+id[0:3]+'/'+id[3:6]+'/'
        path = '/opt_c/dianping/html/' + param + '/' + id[0:3] + '/' + id[
            3:6] + '/'
        if not os.path.exists(path):
            os.makedirs(path)
        html_path = path + id + '_' + param + '.txt'
        f = open(html_path, "a")
        f.write(html)
        f.flush()

    def regUrl(self, link):
        try:
            url = "http://www.dianping.com" + link
            print(url)
            html, code = self.httpRequest.get(url)
            time.sleep(2)
            #	hrefs = self.httpParser.parseHref(html,'//div[@id="region-nav"]/a/@href')
            hrefs = self.httpParser.parseHref(
                html, '//div[@id="region-nav-sub"]/a/@href')
            print(hrefs)
            self.redisConn.sadd("dianping::tag::reg_sub", *hrefs)
        except:
            print(sys.exc_info())
            self.redisConn.sadd("failed::tag::reg_sub", link)

    def linksUrl(self):
        try:
            html, code = self.httpRequest.get(self.start_url)
            print(code)
            sites = self.httpParser.parseNode(
                html, '//div[@class="main_w"]/div/div[1]/dl[17]')
            print(sites)
            postDic = {}
            dic_list = [
                "tag_level_1", "tag_level_2", "tag_link", "create_time",
                "update_time"
            ]
            for site in sites:
                tags = site.xpath('dt/a/text()')
                link_urls = site.xpath('dd/ul/li/a/@href')
                print(link_urls)
                #	self.redisConn.sadd("dianping::tag",*link_urls)
                link_tags = site.xpath('dd/ul/li/a/text()')
                for i in range(len(link_tags)):
                    postDic["tag_level_1"] = tags[0]
                    postDic["tag_level_2"] = link_tags[i]
                    postDic["tag_link"] = link_urls[i]
                    self.regUrl(link_urls[i])
                    postDic["create_time"] = time.strftime(
                        '%Y-%m-%d %H:%M:%S', time.localtime(time.time()))
                    postDic["update_time"] = time.strftime(
                        '%Y-%m-%d %H:%M:%S', time.localtime(time.time()))
            #		self.mysqlConn.insert(dic_list,"storeTag",**postDic)
        except:
            print(sys.exc_info())
        #return link_url

    def run(self):
        #	while self.redisConn.scard("dianping::tag")>0:
        while self.redisConn.scard("dianping::tag::reg") > 0:
            #	while self.redisConn.scard("test_1")>0:
            #		tag = self.redisConn.pop("dianping::tag")
            tag = self.redisConn.pop("dianping::tag::reg")
            #		tag = self.redisConn.pop("test_1")
            url = "http://www.dianping.com" + tag
            print(url)
            self.logger.info("start StoreUrl:" + url)
            postDic = {}
            dic_list = [
                "store_url", "store_name", "father_url", "father_tag",
                "store_score", "trade_area", "location", "cost", "review",
                "create_time", "update_time", "longitude", "latitude"
            ]

            page = 0
            count = 19

            while count >= 15 and page <= 50:
                page = page + 1
                count = 0

                try:
                    html, code = self.httpRequest.get(url + 'p' + str(page))
                    time.sleep(2)
                    print(code)
                    self.logger.info("start StoreUrl: " + url + 'p' +
                                     str(page))
                    print("start StoreUrl: " + url + 'p' + str(page))
                    sites = self.httpParser.parseNode(
                        html, '//div[@id="shop-all-list"]/ul/li')
                    print(sites)
                    count = len(sites)
                    for site in sites:
                        store_urls = site.xpath('div[2]/div[1]/a[1]/@href')
                        time.sleep(2)
                        store_html, code = self.httpRequest.get(
                            "http://www.dianping.com" + store_urls[0])
                        print("http://www.dianping.com" + store_urls[0])
                        self.logger.info(
                            "storeUrl request : http://www.dianping.com" +
                            store_urls[0])
                        extract_address = re.findall("({lng:(.*),lat:(.*)})",
                                                     store_html)
                        if extract_address:
                            longitude = extract_address[0][1]
                            latitude = extract_address[0][2]
                            postDic["longitude"] = longitude
                            postDic["latitude"] = latitude
                        self.saveHtml(store_urls[0], "store", store_html)

                        store_names = site.xpath('div[2]/div[1]/a[1]/@title')
                        father_tag = site.xpath(
                            'div[2]/div[3]/a[1]/span/text()')
                        store_score = site.xpath('div[2]/div[2]/span/@class')
                        trade_area = site.xpath(
                            'div[2]/div[3]/a[2]/span/text()')
                        location = site.xpath(
                            'div[2]/div[3]/span[@class="addr"]/text()')
                        cost = site.xpath('div[2]/div[2]/a[2]/b/text()')
                        review = site.xpath('div[2]/div[2]/a/b/text()')
                        father_url = tag
                        if not self.redisConn.sismember(
                                "dianping::store::bak", store_urls[0]):
                            self.redisConn.sadd("dianping::store",
                                                store_urls[0])
                            self.redisConn.sadd("dianping::store::bak",
                                                store_urls[0])
                        postDic["store_url"] = store_urls[0]
                        postDic["store_name"] = store_names[0].replace("'", "")
                        self.logger.info(store_names[0])
                        postDic["father_url"] = tag
                        postDic["father_tag"] = father_tag[0]
                        postDic["store_score"] = store_score[0]
                        if trade_area:
                            postDic["trade_area"] = trade_area[0]
                        else:
                            postDic["trade_area"] = ''
                        postDic["location"] = location[0]
                        if cost:
                            postDic["cost"] = cost[0]
                        else:
                            postDic["cost"] = ''
                        if review:
                            postDic["review"] = review[0]
                        else:
                            postDic["review"] = ''
                        postDic["create_time"] = time.strftime(
                            '%Y-%m-%d %H:%M:%S', time.localtime(time.time()))
                        postDic["update_time"] = time.strftime(
                            '%Y-%m-%d %H:%M:%S', time.localtime(time.time()))
                        line = json.dumps(dict(postDic), ensure_ascii=False)
                        self.store_file.write(line + '\n')
                        self.store_file.flush()
                        self.mysqlConn.insert(dic_list, "store", **postDic)
                    self.redisConn.sadd("success::tag::url",
                                        url + 'p' + str(page))

                except:
                    self.redisConn.sadd("failed::tag::url",
                                        url + 'p' + str(page))
                    self.redisConn.sadd("failed::tag", tag)
                    self.logger.debug("start StoreUrl:" + url + 'p' +
                                      str(page) + ' error :' +
                                      str(sys.exc_info()[0]) + ',' +
                                      str(sys.exc_info()[1]) + ',' +
                                      str(sys.exc_info()[2]))
                    print(sys.exc_info())
Example #20
class KademliaProtocol(RPCProtocol):
    implements(MessageProcessor)

    def __init__(self, sourceNode, storage, ksize, database):
        self.ksize = ksize
        self.router = RoutingTable(self, ksize, sourceNode)
        self.storage = storage
        self.sourceNode = sourceNode
        self.multiplexer = None
        self.db = database
        self.log = Logger(system=self)
        self.handled_commands = [
            PING, STUN, STORE, DELETE, FIND_NODE, FIND_VALUE, HOLE_PUNCH, INV,
            VALUES
        ]
        RPCProtocol.__init__(self, sourceNode, self.router)

    def connect_multiplexer(self, multiplexer):
        self.multiplexer = multiplexer

    def getRefreshIDs(self):
        """
        Get ids to search for to keep old buckets up to date.
        """
        ids = []
        for bucket in self.router.getLonelyBuckets():
            ids.append(random.randint(*bucket.range))
        return ids

    def rpc_stun(self, sender):
        self.addToRouter(sender)
        return [sender.ip, str(sender.port)]

    def rpc_ping(self, sender):
        self.addToRouter(sender)
        return [self.sourceNode.getProto().SerializeToString()]

    def rpc_store(self, sender, keyword, key, value, ttl):
        self.addToRouter(sender)
        self.log.debug("got a store request from %s, storing value" %
                       str(sender))
        if len(keyword) == 20 and len(key) <= 33 and len(
                value) <= 2100 and int(ttl) <= 604800:
            self.storage[keyword] = (key, value, int(ttl))
            return ["True"]
        else:
            return ["False"]

    def rpc_delete(self, sender, keyword, key, signature):
        self.addToRouter(sender)
        value = self.storage.getSpecific(keyword, key)
        if value is not None:
            # Try to delete a message from the dht
            if keyword == digest(sender.id):
                try:
                    verify_key = nacl.signing.VerifyKey(
                        sender.signed_pubkey[64:])
                    verify_key.verify(key, signature)
                    self.storage.delete(keyword, key)
                    return ["True"]
                except Exception:
                    return ["False"]
            # Or try to delete a pointer
            else:
                try:
                    node = objects.Node()
                    node.ParseFromString(value)
                    pubkey = node.signedPublicKey[64:]
                    try:
                        verify_key = nacl.signing.VerifyKey(pubkey)
                        verify_key.verify(signature + key)
                        self.storage.delete(keyword, key)
                        return ["True"]
                    except Exception:
                        return ["False"]
                except Exception:
                    pass
        return ["False"]

    def rpc_find_node(self, sender, key):
        self.log.debug("finding neighbors of %s in local table" %
                       key.encode('hex'))
        self.addToRouter(sender)
        node = Node(key)
        nodeList = self.router.findNeighbors(node, exclude=sender)
        ret = []
        for n in nodeList:
            ret.append(n.getProto().SerializeToString())
        return ret

    def rpc_find_value(self, sender, keyword):
        self.addToRouter(sender)
        ret = ["value"]
        value = self.storage.get(keyword, None)
        if value is None:
            return self.rpc_find_node(sender, keyword)
        ret.extend(value)
        return ret

    def rpc_inv(self, sender, *serialized_invs):
        self.addToRouter(sender)
        ret = []
        for inv in serialized_invs:
            try:
                i = objects.Inv()
                i.ParseFromString(inv)
                if self.storage.getSpecific(i.keyword, i.valueKey) is None:
                    ret.append(inv)
            except Exception:
                pass
        return ret

    def rpc_values(self, sender, *serialized_values):
        self.addToRouter(sender)
        for val in serialized_values:
            try:
                v = objects.Value()
                v.ParseFromString(val)
                self.storage[v.keyword] = (v.valueKey, v.serializedData,
                                           int(v.ttl))
            except Exception:
                pass
        return ["True"]

    def callFindNode(self, nodeToAsk, nodeToFind):
        address = (nodeToAsk.ip, nodeToAsk.port)
        d = self.find_node(address, nodeToFind.id)
        return d.addCallback(self.handleCallResponse, nodeToAsk)

    def callFindValue(self, nodeToAsk, nodeToFind):
        address = (nodeToAsk.ip, nodeToAsk.port)
        d = self.find_value(address, nodeToFind.id)
        return d.addCallback(self.handleCallResponse, nodeToAsk)

    def callPing(self, nodeToAsk):
        address = (nodeToAsk.ip, nodeToAsk.port)
        d = self.ping(address)
        return d.addCallback(self.handleCallResponse, nodeToAsk)

    def callStore(self, nodeToAsk, keyword, key, value, ttl):
        address = (nodeToAsk.ip, nodeToAsk.port)
        d = self.store(address, keyword, key, value, str(int(round(ttl))))
        return d.addCallback(self.handleCallResponse, nodeToAsk)

    def callDelete(self, nodeToAsk, keyword, key, signature):
        address = (nodeToAsk.ip, nodeToAsk.port)
        d = self.delete(address, keyword, key, signature)
        return d.addCallback(self.handleCallResponse, nodeToAsk)

    def callInv(self, nodeToAsk, serialized_inv_list):
        address = (nodeToAsk.ip, nodeToAsk.port)
        d = self.inv(address, *serialized_inv_list)
        return d.addCallback(self.handleCallResponse, nodeToAsk)

    def callValues(self, nodeToAsk, serialized_values_list):
        address = (nodeToAsk.ip, nodeToAsk.port)
        d = self.values(address, *serialized_values_list)
        return d.addCallback(self.handleCallResponse, nodeToAsk)

    def transferKeyValues(self, node):
        """
        Given a new node, send it all the keys/values it should be storing.

        @param node: A new node that just joined (or that we just found out
        about).

        Process:
        For each key in storage, get k closest nodes.  If newnode is closer
        than the furthest in that list, and the node for this server
        is closer than the closest in that list, then store the key/value
        on the new node (per section 2.5 of the paper)
        """
        def send_values(inv_list):
            values = []
            if inv_list[0]:
                for requested_inv in inv_list[1]:
                    try:
                        i = objects.Inv()
                        i.ParseFromString(requested_inv)
                        value = self.storage.getSpecific(i.keyword, i.valueKey)
                        if value is not None:
                            v = objects.Value()
                            v.keyword = i.keyword
                            v.valueKey = i.valueKey
                            v.serializedData = value
                            v.ttl = int(
                                round(
                                    self.storage.get_ttl(
                                        i.keyword, i.valueKey)))
                            values.append(v.SerializeToString())
                    except Exception:
                        pass
                if len(values) > 0:
                    self.callValues(node, values)

        inv = []
        for keyword in self.storage.iterkeys():
            keynode = Node(keyword)
            neighbors = self.router.findNeighbors(keynode, exclude=node)
            if len(neighbors) > 0:
                newNodeClose = node.distanceTo(
                    keynode) < neighbors[-1].distanceTo(keynode)
                thisNodeClosest = self.sourceNode.distanceTo(
                    keynode) < neighbors[0].distanceTo(keynode)
            if len(neighbors) == 0 \
                    or (newNodeClose and thisNodeClosest) \
                    or (thisNodeClosest and len(neighbors) < self.ksize):
                # pylint: disable=W0612
                for k, v in self.storage.iteritems(keyword):
                    i = objects.Inv()
                    i.keyword = keyword
                    i.valueKey = k
                    inv.append(i.SerializeToString())
        if len(inv) > 0:
            self.callInv(node, inv).addCallback(send_values)

    def handleCallResponse(self, result, node):
        """
        If we get a response, add the node to the routing table.  If
        we get no response, make sure it's removed from the routing table.
        """
        if result[0]:
            if self.router.isNewNode(node):
                self.transferKeyValues(node)
            self.router.addContact(node)
        else:
            self.log.debug("no response from %s, removing from router" % node)
            self.router.removeContact(node)
        return result

    def addToRouter(self, node):
        """
        Called by rpc_ functions when a node sends them a request.
        We add the node to our router and transfer our stored values
        if they are new and within our neighborhood.
        """
        if self.router.isNewNode(node):
            self.log.debug("Found a new node, transferring key/values")
            reactor.callLater(1, self.transferKeyValues, node)
        self.router.addContact(node)

    def __iter__(self):
        return iter(self.handled_commands)
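
The replication rule described in the transferKeyValues() docstring (section 2.5 of the Kademlia paper) reduces to one predicate; here is a hedged, standalone version with integer IDs and XOR distance, again an assumption made only for illustration.

    def should_transfer(key, new_node, this_node, neighbors, ksize):
        # neighbors: the k nodes closest to `key`, sorted nearest first
        if not neighbors:
            return True
        dist = lambda n: n ^ key
        new_node_close = dist(new_node) < dist(neighbors[-1])
        this_node_closest = dist(this_node) < dist(neighbors[0])
        return (new_node_close and this_node_closest) or \
               (this_node_closest and len(neighbors) < ksize)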
Example #21
class QRCodeConverter:
    def __init__(self):
        self.util = Util()
        self.log = Logger()
        self.block_size = 8
        self.circle_radius = self.block_size * 5
        self.qrcode_img_name_prefix = "qrcode"
        self.qrcode_img_name_suffix = ".png"

    def set_inner_img_path(self, path):
        self.inner_img_path = path

    def set_qrcode_content(self, content):
        self.content = content

    def set_qrcode_size(self, size):
        self.size = size

    def get_qrcode_thumbnail_size(self):
        assert self.size
        assert isinstance(self.size, tuple)
        assert len(self.size) == 2
        assert self.size[0]
        assert self.size[1]

        res_val = max(self.size[0], self.size[1])
        return (res_val, res_val)

    def fetch_file(self, path):
        import os
        if path.startswith('http'):
            f = cStringIO.StringIO(urllib.urlopen(path).read())
        else:
            assert os.path.isfile(path)
            f = cStringIO.StringIO(open(path, 'rb').read())
        return f

    def process(self, successCallback, failureCallback):
        im = qrcode.make(self.content).convert('RGBA')
        outputname = self.qrcode_img_name_prefix + str(
            self.util.now()) + self.qrcode_img_name_suffix

        try:
            inner_img_file = self.fetch_file(self.inner_img_path)
            inner_img = Image.open(inner_img_file).convert('RGBA')

            (width, height) = inner_img.size

            dim = height
            if (width > dim):
                dim = width

            xTrans = (im.size[0] - width) / 2.0
            yTrans = (im.size[1] - dim) / 2.0

            im.paste(inner_img, (int(xTrans), int(yTrans)), inner_img)

            try:
                (img_width, img_height) = self.get_qrcode_thumbnail_size()
                self.log.debug('Get width and height for qrcode image')
                self.log.debug('Set thumbnail for qrcode with size: (%d, %d)' %
                               (img_width, img_height))
                im.thumbnail(self.size)
            except Exception as thumbnail_exception:
                self.log.debug(
                    'Cannot get suitable width and height for qrcode image, do nothing'
                )
        except Exception as e:
            self.log.warn('Cannot fetch inner image with path: %s' %
                          (self.inner_img_path))
            self.log.warn(e)

        im.save(outputname)

        self.log.info(
            "success to process, now call successCallback, qrcode file: " +
            outputname)
        successCallback(self)
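
A hypothetical usage of the converter above; the callbacks, logo path and content are placeholders (note that process() as written only ever invokes the success callback).

    def on_success(converter):
        print("QR code written")

    def on_failure(converter):
        print("QR code generation failed")

    qr = QRCodeConverter()
    qr.set_qrcode_content("http://example.com")
    qr.set_inner_img_path("logo.png")
    qr.set_qrcode_size((320, 320))
    qr.process(on_success, on_failure)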
Example #22
class KademliaProtocol(RPCProtocol):
    implements(MessageProcessor)

    def __init__(self, sourceNode, storage, ksize, database):
        self.ksize = ksize
        self.router = RoutingTable(self, ksize, sourceNode)
        self.storage = storage
        self.sourceNode = sourceNode
        self.multiplexer = None
        self.db = database
        self.log = Logger(system=self)
        self.handled_commands = [PING, STUN, STORE, DELETE, FIND_NODE, FIND_VALUE, HOLE_PUNCH]
        RPCProtocol.__init__(self, sourceNode.getProto(), self.router)

    def connect_multiplexer(self, multiplexer):
        self.multiplexer = multiplexer

    def getRefreshIDs(self):
        """
        Get ids to search for to keep old buckets up to date.
        """
        ids = []
        for bucket in self.router.getLonelyBuckets():
            ids.append(random.randint(*bucket.range))
        return ids

    def rpc_stun(self, sender):
        self.addToRouter(sender)
        return [sender.ip, str(sender.port)]

    def rpc_ping(self, sender):
        self.addToRouter(sender)
        return [self.sourceNode.getProto().SerializeToString()]

    def rpc_store(self, sender, keyword, key, value):
        self.addToRouter(sender)
        self.log.debug("got a store request from %s, storing value" % str(sender))
        if len(keyword) == 20 and len(key) <= 33 and len(value) <= 1800:
            self.storage[keyword] = (key, value)
            return ["True"]
        else:
            return ["False"]

    def rpc_delete(self, sender, keyword, key, signature):
        self.addToRouter(sender)
        value = self.storage.getSpecific(keyword, key)
        if value is not None:
            # Try to delete a message from the dht
            if keyword == digest(sender.id):
                try:
                    verify_key = nacl.signing.VerifyKey(sender.signed_pubkey[64:])
                    verify_key.verify(key, signature)
                    self.storage.delete(keyword, key)
                    return ["True"]
                except Exception:
                    return ["False"]
            # Or try to delete a pointer
            else:
                try:
                    node = objects.Node()
                    node.ParseFromString(value)
                    pubkey = node.signedPublicKey[64:]
                    try:
                        verify_key = nacl.signing.VerifyKey(pubkey)
                        verify_key.verify(signature + key)
                        self.storage.delete(keyword, key)
                        return ["True"]
                    except Exception:
                        return ["False"]
                except Exception:
                    pass
        return ["False"]

    def rpc_find_node(self, sender, key):
        self.log.info("finding neighbors of %s in local table" % key.encode('hex'))
        self.addToRouter(sender)
        node = Node(key)
        nodeList = self.router.findNeighbors(node, exclude=sender)
        ret = []
        for n in nodeList:
            ret.append(n.getProto().SerializeToString())
        return ret

    def rpc_find_value(self, sender, key):
        self.addToRouter(sender)
        ret = ["value"]
        value = self.storage.get(key, None)
        if value is None:
            return self.rpc_find_node(sender, key)
        ret.extend(value)
        return ret

    def callFindNode(self, nodeToAsk, nodeToFind):
        address = (nodeToAsk.ip, nodeToAsk.port)
        d = self.find_node(address, nodeToFind.id)
        return d.addCallback(self.handleCallResponse, nodeToAsk)

    def callFindValue(self, nodeToAsk, nodeToFind):
        address = (nodeToAsk.ip, nodeToAsk.port)
        d = self.find_value(address, nodeToFind.id)
        return d.addCallback(self.handleCallResponse, nodeToAsk)

    def callPing(self, nodeToAsk):
        address = (nodeToAsk.ip, nodeToAsk.port)
        d = self.ping(address)
        return d.addCallback(self.handleCallResponse, nodeToAsk)

    def callStore(self, nodeToAsk, keyword, key, value):
        address = (nodeToAsk.ip, nodeToAsk.port)
        d = self.store(address, keyword, key, value)
        return d.addCallback(self.handleCallResponse, nodeToAsk)

    def callDelete(self, nodeToAsk, keyword, key, signature):
        address = (nodeToAsk.ip, nodeToAsk.port)
        d = self.delete(address, keyword, key, signature)
        return d.addCallback(self.handleCallResponse, nodeToAsk)

    def transferKeyValues(self, node):
        """
        Given a new node, send it all the keys/values it should be storing.

        @param node: A new node that just joined (or that we just found out
        about).

        Process:
        For each key in storage, get the k closest nodes.  If the new node is closer
        than the furthest in that list, and the node for this server
        is closer than the closest in that list, then store the key/value
        on the new node (per section 2.5 of the paper)
        """
        ds = []
        for keyword in self.storage.iterkeys():
            keynode = Node(keyword)
            neighbors = self.router.findNeighbors(keynode, exclude=node)
            if len(neighbors) > 0:
                newNodeClose = node.distanceTo(keynode) < neighbors[-1].distanceTo(keynode)
                thisNodeClosest = self.sourceNode.distanceTo(keynode) < neighbors[0].distanceTo(keynode)
            if len(neighbors) == 0 \
                    or (newNodeClose and thisNodeClosest) \
                    or (thisNodeClosest and len(neighbors) < self.ksize):
                for k, v in self.storage.iteritems(keyword):
                    ds.append(self.callStore(node, keyword, k, v))
        return defer.gatherResults(ds)

    def handleCallResponse(self, result, node):
        """
        If we get a response, add the node to the routing table.  If
        we get no response, make sure it's removed from the routing table.
        """
        if result[0]:
            if self.router.isNewNode(node):
                self.transferKeyValues(node)
            self.log.info("got response from %s, adding to router" % node)
            self.router.addContact(node)
        else:
            self.log.debug("no response from %s, removing from router" % node)
            self.router.removeContact(node)
        return result

    def addToRouter(self, node):
        """
        Called by rpc_ functions when a node sends them a request.
        We add the node to our router and transfer our stored values
        if they are new and within our neighborhood.
        """
        if self.router.isNewNode(node):
            self.log.debug("Found a new node, transferring key/values")
            self.transferKeyValues(node)
        self.router.addContact(node)

    def __iter__(self):
        return iter(self.handled_commands)
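
# A minimal call-flow sketch (hypothetical peer values, not part of the class above):
# callStore() builds the (ip, port) address and delegates to self.store(...), an RPC
# stub generated dynamically by RPCProtocol.__getattr__ (see the RPCProtocol example
# further below); handleCallResponse() then updates the routing table depending on
# whether the peer answered.
#
#     d = protocol.callStore(peer_node, keyword_digest, content_hash, pointer_bytes)
#     d.addCallback(lambda (responded, value): value if responded else None)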
Ejemplo n.º 23
0
class SelfSignedJwt(object):

    NumCharIn128BitHexString = 128/8*2
    numCharIn160BitHexString = 160/8*2
    ThumbprintRegEx = "^[a-f\d]*$"

    def __init__(self, call_context, authority, client_id):
        self._log = Logger('SelfSignedJwt', call_context['log_context'])
        self._call_context = call_context

        self._authority = authority
        self._token_endpoint = authority.token_endpoint
        self._client_id = client_id

    def _get_date_now(self):
        return datetime.datetime.now()

    def _get_new_jwt_id(self):
        return str(uuid.uuid4())

    def _create_x5t_value(self, thumbprint):
        hex_str = thumbprint.replace(':', '').replace(' ', '')
        b64_str = base64.urlsafe_b64encode(hex_str.encode())
        return b64_str.decode()

    def _create_header(self, thumbprint):
        x5t = self._create_x5t_value(thumbprint)
        header = {'typ':'JWT', 'alg':'RS256', 'x5t':x5t}

        self._log.debug("Creating self signed JWT header. x5t: {0}".format(x5t))

        return header

    def _create_payload(self):

        now = self._get_date_now()
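        # timedelta's positional arguments are (days, seconds, microseconds,
        # milliseconds, minutes, ...), so SELF_SIGNED_JWT_LIFETIME is a lifetime in minutes.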
        minutes = datetime.timedelta(0, 0, 0, 0, Jwt.SELF_SIGNED_JWT_LIFETIME)
        expires = now + minutes

        self._log.debug('Creating self signed JWT payload. Expires: {0} NotBefore: {1}'.format(expires, now))

        jwt_payload = {}
        jwt_payload[Jwt.AUDIENCE] = self._token_endpoint
        jwt_payload[Jwt.ISSUER] = self._client_id
        jwt_payload[Jwt.SUBJECT] = self._client_id
        jwt_payload[Jwt.NOT_BEFORE] = int(time.mktime(now.timetuple()))
        jwt_payload[Jwt.EXPIRES_ON] = int(time.mktime(expires.timetuple()))
        jwt_payload[Jwt.JWT_ID] = self._get_new_jwt_id()

        return jwt_payload

    def _raise_on_invalid_jwt_signature(self, encoded_jwt):
        segments = encoded_jwt.split('.')
        if len(segments) < 3 or not segments[2]:
            raise self._log.create_error('Failed to sign JWT. This is most likely due to an invalid certificate.')

    def _raise_on_invalid_thumbprint(self, thumbprint):

        thumbprint_sizes = [self.NumCharIn128BitHexString, self.numCharIn160BitHexString]
        if len(thumbprint) not in thumbprint_sizes or not re.search(self.ThumbprintRegEx, thumbprint):
            raise self._log.create_error("The thumbprint does not match a known format")

    def _sign_jwt(self, header, payload, certificate):
        # TODO: Might want to load the cert and get the string proper.
        cert_start_str = '-----BEGIN RSA PRIVATE KEY-----'
        cert_end_str = '-----END RSA PRIVATE KEY-----\n'
        if not certificate.startswith(cert_start_str):
            raise Exception("Invalid Certificate: Expected Start of Certificate to be '{}'".format(cert_start_str))
        if not certificate.endswith(cert_end_str):
            raise Exception("Invalid Certificate: Expected End of Certificate to be '{}'".format(cert_end_str))

        # Strip '-----BEGIN RSA PRIVATE KEY-----' and '-----END RSA PRIVATE KEY-----'
        cert_string = "".join(certificate.strip().split("\n")[1:-1])
        cert_string_64 = base64.urlsafe_b64encode(cert_string.encode())

        encoded_jwt = self._encode_jwt(payload, cert_string_64, header)
        self._raise_on_invalid_jwt_signature(encoded_jwt)
        return encoded_jwt

    def _encode_jwt(self, payload, certificate, header):
        return jwt.encode(payload, certificate, headers=header).decode()

    def _reduce_thumbprint(self, thumbprint):

        canonical = thumbprint.lower().replace(' ', '').replace(':', '')
        self._raise_on_invalid_thumbprint(canonical)
        return canonical

    def create(self, certificate, thumbprint):
        thumbprint = self._reduce_thumbprint(thumbprint)
        header = self._create_header(thumbprint)
        payload = self._create_payload()
        signed_jwt = self._sign_jwt(header, payload, certificate)
        return signed_jwt
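
# A minimal usage sketch (hypothetical call_context, authority and credentials; the
# real objects come from the surrounding library): the class is driven entirely
# through create(), which returns the signed client assertion.
#
#     signer = SelfSignedJwt(call_context, authority, 'my-client-id')
#     assertion = signer.create(private_key_pem,
#                               'a1:b2:c3:d4:e5:f6:a7:b8:c9:d0:a1:b2:c3:d4:e5:f6:a7:b8:c9:d0')
#     # 'assertion' is then sent as the client_assertion when requesting a token.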
Ejemplo n.º 24
0
class Server(object):
    """
    High level view of a node instance.  This is the object that should be created
    to start listening as an active node on the network.
    """

    def __init__(self, node, ksize=20, alpha=3, storage=None):
        """
        Create a server instance.  This will start listening on the given port.

        Args:
            node: The node instance for this peer. It must contain (at minimum) an ID,
                public key, ip address, and port.
            ksize (int): The k parameter from the paper
            alpha (int): The alpha parameter from the paper
            storage: An instance that implements :interface:`~dht.storage.IStorage`
        """
        self.ksize = ksize
        self.alpha = alpha
        self.log = Logger(system=self)
        self.storage = storage or ForgetfulStorage()
        self.node = node
        self.protocol = KademliaProtocol(self.node, self.storage, ksize)
        self.refreshLoop = LoopingCall(self.refreshTable).start(3600)

    def listen(self, port):
        """
        Start listening on the given port.

        This is the same as calling::

            reactor.listenUDP(port, server.protocol)
        """
        return reactor.listenUDP(port, self.protocol)

    def refreshTable(self):
        """
        Refresh buckets that haven't had any lookups in the last hour
        (per section 2.3 of the paper).
        """
        ds = []
        for id in self.protocol.getRefreshIDs():
            node = Node(id)
            nearest = self.protocol.router.findNeighbors(node, self.alpha)
            spider = NodeSpiderCrawl(self.protocol, node, nearest)
            ds.append(spider.find())

        def republishKeys(_):
            ds = []
            # Republish keys older than one hour
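            # (Assuming storage entries start with a one-week TTL of 604800 seconds,
            # a remaining TTL below 601200 means the entry is over an hour old.)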
            for keyword in self.storage.iterkeys():
                for k, v in self.storage.iteritems(keyword):
                    if self.storage.get_ttl(keyword, k) < 601200:
                        ds.append(self.set(keyword, k, v))
            return defer.gatherResults(ds)

        return defer.gatherResults(ds).addCallback(republishKeys)

    def querySeed(self, seed, pubkey):
        """
        Query an HTTP seed and return a `list` of (ip, port) `tuple` pairs.

        Args:
           seed: A `string` consisting of "ip:port" or "hostname:port"
           pubkey: The hex encoded public key to verify the signature on the response
        """
        nodes = []
        c = httplib.HTTPConnection(seed)
        c.request("GET", "/")
        response = c.getresponse()
        self.log.info("Https response from %s: %s, %s" % (seed, response.status, response.reason))
        data = response.read()
        reread_data = data.decode("zlib")
        seeds = peers.PeerSeeds()
        try:
            seeds.ParseFromString(reread_data)
            for peer in seeds.peer_data:
                p = peers.PeerData()
                p.ParseFromString(peer)
                tup = (str(p.ip_address), p.port)
                nodes.append(tup)
            verify_key = nacl.signing.VerifyKey(pubkey, encoder=nacl.encoding.HexEncoder)
            verify_key.verify(seeds.signature + "".join(seeds.peer_data))
        except Exception:
            self.log.error("Error parsing seed response.")
        return nodes

    def bootstrappableNeighbors(self):
        """
        Get a :class:`list` of (ip, port) :class:`tuple` pairs suitable for use as an argument
        to the bootstrap method.

        The server should have been bootstrapped
        already - this is just a utility for getting some neighbors and then
        storing them if this server is going down for a while.  When it comes
        back up, the list of nodes can be used to bootstrap.
        """
        neighbors = self.protocol.router.findNeighbors(self.node)
        return [tuple(n)[-2:] for n in neighbors]

    def bootstrap(self, addrs):
        """
        Bootstrap the server by connecting to other known nodes in the network.

        Args:
            addrs: A `list` of (ip, port) `tuple` pairs.  Note that only IP addresses
                   are acceptable - hostnames will cause an error.
        """

        # if the transport hasn't been initialized yet, wait a second
        if self.protocol.multiplexer.transport is None:
            return task.deferLater(reactor, 1, self.bootstrap, addrs)

        def initTable(results):
            nodes = []
            for addr, result in results.items():
                if result[0]:
                    n = objects.Node()
                    try:
                        n.ParseFromString(result[1][0])
                        pubkey = n.signedPublicKey[len(n.signedPublicKey) - 32:]
                        verify_key = nacl.signing.VerifyKey(pubkey)
                        verify_key.verify(n.signedPublicKey)
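                        # nacl.hash.sha512 returns a hex digest; the check below requires the
                        # first 6 hex chars of its second half to encode a value below 50 (a
                        # simple proof of work) and the GUID to match the first 20 bytes of the hash.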
                        h = nacl.hash.sha512(n.signedPublicKey)
                        pow = h[64:128]
                        if int(pow[:6], 16) >= 50 or hexlify(n.guid) != h[:40]:
                            raise Exception('Invalid GUID')
                        nodes.append(Node(n.guid, addr[0], addr[1], n.signedPublicKey))
                    except:
                        self.log.msg("Bootstrap node returned invalid GUID")
            spider = NodeSpiderCrawl(self.protocol, self.node, nodes, self.ksize, self.alpha)
            return spider.find()

        ds = {}
        for addr in addrs:
            ds[addr] = self.protocol.ping((addr[0], addr[1]))
        return deferredDict(ds).addCallback(initTable)

    def inetVisibleIP(self):
        """
        Get the internet visible IP's of this node as other nodes see it.

        Returns:
            A `list` of IP's.  If no one can be contacted, then the `list` will be empty.
        """

        def handle(results):
            ips = []
            for result in results:
                if result[0]:
                    ips.append((result[1][0], int(result[1][1])))
            self.log.debug("other nodes think our ip is %s" % str(ips))
            return ips

        ds = []
        for neighbor in self.bootstrappableNeighbors():
            ds.append(self.protocol.stun(neighbor))
        return defer.gatherResults(ds).addCallback(handle)

    def get(self, keyword):
        """
        Get a key if the network has it.

        Returns:
            :class:`None` if not found, the value otherwise.
        """
        dkey = digest(keyword)
        if self.storage.get(dkey) is not None:
            return defer.succeed(self.storage.get(dkey))
        node = Node(dkey)
        nearest = self.protocol.router.findNeighbors(node)
        if len(nearest) == 0:
            self.log.warning("There are no known neighbors to get key %s" % keyword)
            return None
        spider = ValueSpiderCrawl(self.protocol, node, nearest, self.ksize, self.alpha)
        return spider.find()

    def set(self, keyword, key, value):
        """
        Set the given key/value tuple at the hash of the given keyword.
        All values stored in the DHT are stored as dictionaries of key/value
        pairs. If a value already exists for a given keyword, the new key/value
        pair will be appended to the dictionary.

        Args:
            keyword: a `string` keyword. The SHA1 hash of which will be used as
                the key when inserting in the DHT.
            key: the 20 byte hash of the data.
            value: a serialized `protos.objects.Node` object which serves as a
                pointer to the node storing the data.

        Return: True if at least one peer responded. False if the store rpc
            completely failed.
        """
        self.log.debug("setting '%s' = '%s':'%s' on network" % (keyword, hexlify(key), hexlify(value)))
        dkey = digest(keyword)

        def store(nodes):
            self.log.info("setting '%s' on %s" % (keyword, map(str, nodes)))
            ds = [self.protocol.callStore(node, dkey, key, value) for node in nodes]

            keynode = Node(dkey)
            if self.node.distanceTo(keynode) < max([n.distanceTo(keynode) for n in nodes]):
                self.storage[dkey] = (key, value)
                self.log.debug("got a store request from %s, storing value" % str(self.node))

            return defer.DeferredList(ds).addCallback(self._anyRespondSuccess)

        node = Node(dkey)
        nearest = self.protocol.router.findNeighbors(node)
        if len(nearest) == 0:
            self.log.warning("There are no known neighbors to set key %s" % key)
            return defer.succeed(False)
        spider = NodeSpiderCrawl(self.protocol, node, nearest, self.ksize, self.alpha)
        return spider.find().addCallback(store)

    def delete(self, keyword, key, signature):
        """
        Delete the given key/value pair from the keyword dictionary on the network.
        To delete you must provide a signature covering the key that you wish to
        delete. It will be verified against the public key stored in the value. We
        use our ksize as alpha to make sure we reach as many nodes storing our value
        as possible.

        Args:
            keyword: the `string` keyword where the data being deleted is stored.
            key: the 20 byte hash of the data.
            signature: a signature covering the key.

        """
        self.log.debug("deleting '%s':'%s' from the network" % (keyword, hexlify(key)))
        dkey = digest(keyword)

        def delete(nodes):
            self.log.info("deleting '%s' on %s" % (key, map(str, nodes)))
            ds = [self.protocol.callDelete(node, dkey, key, signature) for node in nodes]

            if self.storage.getSpecific(keyword, key) is not None:
                self.storage.delete(keyword, key)

            return defer.DeferredList(ds).addCallback(self._anyRespondSuccess)

        node = Node(dkey)
        nearest = self.protocol.router.findNeighbors(node)
        if len(nearest) == 0:
            self.log.warning("There are no known neighbors to delete key %s" % key)
            return defer.succeed(False)
        spider = NodeSpiderCrawl(self.protocol, node, nearest, self.ksize, self.ksize)
        return spider.find().addCallback(delete)

    def get_node(self, guid):
        """
        Given a guid return a `Node` object containing its ip and port or none if it's
        not found.

        Args:
            guid: the 20 raw bytes representing the guid.
        """
        node_to_find = Node(guid)

        def check_for_node(nodes):
            for node in nodes:
                if node.id == node_to_find.id:
                    return node
            return None
        index = self.protocol.router.getBucketFor(node_to_find)
        nodes = self.protocol.router.buckets[index].getNodes()
        for node in nodes:
            if node.id == node_to_find.id:
                return defer.succeed(node)
        nearest = self.protocol.router.findNeighbors(node_to_find)
        if len(nearest) == 0:
            self.log.warning("There are no known neighbors to find node %s" % node_to_find.id.encode("hex"))
            return defer.succeed(None)
        spider = NodeSpiderCrawl(self.protocol, node_to_find, nearest, self.ksize, self.alpha)
        return spider.find().addCallback(check_for_node)

    def _anyRespondSuccess(self, responses):
        """
        Given the result of a DeferredList of calls to peers, ensure that at least
        one of them was contacted and responded with a Truthy result.
        """
        for deferSuccess, result in responses:
            peerReached, peerResponse = result
            if deferSuccess and peerReached and peerResponse:
                return True
        return False

    def saveState(self, fname):
        """
        Save the state of this node (the alpha/ksize/id/immediate neighbors)
        to a cache file with the given fname.
        """
        data = {'ksize': self.ksize,
                'alpha': self.alpha,
                'id': self.node.id,
                'signed_pubkey': self.node.signed_pubkey,
                'neighbors': self.bootstrappableNeighbors()}
        if len(data['neighbors']) == 0:
            self.log.warning("No known neighbors, so not writing to cache.")
            return
        with open(fname, 'w') as f:
            pickle.dump(data, f)

    @classmethod
    def loadState(cls, fname, ip_address, port, multiplexer, storage=None):
        """
        Load the state of this node (the alpha/ksize/id/immediate neighbors)
        from a cache file with the given fname.
        """
        with open(fname, 'r') as f:
            data = pickle.load(f)
        n = Node(data['id'], ip_address, port, data['signed_pubkey'])
        s = Server(n, data['ksize'], data['alpha'], storage=storage)
        s.protocol.connect_multiplexer(multiplexer)
        if len(data['neighbors']) > 0:
            s.bootstrap(data['neighbors'])
        return s

    def saveStateRegularly(self, fname, frequency=600):
        """
        Save the state of node with a given regularity to the given
        filename.

        Args:
            fname: File name to save regularly to
            frequency: Frequency in seconds that the state should be saved.
                        By default, 10 minutes.
        """
        loop = LoopingCall(self.saveState, fname)
        loop.start(frequency)
        return loop
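
# A minimal usage sketch (hypothetical addresses, multiplexer and key material; Node,
# digest and the reactor come from the surrounding project): create a Server around a
# local Node, start listening and bootstrap off one known peer, mirroring loadState().
#
#     this_node = Node(digest("my-guid"), "127.0.0.1", 18467, signed_pubkey)
#     server = Server(this_node, ksize=20, alpha=3)
#     server.protocol.connect_multiplexer(multiplexer)
#     server.listen(18467)
#     server.bootstrap([("10.0.0.5", 18467)])
#     server.saveStateRegularly("dht_cache.pickle")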
Ejemplo n.º 25
0
class Lint:
    def __init__(self, options):
        self.log = Logger()

        if not options.workdir:
            raise RuntimeError, "Lint: No working directory specified."
        
        lintResult = self.__getLintResult(options.workdir)
        
        ignoreMessages = []
        if hasattr(options, "ignoremessages"):
            optionType = type(options.ignoremessages).__name__
            if optionType == "list":
                ignoreMessages = options.ignoremessages
            elif optionType == "str":
                ignoreMessages = options.ignoremessages.split(",")
        
        ignoreClasses = []
        if hasattr(options, "ignoreclasses"):
            optionType = type(options.ignoreclasses).__name__
            if optionType == "list":
                ignoreClasses = options.ignoreclasses
            elif optionType == "str":
                ignoreClasses = options.ignoreclasses.split(",")
        
        self.lintData = self.__parseLintResult(lintResult, ignoreMessages, ignoreClasses)
        
        
    def __getLintResult(self, workdir):
        self.log.info("Running lint in directory %s" %workdir)
        startdir = os.getcwd()
        os.chdir(workdir)
        ret,out,err = util.invokePiped("python generate.py lint")
        
        if (ret > 0):
            raise RuntimeError, "Lint run failed. " + err
        
        os.chdir(startdir)
        return out

    
    def __parseLintResult(self, text, ignoreMessages=[], ignoreClasses=[]):
        self.log.info("Lint parsing lint output")
        self.log.debug("Lint ignoring messages: %s" %repr(ignoreMessages))
        self.log.debug("Lint ignoring classes: %s" %repr(ignoreClasses))
        log = ""
        if isinstance(text, str):
            log = text.split("\n")
        else:
            log = text
          
        data = {}
        for line in log:
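            # Lint lines are expected to look like "<path> (<line>,<col>): <message>";
            # the regexes below pull out the message text, source path and position.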
            msgre = re.compile('.*\): (.*)$')
            msgma = msgre.match(line)
            msg = None
            if not msgma:
                continue
            msg = msgma.group(1)
            genericmsg = None
            member = None
            hint = None
            (genericmsg, member, hint) = self.__getMessage(msg)      
      
            if not genericmsg:
                self.log.error("Lint.parseLintResult couldn't extract generic message from line:\n" + line)
            if (genericmsg[len(genericmsg)-3:] == " in"):
                genericmsg = genericmsg[0:len(genericmsg)-3]
            
            if genericmsg in ignoreMessages:
                continue
            
            msgid = genericmsg
            if (not msgid in data):
                data[msgid] = []
  
            if (hint[0:2] == "! "):
                hint = hint[2:]
  
            info = {}
            info['member'] = member
            if (hint != ""):
                info['hint'] = hint
            info['path'] = ''
            info['line'] = ''
  
            pathre = re.compile('^.*([\\\/]source[\\\/].*) \(')
            pathma = pathre.match(line)
            if (pathma):
                info['path'] = pathma.group(1)
  
            linecolre = re.compile('.*(\(.*\)).*')
            linecolma = linecolre.match(line)
            if (linecolma):
                info['line'] = linecolma.group(1)
  
            ignoreClass = False
            for cls in ignoreClasses:
                classPath = cls.replace(".","/")
                clsre = re.compile("^.*" + classPath + ".*$")
                clsma = clsre.match(info['path'])
                if (clsma):
                    ignoreClass = True

            if not ignoreClass:
                data[msgid].append(info)  
          
        del_keys = []
        for key, value in data.iteritems():
            if (len(value) == 0):
                del_keys.append(key)
        for k in del_keys:
            del data[k]
      
        return data


    def __getMessage(self,fullmsg):
        genericmsg = None
        member = None
        hint = None
      
        msgre = re.compile("^([\w\- ]+)'([^\s]*)'([\w ]*)[\. ]*(.*)$")
        msgrma = msgre.match(fullmsg)
        if msgrma:        
            genericmsg = msgrma.group(1) + msgrma.group(3)
            if (genericmsg[len(genericmsg)-1] == " "):
                genericmsg = genericmsg[:-1]
            member = msgrma.group(2)
            hint = msgrma.group(4)
    
        return (genericmsg, member, hint)

    
    def getResult(self):
        return self.lintData  
    
    
    def getFlatResult(self):
        flatData = []
        data = self.getResult()
        for message in data:
            for messageDetails in data[message]:
                flatMessage = {
                  "message" : message,
                  "member" : messageDetails["member"],
                  "path" : messageDetails["path"],
                  "line" : messageDetails["line"]
                }
                flatData.append(flatMessage)
        return flatData

    
    def getResultJson(self):
        return json.dumps(self.lintData, sort_keys=True, indent=2)
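
# A minimal usage sketch (hypothetical options object; any object exposing a
# `workdir` attribute, plus optional `ignoremessages`/`ignoreclasses`, will do):
#
#     class LintOptions(object):
#         workdir = "framework"
#         ignoremessages = "Unused variable,Unused argument"
#
#     lint = Lint(LintOptions())
#     print lint.getResultJson()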
Ejemplo n.º 26
0
class Server(object):
    """
    High level view of a node instance.  This is the object that should be created
    to start listening as an active node on the network.
    """

    def __init__(self, node, db, signing_key, ksize=20, alpha=3, storage=None):
        """
        Create a server instance.  This will start listening on the given port.

        Args:
            node: The node instance for this peer. It must contain (at minimum) an ID,
                public key, ip address, and port.
            ksize (int): The k parameter from the paper
            alpha (int): The alpha parameter from the paper
            storage: An instance that implements :interface:`~dht.storage.IStorage`
        """
        self.ksize = ksize
        self.alpha = alpha
        self.log = Logger(system=self)
        self.storage = storage or ForgetfulStorage()
        self.node = node
        self.protocol = KademliaProtocol(self.node, self.storage, ksize, db, signing_key)
        self.refreshLoop = LoopingCall(self.refreshTable)
        reactor.callLater(1800, self.refreshLoop.start, 3600)

    def listen(self, port):
        """
        Start listening on the given port.

        This is the same as calling::

            reactor.listenUDP(port, server.protocol)
        """
        return reactor.listenUDP(port, self.protocol)

    def refreshTable(self):
        """
        Refresh buckets that haven't had any lookups in the last hour
        (per section 2.3 of the paper).
        """
        ds = []
        refresh_ids = self.protocol.getRefreshIDs()
        refresh_ids.append(digest(random.getrandbits(255)))  # random node so we get more diversity
        for rid in refresh_ids:
            node = Node(rid)
            nearest = self.protocol.router.findNeighbors(node, self.alpha)
            spider = NodeSpiderCrawl(self.protocol, node, nearest, self.ksize, self.alpha)
            ds.append(spider.find())

        def republishKeys(_):
            self.log.debug("Republishing key/values...")
            neighbors = self.protocol.router.findNeighbors(self.node, exclude=self.node)
            for node in neighbors:
                self.protocol.transferKeyValues(node)

        return defer.gatherResults(ds).addCallback(republishKeys)

    def querySeed(self, list_seed_pubkey):
        """
        Query an HTTP seed and return a `list` of (ip, port) `tuple` pairs.

        Args:
            list_seed_pubkey: a list of one or more (seed, pubkey) tuples, where
                seed is a `string` consisting of "ip:port" or "hostname:port" and
                pubkey is the hex encoded public key used to verify the signature on the response.
        """

        nodes = []
        if not list_seed_pubkey:
            self.log.error('failed to query seed {0} from ob.cfg'.format(list_seed_pubkey))
            return nodes
        else:
            for sp in list_seed_pubkey:
                seed, pubkey = sp
                try:
                    self.log.info("querying %s for peers" % seed)
                    c = httplib.HTTPConnection(seed)
                    c.request("GET", "/")
                    response = c.getresponse()
                    self.log.debug("Http response from %s: %s, %s" % (seed, response.status, response.reason))
                    data = response.read()
                    reread_data = data.decode("zlib")
                    proto = peers.PeerSeeds()
                    proto.ParseFromString(reread_data)
                    for peer in proto.serializedNode:
                        n = objects.Node()
                        n.ParseFromString(peer)
                        tup = (str(n.nodeAddress.ip), n.nodeAddress.port)
                        nodes.append(tup)
                    verify_key = nacl.signing.VerifyKey(pubkey, encoder=nacl.encoding.HexEncoder)
                    verify_key.verify("".join(proto.serializedNode), proto.signature)
                    self.log.info("%s returned %s addresses" % (seed, len(nodes)))
                except Exception, e:
                    self.log.error("failed to query seed: %s" % str(e))
            return nodes
Ejemplo n.º 27
0
class BitcoinProtocol(Protocol):

    def __init__(self, user_agent, inventory, subscriptions, bloom_filter, blockchain, download_listener):
        self.user_agent = user_agent
        self.inventory = inventory
        self.subscriptions = subscriptions
        self.bloom_filter = bloom_filter
        self.blockchain = blockchain
        self.download_count = 0
        self.download_tracker = [0, 0]
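        # download_tracker = [blocks announced in the current inv batch, blocks received so far]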
        self.download_listener = download_listener
        self.timeouts = {}
        self.callbacks = {}
        self.state = State.CONNECTING
        self.version = None
        self.buffer = ""
        self.log = Logger(system=self)

    def connectionMade(self):
        """
        Send the version message and start the handshake
        """
        self.timeouts["verack"] = reactor.callLater(5, self.response_timeout, "verack")
        self.timeouts["version"] = reactor.callLater(5, self.response_timeout, "version")
        msg_version2(PROTOCOL_VERSION, self.user_agent, nStartingHeight=self.blockchain.get_height() if self.blockchain else -1).stream_serialize(self.transport)

    def dataReceived(self, data):
        self.buffer += data
        header = MsgHeader.from_bytes(self.buffer)
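        # A full bitcoin P2P message is a 24-byte header followed by msglen bytes of
        # payload; wait for more data until at least one complete message has arrived.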
        if len(self.buffer) < header.msglen + 24:
            return
        try:
            stream = BytesIO(self.buffer)
            m = MsgSerializable.stream_deserialize(stream)
            self.buffer = stream.read()

            if m.command == "verack":
                self.timeouts["verack"].cancel()
                del self.timeouts["verack"]
                if "version" not in self.timeouts:
                    self.on_handshake_complete()

            elif m.command == "version":
                self.version = m
                if m.nVersion < 70001 or m.nServices != 1:
                    self.transport.loseConnection()
                self.timeouts["version"].cancel()
                del self.timeouts["version"]
                msg_verack().stream_serialize(self.transport)
                if self.blockchain is not None:
                    self.to_download = self.version.nStartingHeight - self.blockchain.get_height()
                if "verack" not in self.timeouts:
                    self.on_handshake_complete()

            elif m.command == "getdata":
                for item in m.inv:
                    if item.hash in self.inventory and item.type == 1:
                        transaction = msg_tx()
                        transaction.tx = self.inventory[item.hash]
                        transaction.stream_serialize(self.transport)

            elif m.command == "inv":
                for item in m.inv:
                    # This is either an announcement of tx we broadcast ourselves or a tx we have already downloaded.
                    # In either case we only need to callback here.
                    if item.type == 1 and item.hash in self.subscriptions:
                        self.subscriptions[item.hash]["callback"](item.hash)

                    # This is the first time we are seeing this txid. Let's download it and check to see if it sends
                    # coins to any addresses in our subscriptions.
                    elif item.type == 1 and item.hash not in self.inventory:
                        self.timeouts[item.hash] = reactor.callLater(5, self.response_timeout, item.hash)

                        cinv = CInv()
                        cinv.type = 1
                        cinv.hash = item.hash

                        getdata_packet = msg_getdata()
                        getdata_packet.inv.append(cinv)

                        getdata_packet.stream_serialize(self.transport)

                    # The peer announced a new block. Unlike txs, we should download it, even if we've previously
                    # downloaded it from another peer, to make sure it doesn't contain any txs we didn't know about.
                    elif item.type == 2 or item.type == 3:
                        if self.state == State.DOWNLOADING:
                            self.download_tracker[0] += 1
                        cinv = CInv()
                        cinv.type = 3
                        cinv.hash = item.hash

                        getdata_packet = msg_getdata()
                        getdata_packet.inv.append(cinv)

                        getdata_packet.stream_serialize(self.transport)

                    if self.state != State.DOWNLOADING:
                        self.log.debug("Peer %s:%s announced new %s %s" % (self.transport.getPeer().host, self.transport.getPeer().port, CInv.typemap[item.type], b2lx(item.hash)))

            elif m.command == "tx":
                if m.tx.GetHash() in self.timeouts:
                    self.timeouts[m.tx.GetHash()].cancel()
                for out in m.tx.vout:
                    try:
                        addr = str(CBitcoinAddress.from_scriptPubKey(out.scriptPubKey))
                    except Exception:
                        addr = None

                    if addr in self.subscriptions:
                        if m.tx.GetHash() not in self.subscriptions:
                            # It's possible the first time we are hearing about this tx is following block
                            # inclusion. If this is the case, let's make sure we include the correct number
                            # of confirmations.
                            in_blocks = self.inventory[m.tx.GetHash()] if m.tx.GetHash() in self.inventory else []
                            confirms = []
                            if len(in_blocks) > 0:
                                for block in in_blocks:
                                    confirms.append(self.blockchain.get_confirmations(block))
                            self.subscriptions[m.tx.GetHash()] = {
                                "announced": 0,
                                "ann_threshold": self.subscriptions[addr][0],
                                "confirmations": max(confirms) if len(confirms) > 0 else 0,
                                "last_confirmation": 0,
                                "callback": self.subscriptions[addr][1],
                                "in_blocks": in_blocks,
                                "tx": m.tx
                            }
                            self.subscriptions[addr][1](m.tx.GetHash())
                        if m.tx.GetHash() in self.inventory:
                            del self.inventory[m.tx.GetHash()]

            elif m.command == "merkleblock":
                if self.blockchain is not None:
                    self.blockchain.process_block(m.block)
                    if self.state != State.DOWNLOADING:
                        self.blockchain.save()
                    # check for block inclusion of subscribed txs
                    for match in m.block.get_matched_txs():
                        if match in self.subscriptions:
                            self.subscriptions[match]["in_blocks"].append(m.block.GetHash())
                        else:
                            # stick the hash here in case this is the first we are hearing about this tx.
                            # when the tx comes over the wire after this block, we will append this hash.
                            self.inventory[match] = [m.block.GetHash()]
                    # run through subscriptions and callback with updated confirmations
                    for txid in self.subscriptions:
                        try:
                            confirms = []
                            for block in self.subscriptions[txid]["in_blocks"]:
                                confirms.append(self.blockchain.get_confirmations(block))
                            self.subscriptions[txid]["confirmations"] = max(confirms)
                            self.subscriptions[txid]["callback"](txid)
                        except Exception:
                            pass

                    # If we are in the middle of an initial chain download, let's check to see if we have
                    # either reached the end of the download or if we need to loop back around and make
                    # another get_blocks call.
                    if self.state == State.DOWNLOADING:
                        self.download_count += 1
                        percent = int((self.download_count / float(self.to_download))*100)
                        if self.download_listener is not None:
                            self.download_listener.progress(percent, self.download_count)
                            self.download_listener.on_block_downloaded((self.transport.getPeer().host, self.transport.getPeer().port), header, self.to_download - self.download_count + 1)
                        if percent == 100:
                            if self.download_listener is not None:
                                self.download_listener.download_complete()
                            self.log.info("Chain download 100% complete")
                        self.download_tracker[1] += 1
                        # We've downloaded every block in the inv packet and still have more to go.
                        if (self.download_tracker[0] == self.download_tracker[1] and
                           self.blockchain.get_height() < self.version.nStartingHeight):
                            if self.timeouts["download"].active():
                                self.timeouts["download"].cancel()
                            self.download_blocks(self.callbacks["download"])
                        # We've downloaded everything so let's callback to the client.
                        elif self.blockchain.get_height() >= self.version.nStartingHeight:
                            self.blockchain.save()
                            self.state = State.CONNECTED
                            self.callbacks["download"]()
                            if self.timeouts["download"].active():
                                self.timeouts["download"].cancel()

            elif m.command == "headers":
                if self.timeouts["download"].active():
                    self.timeouts["download"].cancel()
                for header in m.headers:
                    # If this node sent a block with no parent then disconnect from it and callback
                    # on client.check_for_more_blocks.
                    if self.blockchain.process_block(header) is None:
                        self.blockchain.save()
                        self.callbacks["download"]()
                        self.transport.loseConnection()
                        return
                    self.download_count += 1
                    percent = int((self.download_count / float(self.to_download))*100)
                    if self.download_listener is not None:
                        self.download_listener.progress(percent, self.download_count)
                        self.download_listener.on_block_downloaded((self.transport.getPeer().host, self.transport.getPeer().port), header, self.to_download - self.download_count + 1)
                    if percent == 100:
                        if self.download_listener is not None:
                            self.download_listener.download_complete()
                        self.log.info("Chain download 100% complete")
                # The headers message only comes in batches of 500 blocks. If we still have more blocks to download
                # loop back around and call get_headers again.
                if self.blockchain.get_height() < self.version.nStartingHeight:
                    self.download_blocks(self.callbacks["download"])
                else:
                    self.blockchain.save()
                    self.callbacks["download"]()
                    self.state = State.CONNECTED

            elif m.command == "ping":
                msg_pong(nonce=m.nonce).stream_serialize(self.transport)

            else:
                self.log.debug("Received message %s from %s:%s" % (m.command, self.transport.getPeer().host, self.transport.getPeer().port))

            if len(self.buffer) >= 24:
                self.dataReceived("")
        except Exception:
            traceback.print_exc()

    def on_handshake_complete(self):
        self.log.info("Connected to peer %s:%s" % (self.transport.getPeer().host, self.transport.getPeer().port))
        self.load_filter()
        self.state = State.CONNECTED

    def response_timeout(self, id):
        if id == "download":
            self.callbacks["download"]()
        del self.timeouts[id]
        for t in self.timeouts.values():
            if t.active():
                t.cancel()
        if self.state != State.SHUTDOWN:
            self.log.warning("Peer %s:%s unresponsive, disconnecting..." % (self.transport.getPeer().host, self.transport.getPeer().port))
        self.transport.loseConnection()
        self.state = State.SHUTDOWN

    def download_blocks(self, callback):
        if self.state == State.CONNECTING:
            return task.deferLater(reactor, 1, self.download_blocks, callback)
        if self.blockchain is not None:
            if self.download_listener is not None and self.download_count == 0:
                self.download_listener.download_started((self.transport.getPeer().host, self.transport.getPeer().port), self.to_download)
            self.log.info("Downloading blocks from %s:%s" % (self.transport.getPeer().host, self.transport.getPeer().port))
            self.state = State.DOWNLOADING
            self.callbacks["download"] = callback
            self.timeouts["download"] = reactor.callLater(30, self.response_timeout, "download")
            if len(self.subscriptions) > 0:
                get = msg_getblocks()
                self.download_tracker = [0, 0]
            else:
                get = msg_getheaders()
            get.locator = self.blockchain.get_locator()
            get.stream_serialize(self.transport)

    def send_message(self, message_obj):
        if self.state == State.CONNECTING:
            return task.deferLater(reactor, 1, self.send_message, message_obj)
        message_obj.stream_serialize(self.transport)

    def load_filter(self):
        msg_filterload(filter=self.bloom_filter).stream_serialize(self.transport)

    def connectionLost(self, reason):
        self.state = State.SHUTDOWN
        self.log.info("Connection to %s:%s closed" % (self.transport.getPeer().host, self.transport.getPeer().port))
Ejemplo n.º 28
0
class RPCProtocol:
    """
    This is an abstract class for processing and sending rpc messages.
    A class that implements the `MessageProcessor` interface probably should
    extend this as it does most of the work of keeping track of messages.
    """
    __metaclass__ = abc.ABCMeta

    def __init__(self, sourceNode, router, waitTimeout=30):
        """
        Args:
            sourceNode: A protobuf `Node` object containing info about this node.
            router: A `RoutingTable` object from dht.routing. Implies a `network.Server` object
                    must be started first.
            waitTimeout: Timeout for whole messages. Note the txrudp layer has a per-packet
                    timeout, but invalid responses won't trigger it. The waitTimeout on this
                    layer needs to be long enough to allow whole messages (e.g. images) to
                    transmit.

        """
        self.sourceNode = sourceNode
        self.router = router
        self._waitTimeout = waitTimeout
        self._outstanding = {}
        self.log = Logger(system=self)

    def receive_message(self, message, sender, connection, ban_score):
        if message.testnet != self.multiplexer.testnet:
            self.log.warning(
                "received message from %s with incorrect network parameters." %
                str(connection.dest_addr))
            connection.shutdown()
            return False

        if message.protoVer < PROTOCOL_VERSION:
            self.log.warning(
                "received message from %s with incompatible protocol version."
                % str(connection.dest_addr))
            connection.shutdown()
            return False

        self.multiplexer.vendors[sender.id] = sender

        msgID = message.messageID
        if message.command == NOT_FOUND:
            data = None
        else:
            data = tuple(message.arguments)
        if msgID in self._outstanding:
            self._acceptResponse(msgID, data, sender)
        elif message.command != NOT_FOUND:
            ban_score.process_message(connection.dest_addr, message)
            self._acceptRequest(msgID,
                                str(Command.Name(message.command)).lower(),
                                data, sender, connection)
        else:
            ban_score.process_message(connection.dest_addr, message)

    def _acceptResponse(self, msgID, data, sender):
        if data is not None:
            msgargs = (b64encode(msgID), sender)
            self.log.debug("received response for message id %s from %s" %
                           msgargs)
        else:
            self.log.warning("received 404 error response from %s" % sender)
        d = self._outstanding[msgID][0]
        if self._outstanding[msgID][2].active():
            self._outstanding[msgID][2].cancel()
        d.callback((True, data))
        del self._outstanding[msgID]

    def _acceptRequest(self, msgID, funcname, args, sender, connection):
        self.log.debug("received request from %s, command %s" %
                       (sender, funcname.upper()))
        f = getattr(self, "rpc_%s" % funcname, None)
        if f is None or not callable(f):
            msgargs = (self.__class__.__name__, funcname)
            self.log.error(
                "%s has no callable method rpc_%s; ignoring request" % msgargs)
            return False
        if funcname == "hole_punch":
            f(sender, *args)
        else:
            d = defer.maybeDeferred(f, sender, *args)
            d.addCallback(self._sendResponse, funcname, msgID, sender,
                          connection)
            d.addErrback(self._sendResponse, "bad_request", msgID, sender,
                         connection)

    def _sendResponse(self, response, funcname, msgID, sender, connection):
        self.log.debug("sending response for msg id %s to %s" %
                       (b64encode(msgID), sender))
        m = Message()
        m.messageID = msgID
        m.sender.MergeFrom(self.sourceNode.getProto())
        m.protoVer = PROTOCOL_VERSION
        m.testnet = self.multiplexer.testnet
        if response is None:
            m.command = NOT_FOUND
        else:
            m.command = Command.Value(funcname.upper())
            if not isinstance(response, list):
                response = [response]
            for arg in response:
                m.arguments.append(str(arg))
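        # SigningKey.sign() returns signature || message; keep only the 64-byte signature.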
        m.signature = self.signing_key.sign(m.SerializeToString())[:64]
        connection.send_message(m.SerializeToString())

    def timeout(self, node):
        """
        This timeout is called by the txrudp connection handler. We will run through the
        outstanding messages and callback false on any waiting on this IP address.
        """
        address = (node.ip, node.port)
        for msgID, val in self._outstanding.items():
            if address == val[1]:
                val[0].callback((False, None))
                if self._outstanding[msgID][2].active():
                    self._outstanding[msgID][2].cancel()
                del self._outstanding[msgID]

        self.router.removeContact(node)
        try:
            self.multiplexer[address].shutdown()
        except Exception:
            pass

    def rpc_hole_punch(self, sender, ip, port, relay="False"):
        """
        A method for handling an incoming HOLE_PUNCH message. Relay the message
        to the correct node if it's not for us. Otherwise send a datagram to allow
        the other node to punch through our NAT.
        """
        if relay == "True":
            self.log.debug("relaying hole punch packet to %s:%s for %s:%s" %
                           (ip, port, sender.ip, str(sender.port)))
            self.hole_punch(
                Node(digest("null"), ip, int(port), nat_type=FULL_CONE),
                sender.ip, sender.port)
        else:
            self.log.debug("punching through NAT for %s:%s" % (ip, port))
            # pylint: disable=W0612
            for i in range(20):
                self.multiplexer.send_datagram("", (ip, int(port)))

    def __getattr__(self, name):
        if name.startswith("_") or name.startswith("rpc_"):
            return object.__getattr__(self, name)

        try:
            return object.__getattr__(self, name)
        except AttributeError:
            pass

        def func(node, *args):
            address = (node.ip, node.port)

            msgID = sha1(str(random.getrandbits(255))).digest()
            m = Message()
            m.messageID = msgID
            m.sender.MergeFrom(self.sourceNode.getProto())
            m.command = Command.Value(name.upper())
            m.protoVer = PROTOCOL_VERSION
            for arg in args:
                m.arguments.append(str(arg))
            m.testnet = self.multiplexer.testnet
            m.signature = self.signing_key.sign(m.SerializeToString())[:64]
            data = m.SerializeToString()

            relay_addr = None
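            # Route through the peer's relay node when a direct packet is unlikely to get
            # through: the peer is behind a symmetric NAT, or it is restricted and we are symmetric.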
            if node.nat_type == SYMMETRIC or \
                    (node.nat_type == RESTRICTED and self.sourceNode.nat_type == SYMMETRIC):
                relay_addr = node.relay_node

            d = defer.Deferred()
            if m.command != HOLE_PUNCH:
                timeout = reactor.callLater(self._waitTimeout, self.timeout,
                                            node)
                self._outstanding[msgID] = [d, address, timeout]
                self.log.debug("calling remote function %s on %s (msgid %s)" %
                               (name, address, b64encode(msgID)))

            self.multiplexer.send_message(data, address, relay_addr)

            if self.multiplexer[address].state != State.CONNECTED and \
                            node.nat_type == RESTRICTED and \
                            self.sourceNode.nat_type != SYMMETRIC and \
                            node.relay_node is not None:
                self.hole_punch(
                    Node(digest("null"),
                         node.relay_node[0],
                         node.relay_node[1],
                         nat_type=FULL_CONE), address[0], address[1], "True")
                self.log.debug("sending hole punch message to %s" %
                               address[0] + ":" + str(address[1]))

            return d

        return func
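
# A minimal subclassing sketch (hypothetical command; assumes ECHO exists in the
# Command protobuf enum): concrete protocols such as KademliaProtocol above extend
# RPCProtocol, expose rpc_<name> handlers for incoming requests, and invoke remote
# procedures through the attribute stubs that __getattr__ generates.
#
#     class EchoProtocol(RPCProtocol):
#         def rpc_echo(self, sender, text):
#             return [text]
#
#     # outgoing call: self.echo(peer_node, "hello") returns a Deferred firing with
#     # (True, response_args) on success or (False, None) on timeout.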
Ejemplo n.º 29
0
def run_test():
    logg = Logger()

    try:
        with open('/home/fabien/Images/blockchain1.png', 'rb') as img_file:
            base64.b64encode(img_file.read())
    except (IOError, TypeError):
        pass

    try:
        with open('/home/fabien/Images/matryoshka.png', 'rb') as img_file:
            base64.b64encode(img_file.read())
    except (IOError, TypeError):
        pass

    username = "******"
    password_hash = hashlib.sha1("pikcio_node_password").digest().encode('hex')
    client_api = ClientAPI(username=username, password=password_hash)

    access = client_api.get_access_token()
    logg.debug("\n===Access :\n {0} \n===\n".format(access))

    ###########################################################################

    ############################################################
    #                                                          #
    #               UPDATE TEST 31/08/18                       #
    #                                                          #
    ############################################################

    ##################################
    # Everything related to the user #
    ##################################

    # OK :
    '''
    json_update_password = {
        "current_password": "******",
        "password": "******",
        "retype_password": "******"
    }
    result = client_api.update_password(json_update_password)
    '''
    '''
    json_get_reset_token_forgotten_password = {
        "question_id": 1,
        "answer": "answer1"
    }
    result = client_api.get_reset_token_forgotten_password(
        json_get_reset_token_forgotten_password
    )
    '''
    '''
    json_reset_password = {
        "reset_token": "4525019d46b3c424d77917c9b2e1da781b9cec2e",
        "new_password": "******",
        "confirm_password": "******"
    }
    result = client_api.reset_password(json_reset_password)
    '''
    result = client_api.get_user_avatar()
    '''
    json_set_user_avatar = {
        "avatar":
            {
                "data": file_data,
                "value": "sangoku_api.png"
            }
    }
    result = client_api.set_user_avatar(json_set_user_avatar)
    '''
    # result = client_api.get_user_profile()
    '''
    json_update_user_profile = {
        "profile":
            {
                "children": {'value': 'azert', 'shared': False},
                "birth_date": {'value': '2', 'shared': True},
                "country": {'value': 'France', 'shared': False},
                "my new item": {'value': 'LE CRES', 'shared': False}
            }
    }
    result = client_api.update_user_profile(json_update_user_profile)
    '''
    # result = client_api.get_background()
    # result = client_api.get_tci()
    '''
    json_delete_custom_profile_item = {"item": "re test"}
    result = client_api.delete_custom_profile_item(
        json_delete_custom_profile_item
    )
    '''
    '''
    json_upload_data = {
        "data": "Jana kramer",
        "metadata": "say_my_name",
        "context": "Professional",
        "source": "API"
    }
    result = client_api.upload_data(json_upload_data)
    '''
    # TO BE TESTED :
    # not included in the current version :
    '''
    json_register = {
        "username": "",
        "firstname": "",
        "lastname": "",
        "email": ""
    }
    client_api.register()
    '''
    '''
    json_login = {
        "username": "******",
        "password": "******"
    }
    client_api.login(json_login)
    '''
    # client_api.logout()
    # result = client_api.set_background('data_45')
    # result = client_api.import_profile()
    # result = client_api.export_profile()

    ###########################################################################

    ##################################
    # Everything related to contacts #
    ##################################

    # OK :

    # result = client_api.find_user(
    #     query='41f91225e521b2bb03f7680f26a8b366cb134c3e'
    # )
    '''
    json_add_contact = {
        "matr_id": "414d544d385356486b50346a434668744e73596754393574573872444"
                   "34d7a463375",
        "username": "******"
    }
    result = client_api.add_contact(json_add_contact)
    '''
    # result = client_api.get_contacts()
    # result = client_api.get_contact_profile(
    #     matr_id='414d544d385356486b50346a57457387244434d7a463375'
    # )
    '''
    json_add_contact = {
        "matr_id": "414d544d385356486b50346a57457387244434d7a463375",
        "keep_files": True
    }
    result = client_api.remove_contact(json_add_contact)
    '''
    # result = client_api.accept_contact_request(
    #     matr_id="414d544d385356486b50346a57457387244434d7a463375"
    # )
    # result = client_api.reject_contact_request(
    #     matr_id="414d544d385356486b50346a57457387244434d7a463375"
    # )
    '''
    json_block_contact = {
        "matr_id": "414d544d385356486b50346a57457387244434d7a463375",
        "keep_files": False
    }
    result = client_api.block_contact(json_block_contact)
    '''
    # result = client_api.unblock_contact(
    #     matr_id="414d544d385356486b50346a57457387244434d7a463375"
    # )
    '''
    json_block_contact = {
        "matr_id": "414d544d385356486b50346a434668744e73596754393574573872444"
                   "34d7a463375",
        "action": "remove"
    }
    result = client_api.manage_replicant(json_block_contact)
    '''
    # TO BE TESTED :

    # not included in the current version :
    # client_api.import_contacts()
    # client_api.invite_someone()
    # result = client_api.get_notifications_online()

    ###########################################################################

    #########################################
    # Everything related to Pikcio messages #
    #########################################

    # OK :
    '''
    json_get_pikcio_messages = {
        "category": "received",
        "limit": 0,
        "date": "2017-04-01 12:12:12",
        "filter": "after"
    }
    result = client_api.get_pikcio_messages(json_get_pikcio_messages)

    # result = client_api.get_pikcio_message(
    #     msg_id='211197ad9109c3cf0f5793651e38f7dae5b5d28a'
    # )
    # result = client_api.delete_pikcio_message(
    #     msg_id='211197ad9109c3cf0f5793651e38f7dae5b5d28a'
    # )
    '''
    '''
    json_send_pikcio_message = {
        "content": "This is a text content of a Pikcio message",
        "subject": "TEST from API",
        "lifetime": '30',
        "certification": False,
        "external_receivers":
            [
                {"email": "dev@matchupbox", "name": "Support Matchupbox"}
            ],
        "receivers": [
            "414d544d385356486b50346a434668744e7359675439357457387244434d7a46"
            "3375"
        ]
    }
    result = client_api.send_pikcio_message(json_send_pikcio_message)
    '''
    # result = client_api.set_pikcio_message_to_read(
    #     msg_id='bfd927a239f2d8e61a9133e1fbe2b016d2de619b'
    # )
    # result = client_api.pikcio_message_request_certification(
    #     msg_id='bfd927a239f2d8e61a9133e1fbe2b016d2de619b'
    # )

    ###########################################################################

    #######################################
    # Everything related to chat messages #
    #######################################

    # OK :
    '''
    json_get_conversation = {
        "chat": {
            "matr_id": "414d544d385356486b50346a434668744e7359675439357457387"
                       "244434d7a463375",
            "date": "2018-01-01",
            "filter": "after",
            "limit": 1
        }
    }
    result = client_api.get_chat_conversation(json_get_conversation)
    '''
    '''
    json_send_chat_message = {
        "receivers":
            [
                {
                    'matr_id': '415133767636324c6d594c476246375138623943527a32'
                               '326972453566533933616b'
                },
                {
                    'matr_id': '414d544d385356486b50346a434668744e735967543935'
                               '7457387244434d7a463375'
                }
            ],
        "message": "Test from API NEW VERSION",
        "certify": False
    }
    result = client_api.send_chat_message(json_send_chat_message)
    '''
    # result = client_api.delete_chat_message(
    #     msg_id='0d1d588ad8393d7796066a67420641efb8776f71'
    # )
    # result = client_api.get_chat_message_status(
    #     msg_id='450f1a5002cbc8071dbd8e47e86f4bfed7509df5'
    # )
    # result = client_api.set_chat_message_to_read(
    #     msg_id='efa996a5a6216c696d112e1aa82578df05798fdd'
    # )

    # KO :

    # result = client_api.delete_chat_conversation(
    #     matr_id="414d544d385356486b50346a57457387244434d7a463375"
    # )

    ###########################################################################

    #######################################
    # Everything related to file messages #
    #######################################

    # OK:
    '''
    json_get_file_messages = {
        "file":
            {
                "matr_id": '414d544d385356486b50346a434668744e7359675439357457'
                           '387244434d7a463375',
                "date": '2016-04-18 09:06:16',
                "filter": "after",
                "limit": 2
            }
    }
    result = client_api.get_file_messages(json_get_file_messages)
    '''
    '''
    json_send_file_message = {
        "receivers":
            [
                {
                    'matr_id': '415133767636324c6d594c476246375138623943527a32'
                               '326972453566533933616b'
                },
                {
                    'matr_id': '414d544d385356486b50346a434668744e735967543935'
                               '7457387244434d7a463375'
                }
            ],
        "file_name": 'testingAPI.png',
        "file_content": file_data,
        "certify": True
    }

    result = client_api.send_file_message(json_send_file_message)
    '''
    # result = client_api.set_file_message_to_read(
    #     msg_id='459e1debe9310630e001ccd8acbaa2048f64c355'
    # )
    # result = client_api.delete_file_message(
    #     msg_id='459e1debe9310630e001ccd8acbaa2048f64c355'
    # )

    # MISSING PikcioChain.py :

    # client_api.get_file_message_meta_data()
    # client_api.get_file_message_matr_msg_id()

    ###########################################################################

    #######################################
    # Everything related to wall messages #
    #######################################

    # OK :
    '''
    json_get_wall_messages = {
        'wall':
            {
                "limit": 2,
                "date": "2017-01-28 14:14:14",
                "filter": "after"
            }
    }
    result = client_api.get_wall_messages(json_get_wall_messages)
    '''
    '''
    json_send_wall_file_message = {
        "file":
            {
                "data": file_data,
                "name": "test_wall from API.jpg"
            }
    }
    result = client_api.send_wall_message(json_send_wall_file_message)
    '''
    '''
    json_send_wall_text_message = {
        "message":
            {
                "data": "test from api"
            }
    }
    result = client_api.send_wall_message(json_send_wall_text_message)
    '''
    # result = client_api.delete_wall_message(
    #     message_id='a5d6e553c251c26680f8d887e815e0a59af5866f'
    # )
    # result = client_api.get_wall_message_comments(
    #     message_id='09152ea2963136d5ea5be98a79e8908f4dbf8dda'
    # )

    # to be tested :

    # not included in the current version
    # todo : all features concerning public wall (send, retrieve, ...)

    ###########################################################################

    #########################################
    # Everything related to file management #
    #########################################

    # OK :
    '''
    json_upload_file = {
        "files":
            [
                {
                    "file_name": "",
                    "file_content": file_data,
                    "tags": "api blockchain, test",
                    "contexts": ""
                },
                {
                    "file_name": "matryoshka.png",
                    "file_content": file_data2,
                    "tags": "",
                    "contexts": ""
                }
            ],
        "folder_name": "APIFolder"
    }
    result = client_api.upload_file(json_upload_file)
    '''
    '''
    json_delete_file = {
        "file_id": 1591,
        "check": False
    }
    result = client_api.delete_file(json_delete_file)
    '''
    '''
    json_get_files_info = {
        "folder_name": "APIFolder",
        "limit": 2
    }
    result = client_api.get_files_info(json_get_files_info)
    '''
    # result = client_api.get_file_info('8')
    # result = client_api.get_folders()
    # result = client_api.encrypt_file('6')
    # result = client_api.decrypt_file('8')
    # result = client_api.certify_file('8')
    # result = client_api.request_file_history('8')

    # KO :

    # result = client_api.delete_folder("APIFolder")
    # result = client_api.move_file(data_43)

    ###########################################################################

    ##################################
    # Everything related to Settings #
    ##################################

    # OK :

    # result = client_api.report_bug("test API")

    # to be tested :

    # not included in the current version :
    # result = client_api.edit_network()
    # result = client_api.edit_message_lifetime()
    # result = client_api.create_api()
    # result = client_api.delete_api()
    # result = client_api.get_api_info()
    # result = client_api.get_api_config_file()
    # result = client_api.get_api_qr_code()
    # result = client_api.toggle_popups()

    ###########################################################################

    ################################
    # Everything related to Groups #
    ################################

    # ok :

    # to be tested :

    # not included in the current version :
    # result = client_api.create_group(data_16)
    # result = client_api.delete_group(name="TestAPI")
    # result = client_api.list_group()
    # result = client_api.get_group_info(data_18)
    # result = client_api.add_contact_to_group(data_17)
    # result = client_api.remove_contact_from_group(data_17)
    # result = client_api.send_group_file_message(data_19)
    # result = client_api.send_group_chat_message(data_20)
    # result = client_api.delete_group_message(
    #     msg_id="73ba804f503b8c4f7c9ccf35fb369ec860871496"
    # )
    # result = client_api.get_group_messages(name="TestAPI")

    ###########################################################################

    ################################
    # Everything related to Agenda #
    ################################

    # ok :

    # to be tested :

    # not included in the current version :
    # client_api.get_agenda_events(data_24)
    # client_api.get_event("d368b18421fb30a45d8b262f526d7aa31d57526a")
    # client_api.create_event(data_22)
    # client_api.update_event(data_23)
    # client_api.delete_event(event_id=)
    # client_api.reject_event(event_id=)
    # client_api.accept_event(event_id=)

    ###########################################################################

    ###############################
    # Everything related to Email #
    ###############################

    # ok :

    # to be tested :

    # not included in the current version :
    # client_api.edit_emails_frequency(data=)
    # client_api.link_emails(data=)

    ###########################################################################

    ################################
    # Everything related to Wallet #
    ################################

    # ok :
    '''
    json_make_payment = {
        "matr_id": "40b3551fe8cd576e710d613f79c199360533269f",
        "amount": "1.125"
    }
    result = client_api.make_payment(json_make_payment)
    '''
    # result = client_api.update_wallet()
    # result = client_api.get_transactions()

    # to be tested :

    # not included in the current version :

    ###########################################################################

    #########################################
    # Everything related to Generic Message #
    #########################################

    # ok :
    '''
    custom_content = {
        'brand': 'BMW',
        'model': 'M6'
    }
    json_send_generic_message = {
        'type': 'Ouioui',
        'scope': 'film',
        'action': 'send',
        'content': custom_content,
        'receivers': ['1a3dc918c938a22ab4b3d8ab7d9fae55cdb2b8cb'],
        'certify': False,
        'attachments': ['6']
    }

    custom_content_msf_create_case = {
        'type': 'MSF',
        'scope': 'medical_case',
        'action': 'create',
        'receivers': ['b71b9522f2050ad93f064e66738bbabc33225cd5'],
        'certify': False,
        'content': {
            'type': 'Patient-related clinical query (to a specific patient)',
            'subject': 'Cancer',
            'language': 'EN',
            'localization': 'Dire dawa, Ethiopia',
            'patient_name': 'Lilian',
            'patient_lastname': 'Laslandes',
            'patient_age': '45',
            'gender': 'male',
            'priority': 'critical',
            'referrer_id': '2',
            'complaint': 'Lorem Ipsum is simply dummy text of the printing and'
                         ' typesetting industry.',
            'history': 'and a search for lorem ipsum will uncover many web '
                       'sites still in their infancy.',
            'past': 'The standard chunk of Lorem Ipsum used since the 1500s'
        },
        'attachments': []
    }
    # result = client_api.send_generic_message(json_send_generic_message)
    '''
    '''
    json_get_generic_messages1 = {
        'type': 'MSF',
        'scope': 'medical_case',
        'action': '',
        'is_read': True
    }
    json_get_generic_messages2 = {
        'type': 'Ouioui',
        'scope': 'film',
        'action': '',
        'limit': 2,
        'date': '',
        'filter_date': '',
        'is_read': True
    }
    result = client_api.get_generic_messages(json_get_generic_messages1)
    '''

    # result = client_api.delete_generic_messages(
    #     message_id='f798701d1249944e8cc8214b5b5e353fd50511ff'
    # )

    # result = client_api.get_generic_message_status(
    #     message_id='b8b5e24dcf446ca6faeda4a944f41a08a117740c'
    # )

    # to be tested :

    # not included in the current version :

    ###########################################################################

    if isinstance(result, str):
        logg.debug("\n1===\n{0} \n===\n".format(result))
    else:
        logg.debug("\n2===\n{0} \n===\n".format(result.content))
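
# A minimal sketch (assumption: not part of the original script) of running
# this test module directly; run_test() exercises whichever ClientAPI calls
# are enabled above and logs the last `result` through its Logger instance.
if __name__ == "__main__":
    run_test()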
Ejemplo n.º 30
0
class MarketProtocol(RPCProtocol):
    implements(MessageProcessor)

    def __init__(self, node_proto, router, signing_key):
        self.router = router
        RPCProtocol.__init__(self, node_proto, router)
        self.log = Logger(system=self)
        self.multiplexer = None
        self.hashmap = HashMap()
        self.signing_key = signing_key
        self.listeners = []
        self.handled_commands = [GET_CONTRACT, GET_IMAGE, GET_PROFILE, GET_LISTINGS, GET_USER_METADATA,
                                 GET_CONTRACT_METADATA, FOLLOW, UNFOLLOW, GET_FOLLOWERS, GET_FOLLOWING,
                                 NOTIFY, MESSAGE]

    def connect_multiplexer(self, multiplexer):
        self.multiplexer = multiplexer

    def add_listener(self, listener):
        self.listeners.append(listener)

    def rpc_get_contract(self, sender, contract_hash):
        self.log.info("Looking up contract ID %s" % contract_hash.encode('hex'))
        self.router.addContact(sender)
        try:
            with open(self.hashmap.get_file(contract_hash), "r") as filename:
                contract = filename.read()
            return [contract]
        except Exception:
            self.log.warning("Could not find contract %s" % contract_hash.encode('hex'))
            return ["None"]

    def rpc_get_image(self, sender, image_hash):
        self.log.info("Looking up image with hash %s" % image_hash.encode('hex'))
        self.router.addContact(sender)
        try:
            with open(self.hashmap.get_file(image_hash), "r") as filename:
                image = filename.read()
            return [image]
        except Exception:
            self.log.warning("Could not find image %s" % image_hash.encode('hex'))
            return ["None"]

    def rpc_get_profile(self, sender):
        self.log.info("Fetching profile")
        self.router.addContact(sender)
        try:
            proto = Profile().get(True)
            return [proto, self.signing_key.sign(proto)[:64]]
        except Exception:
            self.log.error("Unable to load the profile")
            return ["None"]

    def rpc_get_user_metadata(self, sender):
        self.log.info("Fetching metadata")
        self.router.addContact(sender)
        try:
            proto = Profile().get(False)
            m = Metadata()
            m.name = proto.name
            m.handle = proto.handle
            m.short_description = proto.short_description
            m.avatar_hash = proto.avatar_hash
            m.nsfw = proto.nsfw
            return [m.SerializeToString(), self.signing_key.sign(m.SerializeToString())[:64]]
        except Exception:
            self.log.error("Unable to get the profile metadata")
            return ["None"]

    def rpc_get_listings(self, sender):
        self.log.info("Fetching listings")
        self.router.addContact(sender)
        try:
            p = Profile().get()
            l = Listings()
            l.ParseFromString(ListingsStore().get_proto())
            l.handle = p.handle
            l.avatar_hash = p.avatar_hash
            return [l.SerializeToString(), self.signing_key.sign(l.SerializeToString())[:64]]
        except Exception:
            self.log.warning("Could not find any listings in the database")
            return ["None"]

    def rpc_get_contract_metadata(self, sender, contract_hash):
        self.log.info("Fetching metadata for contract %s" % hexlify(contract_hash))
        self.router.addContact(sender)
        try:
            proto = ListingsStore().get_proto()
            l = Listings()
            l.ParseFromString(proto)
            for listing in l.listing:
                if listing.contract_hash == contract_hash:
                    ser = listing.SerializeToString()
            return [ser, self.signing_key.sign(ser)[:64]]
        except Exception:
            self.log.warning("Could not find metadata for contract %s" % hexlify(contract_hash))
            return ["None"]

    def rpc_follow(self, sender, proto, signature):
        self.log.info("Follow request from %s" % sender.id.encode("hex"))
        self.router.addContact(sender)
        try:
            verify_key = nacl.signing.VerifyKey(sender.signed_pubkey[64:])
            verify_key.verify(proto, signature)
            f = Followers.Follower()
            f.ParseFromString(proto)
            if f.guid != sender.id:
                raise Exception('GUID does not match sending node')
            if f.following != self.proto.guid:
                raise Exception('Following wrong node')
            f.signature = signature
            FollowData().set_follower(f)
            proto = Profile().get(False)
            m = Metadata()
            m.name = proto.name
            m.handle = proto.handle
            m.avatar_hash = proto.avatar_hash
            m.nsfw = proto.nsfw
            return ["True", m.SerializeToString(), self.signing_key.sign(m.SerializeToString())[:64]]
        except Exception:
            self.log.warning("Failed to validate follower")
            return ["False"]

    def rpc_unfollow(self, sender, signature):
        self.log.info("Unfollow request from %s" % sender.id.encode("hex"))
        self.router.addContact(sender)
        try:
            verify_key = nacl.signing.VerifyKey(sender.signed_pubkey[64:])
            verify_key.verify("unfollow:" + self.proto.guid, signature)
            f = FollowData()
            f.delete_follower(sender.id)
            return ["True"]
        except Exception:
            self.log.warning("Failed to validate follower signature")
            return ["False"]

    def rpc_get_followers(self, sender):
        self.log.info("Fetching followers list from db")
        self.router.addContact(sender)
        ser = FollowData().get_followers()
        if ser is None:
            return ["None"]
        else:
            return [ser, self.signing_key.sign(ser)[:64]]

    def rpc_get_following(self, sender):
        self.log.info("Fetching following list from db")
        self.router.addContact(sender)
        ser = FollowData().get_following()
        if ser is None:
            return ["None"]
        else:
            return [ser, self.signing_key.sign(ser)[:64]]

    def rpc_notify(self, sender, message, signature):
        if len(message) <= 140 and FollowData().is_following(sender.id):
            try:
                verify_key = nacl.signing.VerifyKey(sender.signed_pubkey[64:])
                verify_key.verify(message, signature)
            except Exception:
                return ["False"]
            self.log.info("Received a notification from %s" % sender)
            self.router.addContact(sender)
            for listener in self.listeners:
                try:
                    verifyObject(NotificationListener, listener)
                    listener.notify(sender.id, message)
                except DoesNotImplement:
                    pass
            return ["True"]
        else:
            return ["False"]

    def rpc_message(self, sender, pubkey, encrypted):
        try:
            box = Box(PrivateKey(self.signing_key.encode(nacl.encoding.RawEncoder)), PublicKey(pubkey))
            plaintext = box.decrypt(encrypted)
            p = Plaintext_Message()
            p.ParseFromString(plaintext)
            signature = p.signature
            p.ClearField("signature")
            verify_key = nacl.signing.VerifyKey(p.signed_pubkey[64:])
            verify_key.verify(p.SerializeToString(), signature)
            h = nacl.hash.sha512(p.signed_pubkey)
            pow_hash = h[64:128]
            if int(pow_hash[:6], 16) >= 50 or hexlify(p.sender_guid) != h[:40] or p.sender_guid != sender.id:
                raise Exception('Invalid guid')
            self.log.info("Received a message from %s" % sender)
            self.router.addContact(sender)
            for listener in self.listeners:
                try:
                    verifyObject(MessageListener, listener)
                    listener.notify(p, signature)
                except DoesNotImplement:
                    pass
            return ["True"]
        except Exception:
            self.log.error("Received invalid message from %s" % sender)
            return ["False"]

    def callGetContract(self, nodeToAsk, contract_hash):
        address = (nodeToAsk.ip, nodeToAsk.port)
        d = self.get_contract(address, contract_hash)
        return d.addCallback(self.handleCallResponse, nodeToAsk)

    def callGetImage(self, nodeToAsk, image_hash):
        address = (nodeToAsk.ip, nodeToAsk.port)
        d = self.get_image(address, image_hash)
        return d.addCallback(self.handleCallResponse, nodeToAsk)

    def callGetProfile(self, nodeToAsk):
        address = (nodeToAsk.ip, nodeToAsk.port)
        d = self.get_profile(address)
        return d.addCallback(self.handleCallResponse, nodeToAsk)

    def callGetUserMetadata(self, nodeToAsk):
        address = (nodeToAsk.ip, nodeToAsk.port)
        d = self.get_user_metadata(address)
        return d.addCallback(self.handleCallResponse, nodeToAsk)

    def callGetListings(self, nodeToAsk):
        address = (nodeToAsk.ip, nodeToAsk.port)
        d = self.get_listings(address)
        return d.addCallback(self.handleCallResponse, nodeToAsk)

    def callGetContractMetadata(self, nodeToAsk, contract_hash):
        address = (nodeToAsk.ip, nodeToAsk.port)
        d = self.get_contract_metadata(address, contract_hash)
        return d.addCallback(self.handleCallResponse, nodeToAsk)

    def callFollow(self, nodeToAsk, proto, signature):
        address = (nodeToAsk.ip, nodeToAsk.port)
        d = self.follow(address, proto, signature)
        return d.addCallback(self.handleCallResponse, nodeToAsk)

    def callUnfollow(self, nodeToAsk, signature):
        address = (nodeToAsk.ip, nodeToAsk.port)
        d = self.unfollow(address, signature)
        return d.addCallback(self.handleCallResponse, nodeToAsk)

    def callGetFollowers(self, nodeToAsk):
        address = (nodeToAsk.ip, nodeToAsk.port)
        d = self.get_followers(address)
        return d.addCallback(self.handleCallResponse, nodeToAsk)

    def callGetFollowing(self, nodeToAsk):
        address = (nodeToAsk.ip, nodeToAsk.port)
        d = self.get_following(address)
        return d.addCallback(self.handleCallResponse, nodeToAsk)

    def callNotify(self, nodeToAsk, message, signature):
        address = (nodeToAsk.ip, nodeToAsk.port)
        d = self.notify(address, message, signature)
        return d.addCallback(self.handleCallResponse, nodeToAsk)

    def callMessage(self, nodeToAsk, ephemeral_pubkey, ciphertext):
        address = (nodeToAsk.ip, nodeToAsk.port)
        d = self.message(address, ephemeral_pubkey, ciphertext)
        return d.addCallback(self.handleCallResponse, nodeToAsk)

    def handleCallResponse(self, result, node):
        """
        If we get a response, add the node to the routing table.  If
        we get no response, make sure it's removed from the routing table.
        """
        if result[0]:
            self.log.info("got response from %s, adding to router" % node)
            self.router.addContact(node)
        else:
            self.log.debug("no response from %s, removing from router" % node)
            self.router.removeContact(node)
        return result

    def __iter__(self):
        return iter(self.handled_commands)
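
# A hedged usage sketch, not taken from the source: the call* helpers above
# return Twisted Deferreds whose result is the (success, data) pair produced
# by handleCallResponse.  `protocol`, `peer_node` and `contract_hash` are
# hypothetical stand-ins for a wired-up MarketProtocol, a dht Node and a
# 20-byte contract digest.
def _on_contract(result):
    success, data = result
    if success and data is not None and data[0] != "None":
        print "got contract (%d bytes)" % len(data[0])
    else:
        print "contract not found on that node"

d = protocol.callGetContract(peer_node, contract_hash)
d.addCallback(_on_contract)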
Ejemplo n.º 31
0
class RPCProtocol:
    """
    This is an abstract class for processing and sending rpc messages.
    A class that implements the `MessageProcessor` interface probably should
    extend this as it does most of the work of keeping track of messages.
    """
    __metaclass__ = abc.ABCMeta

    def __init__(self, proto, router, waitTimeout=5):
        """
        Args:
            proto: A protobuf `Node` object containing info about this node.
            router: A `RoutingTable` object from dht.routing. Implies a `network.Server` object
                    must be started first.
            waitTimeout: Consider it a connection failure if no response
                    within this time window.

        """
        self.proto = proto
        self.router = router
        self._waitTimeout = waitTimeout
        self._outstanding = {}
        self.log = Logger(system=self)

    def receive_message(self, datagram, connection):
        m = Message()
        try:
            m.ParseFromString(datagram)
            sender = node.Node(m.sender.guid, m.sender.ip, m.sender.port, m.sender.signedPublicKey, m.sender.vendor)
        except Exception:
            # If the message isn't formatted properly, ignore it
            self.log.warning("received unknown message from %s, ignoring" % str(connection.dest_addr))
            return False

        if m.testnet != self.multiplexer.testnet:
            self.log.warning("received message from %s with incorrect network parameters." %
                             str(connection.dest_addr))
            connection.shutdown()
            return False

        if m.protoVer < PROTOCOL_VERSION:
            self.log.warning("received message from %s with incompatible protocol version." %
                             str(connection.dest_addr))
            connection.shutdown()
            return False

        # Check that the GUID is valid. If not, ignore
        if self.router.isNewNode(sender):
            try:
                pubkey = m.sender.signedPublicKey[len(m.sender.signedPublicKey) - 32:]
                verify_key = nacl.signing.VerifyKey(pubkey)
                verify_key.verify(m.sender.signedPublicKey)
                h = nacl.hash.sha512(m.sender.signedPublicKey)
                pow_hash = h[64:128]
                if int(pow_hash[:6], 16) >= 50 or hexlify(m.sender.guid) != h[:40]:
                    raise Exception('Invalid GUID')

            except Exception:
                self.log.warning("received message from sender with invalid GUID, ignoring")
                connection.shutdown()
                return False

        if m.sender.vendor:
            self.db.VendorStore().save_vendor(m.sender.guid, m.sender.ip, m.sender.port, m.sender.signedPublicKey)

        msgID = m.messageID
        if m.command == NOT_FOUND:
            data = None
        else:
            data = tuple(m.arguments)
        if msgID in self._outstanding:
            self._acceptResponse(msgID, data, sender)
        elif m.command != NOT_FOUND:
            self._acceptRequest(msgID, str(Command.Name(m.command)).lower(), data, sender, connection)

    def _acceptResponse(self, msgID, data, sender):
        if data is not None:
            msgargs = (b64encode(msgID), sender)
            self.log.debug("received response for message id %s from %s" % msgargs)
        else:
            self.log.warning("received 404 error response from %s" % sender)
        d, timeout = self._outstanding[msgID]
        timeout.cancel()
        d.callback((True, data))
        del self._outstanding[msgID]

    def _acceptRequest(self, msgID, funcname, args, sender, connection):
        self.log.debug("received request from %s, command %s" % (sender, funcname.upper()))
        f = getattr(self, "rpc_%s" % funcname, None)
        if f is None or not callable(f):
            msgargs = (self.__class__.__name__, funcname)
            self.log.error("%s has no callable method rpc_%s; ignoring request" % msgargs)
            return False
        if funcname == "hole_punch":
            f(sender, *args)
        else:
            d = defer.maybeDeferred(f, sender, *args)
            d.addCallback(self._sendResponse, funcname, msgID, sender, connection)

    def _sendResponse(self, response, funcname, msgID, sender, connection):
        self.log.debug("sending response for msg id %s to %s" % (b64encode(msgID), sender))
        m = Message()
        m.messageID = msgID
        m.sender.MergeFrom(self.proto)
        m.protoVer = PROTOCOL_VERSION
        m.testnet = self.multiplexer.testnet
        if response is None:
            m.command = NOT_FOUND
        else:
            m.command = Command.Value(funcname.upper())
            for arg in response:
                m.arguments.append(str(arg))
        data = m.SerializeToString()
        connection.send_message(data)

    def _timeout(self, msgID, address):
        """
        If a message times out, we first try hole punching because the node
        may be behind a restricted NAT. If that succeeds, the original message
        should get through. This timeout only fires if the hole punching
        fails.
        """
        # pylint: disable=pointless-string-statement
        """
        Hole punching disabled for now

        seed = SEED_NODE_TESTNET if self.multiplexer.testnet else SEED_NODE
        if not hp and self.multiplexer.ip_address[0] != seed[0]:
            args = (address[0], address[1], b64encode(msgID))
            self.log.debug("did not receive reply from %s:%s for msgID %s, trying hole punching..." % args)
            self.hole_punch(seed, address[0], address[1], "True")
            timeout = reactor.callLater(self._waitTimeout, self._timeout, msgID, address, True)
            self._outstanding[msgID][1] = timeout
        else:
        """
        args = (b64encode(msgID), self._waitTimeout)
        self.log.warning("did not receive reply for msg id %s within %i seconds" % args)
        self._outstanding[msgID][0].callback((False, None))
        del self._outstanding[msgID]
        self.multiplexer[address].shutdown()

    def rpc_hole_punch(self, sender, ip, port, relay="False"):
        """
        A method for handling an incoming HOLE_PUNCH message. Relay the message
        to the correct node if it's not for us. Otherwise send a datagram to allow
        the other node to punch through our NAT.
        """
        if relay == "True":
            self.hole_punch((ip, int(port)), sender.ip, sender.port)
        else:
            self.log.debug("punching through NAT for %s:%s" % (ip, port))
            self.multiplexer.send_datagram(" ", (ip, int(port)))

    def _get_waitTimeout(self, command):
        if command == GET_IMAGE or command == GET_CONTRACT:
            return 100
        else:
            return self._waitTimeout

    def __getattr__(self, name):
        if name.startswith("_") or name.startswith("rpc_"):
            return object.__getattr__(self, name)

        try:
            return object.__getattr__(self, name)
        except AttributeError:
            pass

        def func(address, *args):
            msgID = sha1(str(random.getrandbits(255))).digest()
            m = Message()
            m.messageID = msgID
            m.sender.MergeFrom(self.proto)
            m.command = Command.Value(name.upper())
            m.protoVer = PROTOCOL_VERSION
            for arg in args:
                m.arguments.append(str(arg))
            m.testnet = self.multiplexer.testnet
            data = m.SerializeToString()
            d = defer.Deferred()
            timeout = reactor.callLater(self._get_waitTimeout(m.command), self._timeout, msgID, address)
            self._outstanding[msgID] = [d, timeout]
            self.multiplexer.send_message(data, address)
            self.log.debug("calling remote function %s on %s (msgid %s)" % (name, address, b64encode(msgID)))
            return d

        return func
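
# A minimal sketch (assumption, not from the source) of how a concrete
# protocol hooks into the abstract RPCProtocol above: incoming requests are
# dispatched to rpc_<command> methods by _acceptRequest, while outgoing calls
# are synthesized by __getattr__, so self.echo(address, ...) builds the
# protobuf Message, sends it and returns a Deferred.  ECHO is a hypothetical
# command that would have to exist in the protobuf Command enum.
class EchoProtocol(RPCProtocol):
    def rpc_echo(self, sender, text):
        # Handler for an incoming ECHO request; the returned list becomes
        # the arguments of the response message.
        self.router.addContact(sender)
        return [text]

    def callEcho(self, nodeToAsk, text):
        address = (nodeToAsk.ip, nodeToAsk.port)
        # __getattr__ turns self.echo into a sender for the ECHO command.
        return self.echo(address, text)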
Ejemplo n.º 32
0
class RPCProtocol:
    """
    This is an abstract class for processing and sending rpc messages.
    A class that implements the `MessageProcessor` interface probably should
    extend this as it does most of the work of keeping track of messages.
    """
    __metaclass__ = abc.ABCMeta

    def __init__(self, sourceNode, router, waitTimeout=15):
        """
        Args:
            sourceNode: A protobuf `Node` object containing info about this node.
            router: A `RoutingTable` object from dht.routing. Implies a `network.Server` object
                    must be started first.
            waitTimeout: Timeout for whole messages. Note the txrudp layer has a per-packet
                    timeout but invalid responses won't trigger it. The waitTimeout on this
                    layer needs to be long enough to allow whole messages (e.g. images) to
                    transmit.

        """
        self.sourceNode = sourceNode
        self.router = router
        self._waitTimeout = waitTimeout
        self._outstanding = {}
        self.log = Logger(system=self)

    def receive_message(self, message, sender, connection, ban_score):
        if message.testnet != self.multiplexer.testnet:
            self.log.warning("received message from %s with incorrect network parameters." %
                             str(connection.dest_addr))
            connection.shutdown()
            return False

        if message.protoVer < PROTOCOL_VERSION:
            self.log.warning("received message from %s with incompatible protocol version." %
                             str(connection.dest_addr))
            connection.shutdown()
            return False

        self.multiplexer.vendors[sender.id] = sender

        msgID = message.messageID
        if message.command == NOT_FOUND:
            data = None
        else:
            data = tuple(message.arguments)
        if msgID in self._outstanding:
            self._acceptResponse(msgID, data, sender)
        elif message.command != NOT_FOUND:
            # ban_score.process_message(message)
            self._acceptRequest(msgID, str(Command.Name(message.command)).lower(), data, sender, connection)

    def _acceptResponse(self, msgID, data, sender):
        if data is not None:
            msgargs = (b64encode(msgID), sender)
            self.log.debug("received response for message id %s from %s" % msgargs)
        else:
            self.log.warning("received 404 error response from %s" % sender)
        d = self._outstanding[msgID][0]
        if self._outstanding[msgID][2].active():
            self._outstanding[msgID][2].cancel()
        d.callback((True, data))
        del self._outstanding[msgID]

    def _acceptRequest(self, msgID, funcname, args, sender, connection):
        self.log.debug("received request from %s, command %s" % (sender, funcname.upper()))
        f = getattr(self, "rpc_%s" % funcname, None)
        if f is None or not callable(f):
            msgargs = (self.__class__.__name__, funcname)
            self.log.error("%s has no callable method rpc_%s; ignoring request" % msgargs)
            return False
        if funcname == "hole_punch":
            f(sender, *args)
        else:
            d = defer.maybeDeferred(f, sender, *args)
            d.addCallback(self._sendResponse, funcname, msgID, sender, connection)
            d.addErrback(self._sendResponse, "bad_request", msgID, sender, connection)

    def _sendResponse(self, response, funcname, msgID, sender, connection):
        self.log.debug("sending response for msg id %s to %s" % (b64encode(msgID), sender))
        m = Message()
        m.messageID = msgID
        m.sender.MergeFrom(self.sourceNode.getProto())
        m.protoVer = PROTOCOL_VERSION
        m.testnet = self.multiplexer.testnet
        if response is None:
            m.command = NOT_FOUND
        else:
            m.command = Command.Value(funcname.upper())
            if not isinstance(response, list):
                response = [response]
            for arg in response:
                m.arguments.append(str(arg))
        m.signature = self.signing_key.sign(m.SerializeToString())[:64]
        connection.send_message(m.SerializeToString())

    def timeout(self, node):
        """
        This timeout is called by the txrudp connection handler. We will run through the
        outstanding messages and callback false on any waiting on this IP address.
        """
        address = (node.ip, node.port)
        for msgID, val in self._outstanding.items():
            if address == val[1]:
                val[0].callback((False, None))
                if self._outstanding[msgID][2].active():
                    self._outstanding[msgID][2].cancel()
                del self._outstanding[msgID]

        self.router.removeContact(node)
        try:
            self.multiplexer[address].shutdown()
        except Exception:
            pass

    def rpc_hole_punch(self, sender, ip, port, relay="False"):
        """
        A method for handling an incoming HOLE_PUNCH message. Relay the message
        to the correct node if it's not for us. Otherwise send a datagram to allow
        the other node to punch through our NAT.
        """
        if relay == "True":
            self.log.debug("relaying hole punch packet to %s:%s for %s:%s" %
                           (ip, port, sender.ip, str(sender.port)))
            self.hole_punch(Node(digest("null"), ip, int(port), nat_type=FULL_CONE), sender.ip, sender.port)
        else:
            self.log.debug("punching through NAT for %s:%s" % (ip, port))
            # pylint: disable=W0612
            for i in range(20):
                self.multiplexer.send_datagram("", (ip, int(port)))

    def __getattr__(self, name):
        if name.startswith("_") or name.startswith("rpc_"):
            return object.__getattr__(self, name)

        try:
            return object.__getattr__(self, name)
        except AttributeError:
            pass

        def func(node, *args):
            msgID = sha1(str(random.getrandbits(255))).digest()

            m = Message()
            m.messageID = msgID
            m.sender.MergeFrom(self.sourceNode.getProto())
            m.command = Command.Value(name.upper())
            m.protoVer = PROTOCOL_VERSION
            for arg in args:
                m.arguments.append(str(arg))
            m.testnet = self.multiplexer.testnet
            m.signature = self.signing_key.sign(m.SerializeToString())[:64]
            data = m.SerializeToString()

            address = (node.ip, node.port)
            relay_addr = None
            if node.nat_type == SYMMETRIC or \
                    (node.nat_type == RESTRICTED and self.sourceNode.nat_type == SYMMETRIC):
                relay_addr = node.relay_node

            d = defer.Deferred()
            if m.command != HOLE_PUNCH:
                timeout = reactor.callLater(self._waitTimeout, self.timeout, node)
                self._outstanding[msgID] = [d, address, timeout]
                self.log.debug("calling remote function %s on %s (msgid %s)" % (name, address, b64encode(msgID)))

            self.multiplexer.send_message(data, address, relay_addr)

            if self.multiplexer[address].state != State.CONNECTED and \
                            node.nat_type == RESTRICTED and \
                            self.sourceNode.nat_type != SYMMETRIC:
                self.hole_punch(Node(digest("null"), node.relay_node[0], node.relay_node[1], nat_type=FULL_CONE),
                                address[0], address[1], "True")
                self.log.debug("sending hole punch message to %s" % address[0] + ":" + str(address[1]))

            return d

        return func
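
# A hedged, standalone restatement (illustration only, not part of the
# source) of the relay decision made in func() above: traffic goes through
# the peer's relay node when a direct path is unlikely to work, i.e. when
# the peer sits behind a symmetric NAT, or when the peer is restricted and
# this node is symmetric itself.
def pick_relay_addr(peer_nat_type, own_nat_type, relay_node):
    if peer_nat_type == SYMMETRIC or \
            (peer_nat_type == RESTRICTED and own_nat_type == SYMMETRIC):
        return relay_node  # send via the relay
    return None            # send directly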
Ejemplo n.º 33
0
class KademliaProtocol(RPCProtocol):
    implements(MessageProcessor)

    def __init__(self, sourceNode, storage, ksize, database, signing_key):
        self.ksize = ksize
        self.router = RoutingTable(self, ksize, sourceNode)
        self.storage = storage
        self.sourceNode = sourceNode
        self.multiplexer = None
        self.db = database
        self.signing_key = signing_key
        self.log = Logger(system=self)
        self.handled_commands = [PING, STUN, STORE, DELETE, FIND_NODE, FIND_VALUE, HOLE_PUNCH, INV, VALUES]
        self.recent_transfers = set()
        RPCProtocol.__init__(self, sourceNode, self.router)

    def connect_multiplexer(self, multiplexer):
        self.multiplexer = multiplexer

    def getRefreshIDs(self):
        """
        Get ids to search for to keep old buckets up to date.
        """
        ids = []
        for bucket in self.router.getLonelyBuckets():
            ids.append(random.randint(*bucket.range))
        return ids

    def rpc_stun(self, sender):
        self.addToRouter(sender)
        return [sender.ip, str(sender.port)]

    def rpc_ping(self, sender):
        self.addToRouter(sender)
        return [self.sourceNode.getProto().SerializeToString()]

    def rpc_store(self, sender, keyword, key, value, ttl):
        self.addToRouter(sender)
        self.log.debug("got a store request from %s, storing value" % str(sender))
        if len(keyword) == 20 and len(key) <= 33 and len(value) <= 2100 and int(ttl) <= 604800:
            self.storage[keyword] = (key, value, int(ttl))
            return ["True"]
        else:
            return ["False"]

    def rpc_delete(self, sender, keyword, key, signature):
        self.addToRouter(sender)
        value = self.storage.getSpecific(keyword, key)
        if value is not None:
            # Try to delete a message from the dht
            if keyword == digest(sender.id):
                try:
                    verify_key = nacl.signing.VerifyKey(sender.pubkey)
                    verify_key.verify(key, signature)
                    self.storage.delete(keyword, key)
                    return ["True"]
                except Exception:
                    return ["False"]
            # Or try to delete a pointer
            else:
                try:
                    node = objects.Node()
                    node.ParseFromString(value)
                    pubkey = node.publicKey
                    try:
                        verify_key = nacl.signing.VerifyKey(pubkey)
                        verify_key.verify(key, signature)
                        self.storage.delete(keyword, key)
                        return ["True"]
                    except Exception:
                        return ["False"]
                except Exception:
                    pass
        return ["False"]

    def rpc_find_node(self, sender, key):
        self.log.debug("finding neighbors of %s in local table" % key.encode('hex'))
        self.addToRouter(sender)
        node = Node(key)
        nodeList = self.router.findNeighbors(node, exclude=sender)
        ret = []
        if self.sourceNode.id == key:
            ret.append(self.sourceNode.getProto().SerializeToString())
        for n in nodeList:
            ret.append(n.getProto().SerializeToString())
        return ret

    def rpc_find_value(self, sender, keyword):
        self.addToRouter(sender)
        ret = ["value"]
        value = self.storage.get(keyword, None)
        if value is None:
            return self.rpc_find_node(sender, keyword)
        ret.extend(value)
        return ret

    def rpc_inv(self, sender, *serialized_invs):
        self.addToRouter(sender)
        ret = []
        for inv in serialized_invs:
            try:
                i = objects.Inv()
                i.ParseFromString(inv)
                if self.storage.getSpecific(i.keyword, i.valueKey) is None:
                    ret.append(inv)
            except Exception:
                pass
        return ret

    def rpc_values(self, sender, *serialized_values):
        self.addToRouter(sender)
        for val in serialized_values[:100]:
            try:
                v = objects.Value()
                v.ParseFromString(val)
                self.storage[v.keyword] = (v.valueKey, v.serializedData, int(v.ttl))
            except Exception:
                pass
        return ["True"]

    def callFindNode(self, nodeToAsk, nodeToFind):
        d = self.find_node(nodeToAsk, nodeToFind.id)
        return d.addCallback(self.handleCallResponse, nodeToAsk)

    def callFindValue(self, nodeToAsk, nodeToFind):
        d = self.find_value(nodeToAsk, nodeToFind.id)
        return d.addCallback(self.handleCallResponse, nodeToAsk)

    def callPing(self, nodeToAsk):
        d = self.ping(nodeToAsk)
        return d.addCallback(self.handleCallResponse, nodeToAsk)

    def callStore(self, nodeToAsk, keyword, key, value, ttl):
        d = self.store(nodeToAsk, keyword, key, value, str(int(round(ttl))))
        return d.addCallback(self.handleCallResponse, nodeToAsk)

    def callDelete(self, nodeToAsk, keyword, key, signature):
        d = self.delete(nodeToAsk, keyword, key, signature)
        return d.addCallback(self.handleCallResponse, nodeToAsk)

    def callInv(self, nodeToAsk, serialized_inv_list):
        d = self.inv(nodeToAsk, *serialized_inv_list)
        return d.addCallback(self.handleCallResponse, nodeToAsk)

    def callValues(self, nodeToAsk, serialized_values_list):
        d = self.values(nodeToAsk, *serialized_values_list)
        return d.addCallback(self.handleCallResponse, nodeToAsk)

    def transferKeyValues(self, node):
        """
        Given a new node, send it all the keys/values it should be storing.

        @param node: A new node that just joined (or that we just found out
        about).

        Process:
        For each key in storage, get the k closest nodes.  If the new node is
        closer than the furthest in that list, and the node for this server
        is closer than the closest in that list, then store the key/value
        on the new node (per section 2.5 of the Kademlia paper). A standalone
        sketch of this closeness test follows this class.
        """
        def send_values(inv_list):
            values = []
            if inv_list[0]:
                for requested_inv in inv_list[1]:
                    try:
                        i = objects.Inv()
                        i.ParseFromString(requested_inv)
                        value = self.storage.getSpecific(i.keyword, i.valueKey)
                        if value is not None:
                            v = objects.Value()
                            v.keyword = i.keyword
                            v.valueKey = i.valueKey
                            v.serializedData = value
                            v.ttl = int(round(self.storage.get_ttl(i.keyword, i.valueKey)))
                            values.append(v.SerializeToString())
                    except Exception:
                        pass
                if len(values) > 0:
                    self.callValues(node, values)

        inv = []
        for keyword in self.storage.iterkeys():
            keyword = keyword[0].decode("hex")
            keynode = Node(keyword)
            neighbors = self.router.findNeighbors(keynode, exclude=node)
            if len(neighbors) > 0:
                newNodeClose = node.distanceTo(keynode) < neighbors[-1].distanceTo(keynode)
                thisNodeClosest = self.sourceNode.distanceTo(keynode) < neighbors[0].distanceTo(keynode)
            if len(neighbors) == 0 \
                    or (newNodeClose and thisNodeClosest) \
                    or (thisNodeClosest and len(neighbors) < self.ksize):
                # pylint: disable=W0612
                for k, v in self.storage.iteritems(keyword):
                    i = objects.Inv()
                    i.keyword = keyword
                    i.valueKey = k
                    inv.append(i.SerializeToString())
        if len(inv) > 100:
            random.shuffle(inv)
        if len(inv) > 0:
            self.callInv(node, inv[:100]).addCallback(send_values)

    def handleCallResponse(self, result, node):
        """
        If we get a response, add the node to the routing table.  If
        we get no response, make sure it's removed from the routing table.
        """
        if result[0]:
            if self.isNewConnection(node) and node.id not in self.recent_transfers:
                if len(self.recent_transfers) == 10:
                    self.recent_transfers.pop()
                self.recent_transfers.add(node.id)
                self.log.debug("call response from new node, transferring key/values")
                reactor.callLater(1, self.transferKeyValues, node)
            self.router.addContact(node)
        else:
            self.log.debug("no response from %s, removing from router" % node)
            self.router.removeContact(node)
        return result

    def addToRouter(self, node):
        """
        Called by rpc_ functions when a node sends them a request.
        We add the node to our router and transfer our stored values
        if they are new and within our neighborhood.
        """
        if self.isNewConnection(node) and node.id not in self.recent_transfers:
            if len(self.recent_transfers) == 10:
                self.recent_transfers.pop()
            self.recent_transfers.add(node.id)
            self.log.debug("found a new node, transferring key/values")
            reactor.callLater(1, self.transferKeyValues, node)
        self.router.addContact(node)

    def isNewConnection(self, node):
        if (node.ip, node.port) in self.multiplexer:
            return self.multiplexer[(node.ip, node.port)].handler.check_new_connection()
        else:
            return False

    def __iter__(self):
        return iter(self.handled_commands)
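
# A minimal standalone sketch (illustration only, not from the source) of
# the section-2.5 closeness test used by transferKeyValues above, with
# Kademlia's XOR distance computed on small integer ids instead of Node
# objects.  All names here are hypothetical.
def should_replicate(key_id, new_node_id, own_id, neighbor_ids, ksize):
    if not neighbor_ids:
        return True
    distances = sorted(n ^ key_id for n in neighbor_ids)
    new_node_closer = (new_node_id ^ key_id) < distances[-1]  # closer than the furthest neighbor
    this_node_closest = (own_id ^ key_id) < distances[0]      # we are the closest node we know of
    return (new_node_closer and this_node_closest) or \
           (this_node_closest and len(neighbor_ids) < ksize)

# Example: with neighbors {0b1000, 0b1100} around key 0b0001, a new node at
# 0b0011 is closer than both, so a node at 0b0000 (closest of all) replicates.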
Ejemplo n.º 34
0
class MarketProtocol(RPCProtocol):
    implements(MessageProcessor)

    def __init__(self, node, router, signing_key, database):
        self.router = router
        self.node = node
        RPCProtocol.__init__(self, node, router)
        self.log = Logger(system=self)
        self.multiplexer = None
        self.db = database
        self.signing_key = signing_key
        self.listeners = []
        self.handled_commands = [
            GET_CONTRACT, GET_IMAGE, GET_PROFILE, GET_LISTINGS,
            GET_USER_METADATA, GET_CONTRACT_METADATA, FOLLOW, UNFOLLOW,
            GET_FOLLOWERS, GET_FOLLOWING, BROADCAST, MESSAGE, ORDER,
            ORDER_CONFIRMATION, COMPLETE_ORDER, DISPUTE_OPEN, DISPUTE_CLOSE,
            GET_RATINGS, REFUND
        ]

    def connect_multiplexer(self, multiplexer):
        self.multiplexer = multiplexer

    def add_listener(self, listener):
        self.listeners.append(listener)

    def rpc_get_contract(self, sender, contract_hash):
        self.log.info("serving contract %s to %s" %
                      (contract_hash.encode('hex'), sender))
        self.router.addContact(sender)
        try:
            with open(self.db.filemap.get_file(contract_hash.encode("hex")),
                      "r") as filename:
                contract = filename.read()
            return [contract]
        except Exception:
            self.log.warning("could not find contract %s" %
                             contract_hash.encode('hex'))
            return None

    def rpc_get_image(self, sender, image_hash):
        self.router.addContact(sender)
        try:
            if len(image_hash) != 20:
                self.log.warning("Image hash is not 20 characters %s" %
                                 image_hash)
                raise Exception("Invalid image hash")
            self.log.info("serving image %s to %s" %
                          (image_hash.encode('hex'), sender))
            with open(self.db.filemap.get_file(image_hash.encode("hex")),
                      "rb") as filename:
                image = filename.read()
            return [image]
        except Exception:
            self.log.warning("could not find image %s" %
                             image_hash[:20].encode('hex'))
            return None

    def rpc_get_profile(self, sender):
        self.log.info("serving profile to %s" % sender)
        self.router.addContact(sender)
        try:
            proto = Profile(self.db).get(True)
            return [proto, self.signing_key.sign(proto)[:64]]
        except Exception:
            self.log.error("unable to load the profile")
            return None

    def rpc_get_user_metadata(self, sender):
        self.log.info("serving user metadata to %s" % sender)
        self.router.addContact(sender)
        try:
            proto = Profile(self.db).get(False)
            m = Metadata()
            m.name = proto.name
            m.handle = proto.handle
            m.short_description = proto.short_description
            m.avatar_hash = proto.avatar_hash
            m.nsfw = proto.nsfw
            return [
                m.SerializeToString(),
                self.signing_key.sign(m.SerializeToString())[:64]
            ]
        except Exception:
            self.log.error("unable to load profile metadata")
            return None

    def rpc_get_listings(self, sender):
        self.log.info("serving store listings to %s" % sender)
        self.router.addContact(sender)
        try:
            p = Profile(self.db).get()
            l = Listings()
            l.ParseFromString(self.db.listings.get_proto())
            l.handle = p.handle
            l.avatar_hash = p.avatar_hash
            return [
                l.SerializeToString(),
                self.signing_key.sign(l.SerializeToString())[:64]
            ]
        except Exception:
            self.log.warning("could not find any listings in the database")
            return None

    def rpc_get_contract_metadata(self, sender, contract_hash):
        self.log.info("serving metadata for contract %s to %s" %
                      (contract_hash.encode("hex"), sender))
        self.router.addContact(sender)
        try:
            proto = self.db.listings.get_proto()
            p = Profile(self.db).get()
            l = Listings()
            l.ParseFromString(proto)
            ser = None
            for listing in l.listing:
                if listing.contract_hash == contract_hash:
                    listing.avatar_hash = p.avatar_hash
                    listing.handle = p.handle
                    ser = listing.SerializeToString()
            if ser is None:
                raise Exception("no listing matching the requested contract hash")
            return [ser, self.signing_key.sign(ser)[:64]]
        except Exception:
            self.log.warning("could not find metadata for contract %s" %
                             contract_hash.encode("hex"))
            return None

    def rpc_follow(self, sender, proto, signature):
        self.log.info("received follow request from %s" % sender)
        self.router.addContact(sender)
        try:
            verify_key = nacl.signing.VerifyKey(sender.pubkey)
            verify_key.verify(proto, signature)
            f = Followers.Follower()
            f.ParseFromString(proto)
            if f.guid != sender.id:
                raise Exception('GUID does not match sending node')
            if f.following != self.node.id:
                raise Exception('Following wrong node')
            f.signature = signature
            self.db.follow.set_follower(f)
            proto = Profile(self.db).get(False)
            m = Metadata()
            m.name = proto.name
            m.handle = proto.handle
            m.avatar_hash = proto.avatar_hash
            m.short_description = proto.short_description
            m.nsfw = proto.nsfw
            for listener in self.listeners:
                try:
                    verifyObject(NotificationListener, listener)
                    listener.notify(sender.id, f.metadata.handle, "follow", "",
                                    "", f.metadata.avatar_hash)
                except DoesNotImplement:
                    pass
            return [
                "True",
                m.SerializeToString(),
                self.signing_key.sign(m.SerializeToString())[:64]
            ]
        except Exception:
            self.log.warning("failed to validate follower")
            return ["False"]

    def rpc_unfollow(self, sender, signature):
        self.log.info("received unfollow request from %s" % sender)
        self.router.addContact(sender)
        try:
            verify_key = nacl.signing.VerifyKey(sender.pubkey)
            verify_key.verify("unfollow:" + self.node.id, signature)
            f = self.db.follow
            f.delete_follower(sender.id)
            return ["True"]
        except Exception:
            self.log.warning(
                "failed to validate signature on unfollow request")
            return ["False"]

    def rpc_get_followers(self, sender):
        self.log.info("serving followers list to %s" % sender)
        self.router.addContact(sender)
        ser = self.db.follow.get_followers()
        if ser is None:
            return None
        else:
            return [ser, self.signing_key.sign(ser)[:64]]

    def rpc_get_following(self, sender):
        self.log.info("serving following list to %s" % sender)
        self.router.addContact(sender)
        ser = self.db.follow.get_following()
        if ser is None:
            return None
        else:
            return [ser, self.signing_key.sign(ser)[:64]]

    def rpc_broadcast(self, sender, message, signature):
        if len(message) <= 140 and self.db.follow.is_following(sender.id):
            try:
                verify_key = nacl.signing.VerifyKey(sender.pubkey)
                verify_key.verify(message, signature)
            except Exception:
                self.log.warning("received invalid broadcast from %s" % sender)
                return ["False"]
            self.log.info("received a broadcast from %s" % sender)
            self.router.addContact(sender)
            for listener in self.listeners:
                try:
                    verifyObject(BroadcastListener, listener)
                    listener.notify(sender.id, message)
                except DoesNotImplement:
                    pass
            return ["True"]
        else:
            return ["False"]

    def rpc_message(self, sender, pubkey, encrypted):
        try:
            box = Box(self.signing_key.to_curve25519_private_key(),
                      PublicKey(pubkey))
            plaintext = box.decrypt(encrypted)
            p = PlaintextMessage()
            p.ParseFromString(plaintext)
            signature = p.signature
            p.ClearField("signature")
            verify_key = nacl.signing.VerifyKey(p.pubkey)
            verify_key.verify(p.SerializeToString(), signature)
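            # The sender GUID must equal the first 40 hex characters of
            # sha512(pubkey), and the rest of the digest must clear a small
            # proof-of-work threshold, which makes generating GUIDs costly.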
            h = nacl.hash.sha512(p.pubkey)
            pow_hash = h[40:]
            if int(pow_hash[:6], 16) >= 50 or p.sender_guid.encode(
                    "hex") != h[:40] or p.sender_guid != sender.id:
                raise Exception('Invalid guid')
            self.log.info("received a message from %s" % sender)
            self.router.addContact(sender)
            for listener in self.listeners:
                try:
                    verifyObject(MessageListener, listener)
                    listener.notify(p, signature)
                except DoesNotImplement:
                    pass
            return ["True"]
        except Exception:
            self.log.warning("received invalid message from %s" % sender)
            return ["False"]

    def rpc_order(self, sender, pubkey, encrypted):
        try:
            box = Box(self.signing_key.to_curve25519_private_key(),
                      PublicKey(pubkey))
            order = box.decrypt(encrypted)
            c = Contract(self.db,
                         contract=json.loads(order,
                                             object_pairs_hook=OrderedDict),
                         testnet=self.multiplexer.testnet)
            if c.verify(sender.pubkey):
                self.router.addContact(sender)
                self.log.info(
                    "received an order from %s, waiting for payment..." %
                    sender)
                payment_address = c.contract["buyer_order"]["order"][
                    "payment"]["address"]
                chaincode = c.contract["buyer_order"]["order"]["payment"][
                    "chaincode"]
                masterkey_b = c.contract["buyer_order"]["order"]["id"][
                    "pubkeys"]["bitcoin"]
                buyer_key = derive_childkey(masterkey_b, chaincode)
                amount = c.contract["buyer_order"]["order"]["payment"][
                    "amount"]
                listing_hash = c.contract["vendor_offer"]["listing"][
                    "contract_id"]
                signature = self.signing_key.sign(
                    str(payment_address) + str(amount) + str(listing_hash) +
                    str(buyer_key))[:64]
                c.await_funding(self.get_notification_listener(),
                                self.multiplexer.blockchain, signature, False)
                return [signature]
            else:
                self.log.warning("received invalid order from %s" % sender)
                return ["False"]
        except Exception:
            self.log.error("unable to decrypt order from %s" % sender)
            return ["False"]

    def rpc_order_confirmation(self, sender, pubkey, encrypted):
        try:
            box = Box(self.signing_key.to_curve25519_private_key(),
                      PublicKey(pubkey))
            order = box.decrypt(encrypted)
            c = Contract(self.db,
                         contract=json.loads(order,
                                             object_pairs_hook=OrderedDict),
                         testnet=self.multiplexer.testnet)
            contract_id = c.accept_order_confirmation(
                self.get_notification_listener())
            if contract_id:
                self.router.addContact(sender)
                self.log.info("received confirmation for order %s" %
                              contract_id)
                return ["True"]
            else:
                self.log.warning(
                    "received invalid order confirmation from %s" % sender)
                return ["False"]
        except Exception:
            self.log.error("unable to decrypt order confirmation from %s" %
                           sender)
            return ["False"]

    def rpc_complete_order(self, sender, pubkey, encrypted):
        try:
            box = Box(self.signing_key.to_curve25519_private_key(),
                      PublicKey(pubkey))
            order = box.decrypt(encrypted)
            c = Contract(self.db,
                         contract=json.loads(order,
                                             object_pairs_hook=OrderedDict),
                         testnet=self.multiplexer.testnet)

            contract_id = c.accept_receipt(self.get_notification_listener(),
                                           self.multiplexer.blockchain)
            self.router.addContact(sender)
            self.log.info("received receipt for order %s" % contract_id)
            return ["True"]
        except Exception:
            import traceback
            traceback.print_exc()
            self.log.error("unable to parse receipt from %s" % sender)
            return ["False"]

    def rpc_dispute_open(self, sender, pubkey, encrypted):
        try:
            box = Box(self.signing_key.to_curve25519_private_key(),
                      PublicKey(pubkey))
            order = box.decrypt(encrypted)
            contract = json.loads(order, object_pairs_hook=OrderedDict)
            process_dispute(contract, self.db, self.get_message_listener(),
                            self.get_notification_listener(),
                            self.multiplexer.testnet)
            self.router.addContact(sender)
            self.log.info("Contract dispute opened by %s" % sender)
            return ["True"]
        except Exception:
            self.log.error("unable to parse disputed contract from %s" %
                           sender)
            return ["False"]

    def rpc_dispute_close(self, sender, pubkey, encrypted):
        try:
            box = Box(self.signing_key.to_curve25519_private_key(),
                      PublicKey(pubkey))
            res = box.decrypt(encrypted)
            resolution_json = json.loads(res, object_pairs_hook=OrderedDict)
            close_dispute(resolution_json, self.db,
                          self.get_message_listener(),
                          self.get_notification_listener(),
                          self.multiplexer.testnet)
            self.router.addContact(sender)
            self.log.info("Contract dispute closed by %s" % sender)
            return ["True"]
        except Exception:
            self.log.error("unable to parse disputed close message from %s" %
                           sender)
            return ["False"]

    def rpc_get_ratings(self, sender, listing_hash=None):
        a = "ALL" if listing_hash is None else listing_hash.encode("hex")
        self.log.info("serving ratings for contract %s to %s" % (a, sender))
        self.router.addContact(sender)
        try:
            ratings = []
            if listing_hash:
                for rating in self.db.ratings.get_listing_ratings(
                        listing_hash.encode("hex")):
                    ratings.append(
                        json.loads(rating[0], object_pairs_hook=OrderedDict))
            else:
                for rating in self.db.ratings.get_all_ratings():
                    ratings.append(
                        json.loads(rating[0], object_pairs_hook=OrderedDict))
            ret = json.dumps(ratings).encode("zlib")
            return [str(ret), self.signing_key.sign(ret)[:64]]
        except Exception:
            self.log.warning("could not load ratings for contract %s" % a)
            return None

    def rpc_refund(self, sender, pubkey, encrypted):
        try:
            box = Box(self.signing_key.to_curve25519_private_key(),
                      PublicKey(pubkey))
            refund = box.decrypt(encrypted)
            refund_json = json.loads(refund, object_pairs_hook=OrderedDict)
            c = Contract(self.db,
                         hash_value=unhexlify(
                             refund_json["refund"]["order_id"]),
                         testnet=self.multiplexer.testnet)
            c.process_refund(refund_json, self.multiplexer.blockchain,
                             self.get_notification_listener())
            self.router.addContact(sender)
            self.log.info("order %s refunded by vendor" %
                          refund_json["refund"]["order_id"])
            return ["True"]
        except Exception:
            self.log.error("unable to parse refund message from %s" % sender)
            return ["False"]

    def callGetContract(self, nodeToAsk, contract_hash):
        d = self.get_contract(nodeToAsk, contract_hash)
        return d.addCallback(self.handleCallResponse, nodeToAsk)

    def callGetImage(self, nodeToAsk, image_hash):
        d = self.get_image(nodeToAsk, image_hash)
        return d.addCallback(self.handleCallResponse, nodeToAsk)

    def callGetProfile(self, nodeToAsk):
        d = self.get_profile(nodeToAsk)
        return d.addCallback(self.handleCallResponse, nodeToAsk)

    def callGetUserMetadata(self, nodeToAsk):
        d = self.get_user_metadata(nodeToAsk)
        return d.addCallback(self.handleCallResponse, nodeToAsk)

    def callGetListings(self, nodeToAsk):
        d = self.get_listings(nodeToAsk)
        return d.addCallback(self.handleCallResponse, nodeToAsk)

    def callGetContractMetadata(self, nodeToAsk, contract_hash):
        d = self.get_contract_metadata(nodeToAsk, contract_hash)
        return d.addCallback(self.handleCallResponse, nodeToAsk)

    def callFollow(self, nodeToAsk, proto, signature):
        d = self.follow(nodeToAsk, proto, signature)
        return d.addCallback(self.handleCallResponse, nodeToAsk)

    def callUnfollow(self, nodeToAsk, signature):
        d = self.unfollow(nodeToAsk, signature)
        return d.addCallback(self.handleCallResponse, nodeToAsk)

    def callGetFollowers(self, nodeToAsk):
        d = self.get_followers(nodeToAsk)
        return d.addCallback(self.handleCallResponse, nodeToAsk)

    def callGetFollowing(self, nodeToAsk):
        d = self.get_following(nodeToAsk)
        return d.addCallback(self.handleCallResponse, nodeToAsk)

    def callBroadcast(self, nodeToAsk, message, signature):
        d = self.broadcast(nodeToAsk, message, signature)
        return d.addCallback(self.handleCallResponse, nodeToAsk)

    def callMessage(self, nodeToAsk, ephemeral_pubkey, ciphertext):
        d = self.message(nodeToAsk, ephemeral_pubkey, ciphertext)
        return d.addCallback(self.handleCallResponse, nodeToAsk)

    def callOrder(self, nodeToAsk, ephem_pubkey, encrypted_contract):
        d = self.order(nodeToAsk, ephem_pubkey, encrypted_contract)
        return d.addCallback(self.handleCallResponse, nodeToAsk)

    def callOrderConfirmation(self, nodeToAsk, ephem_pubkey,
                              encrypted_contract):
        d = self.order_confirmation(nodeToAsk, ephem_pubkey,
                                    encrypted_contract)
        return d.addCallback(self.handleCallResponse, nodeToAsk)

    def callCompleteOrder(self, nodeToAsk, ephem_pubkey, encrypted_contract):
        d = self.complete_order(nodeToAsk, ephem_pubkey, encrypted_contract)
        return d.addCallback(self.handleCallResponse, nodeToAsk)

    def callDisputeOpen(self, nodeToAsk, ephem_pubkey, encrypted_contract):
        d = self.dispute_open(nodeToAsk, ephem_pubkey, encrypted_contract)
        return d.addCallback(self.handleCallResponse, nodeToAsk)

    def callDisputeClose(self, nodeToAsk, ephem_pubkey, encrypted_contract):
        d = self.dispute_close(nodeToAsk, ephem_pubkey, encrypted_contract)
        return d.addCallback(self.handleCallResponse, nodeToAsk)

    def callGetRatings(self, nodeToAsk, listing_hash=None):
        if listing_hash is None:
            d = self.get_ratings(nodeToAsk)
        else:
            d = self.get_ratings(nodeToAsk, listing_hash)
        return d.addCallback(self.handleCallResponse, nodeToAsk)

    def callRefund(self, nodeToAsk, order_id, refund):
        d = self.refund(nodeToAsk, order_id, refund)
        return d.addCallback(self.handleCallResponse, nodeToAsk)

    def handleCallResponse(self, result, node):
        """
        If we get a response, add the node to the routing table.  If
        we get no response, make sure it's removed from the routing table.
        """
        if result[0]:
            self.router.addContact(node)
        else:
            self.log.debug("no response from %s, removing from router" % node)
            self.router.removeContact(node)
        return result

    def get_notification_listener(self):
        for listener in self.listeners:
            try:
                verifyObject(NotificationListener, listener)
                return listener
            except DoesNotImplement:
                pass

    def get_message_listener(self):
        for listener in self.listeners:
            try:
                verifyObject(MessageListener, listener)
                return listener
            except DoesNotImplement:
                pass

    def __iter__(self):
        return iter(self.handled_commands)
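
A minimal sketch of how the wrappers above might be driven, assuming a wired-up
MarketProtocol instance named `protocol` and a `Node` object for the remote peer
named `peer` (both names are placeholders, not part of this class). Every call*
method returns a Twisted Deferred that fires with the (succeeded, data) tuple
produced further down the chain by handleCallResponse:

# Hypothetical usage only; `protocol` and `peer` are assumed to already exist.
def print_profile(result):
    succeeded, data = result              # tuple passed through handleCallResponse
    if not succeeded or data is None:
        print "peer did not return a profile"
        return
    profile_proto, signature = data[0], data[1]
    print "got %d profile bytes and a %d byte signature" % (
        len(profile_proto), len(signature))

d = protocol.callGetProfile(peer)
d.addCallback(print_profile)
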
Example #35
0
class Server(object):
    """
    High level view of a node instance.  This is the object that should be created
    to start listening as an active node on the network.
    """

    def __init__(self, node, db, signing_key, ksize=20, alpha=3, storage=None):
        """
        Create a server instance.  Call listen() afterwards to start accepting datagrams on a port.

        Args:
            node: The node instance for this peer. It must contain (at minimum) an ID,
                public key, ip address, and port.
            ksize (int): The k parameter from the paper
            alpha (int): The alpha parameter from the paper
            storage: An instance that implements :interface:`~dht.storage.IStorage`
        """
        self.ksize = ksize
        self.alpha = alpha
        self.log = Logger(system=self)
        self.storage = storage or ForgetfulStorage()
        self.node = node
        self.protocol = KademliaProtocol(self.node, self.storage, ksize, db, signing_key)
        self.refreshLoop = LoopingCall(self.refreshTable)
        reactor.callLater(1800, self.refreshLoop.start, 3600)

    def listen(self, port):
        """
        Start listening on the given port.

        This is the same as calling::

            reactor.listenUDP(port, server.protocol)
        """
        return reactor.listenUDP(port, self.protocol)

    def refreshTable(self):
        """
        Refresh buckets that haven't had any lookups in the last hour
        (per section 2.3 of the paper).
        """
        ds = []
        refresh_ids = self.protocol.getRefreshIDs()
        refresh_ids.append(digest(random.getrandbits(255)))  # random node so we get more diversity
        for rid in refresh_ids:
            node = Node(rid)
            nearest = self.protocol.router.findNeighbors(node, self.alpha)
            spider = NodeSpiderCrawl(self.protocol, node, nearest, self.ksize, self.alpha)
            ds.append(spider.find())

        def republishKeys(_):
            self.log.debug("Republishing key/values...")
            neighbors = self.protocol.router.findNeighbors(self.node, exclude=self.node)
            for node in neighbors:
                self.protocol.transferKeyValues(node)

        return defer.gatherResults(ds).addCallback(republishKeys)

    def querySeed(self, list_seed_pubkey):
        """
        Query an HTTP seed and return a `list` of (ip, port) `tuple` pairs.

        Args:
            list_seed_pubkey: A list of one or more (seed, pubkey) tuples, where
                seed: A `string` consisting of "ip:port" or "hostname:port"
                pubkey: The hex encoded public key used to verify the signature on the response
        """

        nodes = []
        if not list_seed_pubkey:
            self.log.error('no seeds configured in ob.cfg; cannot query for peers')
            return nodes
        else:
            for sp in list_seed_pubkey:
                seed, pubkey = sp
                try:
                    self.log.info("querying %s for peers" % seed)
                    c = httplib.HTTPConnection(seed)
                    c.request("GET", "/")
                    response = c.getresponse()
                    self.log.debug("Http response from %s: %s, %s" % (seed, response.status, response.reason))
                    data = response.read()
                    reread_data = data.decode("zlib")
                    proto = peers.PeerSeeds()
                    proto.ParseFromString(reread_data)
                    for peer in proto.serializedNode:
                        n = objects.Node()
                        n.ParseFromString(peer)
                        tup = (str(n.nodeAddress.ip), n.nodeAddress.port)
                        nodes.append(tup)
                    verify_key = nacl.signing.VerifyKey(pubkey, encoder=nacl.encoding.HexEncoder)
                    verify_key.verify("".join(proto.serializedNode), proto.signature)
                    self.log.info("%s returned %s addresses" % (seed, len(nodes)))
                except Exception as e:
                    self.log.error("failed to query seed: %s" % str(e))
            return nodes
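
A rough sketch of how querySeed might be fed from configuration, assuming `server`
is an already constructed instance of the Server class above and that the seed
entries have been parsed out of ob.cfg; the hostname and hex key below are
placeholders only:

# Illustrative only: the seed host and public key are made-up values.
seed_list = [
    ("seed.example.com:8080",
     "0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef"),
]
peers_found = server.querySeed(seed_list)
for ip, port in peers_found:
    print "discovered peer %s:%d" % (ip, port)
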
Example #36
0
class RPCProtocol:
    """
    This is an abstract class for processing and sending rpc messages.
    A class that implements the `MessageProcessor` interface probably should
    extend this as it does most of the work of keeping track of messages.
    """
    __metaclass__ = abc.ABCMeta

    def __init__(self, sourceNode, router, waitTimeout=2.5):
        """
        Args:
            sourceNode: A protobuf `Node` object containing info about this node.
            router: A `RoutingTable` object from dht.routing. Implies a `network.Server` object
                    must be started first.
            waitTimeout: Consider it a connection failure if no response arrives
                    within this time window.

        """
        self.sourceNode = sourceNode
        self.router = router
        self._waitTimeout = waitTimeout
        self._outstanding = {}
        self.log = Logger(system=self)

    def receive_message(self, datagram, connection, ban_score):
        m = Message()
        try:
            m.ParseFromString(datagram)
            sender = node.Node(m.sender.guid, m.sender.ip, m.sender.port, m.sender.signedPublicKey, m.sender.vendor)
        except Exception:
            # If the message isn't formatted properly then ignore it
            self.log.warning("received unknown message from %s, ignoring" % str(connection.dest_addr))
            return False

        if m.testnet != self.multiplexer.testnet:
            self.log.warning("received message from %s with incorrect network parameters." %
                             str(connection.dest_addr))
            connection.shutdown()
            return False

        if m.protoVer < PROTOCOL_VERSION:
            self.log.warning("received message from %s with incompatible protocol version." %
                             str(connection.dest_addr))
            connection.shutdown()
            return False

        # Check that the GUID is valid. If not, ignore
        if self.router.isNewNode(sender):
            try:
                pubkey = m.sender.signedPublicKey[len(m.sender.signedPublicKey) - 32:]
                verify_key = nacl.signing.VerifyKey(pubkey)
                verify_key.verify(m.sender.signedPublicKey)
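                # Recompute the GUID from the signed public key: it must match
                # the first 40 hex characters of the sha512 digest, and a slice
                # of the digest must fall under the proof-of-work threshold.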
                h = nacl.hash.sha512(m.sender.signedPublicKey)
                pow_hash = h[64:128]
                if int(pow_hash[:6], 16) >= 50 or hexlify(m.sender.guid) != h[:40]:
                    raise Exception('Invalid GUID')

            except Exception:
                self.log.warning("received message from sender with invalid GUID, ignoring")
                connection.shutdown()
                return False

        if m.sender.vendor:
            self.db.VendorStore().save_vendor(m.sender.guid.encode("hex"), m.sender.ip,
                                              m.sender.port, m.sender.signedPublicKey)

        msgID = m.messageID
        if m.command == NOT_FOUND:
            data = None
        else:
            data = tuple(m.arguments)
        if msgID in self._outstanding:
            self._acceptResponse(msgID, data, sender)
        elif m.command != NOT_FOUND:
            #ban_score.process_message(m)
            self._acceptRequest(msgID, str(Command.Name(m.command)).lower(), data, sender, connection)

    def _acceptResponse(self, msgID, data, sender):
        if data is not None:
            msgargs = (b64encode(msgID), sender)
            self.log.debug("received response for message id %s from %s" % msgargs)
        else:
            self.log.warning("received 404 error response from %s" % sender)
        d = self._outstanding[msgID][0]
        if self._outstanding[msgID][2].active():
            self._outstanding[msgID][2].cancel()
        d.callback((True, data))
        del self._outstanding[msgID]

    def _acceptRequest(self, msgID, funcname, args, sender, connection):
        self.log.debug("received request from %s, command %s" % (sender, funcname.upper()))
        f = getattr(self, "rpc_%s" % funcname, None)
        if f is None or not callable(f):
            msgargs = (self.__class__.__name__, funcname)
            self.log.error("%s has no callable method rpc_%s; ignoring request" % msgargs)
            return False
        if funcname == "hole_punch":
            f(sender, *args)
        else:
            d = defer.maybeDeferred(f, sender, *args)
            d.addCallback(self._sendResponse, funcname, msgID, sender, connection)
            d.addErrback(self._sendResponse, "bad_request", msgID, sender, connection)

    def _sendResponse(self, response, funcname, msgID, sender, connection):
        self.log.debug("sending response for msg id %s to %s" % (b64encode(msgID), sender))
        m = Message()
        m.messageID = msgID
        m.sender.MergeFrom(self.sourceNode.getProto())
        m.protoVer = PROTOCOL_VERSION
        m.testnet = self.multiplexer.testnet
        if response is None:
            m.command = NOT_FOUND
        else:
            m.command = Command.Value(funcname.upper())
            if not isinstance(response, list):
                response = [response]
            for arg in response:
                m.arguments.append(str(arg))
        connection.send_message(m.SerializeToString())

    def timeout(self, address):
        """
        Called by the txrudp connection handler when a connection times out. Run
        through the outstanding messages and fire the deferred with (False, None)
        for any that were waiting on this IP address.
        """
        for msgID, val in self._outstanding.items():
            if address == val[1]:
                val[0].callback((False, None))
                del self._outstanding[msgID]
        try:
            node_to_remove = self.multiplexer[address].handler.node
            if node_to_remove is not None:
                self.router.removeContact(node_to_remove)
            self.multiplexer[address].shutdown()
        except Exception:
            pass

    def rpc_hole_punch(self, sender, ip, port, relay="False"):
        """
        A method for handling an incoming HOLE_PUNCH message. Relay the message
        to the correct node if it's not for us. Otherwise send a datagram to allow
        the other node to punch through our NAT.
        """
        if relay == "True":
            self.hole_punch((ip, int(port)), sender.ip, sender.port)
        else:
            self.log.debug("punching through NAT for %s:%s" % (ip, port))
            # pylint: disable=W0612
            for i in range(20):
                self.multiplexer.send_datagram("", (ip, int(port)))

    def __getattr__(self, name):
        # __getattr__ only runs after normal attribute lookup has failed, so
        # private names and rpc_* handlers are never valid remote commands.
        if name.startswith("_") or name.startswith("rpc_"):
            raise AttributeError("%s has no attribute %s" % (self.__class__.__name__, name))

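        # For any other attribute name, synthesize a stub that serializes the
        # arguments into a protobuf Message, sends it through the multiplexer
        # and returns a Deferred that fires when the response (or timeout) arrives.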
        def func(address, *args):
            msgID = sha1(str(random.getrandbits(255))).digest()
            d = defer.Deferred()
            if name != "hole_punch":
                seed = SEED_NODE_TESTNET if self.multiplexer.testnet else SEED_NODE
                if address in self.multiplexer and self.multiplexer[address].state == State.CONNECTED:
                    timeout = reactor.callLater(self._waitTimeout, self.timeout, address)
                else:
                    timeout = reactor.callLater(self._waitTimeout, self.hole_punch, seed,
                                                address[0], address[1], "True", msgID)
                self._outstanding[msgID] = [d, address, timeout]
                self.log.debug("calling remote function %s on %s (msgid %s)" % (name, address, b64encode(msgID)))
            elif args[3] in self._outstanding:
                prev_msgID = args[3]
                args = args[:3]
                deferred, addr, hp = self._outstanding[prev_msgID]  # pylint: disable=W0612
                timeout = reactor.callLater(3, self.timeout, addr)
                self._outstanding[prev_msgID] = [deferred, addr, timeout]
                self.log.debug("sending hole punch message to %s" % args[0] + ":" + str(args[1]))

            m = Message()
            m.messageID = msgID
            m.sender.MergeFrom(self.sourceNode.getProto())
            m.command = Command.Value(name.upper())
            m.protoVer = PROTOCOL_VERSION
            for arg in args:
                m.arguments.append(str(arg))
            m.testnet = self.multiplexer.testnet
            data = m.SerializeToString()

            self.multiplexer.send_message(data, address)
            return d

        return func
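
As a rough illustration of the pattern above (not code from this project), a
subclass only needs to define rpc_* handlers; the matching outbound stubs are
generated on demand by __getattr__. This assumes a PING entry exists in the
protobuf Command enum and that a multiplexer has already been attached; in the
version shown above the stub's first argument is an (ip, port) address tuple:

# Hypothetical subclass sketch; PingProtocol is not part of this codebase.
class PingProtocol(RPCProtocol):

    def rpc_ping(self, sender, payload):
        # Invoked when a PING command arrives from the network.
        self.router.addContact(sender)
        return [payload]                  # echoed back to the caller

    def callPing(self, address, payload):
        # self.ping is not defined anywhere; __getattr__ builds a stub that
        # serializes a PING message, sends it to `address` and returns a
        # Deferred firing with (succeeded, data).
        return self.ping(address, payload)
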
Example #37
0
class MarketProtocol(RPCProtocol):
    implements(MessageProcessor)

    def __init__(self, node, router, signing_key, database):
        self.router = router
        self.node = node
        RPCProtocol.__init__(self, node, router)
        self.log = Logger(system=self)
        self.multiplexer = None
        self.db = database
        self.signing_key = signing_key
        self.listeners = []
        self.handled_commands = [GET_CONTRACT, GET_IMAGE, GET_PROFILE, GET_LISTINGS, GET_USER_METADATA,
                                 GET_CONTRACT_METADATA, FOLLOW, UNFOLLOW, GET_FOLLOWERS, GET_FOLLOWING,
                                 BROADCAST, MESSAGE, ORDER, ORDER_CONFIRMATION, COMPLETE_ORDER, DISPUTE_OPEN,
                                 DISPUTE_CLOSE]

    def connect_multiplexer(self, multiplexer):
        self.multiplexer = multiplexer

    def add_listener(self, listener):
        self.listeners.append(listener)

    def rpc_get_contract(self, sender, contract_hash):
        self.log.info("serving contract %s to %s" % (contract_hash.encode('hex'), sender))
        self.router.addContact(sender)
        try:
            with open(self.db.HashMap().get_file(contract_hash.encode("hex")), "r") as filename:
                contract = filename.read()
            return [contract]
        except Exception:
            self.log.warning("could not find contract %s" % contract_hash.encode('hex'))
            return None

    def rpc_get_image(self, sender, image_hash):
        self.router.addContact(sender)
        try:
            if len(image_hash) != 20:
                raise Exception("Invalid image hash")
            self.log.info("serving image %s to %s" % (image_hash.encode('hex'), sender))
            with open(self.db.HashMap().get_file(image_hash.encode("hex")), "rb") as filename:
                image = filename.read()
            return [image]
        except Exception:
            self.log.warning("could not find image %s" % image_hash[:20].encode('hex'))
            return None

    def rpc_get_profile(self, sender):
        self.log.info("serving profile to %s" % sender)
        self.router.addContact(sender)
        try:
            proto = Profile(self.db).get(True)
            return [proto, self.signing_key.sign(proto)[:64]]
        except Exception:
            self.log.error("unable to load the profile")
            return None

    def rpc_get_user_metadata(self, sender):
        self.log.info("serving user metadata to %s" % sender)
        self.router.addContact(sender)
        try:
            proto = Profile(self.db).get(False)
            m = Metadata()
            m.name = proto.name
            m.handle = proto.handle
            m.short_description = proto.short_description
            m.avatar_hash = proto.avatar_hash
            m.nsfw = proto.nsfw
            return [m.SerializeToString(), self.signing_key.sign(m.SerializeToString())[:64]]
        except Exception:
            self.log.error("unable to load profile metadata")
            return None

    def rpc_get_listings(self, sender):
        self.log.info("serving store listings to %s" % sender)
        self.router.addContact(sender)
        try:
            p = Profile(self.db).get()
            l = Listings()
            l.ParseFromString(self.db.ListingsStore().get_proto())
            l.handle = p.handle
            l.avatar_hash = p.avatar_hash
            return [l.SerializeToString(), self.signing_key.sign(l.SerializeToString())[:64]]
        except Exception:
            self.log.warning("could not find any listings in the database")
            return None

    def rpc_get_contract_metadata(self, sender, contract_hash):
        self.log.info("serving metadata for contract %s to %s" % (contract_hash.encode("hex"), sender))
        self.router.addContact(sender)
        try:
            proto = self.db.ListingsStore().get_proto()
            p = Profile(self.db).get()
            l = Listings()
            l.ParseFromString(proto)
            ser = None
            for listing in l.listing:
                if listing.contract_hash == contract_hash:
                    listing.avatar_hash = p.avatar_hash
                    listing.handle = p.handle
                    ser = listing.SerializeToString()
            if ser is None:
                raise Exception("no listing matching the requested contract hash")
            return [ser, self.signing_key.sign(ser)[:64]]
        except Exception:
            self.log.warning("could not find metadata for contract %s" % contract_hash.encode("hex"))
            return None

    def rpc_follow(self, sender, proto, signature):
        self.log.info("received follow request from %s" % sender)
        self.router.addContact(sender)
        try:
            verify_key = nacl.signing.VerifyKey(sender.signed_pubkey[64:])
            verify_key.verify(proto, signature)
            f = Followers.Follower()
            f.ParseFromString(proto)
            if f.guid != sender.id:
                raise Exception('GUID does not match sending node')
            if f.following != self.node.id:
                raise Exception('Following wrong node')
            f.signature = signature
            self.db.FollowData().set_follower(f)
            proto = Profile(self.db).get(False)
            m = Metadata()
            m.name = proto.name
            m.handle = proto.handle
            m.avatar_hash = proto.avatar_hash
            m.short_description = proto.short_description
            m.nsfw = proto.nsfw
            for listener in self.listeners:
                try:
                    verifyObject(NotificationListener, listener)
                    listener.notify(sender.id, f.metadata.handle, "follow", "", "", f.metadata.avatar_hash)
                except DoesNotImplement:
                    pass
            return ["True", m.SerializeToString(), self.signing_key.sign(m.SerializeToString())[:64]]
        except Exception:
            self.log.warning("failed to validate follower")
            return ["False"]

    def rpc_unfollow(self, sender, signature):
        self.log.info("received unfollow request from %s" % sender)
        self.router.addContact(sender)
        try:
            verify_key = nacl.signing.VerifyKey(sender.signed_pubkey[64:])
            verify_key.verify("unfollow:" + self.node.id, signature)
            f = self.db.FollowData()
            f.delete_follower(sender.id)
            return ["True"]
        except Exception:
            self.log.warning("failed to validate signature on unfollow request")
            return ["False"]

    def rpc_get_followers(self, sender):
        self.log.info("serving followers list to %s" % sender)
        self.router.addContact(sender)
        ser = self.db.FollowData().get_followers()
        if ser is None:
            return None
        else:
            return [ser, self.signing_key.sign(ser)[:64]]

    def rpc_get_following(self, sender):
        self.log.info("serving following list to %s" % sender)
        self.router.addContact(sender)
        ser = self.db.FollowData().get_following()
        if ser is None:
            return None
        else:
            return [ser, self.signing_key.sign(ser)[:64]]

    def rpc_broadcast(self, sender, message, signature):
        if len(message) <= 140 and self.db.FollowData().is_following(sender.id):
            try:
                verify_key = nacl.signing.VerifyKey(sender.signed_pubkey[64:])
                verify_key.verify(message, signature)
            except Exception:
                self.log.warning("received invalid broadcast from %s" % sender)
                return ["False"]
            self.log.info("received a broadcast from %s" % sender)
            self.router.addContact(sender)
            for listener in self.listeners:
                try:
                    verifyObject(BroadcastListener, listener)
                    listener.notify(sender.id, message)
                except DoesNotImplement:
                    pass
            return ["True"]
        else:
            return ["False"]

    def rpc_message(self, sender, pubkey, encrypted):
        try:
            box = Box(PrivateKey(self.signing_key.encode(nacl.encoding.RawEncoder)), PublicKey(pubkey))
            plaintext = box.decrypt(encrypted)
            p = PlaintextMessage()
            p.ParseFromString(plaintext)
            signature = p.signature
            p.ClearField("signature")
            verify_key = nacl.signing.VerifyKey(p.signed_pubkey[64:])
            verify_key.verify(p.SerializeToString(), signature)
            h = nacl.hash.sha512(p.signed_pubkey)
            pow_hash = h[64:128]
            if int(pow_hash[:6], 16) >= 50 or p.sender_guid.encode("hex") != h[:40] or p.sender_guid != sender.id:
                raise Exception('Invalid guid')
            self.log.info("received a message from %s" % sender)
            self.router.addContact(sender)
            for listener in self.listeners:
                try:
                    verifyObject(MessageListener, listener)
                    listener.notify(p, signature)
                except DoesNotImplement:
                    pass
            return ["True"]
        except Exception:
            self.log.warning("received invalid message from %s" % sender)
            return ["False"]

    def rpc_order(self, sender, pubkey, encrypted):
        try:
            box = Box(PrivateKey(self.signing_key.encode(nacl.encoding.RawEncoder)), PublicKey(pubkey))
            order = box.decrypt(encrypted)
            c = Contract(self.db, contract=json.loads(order, object_pairs_hook=OrderedDict),
                         testnet=self.multiplexer.testnet)
            if c.verify(sender.signed_pubkey[64:]):
                self.router.addContact(sender)
                self.log.info("received an order from %s, waiting for payment..." % sender)
                payment_address = c.contract["buyer_order"]["order"]["payment"]["address"]
                chaincode = c.contract["buyer_order"]["order"]["payment"]["chaincode"]
                masterkey_b = c.contract["buyer_order"]["order"]["id"]["pubkeys"]["bitcoin"]
                buyer_key = derive_childkey(masterkey_b, chaincode)
                amount = c.contract["buyer_order"]["order"]["payment"]["amount"]
                listing_hash = c.contract["buyer_order"]["order"]["ref_hash"]
                signature = self.signing_key.sign(
                    str(payment_address) + str(amount) + str(listing_hash) + str(buyer_key))[:64]
                c.await_funding(self.get_notification_listener(), self.multiplexer.blockchain, signature, False)
                return [signature]
            else:
                self.log.warning("received invalid order from %s" % sender)
                return ["False"]
        except Exception:
            self.log.error("unable to decrypt order from %s" % sender)
            return ["False"]

    def rpc_order_confirmation(self, sender, pubkey, encrypted):
        try:
            box = Box(PrivateKey(self.signing_key.encode(nacl.encoding.RawEncoder)), PublicKey(pubkey))
            order = box.decrypt(encrypted)
            c = Contract(self.db, contract=json.loads(order, object_pairs_hook=OrderedDict),
                         testnet=self.multiplexer.testnet)
            contract_id = c.accept_order_confirmation(self.get_notification_listener())
            if contract_id:
                self.router.addContact(sender)
                self.log.info("received confirmation for order %s" % contract_id)
                return ["True"]
            else:
                self.log.warning("received invalid order confirmation from %s" % sender)
                return ["False"]
        except Exception:
            self.log.error("unable to decrypt order confirmation from %s" % sender)
            return ["False"]

    def rpc_complete_order(self, sender, pubkey, encrypted):
        try:
            box = Box(PrivateKey(self.signing_key.encode(nacl.encoding.RawEncoder)), PublicKey(pubkey))
            order = box.decrypt(encrypted)
            c = Contract(self.db, contract=json.loads(order, object_pairs_hook=OrderedDict),
                         testnet=self.multiplexer.testnet)

            contract_id = c.accept_receipt(self.get_notification_listener(), self.multiplexer.blockchain)
            self.router.addContact(sender)
            self.log.info("received receipt for order %s" % contract_id)
            return ["True"]
        except Exception:
            self.log.error("unable to parse receipt from %s" % sender)
            return ["False"]

    def rpc_dispute_open(self, sender, pubkey, encrypted):
        try:
            box = Box(PrivateKey(self.signing_key.encode(nacl.encoding.RawEncoder)), PublicKey(pubkey))
            order = box.decrypt(encrypted)
            contract = json.loads(order, object_pairs_hook=OrderedDict)
            process_dispute(contract, self.db, self.get_message_listener(),
                            self.get_notification_listener(), self.multiplexer.testnet)
            self.router.addContact(sender)
            self.log.info("Contract dispute opened by %s" % sender)
            return ["True"]
        except Exception:
            self.log.error("unable to parse disputed contract from %s" % sender)
            return ["False"]

    def rpc_dispute_close(self, sender, pubkey, encrypted):
        try:
            box = Box(PrivateKey(self.signing_key.encode(nacl.encoding.RawEncoder)), PublicKey(pubkey))
            order = box.decrypt(encrypted)
            contract = json.loads(order, object_pairs_hook=OrderedDict)
            close_dispute(contract, self.db, self.get_message_listener(),
                          self.get_notification_listener(), self.multiplexer.testnet)
            self.router.addContact(sender)
            self.log.info("Contract dispute closed by %s" % sender)
            return ["True"]
        except Exception:
            self.log.error("unable to parse disputed close message from %s" % sender)
            return ["False"]

    def callGetContract(self, nodeToAsk, contract_hash):
        d = self.get_contract(nodeToAsk, contract_hash)
        return d.addCallback(self.handleCallResponse, nodeToAsk)

    def callGetImage(self, nodeToAsk, image_hash):
        d = self.get_image(nodeToAsk, image_hash)
        return d.addCallback(self.handleCallResponse, nodeToAsk)

    def callGetProfile(self, nodeToAsk):
        d = self.get_profile(nodeToAsk)
        return d.addCallback(self.handleCallResponse, nodeToAsk)

    def callGetUserMetadata(self, nodeToAsk):
        d = self.get_user_metadata(nodeToAsk)
        return d.addCallback(self.handleCallResponse, nodeToAsk)

    def callGetListings(self, nodeToAsk):
        d = self.get_listings(nodeToAsk)
        return d.addCallback(self.handleCallResponse, nodeToAsk)

    def callGetContractMetadata(self, nodeToAsk, contract_hash):
        d = self.get_contract_metadata(nodeToAsk, contract_hash)
        return d.addCallback(self.handleCallResponse, nodeToAsk)

    def callFollow(self, nodeToAsk, proto, signature):
        d = self.follow(nodeToAsk, proto, signature)
        return d.addCallback(self.handleCallResponse, nodeToAsk)

    def callUnfollow(self, nodeToAsk, signature):
        d = self.unfollow(nodeToAsk, signature)
        return d.addCallback(self.handleCallResponse, nodeToAsk)

    def callGetFollowers(self, nodeToAsk):
        d = self.get_followers(nodeToAsk)
        return d.addCallback(self.handleCallResponse, nodeToAsk)

    def callGetFollowing(self, nodeToAsk):
        d = self.get_following(nodeToAsk)
        return d.addCallback(self.handleCallResponse, nodeToAsk)

    def callBroadcast(self, nodeToAsk, message, signature):
        d = self.broadcast(nodeToAsk, message, signature)
        return d.addCallback(self.handleCallResponse, nodeToAsk)

    def callMessage(self, nodeToAsk, ephemeral_pubkey, ciphertext):
        d = self.message(nodeToAsk, ephemeral_pubkey, ciphertext)
        return d.addCallback(self.handleCallResponse, nodeToAsk)

    def callOrder(self, nodeToAsk, ephem_pubkey, encrypted_contract):
        d = self.order(nodeToAsk, ephem_pubkey, encrypted_contract)
        return d.addCallback(self.handleCallResponse, nodeToAsk)

    def callOrderConfirmation(self, nodeToAsk, ephem_pubkey, encrypted_contract):
        d = self.order_confirmation(nodeToAsk, ephem_pubkey, encrypted_contract)
        return d.addCallback(self.handleCallResponse, nodeToAsk)

    def callCompleteOrder(self, nodeToAsk, ephem_pubkey, encrypted_contract):
        d = self.complete_order(nodeToAsk, ephem_pubkey, encrypted_contract)
        return d.addCallback(self.handleCallResponse, nodeToAsk)

    def callDisputeOpen(self, nodeToAsk, ephem_pubkey, encrypted_contract):
        d = self.dispute_open(nodeToAsk, ephem_pubkey, encrypted_contract)
        return d.addCallback(self.handleCallResponse, nodeToAsk)

    def callDisputeClose(self, nodeToAsk, ephem_pubkey, encrypted_contract):
        d = self.dispute_close(nodeToAsk, ephem_pubkey, encrypted_contract)
        return d.addCallback(self.handleCallResponse, nodeToAsk)

    def handleCallResponse(self, result, node):
        """
        If we get a response, add the node to the routing table.  If
        we get no response, make sure it's removed from the routing table.
        """
        if result[0]:
            self.router.addContact(node)
        else:
            self.log.debug("no response from %s, removing from router" % node)
            self.router.removeContact(node)
        return result

    def get_notification_listener(self):
        for listener in self.listeners:
            try:
                verifyObject(NotificationListener, listener)
                return listener
            except DoesNotImplement:
                pass

    def get_message_listener(self):
        for listener in self.listeners:
            try:
                verifyObject(MessageListener, listener)
                return listener
            except DoesNotImplement:
                pass

    def __iter__(self):
        return iter(self.handled_commands)
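
The encrypted RPCs above all follow the same PyNaCl pattern: the caller encrypts
with a Curve25519 Box keyed to the recipient and the payload carries an Ed25519
signature that is checked after decryption. A stripped-down sketch of that round
trip, deliberately independent of this project's protobuf types:

# Standalone PyNaCl sketch of the encrypt/decrypt plus sign/verify round trip
# used by the rpc_* handlers above; it does not touch this project's classes.
import nacl.signing
from nacl.public import Box, PrivateKey

# Recipient's long-term signing key and its Curve25519 counterpart.
recipient_signing_key = nacl.signing.SigningKey.generate()
recipient_box_key = recipient_signing_key.to_curve25519_private_key()

# Sender side: sign the plaintext, then encrypt it to the recipient.
sender_signing_key = nacl.signing.SigningKey.generate()
sender_ephemeral = PrivateKey.generate()
plaintext = b"order payload"
signed = sender_signing_key.sign(plaintext)            # signature + message
ciphertext = Box(sender_ephemeral, recipient_box_key.public_key).encrypt(signed)

# Recipient side: decrypt with the sender's ephemeral public key, then verify.
recipient_box = Box(recipient_box_key, sender_ephemeral.public_key)
recovered = sender_signing_key.verify_key.verify(recipient_box.decrypt(ciphertext))
assert recovered == plaintext
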
Example #38
0
class Server(object):
    """
    High level view of a node instance.  This is the object that should be created
    to start listening as an active node on the network.
    """

    def __init__(self, node, db, ksize=20, alpha=3, storage=None):
        """
        Create a server instance.  Call listen() afterwards to start accepting datagrams on a port.

        Args:
            node: The node instance for this peer. It must contain (at minimum) an ID,
                public key, ip address, and port.
            ksize (int): The k parameter from the paper
            alpha (int): The alpha parameter from the paper
            storage: An instance that implements :interface:`~dht.storage.IStorage`
        """
        self.ksize = ksize
        self.alpha = alpha
        self.log = Logger(system=self)
        self.storage = storage or ForgetfulStorage()
        self.node = node
        self.protocol = KademliaProtocol(self.node, self.storage, ksize, db)
        self.refreshLoop = LoopingCall(self.refreshTable).start(3600)

    def listen(self, port):
        """
        Start listening on the given port.

        This is the same as calling::

            reactor.listenUDP(port, server.protocol)
        """
        return reactor.listenUDP(port, self.protocol)

    def refreshTable(self):
        """
        Refresh buckets that haven't had any lookups in the last hour
        (per section 2.3 of the paper).
        """
        ds = []
        for rid in self.protocol.getRefreshIDs():
            node = Node(rid)
            nearest = self.protocol.router.findNeighbors(node, self.alpha)
            spider = NodeSpiderCrawl(self.protocol, node, nearest, self.ksize, self.alpha)
            ds.append(spider.find())

        def republishKeys(_):
            # Republish keys older than one hour: 601200 is one week minus one
            # hour, assuming the storage default TTL of 604800 seconds.
            republish_ds = []
            for keyword in self.storage.iterkeys():
                for k, v in self.storage.iteritems(keyword):
                    if self.storage.get_ttl(keyword, k) < 601200:
                        republish_ds.append(self.set(keyword, k, v))
            return defer.gatherResults(republish_ds)

        return defer.gatherResults(ds).addCallback(republishKeys)

    def querySeed(self, seed, pubkey):
        """
        Query an HTTP seed and return a `list` of (ip, port) `tuple` pairs.

        Args:
           seed: A `string` consisting of "ip:port" or "hostname:port"
           pubkey: The hex encoded public key to verify the signature on the response
        """
        try:
            self.log.info("querying %s for peers" % seed)
            nodes = []
            c = httplib.HTTPConnection(seed)
            c.request("GET", "/")
            response = c.getresponse()
            self.log.debug("Http response from %s: %s, %s" % (seed, response.status, response.reason))
            data = response.read()
            reread_data = data.decode("zlib")
            proto = peers.PeerSeeds()
            proto.ParseFromString(reread_data)
            for peer in proto.peer_data:
                p = peers.PeerData()
                p.ParseFromString(peer)
                tup = (str(p.ip_address), p.port)
                nodes.append(tup)
            verify_key = nacl.signing.VerifyKey(pubkey, encoder=nacl.encoding.HexEncoder)
            verify_key.verify("".join(proto.peer_data), proto.signature)
            self.log.info("%s returned %s addresses" % (seed, len(nodes)))
            return nodes
        except Exception as e:
            self.log.error("failed to query seed: %s" % str(e))