Example #1
    def __init__(self, node):
        """
        :param node: node object
        :type node: lndmanage.lib.node.LndNode
        """
        self.node = node
        self.network_info = NetworkAnalysis(self.node)
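This snippet matches the constructor of the Info command class shown in full in Example #4. A minimal usage sketch, assuming a reachable lnd instance (import path as in Example #5):

    from lndmanage.lib.node import LndNode

    node = LndNode()  # connects to the configured lnd instance
    info = Info(node)  # wires up the node and a NetworkAnalysis helper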
Example #2
    def __init__(self, node, show_connected=False, show_addresses=False):
        self.node = node
        self.network_analysis = NetworkAnalysis(self.node)

        self.show_connected = show_connected
        self.show_address = show_addresses
Example #3
import re
import time
import urllib.request
from collections import OrderedDict
from urllib.error import HTTPError

from lndmanage.lib.network_info import NetworkAnalysis
from lndmanage import settings
# ForwardingAnalyzer, print_node_format, and the module logger are assumed to
# be provided by the surrounding lndmanage modules (not shown in this snippet).


class RecommendNodes(object):
    """
    A class to recommend nodes to connect to.
    """
    def __init__(self, node, show_connected=False, show_addresses=False):
        self.node = node
        self.show_connected = show_connected
        self.show_address = show_addresses
        self.network_analysis = NetworkAnalysis(self.node)

    def print_flow_analysis(self,
                            out_direction=True,
                            number_of_nodes=20,
                            forwarding_events=200,
                            sort_by='weight'):
        nodes = self.flow_analysis(
            out_direction, last_forwardings_to_analyze=forwarding_events)
        format_string = 'rpk,nchan,cap,cpc,alias'
        self.print_nodes(nodes,
                         number_of_nodes,
                         format_string,
                         sort_by=sort_by)

    def print_good_old(self, number_of_nodes=20, sort_by='tot'):
        nodes = self.good_old()
        format_string = 'rpk,tot,flow,nchan,cap,cpc,alias'
        self.print_nodes(nodes, number_of_nodes, format_string, sort_by)

    def print_external_source(self,
                              source,
                              distributing_nodes,
                              number_of_nodes=20,
                              sort_by='cap'):

        nodes = self.external_source(source, distributing_nodes)
        logger.info(f"Showing nodes from source {source}.")
        format_string = 'rpk,con,nchan,cap,cpc,alias'
        self.print_nodes(nodes,
                         number_of_nodes,
                         format_string,
                         sort_by=sort_by)

    def print_channel_openings(self,
                               from_days_ago=14,
                               number_of_nodes=20,
                               sort_by='open'):

        nodes = self.channel_opening_statistics(from_days_ago)
        format_string = 'rpk,open,opencap,openrel,opencaprel,openavgcap,' \
                        'openmed,openavg,nchan,cap,cpc,age,alias'
        self.print_nodes(nodes,
                         number_of_nodes,
                         format_string,
                         sort_by=sort_by)

    def print_second_neighbors(self, number_of_nodes=20, sort_by='sec'):
        nodes = self.second_neighbors(number_of_nodes)
        nodes = self.add_metadata_and_remove_pruned(nodes)
        format_string = 'rpk,sec,nchan,cap,cpc,alias'
        self.print_nodes(nodes, number_of_nodes, format_string, sort_by)

    def good_old(self):
        """
        Returns nodes with which we already had a good relationship based on
        historic forwardings.

        :return: dict, nodes sorted by total amount forwarded
        """
        forwarding_analyzer = ForwardingAnalyzer(self.node)
        # analyze all historic forwardings
        forwarding_analyzer.initialize_forwarding_data(0, time.time())
        nodes = forwarding_analyzer.get_forwarding_statistics_nodes()
        nodes = self.add_metadata_and_remove_pruned(nodes)
        return nodes

    def flow_analysis(self,
                      out_direction=True,
                      last_forwardings_to_analyze=200):
        """
        Does a flow analysis and suggests nodes which have demand for
        inbound liquidity.

        :param out_direction: bool, if True outward flowing
                              nodes are displayed
        :param last_forwardings_to_analyze: int, number of
                                            forwardings in analysis
        :return: nodes dict with metadata
        """
        forwarding_analyzer = ForwardingAnalyzer(self.node)
        # analyze all historic forwardings
        forwarding_analyzer.initialize_forwarding_data(0, time.time())
        nodes_in, nodes_out = forwarding_analyzer.simple_flow_analysis(
            last_forwardings_to_analyze)
        raw_nodes = nodes_out if out_direction else nodes_in
        nodes = self.add_metadata_and_remove_pruned(raw_nodes)
        return nodes

    def external_source(self,
                        source,
                        distributing_nodes=False,
                        exclude_hubs=True):
        """
        Parses a file/url (source) for node public keys and displays
        additional info.

        If distributing_nodes is set to True, nodes which are well connected to
        the nodes in the external source are displayed.

        Big hubs can be excluded by exclude_hubs.
        :param source: str
        :param distributing_nodes: bool
        :param exclude_hubs: bool
        """
        source_found = False
        text = None

        # first, try to find a web source behind the given source
        try:
            response = urllib.request.urlopen(source)
            data = response.read()
            text = data.decode('utf-8')
            logger.info("Found a web source for the node list.")
            source_found = True

        except HTTPError as e:
            logger.error("Something is not OK with your url.")
            logger.debug(e)
            return

        except ValueError as e:
            logger.warning("Entered source was not a url.")
            logger.warning(e)

        # if it was not a web source, check whether it is a file
        if not source_found:
            try:
                with open(source, 'r') as file:
                    text = file.read()
                logger.info("Found a file source for the node list.")
                source_found = True
            except FileNotFoundError as e:
                logger.exception(e)

        if not source_found:
            raise FileNotFoundError(f"Didn't find anything under the source "
                                    f"you provided: {source}")

        # match the node public keys
        pattern = re.compile("[a-z0-9]{66}")
        nodes = re.finditer(pattern, text)

        # create a dict of nodes; 'connections' counts the number of
        # connections to the target nodes
        nodes = {n.group(): {'connections': 0} for n in nodes}

        # instead of analyzing the nodes extracted from the data source, we
        # can look at their neighbors; these neighbors can be seen as nodes
        # which distribute our capital to the target nodes
        if distributing_nodes:
            logger.info("Determining nodes that are well connected to the "
                        "nodes from the node file.")

            # it makes sense to exclude large hubs in the search,
            # because everybody is already connected to them
            if exclude_hubs:  # we exclude hubs in the neighbor analysis
                nodes_list = [
                    n for n in nodes.keys()
                    if self.node.network.number_channels(n) <
                    settings.NUMBER_CHANNELS_DEFINING_HUB
                ]
            else:
                nodes_list = nodes.keys()

            # we also want to avoid counting the nodes we are already
            # connected to, which is what blacklist_nodes is for
            blacklist_nodes = list(
                self.node.network.neighbors(self.node.pub_key))

            node_neighbors_list = \
                self.node.network.nodes_in_neighborhood_of_nodes(
                    nodes_list, blacklist_nodes)
            # set the number of connections to target nodes in
            # the node dictionary
            nodes = {n[0]: {'connections': n[1]} for n in node_neighbors_list}

        nodes = self.add_metadata_and_remove_pruned(nodes, exclude_hubs)

        return nodes
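    # A hypothetical illustration of what external_source accepts: any file or
    # URL whose text contains 66-character lowercase alphanumeric strings
    # (node public keys), e.g. one key per line; surrounding text is ignored,
    # since the keys are extracted with a regex.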

    def channel_opening_statistics(self, from_days_ago):
        """
        Fetches the channel opening statistics of the last `from_days_ago`
        days from the network analysis class and adds some additional
        heuristics.

        :param from_days_ago: int
        :return: dict, keys: node public keys, values: several heuristics
        """

        nodes = \
            self.network_analysis.calculate_channel_opening_statistics(
                from_days_ago)
        nodes = self.add_metadata_and_remove_pruned(nodes)

        # add the node age and other interesting metrics
        for n, nv in nodes.items():
            node_age = self.node.network.node_age(n)
            nodes[n]['age_days'] = node_age

            # goal of this metric:
            # find burst-like channel openings of older nodes;
            # the motivation is that this could be the behavior of a node
            # which first did testing on some service, but then all of a
            # sudden goes live, a moment we want to catch

            nodes[n]['metric_burst'] = \
                nv['relative_total_capacity'] * \
                (nv['openings_total_capacity']) ** 2 * node_age \
                / max(1, nv['opening_median_time'])  # avoid division by zero

            # goal of this metric:
            # find steady nodes opening lots of capacity; the motivation is
            # that this could be the behavior of experienced node operators
            # who dedicate themselves to their nodes and add a lot of value
            # to the network
            nodes[n]['metric_steady'] = nv['opening_median_time'] * (
                nv['openings_total_capacity'])**2

        return nodes
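    # How to read the two heuristics above: metric_burst grows with the opened
    # capacity and the node age and shrinks with the median time between
    # openings (an old node with a sudden burst of openings scores high),
    # while metric_steady grows with both the opened capacity and the median
    # time between openings (patient, continuous opening behavior scores high).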

    def second_neighbors(self, number_of_nodes):
        """
        Returns a dict of nodes which would yield the most new second
        neighbors if channels were opened to them.

        :param number_of_nodes: number of nodes returned
        :type number_of_nodes: int
        :return: nodes
        :rtype: dict
        """
        node_tuples = self.network_analysis.nodes_most_second_neighbors(
            self.node.pub_key, number_of_nodes)
        nodes = {}
        for n in node_tuples:
            nodes[n[0]] = {'new_second_neighbors': n[1]}

        return nodes

    def add_metadata_and_remove_pruned(self, nodes, exclude_hubs=False):
        """
        Adds metadata like the number of channels, total capacity, and IP
        address to the dict of nodes.

        This should be applied in every node recommendation method, as it
        cleans out the obviously bad nodes to which we don't want to connect.

        If exclude_hubs is set to True, big nodes will be removed from nodes.

        :param nodes: dict
        :param exclude_hubs: bool
        :return: dict
        """

        nodes_new = {}
        for k, n in nodes.items():
            try:
                # copy all the entries
                node_new = dict(n)
                node_new['alias'] = self.node.network.node_alias(k)

                number_channels = self.node.network.number_channels(k)
                total_capacity = self.node.network.node_capacity(k)
                node_new['number_channels'] = number_channels
                if number_channels > 0:
                    node_new['total_capacity'] = \
                        float(total_capacity) / 1E8  # in btc
                    node_new['capacity_per_channel'] = \
                        float(total_capacity) / number_channels / 1E8  # in btc
                    node_new['address'] = self.node.network.node_address(k)
                    node_new['distance'] = \
                        self.network_analysis.distance(self.node.pub_key, k)
                    if exclude_hubs:
                        if (node_new['number_channels'] <
                                settings.NUMBER_CHANNELS_DEFINING_HUB):
                            nodes_new[k] = node_new
                    else:
                        nodes_new[k] = node_new
            # if a node is not found in the graph, it was pruned and
            # shouldn't be recommended
            except KeyError:
                pass

        if exclude_hubs:
            logger.info(f"Excluding hubs (defined by number of channels > "
                        f"{settings.NUMBER_CHANNELS_DEFINING_HUB}).")
        if not self.show_connected:
            nodes_new = self.exclude_connected_nodes(nodes_new)

        return nodes_new

    def exclude_connected_nodes(self, nodes):
        """
        Excludes already connected nodes from the nodes dict.

        :param nodes: dict, keys are node pub keys
        :return: dict
        """

        logger.debug("Filtering nodes which are not connected to us.")
        open_channels = self.node.get_open_channels()
        connected_node_pubkeys = set()
        filtered_nodes = OrderedDict()

        for k, v in open_channels.items():
            connected_node_pubkeys.add(v['remote_pubkey'])

        for k, v in nodes.items():
            if k not in connected_node_pubkeys:
                filtered_nodes[k] = v

        return filtered_nodes

    def print_nodes(self, nodes, number_of_nodes, columns, sort_by):
        """
        General purpose printing function for flexible node tables.

        columns is a comma-delimited string that defines which columns are
        shown and in which order, e.g.:

        columns = "rpk,nchan,cap,cpc,alias"

        Sorting can be reversed by adding a "rev_" string before the
        sorting string.

        :param nodes: dict
        :param number_of_nodes: int
        :param columns: str
        :param sort_by: str, sorting string, these can be the keys
                             of the node dictionary
        """

        if len(nodes) == 0:
            logger.info(">>> Did not find any nodes.")
            return
        logger.info(f"Found {len(nodes)} nodes for node recommendation.")

        # some logic to reverse the sorting order
        # largest first
        reverse_sorting = True

        # if there is a marker 'rev_' in front, reverse the sorting
        if sort_by[:4] == 'rev_':
            reverse_sorting = False
            sort_by = sort_by[4:]
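        # e.g. sort_by='cap' sorts by total capacity, largest first, while
        # sort_by='rev_cap' sorts smallest first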
        nodes = OrderedDict(
            sorted(nodes.items(),
                   key=lambda x: x[1][print_node_format[sort_by]['dict_key']],
                   reverse=reverse_sorting))

        logger.info(f"Sorting nodes by {sort_by}.")

        logger.info("-------- Description --------")
        columns = columns.split(',')
        for c in columns:
            logger.info(f"{c:<10} {print_node_format[c]['description']}")
        logger.info(f"-------- Nodes (limited to "
                    f"{number_of_nodes} nodes) --------")

        # prepare the column header
        column_header_list = [
            f"{c:{print_node_format[c]['align']}{print_node_format[c]['width']}}"
            for c in columns
        ]
        column_header = " ".join(column_header_list)
        logger.info(column_header)

        for ik, (k, v) in enumerate(nodes.items()):
            if ik >= number_of_nodes:
                break
            # print each row formatted as specified in the
            # print_node_format dictionary
            row = [
                f"{v[print_node_format[c]['dict_key']]:{print_node_format[c]['format']}}"
                for c in columns if c != 'rpk'
            ]
            # add whitespace buffers between columns
            row_string = " ".join(row)
            row_string = k + " " + row_string
            logger.info(row_string)

            if self.show_address:
                if v['address']:
                    logger.info('   ' + v['address'])
                else:
                    logger.info('   no address available')
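A minimal usage sketch for RecommendNodes, assuming a reachable lnd node (import path as in Example #5); the sort keys correspond to the column names in the format strings above:

    from lndmanage.lib.node import LndNode

    node = LndNode()
    recommend = RecommendNodes(node, show_connected=False, show_addresses=True)
    # nodes we historically forwarded the most with, largest total first
    recommend.print_good_old(number_of_nodes=10, sort_by='tot')
    # nodes with demand for inbound liquidity, smallest capacity first
    recommend.print_flow_analysis(out_direction=True, sort_by='rev_cap')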
Example #4
import datetime
from typing import Dict

from lndmanage.lib.network_info import NetworkAnalysis
# parse_nodeid_channelid, convert_channel_id_to_short_channel_id,
# height_to_timestamp, padded_column_string, COL_WIDTH, and the module logger
# are assumed to come from the surrounding lndmanage modules (not shown here).


class Info(object):
    """
    Implements the info command, which displays info on individual channels and
    nodes.
    """
    def __init__(self, node):
        """
        :param node: node object
        :type node: lndmanage.lib.node.LndNode
        """
        self.node = node
        self.network_info = NetworkAnalysis(self.node)

    def parse_and_print(self, info):
        """
        Parses an info string for a channel id or node public key and prints
        out the information gathered about the object.

        :param info: channel id or node public key
        :type info: str
        """

        try:
            channel_id, node_pub_key = parse_nodeid_channelid(info)
        except ValueError:
            logger.info("Info didn't represent neither a channel nor a node.")
            return

        # Info was a channel.
        if channel_id is not None:
            try:
                general_info = self.node.network.edges[channel_id]
            except KeyError:
                logger.info("Channel id %s is not known in the public graph.",
                            channel_id)
                return

            # Add some more information on the channel.
            general_info['node1_alias'] = \
                self.node.network.node_alias(general_info['node1_pub'])
            general_info['node2_alias'] = \
                self.node.network.node_alias(general_info['node2_pub'])
            general_info['blockheight'] = \
                convert_channel_id_to_short_channel_id(
                    channel_id)[0]
            general_info['open_timestamp'] = height_to_timestamp(
                self.node, general_info['blockheight'])

            # TODO: if it's our channel, add extra info
            extra_info = None

            self.print_channel_info(general_info)

        # Info was a node.
        else:
            try:
                general_info = self.network_info.node_info_basic(node_pub_key)
            except KeyError:
                return

            # TODO: if it's a (channel) peer or our node, add extra info
            extra_info = None

            self.print_node_info(general_info)

    def print_channel_info(self, general_info: Dict):
        """
        Prints the channel info with peer information.

        :param general_info: information about the channel in the public graph
        """

        logger.info("-------- Channel info --------")
        logger.info(f"channel id: {general_info['channel_id']}  "
                    f"channel point: {general_info['chan_point']}")

        # capacity
        string = padded_column_string('capacity:', general_info['capacity'],
                                      'sat')
        logger.info(f"{string:{COL_WIDTH*2}}")

        # blockheight
        string = padded_column_string('blockheight:',
                                      general_info['blockheight'], '')
        logger.info(f"{string:{COL_WIDTH*2}}")

        # opening time
        time = datetime.datetime.utcfromtimestamp(
            general_info['open_timestamp']).strftime('%Y-%m-%d %H:%M:%S')
        string = padded_column_string('open since:', time, '')
        logger.info(f"{string:{COL_WIDTH*2}}")

        # channel age
        age = round(
            (self.node.blockheight - general_info['blockheight']) / 6 / 24, 2)
        string = padded_column_string('channel age:', age, 'days')
        logger.info(f"{string:{COL_WIDTH*2}}")

        # last update
        last_update = general_info['last_update']
        last_update_time = datetime.datetime.utcfromtimestamp(
            last_update).strftime('%Y-%m-%d %H:%M:%S')
        string = padded_column_string('last update:', last_update_time, '')
        logger.info(f"{string:{COL_WIDTH*2}}")
        logger.info("")

        # channel partner overview
        logger.info("-------- Channel partners --------")
        logger.info(f"{general_info['node1_pub']:{COL_WIDTH}} | "
                    f"{general_info['node2_pub']:{COL_WIDTH}}")
        logger.info(f"{general_info['node1_alias']:^{COL_WIDTH}} | "
                    f"{general_info['node2_alias']:^{COL_WIDTH}}")

        policies = general_info['policies']
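        # the policies dict appears to be keyed by the boolean ordering of the
        # two pub keys, so each lookup below selects the policy set by the
        # respective channel partner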
        np1 = policies[general_info['node1_pub'] > general_info['node2_pub']]
        np2 = policies[general_info['node2_pub'] > general_info['node1_pub']]
        last_update_1 = np1['last_update']
        last_update_2 = np2['last_update']

        last_update_time_1 = datetime.datetime.utcfromtimestamp(
            last_update_1).strftime('%Y-%m-%d %H:%M:%S')
        last_update_time_2 = datetime.datetime.utcfromtimestamp(
            last_update_2).strftime('%Y-%m-%d %H:%M:%S')

        # base fee
        string_left = padded_column_string('base fee:', np1['fee_base_msat'],
                                           'msat')
        string_right = padded_column_string('base fee:', np2['fee_base_msat'],
                                            'msat')
        logger.info(f"{string_left:{COL_WIDTH}} | {string_right:{COL_WIDTH}}")

        # fee rate
        string_left = padded_column_string('fee rate:',
                                           np1['fee_rate_milli_msat'] / 1E6,
                                           'sat/sat')
        string_right = padded_column_string('fee rate:',
                                            np2['fee_rate_milli_msat'] / 1E6,
                                            'sat/sat')
        logger.info(f"{string_left:{COL_WIDTH}} | {string_right:{COL_WIDTH}}")

        # time lock delta
        string_left = padded_column_string('time lock delta:',
                                           np1['time_lock_delta'], 'blocks')
        string_right = padded_column_string('time lock delta:',
                                            np2['time_lock_delta'], 'blocks')
        logger.info(f"{string_left:{COL_WIDTH}} | {string_right:{COL_WIDTH}}")

        # disabled
        string_left = padded_column_string('disabled:', np1['disabled'], '')
        string_right = padded_column_string('disabled:', np2['disabled'], '')
        logger.info(f"{string_left:{COL_WIDTH}} | {string_right:{COL_WIDTH}}")

        # last update
        string_left = padded_column_string('last update:', last_update_time_1,
                                           '')
        string_right = padded_column_string('last update:', last_update_time_2,
                                            '')
        logger.info(f"{string_left:{COL_WIDTH}} | {string_right:{COL_WIDTH}}")

    def print_node_info(self, general_info):
        """
        Prints the node info.

        :param general_info: information about the node in the public graph
        :type general_info: dict
        """
        logger.info("-------- Node info --------")
        logger.info(general_info['pub_key'])

        # alias
        string = padded_column_string('alias:', general_info['alias'], '')
        logger.info(f"{string:{COL_WIDTH*2}}")

        # last update
        last_update = general_info['last_update']
        last_update_time = datetime.datetime.utcfromtimestamp(
            last_update).strftime('%Y-%m-%d %H:%M:%S')
        string = padded_column_string('last update:', last_update_time, '')
        logger.info(f"{string:{COL_WIDTH*2}}")

        # number of channels
        string = padded_column_string('number of channels:',
                                      general_info['num_channels'], '')
        logger.info(f"{string:{COL_WIDTH*2}}")

        # total capacity
        string = padded_column_string('total capacity:',
                                      general_info['total_capacity'], 'sat')
        logger.info(f"{string:{COL_WIDTH*2}}")

        # capacity per channel
        string = padded_column_string('capacity (median):',
                                      general_info['median_capacity'], 'sat')
        logger.info(f"{string:{COL_WIDTH*2}}")
        string = padded_column_string('capacity (mean):',
                                      general_info['mean_capacity'], 'sat')
        logger.info(f"{string:{COL_WIDTH*2}}")

        # fees
        string = padded_column_string('base fee (median):',
                                      general_info['median_base_fee'], 'msat')
        logger.info(f"{string:{COL_WIDTH*2}}")
        string = padded_column_string('base fee (mean):',
                                      general_info['mean_base_fee'], 'msat')
        logger.info(f"{string:{COL_WIDTH*2}}")
        string = padded_column_string('fee rate (median):',
                                      general_info['median_fee_rate'],
                                      'sat/sat')
        logger.info(f"{string:{COL_WIDTH*2}}")
        string = padded_column_string('fee rate (mean):',
                                      general_info['mean_fee_rate'], 'sat/sat')
        logger.info(f"{string:{COL_WIDTH*2}}")

        # addresses
        logger.info("-------- Addresses --------")
        for addr in general_info['addresses']:
            logger.info(5 * " " + general_info['pub_key'] + "@" + addr)
Example #5
from lndmanage.lib.network_info import NetworkAnalysis
from lndmanage.lib.node import LndNode
from lndmanage import settings

import logging.config
logging.config.dictConfig(settings.logger_config)
logger = logging.getLogger(__name__)

if __name__ == '__main__':
    node = LndNode()
    network_analysis = NetworkAnalysis(node)

    network_analysis.print_node_overview(node.pub_key)

    logger.info('-------- Nodes with highest capacity: --------')
    for n in network_analysis.get_sorted_nodes_by_property():
        logger.info(n)
    logger.info('-------- Nodes with highest degree: --------')
    for n in network_analysis.get_sorted_nodes_by_property(key='degree'):
        logger.info(n)
    logger.info('-------- Nodes with highest capacity/channel: --------')
    for n in network_analysis.get_sorted_nodes_by_property(
            key='capacity_per_channel', min_degree=10):
        logger.info(n)
    logger.info('-------- Nodes with lowest capacity/channel: --------')
    for n in network_analysis.get_sorted_nodes_by_property(
            key='capacity_per_channel', min_degree=20, decrementing=False):
        logger.info(n)
    logger.info('-------- Nodes with most user nodes: --------')
    for n in network_analysis.get_sorted_nodes_by_property(key='user_nodes',
                                                           min_degree=20):
        logger.info(n)