def node_age(self, node_pub_key):
    """Determine the approximate age of a node via its oldest channel.

    :param node_pub_key: str, public key of the node to inspect
    :return: float, age in days (0.0 if the node has no known channels)
    """
    # Find all channels of the node in the public graph.
    node_edges = self.graph.edges(node_pub_key, data=True)

    # Collect every channel's age in terms of block heights.
    channel_ages = []
    for _, _, edge_data in node_edges:
        height, _index, _output = convert_channel_id_to_short_channel_id(
            edge_data['channel_id'])
        channel_ages.append(self.node.blockheight - height)

    # The oldest channel determines the node's age. `default=0` guards
    # against nodes without channels, which previously crashed max()
    # with a ValueError on the empty sequence.
    node_age_blocks = max(channel_ages, default=0)

    # Convert block count to days, assuming ~10 minutes per block.
    return float(node_age_blocks) * 10 / (60 * 24)
def parse_and_print(self, info):
    """Parse an info string for a channel id or node public key and
    print out the information gathered about the object.

    :param info: channel id or node public key
    :type info: str
    """
    try:
        channel_id, node_pub_key = parse_nodeid_channelid(info)
    except ValueError:
        # Message fixed: the original contained a double negative
        # ("didn't represent neither ... nor ...").
        logger.info("Info didn't represent a channel or a node.")
        return

    # Info was a channel.
    if channel_id is not None:
        try:
            general_info = self.node.network.edges[channel_id]
        except KeyError:
            logger.info("Channel id %s is not known in the public graph.",
                        channel_id)
            return

        # Add some more information on the channel.
        general_info['node1_alias'] = \
            self.node.network.node_alias(general_info['node1_pub'])
        general_info['node2_alias'] = \
            self.node.network.node_alias(general_info['node2_pub'])
        general_info['blockheight'] = \
            convert_channel_id_to_short_channel_id(channel_id)[0]
        general_info['open_timestamp'] = height_to_timestamp(
            self.node, general_info['blockheight'])

        # TODO: if it's our channel, add extra info
        extra_info = None

        self.print_channel_info(general_info)

    # Info was a node.
    else:
        try:
            general_info = self.network_info.node_info_basic(node_pub_key)
        except KeyError:
            # Node is unknown in the public graph; nothing to print.
            return

        # TODO: if it's a (channel) peer or our node, add extra info
        extra_info = None

        self.print_node_info(general_info)
def determine_channel_openings(self, from_days_ago):
    """Determine all channel openings in the last `from_days_ago` days
    and build a dictionary of the nodes involved.

    Each dictionary value is a list of (funding block height, capacity)
    tuples, one tuple per channel that was opened. Already closed
    channels are not visible in the graph and are therefore excluded.

    :param from_days_ago: int
    :return: dict, keys: node public keys,
        values: (block height, capacity)
    """
    logger.info(f"Determining channel openings in the last "
                f"{from_days_ago} days (excluding already closed ones).")

    # All public channel ids, sorted so the oldest come first (short
    # channel ids encode the funding height in their high bits).
    sorted_channel_ids = sorted(self.node.network.edges.keys())

    # Earliest block height to include; roughly six blocks per hour.
    min_height = self.node.blockheight - from_days_ago * 24 * 6

    # Keep only channels funded after the cutoff height, paired with
    # their funding heights.
    recent_channels = []
    for cid in sorted_channel_ids:
        funding_height = convert_channel_id_to_short_channel_id(cid)[0]
        if funding_height > min_height:
            recent_channels.append((cid, funding_height))

    logger.info(f"In the last {from_days_ago} days, there were at least "
                f"{len(recent_channels)} "
                f"channel openings.")

    # Attribute each opening to both endpoints of the channel.
    openings_by_node = defaultdict(list)
    for cid, funding_height in recent_channels:
        edge = self.node.network.edges[cid]
        for pub_key in (edge['node1_pub'], edge['node2_pub']):
            openings_by_node[pub_key].append(
                (funding_height, edge['capacity']))

    return openings_by_node
def get_channel_openings_series(self):
    """Fetch channel opening series to be used by TimeSeries.

    One entry is produced per channel in `self.channels`, timestamped
    by the channel's funding block height.

    :return: channel opening series
    :rtype: list[dict]
    """
    return [
        {
            'timestamp': height_to_timestamp(
                self.node,
                convert_channel_id_to_short_channel_id(chan_id)[0]),
            'key': chan_id,
            'quantity': channel_values['capacity'],
        }
        for chan_id, channel_values in self.channels.items()
    ]
def get_open_channels(self, active_only=False, public_only=False):
    """Fetches information (fee settings of the counterparty, channel
    capacity, balancedness) about this node's open channels and saves
    it into the channels dict attribute.

    :param active_only: only take active channels into
                        account (off by default)
    :type active_only: bool
    :param public_only: only take public channels into
                        account (off by default)
    :type public_only: bool
    :return: dict of channels sorted by alias
    :rtype: OrderedDict
    """
    raw_channels = self._rpc.ListChannels(
        lnd.ListChannelsRequest(active_only=active_only,
                                public_only=public_only))
    try:
        channels_data = raw_channels.ListFields()[0][1]
    except IndexError:
        # If there are no channels, return.
        return OrderedDict({})

    def convert_to_days_ago(timestamp):
        # Age of a unix timestamp in days. Hoisted before the loop so
        # it is defined once instead of once per channel.
        return (time.time() - timestamp) / (60 * 60 * 24)

    channels = OrderedDict()
    for c in channels_data:
        # calculate age from blockheight
        blockheight, _, _ = convert_channel_id_to_short_channel_id(
            c.chan_id)
        age_days = (self.blockheight - blockheight) * 10 / (60 * 24)
        try:
            sent_received_per_week = int(
                (c.total_satoshis_sent + c.total_satoshis_received) /
                (age_days / 7))
        except ZeroDivisionError:
            # age could be zero right after channel becomes pending
            sent_received_per_week = 0

        # determine policy
        try:
            edge_info = self.network.edges[c.chan_id]
            # interested in node2
            if edge_info['node1_pub'] == self.pub_key:
                policy_peer = edge_info['node2_policy']
                policy_local = edge_info['node1_policy']
            else:  # interested in node1
                policy_peer = edge_info['node1_policy']
                policy_local = edge_info['node2_policy']
        except KeyError:
            # if channel is unknown in describegraph
            # we need to set the fees to some error value
            policy_peer = {
                'fee_base_msat': float(-999),
                'fee_rate_milli_msat': float(999)
            }
            policy_local = {
                'fee_base_msat': float(-999),
                'fee_rate_milli_msat': float(999)
            }

        # calculate last update (days ago)
        try:
            last_update = convert_to_days_ago(
                self.network.edges[c.chan_id]['last_update'])
            last_update_local = convert_to_days_ago(
                policy_local['last_update'])
            last_update_peer = convert_to_days_ago(
                policy_peer['last_update'])
        except (TypeError, KeyError):
            # NaN marks channels without usable update timestamps.
            last_update = float('nan')
            last_update_peer = float('nan')
            last_update_local = float('nan')

        # define unbalancedness |ub| large means very unbalanced
        channel_unbalancedness, our_commit_fee = \
            channel_unbalancedness_and_commit_fee(
                c.local_balance, c.capacity, c.commit_fee, c.initiator)

        try:
            uptime_lifetime_ratio = c.uptime / c.lifetime
        except ZeroDivisionError:
            # lifetime is zero for channels that just appeared
            uptime_lifetime_ratio = 0

        channels[c.chan_id] = {
            'active': c.active,
            'age': age_days,
            'alias': self.network.node_alias(c.remote_pubkey),
            'amt_to_balanced': int(
                channel_unbalancedness * c.capacity / 2 - our_commit_fee),
            'capacity': c.capacity,
            'chan_id': c.chan_id,
            'channel_point': c.channel_point,
            'commit_fee': c.commit_fee,
            'fee_per_kw': c.fee_per_kw,
            'peer_base_fee': policy_peer['fee_base_msat'],
            'peer_fee_rate': policy_peer['fee_rate_milli_msat'],
            'local_base_fee': policy_local['fee_base_msat'],
            'local_fee_rate': policy_local['fee_rate_milli_msat'],
            'initiator': c.initiator,
            'last_update': last_update,
            'last_update_local': last_update_local,
            'last_update_peer': last_update_peer,
            'local_balance': c.local_balance,
            'num_updates': c.num_updates,
            'private': c.private,
            'remote_balance': c.remote_balance,
            'remote_pubkey': c.remote_pubkey,
            'sent_received_per_week': sent_received_per_week,
            'total_satoshis_sent': c.total_satoshis_sent,
            'total_satoshis_received': c.total_satoshis_received,
            'unbalancedness': channel_unbalancedness,
            'uptime': c.uptime,
            'lifetime': c.lifetime,
            'uptime_lifetime_ratio': uptime_lifetime_ratio,
        }

    # sort by alias (the docstring previously claimed remote pubkey)
    sorted_dict = OrderedDict(
        sorted(channels.items(), key=lambda x: x[1]['alias']))
    return sorted_dict