def import_connections(self, in_conns, force_import):
        """
        Imports connections from a list. Used at the app's start to process default connections and/or from
          a configuration dialog, when user pastes from a clipboard a string, describing connections he 
          wants to add to the configuration. The latter feature is used for a convenience.
        :param in_conns: list of DashNetworkConnectionCfg objects.
        :returns: tuple (list_of_added_connections, list_of_updated_connections)
        """

        added_conns = []
        updated_conns = []
        if in_conns:
            for nc in in_conns:
                conn_id = nc.get_conn_id()
                # check if the new connection already exists in the configuration
                conn = self.get_conn_cfg_by_id(conn_id)
                if not conn:
                    if force_import or not cache.get_value('imported_default_conn_' + conn_id, False, bool):
                        # this new connection hasn't been automatically imported before
                        self.dash_net_configs.append(nc)
                        added_conns.append(nc)
                        cache.set_value('imported_default_conn_' + conn_id, True)
                elif not conn.identical(nc) and force_import:
                    conn.copy_from(nc)
                    updated_conns.append(conn)
        return added_conns, updated_conns
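
A minimal usage sketch (an assumption, not part of the original source): a hypothetical caller that merges bundled default connections at startup and logs what changed; 'default_connections' is an illustrative variable name.

    # hypothetical caller of import_connections(); force_import=False so that
    # connections the user already removed are not re-added automatically
    added, updated = self.import_connections(default_connections, force_import=False)
    if added or updated:
        logging.info('Connections imported: %d added, %d updated' % (len(added), len(updated)))
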
 def add_menu_item_to_mru(self, file_name: str) -> None:
     if file_name:
         try:
             if file_name in self.recent_data_files:
                 self.recent_data_files.remove(file_name)
             self.recent_data_files.insert(0, file_name)
             app_cache.set_value(CACHE_ITEM_DATA_FILE_MRU_LIST,
                                 self.recent_data_files)
         except Exception as e:
             logging.warning(str(e))
Example #3
    def save_col_defs(self, setting_name: str):
        cols = []
        if self.view:
            hdr = self.get_view_horizontal_header()
        else:
            hdr = None

        for c in sorted(self._columns, key=lambda x: x.visual_index):
            if hdr:
                width = hdr.sectionSize(self._columns.index(c))
            else:
                width = c.initial_width

            cols.append({'name': c.name, 'visible': c.visible, 'width': width})
        app_cache.set_value(setting_name, cols)
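
A possible counterpart sketch (an assumption, not from the original source): restoring the column definitions written by save_col_defs() above. It assumes app_cache.get_value(key, default, type) as used elsewhere in this listing, and the restore_col_defs() name is hypothetical.

    def restore_col_defs(self, setting_name: str):
        # hypothetical reverse of save_col_defs(): read back the list of
        # {'name', 'visible', 'width'} dicts and apply them to self._columns
        cols = app_cache.get_value(setting_name, [], list)
        for visual_idx, col in enumerate(cols):
            c = next((x for x in self._columns if x.name == col.get('name')), None)
            if c:
                c.visible = col.get('visible', True)
                c.initial_width = col.get('width', c.initial_width)
                c.visual_index = visual_idx
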
 def closeEvent(self, event):
     w = self.size().width()
     h = self.size().height()
     cache.set_value('WndPayoutWidth', w)
     cache.set_value('WndPayoutHeight', h)
     # save column widths
     widths = []
     for col in range(self.table_model.columnCount()):
         widths.append(self.tableView.columnWidth(col))
     cache.set_value('WndPayoutColWidths', widths)
     if self.source_address_mode:
         cache.set_value('SourceBip32Path', self.edtSourceBip32Path.text())
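
A hedged sketch of the restore side (not in the original source): code that could run when the dialog opens to bring back the geometry and column widths saved in closeEvent() above; the default sizes are illustrative.

    # hypothetical restore counterpart of closeEvent()
    w = cache.get_value('WndPayoutWidth', 600, int)
    h = cache.get_value('WndPayoutHeight', 400, int)
    self.resize(w, h)
    for col, width in enumerate(cache.get_value('WndPayoutColWidths', [], list)):
        self.tableView.setColumnWidth(col, width)
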
Example #6
 def save_cache_settings(self):
     app_cache.save_window_size(self)
     app_cache.set_value(CACHE_ITEM_SHOW_COMMANDS,
                         self.show_manual_commands)
Example #7
 def on_chb_word_wrap_toggled(self, checked):
     app_cache.set_value(CACHE_ITEM_DETAILS_WORD_WRAP, checked)
     self.apply_word_wrap(checked)
    def save_cache_settings(self):
        app_cache.save_window_size(self)
        if self.initial_mn_sel is None:
            app_cache.set_value(CACHE_ITEM_UTXO_SOURCE_MODE, self.utxo_src_mode)
        app_cache.set_value(CACHE_ITEM_HW_ACCOUNT_NUMBERS, self.hw_account_numbers)
        app_cache.set_value(CACHE_ITEM_HW_ACCOUNT_BASE_PATH.replace('%NETWORK%', self.app_config.gobyte_network),
                            self.hw_account_base_bip32_path)
        app_cache.set_value(CACHE_ITEM_HW_ACCOUNT_NUMBER, self.hw_account_number)
        app_cache.set_value(CACHE_ITEM_HW_SRC_BIP32_PATH.replace('%NETWORK%', self.app_config.gobyte_network),
                            self.hw_src_bip32_path)

        if self.mn_src_index is not None:
            # save the selected masternode name
            if self.mn_src_index >= 0:
                if self.mn_src_index < len(self.masternodes):
                    app_cache.set_value(CACHE_ITEM_UTXO_SRC_MASTRNODE.replace('%NETWORK%', self.app_config.gobyte_network),
                                        self.masternodes[self.mn_src_index].name)
                else:
                    app_cache.set_value(CACHE_ITEM_UTXO_SRC_MASTRNODE.replace('%NETWORK%', self.app_config.gobyte_network),
                                        '<ALL>')

        # save column widths
        widths = []
        for col in range(self.table_model.columnCount()):
            widths.append(self.tableView.columnWidth(col))
        app_cache.set_value(CACHE_ITEM_COL_WIDTHS, widths)

        # recipient list
        rcp_list = self.wdg_dest_adresses.get_recipients_list()
        rcp_data = ''
        if rcp_list:
            try:
                # the HW encryption key may not be available, so use the generated key to avoid saving addresses in plain text
                self.encryption_key = base64.urlsafe_b64encode(self.app_config.hw_generated_key)
                fernet = Fernet(self.encryption_key)
                rcp_json_str = simplejson.dumps(rcp_list)
                enc_json_str = bytes(rcp_json_str, 'ascii')
                rcp_data = fernet.encrypt(enc_json_str)
                rcp_data = rcp_data.decode('ascii')
            except Exception:
                logging.exception('Cannot save data to cache.')
        app_cache.set_value(CACHE_ITEM_LAST_RECIPIENTS.replace('%NETWORK%', self.app_config.gobyte_network), rcp_data)
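
The encrypted recipient list saved above can be read back with the same generated key. A sketch of the decryption counterpart (an assumption, not part of the original source), using the Fernet and simplejson calls already used by this code:

        # hypothetical restore counterpart of the recipient-list block above
        rcp_data = app_cache.get_value(
            CACHE_ITEM_LAST_RECIPIENTS.replace('%NETWORK%', self.app_config.gobyte_network), '', str)
        rcp_list = []
        if rcp_data:
            try:
                fernet = Fernet(base64.urlsafe_b64encode(self.app_config.hw_generated_key))
                rcp_json_str = fernet.decrypt(bytes(rcp_data, 'ascii')).decode('ascii')
                rcp_list = simplejson.loads(rcp_json_str)
            except Exception:
                logging.exception('Cannot restore the recipient list from cache.')
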
Example #9
 def on_close(self):
     app_cache.set_value('ConfigDlg_Width', self.size().width())
     app_cache.set_value('ConfigDlg_Height', self.size().height())
     app_cache.set_value('ConfigDlg_ConnectionSplitter_Sizes',
                         self.splitter.sizes())
Example #10
 def set_cache_value(self, name, value):
     app_cache.set_value(self.__class__.__name__ + '_' + name, value)
Example #11
    def get_ghostnodelist(self, *args, data_max_age=MASTERNODES_CACHE_VALID_SECONDS):
        """
        Returns the masternode list, read from the Dash network or from the internal cache.
        :param args: arguments passed to the 'ghostnodelist' RPC call
        :param data_max_age: maximum age (in seconds) of the cached masternode data to be used; if the
            cache is older than 'data_max_age', an RPC call is performed to load newer masternode data;
            a value of 0 forces reading fresh data from the network
        :return: list of Masternode objects, matching the 'args' arguments
        """
        def parse_mns(mns_raw):
            """
            Parses the dictionary of strings returned by the RPC into a list of Masternode objects.
            :param mns_raw: dict of masternodes in the format returned by the 'ghostnodelist' RPC command
            :return: list of Masternode objects
            """
            tm_begin = time.time()
            ret_list = []
            for mn_id in mns_raw.keys():
                mn_raw = mns_raw.get(mn_id)
                mn_raw = mn_raw.strip()
                elems = mn_raw.split()
                if len(elems) >= 8:
                    mn = Masternode()
                    # (status, protocol, payee, lastseen, activeseconds, lastpaidtime, lastpaidblock, ip)
                    mn.status, mn.protocol, mn.payee, mn.lastseen, mn.activeseconds, mn.lastpaidtime, \
                        mn.lastpaidblock, mn.ip = elems

                    mn.lastseen = int(mn.lastseen)
                    mn.activeseconds = int(mn.activeseconds)
                    mn.lastpaidtime = int(mn.lastpaidtime)
                    mn.lastpaidblock = int(mn.lastpaidblock)
                    mn.ident = mn_id
                    ret_list.append(mn)
            duration = time.time() - tm_begin
            logging.info('Parse ghostnodelist time: ' + str(duration))
            return ret_list

        def update_masternode_data(existing_mn, new_data, cursor):
            # update cached masternode's properties
            existing_mn.modified = False
            existing_mn.monitor_changes = True
            existing_mn.ident = new_data.ident
            existing_mn.status = new_data.status
            existing_mn.protocol = new_data.protocol
            existing_mn.payee = new_data.payee
            existing_mn.lastseen = new_data.lastseen
            existing_mn.activeseconds = new_data.activeseconds
            existing_mn.lastpaidtime = new_data.lastpaidtime
            existing_mn.lastpaidblock = new_data.lastpaidblock
            existing_mn.ip = new_data.ip

            # ... and finally update MN db record
            if cursor and existing_mn.modified:
                cursor.execute("UPDATE MASTERNODES set ident=?, status=?, protocol=?, payee=?,"
                               " last_seen=?, active_seconds=?, last_paid_time=?, "
                               " last_paid_block=?, ip=?"
                               "WHERE id=?",
                               (new_data.ident, new_data.status, new_data.protocol, new_data.payee,
                                new_data.lastseen, new_data.activeseconds, new_data.lastpaidtime,
                                new_data.lastpaidblock, new_data.ip, existing_mn.db_id))

        if self.open():

            if len(args) == 1 and args[0] == 'full':
                last_read_time = app_cache.get_value(f'MasternodesLastReadTime_{self.app_config.dash_network}', 0, int)
                logging.info("MasternodesLastReadTime: %d" % last_read_time)

                if self.masternodes and data_max_age > 0 and \
                   int(time.time()) - last_read_time < data_max_age:
                    logging.info('Using cached ghostnodelist (data age: %s)' % str(int(time.time()) - last_read_time))
                    return self.masternodes
                else:
                    logging.info('Loading masternode list from NIX daemon...')
                    mns = self.proxy.ghostnodelist(*args)
                    mns = parse_mns(mns)
                    logging.info('Finished loading masternode list')

                    # mark already cached masternodes to identify those to delete
                    for mn in self.masternodes:
                        mn.marker = False

                    # save masternodes to the db cache
                    db_modified = False
                    cur = None
                    try:
                        if self.db_intf.db_active:
                            cur = self.db_intf.get_cursor()

                        for mn in mns:
                            # check if newly-read masternode already exists in the cache
                            existing_mn = self.masternodes_by_ident.get(mn.ident)
                            if not existing_mn:
                                mn.marker = True
                                self.masternodes.append(mn)
                                self.masternodes_by_ident[mn.ident] = mn

                                if self.db_intf.db_active:
                                    cur.execute("INSERT INTO MASTERNODES(ident, status, protocol, payee, last_seen,"
                                            " active_seconds, last_paid_time, last_paid_block, ip, dmt_active,"
                                            " dmt_create_time) "
                                            "VALUES (?,?,?,?,?,?,?,?,?,?,?)",
                                            (mn.ident, mn.status, mn.protocol, mn.payee, mn.lastseen,
                                             mn.activeseconds, mn.lastpaidtime, mn.lastpaidblock, mn.ip, 1,
                                             datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')))
                                    mn.db_id = cur.lastrowid
                                    db_modified = True
                            else:
                                existing_mn.marker = True
                                update_masternode_data(existing_mn, mn, cur)
                                db_modified = True

                        # remove from the cache masternodes that no longer exist
                        for mn_index in reversed(range(len(self.masternodes))):
                            mn = self.masternodes[mn_index]

                            if not mn.marker:
                                if self.db_intf.db_active:
                                    cur.execute("UPDATE MASTERNODES set dmt_active=0, dmt_deactivation_time=?"
                                                "WHERE ID=?",
                                                (datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S'),
                                                mn.db_id))
                                    db_modified = True
                                self.masternodes_by_ident.pop(mn.ident, 0)
                                del self.masternodes[mn_index]

                        app_cache.set_value(f'MasternodesLastReadTime_{self.app_config.dash_network}', int(time.time()))
                        self.update_mn_queue_values()
                    finally:
                        if db_modified:
                            self.db_intf.commit()
                        if cur is not None:
                            self.db_intf.release_cursor()

                    return self.masternodes
            else:
                mns = self.proxy.ghostnodelist(*args)
                mns = parse_mns(mns)
                return mns
        else:
            raise Exception('Not connected')
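
A short usage sketch (an assumption, not from the original source); 'dashd_intf' stands for an instance of the class containing get_ghostnodelist().

    # use the cached list when it is younger than MASTERNODES_CACHE_VALID_SECONDS
    mns = dashd_intf.get_ghostnodelist('full')
    # force a fresh read from the daemon regardless of the cache age
    mns = dashd_intf.get_ghostnodelist('full', data_max_age=0)
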
 def edtDestAddressChanged(self):
     # save payment address to cache
     cache.set_value('WndPayoutPaymentAddress', self.edtDestAddress.text())
 def save_cache_settings(self):
     app_cache.save_window_size(self)
     app_cache.set_value(CACHE_ITEM_SHOW_FIELD_HINTS, self.show_field_hinds)
 def on_act_clear_mru_items(self):
     self.recent_data_files.clear()
     app_cache.set_value(CACHE_ITEM_DATA_FILE_MRU_LIST,
                         self.recent_data_files)
     self.update_mru_menu_items()