def read_user_config(path, dormant=False):
    """Parse and store the user config settings in encompass.conf into user_config[].

    dormant: Whether the global active chain should be ignored.
    """
    if not path:
        return {}  # Return a dict, since we will call update() on it.
    config_path = os.path.join(path, "config")
    result = {}
    try:
        with open(config_path, "r") as f:
            data = f.read()
    except IOError:
        print_msg("Error: Cannot read config file.")
        return {}
    try:
        result = json.loads(data)
    except:
        try:
            result = ast.literal_eval(data)
        except:
            print_msg("Error: Cannot read config file.")
            return {}
    if not type(result) is dict:
        return {}
    if not dormant:
        chainparams.set_active_chain(result.get('active_chain_code', 'BTC'))
    return result
def insert(self, index, val):
    if index < 0:
        print_msg("Error: negative index", abort=True)
    if index >= self.count():
        for i in range(self.count(), index + 1):
            self.l.append(0)
    self.l[index] = val
def init_interface(self):
    if self.config.get('server'):
        self.init_with_server(self.config)
    else:
        if self.config.get('auto_cycle') is None:
            self.config.set_key('auto_cycle', True, False)

    if not self.is_connected and self.config.get('auto_cycle'):
        print_msg("Using random server...")
        servers = filter_protocol(DEFAULT_SERVERS, 's')
        while servers:
            server = random.choice(servers)
            servers.remove(server)
            print server
            self.config.set_key('server', server, False)
            self.init_with_server(self.config)
            if self.is_connected:
                break

        if not self.is_connected:
            print 'no server available'
            self.connect_event.set()  # to finish start
            self.server = 'ecdsa.org:50001:t'
            self.proxy = None
            return

    self.connect_event.set()
    if self.is_connected:
        self.send([('server.version', [ELECTRUM_VERSION, PROTOCOL_VERSION])])
        self.send([('server.banner', [])])
        self.trigger_callback('connected')
    else:
        self.trigger_callback('notconnected')
def _mktx(self, outputs, fee=None, change_addr=None, domain=None):
    for to_address, amount in outputs:
        if not is_valid(to_address):
            raise Exception("Invalid Litecoin address", to_address)

    if change_addr:
        if not is_valid(change_addr):
            raise Exception("Invalid Litecoin address", change_addr)

    if domain is not None:
        for addr in domain:
            if not is_valid(addr):
                raise Exception("invalid Litecoin address", addr)
            if not self.wallet.is_mine(addr):
                raise Exception("address not in wallet", addr)

    for k, v in self.wallet.labels.items():
        if change_addr and v == change_addr:
            change_addr = k

    final_outputs = []
    for to_address, amount in outputs:
        for k, v in self.wallet.labels.items():
            if v == to_address:
                to_address = k
                print_msg("alias", to_address)
                break

        amount = int(100000000 * amount)
        final_outputs.append(('address', to_address, amount))

    if fee:
        fee = int(100000000 * fee)

    return self.wallet.mktx(final_outputs, self.password, fee, change_addr, domain)
def create_new_address(self, for_change):
    addresses = self.change if for_change else self.addresses
    n = len(addresses)
    address = self.get_address(for_change, n)
    addresses.append(address)
    print_msg(address)
    return address
def create_new_address(self, account, for_change):
    addresses = self.accounts[account][for_change]
    n = len(addresses)
    address = self.get_new_address(account, for_change, n)
    self.accounts[account][for_change].append(address)
    self.history[address] = []
    print_msg(address)
    return address
def __init__(self, config={}):
    self.config = config
    self.electrum_version = ELECTRUM_VERSION
    self.gap_limit_for_change = 3  # constant

    # saved fields
    self.seed_version = config.get('seed_version', SEED_VERSION)
    self.gap_limit = config.get('gap_limit', 5)
    self.use_change = config.get('use_change', True)
    self.fee = int(config.get('fee', 100000))
    self.num_zeros = int(config.get('num_zeros', 0))
    self.use_encryption = config.get('use_encryption', False)
    self.seed = config.get('seed', '')              # encrypted
    self.labels = config.get('labels', {})
    self.frozen_addresses = config.get('frozen_addresses', [])
    self.prioritized_addresses = config.get('prioritized_addresses', [])
    self.addressbook = config.get('contacts', [])
    self.imported_keys = config.get('imported_keys', {})
    self.history = config.get('addr_history', {})   # address -> list(txid, height)
    self.accounts = config.get('accounts', {})      # this should not include public keys

    self.SequenceClass = ElectrumSequence
    self.sequences = {}
    self.sequences[0] = self.SequenceClass(self.config.get('master_public_key'))

    if self.accounts.get(0) is None:
        self.accounts[0] = {0: [], 1: [], 'name': 'Main account'}

    self.transactions = {}
    tx = config.get('transactions', {})
    try:
        for k, v in tx.items():
            self.transactions[k] = Transaction(v)
    except:
        print_msg("Warning: Cannot deserialize transactions. skipping")

    # not saved
    self.prevout_values = {}  # my own transaction outputs
    self.spent_outputs = []
    self.receipt = None       # next receipt
    self.banner = ''

    # spv
    self.verifier = None

    # there is a difference between wallet.up_to_date and interface.is_up_to_date()
    # interface.is_up_to_date() returns true when all requests have been answered and processed
    # wallet.up_to_date is true when the wallet is synchronized (stronger requirement)
    self.up_to_date = False
    self.lock = threading.Lock()
    self.tx_event = threading.Event()

    if self.seed_version != SEED_VERSION:
        raise ValueError("This wallet seed is deprecated. Please run upgrade.py for a diagnostic.")

    for tx_hash in self.transactions.keys():
        self.update_tx_outputs(tx_hash)
def parse(self):
    instruction = []
    while 1:
        try:
            imp = self.imp()
        except WsSyntaxError, e:
            print_msg(e, abort=True)
        except StopIteration:
            break
def run(self):
    while 1:
        i = self.program[self.col]
        self.col += 1
        try:
            r = self.exe(i)
            if r == 'end':
                break
        except Exception, e:
            print_msg(e, abort=True)
def create_new_address(self, for_change):
    pubkeys_list = self.change_pubkeys if for_change else self.receiving_pubkeys
    addr_list = self.change_addresses if for_change else self.receiving_addresses
    n = len(pubkeys_list)
    pubkeys = self.derive_pubkeys(for_change, n)
    address = self.pubkeys_to_address(pubkeys)
    pubkeys_list.append(pubkeys)
    addr_list.append(address)
    print_msg(address)
    return address
def check_cert(host, cert):
    from OpenSSL import crypto as c
    _cert = c.load_certificate(c.FILETYPE_PEM, cert)

    m = "host: %s\n" % host
    m += "has_expired: %s\n" % _cert.has_expired()
    m += "pubkey: %s bits\n" % _cert.get_pubkey().bits()
    m += "serial number: %s\n" % _cert.get_serial_number()
    #m += "issuer: %s\n" % _cert.get_issuer()
    #m += "algo: %s\n" % _cert.get_signature_algorithm()
    m += "version: %s\n" % _cert.get_version()
    print_msg(m)
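# Hedged usage sketch for check_cert above (not part of the original source):
# fetch a server's certificate in PEM form and print its summary via print_msg.
# The host and port values are illustrative placeholders only.
import ssl

pem_cert = ssl.get_server_certificate(('electrum.org', 50002))
check_cert('electrum.org', pem_cert)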
def __init__(self, config):
    network = Network(config)
    if not network.start(wait=True):
        print_msg("Not connected, aborting.")
        sys.exit(1)
    self.network = network

    self.server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    self.server.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    self.server.bind(('', 8000))
    self.server.listen(5)
    self.server.settimeout(1)
    self.running = False
    self.timeout = 60
def download_subdirectory(subdir_name, options):
    """
    Downloads and extracts only a certain subdirectory
    Works by downloading the whole repo and taking just the folder that we need.
    """
    util.print_msg("info", "Preparing to download the subdirectory %s" % subdir_name)

    TMPDIR_NAME = "grabrc.subdir.tmpd"
    TMPDIR_PATH = os.path.join(options.destdir, TMPDIR_NAME)
    TARGET_PATH = os.path.join(options.destdir, options.outfile or subdir_name)

    logging.debug("Subdirectory tmpdir: %s" % TMPDIR_PATH)
    logging.debug("Subdirectory target: %s" % TARGET_PATH)

    util.info("Creating temporary directory paths...")

    if options.append:
        util.warn("Append option doesn't apply to directories. \
                  Falling to default behavior of backing up \
                  the existing directory")

    target_exists = os.path.exists(TARGET_PATH)
    if target_exists:
        if options.replace:
            util.info("Replacing the existing directory %s" % TARGET_PATH)
            shutil.rmtree(TARGET_PATH)
        else:
            util.warn("Found an existing directory %s" % TARGET_PATH)
            util.warn("Backing up existing directory %s to %s%s" %
                      (TARGET_PATH, TARGET_PATH, Const.BACKUP_SUFFIX))
            util.backup_file(TARGET_PATH)

    # Try to download the repository then move it to the current directory
    # _create_grabrc_folder will check if the directory already exists
    try:
        # Download the repository and move the subdirectory
        _create_grabrc_folder(options.github, options.destdir, TMPDIR_NAME)
        #os.makedirs(TMPDIR_PATH)  # Create the tmpdir again

        # We still use subdir_name, the original name
        if not os.path.exists(os.path.join(TMPDIR_PATH, subdir_name)):
            util.exit_runtime_error("Couldn't find the subdirectory %s in the repository" % subdir_name)

        shutil.move(os.path.join(TMPDIR_PATH, subdir_name), TARGET_PATH)
    finally:
        # Clean up after ourselves
        util.info("Cleaning up temporary directories...")
        shutil.rmtree(TMPDIR_PATH)

    util.success("Downloaded subdirectory %s to %s" % (subdir_name, TARGET_PATH))
def download_repo_nongit(options):
    """Downloads and extracts the git repository to the local filesystem"""
    util.print_msg("info", "Downloading the repository...")

    if options.replace:
        shutil.rmtree(os.path.join(options.destdir, options.outfile or Const.DEFAULT_DIRNAME))
    elif options.append:
        util.print_msg("info", "Repository download doesn't support the --append option. \
                       Falling back to default behavior of backing up the existing \
                       directory")

    # Delegate to _create_grabrc_folder for backing up existing
    _create_grabrc_folder(options.github,
                          options.destdir,
                          options.outfile or Const.DEFAULT_DIRNAME)
def run_command(cmd, wallet, password=None, args=[]):
    network = None
    cmd_runner = Commands(wallet)
    func = getattr(cmd_runner, cmd.name)
    cmd_runner.password = password
    try:
        result = func(*args[1:])
    except Exception:
        traceback.print_exc(file=sys.stdout)
        sys.exit(1)

    if type(result) == str:
        print_msg(result)
    elif result is not None:
        print_json(result)
def help(self, cmd=None):
    if cmd not in known_commands:
        print_msg("\nList of commands:", ', '.join(sorted(known_commands)))
    else:
        cmd = known_commands[cmd]
        print_msg(cmd.description)
        if cmd.syntax:
            print_msg("Syntax: " + cmd.syntax)
        if cmd.options:
            print_msg("options:\n" + cmd.options)
    return None
def check_cert(host, cert):
    try:
        b = pem.dePem(cert, 'CERTIFICATE')
        x = x509.X509(b)
    except:
        traceback.print_exc(file=sys.stdout)
        return

    try:
        x.check_date()
        expired = False
    except:
        expired = True

    m = "host: %s\n" % host
    m += "has_expired: %s\n" % expired
    util.print_msg(m)
def execute(filename):
    try:
        f = open(filename)
    except:
        print_msg("open file error\n", abort=True)
    text = f.read()
    f.close()

    token = tokenizer(text)
    parser = Parser(token)
    instruction = parser.parse()
    #print repr(text)
    #print instruction
    vm = VM(instruction)
    vm.run()
    return text
def check_cert(host, cert):
    try:
        x = x509.X509()
        x.parse(cert)
    except:
        traceback.print_exc(file=sys.stdout)
        return

    try:
        x.check_date()
        expired = False
    except:
        expired = True

    m = "host: %s\n" % host
    m += "has_expired: %s\n" % expired
    util.print_msg(m)
def read_user_config(path):
    """Parse and store the user config settings in electrum-ltc.conf into user_config[]."""
    if not path:
        return {}
    config_path = os.path.join(path, "config")
    if not os.path.exists(config_path):
        return {}
    try:
        with open(config_path, "r") as f:
            data = f.read()
        result = json.loads(data)
    except:
        print_msg("Warning: Cannot read config file.", config_path)
        return {}
    if not type(result) is dict:
        return {}
    return result
def read_user_config(self):
    """Parse and store the user config settings in electrum.conf into user_config[]."""
    if not self.path:
        return

    path = os.path.join(self.path, "config")
    if os.path.exists(path):
        try:
            with open(path, "r") as f:
                data = f.read()
        except IOError:
            return
        try:
            d = ast.literal_eval(data)  # parse raw data from reading wallet file
        except Exception:
            print_msg("Error: Cannot read config file.")
            return

        self.user_config = d
def _create_grabrc_folder(username, destdir, dirname):
    """
    Creates the local copy of the grabrc git repository
    in directory destdir with name dirname.
    The path destdir/dirname should not already exist
    """
    # Check if the repo exists
    repo_dirpath = os.path.join(destdir, dirname)
    tmp_path = os.path.join(repo_dirpath, "grabrctmp.d")

    def download_and_untar():
        """Downloads a tar from the server, untars one directory up"""
        repo_targz = _get_grabrc_archive(username, "targz")
        util.untar_gz(repo_targz)
        os.renames(glob.glob("./%s*" % (Const.REPO_NAME))[0], tmp_path)

    # Sanity check: if they have a file named with the directory (they shouldn't)
    if os.path.isfile(repo_dirpath):
        util.warn("Found a file where there should be a git directory. \
                  Backing up...")
        util.backup_file(repo_dirpath)
    elif os.path.isdir(repo_dirpath):
        util.info("Found an existing directory named %s in %s..." % (dirname, destdir))
        util.info("Backing up the directory...")
        util.backup_file(repo_dirpath)

    if not os.path.exists(repo_dirpath):
        # Make a temporary staging directory
        util.print_msg("info", "Preparing repository directory at %s" % repo_dirpath)
        os.makedirs(repo_dirpath)
        os.chdir(repo_dirpath)
        download_and_untar()

        # Move everything from the tmpdirectory to one level up
        repofiles = [os.path.join(tmp_path, filename) for filename in os.listdir(tmp_path)]
        map(lambda f: shutil.move(f, repo_dirpath), repofiles)

        # os.rmdir requires empty dir
        os.rmdir(tmp_path)
    else:
        util.exit_runtime_error("The repository's target directory exists at %s \
                                but should have been backed up to a different location. Race condition?" % repo_dirpath)

    util.success("Finished repository download.")
def read_user_config(path):
    """Parse and store the user config settings in electrum-ixc.conf into user_config[]."""
    if not path:
        return {}  # Return a dict, since we will call update() on it.

    config_path = os.path.join(path, "config")
    result = {}
    if os.path.exists(config_path):
        try:
            with open(config_path, "r") as f:
                data = f.read()
            result = ast.literal_eval(data)  # parse raw data from reading wallet file
        except Exception:
            print_msg("Error: Cannot read config file.")
            result = {}

    if not type(result) is dict:
        return {}

    return result
def init_interface(self):
    if self.config.get("server"):
        self.init_with_server(self.config)
    else:
        if self.config.get("auto_cycle") is None:
            self.config.set_key("auto_cycle", True, False)

    if not self.is_connected and self.config.get("auto_cycle"):
        print_msg("Using random server...")
        servers_tcp = DEFAULT_SERVERS[:]
        servers_http = DEFAULT_HTTP_SERVERS[:]
        while servers_tcp or servers_http:
            if servers_tcp:
                server = random.choice(servers_tcp)
                servers_tcp.remove(server)
            else:
                # try HTTP if we can't get a TCP connection
                server = random.choice(servers_http)
                servers_http.remove(server)
            print server
            self.config.set_key("server", server, False)
            self.init_with_server(self.config)
            if self.is_connected:
                break

        if not self.is_connected:
            print "no server available"
            self.connect_event.set()  # to finish start
            self.server = "ecdsa.org:50001:t"
            self.proxy = None
            return

    self.connect_event.set()
    if self.is_connected:
        self.send([("server.version", [ELECTRUM_VERSION, PROTOCOL_VERSION])])
        self.send([("server.banner", [])])
        self.trigger_callback("connected")
    else:
        self.trigger_callback("notconnected")
def read_user_config(path):
    """Parse and store the user config settings in electrum.conf into user_config[]."""
    if not path:
        return {}
    config_path = os.path.join(path, "config")
    try:
        with open(config_path, "r") as f:
            data = f.read()
    except IOError:
        print_msg("Error: Cannot read config file.", path)
        return {}
    try:
        result = json.loads(data)
    except:
        try:
            result = ast.literal_eval(data)
        except:
            print_msg("Error: Cannot read config file.")
            return {}
    if not type(result) is dict:
        return {}
    return result
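# Hedged usage sketch for read_user_config above (not part of the original source):
# the config directory path is an illustrative placeholder. Defaults are merged with
# the user's settings via dict.update(), which is why an empty dict is returned on error.
system_defaults = {'verbose': False}
config = dict(system_defaults)
config.update(read_user_config(os.path.expanduser('~/.electrum')))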
def init_interface(self):
    if self.config.get('server'):
        self.init_with_server(self.config)
    else:
        if self.config.get('auto_cycle') is None:
            self.config.set_key('auto_cycle', True, False)

    if not self.is_connected and self.config.get('auto_cycle'):
        print_msg("Using random server...")
        servers_tcp = DEFAULT_SERVERS[:]
        servers_http = DEFAULT_HTTP_SERVERS[:]
        while servers_tcp or servers_http:
            if servers_tcp:
                server = random.choice(servers_tcp)
                servers_tcp.remove(server)
            else:
                # try HTTP if we can't get a TCP connection
                server = random.choice(servers_http)
                servers_http.remove(server)
            print server
            self.config.set_key('server', server, False)
            self.init_with_server(self.config)
            if self.is_connected:
                break

        if not self.is_connected:
            print 'no server available'
            self.connect_event.set()  # to finish start
            self.server = 'electrum.be:50001:t'
            self.proxy = None
            return

    self.connect_event.set()
    if self.is_connected:
        self.send([('server.version', [ELECTRUM_VERSION, PROTOCOL_VERSION])])
        self.trigger_callback('connected')
    else:
        self.trigger_callback('notconnected')
def get_chain_instance(code):
    """Gets an instance of the given chain's class.

    Args:
        code (str): ChainParams code of the blockchain.

    Returns:
        An instance of the blockchain's class. All blockchain classes
        derive from CryptoCur, the base class defined in lib/chains/cryptocur.py
    """
    code = code.upper()
    if not is_known_chain(code):
        return None
    params = get_params(code)
    module_name = params.module_name
    classmodule = None
    try:
        # If we're running tests, try the local path first.
        if testing_mode:
            # If importing fails, try with a different path.
            try:
                classmodule = importlib.import_module(''.join(['lib.chains.', module_name]))
            except (AttributeError, ImportError):
                classmodule = importlib.import_module(''.join(['chainkey.chains.', module_name]))
        else:
            # If importing fails, try with a different path.
            try:
                classmodule = importlib.import_module(''.join(['chainkey.chains.', module_name]))
            except (AttributeError, ImportError):
                classmodule = importlib.import_module(''.join(['lib.chains.', module_name]))
        classInst = getattr(classmodule, 'Currency')
    except (AttributeError, ImportError):
        print_msg("Error: Cannot load chain '{}'.".format(code))
        return None
    return classInst()
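# Hedged usage sketch for get_chain_instance above (not part of the original source):
# 'BTC' is assumed to be a registered ChainParams code; lookups are case-insensitive
# because the function upper-cases the code, and unknown codes return None.
chain = get_chain_instance('btc')
if chain is None:
    print_msg("Error: unknown chain code.")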
def history():
    config = SimpleConfig()
    url = None
    cmd = 'history'
    cmd = known_commands[cmd]

    # instantiate wallet for command-line
    storage = WalletStorage(config)

    if cmd.requires_wallet and not storage.file_exists:
        print_msg("Error: Wallet file not found.")
        sys.exit(0)

    wallet = Wallet(storage)

    # commands needing password
    if cmd.requires_password:
        if wallet.seed == '':
            seed = ''
            password = None
        elif wallet.use_encryption:
            password = prompt_password('Password:', False)
            if not password:
                print_msg("Error: Password required")
                sys.exit(1)
            # check password
            try:
                seed = wallet.get_seed(password)
            except Exception:
                print_msg("Error: This password does not decode this wallet.")
                sys.exit(1)
        else:
            password = None
            seed = wallet.get_seed(None)
    else:
        password = None

    # run the command
    return get_command(cmd, wallet, password)
def usage():
    print_msg("python-wspace 0.1 (c) 2014\n")
    print_msg("-------------------------------\n")
    print_msg("Usage: wspace.py [file]\n")
def main(args: argparse.Namespace):
    """main function that parses args and runs sgrep"""

    # get the proper paths for targets i.e. handle base path of /home/repo when it exists in docker
    targets = config_resolver.resolve_targets(args.target)

    # first check if user asked to generate a config
    if args.generate_config:
        config_resolver.generate_config()

    # let's check for a pattern
    elif args.pattern:
        # and a language
        if not args.lang:
            print_error_exit("language must be specified when a pattern is passed")
        lang = args.lang
        pattern = args.pattern

        # TODO for now we generate a manual config. Might want to just call sgrep -e ... -l ...
        configs = config_resolver.manual_config(pattern, lang)
    else:
        # else let's get a config. A config is a dict from config_id -> config. Config Id is not well defined at this point.
        configs = config_resolver.resolve_config(args.config)

    # if we can't find a config, use default r2c rules
    if not configs:
        print_error_exit(
            f"No config given. If you want to see some examples, try running with --config r2c"
        )

    # let's split our configs into valid and invalid configs.
    # It's possible that a config_id exists in both because we check valid rules and invalid rules
    # instead of just hard failing for that config if mal-formed
    valid_configs, errors = validate_configs(configs)

    validate = args.validate
    strict = args.strict

    if errors:
        if strict:
            print_error_exit(
                f"run with --strict and there were {len(errors)} errors loading configs"
            )
        elif validate:
            print_error_exit(
                f"run with --validate and there were {len(errors)} errors loading configs"
            )
    elif validate:  # no errors!
        print_error_exit("Config is valid", exit_code=0)

    if not args.no_rewrite_rule_ids:
        # re-write the configs to have the hierarchical rule ids
        valid_configs = rename_rule_ids(valid_configs)

    # now validate all the patterns inside the configs
    if not args.skip_pattern_validation:
        start_validate_t = time.time()
        invalid_patterns = validate_patterns(valid_configs)
        if len(invalid_patterns):
            print_error_exit(
                f"{len(invalid_patterns)} invalid patterns found inside rules; aborting"
            )
        debug_print(f"debug: validated config in {time.time() - start_validate_t}")

    # extract just the rules from valid configs
    all_rules = flatten_configs(valid_configs)

    if not args.pattern:
        plural = "s" if len(valid_configs) > 1 else ""
        config_id_if_single = (
            list(valid_configs.keys())[0] if len(valid_configs) == 1 else ""
        )
        invalid_msg = (
            f"({len(errors)} config files were invalid)" if len(errors) else ""
        )
        print_msg(
            f"running {len(all_rules)} rules from {len(valid_configs)} config{plural} {config_id_if_single} {invalid_msg}"
        )
        # TODO log valid and invalid configs if verbose

    # a rule can have multiple patterns inside it. Flatten these so we can
    # send sgrep a single yml file list of patterns
    all_patterns = list(flatten_rule_patterns(all_rules))

    # actually invoke sgrep
    start = datetime.now()
    output_json = invoke_sgrep(all_patterns, targets, strict)
    debug_print(f"sgrep ran in {datetime.now() - start}")
    debug_print(str(output_json))

    # group output; we want to see all of the same rule ids on the same file path
    by_rule_index: Dict[int, Dict[str, List[Dict[str, Any]]]] = collections.defaultdict(
        lambda: collections.defaultdict(list)
    )

    for finding in output_json["errors"]:
        print_error(f"sgrep: {finding['path']}: {finding['check_id']}")

    if strict and len(output_json["errors"]):
        print_error_exit(
            f"run with --strict and {len(output_json['errors'])} errors occurred during sgrep run; exiting"
        )

    for finding in output_json["matches"]:
        # decode the rule index from the output check_id
        rule_index = int(finding["check_id"].split(".")[0])
        by_rule_index[rule_index][finding["path"]].append(finding)

    current_path = Path.cwd()
    outputs_after_booleans = []
    ignored_in_tests = 0

    for rule_index, paths in by_rule_index.items():
        expression = build_boolean_expression(all_rules[rule_index])
        debug_print(str(expression))
        # expression = (op, pattern_id) for (op, pattern_id, pattern) in expression_with_patterns]
        for filepath, results in paths.items():
            debug_print(
                f"-------- rule (index {rule_index}) {all_rules[rule_index]['id']}------ filepath: {filepath}"
            )
            check_ids_to_ranges = parse_sgrep_output(results)
            debug_print(str(check_ids_to_ranges))
            valid_ranges_to_output = evaluate_expression(
                expression,
                check_ids_to_ranges,
                flags={
                    RCE_RULE_FLAG: args.dangerously_allow_arbitrary_code_execution_from_rules
                },
            )

            # only output matches which are inside these offsets!
            debug_print(f"compiled result {valid_ranges_to_output}")
            debug_print("-" * 80)
            for result in results:
                if sgrep_finding_to_range(result).range in valid_ranges_to_output:
                    path_object = Path(result["path"])
                    if args.exclude_tests and should_exclude_this_path(path_object):
                        ignored_in_tests += 1
                        continue

                    # restore the original rule ID
                    result["check_id"] = all_rules[rule_index]["id"]
                    # rewrite the path to be relative to the current working directory
                    result["path"] = str(safe_relative_to(path_object, current_path))

                    # restore the original message
                    result["extra"]["message"] = rewrite_message_with_metavars(
                        all_rules[rule_index], result
                    )
                    result = transform_to_r2c_output(result)
                    outputs_after_booleans.append(result)

    if ignored_in_tests > 0:
        print_error(
            f"warning: ignored {ignored_in_tests} results in tests due to --exclude-tests option"
        )

    # output results
    output_data = {"results": outputs_after_booleans}
    if not args.quiet:
        if args.json:
            print(build_output_json(output_data))
        else:
            print("\n".join(build_normal_output(output_data, color_output=True)))
    if args.output:
        save_output(args.output, output_data, args.json)
    if args.error and outputs_after_booleans:
        sys.exit(FINDINGS_EXIT_CODE)

    return output_data
#!/usr/bin/env python
#-*- coding:utf-8 -*-

import socket
import select
import Queue
import logging

from util import print_msg

logging.basicConfig(level=logging.DEBUG, format='%(name)s: %(message)s')

# Create the server socket and listen for up to 5 client connections
server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
server.setblocking(0)  # non-blocking mode
server_address = ('localhost', 10000)
print_msg('server starting up %s on port %s', *server_address)
server.bind(server_address)
server.listen(5)

# Initialize the per-connection message queues
message_queues = {}

# Build the epoll read-only and read-write event masks and register the
# server socket for read-only events.
# epoll events:
#   EPOLLIN    connection arrived or data available to read
#   EPOLLPRI   high-priority connection or data arrived
#   EPOLLOUT   data can be written (sent)
#   EPOLLERR   error occurred on the connection
#   EPOLLET    edge-triggered mode (non-blocking sockets only)
#   EPOLLHUP   connection closed
#   EPOLLNVAL  connection not open
READ_ONLY = (select.EPOLLIN | select.EPOLLPRI | select.EPOLLHUP
def __init__(self, config={}):
    self.config = config
    self.electrum_version = ELECTRUM_VERSION
    self.gap_limit_for_change = 3  # constant

    # saved fields
    self.seed_version = config.get('seed_version', SEED_VERSION)
    self.gap_limit = config.get('gap_limit', 5)
    self.use_change = config.get('use_change', True)
    self.fee = int(config.get('fee_per_kb', 50000))
    self.num_zeros = int(config.get('num_zeros', 0))
    self.use_encryption = config.get('use_encryption', False)
    self.seed = config.get('seed', '')              # encrypted
    self.labels = config.get('labels', {})
    self.frozen_addresses = config.get('frozen_addresses', [])
    self.prioritized_addresses = config.get('prioritized_addresses', [])
    self.addressbook = config.get('contacts', [])
    self.imported_keys = config.get('imported_keys', {})
    self.history = config.get('addr_history', {})   # address -> list(txid, height)
    self.accounts = config.get('accounts', {})      # this should not include public keys

    self.SequenceClass = ElectrumSequence
    self.sequences = {}
    self.sequences[0] = self.SequenceClass(self.config.get('master_public_key'))

    if self.accounts.get(0) is None:
        self.accounts[0] = {0: [], 1: [], 'name': 'Main account'}

    self.transactions = {}
    tx = config.get('transactions', {})
    try:
        for k, v in tx.items():
            self.transactions[k] = Transaction(v)
    except:
        print_msg("Warning: Cannot deserialize transactions. skipping")

    # not saved
    self.prevout_values = {}  # my own transaction outputs
    self.spent_outputs = []

    # spv
    self.verifier = None

    # there is a difference between wallet.up_to_date and interface.is_up_to_date()
    # interface.is_up_to_date() returns true when all requests have been answered and processed
    # wallet.up_to_date is true when the wallet is synchronized (stronger requirement)
    self.up_to_date = False
    self.lock = threading.Lock()
    self.transaction_lock = threading.Lock()
    self.tx_event = threading.Event()

    if self.seed_version != SEED_VERSION:
        raise ValueError(
            "This wallet seed is deprecated. Please run upgrade.py for a diagnostic."
        )

    for tx_hash, tx in self.transactions.items():
        if self.check_new_tx(tx_hash, tx):
            self.update_tx_outputs(tx_hash)
        else:
            print_error("unreferenced tx", tx_hash)
            self.transactions.pop(tx_hash)
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

from decimal import Decimal
import threading, time, Queue, os, sys, shutil
from math import pow as dec_pow
from util import user_dir, appdata_dir, print_error, print_msg
from bitcoin import *

try:
    from vtc_scrypt import getPoWHash
except ImportError:
    print_msg("Warning: vtc_scrypt not available, using fallback")
    from scrypt import scrypt_N_1_1_80 as getPoWHash

KGW_headers = [{} for x in xrange(4032)]
Kimoto_vals = [
    1 + (0.7084 * dec_pow((Decimal(x + 1) / Decimal(144)), -1.228))
    for x in xrange(4032)
]


class Blockchain(threading.Thread):

    def __init__(self, config, network):
        threading.Thread.__init__(self)
        self.daemon = True
        self.config = config
        self.network = network
def post_output(output_url: str, output_data: Dict[str, Any]) -> None:
    print_msg(f"posting to {output_url}...")
    r = requests.post(output_url, json=output_data)
    debug_print(f"posted to {output_url} and got status_code:{r.status_code}")
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.

import os

import util
import bitcoin
from bitcoin import *

try:
    from tribus_hash import getPoWHash
except ImportError:
    util.print_msg("Warning: tribus_hash not available, using fallback")
    from scrypt import scrypt_1024_1_1_80 as getPoWHash

MAX_TARGET = 0x0000F8FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF


class Blockchain(util.PrintError):
    '''Manages blockchain headers and their verification'''

    def __init__(self, config, network):
        self.config = config
        self.network = network
        self.checkpoint_height, self.checkpoint_hash = self.get_checkpoint()
        self.check_truncate_headers()
        self.set_local_height()

    def height(self):
def print_msg(self, *msg):
    print_msg("[Synchronizer]", *msg)
class Interface(threading.Thread):

    def __init__(self, server, config=None):
        threading.Thread.__init__(self)
        self.daemon = True
        self.config = config if config is not None else SimpleConfig()
        self.connect_event = threading.Event()

        self.subscriptions = {}
        self.lock = threading.Lock()

        self.rtime = 0
        self.bytes_received = 0
        self.is_connected = False
        self.poll_interval = 1
        self.debug = False  # dump network messages. can be changed at runtime using the console

        #json
        self.message_id = 0
        self.unanswered_requests = {}
        self.pending_transactions_for_notifications = []

        # parse server
        self.server = server
        try:
            host, port, protocol = self.server.split(':')
            port = int(port)
        except:
            self.server = None
            return

        if protocol not in 'ghst':
            raise BaseException('Unknown protocol: %s' % protocol)

        self.host = host
        self.port = port
        self.protocol = protocol
        self.use_ssl = (protocol in 'sg')
        self.proxy = self.parse_proxy_options(self.config.get('proxy'))
        if self.proxy:
            self.proxy_mode = proxy_modes.index(self.proxy["mode"]) + 1

    def queue_json_response(self, c):
        # uncomment to debug
        if self.debug:
            print_error("<--", c)

        msg_id = c.get('id')
        error = c.get('error')

        if error:
            print_error("received error:", c)
            if msg_id is not None:
                with self.lock:
                    method, params, callback = self.unanswered_requests.pop(msg_id)
                callback(self, {'method': method, 'params': params, 'error': error, 'id': msg_id})
            return

        if msg_id is not None:
            with self.lock:
                method, params, callback = self.unanswered_requests.pop(msg_id)
            result = c.get('result')
        else:
            # notification
            method = c.get('method')
            params = c.get('params')

            if method == 'blockchain.numblocks.subscribe':
                result = params[0]
                params = []
            elif method == 'blockchain.headers.subscribe':
                result = params[0]
                params = []
            elif method == 'blockchain.address.subscribe':
                addr = params[0]
                result = params[1]
                params = [addr]

            with self.lock:
                for k, v in self.subscriptions.items():
                    if (method, params) in v:
                        callback = k
                        break
                else:
                    print_error("received unexpected notification", method, params)
                    print_error(self.subscriptions)
                    return

        callback(self, {'method': method, 'params': params, 'result': result, 'id': msg_id})

    def on_version(self, i, result):
        self.server_version = result

    def start_http(self):
        self.session_id = None
        self.is_connected = True
        self.connection_msg = ('https' if self.use_ssl else 'http') + '://%s:%d' % (self.host, self.port)
        try:
            self.poll()
        except:
            print_error("http init session failed")
            self.is_connected = False
            return

        if self.session_id:
            print_error('http session:', self.session_id)
            self.is_connected = True
        else:
            self.is_connected = False

    def run_http(self):
        self.is_connected = True
        while self.is_connected:
            try:
                if self.session_id:
                    self.poll()
                time.sleep(self.poll_interval)
            except socket.gaierror:
                break
            except socket.error:
                break
            except:
                traceback.print_exc(file=sys.stdout)
                break

        self.is_connected = False

    def poll(self):
        self.send([], None)

    def send_http(self, messages, callback):
        import urllib2, json, time, cookielib
        print_error("send_http", messages)

        if self.proxy:
            socks.setdefaultproxy(self.proxy_mode, self.proxy["host"], int(self.proxy["port"]))
            socks.wrapmodule(urllib2)

        cj = cookielib.CookieJar()
        opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))
        urllib2.install_opener(opener)

        t1 = time.time()

        data = []
        for m in messages:
            method, params = m
            if type(params) != type([]):
                params = [params]
            data.append({'method': method, 'id': self.message_id, 'params': params})
            self.unanswered_requests[self.message_id] = method, params, callback
            self.message_id += 1

        if data:
            data_json = json.dumps(data)
        else:
            # poll with GET
            data_json = None

        headers = {'content-type': 'application/json'}
        if self.session_id:
            headers['cookie'] = 'SESSION=%s' % self.session_id

        try:
            req = urllib2.Request(self.connection_msg, data_json, headers)
            response_stream = urllib2.urlopen(req, timeout=DEFAULT_TIMEOUT)
        except:
            return

        for index, cookie in enumerate(cj):
            if cookie.name == 'SESSION':
                self.session_id = cookie.value

        response = response_stream.read()
        self.bytes_received += len(response)
        if response:
            response = json.loads(response)
            if type(response) is not type([]):
                self.queue_json_response(response)
            else:
                for item in response:
                    self.queue_json_response(item)

        if response:
            self.poll_interval = 1
        else:
            if self.poll_interval < 15:
                self.poll_interval += 1
        #print self.poll_interval, response

        self.rtime = time.time() - t1
        self.is_connected = True

    def start_tcp(self):
        self.connection_msg = self.host + ':%d' % self.port

        if self.proxy is not None:
            socks.setdefaultproxy(self.proxy_mode, self.proxy["host"], int(self.proxy["port"]))
            socket.socket = socks.socksocket
            # prevent dns leaks, see http://stackoverflow.com/questions/13184205/dns-over-proxy
            def getaddrinfo(*args):
                return [(socket.AF_INET, socket.SOCK_STREAM, 6, '', (args[0], args[1]))]
            socket.getaddrinfo = getaddrinfo

        if self.use_ssl:
            cert_path = os.path.join(self.config.path, 'certs', self.host)

            if not os.path.exists(cert_path):
                is_new = True
                # get server certificate.
                # Do not use ssl.get_server_certificate because it does not work with proxy
                s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
                try:
                    s.connect((self.host, self.port))
                except:
                    # print_error("failed to connect", self.host, self.port)
                    return

                try:
                    s = ssl.wrap_socket(s, ssl_version=ssl.PROTOCOL_SSLv3,
                                        cert_reqs=ssl.CERT_NONE, ca_certs=None)
                except ssl.SSLError, e:
                    print_error("SSL error:", self.host, e)
                    return

                dercert = s.getpeercert(True)
                s.close()
                cert = ssl.DER_cert_to_PEM_cert(dercert)
                # workaround android bug
                cert = re.sub("([^\n])-----END CERTIFICATE-----", "\\1\n-----END CERTIFICATE-----", cert)
                temporary_path = cert_path + '.temp'
                with open(temporary_path, "w") as f:
                    f.write(cert)
            else:
                is_new = False

        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        s.settimeout(2)
        s.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)

        try:
            s.connect((self.host.encode('ascii'), int(self.port)))
        except:
            print_error("failed to connect", self.host, self.port)
            return

        if self.use_ssl:
            try:
                s = ssl.wrap_socket(s,
                                    ssl_version=ssl.PROTOCOL_SSLv3,
                                    cert_reqs=ssl.CERT_REQUIRED,
                                    ca_certs=(temporary_path if is_new else cert_path),
                                    do_handshake_on_connect=True)
            except ssl.SSLError, e:
                print_error("SSL error:", self.host, e)
                if e.errno != 1:
                    return
                if is_new:
                    os.rename(temporary_path, cert_path + '.rej')
                else:
                    if cert_has_expired(cert_path):
                        print_error("certificate has expired:", cert_path)
                        os.unlink(cert_path)
                    else:
                        print_msg("wrong certificate", self.host)
                return
            except:
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

import threading, time, Queue, os, sys, shutil
from util import user_dir, appdata_dir, print_error, print_msg
from bitcoin import *
import hashlib
import sqlite3

try:
    from ltc_scrypt import getPoWHash as getPoWScryptHash
except ImportError:
    print_msg("Warning: ltc_scrypt not available, using fallback")
    from scrypt import scrypt_1024_1_1_80 as getPoWScryptHash

try:
    from groestl_hash import getPoWHash as getPoWGroestlHash
except ImportError:
    print_msg("Warning: groestl_hash not available, please install it")
    raise


class Blockchain(threading.Thread):

    def __init__(self, config, network):
        threading.Thread.__init__(self)
        self.daemon = True
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

# Credits to wozz for KGW implementation in python. His work is greatly appreciated.

from decimal import Decimal
import threading, time, Queue, os, sys, shutil
from math import pow as dec_pow
from util import user_dir, appdata_dir, print_error, print_msg
from bitcoin import *

try:
    from vtc_scrypt import getPoWHash
except ImportError:
    print_msg("Warning: vtc_scrypt not available, using fallback")
    from scrypt import scrypt_2048_1_1_80 as getPoWHash

KGW_headers = [{} for x in xrange(4032)]
Kimoto_vals = [1 + (0.7084 * dec_pow((Decimal(x + 1) / Decimal(144)), -1.228)) for x in xrange(4032)]


class Blockchain(threading.Thread):

    def __init__(self, config, network):
        threading.Thread.__init__(self)
        self.daemon = True
        self.config = config
        self.network = network
        self.lock = threading.Lock()
        self.local_height = 0
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.

import os

import util
import threading
import bitcoin
from bitcoin import *

try:
    from ltc_scrypt import getPoWHash
except ImportError:
    util.print_msg("Warning: ltc_scrypt not available, using fallback")
    from scrypt import scrypt_1024_1_1_80 as getPoWHash

MAX_TARGET = 0x00000FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF


def serialize_header(res):
    s = int_to_hex(res.get('version'), 4) \
        + rev_hex(res.get('prev_block_hash')) \
        + rev_hex(res.get('merkle_root')) \
        + int_to_hex(int(res.get('timestamp')), 4) \
        + int_to_hex(int(res.get('bits')), 4) \
        + int_to_hex(int(res.get('nonce')), 4)
    return s
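# Hedged usage sketch for serialize_header above (not part of the original source):
# the field values are placeholders; only the dict keys and field widths matter.
# int_to_hex and rev_hex come from this module's `from bitcoin import *`.
example_header = {
    'version': 2,
    'prev_block_hash': '00' * 32,   # 32-byte hash as a hex string
    'merkle_root': '00' * 32,       # 32-byte hash as a hex string
    'timestamp': 1400000000,
    'bits': 0x1e0ffff0,
    'nonce': 0,
}
header_hex = serialize_header(example_header)  # 80-byte header as a 160-char hex string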