def get_block_by_hash(self, block_hash):
    """Get a block by its hash from the Blockstream.info API.

    :param block_hash: The hash of the block
    :return: A dict with key 'block' on success or 'error' on failure
    """
    url = '%s/block/%s' % (self.url, block_hash)
    LOG.info('GET %s' % url)
    try:
        response = requests.get(url)
        data = response.json()
    except Exception as ex:
        LOG.error('Unable to get block %s from Blockstream.info: %s' % (block_hash, ex))
        return {'error': 'Unable to get block %s from Blockstream.info' % block_hash}

    required = ('height', 'id', 'timestamp', 'merkle_root', 'size')
    if not all(key in data for key in required):
        return {'error': 'Received invalid data: %s' % data}

    # Todo weight?
    return {'block': {'height': data['height'],
                      'hash': data['id'],
                      'time': data['timestamp'],
                      'merkleroot': data['merkle_root'],
                      'size': data['size']}}
def get_balance(self, address):
    """Get the balance of an address via the Blocktrail.com API.

    :param address: The address
    :return: A dict with key 'balance' (containing 'final', 'received' and 'sent') on
             success or 'error' on failure
    """
    url = '{api_url}/address/{address}'.format(api_url=self.url, address=address)
    try:
        LOG.info('GET %s' % url)
        r = requests.get(url)
        data = r.json()
    except Exception as ex:
        LOG.error(
            'Unable to get balance of address %s from Blocktrail.com: %s' % (address, ex))
        return {
            'error': 'Unable to get balance of address %s from Blocktrail.com' % address
        }

    # .get() instead of data['data'] so a response without a 'data' key is reported
    # as invalid data below instead of raising an uncaught KeyError
    data = data.get('data') or {}

    # BUG FIX: 'unconfirmed_received' and 'unconfirmed_sent' are accessed below but
    # were missing from the original validity check, which could raise KeyError
    required = ('balance', 'received', 'sent', 'unconfirmed_received', 'unconfirmed_sent')
    if all(key in data for key in required):
        balance = {
            'final': data['balance'] - data['unconfirmed_received'] + data['unconfirmed_sent'],
            'received': data['received'] - data['unconfirmed_received'],
            'sent': data['sent'] - data['unconfirmed_sent']
        }
        return {'balance': balance}
    else:
        return {'error': 'Received invalid data: %s' % data}
def get_balance(self, address):
    """Get the balance of an address via the Blockstream.info API.

    :param address: The address
    :return: A dict with key 'balance' (containing 'final', 'received' and 'sent') on
             success or 'error' on failure
    """
    url = self.url + '/address/{address}'.format(address=address)
    LOG.info('GET %s' % url)
    try:
        r = requests.get(url)
        data = r.json()
    except Exception as ex:
        LOG.error(
            'Unable to get address info for %s from Blockstream.info: %s' % (address, ex))
        return {
            'error': 'Unable to get address info for %s from Blockstream.info' % address
        }

    # Validate the response before using it so unexpected data is reported as an
    # 'error' dict (consistent with the other explorer methods) instead of raising
    # an uncaught KeyError
    chain_stats = data.get('chain_stats') if isinstance(data, dict) else None
    if not isinstance(chain_stats, dict) or not all(
            key in chain_stats for key in ('spent_txo_sum', 'funded_txo_sum')):
        return {'error': 'Received invalid data: %s' % data}

    # Todo fix the sent and received balance because blockstream reports this wrong
    # (also counts when change is sent back to the address itself)
    sent_balance = chain_stats['spent_txo_sum']
    received_balance = chain_stats['funded_txo_sum']
    final_balance = received_balance - sent_balance

    balance = {
        'final': final_balance,
        'received': received_balance,
        'sent': sent_balance
    }

    return {'balance': balance}
def get_prime_input_address(self, txid):
    """Get the prime input address (alphabetically first input address) of a
    transaction via the Blockchain.info API.

    :param txid: The transaction id
    :return: A dict with key 'prime_input_address' on success or 'error' on failure
    """
    url = '{api_url}/rawtx/{txid}'.format(api_url=self.url, txid=txid)
    try:
        LOG.info('GET %s' % url)
        data = requests.get(url).json()
    except Exception as ex:
        LOG.error(
            'Unable to get prime input address of tx %s from Blockchain.info: %s' % (txid, ex))
        return {
            'error': 'Unable to get prime input address of tx %s from Blockchain.info' % txid
        }

    if 'inputs' not in data:
        return {'error': 'Received invalid data: %s' % data}

    # Coinbase transactions don't have a input address, so skip inputs without 'prev_out'
    input_addresses = [tx_input['prev_out']['addr']
                       for tx_input in data['inputs'] if 'prev_out' in tx_input]

    if input_addresses:
        return {'prime_input_address': min(input_addresses)}

    # transaction was a coinbase transaction, so there are no input addresses
    return {'prime_input_address': None}
def get_block_by_hash(self, block_hash):
    """Get a block by its hash from the explorer at self.url.

    :param block_hash: The hash of the block
    :return: A dict with key 'block' on success or 'error' on failure
    """
    url = self.url + '/block/' + block_hash
    try:
        LOG.info('GET %s' % url)
        response = requests.get(url)
        data = response.json()
    except Exception as ex:
        LOG.error('Unable to get block %s from %s: %s' % (block_hash, self.url, ex))
        return {
            'error': 'Unable to get block %s from %s' % (block_hash, self.url)
        }

    required_keys = ('height', 'hash', 'time', 'merkleroot', 'size')
    if not all(key in data for key in required_keys):
        return {'error': 'Received invalid data: %s' % data}

    # The response already uses our canonical key names, so copy them straight over
    return {'block': {key: data[key] for key in required_keys}}
def process_ipfs_hash(self, ipfs_hash):
    """Retrieve the json data behind an IPFS hash and store it on self.json.

    If an IPFS object was supplied with the request, it is first re-added to the
    local IPFS node to verify that its hash matches the given one.

    :param ipfs_hash: The IPFS hash (cid) containing the json data
    """
    LOG.info('Retrieving IPFS object')

    if self.ipfs_object is not None:
        LOG.info(
            'IPFS object given with request, uploading data to local IPFS node to check that hashes are equal'
        )
        uploaded_hash = add_json(data=self.ipfs_object)
        if ipfs_hash != uploaded_hash:
            LOG.error(
                'Supplied object does not correspond to the given IPFS hash: %s != %s'
                % (ipfs_hash, uploaded_hash))
            return

    try:
        ipfs_data = get_json(cid=ipfs_hash)
        if isinstance(ipfs_data, dict):
            self.json = ipfs_data
        elif isinstance(ipfs_data, str):
            # A string payload is expected to be json itself
            self.json = simplejson.loads(ipfs_data)
        else:
            raise Exception(
                'IPFS hash does not contain a dict or a json string: %s -> %s'
                % (ipfs_hash, ipfs_data))
        LOG.info('Message contains json data: %s' % self.json)
    except Exception as ex:
        LOG.error('IPFS hash does not contain valid json data: %s' % ex)
        return
def configure(self, **config):
    """Configure the Dead Man's Switch trigger with the given config settings.

    Recognized keys: 'timeout', 'warning_email', 'phase', 'activation_time'
    and 'reset'; each value is applied only if it passes its validator.

    :param config: Keyword arguments with the configuration values
    """
    super(DeadMansSwitchTrigger, self).configure(**config)

    if 'timeout' in config and valid_amount(config['timeout']):
        self.timeout = config['timeout']

    if 'warning_email' in config and valid_email(config['warning_email']):
        self.warning_email = config['warning_email']

    if 'phase' in config and valid_phase(config['phase']):
        self.phase = config['phase']

    if 'activation_time' in config and valid_timestamp(
            config['activation_time']):
        self.activation_time = config['activation_time']

    if 'reset' in config and config['reset'] is True:
        self.triggered = False
        self.status = 'Active'

        # Reset a Dead Man's Switch trigger if needed
        # NOTE(review): the reset of the activation time is read as belonging to the
        # 'reset' branch (it only makes sense when an explicit reset was requested) —
        # confirm against the original formatting
        if self.activation_time is not None and self.timeout is not None and self.phase >= 1:
            # Push the activation time forward by the configured timeout
            self.activation_time = int(time.time()) + self.timeout
            self.phase = 1
            LOG.info(
                "Dead Man's Switch %s has been reset, will activate in %s seconds on %s"
                % (self.id, self.timeout, datetime.fromtimestamp(self.activation_time)))
def get_prime_input_address(self, txid):
    """Get the prime input address (alphabetically first input address) of a
    transaction from the explorer at self.url.

    :param txid: The transaction id
    :return: A dict with key 'prime_input_address' on success or 'error' on failure
    """
    url = self.url + '/tx/' + str(txid)
    try:
        LOG.info('GET %s' % url)
        r = requests.get(url)
        data = r.json()
    except Exception as ex:
        LOG.error(
            'Unable to get prime input address of transaction %s from %s: %s'
            % (txid, self.url, ex))
        return {
            'error': 'Unable to get prime input address of transaction %s from %s'
                     % (txid, self.url)
        }

    if 'vin' in data:
        tx_inputs = data['vin']
        # BUG FIX: coinbase inputs have no 'addr' key, which previously raised an
        # uncaught KeyError; skip them instead (consistent with the other explorers)
        input_addresses = [tx_input['addr'] for tx_input in tx_inputs if 'addr' in tx_input]
        if len(input_addresses) > 0:
            prime_input_address = sorted(input_addresses)[0]
            return {'prime_input_address': prime_input_address}
        # transaction was a coinbase transaction, so there are no input addresses
        return {'prime_input_address': None}

    return {'error': 'Received invalid data: %s' % data}
def construct_transaction_inputs(self):
    """
    Construct a list of dict object containing the necessary information for the inputs of a transaction

    :return: A list of dicts containing the following keys for each utxo:
             'address', 'value', 'output' and 'confirmations'
    """
    if self.unspent_outputs is not None and len(self.unspent_outputs) > 0:
        LOG.info('Found %s utxos for address %s' % (len(self.unspent_outputs), self.sending_address))
    else:
        LOG.error('No utxos found for address %s' % self.sending_address)
        # BUG FIX: without this return, iterating over a None value of
        # self.unspent_outputs below would raise a TypeError
        return []

    # Construct the transaction inputs
    tx_inputs = [
        {
            'address': utxo.address,
            'value': utxo.value,
            'output': utxo.output,  # output needs to be formatted as txid:i
            'confirmations': utxo.confirmations
        } for utxo in self.unspent_outputs
    ]

    return tx_inputs
def get_utxos(self, address, confirmations=3):
    """Get the utxos of an address from the explorer at self.url.

    :param address: The address
    :param confirmations: Minimum number of confirmations required (default=3)
    :return: A dict with key 'utxos' (sorted by confirmations, hash and index) on
             success or 'error' on failure
    """
    url = self.url + '/addrs/' + address + '/utxo?noCache=1'
    try:
        LOG.info('GET %s' % url)
        response = requests.get(url)
        data = response.json()
    except Exception as ex:
        LOG.error('Unable to get utxos of address %s from %s: %s' % (address, url, ex))
        return {
            'error': 'Unable to get utxos of address %s from %s' % (address, url)
        }

    required_keys = ('confirmations', 'txid', 'vout', 'satoshis', 'scriptPubKey')

    utxos = []
    for output in data:
        # Skip malformed entries and utxos below the confirmation threshold
        if not all(key in output for key in required_keys):
            continue
        if output['confirmations'] < confirmations:
            continue
        utxos.append({'confirmations': output['confirmations'],
                      'output_hash': output['txid'],
                      'output_n': output['vout'],
                      'value': output['satoshis'],
                      'script': output['scriptPubKey']})

    utxos.sort(key=lambda utxo: (utxo['confirmations'], utxo['output_hash'], utxo['output_n']))
    return {'utxos': utxos}
def log_transaction_info(self, tx_inputs, tx_outputs):
    """
    Write information about the transaction in the logs

    :param tx_inputs: The transaction inputs
    :param tx_outputs: The transaction outputs
    """
    if self.amount == 0:
        LOG.info('New %s transaction: sending ALL available funds' % self.transaction_type)
    else:
        LOG.info('New %s transaction: sending %s satoshis' % (self.transaction_type, self.amount))

    for item in tx_inputs:
        LOG.info('INPUT: %s -> %s (%s)' % (item['address'], item['value'], item['output']))

    for item in tx_outputs:
        LOG.info('OUTPUT: %s -> %s' % (item['address'], item['value']))

    # The OP_RETURN output, if any, is logged separately
    if self.op_return_data is not None:
        LOG.info('OUTPUT: OP_RETURN -> %s' % self.op_return_data)
def get_block_by_hash(self, block_hash):
    """Get a block by its hash via the Blocktrail.com API.

    :param block_hash: The hash of the block
    :return: A dict with key 'block' on success or 'error' on failure
    """
    url = '{api_url}/block/{hash}'.format(api_url=self.url, hash=block_hash)
    try:
        LOG.info('GET %s' % url)
        r = requests.get(url)
        data = r.json()
    except Exception as ex:
        LOG.error('Unable to get block %s from Blocktrail.com: %s' % (block_hash, ex))
        return {
            'error': 'Unable to get block %s from Blocktrail.com' % block_hash
        }

    # .get() instead of data['data'] so a response without a 'data' key is reported
    # as invalid data below instead of raising an uncaught KeyError
    data = data.get('data') or {}

    if all(key in data for key in ('height', 'hash', 'timestamp', 'mrkl_root', 'size')):
        block = {
            'height': data['height'],
            'hash': data['hash'],
            'time': data['timestamp'],
            'merkleroot': data['mrkl_root'],
            'size': data['size']
        }
        return {'block': block}
    else:
        return {'error': 'Received invalid data: %s' % data}
def get_block_by_height(self, height):
    """Get a block by its height via the Blockchain.info API.

    :param height: The height of the block
    :return: A dict with key 'block' on success or 'error' on failure
    """
    url = '{api_url}/block-height/{height}?format=json'.format(
        api_url=self.url, height=height)
    try:
        LOG.info('GET %s' % url)
        response = requests.get(url)
        data = response.json()
    except Exception as ex:
        LOG.error('Unable to get block %s from Blockchain.info: %s' % (height, ex))
        return {
            'error': 'Unable to get block %s from Blockchain.info' % height
        }

    # The api returns every block at this height; pick the one on the main chain
    for candidate in data.get('blocks', []):
        if candidate['main_chain'] is True and candidate['height'] == height:
            return {'block': {'height': candidate['height'],
                              'hash': candidate['hash'],
                              'time': candidate['time'],
                              'merkleroot': candidate['mrkl_root'],
                              'size': candidate['size']}}

    return {'error': 'Received invalid data: %s' % data}
def configure(self, **config):
    """Configure the trigger with the given config settings.

    Each recognized key is applied only when its value passes the matching
    validator; unknown or invalid values are silently ignored.

    :param config: Keyword arguments with the configuration values
    """
    def _given(key, validator):
        # True when the key is present in the config and its value is valid
        return key in config and validator(config[key])

    self.created = datetime.fromtimestamp(
        config['created']) if 'created' in config else datetime.now()

    if _given('trigger_type', valid_trigger_type):
        self.trigger_type = config['trigger_type']

    if _given('script', valid_script):
        self.script = config['script']

    if 'data' in config and isinstance(config['data'], dict):
        self.data = config['data']

    if 'multi' in config and config['multi'] in [True, False]:
        self.multi = config['multi']

    if _given('status', valid_status):
        self.status = config['status']

    # An explicit reset takes precedence over a supplied 'triggered' count
    if config.get('reset') is True:
        self.triggered = 0
        self.status = 'Active'
    elif _given('triggered', valid_amount):
        self.triggered = config['triggered']

    if _given('description', valid_description):
        self.description = config['description']

    if _given('creator_name', valid_creator):
        self.creator_name = config['creator_name']

    if _given('creator_email', valid_email):
        self.creator_email = config['creator_email']

    if _given('youtube', valid_youtube_id):
        self.youtube = config['youtube']

    if _given('visibility', valid_visibility):
        self.visibility = config['visibility']

    if _given('actions', valid_actions):
        self.actions = config['actions']
        # Warn about actions that are not configured (the trigger is stored anyway)
        configured_actions = get_actions()
        for action_id in self.actions:
            if action_id not in configured_actions:
                LOG.warning('Trigger %s contains unknown action: %s' % (self.id, action_id))

    if _given('self_destruct', valid_timestamp):
        self.self_destruct = config['self_destruct']

    if 'destruct_actions' in config and config['destruct_actions'] in [True, False]:
        self.destruct_actions = config['destruct_actions']
def activate(self):
    """Activate the recurring trigger and schedule its next activation.

    The next activation is only scheduled when it does not fall after the
    configured end time.
    """
    super(RecurringTrigger, self).activate()

    next_fits = self.end_time is None or self.next_activation + self.interval <= self.end_time
    if next_fits:
        # Todo what if trigger was activated after interval has passed??
        self.next_activation += self.interval
        LOG.info('Setting next activation of recurring trigger %s to %s'
                 % (self.id, datetime.fromtimestamp(self.next_activation)))
        self.save()
def check_ipfs():
    """Check that a connection to the IPFS node is available, reconnecting if needed.

    :return: True when connected to IPFS, False otherwise
    """
    if IPFS_API is not None:
        return True

    # Logger.warn() is a deprecated alias of warning() (assumes LOG is a
    # standard logging.Logger — TODO confirm)
    LOG.warning('Not connected to IPFS, trying to reconnect')
    connect_to_ipfs()
    return IPFS_API is not None
def decorated_function(*args, **kwargs):
    """Run the wrapped function and log its runtime in seconds."""
    started = time.time()
    result = f(*args, **kwargs)
    finished = time.time()
    LOG.info('Script runtime: %s seconds' % (finished - started))
    return result
def exit_with_error(self, message):
    """
    Log an error message and set the http response with the same error message

    :param message: The error message
    """
    LOG.error(message)
    # The same message is handed back to the client as the http response body
    self.http_response = {'error': message}
def __exit__(self, exc_type, exc_val, exc_tb):
    """Commit pending data when requested, then close the cursor and connection."""
    # Make sure data is committed to the database
    if self.commit is True:
        LOG.info('Committing data')
        self.cnx.commit()

    LOG.info('Closing mysql cursor')
    self.cursor.close()
    self.cnx.close()
def push_tx(tx):
    """Broadcast a raw transaction via the Blockchain.info explorer.

    BTC.com has no broadcast endpoint, so the call is delegated.

    :param tx: The raw transaction to broadcast
    :return: The result of the Blockchain.info push_tx call
    """
    # Must do import here to avoid circular import
    from data.data import get_explorer_api

    LOG.warning(
        'BTC.com api does not support broadcasting transactions, using Blockchain.info instead!'
    )
    return get_explorer_api('blockchain.info').push_tx(tx)
def add_file(filename):
    """Add a file to IPFS.

    :param filename: Path of the file to add
    :return: A tuple (hash, name, size) describing the stored file
    :raises Exception: 'IPFS failure' when the file could not be stored
    """
    global IPFS_API
    try:
        info = IPFS_API.add(filename)
    except Exception as ex:
        LOG.error('Unable to store file on IPFS: %s' % ex)
        raise Exception('IPFS failure')

    return info['Hash'], info['Name'], info['Size']
def add_str(string):
    """Add a string to IPFS.

    :param string: The string to store
    :return: The cid of the stored string as a str
    :raises Exception: 'IPFS failure' when the string could not be stored
    """
    global IPFS_API
    try:
        cid = IPFS_API.add_str(string=string)
    except Exception as e:
        LOG.error('Unable to store string on IPFS: %s' % e)
        raise Exception('IPFS failure')

    # str() instead of calling the __str__ dunder directly (idiomatic, same result)
    return str(CID(cid))
def run(self):
    """
    Run the action

    :return: True upon success, False upon failure
    """
    LOG.info('Allowing reveal of RevealSecret action %s' % self.id)
    # Flip the flag and persist it
    self.allow_reveal = True
    self.save()
    return True
def __enter__(self):
    """Open the MySQL connection and return a cursor for use inside the with-block."""
    LOG.info('Creating mysql cursor to database %s @ %s:%s' % (self.database, self.host, self.port))
    connection_settings = {'user': self.user,
                           'password': self.password,
                           'database': self.database,
                           'host': self.host,
                           'port': self.port}
    self.cnx = mysql.connector.connect(**connection_settings)
    self.cursor = self.cnx.cursor()
    return self.cursor
def get_transaction(self, txid):
    """Get the details of a transaction via the BTC.com API.

    :param txid: The transaction id
    :return: A dict with key 'transaction' (a json-encodable TX) on success or
             'error' on failure
    """
    url = '{api_url}/tx/{txid}?verbose=3'.format(api_url=self.url, txid=txid)
    try:
        LOG.info('GET %s' % url)
        r = requests.get(url)
        data = r.json()
    except Exception as ex:
        LOG.error('Unable to get transaction %s from BTC.com: %s' % (txid, ex))
        return {
            'error': 'Unable to get transaction %s from BTC.com' % txid
        }

    # .get() instead of data['data'] so a response without a 'data' key doesn't
    # raise an uncaught KeyError here
    data = data.get('data') or {}

    # todo check key names , test by setting testnet wrong on explorers
    tx = TX()
    tx.txid = txid
    tx.wtxid = data['witness_hash']
    tx.lock_time = data['lock_time']
    tx.block_height = data['block_height'] if 'block_height' in data and data['block_height'] != -1 else None
    tx.confirmations = data['confirmations'] if 'confirmations' in data else None

    for item in data['inputs']:
        tx_input = TxInput()
        tx_input.address = item['prev_addresses'][0] if len(item['prev_addresses']) > 0 else None
        tx_input.value = item['prev_value']
        tx_input.txid = item['prev_tx_hash']
        # BUG FIX: '!= -1' instead of 'is not -1' — identity comparison with an int
        # literal is implementation-dependent and a SyntaxWarning on Python >= 3.8
        tx_input.n = item['prev_position'] if item['prev_position'] != -1 else None
        tx_input.script = item['script_hex']
        tx_input.sequence = item['sequence']
        tx.inputs.append(tx_input)

    for i, item in enumerate(data['outputs']):
        tx_output = TxOutput()
        tx_output.address = item['addresses'][0] if len(item['addresses']) > 0 else None
        tx_output.value = item['value']
        tx_output.n = i
        tx_output.spent = False if item['spent_by_tx'] is None else True
        tx_output.script = item['script_hex']
        # '6a' is the OP_RETURN opcode
        if item['script_hex'][:2] == '6a':
            tx_output.op_return = tx.decode_op_return(item['script_hex'])
        tx.outputs.append(tx_output)

    return {'transaction': tx.json_encodable()}
def get_utxos(self, address, confirmations=3):
    """Get the utxos of an address via the Blockstream.info API.

    :param address: The address
    :param confirmations: Minimum number of confirmations required (default=3)
    :return: A dict with key 'utxos' (sorted by confirmations, hash and index) on
             success or 'error' on failure
    """
    url = self.url + '/blocks/tip/height'
    LOG.info('GET %s' % url)
    try:
        r = requests.get(url)
        latest_block_height = int(r.text)
    except Exception as ex:
        LOG.error('Unable to get latest block_height from Blockstream.info: %s' % ex)
        return {
            'error': 'Unable to get latest block_height from Blockstream.info'
        }

    url = self.url + '/address/{address}/utxo'.format(address=address)
    LOG.info('GET %s' % url)
    try:
        r = requests.get(url)
        data = r.json()
    except Exception as ex:
        LOG.error('Unable to get address utxos for %s from Blockstream.info: %s' % (address, ex))
        return {
            'error': 'Unable to get utxos info for %s from Blockstream.info' % address
        }

    LOG.info('Got %s utxos' % len(data))

    utxos = []
    for output in data:
        # BUG FIX: the computed confirmation count used to be assigned to the name
        # 'confirmations', shadowing the threshold parameter and making the filter
        # below compare the value against itself (always True)
        n_confirmations = latest_block_height - int(
            output['status']['block_height']
        ) + 1 if output['status']['confirmed'] is True else 0

        utxo = {
            'confirmations': n_confirmations,
            'output_hash': output['txid'],
            'output_n': output['vout'],
            'value': output['value'],
            'script': None
        }  # Blockstream.info does not provide the script for utxos

        if utxo['confirmations'] >= confirmations:
            utxos.append(utxo)

    return {
        'utxos': sorted(utxos,
                        key=lambda k: (k['confirmations'], k['output_hash'], k['output_n']))
    }
def get_transaction(self, txid):
    """Get the details of a transaction from the explorer at self.url.

    :param txid: The transaction id
    :return: A dict with key 'transaction' (a json-encodable TX) on success or
             'error' on failure
    """
    url = self.url + '/tx/' + str(txid)
    try:
        LOG.info('GET %s' % url)
        r = requests.get(url)
        data = r.json()
    except Exception as ex:
        LOG.error('Unable to get transaction %s from %s: %s' % (txid, self.url, ex))
        return {
            'error': 'Unable to get transaction %s from %s' % (txid, self.url)
        }

    tx = TX()
    tx.txid = txid
    tx.block_height = data['blockheight'] if 'blockheight' in data else None
    tx.lock_time = data['locktime']

    for item in data['vin']:
        tx_input = TxInput()
        # Keys may be absent (e.g. on coinbase inputs), so fall back to None/0
        tx_input.address = item['addr'] if 'addr' in item else None
        tx_input.value = item['valueSat'] if 'valueSat' in item else 0
        tx_input.txid = item['txid'] if 'txid' in item else None
        # A coinbase input has no previous output index
        tx_input.n = item['n'] if 'coinbase' not in item else None
        tx_input.script = item['scriptSig']['hex'] if 'scriptSig' in item else None
        # For coinbase inputs the coinbase data takes the place of the script
        if 'coinbase' in item:
            tx_input.script = item['coinbase']
        tx_input.sequence = item['sequence']
        tx.inputs.append(tx_input)

    for item in data['vout']:
        tx_output = TxOutput()
        tx_output.address = item['scriptPubKey']['addresses'][0] if 'addresses' in item['scriptPubKey'] else None
        # Value is converted to satoshis (presumably reported in whole coins — TODO confirm)
        tx_output.value = int(float(item['value']) * 1e8)
        tx_output.n = item['n']
        tx_output.spent = True if 'spentTxId' in item and item['spentTxId'] is not None else False
        tx_output.script = item['scriptPubKey']['hex']
        # '6a' is the OP_RETURN opcode
        if item['scriptPubKey']['hex'][:2] == '6a':
            tx_output.op_return = tx.decode_op_return(item['scriptPubKey']['hex'])
        tx.outputs.append(tx_output)

    tx.confirmations = data['confirmations'] if 'confirmations' in data else None

    return {'transaction': tx.json_encodable()}
def load_script(self):
    """Load and instantiate the Spellbook Script configured on this trigger.

    The script file is looked up under the 'spellbookscripts' and 'apps'
    directories, imported as a module, and its class (named after the file)
    is instantiated with the trigger's script variables.

    :return: An instance of the script class, or None when the script is
             missing, fails to import, or is not a SpellbookScript
    """
    if self.script is not None:
        if not valid_script(self.script):
            return

        script_name = self.script[:-3]  # script name without the .py extension
        script_path = None
        script_module_name = None

        # Search for the script in the allowed root directories
        for root_dir in ['spellbookscripts', 'apps']:
            if os.path.isfile(os.path.join(root_dir, self.script)):
                script_path = os.path.join(root_dir, self.script)
                # Convert the file path into a dotted module name; the path
                # separator to replace depends on the platform
                if platform.system() == 'Windows':
                    script_module_name = '%s.%s' % (root_dir, script_name.replace('\\', '.'))
                elif platform.system() == 'Linux':
                    script_module_name = '%s.%s' % (root_dir, script_name.replace('/', '.'))
                else:
                    raise NotImplementedError(
                        'Unsupported platform: only windows and linux are supported'
                    )

        if script_path is None:
            LOG.error('Can not find spellbook script %s' % self.script)
            return

        LOG.info('Loading Spellbook Script %s' % script_path)
        LOG.info('Script module: %s (%s)' % (script_module_name, type(script_module_name)))
        try:
            script_module = importlib.import_module(script_module_name)
        except Exception as ex:
            LOG.error('Failed to load Spellbook Script %s: %s' % (script_path, ex))
            return

        # The class inside the module must be named after the script file
        script_class_name = os.path.basename(script_path)[:-3]
        spellbook_script = getattr(script_module, script_class_name)

        kwargs = self.get_script_variables()
        script = spellbook_script(**kwargs)
        if not isinstance(script, SpellbookScript):
            LOG.error(
                'Script %s is not a valid Spellbook Script, instead it is a %s'
                % (self.script, type(script)))
            return

        return script
def check_triggers(trigger_id=None):
    """Check all active triggers and activate those whose conditions are fulfilled.

    Triggers that have reached their self-destruct time are deleted, together
    with their attached actions when destruct_actions is set.

    :param trigger_id: Optional id to check only that specific trigger
    :return: An error dict when the given trigger_id is unknown, None otherwise
    """
    # Get a list of all trigger_ids that are configured
    triggers = get_triggers()

    # If a trigger_id is given, only check that specific trigger
    if trigger_id is not None:
        if trigger_id not in triggers:
            return {'error': 'Unknown trigger id: %s' % trigger_id}
        triggers = [trigger_id]

    for current_id in triggers:
        trigger = get_trigger(trigger_id=current_id)

        if trigger.status == 'Active':
            LOG.info('Checking conditions of trigger %s' % current_id)
            if trigger.conditions_fulfilled() is True:
                trigger.activate()

        if trigger.self_destruct is not None and trigger.self_destruct <= int(time.time()):
            LOG.info('Trigger %s has reached its self-destruct time' % current_id)

            # Also destruct any attached actions if needed
            if trigger.destruct_actions is True:
                for action_id in trigger.actions:
                    LOG.info('Deleting action %s' % action_id)
                    delete_action(action_id=action_id)

            LOG.info('Deleting trigger %s' % current_id)
            delete_trigger(trigger_id=current_id)
            continue
def conditions_fulfilled(self):
    """Check whether the recurring trigger should activate.

    Marks the trigger as 'Succeeded' when its end time has passed.

    :return: True when the next activation time has been reached (and the end
             time, if any, has not passed), False otherwise
    """
    # Not configured yet
    if self.interval is None or self.begin_time is None:
        return False

    if self.end_time is None:
        # No end time: only the next activation time matters
        return self.next_activation <= int(time.time())

    if self.end_time <= int(time.time()):
        LOG.info('Recurring trigger %s has reached its end time' % self.id)
        self.status = 'Succeeded'
        self.save()
        return False

    return self.next_activation <= int(time.time()) <= self.end_time