def setup_json_state_file(location: str, notes_folder: str) -> None:
    """Create or refresh the JSON state file that tracks note hashes.

    The main orchestrator of the state file mechanics. This method must be
    idempotent: running it repeatedly only updates entries for files whose
    content hash has changed.

    Args:
        location (str): The relative or absolute location of the folder
            that contains the JSON state file.
        notes_folder (str): The relative or absolute location of the
            folder that contains all of your markdown notes.
    """
    state_file: dict = util.read_existing_json_state_file(location=location)
    now_str: str = datetime.utcnow().strftime(DATE_TIME_FORMAT)

    # record current script runtime
    state_file['runtime'] = now_str

    # ensure that the files section of the state file exists
    state_file.setdefault('files', {})

    # ensure that file data is up to date
    for file_name_ in os.listdir(notes_folder):
        if not util.is_md(file_name_):
            continue

        file_path: str = util.path(notes_folder, file_name_)
        key: str = util.strip_file_extension(file_name_)
        current_file_hash: str = util.sha256(file_path)

        if key not in state_file['files']:
            # new file: populate its metadata
            # (plain assignments here — the original used annotated
            # subscript assignments, which are no-ops and misleading)
            logger.info(f'adding new key in files: {key}')
            state_file['files'][key] = {
                'sha256': current_file_hash,
                'last_checked': now_str,
            }
        elif current_file_hash != state_file['files'][key]['sha256']:
            # the file was modified since we last checked it (which we
            # know has happened because the hash has changed)
            logger.info(f'updating changed key: {key}')
            state_file['files'][key]['sha256'] = current_file_hash
            state_file['files'][key]['last_checked'] = now_str

    # save the new state of the JSON file to disk so that we can use it
    # the next time the script is run
    util.persist_json(state_file, location)
def parse_Transaction(vds, has_nTime=False): d = {} # prepare clean, non-segwit transaction d['__data_truncated__'] = "" start_pos = vds.read_cursor pos_current = start_pos d['version'] = vds.read_int32() d['__data_truncated__'] += vds.input[pos_current:vds.read_cursor] if has_nTime: d['nTime'] = vds.read_uint32() # check if optional flag is present segwit = ord(vds.input[vds.read_cursor]) == 0 and ord(vds.input[vds.read_cursor+1]) == 1 if segwit: vds.read_cursor += 2 pos_current = vds.read_cursor n_vin = vds.read_compact_size() d['txIn'] = [] for _ in xrange(n_vin): d['txIn'].append(parse_TxIn(vds)) n_vout = vds.read_compact_size() d['txOut'] = [] for _ in xrange(n_vout): d['txOut'].append(parse_TxOut(vds)) d['__data_truncated__'] += vds.input[pos_current:vds.read_cursor] d['segwit_stack'] = [] if segwit: # If segwit flag is present, each txin is associated with a witness field for _ in xrange(n_vin): # read stack size for current txin, might be 0 stack_items = vds.read_compact_size() for _ in xrange(stack_items): d['segwit_stack'].append(parse_segwit(vds)) pos_current = vds.read_cursor d['lockTime'] = vds.read_uint32() d['__data_truncated__'] += vds.input[pos_current:vds.read_cursor] d['__data__'] = vds.input[start_pos:vds.read_cursor] # Print readable hash, reversed endian: # print "hash plain:", sha256(sha256(d['__data_truncated__']))[::-1].encode('hex') d['hash_truncated'] = sha256(sha256(d['__data_truncated__'])) return d
def new(self):
    """Generate a fresh SECP256k1 keypair and return a new instance.

    Builds a Base58Check-encoded address ("\\x00" prefix) and a
    "\\x80"-prefixed Base58Check private key, each suffixed with a
    4-byte double-SHA256 checksum. (Python 2: keys handled as byte
    strings.)
    """
    signing_key = ecdsa.SigningKey.generate(curve=ecdsa.curves.SECP256k1)
    raw_priv = signing_key.to_string()

    # Address payload: 0x00 || RIPEMD160(SHA256(0x04 || pubkey point)),
    # then append the 4-byte double-SHA256 checksum before encoding.
    point_bytes = "\x04" + signing_key.get_verifying_key().to_string()
    raw_pub = "\x00" + util.ripemd160(util.sha256(point_bytes))
    raw_pub = raw_pub + util.sha256(util.sha256(raw_pub))[:4]
    encoded_pub = util.b58encode(raw_pub)

    # Private key: 0x80 || raw key, with the same checksum scheme.
    priv_payload = "\x80" + raw_priv
    encoded_priv = util.b58encode(
        priv_payload + util.sha256(util.sha256(priv_payload))[:4])

    return self(encoded_pub, encoded_priv, raw_pub, raw_priv)
def merkle(self, hashlist):
    """Compute the merkle root of a list of hex-encoded hashes.

    Adjacent hashes are paired, each pair's raw bytes concatenated and
    hashed, and the function recurses on the parent level until a
    single hash remains. With an odd count, the last hash is paired
    with itself (the standard Bitcoin merkle rule).

    Args:
        hashlist: list of hex-digest strings.

    Returns:
        The merkle root digest; the single element for a one-item
        list; 0 for an empty list.
    """
    if not hashlist:
        return 0
    if len(hashlist) == 1:
        return hashlist[0]
    # BUG FIX: the original appended the duplicated last hash to the
    # *input* list only after the pairing loop had already run, so for
    # odd-length levels the last element was silently dropped from the
    # parent level (it also hashed the hex strings directly instead of
    # their raw bytes, and mutated the caller's list). Duplicate the
    # last element on a copy *before* pairing instead.
    if len(hashlist) % 2 == 1:
        hashlist = hashlist + [hashlist[-1]]
    parent_level = [
        sha256(bytes.fromhex(hashlist[i]) + bytes.fromhex(hashlist[i + 1]))
        for i in range(0, len(hashlist), 2)
    ]
    return self.merkle(parent_level)
def parse(self, r):
    """Parse a serialized BIP70 payment request from raw bytes `r`.

    Populates self.id, self.details, self.outputs, self.memo and
    self.payment_url, and switches the global NetworkConstants to the
    network named in the request. On failure, sets self.error and
    returns without raising.
    """
    if self.error:
        return
    self.id = bh2u(util.sha256(r)[0:16])
    try:
        self.data = pb2.PaymentRequest()
        self.data.ParseFromString(r)
    except Exception:
        # narrowed from a bare `except:` so SystemExit and
        # KeyboardInterrupt are no longer swallowed; any protobuf
        # decode failure still lands here
        self.error = "cannot parse payment request"
        return
    self.details = pb2.PaymentDetails()
    self.details.ParseFromString(self.data.serialized_payment_details)
    if self.details.network == 'test':
        NetworkConstants.set_testnet()
    elif self.details.network == 'main':
        NetworkConstants.set_mainnet()
    else:
        self.error = "unknown network " + self.details.network
        return
    # outputs as (type, address, amount) triples
    self.outputs = []
    for o in self.details.outputs:
        out_type, addr = util.get_address_from_output_script(o.script)
        self.outputs.append((out_type, addr, o.amount))
    self.memo = self.details.memo
    self.payment_url = self.details.payment_url
def perform_flush(self):
    """Flush all children, then store this node's serialized data under
    its content-derived block id.

    No-op (returns None) when the node is not loaded; otherwise clears
    the dirty flag and returns whatever set_block returns.
    """
    if not self.is_loaded:
        return
    self.dirty = False
    # flush children before serializing this node
    for child_node in self.children:
        child_node.flush()
    payload = self.to_data()
    bid = sha256(self.forest.storage.block_id_key, *payload)
    return self.set_block(bid, payload)
def get_hash(self):
    """Hash of this transaction: SHA-256 over the ASCII-encoded JSON of
    its signature plus a body of public key, inflows and outflows."""
    body = {
        "public_key": self.public_key,
        "inflows": [flow.serialize() for flow in self.inflows],
        "outflows": [flow.serialize() for flow in self.outflows],
    }
    payload = {"signature": self.signature, "body": body}
    return sha256(json.dumps(payload).encode("ascii"))
def sha256(sha, message, name=None):
    """Verify that `sha` matches the SHA256 digest of `message`.

    Args:
        sha (str): A SHA256 hash result.
        message (str): Message to hash and compare to.
        name (str, optional): Label naming the message in the error.

    Raises:
        Exception: If the digest of `message` differs from `sha`.
            (Kept as the generic Exception for caller compatibility.)
    """
    # `!=` replaces the original `if not sha == ...` anti-idiom
    if sha != util.sha256(message):
        raise Exception(
            'SHA256 does not match message'
            if name is None
            else 'SHA256 of %s does not match hash' % name)
def perform_flush(self, *, in_inode=True):
    # Persist this node's (entry_type, block_data) pair under its
    # content-hash block id. Returns True when a (re)store happened,
    # None when the stored id is already current.
    assert self.block_data is not None
    bd = (self.entry_type, self.block_data)
    bid = sha256(self.forest.storage.block_id_key, *bd)
    if self.block_id == bid:
        # content unchanged: already stored under this id, nothing to do
        return
    self.forest.storage.refer_or_store_block(bid, bd)
    self.block_id = bid
    if in_inode:
        self.forest.storage.release_block(bid)
        # we SHOULD be fine, as we are INode.node
        # -> block_id does not disappear even in refcnt 0 immediately.
    # drop the in-memory payload now that it is persisted
    del self.block_data
    return True
def send_message(self, message, server):
    """ sends a message to the server """
    # Python 2 module (print statements). Flow: fetch a nonce for the
    # target server (presumably for replay protection -- confirm in
    # get_nonce), hash the message, have the TPM attest to
    # (message_hash, nonce), then ship message + attestation onward.
    print "UA: I want to send %r to server" % message
    nonce = self.get_nonce(server)
    message_hash = util.sha256(message)
    print "UA: generating sha256 of message %r: %s" % (message, message_hash)
    print "UA: asking TPM for attestation of me with additional info (%s, %d)" % (message_hash, nonce)
    auth_message, signature = self.tpm.attest(self, message_hash, nonce)
    print "UA: got auth_message and signature"
    self.send_to_server(server, message, auth_message, signature)
def rsa(public_key, signature, message):
    """Verifies an RSA signature.

    Args:
        public_key (str): Public key with BEGIN and END sections.
        signature (str): Hex value of the signature with its leading 0x stripped.
        message (str): Message that was signed, unhashed.

    Raises:
        Exception: If the signature does not verify.
    """
    try:
        # NOTE(review): bytes(public_key) only works on a byte-string
        # (Python 2); under Python 3 a str would need an explicit
        # encoding -- confirm the intended runtime.
        public_rsa = load_pem_public_key(bytes(public_key),
                                         backend=default_backend())
        hashed = util.sha256(message)
        # NOTE(review): verify() hashes its data argument with the
        # given algorithm, so passing util.sha256(message) here means
        # the signature must cover SHA256(sha256-digest) -- confirm the
        # signer does the same, or that Prehashed was intended.
        public_rsa.verify(
            binascii.unhexlify(signature),
            hashed,
            padding.PSS(mgf=padding.MGF1(hashes.SHA256()),
                        salt_length=padding.PSS.MAX_LENGTH),
            hashes.SHA256())
    except InvalidSignature:
        raise Exception('Invalid signature')
def get_hash(self):
    """Hash of this block header: SHA-256 over the concatenated raw
    bytes of prev_block, magic_num and merkleroot (each stored as a
    hex string)."""
    fields = (self.prev_block, self.magic_num, self.merkleroot)
    header_bytes = b"".join(bytes.fromhex(field) for field in fields)
    return sha256(header_bytes)
def transaction_hash(chain, binary_tx):
    """Return util.sha256 of the serialized transaction `binary_tx`.

    `chain` is accepted for interface compatibility with other hash
    dispatchers but is not used by this implementation.
    """
    return util.sha256(binary_tx)
def evaluate(commands, z, witness):
    """Execute a Bitcoin Script.

    Runs `commands` against signature hash `z`, applying the special
    p2sh, p2wpkh and p2wsh rules (the latter two consuming `witness`).
    Returns True when the script leaves a truthy top stack element,
    False on any failure.
    """
    cmds = commands[:]  # create a copy as we may need to add to this list if we have a redeem_script
    stack = []
    altstack = []
    while len(cmds) > 0:
        cmd = cmds.pop(0)
        if type(cmd) == int:
            # do what the opcode says
            operation = op.OP_CODE_FUNCTIONS[cmd]
            if cmd in (99, 100):
                # op_if/op_notif require the cmds array
                if not operation(stack, cmds):
                    LOGGER.info('bad op: {}'.format(op.OP_CODE_NAMES[cmd]))
                    return False
            elif cmd in (107, 108):
                # op_toaltstack/op_fromaltstack require the altstack
                if not operation(stack, altstack):
                    LOGGER.info('bad op: {}'.format(op.OP_CODE_NAMES[cmd]))
                    return False
            elif cmd in (172, 173, 174, 175):
                # these are signing operations, they need a sig_hash
                # to check against
                if not operation(stack, z):
                    LOGGER.info('bad op: {}'.format(op.OP_CODE_NAMES[cmd]))
                    return False
            else:
                if not operation(stack):
                    LOGGER.info('bad op: {}'.format(op.OP_CODE_NAMES[cmd]))
                    return False
        else:
            # add the cmd (a data element) to the stack
            stack.append(cmd)
            # p2sh rule. if the next three cmds are:
            # OP_HASH160 <20 byte hash> OP_EQUAL this is the RedeemScript
            # OP_HASH160 == 0xa9 and OP_EQUAL == 0x87
            if len(cmds) == 3 and cmds[0] == 0xa9 \
                    and type(cmds[1]) == bytes and len(cmds[1]) == 20 \
                    and cmds[2] == 0x87:
                # we execute the next three opcodes
                cmds.pop()
                h160 = cmds.pop()
                cmds.pop()
                if not op.op_hash160(stack):
                    return False
                stack.append(h160)
                if not op.op_equal(stack):
                    return False
                # final result should be a 1
                if not op.op_verify(stack):
                    LOGGER.info('bad p2sh h160')
                    return False
                # `cmd` is the redeem script element just pushed; re-serialize
                # it with its length prefix and parse it into opcodes
                raw_redeem_script = util.encode_varint(len(cmd)) + cmd
                _, redeem_script = parse(BytesIO(raw_redeem_script))
                cmds.extend(redeem_script)
            # witness program version 0 rule. if stack cmds are
            # [0 <20 byte hash>] this is p2wpkh
            if len(stack) == 2 and stack[0] == b'' and len(stack[1]) == 20:
                h160 = stack.pop()
                stack.pop()
                cmds.extend(witness)
                cmds.extend(p2pkh_script(h160))
            # witness program version 0 rule. if stack cmds are
            # [0 <32 byte hash>] this is p2wsh
            if len(stack) == 2 and stack[0] == b'' and len(stack[1]) == 32:
                s256 = stack.pop()
                stack.pop()
                cmds.extend(witness[:-1])
                raw_witness_script = witness[-1]
                # the witness script's sha256 must match the 32-byte program
                if s256 != util.sha256(raw_witness_script):
                    print('bad sha256 {} vs {}'.format(
                        s256.hex(), util.sha256(raw_witness_script).hex()))
                    return False
                stream = BytesIO(
                    util.encode_varint(len(raw_witness_script)) + raw_witness_script)
                # NOTE(review): the p2sh branch above unpacks parse(...) as a
                # 2-tuple, but here its result is extended directly onto cmds
                # -- confirm parse()'s return shape; one of the two usages
                # looks inconsistent.
                cmds.extend(parse(stream))
    # an empty stack, or a falsy (empty bytes) top element, fails the script
    if len(stack) == 0:
        return False
    if stack.pop() == b'':
        return False
    return True