def base58_encode(a, version='', postfix=''):
    """Base58-encode the hex string *a*, prefixed by *version* and followed
    by *postfix* (both hex strings, default empty).

    Leading 0x00 bytes are rendered as leading '1' digits, following the
    Bitcoin convention.  Mostly ripped from:
    https://github.com/jgarzik/python-bitcoinlib/blob/master/bitcoin/base58.py

    Raises Exception if any argument is not valid hex.
    """
    try:
        # Round-trip through unhexlify to validate and normalise the input.
        a = hexlify(unhexlify(a))
        version = hexlify(unhexlify(version))
        postfix = hexlify(unhexlify(postfix))
    except (TypeError, ValueError):
        # binascii.Error is a ValueError subclass; the previous bare
        # `except:` also swallowed KeyboardInterrupt/SystemExit.
        raise Exception('base58_encode() Invalid input')
    a, version, postfix = hex_to_hexstr(a), hex_to_hexstr(version), hex_to_hexstr(postfix)
    b = version + a + postfix
    b58_digits = '123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz'
    # Convert the whole hex string to an integer and repeatedly divide by 58.
    n1 = int(b, 16)
    res = []
    while n1 > 0:
        n1, r = divmod(n1, 58)
        res.append(b58_digits[r])
    res = ''.join(res[::-1])
    # Each leading 00 byte (two hex chars) maps to one leading '1' digit.
    pad = 0
    for j in range(0, len(b), 2):
        if b[j:j + 2] == '00':
            pad += 1
        else:
            break
    return str(b58_digits[0] * pad + res)
def _main_cli(args, out, encoding='utf-8'):
    """Dispatch the secp256k1 command-line actions.

    args     -- parsed argparse namespace; args.action selects the operation,
                the remaining fields are action-specific
    out      -- writable text stream for results
    encoding -- used when decoding hexlified bytes for display

    Returns an exit code for 'checksig' (0 good / 1 bad) and 'recpub' (0);
    other actions fall through to the final `return 0`.
    """
    import binascii

    def show_public(public_key):
        # Print the serialized public key as hex.
        rawp = public_key.serialize()
        out.write(u"Public key: {}\n".format(
            binascii.hexlify(rawp).decode(encoding)))

    def sign(funcname, params):
        # Build a PrivateKey from the hex key and sign params.message with
        # the named ecdsa method; returns (private_key, signature).
        raw = bytes(bytearray.fromhex(params.private_key))
        priv = PrivateKey(raw)
        func = getattr(priv, funcname)
        sig = func(params.message)
        return priv, sig

    if args.action == 'privkey':
        # Load the given key, or generate a fresh one when none is supplied.
        if args.private_key:
            rawkey = bytes(bytearray.fromhex(args.private_key))
        else:
            rawkey = None
        priv = PrivateKey(rawkey)
        raw = priv.private_key
        out.write(u"{}\n".format(binascii.hexlify(raw).decode(encoding)))
        if args.show_pubkey:
            show_public(priv.pubkey)
    elif args.action == 'sign':
        priv, sig_raw = sign('ecdsa_sign', args)
        sig = priv.ecdsa_serialize(sig_raw)
        out.write(u"{}\n".format(binascii.hexlify(sig).decode(encoding)))
        if args.show_pubkey:
            show_public(priv.pubkey)
    elif args.action == 'checksig':
        raw = bytes(bytearray.fromhex(args.public_key))
        sig = bytes(bytearray.fromhex(args.signature))
        pub = PublicKey(raw, raw=True)
        try:
            sig_raw = pub.ecdsa_deserialize(sig)
            good = pub.ecdsa_verify(args.message, sig_raw)
        except Exception:
            # Was a bare `except:`; any malformed signature counts as
            # invalid, but KeyboardInterrupt/SystemExit must propagate.
            good = False
        out.write(u"{}\n".format(good))
        return 0 if good else 1
    elif args.action == 'signrec':
        priv, sig = sign('ecdsa_sign_recoverable', args)
        sig, recid = priv.ecdsa_recoverable_serialize(sig)
        out.write(u"{} {}\n".format(binascii.hexlify(sig).decode(encoding), recid))
        if args.show_pubkey:
            show_public(priv.pubkey)
    elif args.action == 'recpub':
        # Recover the public key from a recoverable signature.
        empty = PublicKey(flags=ALL_FLAGS)
        sig_raw = bytes(bytearray.fromhex(args.signature))
        sig = empty.ecdsa_recoverable_deserialize(sig_raw, args.recid)
        pubkey = empty.ecdsa_recover(args.message, sig)
        show_public(PublicKey(pubkey))
    return 0
def init_predownloaded(self, mimetype, filedata):
    """Initialise storage for content that was fully fetched in one
    predownload pass, then immediately signal completion.

    mimetype -- content type reported by the downloader
    filedata -- the complete downloaded payload
    """
    if DEBUG:
        log(self.log_prefix + 'init_predownloaded: mimetype', mimetype, 'len', len(filedata))
    if self.dldoneflag.is_set():
        # Download was stopped elsewhere; nothing to initialise.
        if DEBUG:
            log(self.log_prefix + 'init_predownloaded: done flag is set, exit')
        return
    # Name the file after the content hash, plus a mimetype-derived extension.
    ext = self.guess_extension_from_mimetype(mimetype)
    filename = binascii.hexlify(self.dlhash)
    if len(ext):
        filename += '.' + ext
    content_length = len(filedata)
    self.fileinfo['filename'] = filename
    self.fileinfo['size'] = content_length
    self.fileinfo['mimetype'] = mimetype
    # Per-download buffer directory, also named after the hash.
    temp_dir = os.path.join(self.config['buffer_dir'], binascii.hexlify(self.dlhash))
    if not os.path.isdir(temp_dir):
        os.mkdir(temp_dir)
    self.storage = Storage(self.dlhash, self.config, self.fileinfo, temp_dir, None, self.finished_callback, filedata=filedata)
    self.downloader.set_storage(self.storage)
    # Everything is already on disk, so report completion straight away.
    self.finished_callback()
    if self.dlmode == DLMODE_VOD:
        if DEBUG:
            log(self.log_prefix + 'init_predownloaded: starting in vod mode, but download is finished: fileinfo', self.fileinfo)
        # stream is None because the complete file is available on disk.
        self.vodeventcallback(self.fileinfo, VODEVENT_START, {'complete': True,
         'filename': self.storage.get_dest_path(),
         'mimetype': self.fileinfo['mimetype'],
         'stream': None,
         'length': self.storage.get_content_length(),
         'bitrate': self.fileinfo['bitrate']})
def accountToJSON(account):
    """Hex-encode the binary fields of *account* in place and return it.

    For every subaccount: the public/private keys and each received
    txin's scriptPubKey are replaced by their hex representations.
    """
    for _address, sub in account.iteritems():
        for keyfield in ('public_key', 'private_key'):
            sub[keyfield] = binascii.hexlify(sub[keyfield])
        for txins in sub['received']:
            txins['scriptPubKey'] = binascii.hexlify(txins['scriptPubKey'])
    return account
def display_packet(packet, header=""):
    """Hex-dump a raw frame: bytes 0-5 and 6-11 as addresses, 12-13 as the
    type field, the rest as payload.

    Output is gated on the module-level PACKET_OUTPUT flag; *header* is a
    free-form prefix for the first line.
    NOTE(review): labelled From/To here -- confirm which is source vs
    destination against the capture source.
    """
    if PACKET_OUTPUT:
        print '%s Packet info: ' % header
        print '\tFrom: ' + hexlify(packet[0:6])
        print '\tTo: ' + hexlify(packet[6:12])
        print '\tType: ' + hexlify(packet[12:14])
        print '\tContent: ' + hexlify(packet[14:])
def validate_backend(try_backend):
    """Smoke-test a crypto backend: derive a key, then verify that
    decrypt(encrypt(message)) round-trips.

    Raises AssertionError if the backend fails any step.
    """
    import binascii
    from xpra.os_util import strtobytes
    try_backend.init()
    message = b"some message1234"
    password = "******"  # NOTE(review): value appears redacted in this copy
    key_salt = DEFAULT_SALT
    iterations = DEFAULT_ITERATIONS
    block_size = DEFAULT_BLOCKSIZE
    # Key derivation must succeed and return something non-None.
    key = try_backend.get_key(password, key_salt, block_size, iterations)
    log("validate_backend(%s) key=%s", try_backend, binascii.hexlify(key))
    assert key is not None, "backend %s failed to generate a key" % try_backend
    enc = try_backend.get_encryptor(key, DEFAULT_IV)
    log("validate_backend(%s) encryptor=%s", try_backend, enc)
    assert enc is not None, "backend %s failed to generate an encryptor" % enc
    dec = try_backend.get_decryptor(key, DEFAULT_IV)
    log("validate_backend(%s) decryptor=%s", try_backend, dec)
    assert dec is not None, "backend %s failed to generate a decryptor" % enc
    # Round-trip check: ciphertext must decrypt back to the original message.
    ev = enc.encrypt(message)
    evs = binascii.hexlify(strtobytes(ev))
    log("validate_backend(%s) encrypted(%s)=%s", try_backend, message, evs)
    dv = dec.decrypt(ev)
    log("validate_backend(%s) decrypted(%s)=%s", try_backend, evs, dv)
    assert dv == message
    log("validate_backend(%s) passed", try_backend)
def __init__(self, main_url, download_url, dlhash, config, multihandler, fileinfo, resumedata, vodeventcallback, set_error_func, finished_func, failed_func):
    """Create an HTTP download session.

    main_url / download_url -- source URLs; download_url is preferred when set
    dlhash       -- binary content hash identifying this download
    config       -- download configuration dict ('mode', 'predownload', ...)
    multihandler -- factory for per-download raw servers
    fileinfo     -- metadata dict, filled in as the download progresses
    resumedata   -- saved state to resume from, or None for a fresh download
    vodeventcallback / set_error_func / finished_func / failed_func -- hooks
    """
    self.main_url = main_url
    self.download_url = download_url
    self.dlhash = dlhash
    self.config = config
    self.dlmode = config['mode']
    self.fileinfo = fileinfo
    self.vodeventcallback = vodeventcallback
    self.set_error_func = set_error_func
    self.finished_func = finished_func
    self.failed_func = failed_func
    # Unique id: hex hash + unix timestamp + random suffix.
    self.download_id = binascii.hexlify(self.dlhash) + '-' + str(long(time.time())) + '-' + str(random.randint(0, 100000))
    self.dldoneflag = Event()
    self.rawserver = multihandler.newRawServer(dlhash, self.dldoneflag)
    # Prefer the direct download URL when one was given.
    if download_url is not None:
        url = download_url
    else:
        url = main_url
    self.downloader = Downloader(url, dlhash, self.rawserver, self.failed)
    self.voddownload = None
    self.storage = None
    self.log_prefix = 'dd::' + binascii.hexlify(self.dlhash) + ':'
    predownload = self.config.get('predownload', False)
    if DEBUG:
        log(self.log_prefix + '__init__: predownload', predownload)
    if resumedata is None and predownload:
        # Fetch everything in one request, then init storage from memory.
        self.downloader.predownload(self.init_predownloaded)
    else:
        callback = lambda content_length, mimetype: self.init(resumedata, content_length, mimetype)
        self.downloader.init(callback)
def __str__(self):
    """Render the TCP header, options and payload as a string of
    attribute="value" pairs.

    b1 is formatted as tcp_offset and b2 as tcp_flags; option payloads
    and the TCP payload are hex-encoded.
    """
    # Fixed header fields template.
    st = """ tcp_sport="%u" tcp_dport="%u" """ + \
         """ tcp_seq="%u" tcp_ack="%u" tcp_offset="%u" """ + \
         """ tcp_flags="%u" """ + \
         """ tcp_window="%u" tcp_csum="%u" tcp_urgptr="%u" """
    st = st % \
        (self.sport, self.dport,
         self.seq, self.ack, self.b1, self.b2,
         self.window, self.checksum, self.urgent)
    if len(self.opt) > 0:
        st_opt = """ tcp_optnum="%u" """ % len(self.opt)
        i = 0
        # Append one (code, length, hex payload) triple per TCP option; the
        # % on the accumulated string only fills the newly appended template
        # because earlier placeholders were already consumed.
        for (c, l, v) in self.opt:
            st_opt = st_opt + \
                """ tcp_optcode="%u" tcp_optlen="%u" tcp_optpayload="%s" """
            st_opt = st_opt % \
                (c, l, hexlify(v))
            i = i + 1
    else:
        st_opt = ""
    # The payload
    st = st + st_opt + """ tcp_payload="%s" """ % hexlify(self.payload)
    return st
def decodePNG(fileName):
    """Parse a PNG file and return the decoded reference image.

    Only RGB PNGs (PNG_COLOR_TYPE_RGB) are supported; prints an error and
    returns None (implicitly) for any other format.
    """
    print 'Decoding PNG'
    image = open(fileName, 'rb')
    # Validate the 8-byte PNG signature.
    sigHex = binascii.hexlify(image.read(8))
    if int(sigHex, 16) != PNG_SIGNATURE:
        print('ERROR: Only PNG Format is supported. Try a PNG formated image.')
        return
    chunkType = "Game On"  # sentinel so the chunk loop runs at least once
    refImage = []
    idata = ''
    while chunkType != 'IEND':
        # Chunk layout: 4-byte big-endian length, 4-byte type, payload, CRC.
        chunkSize = int(binascii.hexlify(image.read(4)), 16)
        chunkType = image.read(4)
        chunk = image.read(chunkSize)
        chunkCRC = image.read(4)  # CRC is read but not verified
        if chunkType == "IHDR":
            refImage, colorType, bitDepth = createRefImage(chunk)
            if colorType != PNG_COLOR_TYPE_RGB:
                print('ERROR: Unsuported PNG Color Type: ' + str(colorType) + ' Try another PNG image.')
                return
        elif chunkType == "IDAT":
            # NOTE(review): fillRefImage is re-run on the *cumulative* IDAT
            # data every time another IDAT chunk arrives, and bitDepth is
            # undefined if IDAT precedes IHDR -- confirm both are intended.
            idata += chunk
            fillRefImage(refImage, idata, bitDepth)
    image.close()
    print 'Done'
    return refImage
def receive(self, messagedata: bytes): """ Handle an UDP packet. """ # pylint: disable=unidiomatic-typecheck if len(messagedata) > self.UDP_MAX_MESSAGE_SIZE: log.error( 'INVALID MESSAGE: Packet larger than maximum size', node=pex(self.raiden.address), message=hexlify(messagedata), length=len(messagedata), ) return message = decode(messagedata) if type(message) == Pong: self.receive_pong(message) elif type(message) == Ping: self.receive_ping(message) elif type(message) == Delivered: self.receive_delivered(message) elif message is not None: self.receive_message(message) else: log.error( 'INVALID MESSAGE: Unknown cmdid', node=pex(self.raiden.address), message=hexlify(messagedata), )
def write(self, page, data):
    """Send a WRITE command to store data on the tag. The *page*
    argument specifies the offset in multiples of 4 bytes. The
    *data* argument must be a string or bytearray of length 4.

    Returns True on success. Command execution errors raise
    :exc:`Type2TagCommandError`; a wrong-sized *data* raises ValueError.
    """
    if len(data) != 4:
        raise ValueError("data must be a four byte string or array")
    log.debug("write {0} to page {1}".format(hexlify(data), page))
    # WRITE frame: command byte 0xA2, one-byte page address, 4 data bytes.
    rsp = self.transceive("\xA2" + chr(page % 256) + data)
    if len(rsp) != 1:
        # NOTE(review): this logs the outgoing data, not the response bytes.
        log.debug("invalid response " + hexlify(data))
        raise Type2TagCommandError(INVALID_RESPONSE_ERROR)
    if rsp[0] != 0x0A:  # NAK
        log.debug("invalid page, received nak")
        raise Type2TagCommandError(INVALID_PAGE_ERROR)
    return True
def add_ms_chap_info(self, eaptype, challenge=None, response=None, identity=None):
    """
    Adds information to the internal "mschap" list which contains
    dictionaries for each set with keys of:
        't' eap type (integer)
        'c' challenge (colon-separated hex string)
        'r' response (colon-separated hex string)
        'i' identity (string)

    Challenge and Response arguments are packed binary, NOT
    00:00:00:00:00:00:00:00 or 0000000000000000.

    Returns None on success, 1 on identity mismatch with the latest
    entry, 2 if the latest entry already has a response.
    """
    if not identity:
        identity = 'UNKNOWN'
    if challenge:
        # Store the challenge as colon-separated hex pairs (aa:bb:...).
        challenge = hexlify(challenge)
        challenge = ":".join([challenge[y:y+2] for y in range(0, len(challenge), 2)])
        self.mschap.append({'t': eaptype, 'c': challenge, 'i': identity})
    if response and len(self.mschap):
        # Adding a response string, but checking at least one challenge string exists.
        response = hexlify(response)
        response = ":".join([response[y:y+2] for y in range(0, len(response), 2)])
        # Ignore exact duplicates (same response and identity already stored).
        for value in self.mschap:
            if not 'r' in value:
                continue
            if response == value['r'] and identity == value['i']:
                return
        respObj = self.mschap[len(self.mschap) - 1]  # Get the last response dictionary object
        if identity and identity != respObj['i']:
            # Identity does not match the latest challenge entry.
            return 1
        if not 'r' in respObj:
            respObj['r'] = response
        else:
            # Latest entry already carries a response.
            return 2
def i2c_readprint(device, address, expectedRECVlength, data):
    """Issue an I2C read through the bridge on *device* and pretty-print
    each returned byte in binary, hex and decimal."""
    # Com. format is : Read/^Write - Device Address - Expected # of Bytes to Receive - Data
    # Device Address is proper address from device datasheet, not R/W adjusted address
    # for reads, data byte(s) is irrelevent
    length = len(data)
    # Opcode 1 = read, then address, payload length, expected receive count.
    s = chr(1) + chr(address) + chr(length) + chr(expectedRECVlength)
    for i in data:
        s = s + chr(i)
    device.write(s)
    # first returned value is the number of data bytes that should be received
    index = 0
    # NOTE(review): strip("\ x") looks garbled -- probably meant "\x00";
    # also int() here parses the hex text as *decimal* -- confirm intent.
    count = int(binascii.hexlify((device.read(1)).strip("\ x")))
    returnedData = []
    # copy all bytes into an array
    while (index < count):
        returnedData.append(device.read(1))
        index = index + 1
    print "\n76543210 - Hex- Int - Read from DeviceAddress:", hex(address), "SubAddress:", hex(data[0]), \
        "Length:", "%02d Data:" % int(expectedRECVlength), (returnedData)
    for i in range(len(returnedData)):
        print baseconvert.show_base(int(binascii.hexlify((returnedData[i]).strip("\ x")), 16), 2, 8), \
            "-", binascii.hexlify((returnedData[i]).strip("\ x")), "-", \
            '%03d' % int(binascii.hexlify((returnedData[i]).strip("\ x")), 16)
def sendUartOld(inArg):
    """Write *inArg* plus its CRC16 (low byte first) to the serial port,
    then hex-dump whatever the device sends back, 16 bytes per row."""
    ddata = ""
    inCrc = [0, 0]
    inCrcRaw = calcCrc(len(inArg), inArg)
    # CRC transmitted little-endian: low byte, then high byte.
    inCrc[0] = inCrcRaw & 0xff
    inCrc[1] = inCrcRaw >> 8
    ser.write(inArg)
    ser.write(inCrc)
    time.sleep(0.2)
    buf_len = ser.inWaiting()
    cnt1 = 9999  # sentinel: print the column header on the first pass only
    while buf_len > 0:
        if (cnt1 == 9999):
            print " 0 1 2 3 4 5 6 7 8 9 a b c d e f"
            cnt1 = 0
        else:
            if (buf_len > 16):
                ddata = ser.read(16)
                print "0x%02x" % cnt1, binascii.hexlify(ddata), ddata
                buf_len = buf_len - 16;
            else:
                # Final partial row.
                ddata = ser.read(buf_len)
                print "0x%02x" % cnt1, binascii.hexlify(ddata), ddata
                buf_len = 0;
            cnt1 = cnt1 + 16;
            time.sleep(0.1)
    return
def _serial_send(self, command, ack=True, max_attempts=5):
    """Send a serial command and automatically handle if it needs to be
    resent because of a bus error.

    If *ack* is True an acknowledgement (2 bytes) is expected and up to
    *max_attempts* attempts are made to get a good acknowledgement
    (default 5); the response bytes are returned.  If *ack* is False no
    acknowledgement is expected (like when resetting the device) and None
    is returned.  Raises RuntimeError on timeout or exhausted attempts.
    """
    attempts = 0
    while True:
        # Flush any pending received data to get into a clean state.
        self._serial.flushInput()
        # Send the data.
        self._serial.write(command)
        logger.debug('Serial send: 0x{0}'.format(binascii.hexlify(command)))
        # Stop if no acknowledgment is expected.
        if not ack:
            return
        # Read acknowledgement response (2 bytes).
        resp = bytearray(self._serial.read(2))
        logger.debug('Serial receive: 0x{0}'.format(binascii.hexlify(resp)))
        if resp is None or len(resp) != 2:
            raise RuntimeError('Timeout waiting for serial acknowledge, is the BNO055 connected?')
        # Stop if there's no bus error (0xEE07 response) and return response bytes.
        if not (resp[0] == 0xEE and resp[1] == 0x07):
            return resp
        # Else there was a bus error so resend, as recommended in UART app
        # note at:
        # http://ae-bst.resource.bosch.com/media/products/dokumente/bno055/BST-BNO055-AN012-00.pdf
        attempts += 1
        if attempts >= max_attempts:
            raise RuntimeError('Exceeded maximum attempts to acknowledge serial command without bus error!')
def main():
    """Print SHAKE128/SHAKE256 digests of the short/medium/long test
    vectors, each encoded as UTF-8, UTF-16BE and UTF-16LE, for 248-bit
    and 504-bit outputs."""
    test_vectors = {
        'Short': 'abc',
        'Medium': 'abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu',
        'Long': 'a' * 1000000,
    }
    hash_funcs = {'SHAKE128': sha3.SHAKE128, 'SHAKE256': sha3.SHAKE256}
    output_lens = [31, 63]
    for hash_name, hash_func in hash_funcs.items():
        for vector_name, vector_value in test_vectors.items():
            for output_len in output_lens:
                # Helper: hash the encoded input and hex-encode the digest.
                def digest_of(encoded):
                    return binascii.hexlify(hash_func(encoded, output_len)).decode()
                bits = output_len * 8
                print('%s with %s UTF-8 Input and %d bit Output: %s' % (
                    hash_name, vector_name, bits,
                    digest_of(vector_value.encode())))
                print('%s with %s UTF-16BE Input and %d bit Output: %s' % (
                    hash_name, vector_name, bits,
                    digest_of(vector_value.encode('UTF-16BE'))))
                print('%s with %s UTF-16LE Input and %d bit Output: %s' % (
                    hash_name, vector_name, bits,
                    digest_of(vector_value.encode('UTF-16LE'))))
def assert_fingerprint(cert, fingerprint):
    """
    Checks if given fingerprint matches the supplied certificate.

    :param cert:
        Certificate as bytes object.
    :param fingerprint:
        Fingerprint as string of hexdigits, can be interspersed by colons.

    :raises SSLError:
        If the fingerprint has an unsupported length or does not match
        the certificate's digest.
    """
    # Local import: the module header (not visible here) only provides
    # md5/sha1; SHA-2 support is new in this version.
    from hashlib import sha256, sha512

    # Maps the length of a digest to a possible hash function producing
    # this digest.  Extended with SHA-256/SHA-512 so modern fingerprints
    # are accepted too (backward-compatible: md5/sha1 still work).
    hashfunc_map = {
        16: md5,
        20: sha1,
        32: sha256,
        64: sha512,
    }

    # Normalise: strip colon separators, lowercase the hex digits.
    fingerprint = fingerprint.replace(':', '').lower()
    digest_length, odd = divmod(len(fingerprint), 2)
    if odd or digest_length not in hashfunc_map:
        raise SSLError('Fingerprint is of invalid length.')

    # We need encode() here for py32; works on py2 and p33.
    fingerprint_bytes = unhexlify(fingerprint.encode())

    hashfunc = hashfunc_map[digest_length]
    cert_digest = hashfunc(cert).digest()

    if not cert_digest == fingerprint_bytes:
        raise SSLError('Fingerprints did not match. Expected "{0}", got "{1}".'
                       .format(hexlify(fingerprint_bytes), hexlify(cert_digest)))
def ngx_pretty_print(header, csv=False):
    """Print an nginx cache-file header, either as one CSV row (csv=True)
    or as a human-readable field-per-line listing.

    Timestamps are rendered as local datetimes; the variant field is
    hex-encoded.
    """
    if csv:
        print ','.join([str(header.version),
            str(datetime.datetime.fromtimestamp(header.valid_sec)),
            str(datetime.datetime.fromtimestamp(header.last_modified)),
            str(datetime.datetime.fromtimestamp(header.date)),
            str(header.crc32),
            str(header.header_start),
            str(header.body_start),
            str(header.etag),
            str(header.vary_len),
            str(header.vary),
            binascii.hexlify(header.variant),
            header.key])
    else:
        print 'version : ' + str(header.version)
        print 'valid_sec : ' + str(datetime.datetime.fromtimestamp(header.valid_sec))
        print 'last_modified: ' + str(datetime.datetime.fromtimestamp(header.last_modified))
        print 'date : ' + str(datetime.datetime.fromtimestamp(header.date))
        print 'crc32 : ' + str(header.crc32)
        print 'header_start : ' + str(header.header_start)
        print 'body_start : ' + str(header.body_start)
        print 'etag : ' + str(header.etag)
        print 'vary_len : ' + str(header.vary_len)
        print 'vary : ' + str(header.vary)
        print 'variant : ' + binascii.hexlify(header.variant)
        print 'key : ' + header.key
        print 'headers : \n' + header.headers
def test():
    """Exercise the libsodium wrappers: generichash (one-shot and
    streaming), box, secretbox, curve25519 scalarmult and sign
    round-trips, printing each result."""
    import binascii
    # One-shot vs streaming generichash of the same input.
    print binascii.hexlify(crypto_generichash('howdy'))
    state = crypto_generichash_init()
    state = crypto_generichash_update(state, 'howdy')
    print binascii.hexlify(crypto_generichash_final(state))
    # Public-key authenticated encryption round-trip.
    pk, sk = crypto_box_keypair()
    n = randombytes(crypto_box_NONCEBYTES)
    c = crypto_box("howdy", n, pk, sk)
    print crypto_box_open(c, n, pk, sk)
    # Secret-key encryption round-trip.
    k = randombytes(crypto_secretbox_KEYBYTES)
    n = randombytes(crypto_secretbox_NONCEBYTES)
    c = crypto_secretbox("howdy", n, k)
    print crypto_secretbox_open(c, n, k)
    # Diffie-Hellman over curve25519.
    s = crypto_scalarmult_curve25519_base(randombytes(crypto_scalarmult_BYTES))
    r = crypto_scalarmult_curve25519_base(randombytes(crypto_scalarmult_BYTES))
    print 'scalarmult'
    print repr(crypto_scalarmult_curve25519(s, r))
    # Signature round-trip.
    pk, sk = crypto_sign_keypair()
    signed = crypto_sign('howdy', sk)
    print crypto_sign_open(signed, pk)
def bin_file_header_reader(filename):
    """Read the 5-byte header of a capture file: a 1-byte sample-rate unit
    code, a 2-byte sample-rate value and a 2-byte data size (big-endian).

    *filename* is resolved relative to the module-level `path`.
    Returns (SR_unit, SR_value, Data_Size), or None on error.
    NOTE(review): the file handle is not closed on the success path.
    """
    filename = path + filename
    fd = None
    try:
        fd = open(filename, "rb")
        # Unit code compared as hex text: "00"=ms, "01"=s, "10" (0x10)=Hz.
        SR_unit = hexlify(fd.read(1))
        if SR_unit == "00":
            print "SR_unit: Millisec"
        elif SR_unit == "01":
            print "SR_unit: Sec"
        elif SR_unit == "10":
            print "SR_unit: Hz"
        else:
            print "SR_unit: error"
        SR_value = int(hexlify(fd.read(2)), 16)
        print "SR_value: ", SR_value
        Data_Size = int(hexlify(fd.read(2)), 16)
        print "Data_Size: ", Data_Size
        return SR_unit, SR_value, Data_Size
    except KeyboardInterrupt:
        if fd != None:
            fd.close()
    except Exception, e:
        # Best-effort: report the error and clean up; caller gets None.
        print e
        if fd != None:
            fd.close()
def _openssl_kdf(cls, algo, pwd, salt, key_size, iv_size):
    """Derive (key, iv) from a password and salt, OpenSSL
    EVP_BytesToKey-style.

    algo     -- hash algorithm name; 'md5' seeds the first block via
                PBKDF1 with a single iteration (equivalent first block)
    pwd/salt -- bytes
    key_size/iv_size -- number of bytes wanted for key and IV

    SECURITY: the previous version printed the password, derived key and
    IV to stdout; that leaks secret material into logs and was removed.
    """
    if algo == 'md5':
        temp = pbkdf1(pwd, salt, 1, 16, 'md5')
    else:
        temp = b''
    fd = temp
    # Keep hashing previous-block + password + salt until there are enough
    # bytes for both the key and the IV.
    while len(fd) < key_size + iv_size:
        temp = cls._hasher(algo, temp + pwd + salt)
        fd += temp
    key = fd[0:key_size]
    iv = fd[key_size:key_size + iv_size]
    return key, iv
def getEncryption(self):
    """Build the encrypted password blob for the QQ web login.

    Combines MD5(password), the QQ number, RSA encryption of the
    password hash with the service's fixed public key, and TEA
    encryption of the assembled salt; the result is base64-encoded with
    the web-safe substitutions '/'->'-', '+'->'*', '='->'_'.
    """
    # Fixed RSA public key published by the login service (exponent 3).
    puk = rsa.PublicKey(int(
        'F20CE00BAE5361F8FA3AE9CEFA495362'
        'FF7DA1BA628F64A347F0A8C012BF0B25'
        '4A30CD92ABFFE7A6EE0DC424CB6166F8'
        '819EFA5BCCB20EDFB4AD02E412CCF579'
        'B1CA711D55B8B0B3AEB60153D5E0693A'
        '2A86F3167D7847A0CB8B00004716A909'
        '5D9BADC977CBB804DBDCBA6029A97108'
        '69A453F27DFDDF83C016D928B3CBF4C7', 16
    ), 3)
    e = int(self.qq).to_bytes(8, 'big')  # QQ number as 8 big-endian bytes
    o = hashlib.md5(self.pwd.encode())
    r = bytes.fromhex(o.hexdigest())     # raw MD5 digest of the password
    p = hashlib.md5(r + e).hexdigest()   # TEA key: MD5(md5(pwd) + qq)
    a = binascii.b2a_hex(rsa.encrypt(r, puk)).decode()  # RSA(md5(pwd)) hex
    s = hex(len(a) // 2)[2:]             # RSA blob length in bytes, hex
    l = binascii.hexlify(self.vcode.upper().encode()).decode()  # verify code
    c = hex(len(l) // 2)[2:]             # verify-code length in bytes, hex
    # Left-pad both length fields to 4 hex digits.
    c = '0' * (4 - len(c)) + c
    s = '0' * (4 - len(s)) + s
    salt = s + a + binascii.hexlify(e).decode() + c + l
    return base64.b64encode(
        tea.encrypt(bytes.fromhex(salt), bytes.fromhex(p))
    ).decode().replace('/', '-').replace('+', '*').replace('=', '_')
def main():
    """Demo: compute a Catfish hash of a fixed salt and password using the
    example parameters shipped with the module."""
    # prime1 = int(
    #     '102639592829741105772054196573991675900' +
    #     '716567808038066803341933521790711307779')
    # prime2 = int(
    #     '1066034883801684548209272203600128786792' +
    #     '07958575989291522270608237193062808643')
    # modulus = prime1 * prime2
    from example_params import generator, modulus, bitlen_modulus, \
        prime1, prime2
    # Sanity check the published factorisation.
    assert prime1 * prime2 == modulus
    catfish = Catfish(
        gen=generator, mod=modulus, len_mod=1024, tcost=2,
        mcost=bitlen_modulus, verbose=True, prime1=prime1, prime2=prime2
    )
    # Fixed salt for reproducibility; a random one could be generated with:
    # from Crypto.Random.random import getrandbits
    # salt = long2bytes(getrandbits(128), 128 / 8)
    hexsalt = '4c880aa553669c3869f62b389c2c3499'
    salt = unhexlify(hexsalt)
    password = '******'  # NOTE(review): value appears redacted in this copy
    print 'hexsalt', hexlify(salt)
    print 'pass', password
    print 'hexpass', hexlify(password)
    hextag = catfish.hexdigest(salt, password)
    print 'hexhash', hextag
def sense_ttb(self, target):
    """Sense for a Type B Target is supported for 106, 212 and 424
    kbps. However, there may not be any target that understands the
    activation command in other than 106 kbps.

    Returns a RemoteTarget with the SENSB_RES on success, None when no
    (valid) response is received; raises UnsupportedTargetError for an
    unsupported bitrate.
    """
    log.debug("polling for NFC-B technology")
    if target.brty not in ("106B", "212B", "424B"):
        message = "unsupported bitrate {0}".format(target.brty)
        raise nfc.clf.UnsupportedTargetError(message)
    # Configure RF and Type B framing (SOF/EOF generation and checking).
    self.chipset.in_set_rf(target.brty)
    self.chipset.in_set_protocol(self.chipset.in_set_protocol_defaults)
    self.chipset.in_set_protocol(initial_guard_time=20, add_sof=1, check_sof=1, add_eof=1, check_eof=1)
    # Use the caller's SENSB_REQ if given, else the default one.
    sensb_req = (target.sensb_req if target.sensb_req else bytearray.fromhex("050010"))
    log.debug("send SENSB_REQ " + hexlify(sensb_req))
    try:
        sensb_res = self.chipset.in_comm_rf(sensb_req, 30)
    except CommunicationError as error:
        # A receive timeout just means no tag in the field; stay silent.
        if error != "RECEIVE_TIMEOUT_ERROR":
            log.debug(error)
        return None
    # A valid SENSB_RES is at least 12 bytes and starts with 0x50.
    if len(sensb_res) >= 12 and sensb_res[0] == 0x50:
        log.debug("rcvd SENSB_RES " + hexlify(sensb_res))
        return nfc.clf.RemoteTarget(target.brty, sensb_res=sensb_res)
def sense_ttf(self, target):
    """Sense for a Type F Target is supported for 212 and 424 kbps.

    Returns a RemoteTarget with the SENSF_RES on success, None when no
    (valid) response is received; raises UnsupportedTargetError for an
    unsupported bitrate.
    """
    log.debug("polling for NFC-F technology")
    if target.brty not in ("212F", "424F"):
        message = "unsupported bitrate {0}".format(target.brty)
        raise nfc.clf.UnsupportedTargetError(message)
    self.chipset.in_set_rf(target.brty)
    self.chipset.in_set_protocol(self.chipset.in_set_protocol_defaults)
    self.chipset.in_set_protocol(initial_guard_time=24)
    # Use the caller's SENSF_REQ if given, else poll for any system code.
    sensf_req = (target.sensf_req if target.sensf_req else bytearray.fromhex("00FFFF0100"))
    log.debug("send SENSF_REQ " + hexlify(sensf_req))
    try:
        # NFC-F frames are length-prefixed (LEN byte includes itself).
        frame = chr(len(sensf_req)+1) + sensf_req
        frame = self.chipset.in_comm_rf(frame, 10)
    except CommunicationError as error:
        # A receive timeout just means no tag in the field; stay silent.
        if error != "RECEIVE_TIMEOUT_ERROR":
            log.debug(error)
        return None
    # Valid SENSF_RES: >=18 bytes, LEN byte matches, response code 1.
    if len(frame) >= 18 and frame[0] == len(frame) and frame[1] == 1:
        log.debug("rcvd SENSF_RES " + hexlify(frame[1:]))
        return nfc.clf.RemoteTarget(target.brty, sensf_res=frame[1:])
def generate_keyfiles(n, m, vf, sf):
    '''Generate a set of public and private keys for testing.
    n - the number of OR loops
    m - the number of keys per loop (note: constant in this crude version)
    vf - the file path to which to write the verification keys
    sf - the file path to which to write the signing (private) keys
    '''
    # For each loop, choose which of the m slots holds the real signing key.
    signing_indices = [random.choice(range(m)) for _ in range(n)]
    priv = []
    # One hex private key per line.
    with open(sf, 'wb') as f:
        for i in range(n):
            priv.append(os.urandom(32))
            f.write(binascii.hexlify(priv[i]) + '\n')
    # One comma-separated row of m compressed pubkeys per loop: the real
    # pubkey at the signing index, random decoys elsewhere.
    with open(vf, 'wb') as f:
        for i in range(n):
            pubkeys = []
            for j in range(m):
                if j == signing_indices[i]:
                    p = btc.privtopub(priv[i])
                else:
                    p = btc.privtopub(os.urandom(32))
                p = btc.decode_pubkey(p)
                p = btc.encode_pubkey(p, 'bin_compressed')
                pubkeys.append(binascii.hexlify(p))
            f.write(','.join(pubkeys) + '\n')
def validate_read(expected, actual, msg, ignore_errors=False):
    """Compare two byte strings; on mismatch print both as hex and, unless
    *ignore_errors* is set, raise an Exception naming *msg*."""
    if expected != actual:
        print 'Failed %s' % msg
        print ' Expected; %s' % binascii.hexlify(expected,)
        print ' Actual: %s' % binascii.hexlify(actual,)
        if not ignore_errors:
            raise Exception('failed validate: %s' % msg)
def main():
    """CLI entry point: read a password, encrypt it for virt-who and print
    the hex blob to use as encrypted_password in the configuration.

    Must run as root (the keyfile is root-only).
    """
    if len(sys.argv) == 2 and sys.argv[1] in ('-h', '--help'):
        print """Utility that encrypts passwords for virt-who.
Enter password that should be encrypted. This encrypted password then can be supplied to virt-who configuration.
This command must be executed as root!
WARNING: root user can still decrypt encrypted passwords!
"""
        sys.exit(0)
    if os.getuid() != 0:
        print >>sys.stderr, "Only root can encrypt passwords"
        sys.exit(1)
    try:
        # NOTE(review): the code between getpass() and the first except was
        # destroyed by a redaction pass ("******") in this copy -- the
        # password read, the encryption call producing `enc`, and the
        # original exception clause are missing.  Restore from upstream.
        pwd = getpass("Password: "******"Keyfile %s doesn't exist and can't be created, rerun as root" % Password.KEYFILE
        sys.exit(1)
    except InvalidKeyFile:
        print >>sys.stderr, "Can't access keyfile %s, rerun as root" % Password.KEYFILE
        sys.exit(1)
    print >>sys.stderr, "Use following as value for encrypted_password key in the configuration file:"
    print hexlify(enc)
def submit(self, header, worker_name):
    """Check a miner-submitted block header and forward the share to the
    pool.

    header      -- hex block header (extra padding beyond 160 hex chars is
                   dropped)
    worker_name -- pool worker credential

    Returns True when the hash does not meet the target (logged, not
    submitted), False when no matching job is found, otherwise the result
    of the pool's mining.submit RPC.
    """
    # Drop unused padding
    header = header[:160]
    # 1. Check if blockheader meets requested difficulty
    header_bin = binascii.unhexlify(header[:160])
    # Byte-swap each of the 20 32-bit words before hashing.
    rev = ''.join([header_bin[i*4:i*4+4][::-1] for i in range(0, 20)])
    hash_bin = utils.doublesha(rev)
    block_hash = ''.join([hash_bin[i*4:i*4+4][::-1] for i in range(0, 8)])
    #log.info('!!! %s' % header[:160])
    log.info("Submitting %s" % utils.format_hash(binascii.hexlify(block_hash)))
    if utils.uint256_from_str(hash_bin) > self.target:
        # Hash value above the target: share too weak, accept locally only.
        log.debug("Share is below expected target")
        return True
    # 2. Lookup for job and extranonce used for creating given block header
    try:
        (job, extranonce2) = self.get_job_from_header(header)
    except KeyError:
        log.info("Job not found")
        return False
    # 3. Format extranonce2 to hex string
    extranonce2_hex = binascii.hexlify(self.extranonce2_padding(extranonce2))
    # 4. Parse ntime and nonce from header
    ntimepos = 17*8  # 17th integer in datastring
    noncepos = 19*8  # 19th integer in datastring
    ntime = header[ntimepos:ntimepos+8]
    nonce = header[noncepos:noncepos+8]
    # 5. Submit share to the pool
    return self.f.rpc('mining.submit', [worker_name, job.job_id, extranonce2_hex, ntime, nonce])
def generate(self):
    """Rebuild the dat-file index: walk DatDir (host/board/dddd/N.dat[.gz])
    and insert one row per dat into a freshly created sqlite `dattbl`,
    keyed by the dat's hash.

    Raises Exception on a duplicate hash.
    """
    reghost = re.compile(regHosts+'$')
    regnumdir = re.compile('^\d{4}$')
    regdat = re.compile('^(\d+)\.dat(?:\.gz)?$')
    with self.lock:
        sqlite_conn = sqlite3.connect(DatDBFile)
        c = sqlite_conn.cursor()
        # Start from a clean table; ignore "no such table" on first run.
        try:
            c.execute('DROP TABLE dattbl')
        except sqlite3.OperationalError:
            pass
        c.execute('CREATE TABLE dattbl(datpath, hash PRIMARY KEY, board, published)')
    for h in os.listdir(o2on_config.DatDir):
        if not reghost.match(h):
            continue
        for b in os.listdir(os.path.join(o2on_config.DatDir, h)):
            # Hold the lock per board directory while inserting its dats.
            with self.lock:
                for d in os.listdir(os.path.join(o2on_config.DatDir, h, b)):
                    if not regnumdir.match(d):
                        continue
                    for f in os.listdir(os.path.join(o2on_config.DatDir, h, b, d)):
                        m = regdat.match(f)
                        if not m:
                            continue
                        path = h+"/"+b+"/"+m.group(1)
                        dat = Dat(path)
                        try:
                            c.execute('INSERT OR IGNORE INTO dattbl VALUES(?, ?, ?, ?)', (path, hexlify(dat.hash()), dat.fullboard(), 0))
                        except sqlite3.IntegrityError:
                            raise Exception("dup hash %s %s" % (hexlify(dat.hash()), path))
                        self.glob.logger.log("DATDB", "added %s" % path)
    try:
        c.execute('COMMIT')
    except sqlite3.OperationalError:
        pass
def run_server_loop(portfilename, modelfile, meanfile, stdfile, lsqweightfile):
    """Run a one-connection-at-a-time TCP classification server.

    Loads a TF model plus normalisation stats, writes the OS-assigned port
    number to *portfilename*, then for each connection: receives a word,
    receives feature frames, classifies each frame and sends back a score
    plus the wanted/guessed class sequences.

    Relies on module-level names: x, y, keep_prob,
    recurrent_neural_network, words, class_def, HOST, debugging.
    NOTE(review): the source of this block was line-mangled; statement
    nesting of the tail was reconstructed and should be diffed upstream.
    """
    # Simple length+ack wire protocol constants.
    ackvalue = -1
    acklength = 4
    single_integer_packer = struct.Struct('i')
    single_float_packer = struct.Struct('f')
    ackpacket = single_integer_packer.pack(ackvalue)
    datadim = 66    # features per frame (first column is dropped before use)
    timesteps = 62  # frames per sample
    nb_classes = 119
    #batch_size=32
    #nb_epoch=30
    loadstartmoment = time.clock()
    with tf.Session() as sess:
        #sess.run(init)
        prediction = recurrent_neural_network(x, keep_prob)
        print("Init saver with meta graph from %s" % modelfile)
        restorer = tf.train.Saver(tf.global_variables())
        restorer.restore(sess, modelfile)
        print("Done!")
        print ( "Load normalisation stats..." )
        zmean = np.loadtxt( meanfile ) #traindata.mean
        zstd = np.loadtxt( stdfile )# traindata.std
        lsq_weights = np.loadtxt( lsqweightfile ).reshape(45, 120)
        loadtime = time.clock()-loadstartmoment
        print ("Loading took %0.1f seconds!"%loadtime)
        # Connection handling!
        #
        # Get data from socket and do it!
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        s.bind((HOST, 0))
        s.listen(1)
        # Publish the OS-assigned port so clients can find us.
        port = s.getsockname()[1]
        text_file = open(portfilename, "w")
        text_file.write(str(port))
        text_file.close()
        single_integer_packer = struct.Struct('i')
        print ("Waiting for connection in port %i!" % port)
        while 1:
            connectionstartstartmoment = time.clock()
            conn, addr = s.accept()
            print ('Connected by', addr)
            # --- Step 1: receive the word (length-prefixed UTF-8) ---
            print ('Length of word to be classified:')
            datalen = conn.recv(single_integer_packer.size)
            if not datalen:
                break
            print (datalen)
            unpacked_datalen = single_integer_packer.unpack(datalen)
            unpacked_datalen = int(unpacked_datalen[0])
            print(unpacked_datalen)
            print ("Going to read unicode data of length %i" % unpacked_datalen)
            char_packer = struct.Struct( unpacked_datalen*'c')
            conn.send( ackpacket )
            print ('Word to be classified:')
            worddata = conn.recv(char_packer.size)
            if not worddata:
                break
            print(worddata)
            try:
                word = worddata.decode("utf-8").replace('\x00', '')
                #char_packer.unpack( worddata )
                #word = ''.join(word)
                print (word)
                #word = word.decode('utf-16')
                word_data_ok = True
            except:
                print ("Something went wrong, let's print stack trace:")
                traceback.print_exc()
                conn.close()
                word_data_ok = False
            if word_data_ok:
                print("Got word \"%s\"" % word)
                # NOTE(review): this lookup raises KeyError for unknown words
                # *before* the membership check below -- confirm ordering.
                print(words[word])
                if word not in words:
                    print ("Word not in dictionary: Closing connection")
                    conn.close()
                    word_data_ok = False
                    continue
                # Expected class sequence for the word's phones.
                phones = words[word]
                classes = [ class_def[phone]['class'] for phone in phones ]
                conn.send( ackpacket )
                print ('Will now classify using models %s' % modelfile)
                # --- Step 2: receive float feature data (length-prefixed) ---
                datalen = conn.recv(single_integer_packer.size)
                if not datalen:
                    break
                unpacked_datalen = single_integer_packer.unpack(datalen)
                print ("Going to read float data of length %i" % unpacked_datalen)
                float_packer = struct.Struct( unpacked_datalen[0]*'f')
                conn.send( ackpacket )
                MSGLEN = unpacked_datalen[0] * 4
                chunks = []
                bytes_recd = 0
                # Accumulate exactly MSGLEN bytes in <=2048-byte chunks.
                while bytes_recd < MSGLEN:
                    chunk = conn.recv(min(MSGLEN - bytes_recd, 2048))
                    if chunk == b'':
                        raise RuntimeError("socket connection broken")
                    chunks.append(chunk)
                    bytes_recd = bytes_recd + len(chunk)
                data = b''.join(chunks)
                #data = conn.recv(float_packer.size)
                if not data:
                    break
                try:
                    unpacked_data = float_packer.unpack( data )
                    unpacked_data_ok = True
                except:
                    print ("Something went wrong, let's print stack trace:")
                    print ("Size of data: " + str(len(data)))
                    traceback.print_exc()
                    conn.close()
                    unpacked_data_ok = False
                    continue
                if unpacked_data_ok:
                    featdim = len(unpacked_data)
                    test_x = np.array(unpacked_data).reshape(-1, timesteps, datadim)
                    if debugging:
                        for n in range(0, test_x.shape[0]):
                            np.savetxt('/tmp/got_features%i' % n, test_x[n,:,1:])
                        np.savetxt("/tmp/feat_orig", test_x[0,:,:])
                    # Per-sample preprocessing: log-compress then z-normalise
                    # the non-empty (non-all-zero) frames only.
                    for n in range(test_x.shape[0]):
                        non_empty_rows = np.where(np.abs(test_x[n,:,:]).sum(-1)>0)[0]
                        print("Non-empty rows for sample %i:" % n)
                        print(non_empty_rows)
                        test_x[n,non_empty_rows,:] = np.log( test_x[n,non_empty_rows,:] + 0.001 )
                        #np.savetxt("/tmp/feat_log", test_x[0,:,:])
                        np.savetxt('/tmp/log_features%i' % n, test_x[n,:,1:])
                        test_x[n,non_empty_rows,:] = ( test_x[n,non_empty_rows,:] - zmean ) / zstd
                        np.savetxt("/tmp/feat_normalised", test_x[0,:,:])
                    test_y = np.zeros(test_x.shape[0]);
                    if debugging:
                        for n in range(0, test_x.shape[0]):
                            np.savetxt('/tmp/norm_features%i' % n, test_x[n,:,1:])
                    # Classification here!
                    #
                    teststartmoment = time.clock()
                    #return_data = model.predict(test_x, batch_size=batch_size)
                    [EMB] = sess.run([ prediction ], feed_dict={ x: test_x[:,:,1:], y: test_y, keep_prob: 1 })
                    #return_data = EMB # np.argmax(EMB,2).reshape([-1])
                    print ("EMB shape")
                    print (EMB.shape)
                    guesses = np.argmax(EMB, 1)
                    print ("guesses shape")
                    print (guesses.shape)
                    print ("Classified:")
                    print (guesses)
                    # Confusion-style matrix of wanted vs guessed classes,
                    # normalised, then scored against the LSQ weights.
                    ranking_matrix = np.zeros([45, 120])
                    for i in range(test_y.shape[0]):
                        guess = guesses[i]
                        wanted = classes[i]
                        ranking_matrix[ wanted, guess ] += 1
                    if (ranking_matrix).sum(-1).sum(-1) > 0:
                        ranking_matrix /= (ranking_matrix).sum(-1).sum(-1)
                    score = (lsq_weights*ranking_matrix).sum(-1).sum(-1)
                    # Clamp the score into {-2} U [1..5].
                    if score < 1:
                        rounded_score = -2
                    else:
                        rounded_score = round(score)
                    if rounded_score > 5:
                        rounded_score = 5
                    print ("Score %0.2f -> %i" % (score, rounded_score))
                    testtime = time.clock()-teststartmoment
                    # Classification done, let's go back
                    # to handling data transfer:
                    encoded_length = single_integer_packer.pack( len(guesses)+len(classes)+1 )
                    conn.send( encoded_length )
                    print ("Waiting for acc:")
                    client_ack = conn.recv( acklength )
                    try:
                        ack = (single_integer_packer.unpack(client_ack))
                        ack_ok = True
                    except:
                        ack_ok = False
                        print ("Something went wrong, let's print stack trace:")
                        traceback.print_exc()
                        conn.close()
                    if ack_ok:
                        print ("client_ack: " + str(ack) )
                        if single_integer_packer.unpack(client_ack)[0] == ackvalue:
                            # Reply payload: score, wanted classes, guesses --
                            # all packed as floats.
                            packable = [rounded_score]
                            for cl in classes:
                                packable.append( cl )
                            for gu in guesses:
                                packable.append( gu )
                            print(packable)
                            result_packer = struct.Struct(len(packable) * 'f')
                            encoded_score_data = result_packer.pack( *packable )
                            print ('Encoded data: "%s"' % binascii.hexlify(encoded_score_data))
                            conn.send(encoded_score_data)
                            conn.close()
                            connectiontime = time.clock() - connectionstartstartmoment
                            print ("Processing took %f s " % (connectiontime))
                            sys.stdout.flush()
def _process_challenge(self, packet):
    """Answer the server's authentication challenge packet.

    Packet layout: [type, server_salt, (cipher caps), digest, salt_digest].
    Combines the server salt with a locally generated client salt, digests
    the password accordingly (hmac+hash or xor) and sends the response back
    via send_hello().  Aborts the connection with an explanatory message on
    any precondition failure.
    """
    authlog("processing challenge: %s", packet[1:])

    def warn_server_and_exit(code, message, server_message="authentication failed"):
        # Log locally, then tell the server why we are disconnecting.
        authlog.error("Error: authentication failed:")
        authlog.error(" %s", message)
        self.disconnect_and_quit(code, server_message)

    if not self.has_password():
        warn_server_and_exit(
            EXIT_PASSWORD_REQUIRED,
            "this server requires authentication, please provide a password",
            "no password available")
        return
    password = self.load_password()
    if not password:
        warn_server_and_exit(
            EXIT_PASSWORD_FILE_ERROR,
            "failed to load password from file %s" % self.password_file,
            "no password available")
        return
    server_salt = packet[1]
    if self.encryption:
        assert len(
            packet
        ) >= 3, "challenge does not contain encryption details to use for the response"
        server_cipher = typedict(packet[2])
        key = self.get_encryption_key()
        if key is None:
            warn_server_and_exit(EXIT_ENCRYPTION,
                                 "the server does not use any encryption",
                                 "client requires encryption")
            return
        if not self.set_server_encryption(server_cipher, key):
            return
    #all server versions support a client salt,
    #they also tell us which digest to use:
    digest = bytestostr(packet[3])
    l = len(server_salt)
    salt_digest = "xor"
    if len(packet) >= 5:
        salt_digest = bytestostr(packet[4])
    if salt_digest == "xor":
        #with xor, we have to match the size
        assert l >= 16, "server salt is too short: only %i bytes, minimum is 16" % l
        assert l <= 256, "server salt is too long: %i bytes, maximum is 256" % l
    else:
        #other digest, 32 random bytes is enough:
        l = 32
    client_salt = get_salt(l)
    if salt_digest in ("xor", "des"):
        if not LEGACY_SALT_DIGEST:
            warn_server_and_exit(
                EXIT_INCOMPATIBLE_VERSION,
                "server uses legacy salt digest '%s'" % salt_digest,
                "unsupported digest %s" % salt_digest)
            return
        log.warn(
            "Warning: server using legacy support for '%s' salt digest",
            salt_digest)
    salt = gendigest(salt_digest, client_salt, server_salt)
    authlog("combined %s salt(%s, %s)=%s", salt_digest,
            binascii.hexlify(server_salt),
            binascii.hexlify(client_salt), binascii.hexlify(salt))
    # FIX: 'digest' is a str (bytestostr above), so compare against str
    # constants - comparing with b"hmac"/b"xor" breaks on Python 3.
    if digest.startswith("hmac"):
        import hmac
        digestmod = get_digest_module(digest)
        if not digestmod:
            # FIX: format string had two placeholders but only one argument
            log("invalid digest module '%s'", digest)
            warn_server_and_exit(
                EXIT_UNSUPPORTED,
                "server requested digest '%s' but it is not supported" % digest,
                "invalid digest")
            return
        password = strtobytes(password)
        salt = memoryview_to_bytes(salt)
        challenge_response = hmac.HMAC(password, salt,
                                       digestmod=digestmod).hexdigest()
        authlog("hmac.HMAC(%s, %s)=%s", binascii.hexlify(password),
                binascii.hexlify(salt), challenge_response)
    elif digest == "xor":
        #don't send XORed password unencrypted:
        encrypted = self._protocol.cipher_out or self._protocol.get_info(
        ).get("type") == "ssl"
        local = self.display_desc.get("local", False)
        authlog("xor challenge, encrypted=%s, local=%s", encrypted, local)
        if local and ALLOW_LOCALHOST_PASSWORDS:
            pass
        elif not encrypted and not ALLOW_UNENCRYPTED_PASSWORDS:
            warn_server_and_exit(
                EXIT_ENCRYPTION,
                "server requested digest %s, cowardly refusing to use it without encryption"
                % digest, "invalid digest")
            return
        salt = salt[:len(password)]
        challenge_response = memoryview_to_bytes(xor(password, salt))
    else:
        warn_server_and_exit(
            EXIT_PASSWORD_REQUIRED,
            "server requested an unsupported digest: %s" % digest,
            "invalid digest")
        return
    if digest:
        # FIX: challenge_response may be a str (hmac hexdigest); convert to
        # bytes before hexlify to avoid a TypeError on Python 3.
        authlog("%s(%s, %s)=%s", digest, binascii.hexlify(password),
                binascii.hexlify(salt),
                binascii.hexlify(strtobytes(challenge_response)))
    self.password_sent = True
    self.remove_packet_handlers("challenge")
    self.send_hello(challenge_response, client_salt)
def text_to_bits(text, encoding='utf-8'): bits = bin(int(binascii.hexlify(text.encode(encoding)), 16))[2:] return bits.zfill(8 * ((len(bits) + 7) // 8))
def hash_password(password): salt = hashlib.sha256(os.urandom(60)).hexdigest().encode('ascii') hashedPW = hashlib.pbkdf2_hmac('sha512', password.encode('utf-8'), salt, 100000) hashedPW = binascii.hexlify(hashedPW) return (salt + hashedPW).decode('ascii')
def cipher_sendoff(chan): # # attemp to create socket, bind & send # try: udp = socket(AF_INET, SOCK_DGRAM) sake = udp.getsockname() udp.bind(sake) udp.settimeout(timeout) auth = "" integ = "" conf = "" payload = "" all_data = "" data = "" n = 0 offset = 0x00 # # finally... actually send packets # # I should really just keep sending them until I get # an error or to the end... but I don't trust me and the target ;) # this should get pretty much all of the data # for i in range(1,16): n = n + 1 if verbose: print('building and sending packet %d on channel %d' % (i, ord(chan))) # # set off that rocket # payload = build_me_a_rocket(offset, chan) # if udp.sendto(" ", (target, PORT)) <= 0: if udp.sendto(payload, (target, PORT)) <= 0: print("couldn't send packet to %s" % target) # catch response data,addr = udp.recvfrom(512) # skip the header data = binascii.hexlify(data[20:]) # print(data) # chop the crc data = data[:-2] completion_code = data[0:2] channel = data[2:4] if completion_code == "c1": print("The remote system doesn't appear to support the Get Channel Cipher Suites command") # print("The remote system doesn't appear to support the Get Channel Cipher Suites command") # sys.exit(2) return(2) # minus completion code and channel # all_data = all_data + data[4:] if verbose: print('chunk [%d]: %s' % (n, data[4:])) if len(data) != 36: # print('remote out of data %d' % len(data)) if verbose: print('all data received (%d bytes): %s' % (len(all_data), all_data)) break # print data + ' <---' # print "CC: %s" % completion_code # print "Chan: %s" % channel offset = offset + 0x01 udp.close() except Exception, e: sys.stderr.write("hmmm.... problems in cipher_sendoff, tonto: %s, bailin'" % e) # sys.exit(3) return(3)
def sweep(repo, live_objects, existing_count, cat_pipe, threshold, compression,
          verbosity):
    """Garbage-collect the repository's packs.

    Walks every *.idx in the pack dir; packs with no live objects are
    deleted outright, packs whose live fraction exceeds the keep threshold
    are kept, and the rest are rewritten into a new pack containing only
    the live objects (then the stale idx/pack pair is removed).
    """
    # Traverse all the packs, saving the (probably) live data.
    ns = Nonlocal()
    ns.stale_files = []
    def remove_stale_files(new_pack_prefix):
        # Callback invoked when a new pack is finished: delete the packs
        # we superseded (but never the pack we just wrote).
        if verbosity and new_pack_prefix:
            log('created ' + path_msg(basename(new_pack_prefix)) + '\n')
        for p in ns.stale_files:
            if new_pack_prefix and p.startswith(new_pack_prefix):
                continue # Don't remove the new pack file
            if verbosity:
                log('removing ' + path_msg(basename(p)) + '\n')
            os.unlink(p)
        if ns.stale_files: # So git cat-pipe will close them
            cat_pipe.restart()
        ns.stale_files = []
    writer = git.PackWriter(objcache_maker=lambda : None,
                            compression_level=compression,
                            run_midx=False,
                            on_pack_finish=remove_stale_files)
    try:
        # FIXME: sanity check .idx names vs .pack names?
        collect_count = 0
        for idx_name in glob.glob(os.path.join(repo.packdir(), b'*.idx')):
            if verbosity:
                qprogress('preserving live data (%d%% complete)\r'
                          % ((float(collect_count) / existing_count) * 100))
            with git.open_idx(idx_name) as idx:
                # First pass: count how many of this pack's objects are live.
                idx_live_count = 0
                for sha in idx:
                    if live_objects.exists(sha):
                        idx_live_count += 1
                collect_count += idx_live_count
                if idx_live_count == 0:
                    # Entirely dead pack: just schedule it for removal.
                    if verbosity:
                        log('deleting %s\n'
                            % path_msg(git.repo_rel(basename(idx_name))))
                    ns.stale_files.append(idx_name)
                    ns.stale_files.append(idx_name[:-3] + b'pack')
                    continue
                live_frac = idx_live_count / float(len(idx))
                # Mostly-live pack: not worth rewriting.
                if live_frac > ((100 - threshold) / 100.0):
                    if verbosity:
                        log('keeping %s (%d%% live)\n'
                            % (git.repo_rel(basename(idx_name)),
                               live_frac * 100))
                    continue
                if verbosity:
                    log('rewriting %s (%.2f%% live)\n'
                        % (basename(idx_name), live_frac * 100))
                # Second pass: copy each live object into the new pack.
                for sha in idx:
                    if live_objects.exists(sha):
                        item_it = cat_pipe.get(hexlify(sha))
                        _, typ, _ = next(item_it)
                        writer.just_write(sha, typ, b''.join(item_it))
                ns.stale_files.append(idx_name)
                ns.stale_files.append(idx_name[:-3] + b'pack')
        if verbosity:
            progress('preserving live data (%d%% complete)\n'
                     % ((float(collect_count) / existing_count) * 100))
        # Nothing should have recreated midx/bloom yet.
        pack_dir = repo.packdir()
        assert(not os.path.exists(os.path.join(pack_dir, b'bup.bloom')))
        assert(not glob.glob(os.path.join(pack_dir, b'*.midx')))
    except BaseException as ex:
        with pending_raise(ex):
            writer.abort()
    # This will finally run midx.
    # Can only change refs (if needed) after this.
    writer.close()
    remove_stale_files(None) # In case we didn't write to the writer.
    if verbosity:
        log('discarded %d%% of objects\n'
            % ((existing_count - count_objects(pack_dir, verbosity))
               / float(existing_count) * 100))
def generate_api_key(cls): return hexlify(os.urandom(40)).decode('utf-8')
f = open("keys.csv", "r") data = f.readlines() f.close() keymatrix = {} c = 0 for l_ in data: (key, code) = l_.rstrip().split(":") keymatrix[code] = key c += 1 print "Key matrix loaded, %d keys" % c out = "" packets = rdpcap(sys.argv[1]) for pkt in packets: if pkt.haslayer(Dot11): if pkt.addr2 == "22:44:66:88:ff:00": t = binascii.hexlify(pkt.info) if t not in keymatrix.keys(): print "** UNKNOWN : %s **" % t if len(keymatrix[t]) == 1: out += keymatrix[t] elif keymatrix[t] == "SPACE": out += " " else: out += "[" + keymatrix[t] + "]" print out
def bytes_to_hex_str(byte_str): return hexlify(byte_str).decode('ascii')
print("NUM_SAMPLES = %d" % NUM_SAMPLES) print("NUM_CAPTURES = %d" % NUM_CAPTURES) print("ANALOG_OFFSET = %f" % ANALOG_OFFSET) ps = ps2000a.PS2000a() # use the finest resolution v-offset you cna. ps.setChannel('A','DC',VRange=VRANGE_PRIMARY,VOffset=ANALOG_OFFSET,enabled=True,BWLimited=False) ps.setChannel('B','DC',VRange=7.0,VOffset=0.0,enabled=True,BWLimited=False) nSamples = NUM_SAMPLES (freq,maxSamples) = ps.setSamplingFrequency(SAMPLE_RATE,nSamples) print("Actual frequency %d Hz" % freq) ser = serial.Serial('/dev/ttyUSB0',9600) if NUM_CAPTURES == 1: traces = np.zeros((1,NUM_SAMPLES),np.float32) data = np.zeros((1,RAND_LEN),np.uint8) data_out = np.zeros((1,RAND_LEN),np.uint8) output_string = RAND_KEY + binascii.hexlify(os.urandom(RAND_LEN)) + "\n" traces[0,:],data_out[0,:] = encryptAndTrace_2CH(ps,output_string,0) data[0,:] = [0x11,0x22,0x33,0x44,0x55,0x66,0x77,0x88,0x99,0xaa,0xbb,0xcc,0xdd,0xee,0xff,0x00][0:RAND_LEN] # np.savez(sys.argv[2],traces=traces,data=data,data_out=data_out) else: traces = np.zeros((NUM_CAPTURES,NUM_SAMPLES),np.float32) data = np.zeros((NUM_CAPTURES,RAND_LEN),np.uint8) data_out = np.zeros((NUM_CAPTURES,RAND_LEN),np.uint8) for i in range(0,NUM_CAPTURES): if FIXED_PT: rand_input = "\xF0\xF0\xF0\xF0\x0F\x0F\x0F\x0F" # [0xF0,0xF0,0xF0,0xF0,0xF0,0xF0,0xF0,0xF0] else: rand_input = os.urandom(RAND_LEN) output_string = RAND_KEY + binascii.hexlify(rand_input) + "\n" time.sleep(0.1) # encryptAndTrace(ps,output_string)
def getAboutText():
    """Build the multi-line 'About' text for this receiver.

    Gathers model/bootloader/chipset/CPU/image/version/temperature
    information from /proc, /sys and /boot, formatted as tab-separated
    "label:\\t\\tvalue" lines.  Returns (AboutText, AboutLcdText) where the
    LCD variant has tabs replaced by spaces.
    """
    AboutText = ""
    AboutText += _("Model:\t\t%s %s\n") % (getMachineBrand(), getMachineName())
    AboutText += _("OEM Model:\t\t%s\n") % getMachineBuild()
    bootloader = ""
    if path.exists('/sys/firmware/devicetree/base/bolt/tag'):
        f = open('/sys/firmware/devicetree/base/bolt/tag', 'r')
        # strip devicetree NUL padding and the trailing newline
        bootloader = f.readline().replace('\x00', '').replace('\n', '')
        f.close()
        AboutText += _("Bootloader:\t\t%s\n") % (bootloader)
    if path.exists('/proc/stb/info/chipset'):
        AboutText += _("Chipset:\t\t%s") % about.getChipSetString() + "\n"
    # CPU clock is hard-coded per known machine build; otherwise read from
    # the devicetree or /proc/cpuinfo.
    cpuMHz = ""
    if getMachineBuild() in ('vusolo4k', 'vuultimo4k', 'vuzero4k'):
        cpuMHz = " (1,5 GHz)"
    elif getMachineBuild() in ('formuler1tc', 'formuler1', 'triplex', 'tiviaraplus'):
        cpuMHz = " (1,3 GHz)"
    elif getMachineBuild() in ('u51', 'u5', 'u53', 'u52', 'u54', 'u5pvr', 'h9', 'h9combo', 'cc1', 'sf8008', 'sf8008s', 'hd60', 'i55plus', 'ustym4kpro', 'v8plus'):
        cpuMHz = " (1,6 GHz)"
    elif getMachineBuild() in ('vuuno4kse', 'vuuno4k', 'dm900', 'dm920', 'gb7252', 'dags7252', 'xc7439', '8100s'):
        cpuMHz = " (1,7 GHz)"
    # NOTE(review): ('alien5') is a plain string, not a tuple, so this is a
    # substring test ('lien' would match too) - likely meant ('alien5',).
    elif getMachineBuild() in ('alien5'):
        cpuMHz = " (2,0 GHz)"
    # NOTE(review): same single-element-"tuple" pitfall as above.
    elif getMachineBuild() in ('vuduo4k'):
        cpuMHz = " (2,1 GHz)"
    elif getMachineBuild() in ('sf5008', 'et13000', 'et1x000', 'hd52', 'hd51', 'sf4008', 'vs1500', 'h7', 'osmio4k'):
        try:
            import binascii
            f = open(
                '/sys/firmware/devicetree/base/cpus/cpu@0/clock-frequency', 'rb')
            clockfrequency = f.read()
            f.close()
            # devicetree stores the frequency as big-endian binary -> int Hz
            cpuMHz = " (%s MHz)" % str(
                round(int(binascii.hexlify(clockfrequency), 16) / 1000000, 1))
        except:
            cpuMHz = " (1,7 GHz)"
    else:
        if path.exists('/proc/cpuinfo'):
            f = open('/proc/cpuinfo', 'r')
            temp = f.readlines()
            f.close()
            try:
                for lines in temp:
                    lisp = lines.split(': ')
                    if lisp[0].startswith('cpu MHz'):
                        #cpuMHz = " (" + lisp[1].replace('\n', '') + " MHz)"
                        cpuMHz = " (" + str(
                            int(float(lisp[1].replace('\n', '')))) + " MHz)"
                        break
            except:
                pass
    AboutText += _("CPU:\t\t%s") % about.getCPUString() + cpuMHz + "\n"
    AboutText += _("Cores:\t\t%s") % about.getCpuCoresString() + "\n"
    imagestarted = ""
    bootname = ''
    if path.exists('/boot/bootname'):
        f = open('/boot/bootname', 'r')
        bootname = f.readline().split('=')[1]
        f.close()
    # The selected boot image is encoded at a build-specific byte offset
    # inside /boot/STARTUP (or /boot/cmdline.txt on some builds).
    if getMachineBuild() in ('cc1', 'sf8008', 'sf8008s', 'ustym4kpro'):
        if path.exists('/boot/STARTUP'):
            f = open('/boot/STARTUP', 'r')
            f.seek(5)
            image = f.read(4)
            if image == "emmc":
                image = "1"
            elif image == "usb0":
                f.seek(13)
                image = f.read(1)
                # map the raw slot digit to the displayed image number
                if image == "1":
                    image = "2"
                elif image == "3":
                    image = "3"
                elif image == "5":
                    image = "4"
                elif image == "7":
                    image = "5"
            f.close()
            if bootname:
                bootname = " (%s)" % bootname
            AboutText += _(
                "Selected Image:\t\t%s") % "STARTUP_" + image + bootname + "\n"
    # NOTE(review): ('osmio4k') is again a plain-string membership test.
    elif getMachineBuild() in ('osmio4k'):
        if path.exists('/boot/STARTUP'):
            f = open('/boot/STARTUP', 'r')
            f.seek(38)
            image = f.read(1)
            f.close()
            if bootname:
                bootname = " (%s)" % bootname
            AboutText += _(
                "Selected Image:\t\t%s") % "STARTUP_" + image + bootname + "\n"
    elif path.exists('/boot/STARTUP'):
        f = open('/boot/STARTUP', 'r')
        f.seek(22)
        image = f.read(1)
        f.close()
        if bootname:
            bootname = " (%s)" % bootname
        AboutText += _(
            "Selected Image:\t\t%s") % "STARTUP_" + image + bootname + "\n"
    elif path.exists('/boot/cmdline.txt'):
        f = open('/boot/cmdline.txt', 'r')
        f.seek(38)
        image = f.read(1)
        f.close()
        if bootname:
            bootname = " (%s)" % bootname
        AboutText += _(
            "Selected Image:\t\t%s") % "STARTUP_" + image + bootname + "\n"
    AboutText += _("Version:\t\t%s") % getImageVersion() + "\n"
    AboutText += _("Build:\t\t%s") % getImageBuild() + "\n"
    AboutText += _("Kernel:\t\t%s") % about.getKernelVersionString() + "\n"
    # driver date comes back as YYYYMMDD; reformat to YYYY-MM-DD
    string = getDriverDate()
    year = string[0:4]
    month = string[4:6]
    day = string[6:8]
    driversdate = '-'.join((year, month, day))
    AboutText += _("Drivers:\t\t%s") % MyDateConverter(driversdate) + "\n"
    AboutText += _(
        "GStreamer:\t\t%s") % about.getGStreamerVersionString() + "\n"
    AboutText += _("Python:\t\t%s") % about.getPythonVersionString() + "\n"
    if getMachineBuild() not in ('vuduo4k', 'v8plus', 'ustym4kpro', 'hd60', 'i55plus',
                                 'osmio4k', 'h9', 'h9combo', 'vuzero4k', 'sf5008', 'et13000', 'et1x000', 'hd51', 'hd52', 'vusolo4k', 'vuuno4k', 'vuuno4kse', 'vuultimo4k', 'sf4008', 'dm820', 'dm7080', 'dm900', 'dm920', 'gb7252', 'dags7252', 'vs1500', 'h7', 'xc7439', '8100s', 'u5', 'u5pvr', 'u52', 'u53', 'u54', 'u51', 'cc1', 'sf8008', 'sf8008s'):
        AboutText += _("Installed:\t\t%s") % about.getFlashDateString() + "\n"
    AboutText += _("Last update:\t\t%s") % MyDateConverter(
        getEnigmaVersionString()) + "\n"
    fp_version = getFPVersion()
    if fp_version is None:
        fp_version = ""
    elif fp_version != 0:
        fp_version = _("Frontprocessor version: %s") % fp_version
        AboutText += fp_version + "\n"
    # system temperature: first sensor path that exists wins
    tempinfo = ""
    if path.exists('/proc/stb/sensors/temp0/value'):
        f = open('/proc/stb/sensors/temp0/value', 'r')
        tempinfo = f.read()
        f.close()
    elif path.exists('/proc/stb/fp/temp_sensor'):
        f = open('/proc/stb/fp/temp_sensor', 'r')
        tempinfo = f.read()
        f.close()
    elif path.exists('/proc/stb/sensors/temp/value'):
        f = open('/proc/stb/sensors/temp/value', 'r')
        tempinfo = f.read()
        f.close()
    if tempinfo and int(tempinfo.replace('\n', '')) > 0:
        # UTF-8 encoded degree sign
        mark = str('\xc2\xb0')
        AboutText += _("System temperature:\t%s") % tempinfo.replace(
            '\n', '').replace(' ', '') + mark + "C\n"
    # processor temperature, same first-match strategy
    tempinfo = ""
    if path.exists('/proc/stb/fp/temp_sensor_avs'):
        f = open('/proc/stb/fp/temp_sensor_avs', 'r')
        tempinfo = f.read()
        f.close()
    elif path.exists('/proc/stb/power/avs'):
        f = open('/proc/stb/power/avs', 'r')
        tempinfo = f.read()
        f.close()
    elif path.exists('/sys/devices/virtual/thermal/thermal_zone0/temp'):
        try:
            f = open('/sys/devices/virtual/thermal/thermal_zone0/temp', 'r')
            tempinfo = f.read()
            # sysfs reports millidegrees; drop the last 4 chars (3 digits +
            # newline) to get whole degrees
            tempinfo = tempinfo[:-4]
            f.close()
        except:
            tempinfo = ""
    elif path.exists('/proc/hisi/msp/pm_cpu'):
        try:
            for line in open('/proc/hisi/msp/pm_cpu').readlines():
                line = [x.strip() for x in line.strip().split(":")]
                # NOTE(review): 'in ("Tsensor")' is a substring test, and
                # the first split("=") result is immediately overwritten -
                # looks like leftover debugging; confirm intent.
                if line[0] in ("Tsensor"):
                    temp = line[1].split("=")
                    temp = line[1].split(" ")
                    tempinfo = temp[2]
        except:
            tempinfo = ""
    if tempinfo and int(tempinfo.replace('\n', '')) > 0:
        mark = str('\xc2\xb0')
        AboutText += _("Processor temperature:\t%s") % tempinfo.replace(
            '\n', '').replace(' ', '') + mark + "C\n"
    AboutLcdText = AboutText.replace('\t', ' ')
    return AboutText, AboutLcdText
def b64_to_hex(data: str) -> str: return binascii.hexlify(base64.b64decode(data)).decode('utf-8')
def body_iq_test_date_change_during_activity(device):
    """Interactive device test: changing the date must not corrupt an
    ongoing/finished activity's timestamps.

    Flow: wait for an activity-STARTED report, publish a new date (1000),
    request an intermediate report (must be ONGOING with end > begin),
    wait for the FINISHED report, reconnect, publish another date (500)
    and verify the final report is still consistent.  Returns the
    (possibly reconnected) device handle.
    """
    print("------------------------------------------")
    print("-  DATE CHANGE DURING ACTIVITY TEST      -")
    print("------------------------------------------")
    print("")
    print("Start an activity")
    print("")
    pub = device.itm_poll(expect="publish", timeout=30)
    assert pub != None
    DecodedActivity = Activity()
    DecodedActivity.ParseFromString(pub['data'])
    # the step count lives in a oneof-style sub-message per activity type
    step_count = 0
    if DecodedActivity.HasField('walking_activity'):
        step_count = DecodedActivity.walking_activity.step_count
    elif DecodedActivity.HasField('running_activity'):
        step_count = DecodedActivity.running_activity.step_count
    print("Activity " + activity[DecodedActivity.type] + "\n" +
          " begin " + str(DecodedActivity.timestamp_begin) +
          " end " + str(DecodedActivity.timestamp_end) +
          " step count " + str(step_count) + "\n" +
          " status " + status[DecodedActivity.activity_status])
    # Verify that the activity started report has been received
    assert status[DecodedActivity.activity_status] == 'STARTED'
    # Instantiate a datetime publish
    date = DateTime()
    date.datetime = 1000
    # Publish to intel/core/
    # Publish Date
    print("Set new date: 1000")
    device.itm_publish("\x49\x43\x44\x44\x00", date.SerializeToString())
    time.sleep(10)
    # Send request (NULL data)
    pub = device.itm_topic_req("\x49\x42\x41\x00", "\x00")
    assert pub != None
    print("Report request sended")
    print("topic_rsp: req_id %d status %d data %s"
          % (pub['req_id'], pub['status'], binascii.hexlify(pub['data'])))
    DecodedActivity = Activity()
    DecodedActivity.ParseFromString(pub['data'])
    step_count = 0
    if DecodedActivity.HasField('walking_activity'):
        step_count = DecodedActivity.walking_activity.step_count
    elif DecodedActivity.HasField('running_activity'):
        step_count = DecodedActivity.running_activity.step_count
    print("Activity " + activity[DecodedActivity.type] + "\n" +
          " begin " + str(DecodedActivity.timestamp_begin) +
          " end " + str(DecodedActivity.timestamp_end) +
          " step count " + str(step_count) + "\n" +
          " status " + status[DecodedActivity.activity_status])
    # Verify that the activity is an ongoing one
    assert status[DecodedActivity.activity_status] == 'ONGOING'
    # Verify that the dates are consistent
    # NOTE(review): this compares the *string* forms of the timestamps,
    # not their numeric values - confirm that is intended.
    assert str(DecodedActivity.timestamp_end) > str(DecodedActivity.timestamp_begin)
    print("")
    print("Stop your activity")
    print("")
    time.sleep(15)
    pub = device.itm_poll(expect="publish", timeout=30)
    assert pub != None
    DecodedActivity = Activity()
    DecodedActivity.ParseFromString(pub['data'])
    step_count = 0
    if DecodedActivity.HasField('walking_activity'):
        step_count = DecodedActivity.walking_activity.step_count
    elif DecodedActivity.HasField('running_activity'):
        step_count = DecodedActivity.running_activity.step_count
    print("Activity " + activity[DecodedActivity.type] + "\n" +
          " begin " + str(DecodedActivity.timestamp_begin) +
          " end " + str(DecodedActivity.timestamp_end) +
          " step count " + str(step_count) + "\n" +
          " status " + status[DecodedActivity.activity_status])
    # Verify that the activity started report has been received
    assert status[DecodedActivity.activity_status] == 'FINISHED'
    # Verify that the dates are consistent
    assert str(DecodedActivity.timestamp_end) > str(DecodedActivity.timestamp_begin)
    device.disconnect()
    print("")
    print("Start an activity")
    print("")
    time.sleep(10)
    # reconnect, subscribing to the activity topic
    device = util.connection(["\x49\x42\x41\x00"])
    # Instantiate a datetime publish
    date2 = DateTime()
    date2.datetime = 500
    # Publish to intel/core/
    # Publish Date
    print("Set new date: 500")
    device.itm_publish("\x49\x43\x44\x44\x00", date2.SerializeToString())
    time.sleep(10)
    print("Stop your activity")
    print("")
    time.sleep(25)
    rl = device.itm_subscribe_req("\x49\x42\x41\x00\x00")
    pub = device.itm_poll(expect="publish", timeout=30)
    assert pub != None
    DecodedActivity = Activity()
    DecodedActivity.ParseFromString(pub['data'])
    step_count = 0
    if DecodedActivity.HasField('walking_activity'):
        step_count = DecodedActivity.walking_activity.step_count
    elif DecodedActivity.HasField('running_activity'):
        step_count = DecodedActivity.running_activity.step_count
    print("Activity " + activity[DecodedActivity.type] + "\n" +
          " begin " + str(DecodedActivity.timestamp_begin) +
          " end " + str(DecodedActivity.timestamp_end) +
          " step count " + str(step_count) + "\n" +
          " status " + status[DecodedActivity.activity_status])
    # Verify that an activity finished report has been received
    assert status[DecodedActivity.activity_status] == 'FINISHED'
    # Verify that the dates are consistent
    assert str(DecodedActivity.timestamp_end) > str(DecodedActivity.timestamp_begin)
    print("")
    print("TESTS PASSED")
    return device
def _processSettingForWrite(value): if isinstance(value, list): value = binascii.hexlify('\0'.join(value)) elif isinstance(value, bool): value = value and 'true' or 'false' return str(value)
def on_verified_privmsg(self, nick, message):
    """Dispatch a signature-verified private message from *nick*.

    Strips the trailing signature fields, splits the payload on
    COMMAND_PREFIX, decrypts commands that require it, then routes each
    command to the matching on_* callback (taker commands: error/pubkey/
    ioauth/sig; maker commands: fill/auth/tx/push).
    """
    #Marks the nick as active on this channel; note *only* if verified.
    #Otherwise squatter/attacker can persuade us to send privmsgs to him.
    if self.on_privmsg_trigger:
        self.on_privmsg_trigger(nick, self)
    #strip sig from message for processing, having verified
    message = " ".join(message[1:].split(" ")[:-2])
    for command in message.split(COMMAND_PREFIX):
        _chunks = command.split(" ")
        #Decrypt if necessary
        if _chunks[0] in encrypted_commands:
            box, encrypt = self.daemon.mcc.get_encryption_box(
                _chunks[0], nick)
            if encrypt:
                if not box:
                    log.debug(
                        'error, dont have encryption box object for ' + nick +
                        ', dropping message')
                    return
                # need to decrypt everything after the command string
                to_decrypt = ''.join(_chunks[1:])
                try:
                    decrypted = decode_decrypt(to_decrypt, box).decode('ascii')
                except Exception as e:
                    log.debug('Error when decrypting, skipping: ' + repr(e))
                    return
                #rebuild the chunks array as if it had been plaintext
                _chunks = [_chunks[0]] + decrypted.split(" ")
        # looks like a very similar pattern for all of these
        # check for a command name, parse arguments, call a function
        # maybe we need some eval() trickery to do it better
        try:
            # orderbook watch commands
            if self.check_for_orders(nick, _chunks):
                pass
            # taker commands
            elif _chunks[0] == 'error':
                error = " ".join(_chunks[1:])
                if self.on_error:
                    self.on_error(error)
            elif _chunks[0] == 'pubkey':
                maker_pk = _chunks[1]
                if self.on_pubkey:
                    self.on_pubkey(nick, maker_pk)
            elif _chunks[0] == 'ioauth':
                utxo_list = _chunks[1].split(',')
                auth_pub = _chunks[2]
                cj_addr = _chunks[3]
                change_addr = _chunks[4]
                btc_sig = _chunks[5]
                if self.on_ioauth:
                    self.on_ioauth(nick, utxo_list, auth_pub, cj_addr,
                                   change_addr, btc_sig)
            elif _chunks[0] == 'sig':
                sig = _chunks[1]
                if self.on_sig:
                    self.on_sig(nick, sig)
            # maker commands
            if self.check_for_commitments(nick, _chunks, private=True):
                pass
            if _chunks[0] == 'fill':
                try:
                    oid = int(_chunks[1])
                    amount = int(_chunks[2])
                    taker_pk = _chunks[3]
                    # the PoDLE commitment is an optional 5th field
                    if len(_chunks) > 4:
                        commit = _chunks[4]
                    else:
                        commit = None
                except (ValueError, IndexError) as e:
                    self.send_error(nick, str(e))
                    return
                if self.on_order_fill:
                    self.on_order_fill(nick, oid, amount, taker_pk, commit)
            elif _chunks[0] == 'auth':
                #Note index error logically impossible, would have thrown
                #in sig check (zero message after cmd not allowed)
                cr = _chunks[1]
                if self.on_seen_auth:
                    self.on_seen_auth(nick, cr)
            elif _chunks[0] == 'tx':
                b64tx = _chunks[1]
                try:
                    # transactions travel base64-encoded; convert to hex
                    txhex = binascii.hexlify(
                        base64.b64decode(b64tx)).decode('ascii')
                except TypeError as e:
                    self.send_error(nick, 'bad base64 tx. ' + repr(e))
                    return
                if self.on_seen_tx:
                    self.on_seen_tx(nick, txhex)
            elif _chunks[0] == 'push':
                b64tx = _chunks[1]
                try:
                    txhex = binascii.hexlify(
                        base64.b64decode(b64tx)).decode('ascii')
                except TypeError as e:
                    self.send_error(nick, 'bad base64 tx. ' + repr(e))
                    return
                if self.on_push_tx:
                    self.on_push_tx(nick, txhex)
        except (IndexError, ValueError):
            # TODO proper error handling
            log.debug('cj peer error TODO handle')
            continue
from ledgerblue.comm import getDongle from struct import pack import binascii print("Open Device") dongle = getDongle(False) print("Getting pub keys...") response = dongle.exchange(pack('>6B', 0x00, 0x20, 0x01, 0x00, 0x01, 0x00)) A = response[0:32] B = response[32:64] pub = response[64:] print("Public ViewKey: %s"%binascii.hexlify(A)) print("Public SpendKey: %s"%binascii.hexlify(B)) print("Public Address: %s"%pub)
def run_test(self):
    """End-to-end test of the node address index RPCs.

    Exercises getaddresstxids / getaddressbalance / getaddressdeltas /
    getaddressutxos / getaddressmempool against p2pkh and p2sh outputs,
    including height-range filters, multi-address queries, reorgs and
    mempool entries.  Addresses/keys below are fixed regtest fixtures.
    """
    print("Mining blocks...")
    self.nodes[0].generate(105)
    self.sync_all()
    chain_height = self.nodes[1].getblockcount()
    assert_equal(chain_height, 105)
    assert_equal(self.nodes[1].getbalance(), 0)
    assert_equal(self.nodes[2].getbalance(), 0)
    # Check that balances are correct
    balance0 = self.nodes[1].getaddressbalance("93bVhahvUKmQu8gu9g3QnPPa2cxFK98pMB")
    assert_equal(balance0["balance"], 0)
    # Check p2pkh and p2sh address indexes
    print("Testing p2pkh and p2sh address index...")
    txid0 = self.nodes[0].sendtoaddress("yMNJePdcKvXtWWQnFYHNeJ5u8TF2v1dfK4", 10)
    self.nodes[0].generate(1)
    txidb0 = self.nodes[0].sendtoaddress("93bVhahvUKmQu8gu9g3QnPPa2cxFK98pMB", 10)
    self.nodes[0].generate(1)
    txid1 = self.nodes[0].sendtoaddress("yMNJePdcKvXtWWQnFYHNeJ5u8TF2v1dfK4", 15)
    self.nodes[0].generate(1)
    txidb1 = self.nodes[0].sendtoaddress("93bVhahvUKmQu8gu9g3QnPPa2cxFK98pMB", 15)
    self.nodes[0].generate(1)
    txid2 = self.nodes[0].sendtoaddress("yMNJePdcKvXtWWQnFYHNeJ5u8TF2v1dfK4", 20)
    self.nodes[0].generate(1)
    txidb2 = self.nodes[0].sendtoaddress("93bVhahvUKmQu8gu9g3QnPPa2cxFK98pMB", 20)
    self.nodes[0].generate(1)
    self.sync_all()
    txids = self.nodes[1].getaddresstxids("yMNJePdcKvXtWWQnFYHNeJ5u8TF2v1dfK4")
    assert_equal(len(txids), 3)
    assert_equal(txids[0], txid0)
    assert_equal(txids[1], txid1)
    assert_equal(txids[2], txid2)
    txidsb = self.nodes[1].getaddresstxids("93bVhahvUKmQu8gu9g3QnPPa2cxFK98pMB")
    assert_equal(len(txidsb), 3)
    assert_equal(txidsb[0], txidb0)
    assert_equal(txidsb[1], txidb1)
    assert_equal(txidsb[2], txidb2)
    # Check that limiting by height works
    print("Testing querying txids by range of block heights..")
    height_txids = self.nodes[1].getaddresstxids({
        "addresses": ["93bVhahvUKmQu8gu9g3QnPPa2cxFK98pMB"],
        "start": 105,
        "end": 110
    })
    assert_equal(len(height_txids), 2)
    assert_equal(height_txids[0], txidb0)
    assert_equal(height_txids[1], txidb1)
    # Check that multiple addresses works
    multitxids = self.nodes[1].getaddresstxids({"addresses": ["93bVhahvUKmQu8gu9g3QnPPa2cxFK98pMB", "yMNJePdcKvXtWWQnFYHNeJ5u8TF2v1dfK4"]})
    assert_equal(len(multitxids), 6)
    assert_equal(multitxids[0], txid0)
    assert_equal(multitxids[1], txidb0)
    assert_equal(multitxids[2], txid1)
    assert_equal(multitxids[3], txidb1)
    assert_equal(multitxids[4], txid2)
    assert_equal(multitxids[5], txidb2)
    # Check that balances are correct
    balance0 = self.nodes[1].getaddressbalance("93bVhahvUKmQu8gu9g3QnPPa2cxFK98pMB")
    assert_equal(balance0["balance"], 45 * 100000000)
    # Check that outputs with the same address will only return one txid
    print("Testing for txid uniqueness...")
    addressHash = binascii.unhexlify("FE30B718DCF0BF8A2A686BF1820C073F8B2C3B37")
    scriptPubKey = CScript([OP_HASH160, addressHash, OP_EQUAL])
    unspent = self.nodes[0].listunspent()
    tx = CTransaction()
    tx.vin = [CTxIn(COutPoint(int(unspent[0]["txid"], 16), unspent[0]["vout"]))]
    # two outputs paying the same script in one tx
    tx.vout = [CTxOut(10, scriptPubKey), CTxOut(11, scriptPubKey)]
    tx.rehash()
    signed_tx = self.nodes[0].signrawtransaction(binascii.hexlify(tx.serialize()).decode("utf-8"))
    sent_txid = self.nodes[0].sendrawtransaction(signed_tx["hex"], True, False, True)
    self.nodes[0].generate(1)
    self.sync_all()
    txidsmany = self.nodes[1].getaddresstxids("93bVhahvUKmQu8gu9g3QnPPa2cxFK98pMB")
    assert_equal(len(txidsmany), 4)
    assert_equal(txidsmany[3], sent_txid)
    # Check that balances are correct
    print("Testing balances...")
    balance0 = self.nodes[1].getaddressbalance("93bVhahvUKmQu8gu9g3QnPPa2cxFK98pMB")
    assert_equal(balance0["balance"], 45 * 100000000 + 21)
    # Check that balances are correct after spending
    print("Testing balances after spending...")
    privkey2 = "cU4zhap7nPJAWeMFu4j6jLrfPmqakDAzy8zn8Fhb3oEevdm4e5Lc"
    address2 = "yeMpGzMj3rhtnz48XsfpB8itPHhHtgxLc3"
    addressHash2 = binascii.unhexlify("C5E4FB9171C22409809A3E8047A29C83886E325D")
    scriptPubKey2 = CScript([OP_DUP, OP_HASH160, addressHash2, OP_EQUALVERIFY, OP_CHECKSIG])
    self.nodes[0].importprivkey(privkey2)
    unspent = self.nodes[0].listunspent()
    tx = CTransaction()
    tx.vin = [CTxIn(COutPoint(int(unspent[0]["txid"], 16), unspent[0]["vout"]))]
    amount = int(unspent[0]["amount"] * 100000000)
    tx.vout = [CTxOut(amount, scriptPubKey2)]
    tx.rehash()
    signed_tx = self.nodes[0].signrawtransaction(binascii.hexlify(tx.serialize()).decode("utf-8"))
    spending_txid = self.nodes[0].sendrawtransaction(signed_tx["hex"], True, False, True)
    self.nodes[0].generate(1)
    self.sync_all()
    balance1 = self.nodes[1].getaddressbalance(address2)
    assert_equal(balance1["balance"], amount)
    # spend it again, sending change back to address2
    tx = CTransaction()
    tx.vin = [CTxIn(COutPoint(int(spending_txid, 16), 0))]
    send_amount = 1 * 100000000 + 12840
    change_amount = amount - send_amount - 10000
    tx.vout = [CTxOut(change_amount, scriptPubKey2), CTxOut(send_amount, scriptPubKey)]
    tx.rehash()
    signed_tx = self.nodes[0].signrawtransaction(binascii.hexlify(tx.serialize()).decode("utf-8"))
    sent_txid = self.nodes[0].sendrawtransaction(signed_tx["hex"], True, False, True)
    self.nodes[0].generate(1)
    self.sync_all()
    balance2 = self.nodes[1].getaddressbalance(address2)
    assert_equal(balance2["balance"], change_amount)
    # Check that deltas are returned correctly
    deltas = self.nodes[1].getaddressdeltas({"addresses": [address2], "start": 0, "end": 200})
    balance3 = 0
    for delta in deltas:
        balance3 += delta["satoshis"]
    assert_equal(balance3, change_amount)
    assert_equal(deltas[0]["address"], address2)
    assert_equal(deltas[0]["blockindex"], 1)
    # Check that entire range will be queried
    deltasAll = self.nodes[1].getaddressdeltas({"addresses": [address2]})
    assert_equal(len(deltasAll), len(deltas))
    # Check that deltas can be returned from range of block heights
    deltas = self.nodes[1].getaddressdeltas({"addresses": [address2], "start": 113, "end": 113})
    assert_equal(len(deltas), 1)
    # Check that unspent outputs can be queried
    print("Testing utxos...")
    utxos = self.nodes[1].getaddressutxos({"addresses": [address2]})
    assert_equal(len(utxos), 1)
    assert_equal(utxos[0]["satoshis"], change_amount)
    # Check that indexes will be updated with a reorg
    print("Testing reorg...")
    best_hash = self.nodes[0].getbestblockhash()
    self.nodes[0].invalidateblock(best_hash)
    self.nodes[1].invalidateblock(best_hash)
    self.nodes[2].invalidateblock(best_hash)
    self.nodes[3].invalidateblock(best_hash)
    # Allow some time for the reorg to start
    set_mocktime(get_mocktime() + 2)
    set_node_times(self.nodes, get_mocktime())
    self.sync_all()
    balance4 = self.nodes[1].getaddressbalance(address2)
    assert_equal(balance4, balance1)
    utxos2 = self.nodes[1].getaddressutxos({"addresses": [address2]})
    assert_equal(len(utxos2), 1)
    assert_equal(utxos2[0]["satoshis"], amount)
    # Check sorting of utxos
    self.nodes[2].generate(150)
    txidsort1 = self.nodes[2].sendtoaddress(address2, 50)
    self.nodes[2].generate(1)
    txidsort2 = self.nodes[2].sendtoaddress(address2, 50)
    self.nodes[2].generate(1)
    self.sync_all()
    utxos3 = self.nodes[1].getaddressutxos({"addresses": [address2]})
    assert_equal(len(utxos3), 3)
    assert_equal(utxos3[0]["height"], 114)
    assert_equal(utxos3[1]["height"], 264)
    assert_equal(utxos3[2]["height"], 265)
    # Check mempool indexing
    print("Testing mempool indexing...")
    privKey3 = "cRyrMvvqi1dmpiCmjmmATqjAwo6Wu7QTjKu1ABMYW5aFG4VXW99K"
    address3 = "yWB15aAdpeKuSaQHFVJpBDPbNSLZJSnDLA"
    addressHash3 = binascii.unhexlify("6C186B3A308A77C779A9BB71C3B5A7EC28232A13")
    scriptPubKey3 = CScript([OP_DUP, OP_HASH160, addressHash3, OP_EQUALVERIFY, OP_CHECKSIG])
    # address4 = "2N8oFVB2vThAKury4vnLquW2zVjsYjjAkYQ"
    scriptPubKey4 = CScript([OP_HASH160, addressHash3, OP_EQUAL])
    unspent = self.nodes[2].listunspent()
    tx = CTransaction()
    tx.vin = [CTxIn(COutPoint(int(unspent[0]["txid"], 16), unspent[0]["vout"]))]
    amount = int(unspent[0]["amount"] * 100000000)
    tx.vout = [CTxOut(amount, scriptPubKey3)]
    tx.rehash()
    signed_tx = self.nodes[2].signrawtransaction(binascii.hexlify(tx.serialize()).decode("utf-8"))
    memtxid1 = self.nodes[2].sendrawtransaction(signed_tx["hex"], True, False, True)
    set_mocktime(get_mocktime() + 2)
    set_node_times(self.nodes, get_mocktime())
    tx2 = CTransaction()
    tx2.vin = [CTxIn(COutPoint(int(unspent[1]["txid"], 16), unspent[1]["vout"]))]
    amount = int(unspent[1]["amount"] * 100000000)
    tx2.vout = [
        CTxOut(int(amount / 4), scriptPubKey3),
        CTxOut(int(amount / 4), scriptPubKey3),
        CTxOut(int(amount / 4), scriptPubKey4),
        CTxOut(int(amount / 4), scriptPubKey4)
    ]
    tx2.rehash()
    signed_tx2 = self.nodes[2].signrawtransaction(binascii.hexlify(tx2.serialize()).decode("utf-8"))
    memtxid2 = self.nodes[2].sendrawtransaction(signed_tx2["hex"], True, False, True)
    set_mocktime(get_mocktime() + 2)
    set_node_times(self.nodes, get_mocktime())
    mempool = self.nodes[2].getaddressmempool({"addresses": [address3]})
    assert_equal(len(mempool), 3)
    assert_equal(mempool[0]["txid"], memtxid1)
    assert_equal(mempool[0]["address"], address3)
    assert_equal(mempool[0]["index"], 0)
    assert_equal(mempool[1]["txid"], memtxid2)
    assert_equal(mempool[1]["index"], 0)
    assert_equal(mempool[2]["txid"], memtxid2)
    assert_equal(mempool[2]["index"], 1)
    self.nodes[2].generate(1);
    self.sync_all();
    # once mined, the mempool index entries must disappear
    mempool2 = self.nodes[2].getaddressmempool({"addresses": [address3]})
    assert_equal(len(mempool2), 0)
    tx = CTransaction()
    tx.vin = [
        CTxIn(COutPoint(int(memtxid2, 16), 0)),
        CTxIn(COutPoint(int(memtxid2, 16), 1))
    ]
    tx.vout = [CTxOut(int(amount / 2 - 10000), scriptPubKey2)]
    tx.rehash()
    self.nodes[2].importprivkey(privKey3)
    signed_tx3 = self.nodes[2].signrawtransaction(binascii.hexlify(tx.serialize()).decode("utf-8"))
    memtxid3 = self.nodes[2].sendrawtransaction(signed_tx3["hex"], True, False, True)
    set_mocktime(get_mocktime() + 2)
    set_node_times(self.nodes, get_mocktime())
    # spends show up in the mempool index with prevtxid/prevout
    mempool3 = self.nodes[2].getaddressmempool({"addresses": [address3]})
    assert_equal(len(mempool3), 2)
    assert_equal(mempool3[0]["prevtxid"], memtxid2)
    assert_equal(mempool3[0]["prevout"], 0)
    assert_equal(mempool3[1]["prevtxid"], memtxid2)
    assert_equal(mempool3[1]["prevout"], 1)
    # sending and receiving to the same address
    privkey1 = "cMvZn1pVWntTEcsK36ZteGQXRAcZ8CoTbMXF1QasxBLdnTwyVQCc"
    address1 = "yM9Eed1bxjy7tYxD3yZDHxjcVT48WdRoB1"
    address1hash = binascii.unhexlify("0909C84A817651502E020AAD0FBCAE5F656E7D8A")
    address1script = CScript([OP_DUP, OP_HASH160, address1hash, OP_EQUALVERIFY, OP_CHECKSIG])
    self.nodes[0].sendtoaddress(address1, 10)
    self.nodes[0].generate(1)
    self.sync_all()
    utxos = self.nodes[1].getaddressutxos({"addresses": [address1]})
    assert_equal(len(utxos), 1)
    tx = CTransaction()
    tx.vin = [
        CTxIn(COutPoint(int(utxos[0]["txid"], 16), utxos[0]["outputIndex"]))
    ]
    amount = int(utxos[0]["satoshis"] - 10000)
    tx.vout = [CTxOut(amount, address1script)]
    tx.rehash()
    self.nodes[0].importprivkey(privkey1)
    signed_tx = self.nodes[0].signrawtransaction(binascii.hexlify(tx.serialize()).decode("utf-8"))
    mem_txid = self.nodes[0].sendrawtransaction(signed_tx["hex"], True, False, True)
    self.sync_all()
    # self-spend produces both a spend delta and a receive delta
    mempool_deltas = self.nodes[2].getaddressmempool({"addresses": [address1]})
    assert_equal(len(mempool_deltas), 2)
    print("Passed\n")
def _print_activity_report(payload):
    """Decode an Activity protobuf from *payload*, print its summary, and
    return the decoded message so the caller can check its status."""
    decoded = Activity()
    decoded.ParseFromString(payload)
    # Step count lives in one of two optional sub-messages, depending on
    # the activity type; default to 0 when neither is present.
    step_count = 0
    if decoded.HasField('walking_activity'):
        step_count = decoded.walking_activity.step_count
    elif decoded.HasField('running_activity'):
        step_count = decoded.running_activity.step_count
    print("Activity " + activity[decoded.type] + "\n" +
          " begin " + str(decoded.timestamp_begin) +
          " end " + str(decoded.timestamp_end) +
          " step count " + str(step_count) + "\n" +
          " status " + status[decoded.activity_status])
    return decoded


def body_iq_test_activity_start_report_request_and_stop(device):
    """Interactive activity start/report/stop test.

    Waits for an activity STARTED report, requests an intermediate
    (ONGOING) report, then waits for the FINISHED report.

    :param device: harness handle providing itm_poll / itm_topic_req.
    Raises AssertionError when a report is missing or has the wrong status.
    """
    print("-------------------------------------")
    print("- ACTIVITY START AND STOP TEST -")
    print("-------------------------------------")
    print("")
    print("Start an activity")
    print("")
    pub = device.itm_poll(expect="publish", timeout=30)
    assert pub is not None
    decoded = _print_activity_report(pub['data'])
    # Verify that an activity started report has been received
    assert status[decoded.activity_status] == 'STARTED'
    print("")
    print("ACTIVITY STARTED: OK")
    print("")
    # Send request (NULL data)
    pub = device.itm_topic_req("\x49\x42\x41\x00", "\x00")
    assert pub is not None
    print("Report request sended")
    print("topic_rsp: req_id %d status %d data %s" % (pub['req_id'], pub['status'], binascii.hexlify(pub['data'])))
    decoded = _print_activity_report(pub['data'])
    # Verify that the activity is an ongoing one
    assert status[decoded.activity_status] == 'ONGOING'
    print("")
    print("INTERMEDIATE ACTIVITY REPORT: OK")
    print("")
    print("Stop your activity")
    print("")
    pub = device.itm_poll(expect="publish", timeout=30)
    assert pub is not None
    decoded = _print_activity_report(pub['data'])
    # Verify that an activity finished report has been received
    assert status[decoded.activity_status] == 'FINISHED'
    print("")
    print("ACTIVITY FINISHED: OK")
    print("")
    print("TESTS PASSED")
def _to_hex(my_bytes): return binascii.hexlify(my_bytes).decode('ascii')
# Identify a file's type by comparing its leading bytes against known magic
# numbers. listOf4HexValues is assumed to be a list of dicts with keys
# "magicNumber", "hexLimit", "variation" and "extension" — confirm schema
# with the defining module.
with open(testFile, 'rb') as f:
    content = f.read()

os.system("clear")
print("Printing the hex values of the script")

for dictGrabber in listOf4HexValues:
    if dictGrabber.get("variation") == 0:
        limitGrab = dictGrabber.get("hexLimit") + 1
        hexGrab = dictGrabber.get("magicNumber").lower()
        # Hex-encode the first limitGrab bytes in a single pass. Slicing
        # (rather than the old per-index loop) is required on Python 3,
        # where content[i] is an int and binascii.hexlify(content[i])
        # raises TypeError.
        grabbedHex = binascii.hexlify(content[:limitGrab]).decode('ascii')
        if grabbedHex == hexGrab:
            print("Found a match!")
            kindOfFile = dictGrabber.get("extension")
            print(kindOfFile)
            exit()
        else:
            continue
    elif dictGrabber.get("variation") == 1:
        if dictGrabber.get("hexLimit") >= 12:
            # TODO: variable-offset magic numbers are not handled yet.
            # The original body was only a comment (a SyntaxError); 'pass'
            # keeps the intended no-op without aborting the scan.
            pass
        else:
            break
def _hexlify(data): result = binascii.hexlify(data) return str(result.decode() if isinstance(result, bytes) else result)
def _hash_chunks(self, chunks): hasher = self._make_hasher() for chunk in chunks: hasher.update(chunk) hashbytes = hasher.digest() return binascii.hexlify(hashbytes).decode('utf-8')
def _encrypt(s, key):
    """AES-CBC encrypt *s* under the hex-encoded *key*.

    A random 16-byte IV is generated per call. Returns the string
    '<cipher-hex> <iv-hex>'. *s* must already be block-aligned.
    """
    init_vec = Random.get_random_bytes(16)
    aes = AES.new(bytes(bytearray.fromhex(key)), AES.MODE_CBC, init_vec)
    cipher_hex = binascii.hexlify(aes.encrypt(s))
    iv_hex = binascii.hexlify(init_vec)
    return (cipher_hex + b' ' + iv_hex).decode('utf-8')
import json
import os
import sys
import shutil
import tempfile
import zipfile
import hashlib
import binascii
from xml.dom import minidom

from six import reraise, string_types

import mozfile
from mozlog.unstructured import getLogger

# Per-process random salt, hex-encoded. Presumably mixed into hashes to
# derive process-unique identifiers — confirm against the class body below.
_SALT = binascii.hexlify(os.urandom(32))
# Suffix appended to ids of temporarily-installed add-ons.
_TEMPORARY_ADDON_SUFFIX = "@temporary-addon"

# Logger for 'mozprofile.addons' module
module_logger = getLogger(__name__)


class AddonFormatError(Exception):
    """Exception for not well-formed add-on manifest files"""


class AddonManager(object):
    """
    Handles all operations regarding addons in a profile including:
    installing and cleaning addons
    """
# Round-trip demo (Python 2): pad the input with CMS padding, encrypt and
# decrypt it with AES-ECB, then strip the padding again.
plaintext=val

def encrypt(plaintext,key, mode):
    # Encrypt plaintext under key with the given AES mode (no IV, so only
    # IV-less modes such as ECB work here).
    encobj = AES.new(key,mode)
    return(encobj.encrypt(plaintext))

def decrypt(ciphertext,key, mode):
    # Inverse of encrypt() for the same key/mode.
    encobj = AES.new(key,mode)
    return(encobj.decrypt(ciphertext))

# Derive a 256-bit AES key from the password. NOTE(review): a plain SHA-256
# of a password is weak key derivation; a KDF would be preferable.
key = hashlib.sha256(password).digest()
plaintext = Padding.appendPadding(plaintext,blocksize=Padding.AES_blocksize,mode='CMS')
print "After padding (CMS): "+binascii.hexlify(bytearray(plaintext))
ciphertext = encrypt(plaintext,key,AES.MODE_ECB)
print "Cipher (ECB): "+binascii.hexlify(bytearray(ciphertext))
plaintext = decrypt(ciphertext,key,AES.MODE_ECB)
plaintext = Padding.removePadding(plaintext,mode='CMS')
print " decrypt: "+plaintext

# Second demo: same input padded with the 'ZeroLen' (bit padding) scheme.
plaintext=val
plaintext = Padding.appendPadding(plaintext,blocksize=Padding.AES_blocksize,mode='ZeroLen')
print "\nAfter padding (Bit): "+binascii.hexlify(bytearray(plaintext))
def decode_bigint_be(data):
    """Decode a big-endian (network byte order) unsigned integer from bytes.

    :param data: bytes-like object holding the big-endian representation.
    :return: the decoded non-negative integer; 0 for empty input.

    The previous implementation raised ValueError on b'' because
    int('', 16) is invalid; empty input now decodes to 0.
    """
    if not data:
        return 0
    return int(binascii.hexlify(data).decode('ascii'), 16)
def handleNotification(self, cHandle, data):
    """Callback for an incoming BLE notification.

    Logs the characteristic handle and the payload as hex; no value is
    returned.
    """
    payload = bytearray(data)
    print("rx notif %s=%s" % (cHandle, binascii.hexlify(payload)))
    if usebase64:
        # Python 2 base64 codec; strip trailing whitespace and embedded
        # newlines from the encoded output.
        msg = msg.encode('base64').rstrip()
        msg = msg.replace('\n', '')
    return msg

def AES_CBC_decrypt(key, ciphered_msg, usebase64=False):
    # Decrypt ciphered_msg (optionally base64-encoded) and unpad it.
    # NOTE(review): despite the name, this uses AES.MODE_ECB — confirm
    # whether CBC was actually intended; the matching encrypt must use the
    # same mode for the round-trip below to work.
    if usebase64:
        to_decipher_str = ciphered_msg.decode('base64')
    else:
        to_decipher_str = ciphered_msg
    cipher = AES.new(key, AES.MODE_ECB)
    msg = cipher.decrypt(to_decipher_str)
    return unpad_msg(msg)

if __name__ == '__main__':
    #############################################################
    # check AES_CBC_encrypt and AES_CBC_decrypt
    default_key = b'1234567890123456'
    content = b'w23456787654567\x02'
    ciphered = AES_CBC_encrypt(default_key, content)
    plain_content = AES_CBC_decrypt(default_key, ciphered)
    print(content)
    print(ciphered)
    print(plain_content)
    print(binascii.hexlify(ciphered).upper())
    #############################################################
def test_nist256p_oid(self):
    """The DER-encoded OID of the NIST P-256 curve must match its constant."""
    expected = b"06082a8648ce3d030107"
    self.assertEqual(hexlify(NIST256p.encoded_oid), expected)
def b2h(the_bytes):
    """Convert raw bytes to their lowercase hexadecimal string form."""
    hexed = binascii.hexlify(the_bytes)
    return hexed.decode("utf8")