def respond(self, code, string):
    """Send an HTTP response whose body is *string*.

    A 200 gets the string verbatim; any other status code gets a
    "<code> <string>" body so the client sees the status text too.
    """
    self.send_response(code)
    self.end_headers()
    body = string if code == 200 else str(code) + ' ' + string
    self.wfile.write(bytes(body, 'utf-8'))
def restore_zk(filename):
    """Restores Zookeeper data from a fixed file in the local FS.

    Each line of the backup file is a single-entry JSON object mapping a
    znode path to its base64-encoded value.

    Args:
        filename: A str, the path to the temporary Zookeeper backup file.
    """
    import base64  # local import: only needed by the py3-compatible decode below
    handle = kazoo.client.KazooClient(hosts=ZK_DEFAULT_HOST)
    handle.start()
    with open(filename, 'r') as f:
        for line in f:
            pair = json.loads(line)
            # Fix for Python 3: dict views are not indexable
            # (pair.keys()[0]) and str has no .decode('base64'); use the
            # base64 module instead.
            path, encoded = next(iter(pair.items()))
            value = base64.b64decode(encoded)
            try:
                handle.create(path, bytes(value), makepath=True)
                logging.debug("Created '{0}'".format(path))
            except kazoo.exceptions.NodeExistsError:
                try:
                    handle.set(path, bytes(value))
                    logging.debug("Updated '{0}'".format(path))
                except kazoo.exceptions.BadArgumentsError:
                    logging.warning("BadArgumentsError for path '{0}'".format(path))
            except kazoo.exceptions.NoNodeError:
                logging.warning("NoNodeError for path '{0}'. Parent nodes are "
                                "missing".format(path))
            except kazoo.exceptions.ZookeeperError:
                logging.warning("ZookeeperError for path '{0}'".format(path))
    handle.stop()
def _main_cli(args, out, encoding='utf-8'):
    """Dispatch the CLI actions: key generation, signing, verification
    and public-key recovery.

    Args:
        args: parsed argparse namespace (``action`` plus per-action fields).
        out: writable text stream for the results.
        encoding: encoding used when rendering hex digests.

    Returns:
        Process exit code: 0 on success, 1 when a signature check fails.
    """
    import binascii

    def show_public(public_key):
        # Render the serialized public key as hex on *out*.
        rawp = public_key.serialize()
        out.write(u"Public key: {}\n".format(
            binascii.hexlify(rawp).decode(encoding)))

    def sign(funcname, params):
        # Rebuild the private key from its hex form and sign
        # params.message with the named signing method.
        raw = bytes(bytearray.fromhex(params.private_key))
        priv = PrivateKey(raw)
        func = getattr(priv, funcname)
        sig = func(params.message)
        return priv, sig

    if args.action == 'privkey':
        if args.private_key:
            rawkey = bytes(bytearray.fromhex(args.private_key))
        else:
            rawkey = None  # PrivateKey generates a fresh key
        priv = PrivateKey(rawkey)
        raw = priv.private_key
        out.write(u"{}\n".format(binascii.hexlify(raw).decode(encoding)))
        if args.show_pubkey:
            show_public(priv.pubkey)
    elif args.action == 'sign':
        priv, sig_raw = sign('ecdsa_sign', args)
        sig = priv.ecdsa_serialize(sig_raw)
        out.write(u"{}\n".format(binascii.hexlify(sig).decode(encoding)))
        if args.show_pubkey:
            show_public(priv.pubkey)
    elif args.action == 'checksig':
        raw = bytes(bytearray.fromhex(args.public_key))
        sig = bytes(bytearray.fromhex(args.signature))
        pub = PublicKey(raw, raw=True)
        try:
            sig_raw = pub.ecdsa_deserialize(sig)
            good = pub.ecdsa_verify(args.message, sig_raw)
        except Exception:
            # Fix: narrowed from a bare `except:` so KeyboardInterrupt and
            # SystemExit propagate; any malformed input still counts as a
            # bad signature.
            good = False
        out.write(u"{}\n".format(good))
        return 0 if good else 1
    elif args.action == 'signrec':
        priv, sig = sign('ecdsa_sign_recoverable', args)
        sig, recid = priv.ecdsa_recoverable_serialize(sig)
        out.write(u"{} {}\n".format(binascii.hexlify(sig).decode(encoding),
                                    recid))
        if args.show_pubkey:
            show_public(priv.pubkey)
    elif args.action == 'recpub':
        empty = PublicKey(flags=ALL_FLAGS)
        sig_raw = bytes(bytearray.fromhex(args.signature))
        sig = empty.ecdsa_recoverable_deserialize(sig_raw, args.recid)
        pubkey = empty.ecdsa_recover(args.message, sig)
        show_public(PublicKey(pubkey))
    return 0
def test_basic(self):
    """A minimal PO file should convert to a TMX with the expected attributes."""
    minipo = r"""# Afrikaans translation of program ABC
#
msgid ""
msgstr ""
"Project-Id-Version: program 2.1-branch\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2006-01-09 07:15+0100\n"
"PO-Revision-Date: 2004-03-30 17:02+0200\n"
"Last-Translator: Zuza Software Foundation <*****@*****.**>\n"
"Language-Team: Afrikaans <*****@*****.**>\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"

# Please remember to do something
#: ../dir/file.xml.in.h:1 ../dir/file2.xml.in.h:4
msgid "Applications"
msgstr "Toepassings"
"""
    tmx = self.po2tmx(minipo)
    print("The generated xml:")
    print(bytes(tmx))
    assert tmx.translate("Applications") == "Toepassings"
    assert tmx.translate("bla") is None
    xmltext = bytes(tmx).decode('utf-8')
    # .index() raises ValueError when the fragment is absent, and the
    # truthiness assert additionally rejects position 0.
    for fragment in ('creationtool="Translate Toolkit"',
                     'adminlang',
                     'creationtoolversion',
                     'datatype',
                     'o-tmf',
                     'segtype',
                     'srclang'):
        assert xmltext.index(fragment)
def create_discovery_packet (self, dpid, port_num, port_addr):
    """Build an LLDP discovery packet for one switch port and return the
    packed ofp_packet_out bytes ready to send."""
    # hex(long(dpid)) is '0x...L' on py2; strip the prefix and trailing 'L'.
    dpid_label = 'dpid:' + hex(long(dpid))[2:-1]

    chassis_id = pkt.chassis_id(subtype=pkt.chassis_id.SUB_LOCAL)
    chassis_id.id = bytes(dpid_label)
    # Maybe this should be a MAC. But a MAC of what? Local port, maybe?
    port_id = pkt.port_id(subtype=pkt.port_id.SUB_PORT, id=str(port_num))
    ttl = pkt.ttl(ttl = self._ttl)
    sysdesc = pkt.system_description()
    sysdesc.payload = bytes(dpid_label)

    discovery_packet = pkt.lldp()
    for tlv in (chassis_id, port_id, ttl, sysdesc, pkt.end_tlv()):
        discovery_packet.tlvs.append(tlv)

    eth = pkt.ethernet(type=pkt.ethernet.LLDP_TYPE)
    eth.src = port_addr
    eth.dst = pkt.ETHERNET.NDP_MULTICAST
    eth.payload = discovery_packet

    po = of.ofp_packet_out(action = of.ofp_action_output(port=port_num))
    po.data = eth.pack()
    return po.pack()
def encoded_password(self):
    """Return self._password encoded for the negotiated auth method."""
    if self._auth_method == Authentication.CLEARTEXT_PASSWORD:
        # Cleartext: send the stored password as-is.
        return self._password
    elif self._auth_method == Authentication.CRYPT_PASSWORD:
        return crypt.crypt(self._password, self._options['salt'])
    elif self._auth_method == Authentication.MD5_PASSWORD:
        # PostgreSQL-style double MD5: md5(md5(password + user) + salt).
        # NOTE(review): assumes self._password and the 'user'/'salt'
        # options concatenate cleanly (both bytes on py3) -- confirm with
        # callers.
        for key in 'user', 'salt':
            m = hashlib.md5()
            m.update(self._password + self._options[key])
            hexdigest = m.hexdigest()
            if six.PY3:
                # In python3 the output of m.hexdigest() is a unicode string,
                # so has to be converted to bytes before concat'ing with
                # the password bytes.
                hexdigest = bytes(hexdigest, ASCII)
            self._password = hexdigest
        prefix = 'md5'
        if six.PY3:
            # Same workaround for bytes here.
            prefix = bytes(prefix, ASCII)
        return prefix + self._password
    else:
        raise ValueError("unsupported authentication method: {0}".format(self._auth_method))
def test_import_variables(self):
    """Importing a JSON variables file should surface every key and value."""
    content = ('{"str_key": "str_value", "int_key": 60,'
               '"list_key": [1, 2], "dict_key": {"k_a": 2, "k_b": 3}}')
    try:
        # python 3+
        bytes_content = io.BytesIO(bytes(content, encoding='utf-8'))
    except TypeError:
        # python 2.7
        bytes_content = io.BytesIO(bytes(content))
    response = self.app.post(
        self.IMPORT_ENDPOINT,
        data={'file': (bytes_content, 'test.json')},
        follow_redirects=True
    )
    self.assertEqual(response.status_code, 200)
    body = response.data.decode('utf-8')
    for expected in ('str_key', 'int_key', 'list_key', 'dict_key',
                     'str_value', '60', '[1, 2]'):
        self.assertIn(expected, body)
    # As dicts are not ordered, we may get any of the following cases.
    case_a_dict = '{"k_a": 2, "k_b": 3}'
    case_b_dict = '{"k_b": 3, "k_a": 2}'
    try:
        self.assertIn(case_a_dict, body)
    except AssertionError:
        self.assertIn(case_b_dict, body)
def put(self, key, data, txn=None, flags=0, dlen=-1, doff=-1):
    """Store *data* under *key*, encoding str arguments to bytes first.

    Bug fix: the encoded value used to be assigned to a throwaway local
    (``value = bytes(data, charset)``) while the original str ``data``
    was what got passed to the underlying DB, so str values reached the
    C layer unencoded.
    """
    if isinstance(key, str):
        key = bytes(key, charset)
    if isinstance(data, str):
        data = bytes(data, charset)
    return self._db.put(key, data, flags=flags, txn=txn, dlen=dlen, doff=doff)
def tlv_pack(*args):
    """Pack one TLV (type-length-value) entry.

    Accepts either ``(type, value)`` positional args or a single
    ``{'type': ..., 'value': ...}`` dict, and returns the packed bytes:
    4-byte big-endian total length, 4-byte type, then the encoded value.
    """
    if len(args) == 2:
        tlv = {'type': args[0], 'value': args[1]}
    else:
        tlv = args[0]
    # Fix: initialize as bytes, not str, so the fall-through case (no
    # matching meta type) returns b'' consistently with every branch.
    data = b''
    value = tlv['value']
    if (tlv['type'] & TLV_META_TYPE_UINT) == TLV_META_TYPE_UINT:
        if isinstance(value, float):
            value = int(round(value))
        data = struct.pack('>III', 12, tlv['type'], value)
    elif (tlv['type'] & TLV_META_TYPE_QWORD) == TLV_META_TYPE_QWORD:
        data = struct.pack('>IIQ', 16, tlv['type'], value)
    elif (tlv['type'] & TLV_META_TYPE_BOOL) == TLV_META_TYPE_BOOL:
        # Bool payload is a single 0x00/0x01 byte.
        data = struct.pack('>II', 9, tlv['type']) + bytes(chr(int(bool(value))), 'UTF-8')
    else:
        # Normalize the value to bytes before length-prefixed packing
        # ('unicode' name check keeps py2 compatibility).
        if value.__class__.__name__ == 'unicode':
            value = value.encode('UTF-8')
        elif not is_bytes(value):
            value = bytes(value, 'UTF-8')
        if (tlv['type'] & TLV_META_TYPE_STRING) == TLV_META_TYPE_STRING:
            # Strings are NUL-terminated on the wire.
            data = struct.pack('>II', 8 + len(value) + 1, tlv['type']) + value + NULL_BYTE
        elif (tlv['type'] & TLV_META_TYPE_RAW) == TLV_META_TYPE_RAW:
            data = struct.pack('>II', 8 + len(value), tlv['type']) + value
        elif (tlv['type'] & TLV_META_TYPE_GROUP) == TLV_META_TYPE_GROUP:
            data = struct.pack('>II', 8 + len(value), tlv['type']) + value
        elif (tlv['type'] & TLV_META_TYPE_COMPLEX) == TLV_META_TYPE_COMPLEX:
            data = struct.pack('>II', 8 + len(value), tlv['type']) + value
    return data
def RequestToQNetworkRequest(request):
    """ Convert a Urllib Request to a QtNetwork one. """
    if isinstance(request, str):
        # A bare URL string maps to a GET with an empty body.
        return (QtNetwork.QNetworkRequest(QtCore.QUrl(request)),
                b"GET", QtCore.QByteArray())

    # From Urllib Request
    qnrequest = QtNetwork.QNetworkRequest(QtCore.QUrl(request.full_url))
    # Copy headers, coercing names/values to the bytes Qt expects.
    for name, value in request.header_items():
        raw_name = name if isinstance(name, bytes) else bytes(name, "ascii")
        raw_value = value if isinstance(value, bytes) else bytes(value, "utf-8")
        qnrequest.setRawHeader(raw_name, raw_value)
    # Body: Qt wants an empty QByteArray rather than None.
    data = request.data
    if data is None:
        data = QtCore.QByteArray()
    elif isinstance(data, str):
        data = bytes(data, "utf-8")
    return qnrequest, bytes(request.get_method(), "ascii"), data
def get_both(self, key, value):
    """Look up the exact (key, value) pair via the underlying cursor,
    encoding str arguments with the module charset first."""
    if isinstance(key, str):
        key = bytes(key, charset)
    if isinstance(value, str):
        value = bytes(value, charset)
    result = self._dbcursor.get_both(key, value)
    return self._fix(result)
def ToBytes( value ):
    """Coerce *value* to the (python-future) bytes type on both py2 and py3.

    Falsy values become b''; non-string types go through str() first.
    The check order below is deliberate -- do not reorder.
    """
    if not value:
        return bytes()

    # This is tricky. On py2, the bytes type from builtins (from python-future) is
    # a subclass of str. So all of the following are true:
    #   isinstance(str(), bytes)
    #   isinstance(bytes(), str)
    # But they don't behave the same in one important aspect: iterating over a
    # bytes instance yields ints, while iterating over a (raw, py2) str yields
    # chars. We want consistent behavior so we force the use of bytes().
    if type( value ) == bytes:
        return value

    # This is meant to catch Python 2's native str type.
    if isinstance( value, bytes ):
        return bytes( value, encoding = 'utf8' )

    if isinstance( value, str ):
        # On py2, with `from builtins import *` imported, the following is true:
        #
        #   bytes(str(u'abc'), 'utf8') == b"b'abc'"
        #
        # Obviously this is a bug in python-future. So we work around it. Also filed
        # upstream at: https://github.com/PythonCharmers/python-future/issues/193
        # We can't just return value.encode( 'utf8' ) on both py2 & py3 because on
        # py2 that *sometimes* returns the built-in str type instead of the newbytes
        # type from python-future.
        if PY2:
            return bytes( value.encode( 'utf8' ), encoding = 'utf8' )
        else:
            return bytes( value, encoding = 'utf8' )

    # This is meant to catch `int` and similar non-string/bytes types.
    return ToBytes( str( value ) )
def __init__(self, app_id, app_secret, access_token):
    """
    Initializes and populates the instance attributes with app_id,
    app_secret, access_token, appsecret_proof, and requests given
    arguments app_id, app_secret, and access_token.
    """
    self.app_id = app_id
    self.app_secret = app_secret
    self.access_token = access_token
    # appsecret_proof = HMAC-SHA256(key=app_secret, msg=access_token);
    # py2 takes raw strings, py3 needs explicit utf-8 encoding.
    if version_info < (3, 0):
        proof = hmac.new(
            bytes(self.app_secret),
            msg=bytes(self.access_token),
            digestmod=hashlib.sha256)
    else:
        proof = hmac.new(
            bytes(self.app_secret, 'utf-8'),
            msg=bytes(self.access_token, 'utf-8'),
            digestmod=hashlib.sha256)
    self.appsecret_proof = proof.hexdigest()
    # A persistent session pinned to Facebook's CA bundle, carrying the
    # token and proof on every request.
    self.requests = requests.Session()
    self.requests.verify = os.path.join(
        os.path.dirname(__file__),
        'fb_ca_chain_bundle.crt',
    )
    self.requests.params.update({
        'access_token': self.access_token,
        'appsecret_proof': self.appsecret_proof,
    })
def handle(self):
    """Serve one tiny HTTP request arriving on the raw TCP socket."""
    # self.request is the TCP socket connected to the client
    request = self.request.recv(1024).decode('utf-8')
    print('Connected by',self.client_address[0])
    print('Request is', request)
    parts = request.split(' ')
    method = parts[0]
    src = parts[1]
    if method == 'GET':
        # /test.jpg serves the raw picture bytes; anything else serves
        # the gbk-encoded text page.
        content = pic_content if src == '/test.jpg' else bytes(text_content,'gbk')
        self.request.sendall(content)
    if method == 'POST':
        form = request.split('\r\n')
        blank = form.index('')            # headers/body separator line
        entry = form[blank:]              # main content of the request
        value = entry[-1].split('=')[-1]  # last form field's value
        self.request.sendall(bytes(text_content+ '\n <p>' + value + '</p>','gbk') )
def do_GET(self):
    """Handle GET: route self.path through the mapper and emit JSON."""
    if not self._validate():
        return
    try:
        resp = self.server._mpr.call(url=self.path, method='GET',
                                     args={'headers': self.headers})
    except Exception as exc:
        traceback.print_exc()
        self._send_error(exc)
        return
    if not resp:
        # No route matched: plain 400 with the default headers.
        self.send_response(400)
        self._send_default_headers()
        self.end_headers()
        return
    self._handle_mapper_response(resp)
    self.send_response(self._status_code, self._message)
    self.send_header('Content-type', 'application/json')
    self._send_default_headers()
    self.end_headers()
    if self._payload is not None:
        body = json.dumps(self._payload, default=self._serialize)
    else:
        body = json.dumps([])
    self.wfile.write(bytes(body, 'utf-8'))
def __bytes__(self):
    """Serialize the DNS message: 12-byte header, then every record in
    each of the four sections in wire order."""
    # First flag byte: QR | OPCODE | AA | TC | RD.
    flags_hi = ((0b10000000 if self.QR == DnsQR.response else 0)
                | (self.OPCODE << 3)
                | (0b100 if self.AA else 0)
                | (0b010 if self.TC else 0)
                | (0b001 if self.RD else 0))
    # Second flag byte: RA | Z | RCODE.
    flags_lo = ((0b10000000 if self.RA else 0)
                | self.Z << 4
                | self.RCODE)
    data = struct.pack('!HBBHHHH',
                       self.ID,
                       flags_hi,
                       flags_lo,
                       self.QDCOUNT,
                       self.ANCOUNT,
                       self.NSCOUNT,
                       self.ARCOUNT,
                       )
    for section in (self.questions, self.answers,
                    self.nameservers, self.additional_records):
        for record in section:
            data += bytes(record)
    return data
def test_read_bytes_name(self):
    """A history file written under a bytes filename must round-trip."""
    name = bytes('my_history', sys.getfilesystemencoding())
    history.append('fred')
    history.append('wilma')
    history.write_file(name, raise_exc=True)
    history.clear()
    history.read_file(name, raise_exc=True)
    self.assertEqual(len(history), 2)
def subseg(self, text, start, end):
    """
    Return a "sub-segment" list containing segment structures
    that make up a portion of this segment.

    A list is returned to handle cases where wide characters
    need to be replaced with a space character at either edge
    so two or three segments will be returned.
    """
    # Clamp the requested range to this segment's screen columns.
    if start < 0:
        start = 0
    if end > self.sc:
        end = self.sc
    if start >= end:
        return [] # completely gone
    if self.text:
        # use text stored in segment (self.text)
        spos, epos, pad_left, pad_right = calc_trim_text(
            self.text, 0, len(self.text), start, end )
        # bytes().ljust(n) yields n ASCII space bytes: a wide character
        # cut at either edge is replaced by space padding.
        return [ (end-start, self.offs,
            bytes().ljust(pad_left) + self.text[spos:epos] + bytes().ljust(pad_right)) ]
    elif self.end:
        # use text passed as parameter (text)
        spos, epos, pad_left, pad_right = calc_trim_text(
            text, self.offs, self.end, start, end )
        l = []
        if pad_left:
            # one-column segment standing in for the clipped left half
            # of a wide character
            l.append((1,spos-1))
        l.append((end-start-pad_left-pad_right, spos, epos))
        if pad_right:
            l.append((1,epos))
        return l
    else:
        # simple padding adjustment
        return [(end-start,self.offs)]
def _collect_names_then_unlink(r):
    """Read NUL-separated semaphore names from pipe fd *r* and unlink them.

    Runs in a forked helper process: it must ignore termination signals
    and must never raise, since the parent may die while holding the IO
    lock (see the issue 6721 notes below).
    """
    # protect the process from ^C and "killall python" etc
    signal.signal(signal.SIGINT, signal.SIG_IGN)
    signal.signal(signal.SIGTERM, signal.SIG_IGN)
    # close all fds except r
    try:
        MAXFD = os.sysconf("SC_OPEN_MAX")
    except:
        MAXFD = 256
    closerange(0, r)
    closerange(r + 1, MAXFD)
    # collect data written to pipe
    data = []
    while 1:
        try:
            s = os.read(r, 512)
        except:
            # XXX IO lock might be held at fork, so don't try
            # printing unexpected exception - see issue 6721
            pass
        else:
            if not s:
                # EOF: writer side closed, all names received.
                break
            data.append(s)
    # attempt to unlink each collected name
    for name in bytes('', 'ascii').join(data).split(bytes('\0', 'ascii')):
        try:
            sem_unlink(name.decode('ascii'))
        except:
            # XXX IO lock might be held at fork, so don't try
            # printing unexpected exception - see issue 6721
            pass
def build_ftl(model):
    """Serialize *model* into an FTL binary blob and return the bytes."""
    # The origin is the index of the first vertex whose position and
    # normal are both exactly zero.
    origin = 0
    for vertex in model.verts:
        if vertex.xyz == (0.0,0.0,0.0) and vertex.n == (0.0,0.0,0.0):
            break
        origin += 1

    counts = [len(model.verts),
              len(model.faces),
              len(model.mats),
              len(model.groups),
              len(model.actions),
              len(model.sels),
              origin]
    geomHeader = encode_ints(counts) + bytes(256)

    return concat_bytes([
        b'FTL\x00',
        pack('<f', 0.83257),
        bytes(512),        # fake checksum
        pack('<i', 544),   # 3d data header offset
        b'\xff' * 20,      # 5 times 32-bit -1: skipped offsets
        geomHeader,
        model.encode_verts(),
        model.encode_faces(),
        model.encode_mats(),
        model.encode_groups(),
        model.encode_actions(),
        model.encode_sels()])
def save_module_dict(pickler, obj):
    """Pickle a module's __dict__ by *reference* when it is recognizably a
    known module's namespace (so unpickling rebinds to the live module),
    falling back to a plain by-value dict pickle otherwise.

    The raw 'c<module>\\n<name>\\n' writes below are hand-emitted pickle
    GLOBAL opcodes.
    """
    if is_dill(pickler) and obj == pickler._main.__dict__ and not pickler._session:
        log.info("D1: <dict%s" % str(obj.__repr__).split('dict')[-1]) # obj
        # Reference __main__ via the __builtin__ module.
        if PY3:
            pickler.write(bytes('c__builtin__\n__main__\n', 'UTF-8'))
        else:
            pickler.write('c__builtin__\n__main__\n')
        log.info("# D1")
    elif not is_dill(pickler) and obj == _main_module.__dict__:
        log.info("D3: <dict%s" % str(obj.__repr__).split('dict')[-1]) # obj
        if PY3:
            pickler.write(bytes('c__main__\n__dict__\n', 'UTF-8'))
        else:
            pickler.write('c__main__\n__dict__\n')   #XXX: works in general?
        log.info("# D3")
    elif '__name__' in obj and obj != _main_module.__dict__ \
            and obj is getattr(_import_module(obj['__name__'],True), '__dict__', None):
        # The dict is exactly some importable module's __dict__: emit a
        # global reference to <module>.__dict__.
        log.info("D4: <dict%s" % str(obj.__repr__).split('dict')[-1]) # obj
        if PY3:
            pickler.write(bytes('c%s\n__dict__\n' % obj['__name__'], 'UTF-8'))
        else:
            pickler.write('c%s\n__dict__\n' % obj['__name__'])
        log.info("# D4")
    else:
        log.info("D2: <dict%s" % str(obj.__repr__).split('dict')[-1]) # obj
        if is_dill(pickler) and pickler._session:
            # we only care about session the first pass thru
            pickler._session = False
        StockPickler.save_dict(pickler, obj)
        log.info("# D2")
    return
def set_color(self, channel=0, index=0, red=0, green=0, blue=0, name=None, hex=None):
    """Set the color to the device as RGB

    Args:
        red: Red color intensity 0 is off, 255 is full red intensity
        green: Green color intensity 0 is off, 255 is full green intensity
        blue: Blue color intensity 0 is off, 255 is full blue intensity
        name: Use CSS colour name as defined here: http://www.w3.org/TR/css3-color/
        hex: Specify color using hexadecimal color value e.g. '#FF3366'
    """
    red, green, blue = self._determine_rgb(red=red, green=green, blue=blue,
                                           name=name, hex=hex)
    rgb = [int(round(component, 3)) for component in (red, green, blue)]
    if self.inverse:
        # Inverted hardware: 255 means off, 0 means full intensity.
        rgb = [255 - component for component in rgb]
    if index == 0 and channel == 0:
        # Default LED uses the short report; others address channel/index.
        payload = bytes(bytearray([0] + rgb))
        self._usb_ctrl_transfer(0x20, 0x9, 0x0001, 0, payload)
    else:
        payload = bytes(bytearray([0, channel, index] + rgb))
        self._usb_ctrl_transfer(0x20, 0x9, 0x0005, 0, payload)
def query(ip, query):
    """Build a QUERY packet: code, packet id, ip, port, TTL, padded query,
    each ascii-encoded and concatenated in wire order."""
    fields = (
        const.CODE_QUERY,
        func.random_pktid(const.LENGTH_PKTID),
        ip,
        func.format_string(const.PORT, const.LENGTH_PORT, "0"),
        func.format_string(const.TTL, const.LENGTH_TTL, "0"),
        func.format_string(query, const.LENGTH_QUERY, " "),
    )
    return b"".join(bytes(field, "ascii") for field in fields)
def storeXMLInfo(info_file, masked_image_file):
    """Copy the tracker XML metadata from *info_file* into the HDF5 file
    *masked_image_file*, recording fps and pixel->micron factors on the
    mask node."""
    with open(info_file, 'r') as fid:
        xml_info = fid.read()

    # if it is empty the xml create a node and exit
    if not xml_info:
        with tables.File(masked_image_file, 'r+') as fid:
            fid.create_array('/', 'xml_info', obj = bytes('', 'utf-8'))
        return

    # read the xml and exit
    root = ET.fromstring(xml_info)

    def _value(xpath):
        # First matching element's text, as a float.
        return float(root.findall(xpath)[0].text)

    x_microns = _value('./info/stage/steps/equivalent/microns/x')
    y_microns = _value('./info/stage/steps/equivalent/microns/y')
    x_pixels = _value('./info/stage/steps/equivalent/pixels/x')
    y_pixels = _value('./info/stage/steps/equivalent/pixels/y')
    fps = _value('./info/camera/display/frame/rate')

    pixels2microns_x = x_microns/x_pixels
    pixels2microns_y = y_microns/y_pixels

    with tables.File(masked_image_file, 'r+') as fid:
        # Replace any stale node before storing the fresh XML.
        if '/xml_info' in fid:
            fid.remove_node('/', 'xml_info')
        fid.create_array('/', 'xml_info', obj = bytes(xml_info, 'utf-8'))
        masks_node = fid.get_node('/', 'mask')
        masks_node.attrs['fps'] = fps
        masks_node.attrs['pixels2microns_x'] = pixels2microns_x
        masks_node.attrs['pixels2microns_y'] = pixels2microns_y
def on_meta_data_changed(self):
    """Update the download's metadata from the reply's raw headers."""
    if self.reply is None:
        return
    # Coerce Qt's QByteArray pairs into plain bytes keys/values.
    self.raw_headers = {bytes(name): bytes(value)
                        for name, value in self.reply.rawHeaderPairs()}
def get_readonly(model_name):
    """
    Get a mesh and make vertices and faces read only.

    Parameters
    ------------
    model_name : str
      Model name in models directory

    Returns
    -----------
    mesh : trimesh.Trimesh
      Geometry with read-only data
    verts : (n, 3) float
      Read-only vertices
    faces : (m, 3) int
      Read-only faces
    """
    original = g.get_mesh(model_name)
    verts, faces = original.vertices, original.faces
    # Round-trip each array through a raw bytes buffer: ndarrays backed
    # by a bytes object are flagged non-writeable by numpy.
    verts = g.np.ndarray(verts.shape, verts.dtype, bytes(verts.tostring()))
    faces = g.np.ndarray(faces.shape, faces.dtype, bytes(faces.tostring()))
    assert not verts.flags['WRITEABLE']
    assert not faces.flags['WRITEABLE']
    mesh = g.trimesh.Trimesh(verts, faces, process=False, validate=False)
    assert not mesh.vertices.flags['WRITEABLE']
    assert not mesh.faces.flags['WRITEABLE']
    # return the mesh, and read-only vertices and faces
    return mesh, verts, faces
def prepAgents(self, test):
    """Connect to every target agent listed in test.specs, echo-test the
    connection, and send each agent its spec string.

    Successful sockets are stored in self.sockets keyed by target.
    Socket failures abort via sys.exit.
    """
    if verbose:
        print(' Preparing agents...')
    targets = list(test.specs.keys())
    for target in targets:
        # Create TCP socket. Skip if in simulation mode.
        if not simulate:
            try:
                sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            except socket.error as e:
                sys.exit('Failed to create socket for target "%s": %s.' % (target, e))
            # Bind socket to local machine name and specified port.
            try:
                hostname = socket.gethostname()
                port = AGENT_LISTEN_PORT
                sock.connect((hostname, port))
                # Perform a simple echo test to make sure it works.
                testBytes = bytes('hello, ' + target, 'UTF-8')
                sock.sendall(testBytes)
                response = sock.recv(BUFFER_SIZE)
                if response == testBytes:
                    sock.sendall(bytes(test.specs[target], 'UTF-8'))
                    self.sockets[target] = sock
                else:
                    # TODO
                    # Fix: target is a str spec key (it is concatenated
                    # with 'hello, ' above), so '%d' raised TypeError on
                    # this error path; use '%s'.
                    print('Agent %s failed echo test. Skipping.' % target)
            except socket.error as e:
                sys.exit('Failed to open connection to socket for target '
                         '"%s": %s.' % (target, e))
    if verbose:
        print(' ...finished.\n')
def splitextTest(self, path, filename, ext):
    """Exercise posixpath.splitext on str and bytes variants of *path*
    under several directory prefixes."""
    for prefix in ("", "/", "abc/", "abc.def/", "/abc.def/"):
        self.assertEqual(posixpath.splitext(prefix + path),
                         (prefix + filename, ext))
    # A trailing slash keeps the extension glued to the base name.
    self.assertEqual(posixpath.splitext(path + "/"),
                     (filename + ext + "/", ""))
    path = bytes(path, "ASCII")
    filename = bytes(filename, "ASCII")
    ext = bytes(ext, "ASCII")
    for prefix in (b"", b"/", b"abc/", b"abc.def/", b"/abc.def/"):
        self.assertEqual(posixpath.splitext(prefix + path),
                         (prefix + filename, ext))
    self.assertEqual(posixpath.splitext(path + b"/"),
                     (filename + ext + b"/", b""))
async def test_mailgun_webhook_event_without_an_api_key(
        http_client, webhook_id_without_api_key, mailgun_events):
    """Test that webhook triggers an event if there is no api key."""
    timestamp = '1529006854'
    token = 'a8ce0edb2dd8301dee6c2405235584e45aa91d1e9f979f3de0'
    event_count = len(mailgun_events)
    # Mailgun signs sha256(timestamp + token) with the API key.
    signature = hmac.new(
        key=bytes(API_KEY, 'utf-8'),
        msg=bytes('{}{}'.format(timestamp, token), 'utf-8'),
        digestmod=hashlib.sha256
    ).hexdigest()
    await http_client.post(
        '/api/webhook/{}'.format(webhook_id_without_api_key),
        json={
            'hello': 'mailgun',
            'signature': {
                'signature': signature,
                'timestamp': timestamp,
                'token': token,
            }
        }
    )
    assert len(mailgun_events) == event_count + 1
    assert mailgun_events[-1].data['webhook_id'] == webhook_id_without_api_key
    assert mailgun_events[-1].data['hello'] == 'mailgun'
def do_http_msg(self, handler, headers, msg):
    """Handle an inbound HTTP message: validate the sending key and relay
    the text to the requested IRC channel."""
    log.debug("headers: {}".format(headers))
    log.debug("text: {}".format(msg))
    params = msg
    important_fields = {
        'key': params.get('key', None),
        'channel': params.get('channel', None),
        'text': params.get('msg', None)
    }

    def _reply(status, payload):
        # Emit a JSON response with the given HTTP status code.
        handler.send_response(status)
        handler.send_header("Content-Type", "application/json")
        handler.end_headers()
        handler.wfile.write(bytes(json.dumps(payload), "utf-8"))

    if (important_fields['channel'] is None
            or important_fields['text'] is None
            or important_fields['key'] is None):
        missing_fields = list(filter(lambda x: important_fields[x] is None,
                                     important_fields.keys()))
        _reply(403, {"success": False, "msg": "Missing field(s).",
                     "fields": missing_fields})
    elif important_fields['key'] == self.registryValue("sendingKey"):
        self.send_msg(important_fields['channel'], important_fields['text'])
        _reply(200, {"success": True, "msg": "Thanks!"})
    else:
        _reply(403, {"success": False, "msg": "Invalid sendingKey"})
# Verify a PKCS#1 v1.5 signature.  With no CLI argument the signature is
# read from cryptomat/.signature.json; otherwise argv[1] holds the JSON.
#
# Bug fix: file_mode was only assigned inside the `len(sys.argv) == 1`
# branch, so running the script WITH an argument raised NameError at the
# later `if file_mode:` check.
file_mode = len(sys.argv) == 1

with open("assets/public.pem") as aeskey_file:
    key = RSA.import_key(aeskey_file.read())

if file_mode:
    with open("cryptomat/.signature.json") as json_file:
        data = json.load(json_file)
else:
    data = json.loads(sys.argv[1])

# data is [message, signature-as-int-list]; convert int list to bytes.
msg = data[0]
signature = bytes(data[1])

msg_bytes = bytearray()
msg_bytes.extend(map(ord, msg))

hash = SHA256.new(msg_bytes)
try:
    pkcs1_15.new(key).verify(hash, signature)
    answer = "+"
except (TypeError):
    print("py - TypeError")
    answer = "-"
except (ValueError):
    # ValueError is the documented "invalid signature" outcome.
    answer = "-"
# --- Baby-step/giant-step search for ki = dlog_gf(c^m) modulo f ---
# NOTE(review): gf, sqf, f, n, c, m, remainders, factors, enc, trange and
# libnum are defined earlier in the script (not visible in this chunk).
gsqf = pow(gf, sqf, n)
table = {}
# Giant step: tabulate c^m * (gf^sqf)^a for a in [0, sqf).
ygna = pow(c, m, n)
for a in trange(sqf, leave=False):
    table[ygna] = a
    ygna = (ygna * gsqf) % n
# Baby step: walk gf^b until it collides with a giant-step entry.
gb = 1
for b in trange(sqf, leave=False):
    if gb in table:
        a = table[gb]
        # gf^b = cy^a = gf^(ki + a * sqf)
        ki = (b - a * sqf) % f
        remainders.append(ki)
        break
    gb = (gb * gf) % n
# Reconstruct k from the per-factor remainders via the CRT.
k = libnum.solve_crt(remainders, factors)
# Print flag: the SHA-512 digest of k is used as the XOR keystream.
k = hashlib.sha512(str(k).encode('ascii')).digest()
dec = bytes(ci ^ ki for ci, ki in zip(enc, k))
print(dec)
def draw_disk(key):
    """Render the hex-encoded disk data for *key* as a 1-bit 128x128 image."""
    hex_string = ''.join(get_data(key))
    raw = bytes.fromhex(hex_string)
    return Image.frombytes('1', (128, 128), raw)
def broadcast(message, prefix=""):
    """Broadcasts message to all connected clients, with *prefix*
    utf8-encoded and prepended to each send."""
    for client in clients:
        client.send(bytes(prefix, "utf8") + message)
def write_data(file: Multiplexer.File, buf: bytes) -> None:
    """Write *buf* as length-prefixed chunks: a one-byte count (<= 255)
    followed by that many payload bytes, repeated until exhausted."""
    offset = 0
    while offset < len(buf):
        chunk = buf[offset : offset + 255]
        file.write(bytes((len(chunk),)))
        file.write(chunk)
        offset += 255
def hash(self):
    """Return the SHA3-512 hex digest of this block's data plus nonce.

    NOTE(review): ``bytes(self.nonce)`` yields *nonce* zero bytes when
    the nonce is an int (not its digit string) -- confirm that is the
    intended encoding; ``str(self.nonce).encode()`` may have been meant.
    """
    print("calchash")  # debug trace left in
    return hashlib.sha3_512(self.data.encode('utf-8') + bytes(self.nonce)).hexdigest()
def _write_effective_compression_file(self, data_size): """Ensure file contents can be effectively compressed.""" self.volume_file.seek(0) self.volume_file.write(bytes([65] * data_size)) self.volume_file.seek(0)
# Minimal TCP client: send one line to HOST:PORT and print the reply.
import socket
import sys  # NOTE(review): unused in this snippet

HOST, PORT = "192.168.0.16", 5555
data = "test rico"

# Create a socket (SOCK_STREAM means a TCP socket)
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock:
    # Connect to server and send data
    sock.connect((HOST, PORT))
    sock.sendall(bytes(data + "\n", "utf-8"))

    # Receive data from the server and shut down
    received = str(sock.recv(1024), "utf-8")

print("Sent: {}".format(data))
print("Received: {}".format(received))
def new_x25519(cls):
    """Generate an ephemeral X25519 key pair and return it together with
    the serialized key-share entry (group id + length-prefixed public key)."""
    secret = PrivateKey.generate()
    raw_public = bytes(secret.public_key)
    return secret, cls.x25519.pack() + pack_int(2, raw_public)
class firmware(object):
    '''Loads a firmware file.

    The file is a JSON document whose "image" member holds the firmware
    image, zlib-compressed and base64-encoded.  The class also computes a
    CRC-32 (reflected, polynomial 0xEDB88320, initial value 0, no final
    XOR) over the 0xff-padded image.
    '''

    desc = {}
    image = bytes()

    # Build the standard reflected CRC-32 lookup table instead of keeping
    # 256 hand-written literals (error-prone to maintain).  The generated
    # values are identical to the previous hard-coded table.
    def _build_crctab():
        table = []
        for index in range(256):
            state = index
            for _ in range(8):
                state = (state >> 1) ^ (0xedb88320 if state & 1 else 0)
            table.append(state)
        return table

    crctab = array.array('I', _build_crctab())
    del _build_crctab  # table-builder is not part of the public interface

    # Word of padding bytes used to extend the CRC past the image.
    crcpad = bytearray(b'\xff\xff\xff\xff')

    def __init__(self, path):
        """Load, decode and 4-byte-pad the firmware description at *path*."""
        # read the file (context manager replaces the old open/close pair)
        with open(path, "r") as f:
            self.desc = json.load(f)
        self.image = bytearray(
            zlib.decompress(base64.b64decode(self.desc['image'])))
        # pad image to 4-byte length
        while ((len(self.image) % 4) != 0):
            self.image.extend(b'\xff')

    def property(self, propname):
        """Return the named member of the firmware description."""
        return self.desc[propname]

    def __crc32(self, data, state):
        # One table-driven CRC-32 pass over *data*, continuing from *state*.
        # (parameter renamed from `bytes`, which shadowed the builtin)
        for byte in data:
            index = (state ^ byte) & 0xff
            state = self.crctab[index] ^ (state >> 8)
        return state

    def crc(self, padlen):
        """CRC-32 of the image, extended with 0xff words up to *padlen* bytes."""
        state = self.__crc32(self.image, int(0))
        for i in range(len(self.image), (padlen - 1), 4):
            state = self.__crc32(self.crcpad, state)
        return state
m = int(md5_hash, 16) # 用发送者的私钥对hash进行加密得到签名S S = rsa.decrypt(m, selfkey_1) # 签名S共1024位,不足的高位补0 S = '{:01024b}'.format(S) S = S.encode('utf-8') # 转bytes类型 '''3.利用ZIP压缩 < M, S >''' compress = zipstream.LZ77Compressor() # 拼接<M,S> new_msg = msg # 把1024位二进制的S转为128位16进制 for i in range(int(len(S) / 8)): tmp = S[i * 8: i * 8 + 8] data = int(tmp, 2) data = bytes([data]) new_msg = new_msg + data # 拼接,后128位为签名S # print(new_msg[-128:]) new_msg = compress.compress(new_msg) '''4. 利用IDEA加密压缩数据''' # 生成一个随机的128位IDEA密钥 IDEA_key = 0x4AD6459F82C5B300952C49104881EF51 # 对拼接后的数据进行IDEA加密 IDEA_MS = IDEA.IDEA_en(new_msg, IDEA_key) '''5. 用RSA加密IDEA的密钥k,得到RSA(k)''' p2 = get512prime.get_prime() q2 = get512prime.get_prime() while p2 == q2: q2 = get512prime.get_prime()
def tls_response(self):
    """Client-side TLS 1.3 record-layer state machine, as an iofree parser.

    Reads one record per loop iteration: validates the 5-byte header,
    dispatches on content type (alert / handshake / application_data /
    change_cipher_spec), derives handshake and application traffic keys
    after ServerHello, and sends the client Finished (plus optional
    EndOfEarlyData) once the server's Finished has been processed.

    Raises:
        Alert: on received alert records or record-size overflow.
    """
    parser = yield from iofree.get_parser()
    while True:
        # Record header: content_type(1) | legacy_record_version(2) | length(2).
        head = yield from iofree.read(5)
        assert head[
            1:3] == b"\x03\x03", f"bad legacy_record_version {head[1:3]}"
        length = int.from_bytes(head[3:], "big")
        # RFC 8446 §5.1/5.2: ciphertext records may exceed 2^14 bytes by at
        # most 256 (padding + AEAD expansion); plaintext records may not
        # exceed 2^14 at all.
        if (head[0] == ContentType.application_data and length >
                (16384 + 256)) or (head[0] != ContentType.application_data
                                   and length > 16384):
            parser.write(self.pack_fatal(AlertDescription.record_overflow))
            raise Alert(AlertLevel.fatal, AlertDescription.record_overflow)
        content = memoryview((yield from iofree.read(length)))
        if head[0] == ContentType.alert:
            # Plaintext alert record: surface it to the caller.
            level = AlertLevel.from_value(content[0])
            description = AlertDescription.from_value(content[1])
            raise Alert(level, description)
        elif head[0] == ContentType.handshake:
            # The only plaintext handshake message expected here is ServerHello.
            self.peer_handshake = self.unpack_handshake(content)
            assert (self.peer_handshake.handshake_type ==
                    HandshakeType.server_hello), "expect server hello"
            peer_pk = self.peer_handshake.extensions[
                ExtensionType.key_share].key_exchange
            # ECDH (X25519 scalar mult) with the server's key_share.
            shared_key = crypto_scalarmult(bytes(self.private_key), peer_pk)
            TLSCipher = self.peer_handshake.cipher_suite
            self.TLSCipher = TLSCipher
            key_index = self.peer_handshake.extensions.get(
                ExtensionType.pre_shared_key)
            psk = None if key_index is None else self.psk_list[key_index]
            key_scheduler = TLSCipher.tls_hash.scheduler(shared_key, psk)
            self.key_scheduler = key_scheduler
            secret = key_scheduler.server_handshake_traffic_secret(
                self.handshake_context)
            # server handshake cipher
            self.peer_cipher = TLSCipher(secret)
            # Kept for later: the client handshake cipher is only built once
            # the server Finished has been verified (below).
            client_handshake_traffic_secret = key_scheduler.client_handshake_traffic_secret(
                self.handshake_context)
        elif head[0] == ContentType.application_data:
            # Encrypted record. The inner plaintext is content || type || 0*,
            # so stripping trailing zeros leaves the real content type as the
            # last byte (type bytes are non-zero).
            plaintext = self.peer_cipher.decrypt(content, head).rstrip(b"\x00")
            content_type = ContentType.from_value(plaintext[-1])
            if content_type == ContentType.handshake:
                self.unpack_handshake(plaintext[:-1])
                if self.server_finished:
                    if self.early_data:
                        # Close the early-data stream before Finished.
                        eoe_data = HandshakeType.end_of_early_data.pack_data(
                            b"")
                        # self.handshake_context.extend(eoe_data)
                        inner_plaintext = ContentType.handshake.tls_inner_plaintext(
                            eoe_data)
                        record = self.cipher.tls_ciphertext(inner_plaintext)
                        parser.write(record)
                    # client handshake cipher
                    cipher = TLSCipher(client_handshake_traffic_secret)
                    client_finished = cipher.verify_data(
                        self.handshake_context)
                    client_finished_data = HandshakeType.finished.pack_data(
                        client_finished)
                    inner_plaintext = ContentType.handshake.tls_inner_plaintext(
                        client_finished_data)
                    record = cipher.tls_ciphertext(inner_plaintext)
                    # Compatibility ChangeCipherSpec precedes the Finished.
                    change_cipher_spec = ContentType.change_cipher_spec.tls_plaintext(
                        b"\x01")
                    parser.write(change_cipher_spec + record)
                    # server application cipher
                    server_secret = key_scheduler.server_application_traffic_secret_0(
                        self.handshake_context)
                    self.peer_cipher = TLSCipher(server_secret)
                    self.server_finished = False
                    # client application cipher
                    client_secret = key_scheduler.client_application_traffic_secret_0(
                        self.handshake_context)
                    self.cipher = TLSCipher(client_secret)
                    # Client Finished joins the transcript only after the
                    # application secrets were derived from the pre-Finished
                    # context.
                    self.handshake_context.extend(client_finished_data)
            elif content_type == ContentType.application_data:
                # Deliver decrypted application payload (without type byte).
                self.data_callback(plaintext[:-1])
            elif content_type == ContentType.alert:
                level = AlertLevel.from_value(plaintext[0])
                description = AlertDescription.from_value(plaintext[1])
                raise Alert(level, description)
            elif content_type == ContentType.invalid:
                raise Exception("invalid content type")
            else:
                raise Exception(f"unexpected content type {content_type}")
        elif head[0] == ContentType.change_cipher_spec:
            # Middlebox-compatibility record; content must be a single 0x01.
            assert content == b"\x01", "change_cipher should be 0x01"
        else:
            raise Exception(f"Unknown content type: {head[0]}")
def memstr(x):
    """Return *x* as a bytes object, copying out of a memoryview if needed."""
    return x.tobytes() if isinstance(x, memoryview) else bytes(x)
# Demo of several builtins; the ">>>" comments show the expected output.
print('bool([x])')
print(bool(0))  # >>> False
print(bool('0'))  # >>> True (non-empty string)
print(bool(None))  # >>> False
print(bool([]))  # >>> False
# argument can be any object
# return True or False
# None, False, 0, 0.0, empty string '', empty tuple (), empty list [],
# empty dict {} all count as False
# everything else is True
print('\n', 7)
print('bytearray([source[, encoding[, errors]]])')
print(bytearray([0, 100, 255]))  # >>> bytearray(b'\x00d\xff')
print(bytearray(
    12))  # >>> bytearray(b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')
print(bytes([0, 100, 255]))  # >>> b'\x00d\xff'
print(bytes(12))  # >>> b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
print('\n', 8)
print('bytes([source[, encoding[, errors]]])')
# Returns a new bytes object: an immutable sequence of integers 0 <= x < 256.
# bytes is the immutable counterpart of bytearray — same non-mutating
# methods and the same indexing/slicing behavior.
# Constructor arguments are therefore interpreted exactly as for bytearray().
print('\n', 9)
print('callable(object)')
print(callable(1))  # >>> False
print(callable(abs))  # >>> True, function is callable
print(callable([1, 2]))  # >>> False, a list is not callable
print(callable(zip()))  # >>> False — zip() returns a zip *object*, not the callable zip itself
# argument: any object
def joinfiles(self, filepaths: list) -> bytes:
    """Read every file in *filepaths* and return their contents concatenated.

    Args:
        filepaths: paths handed, in order, to self.readfile.

    Returns:
        The concatenation of all file contents as a single bytes object.
    """
    # b"".join runs in O(total size); the previous `result = result + ...`
    # loop re-copied the accumulator on every iteration (quadratic).
    return b"".join(self.readfile(filepath) for filepath in filepaths)
def memstr(x):
    """Coerce *x* (any bytes-convertible object) into an immutable bytes object."""
    coerced = bytes(x)
    return coerced
def stop(self):
    """Ask the logging thread to shut down by writing a stop token to its pipe."""
    self.logger.debug("Stopping logging thread")
    if not self.running:
        return
    # b"stop" is the same byte sequence the thread's reader end expects.
    os.write(self.writepipe, b"stop")
def join(self, chunks: list) -> bytes:
    """Concatenate *chunks* (a list of bytes-like objects) into one bytes object.

    Args:
        chunks: bytes-like pieces to concatenate in order.

    Returns:
        The concatenated payload; b"" for an empty list.
    """
    # b"".join is linear in the total size; the previous repeated
    # `result = result + chunk` loop was quadratic.
    return b"".join(chunks)
def _bytearray_to_mysql(self, value): """Convert value to bytes""" return bytes(value)
def _onRequestFinished(self, reply: QNetworkReply) -> None:
    """Handle a finished network reply from the toolbox/marketplace server.

    Matches the reply against the registered request URLs, parses the JSON
    payload for the corresponding response type ("packages", "authors",
    "updates"), feeds it into the matching model, and switches the view
    page on success or error.
    """
    # Network-level failures: show the error page and abort.
    if reply.error() == QNetworkReply.TimeoutError:
        Logger.log("w", "Got a timeout.")
        self.setViewPage("errored")
        self.resetDownload()
        return
    if reply.error() == QNetworkReply.HostNotFoundError:
        Logger.log("w", "Unable to reach server.")
        self.setViewPage("errored")
        self.resetDownload()
        return
    if reply.operation() == QNetworkAccessManager.GetOperation:
        # Find which registered request this reply answers.
        for response_type, url in self._request_urls.items():
            if reply.url() == url:
                if reply.attribute(
                        QNetworkRequest.HttpStatusCodeAttribute) == 200:
                    try:
                        json_data = json.loads(
                            bytes(reply.readAll()).decode("utf-8"))

                        # Check for errors:
                        if "errors" in json_data:
                            for error in json_data["errors"]:
                                Logger.log("e", "%s", error["title"])
                            return

                        # Create model and apply metadata:
                        if not self._models[response_type]:
                            Logger.log("e", "Could not find the %s model.",
                                       response_type)
                            break

                        self._server_response_data[
                            response_type] = json_data["data"]
                        self._models[response_type].setMetadata(
                            self._server_response_data[response_type])

                        if response_type == "packages":
                            self._models[response_type].setFilter(
                                {"type": "plugin"})
                            self.reBuildMaterialsModels()
                            self.reBuildPluginsModels()
                            self._notifyPackageManager()
                        elif response_type == "authors":
                            # NOTE(review): the second setFilter call appears
                            # to replace the first — confirm whether filters
                            # are meant to accumulate.
                            self._models[response_type].setFilter(
                                {"package_types": "material"})
                            self._models[response_type].setFilter(
                                {"tags": "generic"})
                        elif response_type == "updates":
                            # Tell the package manager that there's a new set of updates available.
                            packages = set([
                                pkg["package_id"] for pkg in
                                self._server_response_data[response_type]
                            ])
                            self._package_manager.setPackagesWithUpdate(
                                packages)

                        self.metadataChanged.emit()

                        if self.isLoadingComplete():
                            self.setViewPage("overview")

                    except json.decoder.JSONDecodeError:
                        Logger.log("w", "Received invalid JSON for %s.",
                                   response_type)
                        break
                else:
                    # Non-200 HTTP status: show error page and reset.
                    Logger.log(
                        "w",
                        "Unable to connect with the server, we got a response code %s while trying to connect to %s",
                        reply.attribute(
                            QNetworkRequest.HttpStatusCodeAttribute),
                        reply.url())
                    self.setViewPage("errored")
                    self.resetDownload()
    elif reply.operation() == QNetworkAccessManager.PutOperation:
        # Ignore any operation that is not a get operation
        pass
#!/usr/bin/env python
# -*- coding:UTF-8 -*-
# Minimal UDP echo server: prefixes each received datagram with the current
# time and sends it back to the client.
from socket import *
from time import ctime

HOST = "127.0.0.1"
PORT = 21567
BUFSIZE = 1024
ADDR = (HOST, PORT)

udpSerSock = socket(AF_INET, SOCK_DGRAM)
udpSerSock.bind(ADDR)

while True:
    print("waiting for message...\n")
    data, addr = udpSerSock.recvfrom(BUFSIZE)
    if not data:
        # An empty datagram is treated as the client going away.
        # (typo fixed: was "client has exist")
        print("client has exited")
        break
    # (typo fixed: was "server recvived")
    print("server received:", data, "from", addr)
    # Echo the payload back, tagged with a timestamp.
    udpSerSock.sendto(bytes('[%s] %s' % (ctime(), data), "utf-8"), addr)

udpSerSock.close()
#################################################################################
# (earlier tutorial sections — addNum() and reading stdin — omitted)
#################################################################################
# file I/O demo: write a file, read it back, then delete it.

# Context managers guarantee the handles are closed even if an error occurs
# (the original open()/close() pairs leaked the handle on exceptions).
with open("test.txt", "wb") as test_file:
    print(test_file.name)  # "test.txt"
    print(test_file.mode)  # "wb"
    test_file.write(bytes('Winter is coming, and its going to be very long!\n', 'UTF-8'))

with open("test.txt", "r+") as test_file:
    file_data = test_file.read()
    print(file_data)

os.remove('test.txt')  # to delete the file