def unquote_to_bytes(string):
    """unquote_to_bytes('abc%20def') -> b'abc def'."""
    # Note: strings are encoded as UTF-8. This is only an issue if it contains
    # unescaped non-ASCII characters, which URIs should not.
    if not string:
        # Is it a string-like object? (attribute access raises otherwise)
        string.split
        return bytes(b'')
    if isinstance(string, str):
        string = string.encode('utf-8')
    # Force newbytes on Py2 (python-future compatibility).
    string = bytes(string)
    segments = string.split(b'%')
    if len(segments) == 1:
        # No percent escapes at all: return the input unchanged.
        return string
    decoded = [segments[0]]
    for segment in segments[1:]:
        try:
            decoded.append(_hextobyte[segment[:2]])
        except KeyError:
            # Not a valid two-digit hex escape: keep the '%' literally.
            decoded.append(b'%')
            decoded.append(segment)
        else:
            decoded.append(segment[2:])
    return bytes(b'').join(decoded)
def bitmex_leveragepolice(symbol):
    """Force the leverage of the given BitMEX symbol to 3x (best effort).

    Builds and signs a POST to /api/v1/position/leverage; the HMAC-SHA256
    signature covers verb + path + nonce + raw JSON body, as the BitMEX API
    requires. HTTP errors are logged and swallowed.
    """
    verb = "POST"
    nonce = str(int(time.time()))
    path = "/api/v1/position/leverage"
    # Body is built by hand so the exact signed string is known.
    data = '{"symbol":"'+str(symbol)+'","leverage":3}'
    wplogging.logger.info("swapcheck "+data)
    message = verb + path + nonce + data
    signature = hmac.new(bytes(config.SHITMEX_API_SECRET, 'utf8'), bytes(message, 'utf8'), digestmod=hashlib.sha256).hexdigest()
    req = urllib.request.Request('https://www.bitmex.com'+path)
    req.add_header('api-nonce', nonce)
    req.add_header('api-key', config.SHITMEX_API_KEY)
    req.add_header('api-signature', signature)
    req.add_header('Content-Type', 'application/json')
    datab=str.encode(data) #Bytes needed for POST data
    try:
        resp = urllib.request.urlopen(req, datab)
    except urllib.request.HTTPError as err:
        msg = 'HTTP Error: '+str(err.code)
        wplogging.logger.info("Leverage Police failed: "+msg)
        #bot.sendMessage(chat_id=update.message.chat.id,text=msg,parse_mode="Markdown",disable_web_page_preview=1)
        return
    content = resp.read()
    # Response is parsed but the decoded value is not used afterwards.
    decodeddata =json.loads(content.decode())
    time.sleep(1)
def convert(string):
    """Decode *string* through the decode.codes table, yielding ints (bytes).

    Implements a 4-step bit-repacking state machine: each input byte maps to
    a code (presumably 6-bit, given the shift pattern — TODO confirm), and
    consecutive codes are stitched back into 8-bit output values.
    """
    codes = decode.codes
    state = rem = 0
    # NOTE: `unicode` is the Python 2 text type; this function is Py2-era.
    if isinstance(string, unicode):
        string = bytes(string, 'utf-8')
    else:
        string = bytes(string)
    for byte in string:
        byte = codes[byte]
        if state == 0:
            # Hold the whole code; not enough bits to emit yet.
            rem = byte
            state = 1
        elif state == 1:
            # Emit held bits plus the low 2 bits of this code.
            yield rem | ((byte & 0x3) << 6)
            rem = byte >> 2
            state = 2
        elif state == 2:
            # Emit held bits plus the low 4 bits of this code.
            yield rem | ((byte & 0xf) << 4)
            rem = byte >> 4
            state = 3
        else:
            # Emit held bits plus the entire code, completing the cycle.
            yield rem | (byte << 2)
            state = 0
def _hmac_encode(key, msg):
    """Return the hex SHA-1 HMAC digest of *msg* keyed with *key*.

    Both arguments may be str or bytes; str values are UTF-8 encoded.
    """
    # Coerce both arguments to bytes, for compatibility.
    key = key if isinstance(key, bytes) else bytes(key, encoding='utf8')
    msg = msg if isinstance(msg, bytes) else bytes(msg, encoding='utf8')
    return hmac.new(key, msg, digestmod=hashlib.sha1).hexdigest()
def read(self, amt=None):
    """Read and return the response body, or at most *amt* bytes of it.

    Returns b'' if the connection is already closed, or for HEAD
    responses (which carry no body).
    """
    if self.fp is None:
        return bytes(b"")
    if self._method == "HEAD":
        self._close_conn()
        return bytes(b"")
    if amt is not None:
        # Amount is given, so call base class version
        # (which is implemented in terms of self.readinto)
        return bytes(super(HTTPResponse, self).read(amt))
    else:
        # Amount is not given (unbounded read) so we must check self.length
        # and self.chunked
        if self.chunked:
            return self._readall_chunked()
        if self.length is None:
            s = self.fp.read()
        else:
            try:
                s = self._safe_read(self.length)
            except IncompleteRead:
                # Body was truncated: close and propagate.
                self._close_conn()
                raise
            self.length = 0
        self._close_conn()  # we read everything
        return bytes(s)
def from_string(string):
    """Parse a whitespace-separated script string into a CScript.

    Words may be integers, hex data pushes, single-quoted literals, or
    opcode names (with or without the OP_ prefix).
    """
    # TODO: this should probably go in a util package.
    hex_digits = set('0123456789abcdefABCDEF')

    def ishex(s):
        return set(s).issubset(hex_digits)

    # Accept both OP_-prefixed opcode names and the shortened spellings.
    opcodes_by_name = {}
    for name, code in OPCODES_BY_NAME.items():
        opcodes_by_name[name] = code
        opcodes_by_name[name[3:]] = code

    parts = []
    for word in string.split():
        if word.isdigit() or (word[0] == '-' and word[1:].isdigit()):
            # Numeric literal: encoded as a small-integer push.
            parts.append(CScript([int(word)]))
        elif ishex(word):
            # Raw hex data: emit a length-prefixed push.
            word_bytes = unhexlify(word.encode('utf8'))
            parts.append(bytes([len(word_bytes)]) + word_bytes)
        elif len(word) >= 2 and word[0] == "'" and word[-1] == "'":
            # Single-quoted string literal.
            parts.append(CScript([bytes(word[1:-1].encode('utf8'))]))
        elif word in opcodes_by_name:
            parts.append(CScript([opcodes_by_name[word]]))
        else:
            raise ValueError("Error parsing script: %r" % string)
    return CScript(b''.join(parts))
def quote_from_bytes(bs, safe='/'):
    """Like quote(), but accepts a bytes object rather than a str, and does
    not perform string-to-bytes encoding.  It always returns an ASCII string.
    quote_from_bytes(b'abc def\x3f') -> 'abc%20def%3f'
    """
    if not isinstance(bs, (bytes, bytearray)):
        raise TypeError("quote_from_bytes() expected bytes")
    if not bs:
        return str('')
    ### For Python-Future:
    bs = bytes(bs)
    ###
    if isinstance(safe, str):
        # Normalize 'safe' by converting to bytes and removing non-ASCII chars
        safe = str(safe).encode('ascii', 'ignore')
    else:
        ### For Python-Future:
        safe = bytes(safe)
        ###
        # Non-ASCII byte values can never be "safe" in a quoted URI.
        safe = bytes([c for c in safe if c < 128])
    # Fast path: if stripping every safe byte empties the string, nothing
    # needs quoting and the input decodes directly.
    if not bs.rstrip(_ALWAYS_SAFE_BYTES + safe):
        return bs.decode()
    try:
        quoter = _safe_quoters[safe]
    except KeyError:
        # Cache one Quoter per distinct 'safe' set for reuse across calls.
        _safe_quoters[safe] = quoter = Quoter(safe).__getitem__
    return str('').join([quoter(char) for char in bs])
def sign(value):
    """
    Returns the hash of the given value, used for signing order key stored
    in cookie for remembering address fields.
    """
    return hmac.new(
        bytes(settings.SECRET_KEY, encoding="utf8"),
        bytes(value, encoding="utf8"),
        digest,
    ).hexdigest()
def EnumerateInterfacesFromClient(args):
    """Enumerate all interfaces and collect their MAC addresses.

    Walks the libc getifaddrs() linked list, collecting IPv4/IPv6 addresses
    and (for AF_PACKET entries) MAC addresses per interface name, then yields
    one rdf_client_network.Interface per interface seen.
    """
    del args  # Unused
    libc = ctypes.cdll.LoadLibrary(ctypes.util.find_library("c"))
    ifa = Ifaddrs()
    p_ifa = ctypes.pointer(ifa)
    libc.getifaddrs(ctypes.pointer(p_ifa))
    addresses = {}
    macs = {}
    ifs = set()
    m = p_ifa
    while m:
        ifname = ctypes.string_at(m.contents.ifa_name)
        ifs.add(ifname)
        try:
            iffamily = ord(m.contents.ifa_addr[0])
            if iffamily == 0x2:  # AF_INET
                data = ctypes.cast(m.contents.ifa_addr, ctypes.POINTER(Sockaddrin))
                ip4 = bytes(list(data.contents.sin_addr))
                address_type = rdf_client_network.NetworkAddress.Family.INET
                address = rdf_client_network.NetworkAddress(
                    address_type=address_type, packed_bytes=ip4)
                addresses.setdefault(ifname, []).append(address)
            if iffamily == 0x11:  # AF_PACKET
                data = ctypes.cast(m.contents.ifa_addr, ctypes.POINTER(Sockaddrll))
                addlen = data.contents.sll_halen
                macs[ifname] = bytes(list(data.contents.sll_addr[:addlen]))
            if iffamily == 0xA:  # AF_INET6
                data = ctypes.cast(m.contents.ifa_addr, ctypes.POINTER(Sockaddrin6))
                ip6 = bytes(list(data.contents.sin6_addr))
                address_type = rdf_client_network.NetworkAddress.Family.INET6
                address = rdf_client_network.NetworkAddress(
                    address_type=address_type, packed_bytes=ip6)
                addresses.setdefault(ifname, []).append(address)
        except ValueError:
            # Some interfaces don't have a iffamily and will raise a null pointer
            # exception. We still want to send back the name.
            pass
        m = m.contents.ifa_next
    libc.freeifaddrs(p_ifa)
    for interface in ifs:
        mac = macs.setdefault(interface, b"")
        # Fix: default to an empty *list* (the per-interface value type) and
        # test this interface's own address list. The original defaulted to
        # b"" and tested `if addresses:` (the whole dict), so an interface
        # without addresses got args["addresses"] = b"" whenever any other
        # interface had one.
        address_list = addresses.setdefault(interface, [])
        args = {"ifname": interface}
        if mac:
            args["mac_address"] = mac
        if address_list:
            args["addresses"] = address_list
        yield rdf_client_network.Interface(**args)
def ibytes(x):
    """
    Construct a bytes object from a sequence or iterator over integers.

    In Python 3, bytes() can do that, but python-future does not have that
    capability.
    """
    if hasattr(x, '__len__'):
        # Sized sequence: bytes() can consume it directly.
        return bytes(x)
    # Plain iterator: materialize it first for python-future's newbytes.
    return bytes(list(x))
def pbkdf2_hmac(hash_name, password, salt, iterations, dklen=None):
    """Password based key derivation function 2 (PKCS #5 v2.0)

    This Python implementations based on the hmac module about as fast
    as OpenSSL's PKCS5_PBKDF2_HMAC for short passwords and much faster
    for long passwords.
    """
    if not isinstance(hash_name, str):
        raise TypeError(hash_name)
    if not isinstance(password, (bytes, bytearray)):
        # NOTE: `buffer` is the Python 2 builtin; this branch is Py2-only.
        password = bytes(buffer(password))
    if not isinstance(salt, (bytes, bytearray)):
        salt = bytes(buffer(salt))
    # Fast inline HMAC implementation: build the keyed inner/outer digest
    # objects once, then only copy + update them per PRF call.
    inner = new(hash_name)
    outer = new(hash_name)
    blocksize = getattr(inner, 'block_size', 64)
    if len(password) > blocksize:
        # Over-long keys are first hashed down, per HMAC (RFC 2104).
        password = new(hash_name, password).digest()
    password = password + b'\x00' * (blocksize - len(password))
    inner.update(password.translate(_trans_36))
    outer.update(password.translate(_trans_5C))
    def prf(msg, inner=inner, outer=outer):
        # PBKDF2_HMAC uses the password as key. We can re-use the same
        # digest objects and just update copies to skip initialization.
        icpy = inner.copy()
        ocpy = outer.copy()
        icpy.update(msg)
        ocpy.update(icpy.digest())
        return ocpy.digest()
    if iterations < 1:
        raise ValueError(iterations)
    if dklen is None:
        dklen = outer.digest_size
    if dklen < 1:
        raise ValueError(dklen)
    hex_format_string = '%%0%ix' % (new(hash_name).digest_size * 2)
    dkey = b''
    loop = 1
    while len(dkey) < dklen:
        # U_1 = PRF(password, salt || INT(block_index))
        prev = prf(salt + struct.pack(b'>I', loop))
        rkey = int(binascii.hexlify(prev), 16)
        for _i in range(iterations - 1):
            prev = prf(prev)
            # XOR successive PRF outputs together, done as big integers.
            rkey ^= int(binascii.hexlify(prev), 16)
        loop += 1
        dkey += binascii.unhexlify(hex_format_string % rkey)
    return dkey[:dklen]
def __call__(self, r):
    """
    Called when forming a request - generates api key headers. This call uses
    `expires` instead of nonce. This way it will not collide with other
    processes using the same API Key if requests arrive out of order.
    """
    # modify and return the request
    expires = int(round(time.time()) + 600)*1000  # 10-minute grace period in case of clock skew
    #expires = 1576477289000
    r.headers['api-expires'] = str(expires)
    r.headers['api-key'] = self.apiKey
    #print(str(r.body,encoding = "utf-8"))
    parsedURL = urlparse(r.url)
    #path = parsedURL.path
    #print(parsedURL)
    #ParseResult(scheme='http', netloc='192.168.0.71:7000', path='/v1/api/pc/order/query', params='', query='asset=BTC&symbol=BTC_USD&count=100', fragment='')
    qsencoded = parsedURL.query  #'asset=BTC&symbol=BTC_USD&count=100'
    data = r.body  # only POST-style requests carry a body
    if data:
        # Parameters were sent in the body: sign using the body.
        if isinstance(data, str):
            # request was made with the data= parameter
            query_dict = dict(parse.parse_qsl(data))
            data_str = json.dumps(query_dict,sort_keys = True).replace(' ', '')  # json.dumps emits spaces; strip them
        elif isinstance(data, (bytes, bytearray)):
            # request was made with the json= parameter; r.body is bytes
            data_str = str(data,encoding = "utf-8")
        else:
            # NOTE(review): falls through with data_str unbound here,
            # which would raise NameError below — confirm intended.
            print('r.body not type of str nor bytes, retun None')
        # Sort the fields: parse to a dict, sort keys, dump back to a string.
        d = json.loads(data_str)
        #print(d)
        data_str1 = json.dumps(d,sort_keys = True).replace('\\', '')  # strip backslashes from json.dumps output
        data_str = data_str1.replace(' ', '')  # strip spaces from json.dumps output
    elif qsencoded:
        # Parameters were sent as a query string: sign the query string.
        #path = path + '?' + parsedURL.query
        query_dict = dict(parse.parse_qsl(qsencoded))
        data_str1 = json.dumps(query_dict,sort_keys = True).replace('\\', '')  # strip backslashes from json.dumps output
        data_str = data_str1.replace(' ', '')  # strip spaces from json.dumps output
    else:
        # NOTE(review): data_str is also unbound on this path.
        print('error:has no parameters')
    #print('data_str:'+data_str)
    # Signature covers apiKey + expires + normalized parameter string.
    message = self.apiKey + str(expires) + data_str
    #print('message str:'+ message)
    #print('secret str:'+ self.apiSecret)
    signature = hmac.new(bytes(self.apiSecret, 'utf8'), bytes(message, 'utf8'), digestmod=hashlib.sha256).hexdigest()
    #print('signature str:'+signature)
    r.headers['api-signature'] = signature
    #r.headers['api-signature'] = generate_gte_signature(self.apiKey,self.apiSecret, expires, data_str or '')
    #print(self.apiKey)
    #print(str(expires))
    #print(r.headers['api-signature'])
    return r
def generate_gte_signature(apikey, secret, expires, data_str):
    """Generate a request signature compatible with GTE."""
    # The signed message is the API key, expiry, and payload concatenated.
    message = apikey + str(expires) + data_str
    print('message str:'+ message)
    hexdigest = hmac.new(
        bytes(secret, 'utf8'),
        bytes(message, 'utf8'),
        digestmod=hashlib.sha256,
    ).hexdigest()
    print('signature str:'+hexdigest)
    return hexdigest
def __getitem__(self, key):
    """Return the unpickled object stored under *key* in the window.

    Raises KeyError when the underlying window property is unset/empty.
    """
    self._check_key(key)
    full_key = py2_encode('{0}__{1}'.format(self._id, key))
    raw_item = self._window.getProperty(full_key)
    if raw_item:
        try:
            return pickle.loads(bytes(raw_item))
        except TypeError as e:
            # bytes(str) without an encoding raises TypeError on Python 3;
            # retry with an explicit utf-8 encoding.
            return pickle.loads(bytes(raw_item, 'utf-8'))
    else:
        raise KeyError(key)
def ParseIfaddrs(ifaddrs):
    """Parses contents of the intrusive linked list of `ifaddrs`.

    Args:
      ifaddrs: A pointer to the first node of `ifaddrs` linked list. Can be NULL.

    Returns:
      An iterator over instances of `rdf_client_network.Interface`.
    """
    precondition.AssertOptionalType(ifaddrs, ctypes.POINTER(Ifaddrs))
    # Multiple list nodes may describe the same interface; merge them by name.
    ifaces = {}
    for ifaddr in IterIfaddrs(ifaddrs):
        ifname = ctypes.string_at(ifaddr.ifa_name).decode("utf-8")
        iface = ifaces.setdefault(ifname, rdf_client_network.Interface())
        iface.ifname = ifname
        if not ifaddr.ifa_addr:
            # Node with no address attached; only the name was recorded.
            continue
        sockaddr = ctypes.cast(ifaddr.ifa_addr, ctypes.POINTER(Sockaddr))
        iffamily = sockaddr.contents.sa_family
        if iffamily == AF_INET:
            sockaddrin = ctypes.cast(ifaddr.ifa_addr, ctypes.POINTER(Sockaddrin))
            address = rdf_client_network.NetworkAddress()
            address.address_type = rdf_client_network.NetworkAddress.Family.INET
            # sin_addr is already in network representation; pack natively.
            address.packed_bytes = struct.pack("=L", sockaddrin.contents.sin_addr)
            iface.addresses.append(address)
        elif iffamily == AF_INET6:
            sockaddrin = ctypes.cast(ifaddr.ifa_addr, ctypes.POINTER(Sockaddrin6))
            address = rdf_client_network.NetworkAddress()
            address.address_type = rdf_client_network.NetworkAddress.Family.INET6
            address.packed_bytes = bytes(list(sockaddrin.contents.sin6_addr))
            iface.addresses.append(address)
        elif iffamily == AF_LINK:
            # Link-layer entry: extract the hardware (MAC) address.
            sockaddrdl = ctypes.cast(ifaddr.ifa_addr, ctypes.POINTER(Sockaddrdl))
            nlen = sockaddrdl.contents.sdl_nlen
            alen = sockaddrdl.contents.sdl_alen
            iface.mac_address = bytes(sockaddrdl.contents.sdl_data[nlen:nlen + alen])
        else:
            raise ValueError("Unexpected socket address family: %s" % iffamily)
    return itervalues(ifaces)
def bitmex_getbalance(bot, update):
    """Telegram /bitmexbalance handler: fetch and report BitMEX margin info.

    Signs a GET to /api/v1/user/margin (HMAC-SHA256 of verb+path+nonce+data)
    and replies in chat with the wallet/position/order/available balances
    converted from satoshi to whole coins.
    """
    # Logging
    wplogging.logger.info("/bitmexbalance - "+update.message.from_user.username)
    # Lreciept confirmation
    bot.sendChatAction(chat_id=update.message.chat.id, action=telegram.ChatAction.TYPING)
    verb = "GET"
    nonce = str(int(time.time()))
    path = "/api/v1/user/margin"
    data = ""
    message = verb + path + nonce + data
    #gen sig
    signature = hmac.new(bytes(config.SHITMEX_API_SECRET, 'utf8'), bytes(message, 'utf8'), digestmod=hashlib.sha256).hexdigest()
    #build req headers
    req = urllib.request.Request('https://www.bitmex.com/api/v1/user/margin')
    req.add_header('api-nonce', nonce)
    req.add_header('api-key', config.SHITMEX_API_KEY)
    req.add_header('api-signature', signature)
    try:
        resp = urllib.request.urlopen(req)
    except urllib.request.HTTPError as err:
        # Report the HTTP failure in chat and bail out.
        msg = 'HTTP Error: '+str(err.code)
        wplogging.logger.info(update.message.chat.first_name+" "+msg)
        bot.sendMessage(chat_id=update.message.chat.id,text=msg,parse_mode="Markdown",disable_web_page_preview=1)
        return
    content = resp.read()
    decodeddata =json.loads(content.decode())
    #vars of interest
    #This is the affiliate account balance in satoshi
    currbalance = str(decodeddata['walletBalance']/100000000)
    currord = str(decodeddata['initMargin']/100000000)
    currpos = str(decodeddata['marginBalance']/100000000)
    curravail = str(decodeddata['availableMargin']/100000000)
    # Reply
    msg = config.MSG_HEADER_BITMEX
    msg += "Current wallet balance: "+currbalance+"\n"
    msg += "Current balance net of positions: "+currpos+"\n"
    msg += "Current order value: "+currord+"\n"
    msg += "Available margin: "+curravail+"\n"
    bot.sendMessage(chat_id=update.message.chat.id,text=msg,parse_mode="Markdown",disable_web_page_preview=1)
def generate_signature(self, secret, verb, url, nonce, data):
    """Generate a request signature compatible with BitMEX."""
    # Strip the base URL: only the path (plus query, if any) is signed.
    parsed = urlparse(url)
    path = parsed.path
    if parsed.query:
        path = '?'.join((path, parsed.query))
    # print "Computing HMAC: %s" % verb + path + str(nonce) + data
    message = verb + path + str(nonce) + data
    return hmac.new(
        bytes(secret, 'utf8'),
        bytes(message, 'utf8'),
        digestmod=hashlib.sha256,
    ).hexdigest()
def generate_signature(self, secret, verb, url, nonce, data):
    '''Generate a request signature compatible with BitMEX.'''
    # Only the path component (and query, when present) participates in
    # the signed message; the scheme/host are dropped.
    parsed_url = urlparse(url)
    request_path = parsed_url.path
    if parsed_url.query:
        request_path += '?' + parsed_url.query
    payload = ''.join([verb, request_path, str(nonce), data])
    digest = hmac.new(bytes(secret, 'utf8'), bytes(payload, 'utf8'),
                      digestmod=hashlib.sha256)
    return digest.hexdigest()
def alexa_speech_recognizer_generate_data(audio, boundary):
    """
    Generate a iterator for chunked transfer-encoding request of Alexa
    Voice Service

    Args:
        audio: raw 16 bit LSB audio data
        boundary: boundary of multipart content

    Yields:
        bytes chunks of the multipart body: the JSON request part, the
        audio part header, the raw audio, and the closing boundary.
    """
    logger.debug('Start sending speech to Alexa Voice Service')
    # First multipart section: the JSON "request" metadata.
    chunk = '--%s\r\n' % boundary
    chunk += (
        'Content-Disposition: form-data; name="request"\r\n'
        'Content-Type: application/json; charset=UTF-8\r\n\r\n'
    )
    data = {
        "messageHeader": {
            "deviceContext": [{
                "name": "playbackState",
                "namespace": "AudioPlayer",
                "payload": {
                    "streamId": "",
                    "offsetInMilliseconds": "0",
                    "playerActivity": "IDLE"
                }
            }]
        },
        "messageBody": {
            "profile": "alexa-close-talk",
            "locale": config['triggers']['pocketsphinx']['language'],
            "format": "audio/L16; rate=16000; channels=1"
        }
    }
    yield bytes(chunk + json.dumps(data) + '\r\n', 'utf8')
    # Second multipart section: the audio payload header...
    chunk = '--%s\r\n' % boundary
    chunk += (
        'Content-Disposition: form-data; name="audio"\r\n'
        'Content-Type: audio/L16; rate=16000; channels=1\r\n\r\n'
    )
    yield bytes(chunk, 'utf8')
    # ...followed by the raw audio chunks, streamed through unchanged.
    for audio_chunk in audio:
        yield audio_chunk
    # Closing boundary terminates the multipart body.
    yield bytes('--%s--\r\n' % boundary, 'utf8')
    logger.debug('Finished sending speech to Alexa Voice Service')
    platform.indicate_processing()
def admin_routes(env, start_response):
    """Minimal WSGI app exposing worker admin endpoints.

    "/", "/report", "/report_mem" return the JSON worker report
    (with memory stats for /report_mem); "/wait_for_idle" blocks until
    the worker is idle; anything else is a 404.
    """
    path = env["PATH_INFO"]
    status = "200 OK"
    # NOTE(review): default body is a str, so a 404 returns [""] rather
    # than bytes — confirm the WSGI server tolerates this.
    res = ""
    if path in ["/", "/report", "/report_mem"]:
        report = self.get_worker_report(with_memory=(path == "/report_mem"))
        res = bytes(json_stdlib.dumps(report, cls=MongoJSONEncoder), 'utf-8')
    elif path == "/wait_for_idle":
        self.wait_for_idle()
        res = bytes("idle", "utf-8")
    else:
        status = "404 Not Found"
    start_response(status, [('Content-Type', 'application/json')])
    return [res]
def encode_bytes(*args):
    """
    Encodes values into a byte string

    strings are left as-is (UTF-8 encoded)
    integer values are encoded into their char values
    values of other types are silently skipped

    :return: bytestring representing all arguments joined together
    """
    result = b''
    for arg in args:
        if isinstance(arg, bytes):
            result += arg
        elif isinstance(arg, str):
            # Fix: bytes(str) without an encoding raises TypeError on
            # Python 3; encode explicitly instead.
            result += arg.encode('utf-8')
        elif isinstance(arg, int):
            result += bytes([arg])
    return result
def _call_ipptool(self, request):
    """Run ipptool against the authenticated URI with *request* as input.

    Tornado coroutine variant: writes the request to a temp file, spawns
    ipptool as a Subprocess, enforces self.config['timeout'] via an IOLoop
    timeout, and raises Return() with the first test from the plist output.
    """
    with tempfile.NamedTemporaryFile(delete=False) as temp_file:
        temp_file.write(bytes(request, encoding='utf-8'))
    from tornado.process import Subprocess
    process = Subprocess([self.config['ipptool_path'], self.authenticated_uri,
                          '-X', temp_file.name],
                         stdin=subprocess.PIPE,
                         stdout=Subprocess.STREAM,
                         stderr=Subprocess.STREAM,
                         io_loop=self.io_loop)
    # The timeout handler appends to `future` so we can detect expiry below.
    future = []
    self.io_loop.add_timeout(self.io_loop.time() + self.config['timeout'],
                             functools.partial(self.timeout_handler,
                                               process.proc, future))
    try:
        stdout, stderr = yield [Task(process.stdout.read_until_close),
                                Task(process.stderr.read_until_close)]
        if future:
            raise TimeoutError
    finally:
        # Always clean up the temp request file.
        os.unlink(temp_file.name)
    result = plistlib.readPlistFromString(stdout)
    try:
        raise Return(result['Tests'][0])
    except (IndexError, KeyError):
        logger = logging.getLogger(__name__)
        logger.error('ipptool command failed: {} {}'.format(stdout, stderr))
        raise
def _safe_readinto(self, b):
    """Same as _safe_read, but for reading into a buffer."""
    total_bytes = 0
    # A memoryview lets us fill slices of b in place without copying.
    mvb = memoryview(b)
    while total_bytes < len(b):
        if MAXAMOUNT < len(mvb):
            # Cap each read at MAXAMOUNT bytes.
            temp_mvb = mvb[0:MAXAMOUNT]
            if PY2:
                # Py2 file objects lack readinto(); emulate via read + copy.
                data = self.fp.read(len(temp_mvb))
                n = len(data)
                temp_mvb[:n] = data
            else:
                n = self.fp.readinto(temp_mvb)
        else:
            if PY2:
                data = self.fp.read(len(mvb))
                n = len(data)
                mvb[:n] = data
            else:
                n = self.fp.readinto(mvb)
        if not n:
            # EOF before the buffer was filled.
            # NOTE(review): mvb has been advanced past the data already
            # read, so mvb[0:total_bytes] is not the received prefix —
            # this mirrors the upstream http.client code; confirm intended.
            raise IncompleteRead(bytes(mvb[0:total_bytes]), len(b))
        mvb = mvb[n:]
        total_bytes += n
    return total_bytes
def __init__(self, width, height, data):
    """
    Initialize an XBM image.

    Parameters
    ----------
    width : int
        The width of the bitmap.
    height : int
        The height of the bitmap.
    data : list
        A list of 1s and 0s which represent the bitmap data.
        The length must be equal to width * height.
    """
    assert len(data) == (width * height)
    bytes_list = []
    for row in range(height):
        val = 0
        offset = row * width
        for col in range(width):
            d = col % 8
            if col > 0 and d == 0:
                # A full byte has been accumulated; flush it and restart.
                bytes_list.append(val)
                val = 0
            v = data[offset + col]
            # Pixels are packed MSB-first within each byte (shift 7 - d).
            val |= v << (7 - d)
        # Flush the final (possibly partial) byte of the row.
        bytes_list.append(val)
    self.width = width
    self.height = height
    self.data = bytes(bytes_list)
def readdir(self, dirname=b'.'):
    """
    List the files in a directory.

    Parameters
    ----------
    dirname : bytes (optional)
        Name of the directory to list. Defaults to the root directory.

    Returns
    -------
    out : [bytes]
        List of file names.
    """
    files = []
    # Paginate: each request uses len(files) as the offset, and an empty
    # reply chunk signals the end of the listing.
    while True:
        seq = self.next_seq()
        msg = MsgFileioReadDirReq(
            sequence=seq, offset=len(files), dirname=dirname)
        self.link(msg)
        reply = self.link.wait(SBP_MSG_FILEIO_READ_DIR_RESP, timeout=1.0)
        if not reply:
            raise Exception("Timeout waiting for FILEIO_READ_DIR reply")
        # Why isn't this already decoded?
        reply = MsgFileioReadDirResp(reply)
        if reply.sequence != seq:
            raise Exception("Reply FILEIO_READ_DIR doesn't match request (%d vs %d)"
                            % (reply.sequence, seq))
        # Names come back NUL-separated with trailing NUL padding.
        chunk = bytes(reply.contents).rstrip(b'\0')
        if len(chunk) == 0:
            return files
        files += chunk.split(b'\0')
def generate_signature(secret, verb, url, nonce, data):
    """Generate a request signature compatible with GTE."""
    # Only the path (and query string, when present) of the URL is signed.
    parsed = urlparse(url)
    path = parsed.path if not parsed.query else parsed.path + '?' + parsed.query
    # Accept a bytes body too; normalize it to text before concatenating.
    if isinstance(data, (bytes, bytearray)):
        data = data.decode('utf8')
    # print "Computing HMAC: %s" % verb + path + str(nonce) + data
    message = '{}{}{}{}'.format(verb, path, nonce, data)
    return hmac.new(bytes(secret, 'utf8'), bytes(message, 'utf8'),
                    digestmod=hashlib.sha256).hexdigest()
def upload_file(self, model, filename, query_parameters=None, uri=None):
    """Upload *filename* as multipart form data and populate *model*.

    Returns the populated model on HTTP 200, or False on failure.
    """
    if not uri:
        uri = model.uri()
    # urllib3 still uses RFC2231, not compliant with our server
    # https://github.com/shazow/urllib3/issues/303
    # https://github.com/shazow/urllib3/pull/856
    filename_ascii = bytes(filename, 'utf-8').decode('ascii', 'ignore')
    # NOTE(review): the file handle opened here is never explicitly closed.
    m = MultipartEncoder(fields={"files[]": (filename_ascii,
                                             open(filename, 'rb'))})
    response = self._session.post("{}{}".format(self._base_url(), uri),
                                  auth=CytomineAuth(
                                      self._public_key, self._private_key,
                                      self._base_url(), self._base_path),
                                  headers=self._headers(content_type=m.content_type),
                                  params=query_parameters,
                                  data=m)
    if response.status_code == requests.codes.ok:
        model = model.populate(response.json())  # [model.callback_identifier.lower()])
        self._logger.info("File uploaded successfully to {}".format(uri))
    else:
        model = False
        self._logger.error("Error during file uploading to {}".format(uri))
    return model
def handle_refresh_request(rsp):
    """Normalize a token-refresh response and record the token.

    When the server reports an error as text/plain, the payload is rewritten
    as an OAuth-style JSON error body so downstream parsing succeeds.
    """
    logger.debug("HANDLE_RESPONSE")
    if rsp.headers['content-type'] == 'text/plain':
        rsp._content = bytes(json.dumps({'error': 'invalid_client',
                                         'error_description': rsp.text}),
                             rsp.encoding)
        rsp.headers['content-type'] = 'application/json'
    # NOTE(review): token is set to the (possibly rewritten) response text
    # even on the error path — confirm intended.
    session.token = rsp.text
    return rsp
def _tunnel(self):
    """Issue a CONNECT request through the proxy and validate the reply."""
    self._set_hostport(self._tunnel_host, self._tunnel_port)
    connect_str = "CONNECT %s:%d HTTP/1.0\r\n" % (self.host, self.port)
    connect_bytes = connect_str.encode("ascii")
    self.send(connect_bytes)
    for header, value in self._tunnel_headers.items():
        header_str = "%s: %s\r\n" % (header, value)
        header_bytes = header_str.encode("latin-1")
        self.send(header_bytes)
    # Blank line terminates the request header block.
    self.send(bytes(b'\r\n'))
    response = self.response_class(self.sock, method=self._method)
    (version, code, message) = response._read_status()
    if code != 200:
        self.close()
        raise socket.error("Tunnel connection failed: %d %s" % (code,
                                                                message.strip()))
    # Drain and discard the proxy's response headers up to the blank line.
    while True:
        line = response.fp.readline(_MAXLINE + 1)
        if len(line) > _MAXLINE:
            raise LineTooLong("header line")
        if not line:
            # for sites which EOF without sending a trailer
            break
        if line in (b'\r\n', b'\n', b''):
            break
def on_notification(self, handle, data):
    """Dispatch a received notification to the registered callback."""
    # Drop the first three bytes of the raw value (presumably a protocol
    # header — TODO confirm) and pass only the payload along.
    payload = bytes(data[3:])
    callback = self.notification_callback
    if callback is not None:
        callback(handle, payload)
    self.notification_event.set()
def __init__(self, raw=None, fees=None, disassemble=False, pos=False, messages=False):
    """Initialize a transaction container.

    raw: optional raw transaction bytes (bytearray or future newbytes).
    fees: integer fee value kept for utility; not encoded in the pack.
    disassemble: if True, parse `raw` into inputs/outputs immediately.
    pos: presumably proof-of-stake mode (enables n_time) — TODO confirm.
    messages: allocate a transaction_message buffer.
    """
    # raw transaction data in byte format
    if raw:
        if not isinstance(raw, (bytearray, newbytes.newbytes)):
            raise AttributeError("Raw data must be a bytestring, not {}"
                                 .format(type(raw)))
        self._raw = bytes(raw)
    else:
        self._raw = None
    self.inputs = []    # parsed transaction inputs
    self.outputs = []   # parsed transaction outputs
    self.locktime = 0
    if pos:
        self.n_time = 0
    else:
        self.n_time = None
    # integer value, not encoded in the pack but for utility
    self.fees = fees
    self.version = None  # stored as le bytes
    self._hash = None    # cached hash, recomputed lazily
    self.transaction_message = b"" if messages else None
    if disassemble:
        self.disassemble()
def on_indication(self, handle, data):
    """Dispatch a received indication to the registered callback."""
    payload = bytes(data)
    callback = self.indication_callback
    if callback is not None:
        callback(handle, payload)
    self.indication_event.set()
def _send_webhook_msg(self, ip, port, payload_str, url_path='', content_len=-1, content_type='application/json', get_method=None):
    """POST *payload_str* to the webhook endpoint at ip:port/url_path.

    content_len: -1 computes the length from the payload; None omits the
    content-length header entirely; any other value is sent verbatim.
    get_method: optional override for the HTTP method name.
    Returns the urlopen() response object.
    """
    headers = {
        'content-type': content_type,
    }
    if not payload_str:
        # Empty payload: send no body and no content-length.
        content_len = None
        payload = None
    else:
        payload = bytes(payload_str, encoding='utf-8')
    if content_len == -1:
        content_len = len(payload)
    if content_len is not None:
        headers['content-length'] = str(content_len)
    url = 'http://{ip}:{port}/{path}'.format(ip=ip, port=port, path=url_path)
    req = Request(url, data=payload, headers=headers)
    if get_method is not None:
        req.get_method = get_method
    return urlopen(req)
def _call_ipptool(self, request):
    """Run ipptool against the authenticated URI with *request* as input.

    Blocking variant: writes the request to a temp file, runs ipptool via
    subprocess, enforces self.config['timeout'] with a threading.Timer, and
    returns the first test result from the plist output.
    """
    with tempfile.NamedTemporaryFile(delete=False) as temp_file:
        temp_file.write(bytes(request, encoding='utf-8'))
    process = subprocess.Popen([self.config['ipptool_path'],
                                self.authenticated_uri,
                                '-X', temp_file.name],
                               stdin=subprocess.PIPE,
                               stdout=subprocess.PIPE,
                               stderr=subprocess.PIPE)
    # The timeout handler appends to `future` so expiry is detectable below.
    future = []
    timer = threading.Timer(self.config['timeout'], self.timeout_handler,
                            (process, future))
    timer.start()
    try:
        stdout, stderr = process.communicate()
    finally:
        # Always clean up the temp request file and cancel the timer.
        os.unlink(temp_file.name)
        timer.cancel()
    if future:
        raise TimeoutError
    if PY3:
        result = plistlib.loads(stdout)
    else:
        result = plistlib.readPlistFromString(stdout)
    try:
        return result['Tests'][0]
    except (IndexError, KeyError):
        logger = logging.getLogger(__name__)
        logger.error('ipptool command failed: {} {}'.format(stdout, stderr))
        raise
def receive(self, count=DEFAULT_RECV_COUNT, encoding=DEFAULT_ENCODING): """ """ # TODO: multiprocess approach will pickle io objects... The goal is to use a Manager instead in final implementation. receive_queue = multiprocessing.Queue() t_reader = multiprocessing.Process(target=receive_proc, args=(receive_queue, self.stdio, count)) t_reader.start() t_reader.join(timeout=self.timeout) if t_reader.is_alive(): # the read timed out! t_reader.terminate() t_reader.join() raise DeviceTimeoutError(uuid=self._uuid, name=self.name, make=self.make, model=self.model, version=self.version, message='device timed out during receive.') received = receive_queue.get_nowait() if isinstance(received, Exception): raise received if encoding: return received.decode(encoding=encoding) else: return bytes(received)
def readdir(self, dirname=b'.'):
    """
    List the files in a directory.

    Parameters
    ----------
    dirname : bytes (optional)
        Name of the directory to list. Defaults to the root directory.

    Returns
    -------
    out : [bytes]
        List of file names.
    """
    files = []
    # Paginate: each request uses len(files) as the offset; an empty reply
    # chunk signals the end of the listing.
    while True:
        seq = self.next_seq()
        msg = MsgFileioReadDirReq(
            sequence=seq, offset=len(files), dirname=dirname)
        self.link(msg)
        reply = self.link.wait(SBP_MSG_FILEIO_READ_DIR_RESP,
                               timeout=READDIR_WAIT_S)
        if not reply:
            raise Exception("Timeout waiting for FILEIO_READ_DIR reply")
        # Why isn't this already decoded?
        reply = MsgFileioReadDirResp(reply)
        if reply.sequence != seq:
            raise Exception("Reply FILEIO_READ_DIR doesn't match request (%d vs %d)"
                            % (reply.sequence, seq))
        # Names come back NUL-separated with trailing NUL padding.
        chunk = bytes(reply.contents).rstrip(b'\0')
        if len(chunk) == 0:
            return files
        files += chunk.split(b'\0')
def test_form_fields(self):
    """
    When a ``POST`` request is received, form fields are parsed into
    ``TahoeLAFSRequest.fields``.
    """
    # Two parts: a plain field ("foo") and a file field ("baz" / "quux").
    form_data, boundary = multipart_formdata([
        [param(u"name", u"foo"),
         body(u"bar"),
         ],
        [param(u"name", u"baz"),
         param(u"filename", u"quux"),
         body(u"some file contents"),
         ],
    ])
    self._fields_test(
        b"POST",
        {b"content-type": b"multipart/form-data; boundary=" + bytes(boundary, 'ascii')},
        form_data.encode("ascii"),
        AfterPreprocessing(
            lambda fs: {k: fs.getvalue(k) for k in fs.keys()},
            Equals({
                "foo": "bar",
                # File fields come back as bytes; plain fields as str.
                "baz": b"some file contents",
            }),
        ),
    )
def report_worker(self, w=0):
    """Persist a worker status report to MongoDB (and optionally a file).

    w: MongoDB write concern for the update.
    Side effect: may trigger shutdown_max_memory() when the process exceeds
    the configured memory limit.
    """
    report = self.get_worker_report(with_memory=True)
    if self.config["max_memory"] > 0:
        # max_memory is configured in MB; report figures are in bytes.
        if report["process"]["mem"]["total"] > (self.config["max_memory"] * 1024 * 1024):
            self.shutdown_max_memory()
    if self.config["report_file"]:
        with open(self.config["report_file"], "wb") as f:
            f.write(bytes(json.dumps(report, ensure_ascii=False, default=str), 'utf-8'))  # pylint: disable=no-member
    # Never write _id back; MongoDB forbids changing it on update.
    if "_id" in report:
        del report["_id"]
    try:
        self.mongodb_jobs.mrq_workers.update({"_id": ObjectId(self.id)},
                                             {"$set": report},
                                             upsert=True, w=w)
    except Exception as e:  # pylint: disable=broad-except
        # Reporting is best-effort; never let it take the worker down.
        self.log.debug("Worker report failed: %s" % e)
def __init__(self, raw=None, fees=None, disassemble=False, pos=False, messages=False):
    """Initialize a transaction container.

    raw: optional raw transaction bytes (bytearray or future newbytes).
    fees: integer fee value kept for utility; not encoded in the pack.
    disassemble: if True, parse `raw` into inputs/outputs immediately.
    pos: presumably proof-of-stake mode (enables n_time) — TODO confirm.
    messages: allocate a transaction_message buffer.
    """
    # raw transaction data in byte format
    if raw:
        if not isinstance(raw, (bytearray, newbytes.newbytes)):
            raise AttributeError(
                "Raw data must be a bytestring, not {}".format(type(raw)))
        self._raw = bytes(raw)
    else:
        self._raw = None
    self.inputs = []    # parsed transaction inputs
    self.outputs = []   # parsed transaction outputs
    self.locktime = 0
    if pos:
        self.n_time = 0
    else:
        self.n_time = None
    # integer value, not encoded in the pack but for utility
    self.fees = fees
    self.version = None  # stored as le bytes
    self._hash = None    # cached hash, recomputed lazily
    self.transaction_message = b"" if messages else None
    if disassemble:
        self.disassemble()
def raw_filename(str_filename):
    """Return a filename in raw bytes from a command line option string.

    Non-unicode characters/bytes in the command line options are decoded by
    using 'surrogateescape' and file system encoding, and this reverts that.

    References:
    https://www.python.org/dev/peps/pep-0383/
    https://docs.python.org/3/library/os.html#file-names-command-line-arguments-and-environment-variables
    """
    fs_encoding = sys.getfilesystemencoding()
    return str_filename.encode(fs_encoding, 'surrogateescape')
def _cloud_init_build(self, cloud_init, **kwargs):
    """Build the cloud-init script, gzip/base64-wrapping it when too long.

    Returns either the script verbatim or, if it exceeds
    MAX_CLOUD_INIT_LENGTH, a small bash bootstrap that decodes,
    gunzips, and executes the embedded payload on the instance.
    """
    cloud_init_script = cloud_init.build(self.config_data, **kwargs)
    if len(cloud_init_script) > MAX_CLOUD_INIT_LENGTH:
        # Too large to pass verbatim: compress and embed as base64.
        output = BytesIO()
        with GzipFile(mode='wb', fileobj=output) as gzfile:
            gzfile.write(bytes(cloud_init_script, "utf-8"))
        cloud_init_script = "#!/bin/bash\necho '{}' | base64 -d | gunzip | /bin/bash".format(b64encode(output.getvalue()).decode())
    return cloud_init_script
def __init__(self, data=None):
    """Store a private copy of *data* (bytes or bytearray; default empty)."""
    if data is None:
        self.data = b""
        return
    if not isinstance(data, (bytes, bytearray)):
        raise TypeError("expected bytes or bytearray, not %s"
                        % data.__class__.__name__)
    # bytes() takes a copy, so later mutation of a bytearray argument
    # cannot affect this object.
    self.data = bytes(data)
def disassemble(self, raw=None, dump_raw=False, fees=None):
    """ Unpacks a raw transaction into its object components. If raw is
    passed here it will set the raw contents of the object before
    disassembly. Dump raw will mark the raw data for garbage collection to
    save memory. """
    if fees:
        self.fees = fees
    if raw:
        self._raw = bytes(raw)
    data = self._raw

    # first four bytes, little endian unpack
    self.version = self.funpack('<L', data[:4])
    # decode the number of inputs and adjust position counter
    input_count, data = self.varlen_decode(data[4:])

    # loop over the inputs and parse them out
    self.inputs = []
    for i in range(input_count):
        # get the previous transaction hash and it's output index in the
        # previous transaction
        prevout_hash = data[:32]
        prevout_idx = self.funpack('<L', data[32:36])
        # get length of the txn script
        ss_len, data = self.varlen_decode(data[36:])
        script_sig = data[:ss_len]  # get the script
        # get the sequence number
        seqno = self.funpack('<L', data[ss_len:ss_len + 4])
        # chop off the this transaction from the data for next iteration
        # parsing
        data = data[ss_len + 4:]
        # save the input in the object
        self.inputs.append(
            Input(prevout_hash, prevout_idx, script_sig, seqno))

    output_count, data = self.varlen_decode(data)
    self.outputs = []
    for i in range(output_count):
        # 8-byte little-endian amount, then the scriptPubKey.
        amount = self.funpack('<Q', data[:8])
        # length of scriptPubKey, parse out
        ps_len, data = self.varlen_decode(data[8:])
        pk_script = data[:ps_len]
        data = data[ps_len:]
        self.outputs.append(
            Output(amount, pk_script))
    self.locktime = self.funpack('<L', data[:4])
    # reset hash to be recacluated on next grab
    self._hash = None
    # ensure no trailing data...
    assert len(data) == 4
    if dump_raw:
        # Drop the raw buffer so it can be garbage collected.
        self._raw = None
    return self
def CreateHmac( content, hmac_secret ):
    """Return the raw SHA-256 HMAC digest of content keyed with hmac_secret."""
    # Note that py2's str type passes this check (and that's ok)
    if not isinstance( content, bytes ):
        raise TypeError( 'content was not of bytes type; you have a bug!' )
    if not isinstance( hmac_secret, bytes ):
        raise TypeError( 'hmac_secret was not of bytes type; you have a bug!' )

    mac = hmac.new( hmac_secret, msg = content, digestmod = hashlib.sha256 )
    return bytes( mac.digest() )
def putheader(self, header, *values):
    """Send a request header line to the server.

    For example: h.putheader('Accept', 'text/html')
    """
    if self.__state != _CS_REQ_STARTED:
        raise CannotSendHeader()
    # Header names are ASCII; values latin-1 (ints are stringified first).
    if hasattr(header, 'encode'):
        header = header.encode('ascii')
    values = list(values)
    for i, one_value in enumerate(values):
        if hasattr(one_value, 'encode'):
            values[i] = one_value.encode('latin-1')
        elif isinstance(one_value, int):
            values[i] = str(one_value).encode('ascii')
    # Multiple values are folded onto continuation lines (CRLF + TAB).
    value = bytes(b'\r\n\t').join(values)
    header = header + bytes(b': ') + value
    self._output(header)