def test048_savePajek1_saveDot1():
    """Round-trip test: save a mixed-key graph in Pajek and DOT formats and
    compare each output file byte-for-byte against a bzip2+base64 reference.
    """
    # This test requires to save/load/remove two files from disk.
    g = Graph()
    # Node keys deliberately mix types: int, complex, tuple and str.
    g.addNode(1, nodeData=(256,128) )
    g.addNode(2+3J, nodeData=(6,11) )
    g.addNode((1,2), nodeData=(0,0) )
    g.addNode("a", nodeData=(1,1) )
    g.addBiArc(1, 2+3J, w=-2.1)
    g.addArc((1,2), 1, w=5)
    g.addArc(1, (1,2), w=1)
    g.addArc((1,2), 2+3J, w=11)
    g.addBiArc((1,2), "a", w=22)
    # Self-loop with zero weight.
    g.addBiArc(1,1, w=0)
    filename = "temp.paj"
    g.savePajek(filename)
    from bz2 import decompress
    from base64 import standard_b64decode
    from os import remove
    # Reference payload: expected file contents, bzip2-compressed then
    # base64-encoded to keep the test source compact.
    # NOTE(review): open().read() returns str while decompress() returns
    # bytes on Python 3 — this comparison assumes Python 2. TODO confirm.
    d = r"QlpoOTFBWSZTWUt6mzsAAB7fgAAQUH9/QCIAEQAusBwAIABqGqeFMmIMgGhiDTKTagABoAIdlpWRKsYoEkrTN7L0Q11CpFrFVxGkwEducZmeAE4o8xZPFFpGEyBYp84M8UWIhJB5kJFEOXB8hACdS4VoB78BSvVIAWrZ7CEPxdyRThQkEt6mzsA="
    assert open(filename).read() == decompress(standard_b64decode(d))
    remove(filename)
    filename = "temp.dot"
    g.saveDot(filename)
    d = r"QlpoOTFBWSZTWWJy76UAAEZfgAAQUG54AwCAAAo+9d8KIACKCVMkxDRHijQPUBsoJRENAAAA0BHOTIro1EW2CuVXKqcWM1Ts1zn4dA60ZJ6bNkZeEfMQsBUUIbWjRWKwu2B4P6zAcJJWoODdYyQY1CAXtCAewvvvSZ3xroFAjKFgzoGQQeyjwsVCN0XZNX3n/F3JFOFCQYnLvpQ="
    assert open(filename).read() == decompress(standard_b64decode(d))
    remove(filename)
def decode_fill_value(v, dtype):
    """Decode a JSON-serialised fill value *v* into a scalar of *dtype*.

    Float dtypes accept the sentinels 'NaN', 'Infinity' and '-Infinity';
    bytes ('S') and void ('V') dtypes expect base64-encoded payloads;
    unicode ('U') values pass through unchanged; everything else is
    coerced via numpy.
    """
    # early out
    if v is None:
        return v
    if dtype.kind == 'f':
        if v == 'NaN':
            return np.nan
        elif v == 'Infinity':
            # BUG FIX: np.PINF / np.NINF were removed in NumPy 2.0; the
            # plain infinity constants are value-identical replacements.
            return np.inf
        elif v == '-Infinity':
            return -np.inf
        else:
            return np.array(v, dtype=dtype)[()]
    elif dtype.kind == 'S':
        # noinspection PyBroadException
        try:
            v = base64.standard_b64decode(v)
        except Exception:
            # be lenient, allow for other values that may have been used before base64
            # encoding and may work as fill values, e.g., the number 0
            pass
        v = np.array(v, dtype=dtype)[()]
        return v
    elif dtype.kind == 'V':
        # Structured scalar: decode raw bytes, then reinterpret as dtype.
        v = base64.standard_b64decode(v)
        v = np.array(v, dtype=dtype.str).view(dtype)[()]
        return v
    elif dtype.kind == 'U':
        # leave as-is
        return v
    else:
        return np.array(v, dtype=dtype)[()]
def __decrypt_py2(secret, encrypted_value, logger):
    """Decrypt an AES-256-CBC encrypted value (Python 2 code path).

    *encrypted_value* is expected as "<b64 IV>!<b64 ciphertext>" and
    *secret* as a base64-encoded 32-byte key.  On any validation failure
    the input value is returned unchanged (best-effort decryption).
    """
    # If the string length is 0 or no secret was passed in, return the empty string.
    if not encrypted_value or not secret:
        return encrypted_value
    parts = encrypted_value.split("!")
    if len(parts) != 2:
        logger.warn('Encrypted value "%s" not in standard encrypted format, decryption skipped.' % encrypted_value)
        return encrypted_value
    iv_text = parts[0]
    encrypted_text = parts[1]
    iv_bytes = base64.standard_b64decode(iv_text)
    secret_bytes = base64.standard_b64decode(secret)
    # AES block size: the IV must be exactly 16 bytes.
    if len(iv_bytes) != 16:
        logger.warn('Initialization Vector for "%s" not valid, decryption skipped.' % encrypted_value)
        return encrypted_value
    # AES-256 requires a 32-byte key.
    if len(secret_bytes) != 32:
        logger.warn('Passed in secret length is invalid, decryption skipped.')
        return encrypted_value
    cipher = Cipher(algorithms.AES(secret_bytes), modes.CBC(iv_bytes), backend=default_backend())
    decryptor = cipher.decryptor()
    decrypted_bytes = decryptor.update(base64.standard_b64decode(encrypted_text)) + decryptor.finalize()
    # NOTE(review): .encode('utf-8') on a byte string relies on Python 2's
    # implicit ASCII decode — this line would raise on Python 3.
    decrypted_string = decrypted_bytes.encode('utf-8')
    # Strip control characters (including the CBC padding bytes, all < 32).
    return ''.join([char for char in decrypted_string if ord(char) > 31])
def post(self):
    """Handle a sync POST: return the stored password blob for an existing
    user, or create a new User record from the supplied salt/hash.
    """
    username = self.request.get("username")
    assert username
    password_hash = self.request.get("password_hash")
    assert password_hash
    query = User.all()
    # NOTE(review): the next statement appears to have been mangled by a
    # credential-scrubbing tool ("******"); the original filter arguments
    # and the code that populates `chunks`/`passwords`/`user` are missing.
    query.filter("username ="******"".join([chunk.chunk for chunk in chunks])
    if passwords:
        # Existing data.
        self.response.out.write(json.dumps(
            {
                "version" : user.version,
                "last_modified": time.mktime(user.last_modified.timetuple()),
                "passwords": Decode(passwords)
            }))
    else:
        # New user.
        salt = self.request.get("salt")
        assert salt
        user = User(
            version = 2,
            username = username,
            # Salt and hash are transmitted base64-encoded; store raw bytes.
            salt = db.Blob(base64.standard_b64decode(salt)),
            password_hash = db.ByteString(
                base64.standard_b64decode(password_hash)))
        user.put()
    SetUserCookie(self.response, username, password_hash)
def __doCerts(self, sts, sks, token):
    """Materialise the base64-encoded trust store / key store to a temp
    directory, import the key store's private key and certificates, then
    remove the temp directory.

    sts/sks: base64-encoded trust-store and key-store payloads.
    token: forwarded to JksHandler.writePkAndCerts; sets self.uid.
    """
    home = expanduser("~")
    tmp_dir = home + '/magistral/tmp'
    if not os.path.exists(tmp_dir):
        os.makedirs(tmp_dir)
    # 'with' closes the files; the explicit seek(0)/close() calls in the
    # previous version were redundant on a freshly opened 'wb' handle.
    with open(tmp_dir + '/ts', 'wb') as f:
        f.write(bytearray(base64.standard_b64decode(sts)))
    with open(tmp_dir + '/ks', 'wb') as f:
        f.write(bytearray(base64.standard_b64decode(sks)))
    ks = jks.KeyStore.load(tmp_dir + '/ks', 'magistral')
    self.uid = JksHandler.writePkAndCerts(ks, token)
    # Remove key material from disk as soon as it has been imported.
    if os.path.exists(tmp_dir):
        shutil.rmtree(tmp_dir)
def do_authn(self):
    """Authenticate the current CherryPy request via HTTP Basic auth.

    Base64-decodes the Authorization header, splits it into
    username/password (handling str vs bytes for py2/py3 compatibility)
    and delegates to self.authenticate().
    """
    try:
        header = cherrypy.request.headers['Authorization']
    except KeyError:
        self.critical_error("No authentication data presented")
    auth_type, auth_content = header.split()
    try:
        # py3k compatibility
        auth_content = base64.standard_b64decode(auth_content)
    except TypeError:
        auth_content = \
            base64.standard_b64decode(bytes(auth_content.encode('ascii')))
    try:
        # py3k compatibility
        try:
            username, password = auth_content.split(":")
        except TypeError:
            # auth_content is bytes on py3: split on b":" and decode.
            username, pw = auth_content.split(bytes(":", encoding='utf-8'))
            password = pw.decode('utf-8')
    except ValueError:
        # No ":" separator: treat the whole payload as the username.
        username = auth_content
        password = ""
    # FIXME: Get client cert
    cert = None
    address = (cherrypy.request.remote.ip, cherrypy.request.remote.name)
    return self.authenticate(cert, username, password, address)
def verify_certs_signature(service_id, xml_text, notary_pub_key_text):
    """Verify the notary's RSA/SHA-256 signature over a certificate list.

    The signed payload is service_id followed by, for each <certificate>
    element, its base64-decoded body and its packed start/end timestamps.
    Returns the result of EVP verify_final (truthy on success).
    """
    doc = parseString(xml_text)
    root = doc.documentElement
    sig_to_verify = base64.standard_b64decode(root.getAttribute("sig"))
    to_verify = service_id
    cert_elements = root.getElementsByTagName("certificate")
    for cert_elem in cert_elements:
        cert = base64.standard_b64decode(cert_elem.getAttribute("body"))
        to_verify += cert
        start_ts = int(cert_elem.getAttribute("start"))
        end_ts = int(cert_elem.getAttribute("end"))
        # "!2I": two big-endian unsigned 32-bit ints — must match the signer.
        to_verify += struct.pack("!2I", start_ts, end_ts)
    bio = BIO.MemoryBuffer(notary_pub_key_text)
    rsa_pub = RSA.load_pub_key_bio(bio)
    pubkey = EVP.PKey()
    pubkey.assign_rsa(rsa_pub)
    pubkey.reset_context(md='sha256')
    pubkey.verify_init()
    pubkey.verify_update(to_verify)
    return pubkey.verify_final(sig_to_verify)
def loads_from_str(cls, things):
    """Deserialise *things* (base64-encoded pickle as str, or raw pickle
    bytes) back into an object; returns None on any failure.

    SECURITY NOTE: pickle.loads executes arbitrary code during
    unpickling — never feed this untrusted data.
    """
    if not things:
        logging.warning("No data to decode")
        return
    if isinstance(things, str):
        try:
            # cls.__code__ is presumably the text encoding name — TODO confirm.
            return pickle.loads(base64.standard_b64decode(bytes(things, encoding=cls.__code__)))
        except Exception as e:
            logging.error("Base64 Format Error! Error data: {}".format(things))
            logging.error(traceback.format_exc())
            return
    elif isinstance(things, bytes):
        try:
            return pickle.loads(base64.standard_b64decode(things))
        except BaseException:
            logging.error('Unpickle error:{}'.format(things))
            logging.error(traceback.format_exc())
            return
    else:
        # Neither str nor bytes: log and give up.
        logging.warning(
            "Decode unknown type data to Object happened exception, type={}, Data={}".format(
                str(type(things)), str(things))
        )
        return
def restore_container_snapshots(container, snapshot):
    """
    Restore the referenced container snapshot.

    *container* and *snapshot* arrive base64-encoded and are decoded
    before being passed to the backend.  Every failure mode is mapped to
    a specific error response rather than raised to the caller.

    Note: Backends may have preconditions before this operation can be run.
    """
    if not isinstance(config.container_backend, SnapshotableContainerBackend):
        return error_precondition_required("Snapshotable backend required")
    try:
        restored_snapshot = config.container_backend.restore_container_snapshot(
            standard_b64decode(container),
            standard_b64decode(snapshot)
        )
        return success_ok(restored_snapshot)
    except ContainerNotFoundError:
        return error_not_found("Container not found")
    except ContainerSnapshotNotFoundError:
        return error_not_found("Container snapshot not found")
    except IllegalContainerStateError:
        return error_precondition_failed("Container in illegal state for requested action")
    except ContainerBackendError:
        return error_unexpected_error("Unexpected backend error")
    except NotImplementedError:
        return error_not_implemented()
    except:
        # Deliberate catch-all: an API handler must always return a response.
        return error_unexpected_error()
def authenticate(self):
    """Authenticate the current request via HTTP Basic auth.

    Mirrors the CherryPy variant: base64-decodes the Authorization
    header, splits username/password with py2/py3 compatibility, and
    delegates to the server instance together with the peer cert/address.
    Returns False when no Authorization header is present.
    """
    try:
        header = self.headers['Authorization']
    except KeyError:
        self.logger.error("No authentication data presented")
        return False
    auth_type, auth_content = header.split()
    try:
        # py3k compatibility
        auth_content = base64.standard_b64decode(auth_content)
    except TypeError:
        auth_content = base64.standard_b64decode(bytes(auth_content.encode('ascii')))
    try:
        # py3k compatibility
        try:
            username, password = auth_content.split(":")
        except TypeError:
            # auth_content is bytes on py3: split on b":" and decode.
            username, pw = auth_content.split(bytes(":", encoding='utf-8'))
            password = pw.decode('utf-8')
    except ValueError:
        # No ":" separator: treat the whole payload as the username.
        username = auth_content
        password = ""
    cert = self.request.getpeercert()
    client_address = self.request.getpeername()
    return self.server.instance.authenticate(cert, username, password, client_address)
def get_token():
    """Return (token, auth header prefix) for the IBM X-Force API.

    Python 2 code.  Reads the API key from ./IXFtoken and sanity-checks
    that it is base64; exits the process if the file is missing or the
    key does not decode.
    """
    #To avoid this function put your api key in ./IXFtoken
    mytempfile = "./IXFtoken"
    if os.path.isfile(mytempfile):
        tokenf = open(mytempfile,"r")
        token = tokenf.readline()
        #Check the API key to see if it is a API key or token, either should work. API Keys are base64 encoded
        try:
            standard_b64decode(token)
            apiauthheader = "Basic "
        except TypeError:
            print "Your API key doesn't look right. Go get an API key then use the -a switch to save your user:pass to ./IXFtoken"
            exit()
    else:
        print "Support for Anonymous API has been removed. Go get an API key then use the -a switch to save your user:pass to ./IXFtoken"
        exit ()
    #When the Anonymous API worked, this code block worked. The authentication header was also different.
    #url = "https://api.xforce.ibmcloud.com/auth/anonymousToken"
    #data = urllib2.urlopen(url)
    #t = json.load(data)
    #tokenf = open(mytempfile,"w")
    #token = str(t['token'])
    #tokenf.write(token)
    return (token,apiauthheader)
def continue_carve(self, request):
    """Store one carve block; once every block for the session has
    arrived, reassemble the carved file (zstd-compressed or plain tar)
    under FILE_CARVE_DIR and reset the session entry.
    """
    session = FILE_CARVE_MAP[request['session_id']]
    # BUG FIX: blocks are stored under int keys, so the duplicate check
    # must use the int form too — request['block_id'] may arrive as a
    # string, in which case the old check could never fire.
    block_id = int(request['block_id'])
    # First check if we have already received this block
    if block_id in session['blocks_received']:
        return
    # Store block data to be reassembled later
    session['blocks_received'][block_id] = request['data']
    # Are we expecting to receive more blocks?
    if len(session['blocks_received']) < session['block_count']:
        return
    # If not, let's reassemble everything
    out_file_name = FILE_CARVE_DIR + session['carve_guid']
    # Check the first four bytes for the zstd header. If absent, no
    # compression was used and it's an uncompressed .tar
    if (base64.standard_b64decode(session['blocks_received'][0])[0:4] == b'\x28\xB5\x2F\xFD'):
        out_file_name += '.zst'
    else:
        out_file_name += '.tar'
    # 'with' guarantees the file is closed even if a block fails to decode.
    with open(out_file_name, 'wb') as f:
        for x in range(0, session['block_count']):
            f.write(base64.standard_b64decode(session['blocks_received'][x]))
    debug("File successfully carved to: %s" % out_file_name)
    FILE_CARVE_MAP[request['session_id']] = {}
def __init__(self, protected_settings, public_settings):
    """
    TODO: we should validate the parameter first
    """
    self.blobs = []
    self.backup_metadata = None
    self.public_config_obj = None
    self.private_config_obj = None
    # NOTE(review): this immediately overwrites the [] assigned above.
    self.blobs = None
    """ get the public configuration """
    self.commandToExecute = public_settings.get(CommonVariables.command_to_execute)
    self.taskId = public_settings.get(CommonVariables.task_id)
    self.locale = public_settings.get(CommonVariables.locale)
    self.publicObjectStr = public_settings.get(CommonVariables.object_str)
    if(self.publicObjectStr is not None and self.publicObjectStr != ""):
        # The object string is base64-encoded JSON, possibly wrapped in
        # stray whitespace and single quotes.
        decoded_public_obj_string = base64.standard_b64decode(self.publicObjectStr)
        decoded_public_obj_string = decoded_public_obj_string.strip()
        decoded_public_obj_string = decoded_public_obj_string.strip('\'')
        self.public_config_obj = json.loads(decoded_public_obj_string)
        self.backup_metadata = self.public_config_obj['backupMetadata']
    """ first get the protected configuration """
    self.logsBlobUri = protected_settings.get(CommonVariables.logs_blob_uri)
    self.privateObjectStr = protected_settings.get(CommonVariables.object_str)
    if(self.privateObjectStr!=None and self.privateObjectStr != ""):
        # Same base64+quote-stripping dance for the protected payload.
        decoded_private_obj_string = base64.standard_b64decode(self.privateObjectStr)
        decoded_private_obj_string = decoded_private_obj_string.strip()
        decoded_private_obj_string = decoded_private_obj_string.strip('\'')
        self.private_config_obj = json.loads(decoded_private_obj_string)
        self.blobs = self.private_config_obj['blobSASUri']
def _scan_from_spec_node(self, spec, scan_time):
    '''Gets the mzScan data from a 'spectrum' mzML node.

    Right now, does some error-checking due to the ambiguous nature of
    mzML's spec--no guarantees about what's in the tree.

    Python 2 code (print statements).  Binary arrays are base64-encoded,
    optionally zlib-compressed, packed floats/doubles.
    '''
    # Profile ('p') vs centroid ('c') acquisition mode.
    if spec.find('%scvParam[@name="profile spectrum"]' % NS) is not None:
        scan_mode = 'p'
    else:
        scan_mode = 'c'
    array_length = int(spec.get('defaultArrayLength'))
    mz_array = None
    int_array = None
    for bin in spec.iterfind('%sbinaryDataArrayList/%sbinaryDataArray' % (NS*2)):
        # 64-bit vs 32-bit float encoding of the packed values.
        if bin.find('%scvParam[@name="64-bit float"]' % NS) is not None:
            fmt = 'd' * array_length
        else:
            fmt = 'f' * array_length
        if bin.find('%scvParam[@name="no compression"]' % NS) is not None:
            compression = False
        else:
            compression = True
        if bin.find('%scvParam[@name="m/z array"]' % NS) is not None:
            if mz_array:
                print "Overwriting m/z array!?"
            if compression:
                mz_array = struct.unpack(fmt, zlib.decompress(base64.standard_b64decode(bin.find('%sbinary' % NS).text)))
            else:
                mz_array = struct.unpack(fmt, base64.standard_b64decode(bin.find('%sbinary' % NS).text))
        elif bin.find('%scvParam[@name="intensity array"]' % NS) is not None:
            if int_array:
                print "Overwriting intensity array!?"
            if compression:
                int_array = struct.unpack(fmt, zlib.decompress(base64.standard_b64decode(bin.find('%sbinary' % NS).text)))
            else:
                int_array = struct.unpack(fmt, base64.standard_b64decode(bin.find('%sbinary' % NS).text))
        else:
            print ("Found some other kind of binary array in here",
                   [b.get('name') for b in bin.iterfind('%scvParam[@name]' % NS)])
    if (mz_array and int_array):
        return mzScan(zip(mz_array, int_array), scan_time, mode=scan_mode)
    else:
        # One or both arrays missing: report which, and bail out.
        if mz_array:
            print "No intensity values"
        elif int_array:
            print "No m/z values"
        else:
            print "No m/z or intensity values"
        return None
def base64_binary_validator(x):
    """Yield an XMLSchemaValidationError for *x* when it is not a valid
    xs:base64Binary value; yield nothing when it is valid.
    """
    # First reject characters outside the base64 alphabet.
    bad = NOT_BASE64_BINARY_PATTERN.search(x)
    if bad is not None:
        reason = "not a base64 encoding: illegal character %r at position %d." % (bad.group(0), bad.span()[0])
        yield XMLSchemaValidationError(base64_binary_validator, x, reason)
        return
    # Alphabet is fine; let the decoder catch structural errors (padding etc.).
    try:
        base64.standard_b64decode(x)
    except (ValueError, TypeError) as err:
        yield XMLSchemaValidationError(base64_binary_validator, x, "not a base64 encoding: %s." % err)
def test_b64decode(self):
    """Exercise b64decode / standard_b64decode / urlsafe_b64decode against
    known vectors: bytes vs str input, custom altchars, URL-safe alphabet,
    and non-bytes / type-error behaviour via the shared check helpers."""
    eq = self.assertEqual
    # Known ciphertext -> plaintext pairs for the standard alphabet.
    tests = {b"d3d3LnB5dGhvbi5vcmc=": b"www.python.org",
             b'AA==': b'\x00',
             b"YQ==": b"a",
             b"YWI=": b"ab",
             b"YWJj": b"abc",
             b"YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXpBQkNE"
             b"RUZHSElKS0xNTk9QUVJTVFVWV1hZWjAxMjM0\nNT"
             b"Y3ODkhQCMwXiYqKCk7Ojw+LC4gW117fQ==":
             b"abcdefghijklmnopqrstuvwxyz"
             b"ABCDEFGHIJKLMNOPQRSTUVWXYZ"
             b"0123456789!@#0^&*();:<>,. []{}",
             b'': b'',
             }
    for data, res in tests.items():
        eq(base64.b64decode(data), res)
        eq(base64.b64decode(data.decode('ascii')), res)
    # Non-bytes
    self.check_other_types(base64.b64decode, b"YWJj", b"abc")
    self.check_decode_type_errors(base64.b64decode)
    # Test with arbitrary alternative characters
    tests_altchars = {(b'01a*b$cd', b'*$'): b'\xd3V\xbeo\xf7\x1d',
                      }
    for (data, altchars), res in tests_altchars.items():
        data_str = data.decode('ascii')
        altchars_str = altchars.decode('ascii')
        # All four str/bytes combinations must agree.
        eq(base64.b64decode(data, altchars=altchars), res)
        eq(base64.b64decode(data_str, altchars=altchars), res)
        eq(base64.b64decode(data, altchars=altchars_str), res)
        eq(base64.b64decode(data_str, altchars=altchars_str), res)
    # Test standard alphabet
    for data, res in tests.items():
        eq(base64.standard_b64decode(data), res)
        eq(base64.standard_b64decode(data.decode('ascii')), res)
    # Non-bytes
    self.check_other_types(base64.standard_b64decode, b"YWJj", b"abc")
    self.check_decode_type_errors(base64.standard_b64decode)
    # Test with 'URL safe' alternative characters
    tests_urlsafe = {b'01a-b_cd': b'\xd3V\xbeo\xf7\x1d',
                     b'': b'',
                     }
    for data, res in tests_urlsafe.items():
        eq(base64.urlsafe_b64decode(data), res)
        eq(base64.urlsafe_b64decode(data.decode('ascii')), res)
    # Non-bytes
    self.check_other_types(base64.urlsafe_b64decode, b'01a-b_cd',
                           b'\xd3V\xbeo\xf7\x1d')
    self.check_decode_type_errors(base64.urlsafe_b64decode)
def verify(signed_data, signature_base64):
    """Returns whether the given data was signed with the private key."""
    h = SHA.new()
    h.update(signed_data)
    # Scheme is RSASSA-PKCS1-v1_5.
    # NOTE(review): this assignment creates a LOCAL _PUBLIC_KEY that shadows
    # any module-level name of the same spelling; the key is re-imported on
    # every call.
    _PUBLIC_KEY = RSA.importKey(base64.standard_b64decode(_PUBLIC_KEY_BASE64))
    verifier = PKCS1_v1_5.new(_PUBLIC_KEY)
    # The signature is base64 encoded.
    signature = base64.standard_b64decode(signature_base64)
    return verifier.verify(h, signature)
def b64decode_file(value):
    """Split a "filename:<b64>;data:<b64>" string and decode both halves.

    Returns a (filename, data) tuple: filename as UTF-8 text, data as the
    raw decoded bytes.
    """
    name_part, data_part = value.split(';')
    encoded_name = name_part.split(':')[1]
    decoded_name = base64.standard_b64decode(encoded_name).decode('utf-8')
    encoded_data = data_part.split(':')[1]
    decoded_data = base64.standard_b64decode(encoded_data)
    return decoded_name, decoded_data
def _load(self, filename):
    """Read tags from *filename* into a Metadata object, translating
    vorbis-comment conventions on the way: date sanitising, performer
    role suffixes, per-user ratings, fingerprint/total-track aliases and
    embedded pictures.  Python 2 code (uses the `unicode` builtin)."""
    self.log.debug("Loading file %r", filename)
    file = self._File(encode_filename(filename))
    file.tags = file.tags or {}
    metadata = Metadata()
    for origname, values in file.tags.items():
        for value in values:
            name = origname
            if name == "date" or name == "originaldate":
                # YYYY-00-00 => YYYY
                value = sanitize_date(value)
            elif name == 'performer' or name == 'comment':
                # transform "performer=Joe Barr (Piano)" to "performer:Piano=Joe Barr"
                name += ':'
                if value.endswith(')'):
                    start = value.rfind(' (')
                    if start > 0:
                        name += value[start + 2:-1]
                        value = value[:start]
            elif name.startswith('rating'):
                try:
                    name, email = name.split(':', 1)
                except ValueError:
                    email = ''
                # Only import the rating stored for the configured user.
                if email != self.config.setting['rating_user_email']:
                    continue
                name = '~rating'
                # Rescale the 0..1 float rating to the configured step count.
                value = unicode(int(round((float(value) * (self.config.setting['rating_steps'] - 1)))))
            elif name == "fingerprint" and value.startswith("MusicMagic Fingerprint"):
                name = "musicip_fingerprint"
                # Strip the "MusicMagic Fingerprint" prefix (22 chars).
                value = value[22:]
            elif name == "tracktotal":
                # Alias, but never clobber an explicit totaltracks tag.
                if "totaltracks" in file.tags:
                    continue
                name = "totaltracks"
            elif name == "disctotal":
                if "totaldiscs" in file.tags:
                    continue
                name = "totaldiscs"
            elif name == "metadata_block_picture":
                # FLAC-style embedded picture: base64-encoded Picture block.
                image = mutagen.flac.Picture(base64.standard_b64decode(value))
                metadata.add_image(image.mime, image.data)
                continue
            metadata.add(name, value)
    if self._File == mutagen.flac.FLAC:
        for image in file.pictures:
            metadata.add_image(image.mime, image.data)
    # Read the unofficial COVERART tags, for backward compatibillity only
    if not "metadata_block_picture" in file.tags:
        try:
            for index, data in enumerate(file["COVERART"]):
                metadata.add_image(file["COVERARTMIME"][index], base64.standard_b64decode(data))
        except KeyError:
            pass
    self._info(metadata, file)
    return metadata
def decrypt( self, b64_data ): """ Requires b64 data separated by # to decrypt """ # TODO: Verify the data and add a try statement? salt, iv, cipher_text = b64_data.split('#', 3) salt = base64.standard_b64decode(salt) iv = base64.standard_b64decode(iv) cipher_text = base64.standard_b64decode(cipher_text) key = pbkdf2(self.passphrase, salt, ITERATION_COUNT, keylen=KEY_LENGTH) cipher = AES.new(key, AES.MODE_CBC, iv ) return unpad(cipher.decrypt( cipher_text ))
def __perform_key_handshake(self):
    """Perform the MSL key-exchange handshake against the manifest endpoint.

    Sends an unauthenticated (scheme NONE) request identified by the
    device ESN; on success stores the returned master token and key
    response data.  Returns False on failure.
    """
    esn = self.nx_common.get_esn()
    self.nx_common.log(msg='perform_key_handshake: esn:' + esn)
    if not esn:
        return False
    header = self.__generate_msl_header(
        is_key_request=True,
        is_handshake=True,
        compressionalgo='',
        encrypt=False)
    request = {
        'entityauthdata': {
            'scheme': 'NONE',
            'authdata': {
                'identity': esn
            }
        },
        'headerdata': base64.standard_b64encode(header),
        'signature': '',
    }
    #self.nx_common.log(msg='Key Handshake Request:')
    #self.nx_common.log(msg=json.dumps(request))
    try:
        # sort_keys keeps the serialised form deterministic.
        resp = self.session.post(
            url=self.endpoints['manifest'],
            data=json.dumps(request, sort_keys=True))
    except:
        # Network failure: log and fall through with resp = None.
        resp = None
        exc = sys.exc_info()
        self.nx_common.log(
            msg='[MSL][POST] Error {} {}'.format(exc[0], exc[1]))
    if resp and resp.status_code == 200:
        resp = resp.json()
        if 'errordata' in resp:
            self.nx_common.log(msg='Key Exchange failed')
            self.nx_common.log(
                msg=base64.standard_b64decode(resp['errordata']))
            return False
        # headerdata is base64-encoded JSON carrying the key response.
        base_head = base64.standard_b64decode(resp['headerdata'])
        headerdata=json.JSONDecoder().decode(base_head)
        self.__set_master_token(headerdata['keyresponsedata']['mastertoken'])
        self.crypto.parse_key_response(headerdata)
        self.__save_msl_data()
    else:
        self.nx_common.log(msg='Key Exchange failed')
        self.nx_common.log(msg=resp.text)
def b64decode_file(value):
    """Decode a "filename:<b64>;data:<b64>" payload into (filename, data).

    Text input is first encoded to UTF-8 so all splitting happens on
    bytes; the returned filename is text, the returned data raw bytes.
    """
    if isinstance(value, six.text_type):
        value = value.encode('utf8')
    name_part, data_part = value.split(b';')
    raw_name = base64.standard_b64decode(name_part.split(b':')[1])
    filename = raw_name.decode('utf-8')
    data = base64.standard_b64decode(data_part.split(b':')[1])
    return filename, data
def post(self):
    """Rotate a user's credentials: store the new salt, password hash and
    re-encrypted password data, then refresh the session cookie."""
    success, user = AuthorizedUser(self.request.cookies)
    if not success:
        self.error(401)
        return
    # We need to store the new salt, password_hash, and encrypted passwords.
    salt = self.request.get("salt")
    assert salt
    # Salt and hash are transmitted base64-encoded; store the raw bytes.
    user.salt = db.Blob(base64.standard_b64decode(salt))
    password_hash = self.request.get("password_hash")
    user.password_hash = db.ByteString(base64.standard_b64decode(password_hash))
    Save(user, self.request)
    SetUserCookie(self.response, user.username, password_hash)
def AuthorizedUser(cookies):
    """Validate the "session" cookie ("<b64 username>.<b64 hash>") and
    return (authorized?, user).  Python 2 code (uses the '<>' operator).
    """
    session = cookies.get("session", "").split(".")
    username = base64.standard_b64decode(session[0])
    password_hash = session[1]
    query = User.all()
    # NOTE(review): the filter below was mangled by a credential-scrubbing
    # tool ("******"); the original username filter argument is missing.
    query.filter("username ="******"password_hash =", db.ByteString(base64.standard_b64decode(password_hash)))
    user = query.get()
    if not user:
        logging.error("Should never get here! Data received but username (%s) or "
                      "password_hash (%s) is wrong!" % (username, password_hash))
    return user <> None, user
def decryptSecret(encryption, secret_key):
    """Decrypt an AES_256_CBC_HMAC_SHA256 envelope and verify its HMAC.

    *encryption* carries base64 'iv', 'ciphertext' and 'signature'
    fields.  Raises AssertionError on algorithm mismatch and Exception on
    a bad signature; otherwise returns the plaintext bytes.
    """
    import hmac as hmac_lib  # stdlib, for constant-time digest comparison
    assert encryption["algorithm"] == "AES_256_CBC_HMAC_SHA256"
    iv = base64.standard_b64decode(encryption["iv"])
    ciphertext = base64.standard_b64decode(encryption["ciphertext"])
    signature = base64.standard_b64decode(encryption["signature"])
    cipher = EVP.Cipher(alg="aes_256_cbc", key=secret_key, iv=iv, op=M2Crypto.decrypt)
    value = cipher.update(ciphertext)
    value += cipher.final()
    hmac = EVP.hmac(secret_key, iv + value, algo="sha256")
    # SECURITY FIX: use a constant-time comparison so the MAC check does
    # not leak timing information about how many bytes matched.
    if not hmac_lib.compare_digest(hmac, signature):
        raise Exception("invalid signature")
    return value
def recharge_verify(post_sign, post_notify_data, post_orderid, post_dealseq, post_uid, post_subject, post_v):
    """Verify a recharge (payment) callback.

    Decrypts the RSA-encrypted notify_data, cross-checks its dealseq
    against the posted one and inspects payresult.  Returns (True, fee)
    on success; (False, 0) on mismatch/failed payment, or bare False when
    no signature was supplied.
    """
    if post_sign == "":
        logger.debug(" Unable to get required value")
        # NOTE(review): this path returns a bare False while the others
        # return (False, 0) — callers must handle both shapes.
        return False
    post_sign = base64.standard_b64decode(post_sign)
    # Sort the input parameters by name and join them as
    # key=value&key=value (signature verification is currently disabled).
    # sourcestr = "?" + 'notify_data=' + post_notify_data
    # sourcestr += '&orderid=' + post_orderid
    # sourcestr += '&dealseq=' + post_dealseq
    # sourcestr += '&uid=' + post_uid
    # sourcestr += '&subject=' + post_subject
    # sourcestr += '&v=' + post_v
    # logger.debug('Raw sign is: %s', sourcestr)
    # Verify the signed data; note the public key needs a format conversion.
    # verify = pub.verify(sourcestr, sig)
    # logger.debug('Verification result is %s', verify)
    # if verify:
    #     logger.debug('Failed to verify data')
    #     return False
    # Decrypt the RSA-encrypted notify_data payload.
    decode_data = base64.standard_b64decode(post_notify_data)
    decode_notify_data = pub.public_decrypt(decode_data, RSA.pkcs1_padding)
    logger.debug('Notify data decoded as %s', decode_notify_data)
    result = parse_str(decode_notify_data)
    dealseq = result['dealseq']
    fee = result['fee']
    payresult = result['payresult']
    logger.debug('dealseq:%s fee:%s payresult:%s', dealseq, fee, payresult)
    # Check that the dealseq inside the decrypted data matches the posted one.
    if dealseq != post_dealseq:
        logger.debug(" Dealseq values did not match:%s-%s", dealseq, post_dealseq)
        return False, 0
    if payresult != '0':
        logger.error("recharge fail payresult:%s", payresult)
        return False, 0
    logger.debug('kuaiyong verify success!')
    return True, fee
def admin_delete_video(self,sel,curr_url):
    """Completely delete a video from the site.
    """
    vid_id = curr_url.split('/videos/')[1]
    # NOTE(review): assumes vid_id ends with '/' so the concatenated path
    # reads .../<vid_id>/staff/delete — verify against caller URLs.
    sel.open("/videos/"+vid_id+"staff/delete")
    if sel.is_element_present("id_username"):
        # Admin credentials are stored base64-encoded in the test vars.
        sel.type("id_username", base64.standard_b64decode(testvars.ad_usr))
        sel.type("id_password", base64.standard_b64decode(testvars.del_pw))
        sel.click("//input[@value='Log in']")
        sel.wait_for_page_to_load(testvars.timeout)
    sel.open("/admin/logout")
    time.sleep(5)
    sel.open("/")
def basic_auth_creds(request):
    """
    Extract any HTTP Basic authentication credentials for the request.

    Returns a tuple with the HTTP Basic access authentication credentials
    ``(username, password)`` if provided, otherwise ``None``.

    :param request: the request object
    :type request: pyramid.request.Request

    :returns: a tuple of (username, password) or None
    :rtype: tuple or NoneType
    """
    try:
        authtype, value = request.authorization
    except TypeError:
        # no authorization header
        return None
    if authtype.lower() != 'basic':
        return None
    try:
        user_pass_bytes = base64.standard_b64decode(value)
    except (TypeError, ValueError):
        # failed to decode.  BUG FIX: on Python 3 a malformed payload
        # raises binascii.Error (a ValueError subclass), not TypeError,
        # so the old `except TypeError` let decode errors escape.
        return None
    try:
        # See the lengthy comment in the tests about why we assume UTF-8
        # encoding here.
        user_pass = user_pass_bytes.decode('utf-8')
    except UnicodeError:
        # not UTF-8
        return None
    try:
        username, password = user_pass.split(':', 1)
    except ValueError:
        # not enough values to unpack
        return None
    return (username, password)
def _parse_config(self, ctxt):
    """Parse the handler-settings JSON and, when present, decrypt the
    protectedSettings blob with the VM certificate via `openssl smime`.

    Python 2 code (uses dict.has_key).  Returns the parsed config dict
    with protectedSettings replaced by its decrypted JSON, or None when
    the input is not valid JSON.
    """
    config = None
    try:
        config=json.loads(ctxt)
    except:
        self.error('JSON exception decoding ' + ctxt)
    if config is None:
        self.error("JSON error processing settings file:" + ctxt)
    else:
        handlerSettings = config['runtimeSettings'][0]['handlerSettings']
        if handlerSettings.has_key('protectedSettings') and \
                handlerSettings.has_key("protectedSettingsCertThumbprint") and \
                handlerSettings['protectedSettings'] is not None and \
                handlerSettings["protectedSettingsCertThumbprint"] is not None:
            protectedSettings = handlerSettings['protectedSettings']
            thumb=handlerSettings['protectedSettingsCertThumbprint']
            # Certificate/private key deployed by the agent, named by thumbprint.
            cert=waagent.LibDir+'/'+thumb+'.crt'
            pkey=waagent.LibDir+'/'+thumb+'.prv'
            unencodedSettings = base64.standard_b64decode(protectedSettings)
            openSSLcmd = "openssl smime -inform DER -decrypt -recip {0} -inkey {1}"
            cleartxt = waagent.RunSendStdin(openSSLcmd.format(cert, pkey), unencodedSettings)[1]
            if cleartxt is None:
                self.error("OpenSSL decode error using thumbprint " + thumb )
                self.do_exit(1,"Enable",'error','1', 'Failed to decrypt protectedSettings')
            jctxt=''
            try:
                jctxt=json.loads(cleartxt)
            except:
                self.error('JSON exception decoding ' + cleartxt)
            # Replace the encrypted blob with the decrypted JSON in place.
            handlerSettings['protectedSettings']=jctxt
            self.log('Config decoded correctly.')
    return config
def results_handler(self, jsondata):
    """Handle a search response: each match is a base64-encoded,
    zlib-compressed PDB that is loaded into the viewer and grouped under
    self.match_id; records the server temp dir and the search history.
    """
    numMatches = 0
    if 'results' in jsondata:
        if 'qSeq' in jsondata:
            self.wizard.popup_app.win.qSeqs[self.match_id] = str(jsondata['qSeq'])
        if 'matches' in jsondata:
            for index, match in enumerate(jsondata['matches']):
                # uncompress and decode matches
                unencoded = base64.standard_b64decode(match)
                uncompressed = zlib.decompress(unencoded)
                header = uncompressed.splitlines()[0]
                # Derive a unique object id from the .pds path in the header.
                phid = re.search('/(.*?).pds', header).group(1).split('/')
                hid = phid[len(phid)-1] + '.' + str(index)
                print('found: ' + hid + ' ' + header.split('pds')[1])
                # load the pdb and group
                self.cmd.read_pdbstr(str(uncompressed), hid)
                self.cmd.group(self.match_id, hid)
                numMatches = numMatches + 1
        self.dictionary[self.match_id] = str(jsondata['tempdir']).split('/')[-1]
    # add current search to search history
    self.setReturn(numMatches)
    if (numMatches):
        self.cmd.get_wizard().popup_app.add_new_search(self.match_id)
def __init__(self, **kwargs):
    """Build the main object: config, storage, backend, media library,
    device discovery, and the background job scheduler.  Returns early
    (partially constructed) when the backend fails to log in.
    """
    # core values
    self._last_error = None
    # Set up the config first.
    self._cfg = ArloCfg(self, **kwargs)
    # Create storage/scratch directory.
    if self._cfg.state_file is not None or self._cfg.dump_file is not None:
        try:
            os.mkdir(self._cfg.storage_dir)
        except Exception:
            # Directory probably exists already; best-effort only.
            pass
    # Create remaining components.
    self._bg = ArloBackground(self)
    self._st = ArloStorage(self)
    self._be = ArloBackEnd(self)
    self._ml = ArloMediaLibrary(self)
    # Failed to login, then stop now!
    if not self._be.is_connected:
        return
    self._lock = threading.Condition()
    self._bases = []
    self._cameras = []
    self._lights = []
    self._doorbells = []
    # On day flip we do extra work, record today.
    self._today = datetime.date.today()
    # Every few hours we can refresh the device list.
    self._refresh_devices_at = time.monotonic() + self._cfg.refresh_devices_every
    # default blank image when waiting for camera image to appear
    self._blank_image = base64.standard_b64decode(BLANK_IMAGE)
    # Slow piece.
    # Get devices, fill local db, and create device instance.
    self.info('pyaarlo starting')
    self._started = False
    self._refresh_devices()
    for device in self._devices:
        dname = device.get('deviceName')
        dtype = device.get('deviceType')
        if device.get('state', 'unknown') != 'provisioned':
            self.info('skipping ' + dname + ': state unknown')
            continue
        # Sort each device into the matching wrapper list; some models
        # (arloq/arloqs/AVD1001) act as both base and camera.
        if dtype == 'basestation' or \
                device.get('modelId') == 'ABC1000' or dtype == 'arloq' or dtype == 'arloqs' or \
                device.get('modelId').startswith('AVD1001'):
            self._bases.append(ArloBase(dname, self, device))
        if dtype == 'arlobridge':
            self._bases.append(ArloBase(dname, self, device))
        if dtype == 'camera' or dtype == 'arloq' or dtype == 'arloqs' or \
                device.get('modelId').startswith('AVD1001'):
            self._cameras.append(ArloCamera(dname, self, device))
        if dtype == 'doorbell':
            self._doorbells.append(ArloDoorBell(dname, self, device))
        if dtype == 'lights':
            self._lights.append(ArloLight(dname, self, device))
    # Save out unchanging stats!
    self._st.set(['ARLO', TOTAL_CAMERAS_KEY], len(self._cameras))
    self._st.set(['ARLO', TOTAL_BELLS_KEY], len(self._doorbells))
    self._st.set(['ARLO', TOTAL_LIGHTS_KEY], len(self._lights))
    # Always ping bases first!
    self._ping_bases()
    # Queue up initial config and state retrieval.
    self.debug('getting initial settings')
    self._bg.run_in(self._refresh_camera_thumbnails, REFRESH_CAMERA_DELAY)
    self._bg.run_in(self._refresh_camera_media, REFRESH_CAMERA_DELAY)
    self._bg.run_in(self._initial_refresh, INITIAL_REFRESH_DELAY)
    self._bg.run_in(self._ml.load, MEDIA_LIBRARY_DELAY)
    # Register house keeping cron jobs.
    self.debug('registering cron jobs')
    self._bg.run_every(self._fast_refresh, FAST_REFRESH_INTERVAL)
    self._bg.run_every(self._slow_refresh, SLOW_REFRESH_INTERVAL)
    # Wait for initial refresh
    if self._cfg.wait_for_initial_setup:
        with self._lock:
            while not self._started:
                self.debug('waiting for initial setup...')
                self._lock.wait(5)
    self.debug('finished...')
import base64 from typing import Optional import qrcode import secrets from datetime import datetime from . import cwa_pb2 as lowlevel PUBLIC_KEY_STR = 'gwLMzE153tQwAOf2MZoUXXfzWTdlSpfS99iZffmcmxOG9njSK4RTimFOFwDh6t0Tyw8XR01ugDYjtuKwj' \ 'juK49Oh83FWct6XpefPi9Skjxvvz53i9gaMmUEc96pbtoaA' PUBLIC_KEY = base64.standard_b64decode(PUBLIC_KEY_STR.encode('ascii')) class CwaEventDescription(object): def __init__(self): """Description of the Location, Required, String, max 100 Characters""" self.locationDescription: Optional[str] = None """Address of the Location, Required, String, max 100 Characters""" self.locationAddress: Optional[str] = None """Start of the Event, Optional, datetime in UTC""" self.startDateTime: Optional[datetime] = None """End of the Event, Optional, datetime in UTC""" self.endDateTime: Optional[datetime] = None """Type of the Location, Optional
def _validate_signature(self, receipt: str, signature: str) -> bool: try: sig = base64.standard_b64decode(signature) return rsa.verify(receipt.encode(), sig, self.public_key) except BaseException: return False
def handle_remote_print(self, msg):
    """Decode a base64-encoded UTF-8 payload and echo it to stderr unbuffered."""
    import base64
    text = base64.standard_b64decode(msg).decode('utf-8')
    # Write without any trailing newline, exactly as received.
    sys.stderr.write(text)
    sys.stderr.flush()
benchmark(fn)  # NOTE(review): tail of a benchmark helper whose definition lies outside this chunk


def _test_digest(digest):
    # Shared sanity checks for a digest built from 100000 samples:
    # fixed compression (K), total count, centroid count, and quantile
    # error bounds that tighten toward the tails.
    assert digest.K == 100
    assert digest.n == 100000
    assert len(digest) == 1083
    assert digest.percentile(50) - 0.5 < 0.02
    assert digest.percentile(10) - 0.1 < 0.01
    assert digest.percentile(90) - 0.9 < 0.01
    assert digest.percentile(1) - 0.01 < 0.005
    assert digest.percentile(99) - 0.99 < 0.005
    assert digest.percentile(0.1) - 0.001 < 0.001
    assert digest.percentile(99.9) - 0.999 < 0.001


def test_tdigest_as_bytes():
    # Round-trip: deserialize the Java-produced reference bytes,
    # re-serialize, deserialize again, then require a byte-exact match
    # with the reference blob.
    digest = from_bytes(java_bytes)
    b = as_bytes(digest)
    digest = from_bytes(b)
    _test_digest(digest)
    assert len(b) == len(java_bytes)
    assert b == java_bytes


# Reference serialization produced by the Java t-digest implementation,
# kept base64-encoded; decoded once at import time into java_bytes.
# pylint: disable=line-too-long
java_base64 = "AAAAAkBZAAAAAAAAAAAEOzZpD1w24ySbN288eDfDHOI3jwpPN7jIyze1xXM2BzmuNc6x9DdUUcs2o1QFNvb5tzeNwTo2l0VYNgD89jaAiB83GxMBNTdLZzVjwOk3oKiDNxhS4jZ2blc2zTiiN8rlKDc7gN01HN5jNgF8bDYhIGo3BsH5NlbMcDdtCKQ3eJMUNzzuazQuLpY2y0lcNqNDdDcNDr03zOJ1N3ESMjcqxd42omxHNdA+mDbJmlo3KrIGN5i5/DegwGw10QY2NRuEmjdARF42g8qeN8L4yjajFVs1oIo9NvoNwDdrnuk2LeJGNwFHnTgGqu82TzfHN41Syzbd4xU2XjMVN1GPQjbMZOI2l91oNnY8CDdCy7U1wCuMNwLfyjfGDDo3FWWBNiEsSTiE9ZQ3rY03N6fEbDULhxU3i9qZNxuifjbeoMQ3vJ9mNpxU6jbvhEE3qOmYNrG09jcions3F6YRN5Ny1DUG5+E3P2m7NxXWSzd9PD03GBO9N+INZjczo844exOsNxmKIjgnk242m3GdNxrymzcJGSI1MVGaN6OzizblJ+43D9D/NvxA1Df2mZw339fFOB/KWzdN4WM4MhJoNpShjDfafXk3uSflN/uHhDeIUvI3ZOFqNqUkCDgokRU4VWp2Nzz1UjfigVQ4PHzkN8bWhzc21Kc3vOyQN8SJPjhEt344cC6EOAc/ZjfA9D04NZB9OAx8mzgsvD83oqOINzpg9jg9CWo4Y2qdN/r4XjbiH544DQY6OMvJSThcl+g4mnOyOKqdIzd91to4K72fODbsgjiPb2o4AmM6OIXueDhuDMs4PW1yN3ci2jhZGWY4aM5oOCDGwjiBKsk4FLcON7gbNTgr/zQ4e9V2N7qMkjiRTE04OiCKOA+kqDhK2u83jJvIN/P+6Tgw7v04voUSOExQKTgt8OU4ND0lN9CbPzhIfws4UJvSOBqgKzhe1TM4yVlsOLqRxjhsUw03lttXOEGkjTiTqns4kcOmOG2D5DgFx7847tKPONixTDhm8a84mAD1OFCXQzif2W84eVdkOPgJ+jjQy0g4a1HVOHLm7zjKP0k40bH4ODTQizj5Vn448ubuOQbg9TkIbEw4nuqfONUhyTktbsc48dTQOWSR7TkfJfU47iIfOQDP3jkN52Y5OibNOR1tRTk4XgM5JS+XODkp1DjNnOo4zOE/OTcKaDkfd4c5HTjLOTfMtzk1Tng5HH8aOTdpejlQok44yYMwN68whzgmY3o4kGHlOIRTqTkd2Jg45Dd0OHlnWzkEqtA5PENgOT6ckzlmuTQ5LPhpOL2F3zmPFVg5sPneOWfCETkZWu85KkV9ONN2zzlVKg85k3xqOYMdETkujEg5FZlSOYv3FznTwq05w571OXYNQDlfBkQ5NaiZOK74YzjPWAY5BSnsOUOhdTmCIsM5aphbOTm7cjmYQPw5WyLLOV8xQznRMgU5zm+AOb5MBDmEpF45lqbbOW3LNzlc5LI5ny6QObux3zmCqUY5JJyxOXAibjm8mJA5zUCAOeW3Tznyf3w59LruOceUBzn0Gx05vTtsOfquPDoaISI58SiPOdEPpznD/cw5yU1bObG+/zm6Urs5vqXbOcfLwDmrd4s51P3sOhMXXTogDTA51iYXObgArDnGgzQ5/T2FOfi0RjndrFQ5y0IuOhXqcDorpag6RHR1Oixgxjnpq5Y5svRMOdkKbzmou5k5xefuOdiV6DooArU6SnueOkSo6ToPT2o6BvjmOgtxUjoXlSw6G2tQOfUe9jnksDw52dLDOi/y0TpDLTg6NmYNOgqIbTmzGkE52tMyOoJqNzqnDaQ6q5T1Opv6UTqQdX06g9A3OnfuGzqFI266hlrhOmJtZjpf25k6FCyPOhAMgDo1nyI6TLhWOoVgwzqXc746gJc8OlnKVjo/gk86iR5UOpq7PTqdzGY6fspZOp6HRjqEU7w6So02OiKgNToiGkg6aR5oOpHAEDpRg+M6TXTPOnXxkjq5Ct069YXFOvA6NDry/wY65ooNOtzuzDq+ECw6mx8DOpM6qzrBq7E6wn5oOsrlYDsOlM87DKunOw2+iDrzrio6xuE4OrWjPDqoohQ60tC2OvuLODrrWeE63cvKOtKc2jq/DwE6t3QKOrLbvDqrGOI6vpYpOv+cezslrTw7Gp54OxOm8zsWlYM7H8mYOz5/qzs1fHE7MGNuOzCnFDs1TVs7P/9NOx9ocTr+Wzs6+KXkOxOgZjrNhNA6gXVdOmH0LzqUolA6zwgZOw9UyTsD9I46/6KVOwVc4zsa2pc7CwuyOsUzETqLuas6mdYmOwWSnTswT4o7JX7qOwqBMzr5UBY66uyWOvW5NTsKjMk7JL4oOyJEiTsuq1Y7QQhEOzLQBjsjYeg7BRP5OvfqBTsPYUI7Lhs+O1NqIzuFBsU7eMaTOzuwvDsVjz47FheUOzc2FTtf4y87eSLlO4FCMDuE1iI7jx8lO6CXezuLCc87SkkEOw9XGzrUN/o60S9OOwRbLjs1xu87d3xMO32c8zuL04A7kPoQO4I88jtgMhk7PfEaOxILhDrxJ/g6+S+dOxZK4js70sU7ZAatO41myjuP8FU7iNOzO5IsQzuUZZ47anfmOzKbiDssSQo7RbdFOzZGbzsijbg7EdwqOzAoGzs/cx87c6CdO6fSjju/2mE7s2PuO7BVFDuvFvU7uGxCO6KcTzuFdpY7gW0tO1MZejs+0u87bhNOO5hzHDuuqfU7puR+O27/RjsnXA87FRP2OyNyRTtbJ4A7oHBjO6aMVTu2P3I7xGZWO8iz2TvO9Ns7xOtWO6PXxjt9d/47cPPYO31G+Ds3erY6yE0aOk8xCDpGySk6qq7zOvjgZTsgTBs7NM8GOx3z6Tsq29w7NmsfO0Hreztu3xY7dk9WO3TTGDtiz0s7ZOMLO46IeDuO1vk7bpo/O25jnDt3Vek7lJ5YO5mNajtsaXA7Fv+0Ou/dCTsJbAs7PTjsO22GJzt2eTk7XRjBO340CzuCQOw7clTLO19aJDs0gkk7EHVbOx2iqTsiMhI7I3XXOxQtAjsXZlM7JfrQOzPyIjtHLgk7fNvUO4nBdDuOYZI7hCk6O2l7ITtjevg7WwaEO1/S6TtFPiY7MW19O2bjLzt03jI7gsFbO4PWBDuGU9U7noVhO7Hg3zufXSU7gVMiO1o9xztFxDo7OaUcO2D1wztT9/87XyIcO0mFPzszGc47Kht5Oz23WjtmjYg7gKOVO3IZfDtIzFg7GVHmOuB6KDrOLOQ7CF6POyzH1TsuH3Q7OM/DO0xskTtuJHQ7cMB/O4bPzzuLdOU7h2SsO2f2xDs8IKw7JR0+Oyvwhjsae7E7DcaqOvnPADrXxC063tLlOvY0zTsTxUI7IkbcOzkVzzs/RpM7M1ZDOzBmnjs87aU7VcmTO0DL/zsbCFE69oQyOtABijrpNtM69c8UOu225DqbxxA6azZrOlKXRDp7vwE6p0qUOqxh8zqs7JE6rdnqOtzxNzsOlb47FRw2OxtFIDr6nlQ6yv8ROqncETqkc4I6m2kEOmU4oTp3bec6roWNOtkwujq4hkw6iT+5OnlU+DpkEhQ6aw67Opyl9DqM79c6S3J2Ofz8rDn30cA6HOhuOgXIsToEZQQ5znUNOgH78zoTKKU6YLyIOnWfAjqSDVk6hWPrOnGUwDpqn3c6eY3AOlo1SDqC9kU6bS35OoHBLzp8WgI6So6gOgZ8fzo4Ibw6elzBOo5ZVTp5wSU6d0e+OnCgezpxp1M6NoEzOiXlxzpUG4o6ZLCKOmLp9Tot5oU6Ja0OOhfxAzo4qNI6O9+0OjMq5zorV7w6Gv+gOhnd8zn7P1o6IRscOhLPqDngEEg5uIZJOdmxAToNmYA6Gm3GOmg3sjpCPz06WyRCOmGz3zovg7050RuqOfX3WDol3yg6ZUH8OjrgyzoIwlI6H9qmOhtrGTovTtc6SJ3rOkDdRzoYcX46D3P0OhFudToCa2A6BWyTOaegSzlmUn05X3MqOY5bPjnGv7Q55MBkOil2NDo4lt86Ro6YOlW4mjob5ig5wtjpOZheyTmLg/c5iuH1OYyCVjk+Om85JLQMOZCxJjmP6ro5kcX3OfVvATnGgWE5haCUOP8UPDk8C6Q5iKLHOUrz4zk0mMw5WFZuOUwQ6jl2t2A5qF4BOZp7xzmMEhU5yBgqOdWqmDnmxrg5e4YkOUTKvzkklbs42a5MOTN8WTkWOKU5YfgsOUAgnjmPd1g5h/cCOWbI1TmTGvQ5c2+oOYGlXTlz5jU5WuZmOUBj6Tka3hs5kuGEOUJ64DlaTDI5Qx6wOWLktTk6UT44uJu4OSVojjlryTM5HOuzOKzKFjkzpaA5IKMzOU6a+TkZ+fo4/z9xOJ4zlDlRXMw5JFNkOZ/S5Dm+nbg5mNQ2OTYfzjkIhDY5BF12OWhzpzl1yzU5L8odOUxnIDmfni05O9aXOM5YXzkHdR85Z1vyOVD4LDknGEU5AO+mOUFl9jj6flw4qTKoOMJxbDkZ23s45egCOSXNVzlR/6A5YzwWOIiTmzhDuiM4ckLKOGlfETiwlGw4nTLiOQarEzkghTg435iYOTcOYTk6mxE417BqOShVoTjZ/vA5EbZqOMiXfDj1EMo4qezxOG1Ozjixiu04dG/sOCBpCzihxbA4yQo4OK0e0jizeBg5NRWpOPpimziANrc4EZ7jOIeAXziW4aY5BGPgOOxqLzifzCM4JEU7OMG5kjht8D44Jl+qODrHFzgVIbg4mPQ2OCIORjb5Cp84e6X0OADpBDibM2E3YKiFNuQmfTiYFC04i3bKOL4ELjigoew4oYQDOJAK0Th/KW04Zp6VONEgiTjUZOo4jJV6OBFkKzeaXUc4GJisOBQsOjgCZYc39MHDOBTM6DiVqi84KANtOEaOoTiK1ic4DO70N+rSvDhFJ9Q3gn3TOHXRdTiQZXk4CJwPN8TvgzfSFxo4Gw8UN4leLTepicc39wbUOHg9JzgW8tY4bOD7OJ9hjzhJIeE4G7+IOCht/zhuOXM3EV0eOFMPMzcGRI84uDUbN458SjXUYcg3hQD/N6go1jezTCs3jAi9OA1kFDcSnxM3CklMN3sxdjbrFtQ31mLFN/TNLjdi2XI3qtLMOBHGRDgWdfk3wzlgN0BOqzc4DU43H7m6NpuDhzUjc3g3tgLNNwsZ2ja8xdE3cqBUNXhSZjgJ3V44SLYWN6ywljbIXyo4ALH1N4FO5Tdk/u032vrrNpsb/jb7F3k2lZaBN0W5CTdzvwo4K1P2NmooDzhI1os4IJJZN/xf4DddXcM3adDqN/JMMjgE5Ww3CbgmNzxbkzMyJWI3Jmn+NomgODatgGs3skvFNuHb1ze65og2929+NrFZrjcCh5c2wlG+NqssVDgEloc3RZefN1no3DatxX03KevuNrzoSzgIf/UyKB8LNvfYhjeIRBY3FDw7NkHG1zbsOyw3QEX0N3PYhDa2y0Q2Ew3jNrJhZDhi3UE1jsnzNALBbzbCKuU20kNGNxgSUDW1cus29u8qNvHROjbGJ+U2d2R4NqmGPDh5qFE2kj5gNWP33zZqkM43sf31NzEY+DZsWkQ2qJYDNqUvJDYwDfQ2le0fNvwp2TZtVbk3V1A4NT3PtDd6xmY4FhDEN3OfxjfwGV4238DDNoB/qzaIS3E3Jz+iNmZ4nDeLlU43qQX/N6VXuzU+6Bk2N4WXNeQWsTcpydI3j8D1NrSMsjZRwxU23KKANyvmiTaKHrQ3zzqsNf/SMTchLt411j5sN1t3rDWBMKw21n5rOBeltQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQECAQEBAgEBAQEBAQEBAgICAgEBAgIBAwMBAgIBAQIDAQICAQIBAQEBAwIBAgEBAgMDAwIFAQIDAgIBBAQDAgICAQIDBAMEBAIFAgICAwUHAgIEBgYGAggDBgYCBgQEAQUDBgQDAwoCAgQEAwUCAQgIBQIKBQgEBwQNCQIGBwkFBA4MCQUFBwIICAYHBA0FBQkPEQsLDAkQEA4OCxMRDhMECggIFBMQGhQUExIECQYIChELDA4TERYMDSEbEhEKDhQbEQ8PLSobGBkQCBEPFB4RFCYbIjIcHxwhFhUjHQ8UGCckJSkmIjEmOCsiMScsHy0hHxspODcmKi4/MxcvR1RHMyQoMBotOkpXPiw4OkA/KyE3SlE3JyAygQGUAX+CAU1tYGZpXzYvQUpWd3tKPltvdGF+ckxKOUZ5T1BMap4ByAGuAa4BsgGjAYsBbHufAXOxAeUB2gHpAaUBkQGHAaYBygHJAb0BrwGgAZkBoAF6iwGdAeQB+wHfAdMB3AGfArwChQL7AZ8CpgKYArwB1QG2Ac8BhAFSXnirAeMBwQGmAeAB8wGkAW9ojQH4AbwCgQLPAZoBrAGzAfgBgwL7AZkCqgL8AdEBwAHHAeMBgALiArID1gLrAdkB+gG7AoQDiQOgA60D1QP+A/8CggKzAY0BrAHhAd8C4wKaA8YDnAP1AtkCjQK9AaUBzQGDAtwClwO5A5IDmAOHBKoDygLxAb8CugKSAtQB9gGIArMCvQOeBN0ErgSIBJ8EsQSxA/UChAOdAtICpwOZBJgE1AOhAtsB5gGWApsD8wOFBKgEggWFBfEEmQTLA/ICgQPqAswBWj5kuAHiAaUC/AHfAb8CrALSAvgChAPRAswCgAPGA4MD2QLSAp4DgATQA44CtgGfAfgBzwLiAswC6gKLA4sD5wLEAucB7gHrAfcB9AHiAegB+QGMAuAChgOxA5AD0QLiArEC3wK7AoYCpgL7ApMDnwOpA6sDggSdBKQD+gLIAqgCxgLPAtoCvgKWAvUBhgLHAoYD2wLEAvcBxAGdAbsB7QGCApQCmALJAsUCiQOpA68D/AKSAvwB9wGHAukBugGpAaUBngG6AfwBiwKUApsClQKmArECsAKZAuEBkgGsAccBxAGLAVNFWm6PAYoBe5cBwQHhAe0BzgHLAYYBggGbAVhThAGaAaEBhQFJW1p5iQFhOic2NS0mHjJAYGpsYVZcR2VsWF5JOTBKa3hUWGdSSFJXS1NAL0VRQz83LTQ2QDYjFSs+VEpabVQzJjNAWkM1LT5MPVBDKyw5IxcUFyEgPkNNTUUxKRwfHh4PGBkZKSUZEwwZHRcXDxUeGhgdKiYaHhEQCxISEh0REyAdGhoYDxMWFhIWHhEIChQNEA0TDBMMCQ8QEx8mGwsQERcWERcRDQ8TFg4REhILBxMJCQ4UEQQDCAQFDg0EERQLFhIHDA4NBgcKAwQLCQMMEQYDBwcIEAwKBgkFAwQHCAIBCAUDCAEJCwcLBwoFBgYGBQIEBQMCAgMJAwkCAwUBAwMGAwMDAgUFAgEDBgYHBAYBAwcBBQIBBgMCAgYBAQQCAwMBAgMEAgMDAQEBAQEDAQEDAgIBAgECAwECAQMDAwECAwICAQMCAQEBAQEBAgICAQIBAQICAQEBAQEBAQICAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQE="
java_bytes = base64.standard_b64decode(java_base64)
def rpc_call_rlp(ssl_sock, method, params, default_timeout=False):
    """Invoke *method* over RPC with base64-encoded byte parameters.

    :param ssl_sock: the connected socket passed through to rpc_call
    :param method: RPC method name
    :param params: mapping of parameter name -> bytes value
    :param default_timeout: forwarded to rpc_call unchanged
    :return: the base64-decoded result bytes, or None when the call
        produced no (or an empty) result.
    """
    # BUG FIX: build an encoded copy instead of rewriting the caller's
    # dict in place — the original mutated *params* while iterating it,
    # leaving the caller's values silently replaced by base64 strings.
    encoded = {key: base64.standard_b64encode(val).decode()
               for key, val in params.items()}
    result = rpc_call(ssl_sock, method, encoded, default_timeout)
    return base64.standard_b64decode(result.encode()) if result else None
exponent = bits[1:12] mantissa = bits[12:64] mantissa_value = sum([(1 / 2)**(bit_num + 1) * int(mantissa[bit_num]) for bit_num in range(0, 52)]) exponent = int(exponent, base=2) return (1 + mantissa_value) * 2**(exponent - 1023) if __name__ == '__main__': parser = argparse.ArgumentParser() parser.add_argument('-t', '--token', required=True) args = parser.parse_args() json_input = urllib.request.urlopen( f'{PROBLEM_URL}/problem?access_token={args.token}') b64_bytes_input = json.loads(json_input.read())['bytes'] raw_bytes = base64.standard_b64decode(b64_bytes_input) raw_bits = [b[2:].rjust(8, '0') for b in map(bin, bytearray(raw_bytes))] bit_blob = ''.join(raw_bits) # int is 32-bit wide int_part = bit_blob[0:32] # uint is also 32-bit wide uint_part = bit_blob[32:64] # short is 16-bit wide, but right after it there's 16 bits of whitespace (?) short_part = bit_blob[64:80] # float is 32-bit wide float_part = bit_blob[96:128] # double is 64-bit wide double_part = bit_blob[128:192] # big endian(ed) double is also 64-bit wide big_endian_double_part = bit_blob[192:256] results = {
import os from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes # Padding from cryptography.hazmat.primitives import padding import socket from threading import Thread, Event mysock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) # mysock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) mysock.bind((socket.gethostname(), os.getenv('PORT', 8500))) mysock.listen(5) # Setting up the cryptography env_publickey = base64.standard_b64decode(os.environ['HIS_PUBLICKEY'].encode( encoding="utf-8", errors="strict")) loaded_public_key = X25519PublicKey.from_public_bytes(env_publickey) env_private_key = base64.standard_b64decode(os.environ['PRIVATEKEY'].encode( encoding="utf-8", errors="strict")) private_key = X25519PrivateKey.from_private_bytes(env_private_key) shared_key = private_key.exchange(loaded_public_key) # Perform key derivation. derived_key = HKDF( algorithm=hashes.SHA256(), length=32, salt=None, info=b'handshake data', ).derive(shared_key) HMAC_key = HKDF( algorithm=hashes.SHA256(), length=32,
def b64_to_hex_id(id_string):
    """Convert a base64-encoded identifier into its lowercase hex string form."""
    raw = base64.standard_b64decode(str(id_string))
    # bytes.hex() is equivalent to binascii.hexlify(...).decode("utf-8").
    return raw.hex()
def _authenticate_scram(credentials, sock_info, mechanism):
    """Authenticate using SCRAM.

    Runs the SCRAM-SHA-1 / SCRAM-SHA-256 conversation against the server
    over *sock_info*, reusing a speculative first round trip and a cached
    salted-password derivation when available. Raises OperationFailure on
    any server-side validation failure.
    """
    username = credentials.username
    if mechanism == 'SCRAM-SHA-256':
        digest = "sha256"
        digestmod = hashlib.sha256
        data = saslprep(credentials.password).encode("utf-8")
    else:
        # SCRAM-SHA-1 hashes the traditional MongoDB password digest.
        digest = "sha1"
        digestmod = hashlib.sha1
        data = _password_digest(username, credentials.password).encode("utf-8")
    source = credentials.source
    cache = credentials.cache
    # Make local
    _hmac = hmac.HMAC
    # Reuse the speculative handshake response when the driver already
    # sent the client-first message along with the connection handshake.
    ctx = sock_info.auth_ctx.get(credentials)
    if ctx and ctx.speculate_succeeded():
        nonce, first_bare = ctx.scram_data
        res = ctx.speculative_authenticate
    else:
        nonce, first_bare, cmd = _authenticate_scram_start(
            credentials, mechanism)
        res = sock_info.command(source, cmd)
    server_first = res['payload']
    parsed = _parse_scram_response(server_first)
    iterations = int(parsed[b'i'])
    # RFC 5802 recommends at least 4096 iterations; fewer suggests a
    # malicious or broken server.
    if iterations < 4096:
        raise OperationFailure("Server returned an invalid iteration count.")
    salt = parsed[b's']
    rnonce = parsed[b'r']
    # The server nonce must extend the client nonce we sent.
    if not rnonce.startswith(nonce):
        raise OperationFailure("Server returned an invalid nonce.")
    without_proof = b"c=biws,r=" + rnonce
    if cache.data:
        client_key, server_key, csalt, citerations = cache.data
    else:
        client_key, server_key, csalt, citerations = None, None, None, None
    # Salt and / or iterations could change for a number of different
    # reasons. Either changing invalidates the cache.
    if not client_key or salt != csalt or iterations != citerations:
        # Expensive PBKDF-style derivation — done once, then cached.
        salted_pass = _hi(digest, data, standard_b64decode(salt), iterations)
        client_key = _hmac(salted_pass, b"Client Key", digestmod).digest()
        server_key = _hmac(salted_pass, b"Server Key", digestmod).digest()
        cache.data = (client_key, server_key, salt, iterations)
    stored_key = digestmod(client_key).digest()
    auth_msg = b",".join((first_bare, server_first, without_proof))
    client_sig = _hmac(stored_key, auth_msg, digestmod).digest()
    client_proof = b"p=" + standard_b64encode(_xor(client_key, client_sig))
    client_final = b",".join((without_proof, client_proof))
    server_sig = standard_b64encode(
        _hmac(server_key, auth_msg, digestmod).digest())
    cmd = SON([('saslContinue', 1),
               ('conversationId', res['conversationId']),
               ('payload', Binary(client_final))])
    res = sock_info.command(source, cmd)
    parsed = _parse_scram_response(res['payload'])
    # Constant-time comparison of the server signature guards against
    # a server impersonation / timing attack.
    if not compare_digest(parsed[b'v'], server_sig):
        raise OperationFailure("Server returned an invalid signature.")
    # A third empty challenge may be required if the server does not support
    # skipEmptyExchange: SERVER-44857.
    if not res['done']:
        cmd = SON([('saslContinue', 1),
                   ('conversationId', res['conversationId']),
                   ('payload', Binary(b''))])
        res = sock_info.command(source, cmd)
        if not res['done']:
            raise OperationFailure('SASL conversation failed to complete.')
def process_song(self, root, file_path):
    """Import one ProPresenter song file.

    Walks the objectified XML in *root*, extracting title, author,
    copyright/CCLI data and slide text; the slide-text layout differs per
    ProPresenter major version (4, 5 and 6 are supported).

    :param root: objectified XML root of the ProPresenter document
    :param pathlib.Path file_path: Path to the file thats being imported
    :rtype: None
    """
    self.set_defaults()
    # Extract ProPresenter versionNumber
    try:
        self.version = int(root.get('versionNumber'))
    except (ValueError, TypeError):
        log.debug('ProPresenter versionNumber invalid or missing')
        return
    # Title — fall back to the file name when the attribute is absent.
    self.title = root.get('CCLISongTitle')
    if not self.title or self.title == '':
        self.title = file_path.stem
    # Notes
    self.comments = root.get('notes')
    # Author — the attribute name varies between ProPresenter versions.
    for author_key in [
        'author', 'CCLIAuthor', 'artist', 'CCLIArtistCredits'
    ]:
        author = root.get(author_key)
        if author and len(author) > 0:
            self.parse_author(author)
    # ProPresenter 4
    if 400 <= self.version < 500:
        self.copyright = root.get('CCLICopyrightInfo')
        self.ccli_number = root.get('CCLILicenseNumber')
        count = 0
        for slide in root.slides.RVDisplaySlide:
            count += 1
            if not hasattr(slide.displayElements, 'RVTextElement'):
                log.debug('No text found, may be an image slide')
                continue
            # Slide text is stored as base64-encoded RTF.
            RTFData = slide.displayElements.RVTextElement.get('RTFData')
            rtf = base64.standard_b64decode(RTFData)
            words, encoding = strip_rtf(rtf.decode())
            self.add_verse(words, "v{count}".format(count=count))
    # ProPresenter 5 — slides are nested one level deeper, in groupings.
    elif 500 <= self.version < 600:
        self.copyright = root.get('CCLICopyrightInfo')
        self.ccli_number = root.get('CCLILicenseNumber')
        count = 0
        for group in root.groups.RVSlideGrouping:
            for slide in group.slides.RVDisplaySlide:
                count += 1
                if not hasattr(slide.displayElements, 'RVTextElement'):
                    log.debug('No text found, may be an image slide')
                    continue
                RTFData = slide.displayElements.RVTextElement.get(
                    'RTFData')
                rtf = base64.standard_b64decode(RTFData)
                words, encoding = strip_rtf(rtf.decode())
                self.add_verse(words, "v{count:d}".format(count=count))
    # ProPresenter 6 — display elements moved into generic <array> nodes.
    elif 600 <= self.version < 700:
        self.copyright = root.get('CCLICopyrightYear')
        self.ccli_number = root.get('CCLISongNumber')
        count = 0
        for group in root.array.RVSlideGrouping:
            for slide in group.array.RVDisplaySlide:
                count += 1
                for item in slide.array:
                    if not (item.get('rvXMLIvarName') == "displayElements"):
                        continue
                    if not hasattr(item, 'RVTextElement'):
                        log.debug('No text found, may be an image slide')
                        continue
                    for contents in item.RVTextElement.NSString:
                        b64Data = contents.text
                        data = base64.standard_b64decode(b64Data)
                        words = None
                        # Only the RTFData payload carries the lyrics.
                        if contents.get('rvXMLIvarName') == "RTFData":
                            words, encoding = strip_rtf(data.decode())
                            break
                    if words:
                        self.add_verse(words, "v{count:d}".format(count=count))
    if not self.finish():
        self.log_error(self.import_source)
import email_grade
from getpass import getpass
import sys
import time
import config
import subprocess
import quopri
import base64
import binascii

if __name__ == '__main__':
    # Manual experiment script: decode a sample submission body with the
    # two transfer encodings a mail message may use.
    #eg = email_grade.EmailGrade('twentyoneguns')
    #print(str(eg.email_list()))
    # sample = '''/**\r\n * A simple method that returns a string with a hello world message.\r\n * @return The string "Hello World!!!".\r\n */\r\n public static String helloWorld(){\r\n return "Hello World!!!";\r\n }\r\n'''
    # print(eg._grade(assignmentname = config.assignmentname, email_body = sample, methodname = config.method))
    sample = '''/*** Something */ public static String helloWorld(){ return "Correct String= !"; }'''
    decoded = quopri.decodestring(sample)
    print("quopri decoded: " + str(decoded))
    decodedx2 = decoded.decode("utf-8")
    print("decoded quopri: " + decodedx2)
    try:
        b64decode = base64.standard_b64decode(sample)
        # BUG FIX: b64decode is bytes — concatenating it to a str raised
        # TypeError on Python 3; wrap it with str() for display.
        print("decoded b64: " + str(b64decode))
    except binascii.Error as err:
        # The sample is not necessarily valid base64; report instead of
        # crashing the experiment.
        print("sample is not valid base64: " + str(err))
def __init__(self, **kwargs):
    """Constructor for the PyArlo object.

    Builds the config, storage, background-task and back-end components,
    discovers devices, and schedules (or synchronously runs) the initial
    refresh plus the periodic housekeeping jobs.
    """
    # core values
    self._last_error = None
    # Set up the config first.
    self._cfg = ArloCfg(self, **kwargs)
    # Create storage/scratch directory.
    if self._cfg.state_file is not None or self._cfg.dump_file is not None:
        try:
            os.mkdir(self._cfg.storage_dir)
        except Exception:
            # Best-effort: the directory may already exist.
            pass
    # Create remaining components.
    self._bg = ArloBackground(self)
    self._st = ArloStorage(self)
    self._be = ArloBackEnd(self)
    self._ml = ArloMediaLibrary(self)
    # Failed to login, then stop now!
    if not self._be.is_connected:
        return
    self._lock = threading.Condition()
    self._bases = []
    self._cameras = []
    self._lights = []
    self._doorbells = []
    # On day flip we do extra work, record today.
    self._today = datetime.date.today()
    # Every few hours we can refresh the device list.
    self._refresh_devices_at = time.monotonic() + self._cfg.refresh_devices_every
    # default blank image when waiting for camera image to appear
    self._blank_image = base64.standard_b64decode(BLANK_IMAGE)
    # Slow piece.
    # Get devices, fill local db, and create device instance.
    self.info('pyaarlo starting')
    self._started = False
    self._refresh_devices()
    # Classify each discovered device into base stations, cameras,
    # doorbells and lights; a device can land in more than one bucket.
    for device in self._devices:
        dname = device.get('deviceName')
        dtype = device.get('deviceType')
        if device.get('state', 'unknown') != 'provisioned':
            self.info('skipping ' + dname + ': state unknown')
            continue
        # This needs it's own code now... Does no parent indicate a base station???
        if dtype == 'basestation' or \
                device.get('modelId') == 'ABC1000' or dtype == 'arloq' or dtype == 'arloqs' or \
                device.get("modelId").startswith("FB1001A"):
            self._bases.append(ArloBase(dname, self, device))
        # video doorbell can be its own base station, it can also be assigned to a real base station
        if device.get('modelId').startswith('AVD1001'):
            parent_id = device.get('parentId', None)
            if parent_id is None or parent_id == device.get('deviceId', None):
                self._bases.append(ArloBase(dname, self, device))
        if dtype == 'arlobridge':
            self._bases.append(ArloBase(dname, self, device))
        if dtype == 'camera' or dtype == 'arloq' or dtype == 'arloqs' or \
                device.get('modelId').startswith('AVD1001'):
            self._cameras.append(ArloCamera(dname, self, device))
        if dtype == 'doorbell':
            self._doorbells.append(ArloDoorBell(dname, self, device))
        if dtype == 'lights':
            self._lights.append(ArloLight(dname, self, device))
    # Save out unchanging stats!
    self._st.set(['ARLO', TOTAL_CAMERAS_KEY], len(self._cameras))
    self._st.set(['ARLO', TOTAL_BELLS_KEY], len(self._doorbells))
    self._st.set(['ARLO', TOTAL_LIGHTS_KEY], len(self._lights))
    # Always ping bases first!
    self._ping_bases()
    # Initial config and state retrieval.
    if self._cfg.synchronous_mode:
        # Synchronous; run them one after the other
        self.debug('getting initial settings')
        self._refresh_bases(initial=True)
        self._refresh_ambient_sensors()
        self._ml.load()
        self._refresh_camera_thumbnails(True)
        self._refresh_camera_media(True)
        self._initial_refresh_done()
    else:
        # Asynchronous; queue them to run one after the other
        self.debug('queueing initial settings')
        self._bg.run(self._refresh_bases, initial=True)
        self._bg.run(self._refresh_ambient_sensors)
        self._bg.run(self._ml.load)
        self._bg.run(self._refresh_camera_thumbnails, wait=False)
        self._bg.run(self._refresh_camera_media, wait=False)
        self._bg.run(self._initial_refresh_done)
    # Register house keeping cron jobs.
    self.debug('registering cron jobs')
    self._bg.run_every(self._fast_refresh, FAST_REFRESH_INTERVAL)
    self._bg.run_every(self._slow_refresh, SLOW_REFRESH_INTERVAL)
    # Wait for initial refresh
    if self._cfg.wait_for_initial_setup:
        with self._lock:
            while not self._started:
                # _started is flipped by the initial-refresh callback;
                # poll with a timeout so we keep logging progress.
                self.debug('waiting for initial setup...')
                self._lock.wait(1)
    self.debug('setup finished...')
def _validate_signature(self, receipt, signature):
    """Check a base64-encoded RSA *signature* over *receipt*.

    Returns a truthy value when verification succeeds, False otherwise.
    """
    try:
        sig = base64.standard_b64decode(signature)
        return rsa.verify(receipt.encode(), sig, self.public_key)
    except (rsa.VerificationError, TypeError):
        # TypeError covers a malformed/None signature argument;
        # VerificationError is the normal "signature invalid" path.
        return False
def load_pem(contents, pem_marker):
    """Loads a PEM file.

    :param contents: the contents of the file to interpret
    :param pem_marker: the marker of the PEM content, such as 'RSA PRIVATE KEY'
        when your file has '-----BEGIN RSA PRIVATE KEY-----' and
        '-----END RSA PRIVATE KEY-----' markers.

    :return: the base64-decoded content between the start and end markers.

    @raise ValueError: when the content is invalid, for example when the start
        marker cannot be found.
    """
    # Normalise to bytes; ASCII covers every character legal in PEM.
    if not is_bytes(contents):
        contents = contents.encode('ascii')

    pem_start, pem_end = _markers(pem_marker)

    payload_lines = []
    inside = False
    reached_end = False

    for raw_line in contents.splitlines():
        stripped = raw_line.strip()

        # Blank lines carry no payload.
        if not stripped:
            continue

        if stripped == pem_start:
            if inside:
                raise ValueError('Seen start marker "%s" twice' % pem_start)
            inside = True
            continue

        # Ignore anything before the first start marker.
        if not inside:
            continue

        if stripped == pem_end:
            reached_end = True
            break

        # Header fields such as "Proc-Type: ..." are not payload.
        if b':' in stripped:
            continue

        payload_lines.append(stripped)

    # Sanity checks mirror the marker bookkeeping above.
    if not payload_lines:
        raise ValueError('No PEM start marker "%s" found' % pem_start)
    if not reached_end:
        raise ValueError('No PEM end marker "%s" found' % pem_end)

    return base64.standard_b64decode(b''.join(payload_lines))
def test_b64decode(self):
    """Exercise b64decode, standard_b64decode and urlsafe_b64decode with
    both bytes and str inputs, altchars, and non-bytes buffer types."""
    eq = self.assertEqual
    tests = {b"d3d3LnB5dGhvbi5vcmc=": b"www.python.org",
             b'AA==': b'\x00',
             b"YQ==": b"a",
             b"YWI=": b"ab",
             b"YWJj": b"abc",
             b"YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXpBQkNE"
             b"RUZHSElKS0xNTk9QUVJTVFVWV1hZWjAxMjM0\nNT"
             b"Y3ODkhQCMwXiYqKCk7Ojw+LC4gW117fQ==":
             b"abcdefghijklmnopqrstuvwxyz"
             b"ABCDEFGHIJKLMNOPQRSTUVWXYZ"
             b"0123456789!@#0^&*();:<>,. []{}",
             b'': b'',
             }
    for data, res in tests.items():
        eq(base64.b64decode(data), res)
        eq(base64.b64decode(data.decode('ascii')), res)
    # Non-bytes
    self.check_other_types(base64.b64decode, b"YWJj", b"abc")
    self.check_decode_type_errors(base64.b64decode)

    # Test with arbitrary alternative characters
    tests_altchars = {(b'01a*b$cd', b'*$'): b'\xd3V\xbeo\xf7\x1d',
                      }
    for (data, altchars), res in tests_altchars.items():
        data_str = data.decode('ascii')
        altchars_str = altchars.decode('ascii')
        # All four bytes/str combinations must behave identically.
        eq(base64.b64decode(data, altchars=altchars), res)
        eq(base64.b64decode(data_str, altchars=altchars), res)
        eq(base64.b64decode(data, altchars=altchars_str), res)
        eq(base64.b64decode(data_str, altchars=altchars_str), res)

    # Test standard alphabet
    for data, res in tests.items():
        eq(base64.standard_b64decode(data), res)
        eq(base64.standard_b64decode(data.decode('ascii')), res)
    # Non-bytes
    self.check_other_types(base64.standard_b64decode, b"YWJj", b"abc")
    self.check_decode_type_errors(base64.standard_b64decode)

    # Test with 'URL safe' alternative characters
    tests_urlsafe = {b'01a-b_cd': b'\xd3V\xbeo\xf7\x1d',
                     b'': b'',
                     }
    for data, res in tests_urlsafe.items():
        eq(base64.urlsafe_b64decode(data), res)
        eq(base64.urlsafe_b64decode(data.decode('ascii')), res)
    # Non-bytes
    self.check_other_types(base64.urlsafe_b64decode, b'01a-b_cd',
                           b'\xd3V\xbeo\xf7\x1d')
    self.check_decode_type_errors(base64.urlsafe_b64decode)
def rpc_param_decode(param):
    """Decode an RPC parameter from its base64 string form back to bytes."""
    raw = param.encode()
    return base64.standard_b64decode(raw)
def _load(self, filename):
    """Read a Vorbis-comment based file and translate its tags into a
    Metadata object, including embedded cover art.

    Returns the populated Metadata. Uses self._File (a mutagen file
    class) to open *filename*.
    """
    log.debug("Loading file %r", filename)
    file = self._File(encode_filename(filename))
    file.tags = file.tags or {}
    metadata = Metadata()
    # Each tag may carry multiple values; translate name/value pairs
    # one at a time into Picard's internal tag names.
    for origname, values in file.tags.items():
        for value in values:
            name = origname
            if name == "date" or name == "originaldate":
                # YYYY-00-00 => YYYY
                value = sanitize_date(value)
            elif name == 'performer' or name == 'comment':
                # transform "performer=Joe Barr (Piano)" to "performer:Piano=Joe Barr"
                name += ':'
                if value.endswith(')'):
                    # Scan backwards to find the matching '(' of the
                    # trailing parenthesised role.
                    start = len(value) - 2
                    count = 1
                    while count > 0 and start > 0:
                        if value[start] == ')':
                            count += 1
                        elif value[start] == '(':
                            count -= 1
                        start -= 1
                    if start > 0:
                        name += value[start + 2:-1]
                        value = value[:start]
            elif name.startswith('rating'):
                try:
                    name, email = name.split(':', 1)
                except ValueError:
                    email = ''
                # Only honour ratings stored for the configured user.
                if email != sanitize_key(
                        config.setting['rating_user_email']):
                    continue
                name = '~rating'
                try:
                    # Scale the 0..1 float into the configured step range.
                    value = str(
                        round((float(value) *
                               (config.setting['rating_steps'] - 1))))
                except ValueError:
                    log.warning('Invalid rating value in %r: %s',
                                filename, value)
            elif name == "fingerprint" and value.startswith(
                    "MusicMagic Fingerprint"):
                # Strip the 22-character "MusicMagic Fingerprint" prefix.
                name = "musicip_fingerprint"
                value = value[22:]
            elif name == "tracktotal":
                # Prefer an explicit totaltracks tag when both exist.
                if "totaltracks" in file.tags:
                    continue
                name = "totaltracks"
            elif name == "disctotal":
                if "totaldiscs" in file.tags:
                    continue
                name = "totaldiscs"
            elif name == "metadata_block_picture":
                # Official embedded-picture tag: a base64-encoded FLAC
                # Picture block.
                image = mutagen.flac.Picture(
                    base64.standard_b64decode(value))
                try:
                    coverartimage = TagCoverArtImage(
                        file=filename,
                        tag=name,
                        types=types_from_id3(image.type),
                        comment=image.desc,
                        support_types=True,
                        data=image.data,
                    )
                except CoverArtImageError as e:
                    log.error('Cannot load image from %r: %s' % (filename, e))
                else:
                    metadata.images.append(coverartimage)
                continue
            elif name in self.__translate:
                name = self.__translate[name]
            metadata.add(name, value)
    # Native FLAC files carry pictures outside the Vorbis comments.
    if self._File == mutagen.flac.FLAC:
        for image in file.pictures:
            try:
                coverartimage = TagCoverArtImage(
                    file=filename,
                    tag='FLAC/PICTURE',
                    types=types_from_id3(image.type),
                    comment=image.desc,
                    support_types=True,
                    data=image.data,
                )
            except CoverArtImageError as e:
                log.error('Cannot load image from %r: %s' % (filename, e))
            else:
                metadata.images.append(coverartimage)
    # Read the unofficial COVERART tags, for backward compatibility only
    if "metadata_block_picture" not in file.tags:
        try:
            for data in file["COVERART"]:
                try:
                    coverartimage = TagCoverArtImage(
                        file=filename,
                        tag='COVERART',
                        data=base64.standard_b64decode(data))
                except CoverArtImageError as e:
                    log.error('Cannot load image from %r: %s' % (filename, e))
                else:
                    metadata.images.append(coverartimage)
        except KeyError:
            pass
    self._info(metadata, file)
    return metadata
def decode_component(x):
    """Base64-decode *x* and return the payload as a UTF-8 string."""
    raw = standard_b64decode(x)
    return raw.decode('utf-8')
def decode_base64(data):
    """Decode base64 text, tolerating missing '=' padding characters."""
    remainder = len(data) % 4
    if remainder:
        # Restore the padding the sender stripped off.
        data += '=' * (4 - remainder)
    return base64.standard_b64decode(data)
def recover_transmission(string, simple=False):
    """Recover the original bytes from an encoded transmission.

    With simple=True the whole string is base64-decoded; otherwise only
    the first half of *string* carries payload (the rest is framing).
    """
    if simple:
        return base64.standard_b64decode(string)
    payload = string[:len(string) // 2]
    return base64.standard_b64decode(payload.encode(ENCODING))
def tokenBase64(self, encodedToken):
    """ Add deviceToken as base64 encoded string (not binary) """
    decoded = base64.standard_b64decode(encodedToken)
    self.deviceToken = decoded
    # Fluent interface: allow chained configuration calls.
    return self
def decodeBussinessApp(html, iframeReferer):
    """Extract a playable stream URL (m3u8 or rtmp command line) from a
    businessapp/playerhd embed page.

    NOTE(review): Python 2 code — urllib.unquote and str/bytes mixing
    below do not run on Python 3; verify before porting.
    """
    #print html
    response = ""
    # Locate the jwplayer JS file; its host decides the swf player URL.
    jsFile = "http://www.businessapp1.pw/jwplayer5/addplayer/jwplayer.js"
    if html.find("jwplayer5/addplayer/jwplayer.js") > -1:
        jsFile = Decoder.rExtractWithRegex(
            "http://", "jwplayer5/addplayer/jwplayer.js", html)
        logger.info("updated js player to: " + jsFile)
    elif html.find("http://www.playerhd1.pw") > -1:
        jsFile = "http://www.playerhd1.pw/jwplayer5/addplayer/jwplayer.js"
    token = ""
    try:
        token = Decoder.extractBusinessappToken(iframeReferer, jsFile)
    except:
        # Token extraction is best-effort; the rtmp fallback retries it.
        logger.error("Error, trying without token")
        pass
    swfUrl = "http://www.businessapp1.pw/jwplayer5/addplayer/jwplayer.flash.swf"
    if html.find(
            "jwplayer5/addplayer/jwplayer.flash.swf"
    ) > -1:  #http://www.playerapp1.pw/jwplayer5/addplayer/jwplayer.flash.swf
        swfUrl = Decoder.rExtractWithRegex(
            "http://", "jwplayer5/addplayer/jwplayer.flash.swf", html)
        logger.info("updated swf player to: " + swfUrl)
    elif jsFile.find("businessapp1.pw") == -1:
        swfUrl = "http://" + Decoder.extract(
            '//', "/", jsFile) + "/jwplayer5/addplayer/jwplayer.flash.swf"
        logger.info("updated swf player to: " + swfUrl)
    elif html.find("http://www.playerhd1.pw") > -1:
        swfUrl = "http://www.playerhd1.pw/jwplayer5/addplayer/jwplayer.flash.swf"
    # Newer pages ship the stream parts in hidden inputs ssx1/ssx4.
    if html.find('<input type="hidden" id="ssx1" value="') > -1:
        ssx1 = Decoder.extract('<input type="hidden" id="ssx1" value="', '"',
                               html)
        ssx4 = Decoder.extract('<input type="hidden" id="ssx4" value="', '"',
                               html)
        escaped = Decoder.extract(
            "<script type='text/javascript'>document.write(unescape('", "'",
            html)
        unescaped = urllib.unquote(escaped)
        decodedssx1 = base64.standard_b64decode(ssx1)
        decodedssx4 = base64.standard_b64decode(ssx4)
        #print "decoded{"+decodedssx1+","+decodedssx4+"} unescaped: "+unescaped
        iframeReferer = urllib.unquote_plus(
            iframeReferer.replace("+", "@#@")).replace(
                "@#@", "+")  #unquote_plus replaces '+' characters
        if decodedssx4.find(".m3u8") > -1:
            logger.info("Found simple link: " + decodedssx4)
            response = Decoder.getContent(decodedssx4, "",
                                          iframeReferer).read()
            # Master playlists point at a chunklist; prefer it directly.
            if response.find("chunklist.m3u8") > -1:
                finalSimpleLink2 = decodedssx4[:decodedssx4.rfind("/") +
                                               1] + "chunklist.m3u8"
                response = Decoder.getContent(finalSimpleLink2, "",
                                              iframeReferer).read()
                logger.debug("response for m3u8(1): " + response)
                response = finalSimpleLink2 + "|Referer=" + iframeReferer
            else:
                logger.debug("response for m3u8(2): " + response)
                response = decodedssx4
        elif token != "" and decodedssx4.find("vod/?token=") > -1:
            # Build an rtmpdump-style command line for tokenised VOD.
            app = decodedssx4[decodedssx4.find("vod/?token="):]
            response = decodedssx4 + " playpath=" + decodedssx1 + " app=" + app + " swfUrl=" + swfUrl + " token=" + token + " flashver=WIN/2019,0,0,226 live=true timeout=14 pageUrl=" + iframeReferer
        elif token != "":
            app = decodedssx4[decodedssx4.find("redirect/?token="):]
            response = decodedssx4 + " playpath=" + decodedssx1 + " app=" + app + " swfUrl=" + swfUrl + " token=" + token + " flashver=WIN/2019,0,0,226 live=true timeout=14 pageUrl=" + iframeReferer
        else:  #m3u8 file
            logger.info("link1: " + decodedssx1)
            logger.info("link2: " + decodedssx4)
            logger.info("to player: " + response)
    else:
        # Older pages: scan every hidden input; first value is the
        # playpath, a later one the rtmp target.
        playPath = ""
        rtmpValue = ""
        #i = 0
        finalSimpleLink = ""
        for splittedHtml in html.split('<input type="hidden" id="'):
            if splittedHtml.find(
                    "DOCTYPE html PUBLIC") == -1 and splittedHtml.find(
                        ' value=""') == -1:
                #logger.info("processing hidden: "+splittedHtml)
                extracted = splittedHtml[splittedHtml.find('value="') +
                                         len('value="'):]
                extracted = extracted[0:extracted.find('"')]
                logger.info("extracted hidden value: " + extracted)
                if playPath == "":
                    playPath = base64.standard_b64decode(extracted)
                else:
                    rtmpValue = base64.standard_b64decode(extracted)
                decodedAndExtracted = base64.standard_b64decode(extracted)
                logger.info("original: " + extracted + ", extracted: " +
                            decodedAndExtracted)
                if decodedAndExtracted.find(".m3u8") > -1:
                    finalSimpleLink = decodedAndExtracted
                #i+=1
        if finalSimpleLink != "":
            logger.info("Found simple link: " + finalSimpleLink)
            response = Decoder.getContent(finalSimpleLink, "",
                                          iframeReferer).read()
            if response.find("chunklist.m3u8") > -1:
                finalSimpleLink2 = finalSimpleLink[:finalSimpleLink.
                                                   rfind("/") +
                                                   1] + "chunklist.m3u8"
                response = Decoder.getContent(finalSimpleLink2, "",
                                              iframeReferer).read()
                logger.debug("response for m3u8(a): " + response)
                response = finalSimpleLink2 + "|Referer=" + iframeReferer
            else:
                logger.debug("response for m3u8(b): " + response)
                response = finalSimpleLink
        elif rtmpValue.find("vod/?token=") > -1:
            app = rtmpValue[rtmpValue.find("vod/?token="):]
            iframeReferer = urllib.unquote_plus(
                iframeReferer.replace("+", "@#@")).replace(
                    "@#@", "+")  #unquote_plus replaces '+' characters
            token = Decoder.extractBusinessappToken(iframeReferer, jsFile)
            response = rtmpValue + " playpath=" + playPath + " app=" + app + " swfUrl=" + swfUrl + " token=" + token + " flashver=WIN/2019,0,0,226 live=true timeout=14 pageUrl=" + iframeReferer
        else:
            app = "redirect" + rtmpValue[rtmpValue.find("?token=play@"):]
            token = Decoder.extractBusinessappToken(iframeReferer, jsFile)
            response = rtmpValue + " playpath=" + playPath + " app=" + app + " swfUrl=" + swfUrl + " token=" + token + " flashver=WIN/2019,0,0,226 live=true timeout=14 pageUrl=" + iframeReferer
    return response
def unpack_data(data):
    """Base64-decode *data* and strip trailing NUL padding bytes.

    Args:
        data: Base64 text, as ``str`` or ``bytes``.

    Returns:
        bytes: the decoded payload without trailing ``\\0`` bytes.
    """
    if isinstance(data, str):
        # Only str needs encoding; the original unconditional encode()
        # broke on bytes input under Python 3.
        data = data.encode('utf-8')
    decoded = base64.standard_b64decode(data)
    # rstrip on bytes requires a bytes argument on Python 3 — the original
    # rstrip("\0") raised TypeError there.
    return decoded.rstrip(b"\0")
def set_cert_from_base64(self, content_base64):
    """Decode a Base64-encoded certificate and install it.

    Args:
        content_base64: Base64 text (coerced with ``str()`` first).

    Raises:
        ValueError: if the payload is not valid Base64.  ``ValueError`` is a
            subclass of ``Exception``, so callers that caught the old bare
            ``Exception`` keep working.
    """
    try:
        # b64decode raises binascii.Error on Python 3 and TypeError on
        # Python 2 for malformed input; cover both.
        raw = base64.standard_b64decode(str(content_base64))
    except (TypeError, binascii.Error):
        raise ValueError('The provided certificate is not Base64-encoded')
    self.set_cert_from_raw(raw)
        # NOTE(review): tail of an HTTP handler method — the enclosing `def`
        # is outside this chunk; indentation here is reconstructed.  Sends a
        # 200 response with the supplied headers, then writes the body.
        status = 200
        self.send_response(status)
        for key, value in headers.items():
            self.send_header(key, value)
        self.end_headers()
        self.wfile.write(output)


# Tiny (1x1) fixture images, decoded from Base64 at import time.
# standard_b64decode() discards the embedded newlines (non-alphabet
# characters are ignored when validate=False, the default), so the
# wrapped layout is cosmetic only.
DATA_JPG = base64.standard_b64decode("""
/9j/4AAQSkZJRgABAQEASABIAAD/2wBD
AAEBAQEBAQEBAQEBAQEBAQEBAQEBAQEB
AQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEB
AQEBAQEBAQEBAQEBAQEBAQH/2wBDAQEB
AQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEB
AQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEB
AQEBAQEBAQEBAQEBAQH/wAARCAABAAED
AREAAhEBAxEB/8QAFAABAAAAAAAAAAAA
AAAAAAAACv/EABQQAQAAAAAAAAAAAAAA
AAAAAAD/xAAUAQEAAAAAAAAAAAAAAAAA
AAAA/8QAFBEBAAAAAAAAAAAAAAAAAAAA
AP/aAAwDAQACEQMRAD8AfwD/2Q==""")
DATA_PNG = base64.standard_b64decode("""
iVBORw0KGgoAAAANSUhEUgAAAAEAAAAB
CAAAAAA6fptVAAAACklEQVQIHWP4DwAB
AQEANl9ngAAAAABJRU5ErkJggg==""")
DATA_GIF = base64.standard_b64decode("""
R0lGODdhAQABAIAAAP///////ywAAAAA
AQABAAACAkQBADs=""")
# One-off migration script (Python 2 era: decoded values are handled as str):
# replays key/value pairs dumped from etcd (JSON with base64-encoded keys and
# values) into another etcd cluster via etcdctl.
import os
import base64
import json

etcd_file=open("etcd_data.txt","r")
etcd_data=json.load(etcd_file)
for data in etcd_data["kvs"]:
    key,value=data["key"],data["value"]
    key=base64.standard_b64decode(key)
    value=base64.standard_b64decode(value)
    # Values starting with "-" would be parsed as etcdctl flags; "--"
    # terminates option parsing so the value is taken literally.
    if value.startswith("-"):
        os.system(
            "ETCDCTL_API=3 /home/qboxserver/ava-etcd/_package/etcdctl --endpoints=http://10.200.20.87:2379,http://10.200.20.89:2379,http://10.200.20.90:2379 put "
            + key + " -- '" + value + "'")
        continue
    # NOTE(review): shell command built by string concatenation — keys or
    # values containing quotes/metacharacters will break or inject; consider
    # subprocess.run with a list argv.  Left unchanged in this doc-only pass.
    os.system("ETCDCTL_API=3 /home/qboxserver/ava-etcd/_package/etcdctl --endpoints=http://10.200.20.87:2379,http://10.200.20.89:2379,http://10.200.20.90:2379 put "+key+" '"+value+"'")

# NOTE(review): everything below is bash, not Python — residue from a
# concatenated paste.  It rewrites the etcd endpoint list in each app's
# app.yaml.  Preserved verbatim.
root="/Users/cj/qiniu/deploy-test/playbook/ava-serving/apps"
apps=($(ls apps | xargs))
for app in $apps; do sed '' -i 's/http:\/\/10.200.30.13:2379,http:\/\/10.200.30.14:2379,http:\/\/10.200.30.15:2379/http:\/\/10.200.20.87:2379,http:\/\/10.200.20.89:2379,http:\/\/10.200.20.90:2379/' $root/$app/app.yaml; done
def process_frame(self, numbered_datum):
    """Process one ``(frame_number, json_bytes)`` datum end-to-end.

    Decodes the frame JSON, optionally reconstructs the raw image from its
    Base64 payload, generates a thumbnail when missing, runs per-person
    featurization (posture, face, head pose / gaze), matches people across
    frames via the centroid tracker, and computes per-keypoint motion deltas
    against the previous tracked frame.

    Returns:
        tuple: ``(raw_image, frame_data)`` — the (possibly annotated) image
        array or ``None``, and the enriched frame dict (``None`` if JSON
        decoding failed before assignment).

    NOTE(review): indentation reconstructed from a flattened paste; dedent
    placement at a few points (marked below) should be confirmed against the
    original file.
    """
    start_time = time.time()
    frame_data = None
    raw_image = None
    try:
        json_time = 0
        featurization_time = 0
        thumbnail_time = 0
        interframe_time = 0
        frame_number, datum = numbered_datum
        frame_data = json.loads(datum.decode('utf-8'))
        if frame_number is not None:
            frame_data['frameNumber'] = frame_number
        image_rows = 0
        image_cols = 0
        has_raw_image = "rawImage" in frame_data.keys()
        has_thumbnail = "thumbnail" in frame_data.keys()
        raw_image = None
        if has_raw_image:
            # Rebuild the H x W x 3 uint8 image from its Base64 payload,
            # then drop the (large) encoded blob from the frame dict.
            image_rows = frame_data["rawImage"]["rows"]
            image_cols = frame_data["rawImage"]["columns"]
            image_bytes = base64.standard_b64decode(
                frame_data["rawImage"]["binary"])
            image_array = numpy.frombuffer(image_bytes, dtype=numpy.uint8)
            raw_image = numpy.reshape(image_array,
                                      (image_rows, image_cols, 3))
            del frame_data["rawImage"]
        if not has_thumbnail:
            # No thumbnail supplied: render a 240x135 JPEG (quality 50)
            # from the raw image and embed it Base64-encoded.
            thumb_start_time = time.time()
            resized_image = cv2.resize(raw_image, (240, 135))
            r, buf = cv2.imencode('.jpg', resized_image,
                                  [int(cv2.IMWRITE_JPEG_QUALITY), 50])
            frame_data['thumbnail'] = {
                'binary': base64.standard_b64encode(buf).decode('ascii'),
                'originalCols': image_cols,
                'originalRows': image_rows
            }
            thumbnail_time = time.time() - thumb_start_time
        elif has_thumbnail:
            # NOTE(review): reads "originalColumns" here but writes
            # "originalCols" above — confirm which key upstream producers use.
            image_rows = frame_data["thumbnail"]["originalRows"]
            image_cols = frame_data["thumbnail"]["originalColumns"]
        # Featurization
        featurization_start_time = time.time()
        # extract key points
        bodies = frame_data['people']
        rects = []
        person_poses = []
        if self.area_of_interest is not None:
            # Restrict detections to the configured area of interest, then
            # drop bodies whose keypoints no longer pass the validity check.
            bodies = list(
                map(
                    lambda b: get_pts_of_interest_from_person(
                        b, self.area_of_interest), bodies))
            bodies = list(filter(lambda b: check_body_pts(b['body']),
                                 bodies))
        for body in bodies:
            body_keypoints = body["body"]
            face_keypoints = body["face"] if "face" in body.keys() else None
            # prune body keypoints
            body_keypoints = prune_body_pts(body_keypoints)
            body["body"] = body_keypoints
            pose = get_pose_pts(body_keypoints)
            body['inference'] = {'posture': {}, 'face': {}, 'head': {}}
            # prepare inter-frame tracking
            box = get_pose_box(pose)
            rects.append(box.astype("int"))
            person_poses.append(pose)
        # Interframe
        interframe_start_time = time.time()
        tracking_id = None
        objects, poses = self.centroid_tracker.update(rects, person_poses)
        for body in bodies:
            body_keypoints = body["body"]
            pose = get_pose_pts(body_keypoints)
            # Match this body to a tracked pose by exact equality of
            # keypoint 1 coordinates (presumably the neck/chest anchor —
            # confirm keypoint indexing convention).
            for (objectID, person_pose) in poses.items():
                if pose[1][0] == person_pose[1][0] and pose[1][
                        1] == person_pose[1][1]:
                    body['inference']['trackingId'] = objectID + 1
                    if self.video_out is not None or (
                            self.file_params is not None
                            and self.file_params['image']):
                        # Debug overlay: tracking ID label + centroid dot.
                        text = "ID {}".format(objectID)
                        cv2.putText(raw_image, text,
                                    (person_pose[1][0] - 10,
                                     person_pose[1][1] - 10),
                                    cv2.FONT_HERSHEY_SIMPLEX, 1,
                                    (0, 255, 0), 2)
                        cv2.circle(raw_image,
                                   (person_pose[1][0], person_pose[1][1]),
                                   4, (0, 255, 0), -1)
                    break
        interframe_time = time.time() - interframe_start_time
        if self.channel == 'instructor':
            # Instructor channel keeps only one body: the tracked pose with
            # the smallest valid (non-zero) y for keypoint 1.
            y_min = None
            instructor_body_track = None
            for (objectID, person_pose) in poses.items():
                if y_min is None and person_pose[1][1] > 0 and person_pose[
                        1][0] > 0:
                    y_min = person_pose[1][1]
                    instructor_body_track = person_pose
                else:
                    if person_pose[1][1] < y_min and person_pose[1][
                            1] > 0 and person_pose[1][0] > 0:
                        y_min = person_pose[1][1]
                        instructor_body_track = person_pose
            new_bodies = []
            if instructor_body_track is not None:
                for body in bodies:
                    body_keypoints = body["body"]
                    pose = get_pose_pts(body_keypoints)
                    if pose[1][0] == instructor_body_track[1][0] and pose[
                            1][1] == instructor_body_track[1][1]:
                        new_bodies.append(body)
                        break
            bodies = new_bodies
        for body in bodies:
            body_keypoints = body["body"]
            face_keypoints = body["face"] if "face" in body.keys() else None
            pose = get_pose_pts(body_keypoints)
            # face orientation
            faceOrientation = None
            faceOrientation = get_facing_direction(pose)
            # Sit stand
            sit_stand, color_stand, pts = predict_sit_stand(body_keypoints)
            if self.video_out is not None or (self.file_params is not None
                                              and
                                              self.file_params['image']):
                cv2.putText(raw_image, sit_stand,
                            (int(pts[1][0]) - 10, int(pts[1][1]) + 30),
                            cv2.FONT_HERSHEY_SIMPLEX, 0.75, color_stand, 2,
                            cv2.LINE_AA)
            # Armpose
            armpose, color_pose, pts = predict_armpose(body_keypoints)
            if self.video_out is not None or (self.file_params is not None
                                              and
                                              self.file_params['image']):
                cv2.putText(raw_image, armpose,
                            (int(pts[1][0]) - 10, int(pts[1][1]) + 10),
                            cv2.FONT_HERSHEY_SIMPLEX, 0.75, color_pose, 2,
                            cv2.LINE_AA)
                raw_image = render_pose_draw([pose], raw_image, color_pose,
                                             color_stand)
            # Mouth
            mouth = None
            smile = None
            if face_keypoints is not None:
                mouth, _, smile, _ = predict_mouth(face_keypoints)
            tvec = None
            yaw = None
            pitch = None
            roll = None
            gaze_vector = None
            face = get_face(pose)
            if (self.process_gaze):
                tvec = get_3d_head_position(pose, raw_image.shape)
                # face box
                if face is not None:
                    # Head pose is only estimated for face crops of at
                    # least 64x64 pixels.
                    face_crop = raw_image[face[0][1]:face[1][1],
                                          face[0][0]:face[1][0]]
                    if face_crop.shape[0] >= 64 and face_crop.shape[
                            1] >= 64:
                        yaw, pitch, roll, gaze_start, gaze_stop = get_head_pose_vector(
                            face_crop, face)
                        yaw = yaw.flatten().tolist()[0]
                        pitch = pitch.flatten().tolist()[0]
                        roll = roll.flatten().tolist()[0]
                        gaze_vector = [gaze_start, gaze_stop]
                        cv2.line(raw_image, gaze_start, gaze_stop,
                                 (255, 255, 255), 2)
            # Collect all per-body inference outputs; each is optional.
            if armpose is not None:
                body['inference']['posture']['armPose'] = armpose
            if sit_stand is not None:
                body['inference']['posture']['sitStand'] = sit_stand
            if face is not None:
                body['inference']['face']['boundingBox'] = face
            if mouth is not None:
                body['inference']['face']['mouth'] = mouth
            if smile is not None:
                body['inference']['face']['smile'] = smile
            if yaw is not None:
                body['inference']['head']['yaw'] = yaw
            if pitch is not None:
                body['inference']['head']['pitch'] = pitch
            if roll is not None:
                body['inference']['head']['roll'] = roll
            if faceOrientation is not None:
                body['inference']['face']['orientation'] = faceOrientation
            if gaze_vector is not None:
                body['inference']['head']['gazeVector'] = gaze_vector
            if tvec is not None:
                body['inference']['head']['translationVector'] = tvec
        featurization_time = time.time() - featurization_start_time
        # Acceleration
        # Compare each tracked pose against the pose stored for the same
        # objectID in the previous snapshot to derive motion deltas.
        prev_objects = self.state['prev_objects']
        previous_time = self.state['prev_time']
        current_time = time.time()
        for body in bodies:
            body_keypoints = body["body"]
            pose = get_pose_pts(body_keypoints)
            for (objectID, person_pose) in poses.items():
                if prev_objects is not None:
                    for (objectID_2, person_pose2) in prev_objects.items():
                        if objectID == objectID_2:
                            if person_pose2[1][0] != 0 and person_pose2[1][
                                    1] != 0 and person_pose[1][
                                        0] != 0 and person_pose[1][1] != 0:
                                if pose[1][0] == person_pose[1][0] and pose[
                                        1][1] == person_pose[1][1]:
                                    centroid_delta = []
                                    # Deltas are only meaningful if the
                                    # previous snapshot is recent enough.
                                    if previous_time is not None and float(
                                            current_time - previous_time
                                    ) < self.centroid_time_live:
                                        centroid_delta = [
                                            person_pose[1][0] -
                                            person_pose2[1][0],
                                            person_pose[1][1] -
                                            person_pose2[1][1]
                                        ]
                                        if self.video_out is not None or (
                                                self.file_params is not None
                                                and
                                                self.file_params['image']):
                                            cv2.arrowedLine(
                                                raw_image,
                                                (person_pose2[1][0],
                                                 person_pose2[1][1]),
                                                (person_pose[1][0],
                                                 person_pose[1][1]),
                                                (255, 255, 255), 2,
                                                cv2.LINE_AA)
                                    arm_delta = None
                                    # Keypoints 3, 4, 6, 7: arm joints whose
                                    # motion is tracked individually —
                                    # confirm against the keypoint schema.
                                    draw_arm_index = [3, 4, 6, 7]
                                    for i in draw_arm_index:
                                        delta = (0, 0)
                                        if person_pose2[i][
                                                0] == 0 or person_pose2[i][
                                                    1] == 0 or person_pose[
                                                        i][0] == 0 or person_pose[
                                                            i][1] == 0:
                                            delta = (0, 0)
                                        elif previous_time is not None and float(
                                                current_time - previous_time
                                        ) < self.centroid_time_live:
                                            delta = (person_pose[i][0] -
                                                     person_pose2[i][0],
                                                     person_pose[i][1] -
                                                     person_pose2[i][1])
                                            if self.video_out is not None or (
                                                    self.file_params
                                                    is not None and self.
                                                    file_params['image']):
                                                cv2.arrowedLine(
                                                    raw_image,
                                                    (person_pose2[i][0],
                                                     person_pose2[i][1]),
                                                    (person_pose[i][0],
                                                     person_pose[i][1]),
                                                    (255, 255, 0), 2,
                                                    cv2.LINE_AA)
                                        if arm_delta is None:
                                            arm_delta = [delta]
                                        else:
                                            arm_delta.append(delta)
                                    body['inference']['posture'][
                                        'centroidDelta'] = centroid_delta
                                    body['inference']['posture'][
                                        'armDelta'] = arm_delta
        # Refresh the stored snapshot once it is older than the
        # initialization window (or on the very first frame).
        if previous_time is None or float(
                current_time -
                previous_time) > self.centroid_initialize_time:
            self.state['prev_time'] = current_time
            self.state['prev_objects'] = poses.copy()
        frame_data['channel'] = self.channel
        frame_data['people'] = bodies
        if self.profile:
            print('json,%f' % json_time)
            print('featurization,%f' % featurization_time)
            print('thumbnail,%f' % thumbnail_time)
            print('interframe,%f' % interframe_time)
    except Exception as e:
        # Best-effort pipeline stage: log the traceback and fall through so
        # one bad frame does not kill the stream.
        traceback.print_exc(file=sys.stdout)
    return raw_image, frame_data
def encrypt_stream(
    encryption_material: SnowflakeFileEncryptionMaterial,
    src: IO[bytes],
    out: IO[bytes],
    chunk_size: int = 64 * kilobyte,  # block_size * 4 * 1024,
) -> EncryptionMetadata:
    """Reads content from src and write the encrypted content into out.

    This function is sensitive to current position of src and out.
    It does not seek to position 0 in neither stream objects before or after
    the encryption.

    Args:
        encryption_material: The encryption material for file.
        src: The input stream.
        out: The output stream.
        chunk_size: The size of read chunks (Default value = block_size * 4 * 1024).

    Returns:
        The encryption metadata.
    """
    logger = getLogger(__name__)
    # Two code paths: PyCryptodome-style AES by default, or the
    # `cryptography` package when SF_USE_OPENSSL_ONLY is set.
    use_openssl_only = os.getenv("SF_USE_OPENSSL_ONLY", "False") == "True"
    decoded_key = base64.standard_b64decode(
        encryption_material.query_stage_master_key)
    key_size = len(decoded_key)
    logger.debug("key_size = %s", key_size)

    # Generate key for data encryption
    iv_data = SnowflakeEncryptionUtil.get_secure_random(block_size)
    file_key = SnowflakeEncryptionUtil.get_secure_random(key_size)
    if not use_openssl_only:
        data_cipher = AES.new(key=file_key, mode=AES.MODE_CBC, IV=iv_data)
    else:
        backend = default_backend()
        cipher = Cipher(algorithms.AES(file_key), modes.CBC(iv_data),
                        backend=backend)
        encryptor = cipher.encryptor()

    # Encrypt the stream chunk by chunk (AES-CBC with PKCS5 padding).
    padded = False
    while True:
        chunk = src.read(chunk_size)
        if len(chunk) == 0:
            break
        elif len(chunk) % block_size != 0:
            # Only the final short chunk gets padded inside the loop.
            chunk = PKCS5_PAD(chunk, block_size)
            padded = True
        if not use_openssl_only:
            out.write(data_cipher.encrypt(chunk))
        else:
            out.write(encryptor.update(chunk))
    if not padded:
        # Input length was an exact multiple of the block size: PKCS5
        # requires a whole extra padding block.
        if not use_openssl_only:
            out.write(
                data_cipher.encrypt(block_size *
                                    chr(block_size).encode(UTF8)))
        else:
            out.write(
                encryptor.update(block_size * chr(block_size).encode(UTF8)))
    if use_openssl_only:
        out.write(encryptor.finalize())

    # encrypt key with QRMK (the per-file key is wrapped with AES-ECB under
    # the decoded query-stage master key)
    if not use_openssl_only:
        key_cipher = AES.new(key=decoded_key, mode=AES.MODE_ECB)
        enc_kek = key_cipher.encrypt(PKCS5_PAD(file_key, block_size))
    else:
        cipher = Cipher(algorithms.AES(decoded_key), modes.ECB(),
                        backend=backend)
        encryptor = cipher.encryptor()
        enc_kek = (encryptor.update(PKCS5_PAD(file_key, block_size)) +
                   encryptor.finalize())

    mat_desc = MaterialDescriptor(
        smk_id=encryption_material.smk_id,
        query_id=encryption_material.query_id,
        key_size=key_size * 8,
    )
    metadata = EncryptionMetadata(
        key=base64.b64encode(enc_kek).decode("utf-8"),
        iv=base64.b64encode(iv_data).decode("utf-8"),
        matdesc=matdesc_to_unicode(mat_desc),
    )
    return metadata
def from_base64(cls, s: str) -> raw:
    """Build a raw value from its Base64 UTF-8 string representation."""
    decoded = base64.standard_b64decode(s)
    return bytes.__new__(cls, decoded)