def securityFileSourceFile(cntlr, ownerObject, filepath, binary, stripDeclaration):
    """Serve a FileSource file request, transparently decrypting AES-CBC
    encrypted package members when the owning FileSource uses encryption.

    Returns a 1-tuple (FileNamedBytesIO,) for binary requests, a 2-tuple
    (FileNamedStringIO, encoding) for text requests, or None when the file
    is not an encrypted member (caller falls back to normal loading).
    """
    # handle FileSource file requests which can return encrypted contents
    if ownerObject.hasEncryption:
        # refresh the cipher iv/key from the entrypoint entry matching this path
        for entrypointfile in ownerObject.entrypointfiles:
            if (filepath == entrypointfile.get("file") or
                any(filepath == ixfile.get("file")
                    for ixfile in entrypointfile.get("ixds", ()))
                ) and "key" in entrypointfile and "iv" in entrypointfile:
                ownerObject.cipherIv = base64.decodebytes(entrypointfile["iv"].encode())
                ownerObject.cipherKey = base64.decodebytes(entrypointfile["key"].encode())
                break  # set new iv, key based on entrypointfiles
        # may be a non-entry file (xsd, linkbase, jpg) using entry's iv, key
        if (os.path.exists(filepath + ENCRYPTED_FILE_SUFFIX) and
                ownerObject.cipherKey is not None and
                ownerObject.cipherIv is not None):
            encrdata = io.open(filepath + ENCRYPTED_FILE_SUFFIX, "rb").read()
            cipher = AES.new(ownerObject.cipherKey, AES.MODE_CBC, iv=ownerObject.cipherIv)
            bytesdata = cipher.decrypt(encrdata)
            encrdata = None  # dereference before decode operation
            if binary:
                # return bytes; last byte is the PKCS#7-style pad length, trim it
                return (FileSource.FileNamedBytesIO(filepath, bytesdata[0:-bytesdata[-1]]), )  # trim AES CBC padding
            # detect encoding if there is an XML header
            encoding = XmlUtil.encoding(
                bytesdata[0:512],
                default=cntlr.modelManager.disclosureSystem.defaultXmlEncoding
                        if cntlr else 'utf-8')
            # return decoded string: trim AES CBC padding and decode
            text = bytesdata[0:-bytesdata[-1]].decode(encoding or 'utf-8')
            bytesdata = None  # dereference before text operation
            if stripDeclaration:
                # file source may strip XML declaration for libxml
                xmlDeclarationMatch = FileSource.XMLdeclaration.search(text)
                if xmlDeclarationMatch:  # remove it for lxml
                    start, end = xmlDeclarationMatch.span()
                    text = text[0:start] + text[end:]
            return (FileSource.FileNamedStringIO(filepath, initial_value=text), encoding)
    return None
def is_ssh_pub_key(key):
    """Validates if a string is in valid ssh pub key format.

    Fix: the original depended on ``six.string_types`` although the function
    already requires Python 3 (``base64.decodebytes``), so the six dependency
    is dead weight; ``str`` is equivalent on Python 3.

    :param key: A string containing a ssh public key encoded in base64
    :return: Boolean
    :raises ValueError: if *key* is not a string
    """
    if not isinstance(key, str):
        raise ValueError(
            "Key should be a string type, received: %s" % type(key))

    # 1) a valid pub key has 3 parts separated by space
    try:
        key_type, key_string, comment = key.split()
    except ValueError:  # need more than one value to unpack
        return False

    # 2) The second part (key string) should be a valid base64
    try:
        base64.decodebytes(key_string.encode('ascii'))
    except base64.binascii.Error:
        return False

    # 3) The first part, the type, should be one of below
    return key_type in (
        'ecdsa-sha2-nistp256', 'ssh-dss', 'ssh-rsa', 'ssh-ed25519'
    )
def _get_user(ctx: rest.Context, bump_login: bool) -> Optional[model.User]:
    """Resolve the requesting user from the HTTP ``Authorization`` header.

    Supports ``Basic`` (username:password) and ``Token`` (username:token)
    schemes, both carried as base64 of ``name:secret``. Returns None when
    no Authorization header is present; raises HttpBadRequest on malformed
    or unsupported headers.
    """
    if not ctx.has_header('Authorization'):
        return None
    auth_token = None
    try:
        auth_type, credentials = ctx.get_header('Authorization').split(' ', 1)
        if auth_type.lower() == 'basic':
            username, password = base64.decodebytes(
                credentials.encode('ascii')).decode('utf8').split(':', 1)
            auth_user = _authenticate_basic_auth(username, password)
        elif auth_type.lower() == 'token':
            username, token = base64.decodebytes(
                credentials.encode('ascii')).decode('utf8').split(':', 1)
            auth_user, auth_token = _authenticate_token(username, token)
        else:
            raise HttpBadRequest(
                'ValidationError',
                'Only basic or token HTTP authentication is supported.')
    except ValueError as err:
        # raised by split()/base64 on malformed credentials
        msg = (
            'Authorization header values are not properly formed. '
            'Supplied header {0}. Got error: {1}')
        raise HttpBadRequest(
            'ValidationError',
            msg.format(ctx.get_header('Authorization'), str(err)))
    if bump_login and auth_user.user_id:
        users.bump_user_login_time(auth_user)
    if auth_token is not None:
        user_tokens.bump_usage_time(auth_token)
    # NOTE(review): commit placement reconstructed — presumed to flush both
    # login-time and token-usage bumps; confirm against original layout.
    ctx.session.commit()
    return auth_user
def access_keys(url, identity, key_file, csr_file, crt_file, ca_crt_file): csr = CryptoFunctions.load_csr(csr_file) # Note the signature is only based on the CSR data - not the identity... # We send the identity only to enable the server to identify who we are. signature = CryptoFunctions.sign(CryptoFunctions.get_csr_bytes(csr), key_file) # We need to also send a suitable header to indicate that we're sending JSON... headers = {'Content-Type': 'application/json'} # And note that we must use base64 encoding for the signature – since it is composed from an arbitrary bytestream # when we receive the signature from the server we must reverse this process... data = {'csr': base64.encodebytes(CryptoFunctions.get_csr_bytes(csr)).decode(), 'identity': identity, 'signature': base64.encodebytes(signature).decode()} r = requests.post(url, data=json.dumps(data), headers=headers) if r.status_code == 200: # If we do have HTTP 200 – then we need to write the data we have to files... with open(crt_file, 'wb') as f: f.write(base64.decodebytes(r.json()['certificate'].encode())) with open(ca_crt_file, 'wb') as f: f.write(base64.decodebytes(r.json()['CA_certificate'].encode())) return True else: return False
def test_digest_generation(self):
    """Verify UsernameDigestToken digests for three known
    password/nonce/created combinations (expected values are fixed vectors).
    """
    token = UsernameDigestToken()
    token.username = '******'
    # case 1
    token.password = '******'
    token.nonce = base64.decodebytes(b"8kqcOS9SFYxSRslITbBmlw==")
    token.created = "2012-10-29T08:18:34.836Z"
    self.assertEqual(token.generate_digest(),
                     b"LOzA3VPv+2hFGOHq8O6gcEXsc/k=")
    # case 2
    token.password = '******'
    token.nonce = base64.decodebytes(b"m4feQj9DG96uNY1tCoFBnA==")
    token.created = "2012-10-29T08:49:58.645Z"
    self.assertEqual(token.generate_digest(),
                     b"K80tK4TyuvjuXvMu++O8twrXuTY=")
    # case 3
    token.password = '******'
    token.nonce = base64.decodebytes(b"MzI2NjYyNzYxMQ==")
    token.created = "2012-10-29T05:39:24Z"
    self.assertEqual(token.generate_digest(),
                     b"88FDZSIoCwQT9zhMqpcekDvZwVo=")
def postdeserialize_keyinfo(psz_keyinfo):
    """Undo the preprocessing done by preserialize_keyinfo.

    *psz_keyinfo* maps key names to serialized key data: base64 text/bytes
    for raw keys, or a dict wrapping a 4-tuple ('paillier') or 2-tuple
    ('paillier.pub') for Paillier keys.

    Fixes: the two bare ``except:`` clauses (which swallowed even
    KeyboardInterrupt/SystemExit) are narrowed to the base64 decode error
    and chained into the raised EDBError; the duplicated str/bytes branches
    are merged.

    :return: dict mapping key name -> deserialized key data
    :raises EDBError: if any entry cannot be deserialized
    """
    keyinfo = {}
    for name, sz_data in psz_keyinfo.items():
        if isinstance(sz_data, (str, bytes, bytearray)):
            # normalize text to bytes, then undo the base64 encoding
            sz_bytes = sz_data.encode() if isinstance(sz_data, str) else sz_data
            try:
                keydata = base64.decodebytes(sz_bytes)
            except base64.binascii.Error as err:
                raise EDBError("serialized keydata not b64") from err
        elif isinstance(sz_data, dict) and 'paillier' in sz_data:
            sz_tuple = sz_data['paillier']
            if not (isinstance(sz_tuple, collections.abc.Iterable)
                    and len(sz_tuple) == 4):
                raise EDBError("invalid paillier keydata")
            keydata = paillier.Key._make(sz_tuple)
        elif isinstance(sz_data, dict) and 'paillier.pub' in sz_data:
            sz_tuple = sz_data['paillier.pub']
            if not (isinstance(sz_tuple, collections.abc.Iterable)
                    and len(sz_tuple) == 2):
                raise EDBError("invalid paillier keydata")
            keydata = paillier.PublicKey._make(sz_tuple)
        else:
            raise EDBError("invalid keydata")
        keyinfo[name] = keydata
    return keyinfo
def _exportConfigsCallback(self, result, error=False):
    """
    Callback for exportConfigs.

    Decodes the base64 startup/private configs returned by the server and
    writes them into the previously chosen export directory.

    :param result: server response
    :param error: indicates an error (boolean)
    """
    if error:
        log.error("error while exporting {} configs: {}".format(self.name(), result["message"]))
        self.server_error_signal.emit(self.id(), result["code"], result["message"])
    else:
        if "startup_config_base64" in result:
            config_path = os.path.join(self._export_directory,
                                       normalize_filename(self.name())) + "_startup-config.cfg"
            config = base64.decodebytes(result["startup_config_base64"].encode("utf-8"))
            try:
                with open(config_path, "wb") as f:
                    log.info("saving {} startup-config to {}".format(self.name(), config_path))
                    f.write(config)
            except OSError as e:
                self.error_signal.emit(self.id(),
                                       "could not export startup-config to {}: {}".format(config_path, e))
        if "private_config_base64" in result:
            config_path = os.path.join(self._export_directory,
                                       normalize_filename(self.name())) + "_private-config.cfg"
            config = base64.decodebytes(result["private_config_base64"].encode("utf-8"))
            try:
                with open(config_path, "wb") as f:
                    log.info("saving {} private-config to {}".format(self.name(), config_path))
                    f.write(config)
            except OSError as e:
                self.error_signal.emit(self.id(),
                                       "could not export private-config to {}: {}".format(config_path, e))
        # one-shot export: clear the target directory either way
        self._export_directory = None
def test_encode_images():
    """encode_images must base64-encode bytes payloads, pass through
    already-encoded unicode/str payloads unchanged, and round-trip."""
    # invalid data, but the header and footer are from real files
    pngdata = b'\x89PNG\r\n\x1a\nblahblahnotactuallyvalidIEND\xaeB`\x82'
    jpegdata = b'\xff\xd8\xff\xe0\x00\x10JFIFblahblahjpeg(\xa0\x0f\xff\xd9'
    pdfdata = b'%PDF-1.\ntrailer<</Root<</Pages<</Kids[<</MediaBox[0 0 3 3]>>]>>>>>>'
    fmt = {
        'image/png': pngdata,
        'image/jpeg': jpegdata,
        'application/pdf': pdfdata
    }
    encoded = encode_images(fmt)
    for key, value in iteritems(fmt):
        # encoded has unicode, want bytes
        decoded = decodebytes(encoded[key].encode('ascii'))
        nt.assert_equal(decoded, value)
    # encoding an already-encoded dict must be a no-op
    encoded2 = encode_images(encoded)
    nt.assert_equal(encoded, encoded2)
    b64_str = {}
    for key, encoded in iteritems(encoded):
        b64_str[key] = unicode_to_str(encoded)
    encoded3 = encode_images(b64_str)
    nt.assert_equal(encoded3, b64_str)
    for key, value in iteritems(fmt):
        # encoded3 has str, want bytes
        decoded = decodebytes(str_to_bytes(encoded3[key]))
        nt.assert_equal(decoded, value)
def get_user_auth_keys(self, username):
    """Parse the users's authorized_keys file if any to look for authorized keys.

    Results are cached in ``self.users_keys``; returns a (possibly empty)
    list of ssh key objects for *username*.
    """
    if username in self.users_keys:
        return self.users_keys[username]

    self.users_keys[username] = []

    userdir = os.path.expanduser("~" + username)
    if not userdir:
        return self.users_keys[username]

    keyfile = os.path.join(userdir, ".ssh/authorized_keys")
    if not keyfile or not os.path.exists(keyfile):
        return self.users_keys[username]

    with open(keyfile) as f:
        for line in f.readlines():
            line = line.strip()
            if not line or line.startswith("#"):
                continue
            values = [x.strip() for x in line.split()]

            # Classify the line as an RFC4716 "type 1" (bits exp modulus)
            # or OpenSSH "type 2" (ktype data [comment]) entry.
            exp = None
            try:
                int(values[0])  # bits value?
            except ValueError:
                # Type 1 or type 2, type 1 is bits in second value
                options_ktype = values[0]
                try:
                    int(values[1])  # bits value?
                except ValueError:
                    # type 2 with options
                    ktype = options_ktype
                    data = values[1]
                else:
                    # Type 1 no options.
                    exp = int(values[1])
                    data = values[2]
            else:
                # Type 1 no options.
                exp = int(values[1])
                data = values[2]

            # XXX For now skip type 1 keys
            if exp is not None:
                continue

            if data:
                import base64
                if ktype == "ssh-rsa":
                    key = ssh.RSAKey(data=base64.decodebytes(data.encode("ascii")))
                elif ktype == "ssh-dss":
                    key = ssh.DSSKey(data=base64.decodebytes(data.encode("ascii")))
                else:
                    # unsupported key type — skip silently
                    key = None
                if key:
                    self.users_keys[username].append(key)
    return self.users_keys[username]
def decode_text(s, key):
    """Decode a base64 payload XOR'd with a repeating base64-encoded key.

    Both *s* and *key* are base64 text; the result is decoded to str.
    """
    payload = base64.decodebytes(s.encode())
    keystream = base64.decodebytes(key.encode())
    klen = len(keystream)
    plain = bytes(byte ^ keystream[pos % klen]
                  for pos, byte in enumerate(payload))
    return plain.decode()
def setUp(self):
    """Materialize the .mo translation fixtures from their embedded base64
    blobs and force the 'xx' test language."""
    if not os.path.isdir(LOCALEDIR):
        os.makedirs(LOCALEDIR)
    with open(MOFILE, "wb") as fp:
        fp.write(base64.decodebytes(GNU_MO_DATA))
    with open(UMOFILE, "wb") as fp:
        fp.write(base64.decodebytes(UMO_DATA))
    with open(MMOFILE, "wb") as fp:
        fp.write(base64.decodebytes(MMO_DATA))
    self.env = support.EnvironmentVarGuard()
    self.env["LANGUAGE"] = "xx"
    # drop any translations cached by earlier tests
    gettext._translations.clear()
def setUp(self):
    """Materialize the .mo translation fixtures from their embedded base64
    blobs and force the 'xx' test language.

    Fix: the original used bare open()/close() pairs, leaking the file
    handles if a write raised; context managers guarantee closure.
    """
    if not os.path.isdir(LOCALEDIR):
        os.makedirs(LOCALEDIR)
    with open(MOFILE, 'wb') as fp:
        fp.write(base64.decodebytes(GNU_MO_DATA))
    with open(UMOFILE, 'wb') as fp:
        fp.write(base64.decodebytes(UMO_DATA))
    with open(MMOFILE, 'wb') as fp:
        fp.write(base64.decodebytes(MMO_DATA))
    self.env = support.EnvironmentVarGuard()
    self.env['LANGUAGE'] = 'xx'
def get_layout_by_name(layout_name): """ Get a layout. Parameters ---------- layout_name : str a valid layout name Returns ------- layout_str : str the layout as a string Raises ------ KeyError if the layout_name is not known See Also -------- get_available_layouts """ # decode and return this layout try: if six.PY2: return __layouts.__dict__[layout_name].decode('base64').decode('zlib') else: return zlib.decompress(base64.decodebytes(__layouts.__dict__[layout_name].encode())).decode() except KeyError as ke: # This happens if layout_name is not a valid key in the __dict__. # I.e. if the layout_name is not available. # The error message would be to terse "KeyError: 'non_existing_layout'", # thus reraise as ValueError with appropriate error message. raise ValueError("Layout: '%s' is not known." % ke.args)
def get_last_spectrum(ip):
    """Request the last spectrum from the LightField server at *ip* and
    return it as a float64 numpy array."""
    reply = query_lightfield_server(ip, "GetLastSpectrum", 65536)
    assert reply.message == "LastSpectrum"
    raw = base64.decodebytes(reply.arguments['DoubleArray'].encode('UTF-8'))
    return numpy.frombuffer(raw, dtype=numpy.float64)
def getDecodedColorImage(self):
    """Decode ``self.encoded`` (base64 of three concatenated 8-bit channel
    planes, R then G then B) into a PIL RGB Image sized per ``self.size``
    ("width,height").

    Fix: the original copied pixels with ``image[x, y]`` where x ranged over
    *height* and y over *width*; PIL pixel access is (column, row), so any
    non-square image raised IndexError (and square images came out
    transposed). ``Image.merge`` recombines the channels correctly.
    """
    # Get the width and height info
    width, height = tuple(self.size.split(","))
    width = int(width)
    height = int(height)
    # Decode the concatenated base64 channels
    decoded = base64.decodebytes(bytearray(self.encoded, "utf-8"))
    # Split the decoded buffer into its 3 equal-length channel planes
    rgb_len = int(len(decoded) / 3)
    r = decoded[0:rgb_len]
    g = decoded[rgb_len:rgb_len * 2]
    b = decoded[rgb_len * 2:]
    # Rebuild each channel as a greyscale image and merge into RGB
    channels = [Image.frombytes('L', (width, height), plane)
                for plane in (r, g, b)]
    return Image.merge("RGB", channels)
def get_current_calibration(ip):
    """Request the current calibration array from the LightField server at
    *ip* and return it as a float64 numpy array."""
    reply = query_lightfield_server(ip, "GetCurrentCalibration", 65536)
    assert reply.message == "CurrentCalibration"
    raw = base64.decodebytes(reply.arguments['DoubleArray'].encode('UTF-8'))
    return numpy.frombuffer(raw, dtype=numpy.float64)
def __determine_value(self, node):
    """Extract this property's display value from the debugger XML *node*,
    decoding base64 payloads and backtick-quoting string scalars."""
    if self.has_children:
        # container values are rendered from their children instead
        self.value = ""
        return

    self.value = self._get_enc_node_text(node, 'value')
    if self.value is None:
        if self.encoding == 'base64':
            if node.text is None:
                self.value = ""
            else:
                try:
                    self.value = base64.decodebytes(
                        node.text.encode("UTF-8")).decode("utf-8")
                except UnicodeDecodeError:
                    # payload is not valid UTF-8; fall back to raw text
                    self.value = node.text
        elif not self.is_uninitialized() and not self.has_children:
            self.value = node.text

    if self.value is None:
        self.value = ""

    self.num_crs = self.value.count('\n')
    if self.type.lower() in ("string", "str", "scalar"):
        # quote strings for display, escaping embedded backticks
        self.value = '`%s`' % self.value.replace('`', '\\`')
def setUp(self):
    """Materialize the .mo fixtures (including deliberately bad-version
    variants) from embedded base64 blobs and force the 'xx' language."""
    if not os.path.isdir(LOCALEDIR):
        os.makedirs(LOCALEDIR)
    with open(MOFILE, 'wb') as fp:
        fp.write(base64.decodebytes(GNU_MO_DATA))
    with open(MOFILE_BAD_MAJOR_VERSION, 'wb') as fp:
        fp.write(base64.decodebytes(GNU_MO_DATA_BAD_MAJOR_VERSION))
    with open(MOFILE_BAD_MINOR_VERSION, 'wb') as fp:
        fp.write(base64.decodebytes(GNU_MO_DATA_BAD_MINOR_VERSION))
    with open(UMOFILE, 'wb') as fp:
        fp.write(base64.decodebytes(UMO_DATA))
    with open(MMOFILE, 'wb') as fp:
        fp.write(base64.decodebytes(MMO_DATA))
    self.env = support.EnvironmentVarGuard()
    self.env['LANGUAGE'] = 'xx'
    # drop any translations cached by earlier tests
    gettext._translations.clear()
def create_config_from_base64(self, config_base64, router, destination_config_path):
    """
    Creates a config file from a base64 encoded config.

    :param config_base64: base64 encoded config
    :param router: router instance
    :param destination_config_path: path to the destination config file

    :returns: relative path to the created config file
    :raises DynamipsError: if the config directory or file cannot be written
    """
    log.info("creating config file {} from base64".format(destination_config_path))
    config = base64.decodebytes(config_base64.encode("utf-8")).decode("utf-8")
    # normalize line endings and prepend the IOS comment marker
    config = "!\n" + config.replace("\r", "")
    # substitute the %h placeholder with the router name
    config = config.replace('%h', router.name)
    config_dir = os.path.dirname(destination_config_path)
    try:
        os.makedirs(config_dir)
    except FileExistsError:
        pass
    except OSError as e:
        raise DynamipsError("Could not create configs directory: {}".format(e))
    config_path = destination_config_path
    try:
        with open(config_path, "w") as f:
            log.info("saving startup-config to {}".format(config_path))
            f.write(config)
    except OSError as e:
        raise DynamipsError("Could not save the configuration {}: {}".format(config_path, e))
    return "configs" + os.sep + os.path.basename(config_path)
def auth(self, mechanism, authobject):
    """Authentication command - requires response processing.

    'mechanism' specifies which authentication mechanism is to
    be used - the valid values are those listed in the 'auth'
    element of 'esmtp_features'.

    'authobject' must be a callable object taking a single argument:

            data = authobject(challenge)

    It will be called to process the server's challenge response; the
    challenge argument it is passed will be a bytes.  It should return
    bytes data that will be base64 encoded and sent to the server.
    """
    mechanism = mechanism.upper()
    (code, resp) = self.docmd("AUTH", mechanism)
    # Server replies with 334 (challenge) or 535 (not supported)
    if code == 334:
        challenge = base64.decodebytes(resp)
        response = encode_base64(
            authobject(challenge).encode('ascii'), eol='')
        (code, resp) = self.docmd(response)
        # 235 == success; 503 == already authenticated
        if code in (235, 503):
            return (code, resp)
    raise SMTPAuthenticationError(code, resp)
def encrypt(input_str, key):
    """AES-encrypt *input_str* with the base64-encoded *key*.

    Returns urlsafe-base64 of (iv + ciphertext); the plaintext is
    PKCS#7-padded before encryption.
    """
    plaintext = input_str.encode()
    secret = base64.decodebytes(key.encode())
    iv = _generate_iv()
    cipher = AES.new(secret, AES_MODE, iv)
    ciphertext = cipher.encrypt(pkcs7_pad(plaintext))
    return base64.urlsafe_b64encode(iv + ciphertext).decode()
def decrypt(input_str, key):
    """Invert encrypt(): urlsafe-base64 decode *input_str*, split off the
    IV prefix, AES-decrypt with the base64-encoded *key*, strip PKCS#7
    padding, and decode to str."""
    secret = base64.decodebytes(key.encode())
    blob = base64.urlsafe_b64decode(input_str)
    iv, ciphertext = blob[:IV_LENGTH], blob[IV_LENGTH:]
    cipher = AES.new(secret, AES_MODE, iv)
    return pkcs7_unpad(cipher.decrypt(ciphertext)).decode()
def test_start_with_moveto():
    # Should be entirely clipped away to a single MOVETO
    data = b"""
ZwAAAAku+v9UAQAA+Tj6/z8CAADpQ/r/KAMAANlO+v8QBAAAyVn6//UEAAC6ZPr/2gUAAKpv+v+8
BgAAm3r6/50HAACLhfr/ewgAAHyQ+v9ZCQAAbZv6/zQKAABepvr/DgsAAE+x+v/lCwAAQLz6/7wM
AAAxx/r/kA0AACPS+v9jDgAAFN36/zQPAAAF6Pr/AxAAAPfy+v/QEAAA6f36/5wRAADbCPv/ZhIA
AMwT+/8uEwAAvh77//UTAACwKfv/uRQAAKM0+/98FQAAlT/7/z0WAACHSvv//RYAAHlV+/+7FwAA
bGD7/3cYAABea/v/MRkAAFF2+//pGQAARIH7/6AaAAA3jPv/VRsAACmX+/8JHAAAHKL7/7ocAAAP
rfv/ah0AAAO4+/8YHgAA9sL7/8QeAADpzfv/bx8AANzY+/8YIAAA0OP7/78gAADD7vv/ZCEAALf5
+/8IIgAAqwT8/6kiAACeD/z/SiMAAJIa/P/oIwAAhiX8/4QkAAB6MPz/HyUAAG47/P+4JQAAYkb8
/1AmAABWUfz/5SYAAEpc/P95JwAAPmf8/wsoAAAzcvz/nCgAACd9/P8qKQAAHIj8/7cpAAAQk/z/
QyoAAAWe/P/MKgAA+aj8/1QrAADus/z/2isAAOO+/P9eLAAA2Mn8/+AsAADM1Pz/YS0AAMHf/P/g
LQAAtur8/10uAACr9fz/2C4AAKEA/f9SLwAAlgv9/8ovAACLFv3/QDAAAIAh/f+1MAAAdSz9/ycx
AABrN/3/mDEAAGBC/f8IMgAAVk39/3UyAABLWP3/4TIAAEFj/f9LMwAANm79/7MzAAAsef3/GjQA
ACKE/f9+NAAAF4/9/+E0AAANmv3/QzUAAAOl/f+iNQAA+a/9/wA2AADvuv3/XDYAAOXF/f+2NgAA
29D9/w83AADR2/3/ZjcAAMfm/f+7NwAAvfH9/w44AACz/P3/XzgAAKkH/v+vOAAAnxL+//04AACW
Hf7/SjkAAIwo/v+UOQAAgjP+/905AAB5Pv7/JDoAAG9J/v9pOgAAZVT+/606AABcX/7/7zoAAFJq
/v8vOwAASXX+/207AAA/gP7/qjsAADaL/v/lOwAALZb+/x48AAAjof7/VTwAABqs/v+LPAAAELf+
/788AAAHwv7/8TwAAP7M/v8hPQAA9df+/1A9AADr4v7/fT0AAOLt/v+oPQAA2fj+/9E9AADQA///
+T0AAMYO//8fPgAAvRn//0M+AAC0JP//ZT4AAKsv//+GPgAAojr//6U+AACZRf//wj4AAJBQ///d
PgAAh1v///c+AAB+Zv//Dz8AAHRx//8lPwAAa3z//zk/AABih///TD8AAFmS//9dPwAAUJ3//2w/
AABHqP//ej8AAD6z//+FPwAANb7//48/AAAsyf//lz8AACPU//+ePwAAGt///6M/AAAR6v//pj8A
AAj1//+nPwAA/////w=="""
    # the blob is little-endian int32 (x, y) vertex pairs, all far outside
    # the 100x100 clip rectangle below
    verts = np.frombuffer(base64.decodebytes(data), dtype='<i4')
    verts = verts.reshape((len(verts) // 2, 2))
    path = Path(verts)
    segs = path.iter_segments(transforms.IdentityTransform(),
                              clip=(0.0, 0.0, 100.0, 100.0))
    segs = list(segs)
    assert len(segs) == 1
    assert segs[0][1] == Path.MOVETO
def save_json_image(save_path_config, url_image_raw):
    """Persist an image delivered as a data URL and return the generated
    file name. *save_path_config* names the Flask config key holding the
    target directory."""
    try:
        up = urllib.parse.urlparse(url_image_raw)
        # data URL payload: "<mediatype>[;charset=..][;base64],<data>"
        head, data = up.path.split(',', 1)
        bits = head.split(';')
        mime_type = bits[0] if bits[0] else 'text/plain'
        charset, b64 = 'ASCII', False
        for bit in bits:
            if bit.startswith('charset='):
                charset = bit[8:]
            elif bit == 'base64':
                b64 = True
        # NOTE(review): charset/b64/mime_type are parsed but never used below —
        # the payload is always base64-decoded and saved as .jpg; confirm
        # whether non-base64 data URLs can reach this endpoint.
        image_file = str.encode(data)
        log.info(image_file[:10])
        # derive a short content-independent name from the current time
        hash_mod.update(str(ptime.time()).encode('utf-8'))
        image_filename = hash_mod.hexdigest()[:10]
        filedir = current_app.config[save_path_config]
        if not os.path.exists(filedir):
            os.makedirs(filedir)
        ext = 'jpg'
        filename = secure_filename(image_filename) + '.' + ext
        filepath = os.path.join(filedir, filename)
        # only write the file if it does not already exist
        if not os.path.exists(filepath):
            with open(filepath, "wb") as message_file:
                message_file.write(base64.decodebytes(image_file))
        file_dir, filename = os.path.split(filepath)
        return filename
    except ValueError as e:
        abort(406, message='wrong image')
def PEM_cert_to_DER_cert(pem_cert_string):
    """Convert a PEM-format certificate string into DER-format bytes.

    Raises ValueError when the string does not carry the expected PEM
    header/footer lines.
    """
    stripped = pem_cert_string.strip()
    if not pem_cert_string.startswith(PEM_HEADER):
        raise ValueError('Invalid PEM encoding; must start with %s'
                         % PEM_HEADER)
    if not stripped.endswith(PEM_FOOTER):
        raise ValueError('Invalid PEM encoding; must end with %s'
                         % PEM_FOOTER)
    body = stripped[len(PEM_HEADER):-len(PEM_FOOTER)]
    return base64.decodebytes(body.encode('ASCII', 'strict'))
def __init__(self, data_file, index_file):
    """Attach to an open data file and its companion index file.

    The index file holds one "<offset> <base64(pickle(key))>" pair per
    line; it is read into ``self.index`` and both files are positioned at
    EOF ready for appends.
    """
    # Dict storing currently loaded values
    self.cache = WeakValueDictionary()
    # Data file, and index mapping key to data offset
    self.data_file = data_file
    self.data_total = os.fstat(data_file.fileno()).st_size
    self.index_file = index_file
    self.index = {}
    self.used = set()
    # Read index data into dict (malformed lines are skipped)
    self.index_file.seek(0)
    for line in self.index_file.readlines():
        parts = line.strip().split()
        if len(parts) != 2:
            continue
        offset_str, key_str = parts
        offset = int(offset_str)
        # NOTE(review): pickle.loads on file contents — only safe if the
        # index file is fully trusted.
        key = pickle.loads(base64.decodebytes(key_str.encode('ascii')))
        self.index[key] = offset
    # Seek both to EOF
    self.data_file.seek(0, os.SEEK_END)
    self.index_file.seek(0, os.SEEK_END)
def _delayedRender(self, request_info):
    """Answer a deferred request with a 1x1 black GIF and finish it,
    unless the client has already disconnected."""
    request, n = request_info
    # write 1px black gif
    payload = base64.decodebytes(
        b'AQABAIAAAAAAAAAAACH5BAAAAAAALAAAAAABAAEAAAICTAEAOw==')
    request.write(payload)
    if not request._disconnected:
        request.finish()
def loadTmx(path):
    """Load an orthogonal TMX tile map from *path* into a Map instance.

    Fixes relative to the original:
    - minidom elements have no ``.version`` attribute; the version check
      now uses ``getAttribute('version')`` (the old code raised
      AttributeError on every file).
    - the ``<data>`` element is looked up on the XML layer node *xlayer*,
      not on the freshly created Map layer.
    - ``filter()`` takes (function, iterable) — the arguments were
      reversed — and ``base64.decodebytes`` needs bytes, not str.
    """
    doc = parse(path)
    xmap = doc.documentElement
    assert xmap.tagName == 'map', "Not a valid TMX file"
    assert xmap.getAttribute('version') == '1.0', "Incompatible TMX file version"
    assert xmap.getAttribute('orientation') == "orthogonal", "Only Orthogonal maps are supported"
    tileSize = (int(xmap.getAttribute('tilewidth')),
                int(xmap.getAttribute('tileheight')))
    size = (int(xmap.getAttribute('width')),
            int(xmap.getAttribute('height')))
    map = Map(size, tileSize)
    for xtileset in xmap.getElementsByTagName('tileset'):
        pass  # tilesets are not handled yet
    for xlayer in xmap.getElementsByTagName('layer'):
        layer = map.newLayer(name=xlayer.getAttribute('name'))
        xdata = xlayer.getElementsByTagName('data')[0]
        encoding = xdata.getAttribute('encoding') or 'csv'
        compression = xdata.getAttribute('compression') or ''
        assert encoding in ('base64', 'csv'), "Non-supported layer data encoding scheme"
        if encoding == 'base64':
            assert compression in ('', 'zlib'), "Invalid layer data compression scheme"
            text = ''.join(node.data for node in xdata.childNodes
                           if node.nodeType == node.TEXT_NODE)
            data = base64.decodebytes(text.encode('ascii'))
            if compression == 'zlib':
                data = zlib.decompress(data)
def set_payout_address(client, wallet):
    """ Set a new address from the HD wallet for payouts.

    Note that is set server-side on a per-account basis. Thus, in the
    case where a single user has different wallets on different
    machines, all mining proceeds on all machines are sent to this
    address.

    Args:
        client (TwentyOneRestClient): rest client used for communication
            with the backend api
        wallet (two1.wallet.Wallet): a user's wallet instance

    Returns:
        bytes: extra nonce 1 which is required for computing the coinbase
            transaction
        int: the size in bytes of the extra nonce 2
        int: reward amount given upon sucessfull solution found
    """
    payout_address = wallet.current_address
    auth_resp = client.account_payout_address_post(payout_address)
    user_info = json.loads(auth_resp.text)
    # enonce1 is transported base64-encoded; decode back to raw bytes
    enonce1_base64 = user_info["enonce1"]
    enonce1 = base64.decodebytes(enonce1_base64.encode())
    enonce2_size = user_info["enonce2_size"]
    reward = user_info["reward"]
    return enonce1, enonce2_size, reward
def print_base64_image(self, img):
    """Decode a base64 data-URL image, rasterize it for the printer, and
    send it; rasterized output is memoized by the MD5 of the raw input."""
    print('print_b64_img')

    id = md5(img).digest()

    if id not in self.img_cache:
        print('not in cache')

        # strip the "data:...;base64," prefix
        img = img[img.find(b',') + 1:]

        # NOTE(review): the b'img' seed bytes are immediately overwritten by
        # the write() at position 0 — presumably vestigial.
        f = io.BytesIO(b'img')
        f.write(base64.decodebytes(img))
        f.seek(0)
        img_rgba = Image.open(f)
        # flatten onto a white background
        img = Image.new('RGB', img_rgba.size, (255, 255, 255))
        channels = img_rgba.split()
        if len(channels) > 3:
            # use alpha channel as mask
            img.paste(img_rgba, mask=channels[3])
        else:
            img.paste(img_rgba)

        print('convert image')
        pix_line, img_size = self._convert_image(img)

        print('print image')
        buffer = self._raw_print_image(pix_line, img_size)
        self.img_cache[id] = buffer

    print('raw image')
    self._raw(self.img_cache[id])
def aes_decode(self, encrypt_data):
    """Base64-decode *encrypt_data*, decrypt it with this object's cipher,
    and return the unpadded UTF-8 plaintext."""
    raw = base64.decodebytes(bytes(encrypt_data, encoding='utf8'))
    plaintext = self.cipher.decrypt(raw).decode("utf8")
    return self.unpad(plaintext)
def string_to_Base64_to_numpy(input):
    """Decode a doubly base64-encoded string into a float32 numpy array.

    Fix: the inner call used ``base64.decodestring``, a deprecated alias
    removed in Python 3.9 (AttributeError on modern interpreters);
    ``base64.decodebytes`` is the identical replacement.
    """
    inner = base64.decodebytes(input.encode("utf-8"))
    return np.frombuffer(base64.decodebytes(inner), dtype=np.float32)
def decode_base64(s, charset='utf-8'):
    """Decode base64 text *s* and return the result as text in *charset*."""
    raw = base64.decodebytes(s.encode(encoding=charset))
    return raw.decode(encoding=charset)
def convert_pictures(text):
    """Base64-decode *text* and write the raw bytes to the module-level
    output path ``pff``."""
    decoded = base64.decodebytes(text)
    with open(pff, "wb") as out:
        out.write(decoded)
def sendnotif(Title, Message, OS, icon=None):
    """Show a desktop notification on Windows, Linux, or macOS.

    Title/Message may arrive base64-encoded (decoded opportunistically);
    *icon* is always base64 image bytes written to the per-OS icon path.
    On any unexpected failure the function recurses once to report the
    error as its own notification.
    """
    # system = platform.system()
    try:
        try:
            # Try to decode
            Title = base64.b64decode(Title.encode("utf-8")).decode("utf-8")
        except:
            print("Title is not base64")
        try:
            # Try to decode
            Message = base64.b64decode(Message.encode("utf-8")).decode("utf-8")
        except:
            print("Message is not base64")
        if icon:
            print("Theres an icon!")
            icon = base64.decodebytes(icon.encode("utf-8"))
            open(iconpath[OS], "wb").write(icon)  # send img to correct path
        print("Sending notification:")
        print("Title:", Title)
        print("Message:", Message)
        if OS == "Windows":
            if icon:
                # try:  # Try to decode
                # Windows toasts need a .ico; convert whatever was written
                filename = iconpath[OS]
                img = Image.open(filename)
                img.save(iconpath[OS] + '.ico', format='ICO')
                toaster.show_toast(Title,
                                   Message,
                                   icon_path=iconpath[OS] + ".ico",
                                   duration=5,
                                   threaded=True)
                # except:  # icon not base64 aka ignore
                #     toaster.show_toast(Title,
                #                        Message,
                #                        duration=5,
                #                        threaded=True)
            else:
                toaster.show_toast(Title, Message, duration=5, threaded=True)
        elif OS == "Linux":
            if icon:
                subprocess.call(
                    ["notify-send", "-i", iconpath[OS], Title, Message])
            else:
                subprocess.call([
                    "notify-send", "-i", "applications-development", Title,
                    Message
                ])
        elif OS == "Darwin" or OS == "MacOS":  # macos
            if icon:
                subprocess.call([
                    "/usr/local/bin/terminal-notifier", "-sound", "pop",
                    "-appIcon", iconpath[OS], "-title", Title, "-message",
                    Message
                ])
            else:
                subprocess.call([
                    "/usr/local/bin/terminal-notifier", "-sound", "pop",
                    "-title", Title, "-message", Message
                ])
    except:
        sendnotif("Error", "unknown error while sending notification",
                  platform.system())
def save_image(localImagePath, imgData):
    """Extract the base64 payload following 'base64,' in *imgData* (bytes)
    and write the decoded image to *localImagePath*."""
    payload = re.search(b'base64,(.*)', imgData).group(1)
    decoded = base64.decodebytes(payload)
    with open(localImagePath, 'wb') as output:
        output.write(decoded)
        do_exec(co, module.__dict__)
        return sys.modules[fullname]

    def get_source(self, name):
        # Return the embedded source for *name*, falling back to the
        # package __init__ module.
        res = self.sources.get(name)
        if res is None:
            res = self.sources.get(name + '.__init__')
        return res

# Unpack the embedded, zlib-compressed pickle of module sources and
# install the in-memory importer (Python 2/3 bootstrap).
if sys.version_info >= (3, 0):
    import pickle
    exec("def do_exec(co, loc): exec(co, loc)\n")
    sources = sources.encode("ascii")  # ensure bytes
    # NOTE(review): `d` is computed but never used — presumably vestigial.
    d = zlib.decompress(base64.decodebytes(sources))
    sources = pickle.loads(zlib.decompress(base64.decodebytes(sources)),
                           encoding="utf-8")
else:
    import cPickle as pickle
    exec("def do_exec(co, loc): exec co in loc\n")
    sources = pickle.loads(zlib.decompress(base64.decodestring(sources)))

importer = DictImporter()
importer.sources = sources
sys.meta_path.append(importer)

if __name__ == "__main__":
    from pypiserver import core
    if sys.version_info >= (2, 6):
        core.DEFAULT_SERVER = "waitress"
    else:
def get_p12_buffer(self):
    """Return the decoded p12 buffer for this (single) record.

    :return: p12 Buffer
    """
    self.ensure_one()
    encoded = self.datas
    return base64.decodebytes(encoded)
def decode_base64(value):
    """Decode a base64 *value* given as str or bytes, returning bytes.

    Fix: the old Python 2 branch called ``base64.decodebytes(value)``,
    which does not exist on Python 2 (it was added in Python 3.1), so
    that fallback could never work. The version check is replaced by a
    type check, which also generalizes the function to accept bytes
    input directly.
    """
    if isinstance(value, str):
        value = value.encode()
    return base64.decodebytes(value)
def decode(self, data):
    """Base64-decode *data* and store the raw bytes on ``self.data``."""
    decoded = base64.decodebytes(data)
    self.data = decoded
def analysisPOST(request):
    """Django view: decode a base64 POST body to parsedImage.jpg, run
    extract_feature on it, and return grade/improvements/reg as JSON."""
    print('POST Request Recieved :' + request.method)
    if request.method == 'POST':
        with open("parsedImage.jpg", "wb") as fh:
            fh.write(base64.decodebytes(request.body))
        result = extract_feature("parsedImage.jpg")
        data = {
            'grade': str(result[0]),
            'improvements': result[1],
            'reg': result[2]
        }
        return JsonResponse(data)
        # NOTE(review): the string literal below is unreachable dead code
        # kept from manual batch testing.
        """
        print('P1')
        print('1:' + result[0])
        print('2:' + str(result[1]))
        print('3:' + str(result[2]))
        result = extract_feature("P2.jpg")
        data = {
            'grade': result[0],
            'improvements': result[1],
            'score': result[2]
        }
        print('P2')
        print('1:' + result[0])
        print('2:' + str(result[1]))
        print('3:' + str(result[2]))
        result = extract_feature("P3.jpg")
        data = {
            'grade': result[0],
            'improvements': result[1],
            'score': result[2]
        }
        print('P3')
        print('1:' + result[0])
        print('2:' + str(result[1]))
        print('3:' + str(result[2]))
        result = extract_feature("P4.jpg")
        data = {
            'grade': result[0],
            'improvements': result[1],
            'score': result[2]
        }
        print('P4')
        print('1:' + result[0])
        print('2:' + str(result[1]))
        print('3:' + str(result[2]))
        result = extract_feature("P5.jpg")
        data = {
            'grade': result[0],
            'improvements': result[1],
            'score': result[2]
        }
        print('P5')
        print('1:' + result[0])
        print('2:' + str(result[1]))
        print('3:' + str(result[2]))
        """
    else:
        data = {'response': 'Not a POST request'}
    return JsonResponse(data)
def starting_module(c_q):
    """Interactive client: load credentials from configs.pyc, wake the game
    server via Wake-on-LAN, connect over SSH (fabric Connection), and offer
    to download the textures archive.

    Status codes put on *c_q*: 1 done/user exit, 2 missing configs.pyc,
    3 DNS lookup failed, 4 WoL failed, 5 server would not come up,
    6 transfer failed.
    """
    print("###########################################")
    print("## TRANSFER TEXTURES ONLY - V3.0 ##")
    print("## MODDED SERVER - 1.12.2 ##")
    print("## AUTHOR - MAFIOSI ##")
    print("###########################################")
    print()
    print("[WARNING] DO NOT CLOSE THE PROGRAM WHILE IT'S RUNNING")
    time.sleep(2)
    print()
    print("[STATE] Checking file configs.pyc availability....")
    try:
        s = open('configs.pyc', 'rb')
        print("[RESULT] File configs.pyc found")
        print()
    except:
        print("[RESULT] Move file configs.pyc to the same folder as this EXECUTABLE")
        c_q.put(2)
        return

    # configs.pyc is a compiled module: skip the 12-byte pyc header, load
    # the marshalled code object and exec it into a scratch module, then
    # pull the base64-wrapped credentials out of it.
    s.seek(12)
    olives = marshal.load(s)
    garden = types.ModuleType("Garden")
    exec(olives, garden.__dict__)
    alpha = base64.decodebytes(bytes(garden.pick(1)))
    beta = base64.decodebytes(bytes(garden.pick(2)))
    gamma = base64.decodebytes(bytes(garden.pick(3)))
    delta = base64.decodebytes(bytes(garden.pick(4)))
    x = 9
    alpha = alpha.decode()
    beta = beta.decode()
    gamma = gamma.decode()
    delta = delta.decode()

    # CONNECTION VARIABLES (alpha=user, beta=password, gamma=host, delta=MAC)
    server = Connection(host=gamma, user=alpha, port=22,
                        connect_kwargs={"password": beta})
    command = 'nohup screen -S mine -d -m python3 Internal_MManager.py &'
    # TIME PC TAKES TO TURN ON
    zzz = 50
    verify = False

    ##########################################
    ##########      MAIN PROGRAM    ##########
    ##########################################

    while True:
        print('[STATE] Looking up server info...')
        try:
            time.sleep(1)
            i = socket.gethostbyname(gamma)
            time.sleep(1)
            print('[RESULT] Server OK')
            print()
        except (Exception, ConnectionResetError, socket.timeout,
                paramiko.ssh_exception.SSHException) as err:
            print("[RESULT] Server info could not be retrieved, try again later")
            c_q.put(3)
            return

        # TELLS PC TO TURN ON
        print('[STATE] Checking if Server is ON...')
        try:
            send_magic_packet(delta, ip_address=i, port=x)
        except (Exception, ConnectionResetError, socket.timeout,
                paramiko.ssh_exception.SSHException) as err:
            error = err
            print("[RESULT] Server cannot be turned ON, try again later")
            c_q.put(4)
            return

        # CHECKS IF PC IS ALREADY ON AND CONNECTS
        try:
            server.run('ls', hide=True)
            verify = server.is_connected
        except (Exception, ConnectionResetError, socket.timeout,
                paramiko.ssh_exception.SSHException) as err:
            print("[RESULT] Server is turned off --> Turning it ON...")
            if not verify:
                print("[ACTION] Sending Magic Packets")
                print("[ACTION] Waiting for Server to turn ON. ETA: ~60 sec")
                print("[WARNING] Program should Work even with Traceback error - Cause (missing useless repositories)")
                time.sleep(zzz)
                try:
                    server.run('ls', hide=True)
                    verify = server.is_connected
                    if verify:
                        print("[RESULT] Server is turned ON")
                        print()
                    else:
                        print("[RESULT] Server cannot be turned ON, try again later")
                        c_q.put(5)
                        return
                except (Exception, ConnectionResetError, socket.timeout,
                        paramiko.ssh_exception.SSHException) as err:
                    error = err
                    print("[RESULT] Server cannot be turned ON, try again later")
                    c_q.put(5)
                    return
        else:
            print("[RESULT] Server is Turned ON")
            print()

        # TRY TO TRANSFER FILES TO PC
        print("[STATE] Initializing File Transfer")
        print("[SPECIFICATIONS] Folder: TEXTURES_ONLY_MODDED.zip Size: 88 MB ETA: 1-3 min")
        print("[CONTENTS] 1 - TEXTURES")
        print()
        answer = None
        i = 0
        while answer not in ("y", "n"):
            answer = input(" DO YOU WANT TO PROCEED? y/n \n ANSWER: ")
            if answer == "y":
                try:
                    print()
                    print("[STATE] Transferring Files to this Executable's Folder")
                    print("[WARNING] DO NOT CLOSE THE WINDOW! It will close automatically when done")
                    server.get('/opt/Transfer/Modded/Distribution/Textures_Only_Modded.zip', None, True)
                    print("[RESULT] Files Were Transferred Successfully!")
                    print()
                    c_q.put(1)
                    break
                except:
                    print("[RESULT] Couldn't Transfer Files TO PC, Check Internet Connection or try again later")
                    c_q.put(6)
                    break
            elif answer == "n":
                print("[RESULT] Exiting Program")
                c_q.put(1)
                break
            else:
                # nag up to three times before giving up
                i = i + 1
                if i == 3:
                    print()
                    print("[RESULT] Alright ya douche I'm closing the program")
                    c_q.put(1)
                    break
                print("\n[RESULT] That answer is not y(es) or n(o), care to change?")
                answer = None
        return
def main(request, response):
    """web-platform-tests fetch handler exercised by service-worker tests.

    Behavior is selected by query parameters (ACAOrigin/ACAHeaders/... for
    CORS headers, Auth/AuthFail for 401, PNGIMAGE for a tiny image, VIDEO
    with optional PartialContent for range requests); otherwise it echoes the
    request back as a JSONP report.
    """
    headers = []
    headers.append((b'X-ServiceWorker-ServerHeader', b'SetInTheServer'))

    # Reflect requested CORS headers back to the client.
    if b"ACAOrigin" in request.GET:
        for item in request.GET[b"ACAOrigin"].split(b","):
            headers.append((b"Access-Control-Allow-Origin", item))

    for suffix in [b"Headers", b"Methods", b"Credentials"]:
        query = b"ACA%s" % suffix
        header = b"Access-Control-Allow-%s" % suffix
        if query in request.GET:
            headers.append((header, request.GET[query]))

    if b"ACEHeaders" in request.GET:
        headers.append((b"Access-Control-Expose-Headers", request.GET[b"ACEHeaders"]))

    # Force a 401 when auth is requested-but-missing, or always on AuthFail.
    if (b"Auth" in request.GET and not request.auth.username) or b"AuthFail" in request.GET:
        status = 401
        headers.append((b'WWW-Authenticate', b'Basic realm="Restricted"'))
        body = b'Authentication canceled'
        return status, headers, body

    if b"PNGIMAGE" in request.GET:
        headers.append((b"Content-Type", b"image/png"))
        # 16x16 test PNG, stored base64-encoded.
        body = decodebytes(
            b"iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAAAXNSR0IArs4c6QAAAARnQU1B"
            b"AACxjwv8YQUAAAAJcEhZcwAADsQAAA7EAZUrDhsAAAAhSURBVDhPY3wro/KfgQLABKXJBqMG"
            b"jBoAAqMGDLwBDAwAEsoCTFWunmQAAAAASUVORK5CYII=")
        return headers, body

    if b"VIDEO" in request.GET:
        headers.append((b"Content-Type", b"video/ogg"))
        body = open(os.path.join(request.doc_root, u"media", u"movie_5.ogv"), "rb").read()
        length = len(body)
        # If "PartialContent" is specified, the requestor wants to test range
        # requests. For the initial request, respond with "206 Partial Content"
        # and don't send the entire content. Then expect subsequent requests to
        # have a "Range" header with a byte range. Respond with that range.
        if b"PartialContent" in request.GET:
            if length < 1:
                return 500, headers, b"file is too small for range requests"
            start = 0
            end = length - 1
            if b"Range" in request.headers:
                range_header = request.headers[b"Range"]
                prefix = b"bytes="
                split_header = range_header[len(prefix):].split(b"-")
                # The first request might be "bytes=0-". We want to force a range
                # request, so just return the first byte.
                if split_header[0] == b"0" and split_header[1] == b"":
                    end = start
                # Otherwise, it is a range request. Respect the values sent.
                if split_header[0] != b"":
                    start = int(split_header[0])
                if split_header[1] != b"":
                    end = int(split_header[1])
            else:
                # The request doesn't have a range. Force a range request by
                # returning the first byte.
                end = start
            headers.append((b"Accept-Ranges", b"bytes"))
            headers.append(
                (b"Content-Length", isomorphic_encode(str(end - start + 1))))
            headers.append(
                (b"Content-Range", b"bytes %d-%d/%d" % (start, end, length)))
            chunk = body[start:(end + 1)]
            return 206, headers, chunk
        return headers, body

    # Fall-through: echo the request back as a JSONP report.
    username = request.auth.username if request.auth.username else b"undefined"
    password = request.auth.password if request.auth.username else b"undefined"
    cookie = request.cookies[
        b'cookie'].value if b'cookie' in request.cookies else b"undefined"

    # Collect uploaded files (multipart parts that carry a file attribute).
    files = []
    for key, values in request.POST.items():
        assert len(values) == 1
        value = values[0]
        if not hasattr(value, u"file"):
            continue
        data = value.file.read()
        files.append({
            u"key": isomorphic_decode(key),
            u"name": value.file.name,
            u"type": value.type,
            u"error": 0,  #TODO,
            u"size": len(data),
            u"content": data
        })

    get_data = {
        isomorphic_decode(key): isomorphic_decode(request.GET[key])
        for key, value in request.GET.items()
    }
    post_data = {
        isomorphic_decode(key): isomorphic_decode(request.POST[key])
        for key, value in request.POST.items()
        if not hasattr(request.POST[key], u"file")
    }
    headers_data = {
        isomorphic_decode(key): isomorphic_decode(request.headers[key])
        for key, value in request.headers.items()
    }

    data = {
        u"jsonpResult": u"success",
        u"method": request.method,
        u"headers": headers_data,
        u"body": isomorphic_decode(request.body),
        u"files": files,
        u"GET": get_data,
        u"POST": post_data,
        u"username": isomorphic_decode(username),
        u"password": isomorphic_decode(password),
        u"cookie": isomorphic_decode(cookie)
    }

    return headers, u"report( %s )" % json.dumps(data)
def get_youku_video(url_or_id, types=None): assert url_or_id, ValueError('require a you video url or id') if url_or_id.find('id_') > -1: m = _re_url_id.search(url_or_id) assert m, ValueError('No id contains in url') url_or_id = m.group(1) if isinstance(types, str): types = [types] elif not types: types = _default_types assert isinstance(types, (list, tuple)), \ ValueError('prefers must be a str or list of str or None') print('> Get basic info of :', url_or_id) data = urlopen(_info_url % url_or_id).read() info = loads(data.decode('utf-8')) info = info['data'][0] video_name = info['title'] print('> Video name :', video_name) print('> Video publisher :', info['username']) types = info['segs'].keys() video_type = None for t in types: if t in types: video_type = t break assert video_type, ValueError('Only "%s" formats are available.' % (', '.join(types))) print('> Choose video type "%s"' % video_type) sid, token = _encode(_magic_1, decodebytes(info['ep'].encode())).split(b'_', 1) ep = quote( encodebytes( _encode(_magic_2, sid + b'_' + url_or_id.encode() + b'_' + token))[:-1]) token = token.decode() sid = sid.decode() print('> Retrieve file list') url_list = set() for line in urlopen(_m3u8_url % (ep, info['ip'], sid, token, video_type, url_or_id)).read().splitlines(): line = line.strip() if line.startswith(b'#'): continue i = line.find(b'?') if i > -1: line = line[:i] url_list.add(line.decode()) _save_dir = join(_save_base_dir, video_name.replace(' ', '__')) exists(_save_dir) or mkdir(_save_dir) print('> %s file(s) to retrieve' % len(url_list)) for i, url in enumerate(url_list): filename = '%d.%s' % (i, url[url.rfind('.') + 1:]) save_path = join(_save_dir, filename) print('---- ' + filename) if isfile(save_path): print('---- Fetched, Skip') continue data = urlopen(url).read() print('---- Size :', len(data)) with open(save_path, 'wb') as f: f.write(data) del data print('> Done')
def open_file(file_name, base_64): decoded_file = base64.decodebytes(str.encode(base_64)) write_to_xml(decoded_file, file_name)
def decode_file(content): data = content.encode("utf8").split(b";base64,")[1] return base64.decodebytes(data)
def _decode_object(val, ident=5):
    """
    Decode recursively string

    Walks a mapping and logs a pretty-printed, indented dump of its contents,
    attempting in order: JSON decode, pickle decode, base64+pickle decode,
    and finally the raw value. Output goes to the module-level ``log``.

    :param val: mapping-like object to dump (non-mappings fall into the
        AttributeError handler at the bottom).
    :param ident: current indentation width in spaces.
    """
    _new_ident = ident + 1
    try:
        for k, v in six.iteritems(val):
            # convert value to original type -> JSON
            try:
                _transformed_info = json.loads(v.decode("utf-8"))
            except (binascii.Error, AttributeError, ValueError):
                _transformed_info = v

            # --------------------------------------------------------------------------
            # Try to display in "human" format
            # --------------------------------------------------------------------------
            if isinstance(_transformed_info, list):
                log.error('%s"%s":' % (" " * ident, k))
                for x in _transformed_info:
                    if isinstance(x, dict):
                        # Open data
                        log.error("%s{" % (" " * _new_ident))
                        _decode_object(x, _new_ident + 2)
                        log.error("%s}" % (" " * _new_ident))
                    else:
                        log.error('%s"%s"' % ((" " * ident), x))

            # Dict handler
            elif isinstance(_transformed_info, dict):
                log.error('%s"%s":' % ((" " * ident), k))
                log.error("%s{" % (" " * _new_ident))
                # NOTE(review): recurses on the raw value `v`, not the
                # JSON-decoded `_transformed_info` — confirm intentional.
                _decode_object(v, _new_ident + 2)
                log.error("%s}" % (" " * _new_ident))

            # Basic type as value
            else:
                try:
                    use_obj = _transformed_info.encode()
                except (TypeError, AttributeError, binascii.Error):
                    use_obj = _transformed_info

                # Is Pickle encoded?
                try:
                    _pickle_decoded = loads(use_obj)

                    # Is pickled
                    log.error('%s"%s":' % ((" " * ident), k))
                    log.error("%s{" % (" " * _new_ident))
                    _decode_object(_pickle_decoded, _new_ident + 2)
                    log.error("%s}" % (" " * _new_ident))
                except Exception as e:
                    if "BadPickleGet" == e.__class__.__name__:
                        log.info(
                            " <!!> Can't decode value for key '%s' because Pickle protocol 3 o 4 used, and it's "
                            "incompatible with Python 2" % k)

                    # Try again decoding in base64
                    try:
                        _b64_decoded = base64.decodebytes(use_obj)

                        # Is pickled
                        log.error('%s"%s":' % ((" " * ident), k))
                        log.error("%s{" % (" " * _new_ident))
                        _decode_object(loads(_b64_decoded), _new_ident + 2)
                        log.error("%s}" % (" " * _new_ident))
                    except Exception:
                        # Transform is not possible -> plain string
                        log.error('%s"%s": "%s"' % ((" " * ident), k, use_obj))
    except AttributeError:
        # Transform is not possible -> plain string
        log.error('%s"%s": "%s"' % ((" " * ident), k, use_obj))
def _parse_parameter_value(value, component_identifiers=None):
    """Translate a JSON-decoded parameter *value* into a Python source-code
    string for script generation.

    Handles lists, Component/ParameterPort dict specs, plain dicts, dill
    payloads, IO-port references, known component identifiers, and generic
    strings. Non-matching values are returned unchanged.

    :param value: JSON-decoded object to translate.
    :param component_identifiers: mapping of valid identifier -> bool flag
        indicating the component was already emitted as a node.
    """
    if component_identifiers is None:
        component_identifiers = {}

    # make `numpy` resolvable by the eval() of a type string below
    exec('import numpy')

    if isinstance(value, list):
        value = [
            _parse_parameter_value(x, component_identifiers) for x in value
        ]
        value = f"[{', '.join([str(x) for x in value])}]"
    elif isinstance(value, dict):
        if (MODEL_SPEC_ID_PARAMETER_SOURCE in value
                and MODEL_SPEC_ID_PARAMETER_VALUE in value):
            # handle ParameterPort spec
            try:
                value_type = eval(value[MODEL_SPEC_ID_TYPE])
            except Exception as e:
                raise PNLJSONError(
                    'Invalid python type specified in JSON object: {0}'.format(
                        value[MODEL_SPEC_ID_TYPE])) from e

            value = _parse_parameter_value(
                value[MODEL_SPEC_ID_PARAMETER_VALUE], component_identifiers)

            # handle tuples and numpy arrays, which both are dumped
            # as lists in JSON form
            if value_type is tuple:
                # convert list brackets to tuple brackets
                assert value[0] == '[' and value[-1] == ']'
                value = f'({value[1:-1]})'
            elif value_type is numpy.ndarray:
                value = f'{value[MODEL_SPEC_ID_TYPE]}({value})'
        else:
            # it is either a Component spec or just a plain dict
            try:
                # try handling as a Component spec
                identifier = parse_valid_identifier(value['name'])
                if (identifier in component_identifiers
                        and component_identifiers[identifier]):
                    # if this spec is already created as a node elsewhere,
                    # then just use a reference
                    value = identifier
                else:
                    value = _generate_component_string(value,
                                                       component_identifiers)
            except (PNLJSONError, KeyError):
                # standard dict handling
                value = '{{{0}}}'.format(', '.join([
                    '{0}: {1}'.format(
                        str(_parse_parameter_value(k, component_identifiers)),
                        str(_parse_parameter_value(v, component_identifiers)))
                    for k, v in value.items()
                ]))
    elif isinstance(value, str):
        obj_string = parse_string_to_psyneulink_object_string(value)
        if obj_string is not None:
            return f'psyneulink.{obj_string}'

        # handle dill string
        try:
            dill_str = base64.decodebytes(bytes(value, 'utf-8'))
            dill.loads(dill_str)
            return f'dill.loads({dill_str})'
        except (binascii.Error,
                pickle.UnpicklingError,
                EOFError):
            pass

        # handle IO port specification
        match = re.match(r'(.+)\.(.+)_ports\.(.+)', value)
        if match is not None:
            comp_name, port_type, name = match.groups()
            comp_identifer = parse_valid_identifier(comp_name)

            if comp_identifer in component_identifiers:
                name_as_kw = parse_string_to_psyneulink_object_string(name)
                if name_as_kw is not None:
                    name = f'psyneulink.{name_as_kw}'
                else:
                    name = f"'{name}'"

                return f'{comp_identifer}.{port_type}_ports[{name}]'

        # if value is just a non-fixed component name, use the fixed name
        identifier = parse_valid_identifier(value)
        if identifier in component_identifiers:
            value = identifier

        evaluates = False
        try:
            eval(value)
            evaluates = True
        except (TypeError, NameError, SyntaxError):
            pass

        # handle generic string
        if (value not in component_identifiers
                # assume a string that contains a dot is a command, not a raw
                # string, this is definitely imperfect and can't handle the
                # legitimate case, but don't know how to distinguish..
                and '.' not in value
                and not evaluates):
            value = f"'{value}'"

    return value
def on_message(client, userdata, msg):
    """MQTT callback: receives a base64 camera frame on topic 'image', then
    either classifies the pointed-at appliance ('rec' control signal) or
    interprets finger direction as a D-pad command ('con' control signal).

    Relies on module globals: connect (socket), model/clf (classifiers),
    binary_thre, size, and the state globals declared below.
    """
    global obj, command_in, down_confirm, x_ref, y_ref, k_ref, mid_ref
    if msg.topic == 'image':
        t1 = time.time()
        # persist the incoming frame so the CV helpers can re-read it
        with open('1.png', "wb") as fh:
            fh.write(base64.decodebytes(msg.payload))
        info = connect.recv(1024)
        info = info.decode()
        print('Get control signal:', info)
        #doesn't matter from here
        if info == 'rec':
            # reset the pointing-reference state before a new classification
            command_in = False
            down_confirm = False
            x_ref = None
            y_ref = None
            k_ref = None
            print('Doing classification.')
            test_set = []
            img_crop, img_bk = generate_crop('1.png', 220)
            #
            img_bk, k, top, mid, control_signal, x_mid = finger_control_f(
                '1.png', binary_thre, 5, -70, 3)
            #cv2.imshow('Binary Image', img_bk)
            cv2.waitKey(3)
            cv2.imwrite('2nd_step.jpg', img_crop)
            # VGG16 feature extraction on the cropped region
            img = image.load_img('2nd_step.jpg', target_size=(224, 224))
            img_data = image.img_to_array(img)
            img_data = np.expand_dims(img_data, axis=0)
            img_data = preprocess_input(img_data)
            vgg16_feature = model.predict(img_data)
            test_set.append(np.ndarray.tolist(vgg16_feature[0]))
            #print(test_set)
            if test_set:
                predict_target = clf.predict(test_set)
                print(predict_target.shape)
                print(predict_target.size)
                predict_prob = clf.predict_proba(test_set)
                #print(correct_tag)
                print('predict results.')
                print(clf.classes_)
                print(predict_prob)
                prob = predict_prob[0]
                # class indices ordered by descending probability
                orderedIndex = sorted(range(len(prob)),
                                      key=lambda k: prob[k],
                                      reverse=True)
                print(orderedIndex)
                print("appliances in order")
                validNum = 0
                validNum = len([i for i in prob if i > 0.075]) - 1
                print('There are valid object #', validNum)
                # get all the results in order and loop thru
                print(predict_target)
                predict_target = predict_target[0]
                for indexCount in orderedIndex:
                    print(clf.classes_[indexCount], end=" ")
                indexCount = 0
                # cycle through candidates until the peer ACKs one
                while True:
                    print("orderedList ", clf.classes_[orderedIndex[indexCount]])
                    info_2 = connect.recv(1024)
                    info_2 = info_2.decode()
                    if info_2 == 'ACK':
                        print(info_2)
                        obj = clf.classes_[orderedIndex[indexCount]]
                        break
                    elif info_2 == '':
                        print('Interrupted.')
                        break
                    indexCount += 1
                    if indexCount > 5:
                        indexCount = 0  # wrap around after 6 candidates
            connect.sendall(b'ready')
            time.sleep(0.5)
            connect.sendall(b'Doing Con.')
            #don't care up until here
        elif info == 'con':
            t2 = time.time()
            #print(obj)
            #print('Con coming soon.')
            #img_bk is just image itself
            #top,mid is the coord of fingertip
            #xmid is the intercept that slope makes with frame
            img_bk, k, top, mid, control_signal, x_mid = finger_control_f(
                '1.png', binary_thre, 5, -70, 3)
            cv2.imwrite('../binary.png', img_bk)
            height, width = img_bk.shape
            t3 = time.time()
            #print(top,mid)
            #print(k,x_mid)
            # map the ACKed appliance to a key press for the controlled app
            if obj == 'Printer':
                pyautogui.press('a')
            elif obj == 'Coffee maker':
                pyautogui.press('b')
            elif obj == 'TV':
                pyautogui.press('c')
            elif obj == 'Door':
                pyautogui.press('d')
            elif obj == 'Minotor':
                pyautogui.press('e')
            #print('slope is ',k,'top y value is ',top,' and mid value is ', mid)
            #print('control signal is', control_signal)
            ##############################
            #creating reference photo and compares future images to reference image
            if not x_ref or not y_ref or not k_ref:
                # first 'con' frame: record the reference fingertip pose
                x_ref = mid
                y_ref = top
                mid_ref = x_mid
                if mid == x_mid:
                    # vertical finger: avoid division by zero, use ~pi/2
                    direction = np.pi / 2 - 0.01
                    #print(top/(mid-x_mid))
                else:
                    direction = np.arctan(top / float((mid - x_mid)))
                k_ref = direction
                connect.sendall(b'Doing Con.')
            else:
                #if no finger, then sends a "down" flag
                # quite
                if control_signal == 'Down':
                    print('down')
                    pyautogui.press('m')
                    if command_in:
                        down_confirm = True
                    time.sleep(0.01)
                    connect.sendall(b'Doing Con.')
                    #print(down_confirm)
                    #####
                else:
                    command_in = True
                    print('up')
                    pyautogui.press('n')
                    if mid == x_mid:
                        direction = k_ref
                        #print(top/(mid-x_mid))
                    else:
                        direction = np.arctan(top / float((mid - x_mid)))
                    print(direction - k_ref)
                    print(x_mid - mid_ref)
                    #mid_ref is xmid of the reference image
                    #k_ref = direction is the slope
                    #"//5" returns the integer digit of the width / 5
                    #if xmid coord - midref bigger than width /5
                    #width is 224 for this
                    #maybe don't include the midref calculations? Moving xmid
                    #does not necessarily mean they are pointing at that box
                    if (direction - k_ref > size):
                        print('block 4')
                        block = 8
                        pyautogui.press('8')
                    elif (direction - k_ref < -size):
                        print('block 1')
                        block = 2
                        pyautogui.press('2')
                    elif (direction - k_ref < size):
                        print('block 2')
                        block = 4
                        pyautogui.press('4')
                    elif (direction - k_ref > -size):
                        print('block 3')
                        block = 6
                        pyautogui.press('6')
                    #### revise this part
                    #trying to integrate using the slope of finger and finger mid to indicate block correctly
                    #direction is angle from xmid to top,mid
                    #quadrant 4 is actually left side
                    #size is alpha from the diagram
                    #add time.sleep(time) only to server
                    if down_confirm == True:
                        down_confirm = False
                        command_in = False
                        #connect.sendall(b'Stop Con.')
                        connect.sendall(b'Doing Con.')
                    else:
                        connect.sendall(b'Doing Con.')
def base64_bytes(x): # type: (AnyStr) -> bytes """Turn base64 into bytes""" if six.PY2: return base64.decodestring(x) # type: ignore return base64.decodebytes(bytes_encode(x))
def ungest_string(string: str) -> str: return str(gzip.decompress(base64.decodebytes(bytes(string, 'utf8'))), 'utf8')
def _bytes_decoder(dct): cast = bytearray if bytes == str else bytes return cast(base64.decodebytes(dct['base64'].encode('utf-8')))
def unzip_b64_file(cls, b64_buffer, name, pwd=None) -> bytes: buffer = b64_buffer if cls.is_base64_encoded(b64_buffer): buffer = base64.decodebytes(b64_buffer) return cls.extract_file(name, buffer, pwd=pwd)
def send_whatsapp_msgs(self, number, msg):
    """Drive WhatsApp Web through Selenium to send *msg* (and any
    ``self.attachment_ids``) to phone *number*.

    Returns a dict with a QR image when the session is not logged in;
    otherwise the outcome is reported through the module-global ``msg_sent``.
    Relies on module globals: driver/wait/wait5 (per-user registries keyed by
    ``self.unique_user``).
    """
    global driver
    global wait
    global wait5
    global msg_sent
    try:
        # '_3fUe9' marks the logged-in chat UI; absence means the login
        # (QR code) screen is showing.
        elements = driver.get(self.unique_user).find_elements_by_class_name('_3fUe9')
        if not elements:
            try:
                landing_wrapper_xpath = "//div[contains(@class, 'landing-wrapper')]"
                landing_wrapper = wait5.get(self.unique_user).until(EC.presence_of_element_located((
                    By.XPATH, landing_wrapper_xpath)))
                try:
                    # dismiss any overlay buttons blocking the QR code
                    elements = driver.get(self.unique_user).find_elements_by_class_name('_1MOym')
                    for e in elements:
                        e.click()
                except:
                    pass
                qr_code_xpath = "//img[contains(@alt, 'Scan me!')]"
                qr_code = wait5.get(self.unique_user).until(EC.presence_of_element_located((
                    By.XPATH, qr_code_xpath)))
                # hand the QR data-URI back so the caller can display it
                return {"isLoggedIn": False, 'qr_image': qr_code.get_attribute("src")}
            except NoSuchElementException as e:
                traceback.print_exc()
            except Exception as ex:
                traceback.print_exc()
    except NoSuchElementException as e:
        traceback.print_exc()
    try:
        # close any popup dialogs ('_3PQ7V') left over from a previous run
        elements = driver.get(self.unique_user).find_elements_by_class_name('_3PQ7V')
        for e in elements:
            e.click()
            time.sleep(7)
    except Exception as e:
        traceback.print_exc()
    try:
        driver.get(self.unique_user).find_element_by_id('sender')
    except NoSuchElementException as e:
        msg_sent = False
        # inject a hidden anchor (#sender) used to trigger the wa.me deep link
        script = 'var newEl = document.createElement("div");newEl.innerHTML = "<a href=\'#\' id=\'sender\' class=\'executor\'> </a>";var ref = document.querySelector("div.ZP8RM");ref.parentNode.insertBefore(newEl, ref.nextSibling);'
        driver.get(self.unique_user).execute_script(script)
    try:
        # point the injected anchor at the api.whatsapp.com send URL and click
        driver.get(self.unique_user).execute_script(
            "var idx = document.getElementsByClassName('executor').length -1; document.getElementsByClassName('executor')[idx].setAttribute(arguments[0], arguments[1]);",
            "href",
            "https://api.whatsapp.com/send?phone=" + number + "&text=" + msg.replace('\n', '%0A'))
        time.sleep(2)
        driver.get(self.unique_user).find_element_by_id('sender').click()
        inp_xpath = "//div[@contenteditable='true']"
        input_box = wait.get(self.unique_user).until(EC.presence_of_element_located((
            By.XPATH, inp_xpath)))
        time.sleep(1)
        input_box.send_keys(Keys.ENTER)
        for attachment in self.attachment_ids:
            try:
                time.sleep(1)
                driver.get(self.unique_user).find_element_by_css_selector("span[data-icon='clip']").click()
                time.sleep(1)
                # attachment.datas is base64; write decoded bytes to /tmp so the
                # file input can pick it up by path
                with open("/tmp/" + attachment.datas_fname, 'wb') as tmp:
                    tmp.write(base64.decodebytes(attachment.datas))
                driver.get(self.unique_user).find_element_by_css_selector("input[type='file']").send_keys(tmp.name)
                wait_upload_xpath = "//div[contains(@class, '_2PHoH')]"
                wait_upload = wait.get(self.unique_user).until(EC.presence_of_element_located((
                    By.XPATH, wait_upload_xpath)))
                time.sleep(1)
                driver.get(self.unique_user).find_element_by_css_selector("span[data-icon='send-light']").click()
            except:
                msg_sent = False
        msg_sent = True
    except Exception as e:
        msg_sent = False
def ava(): with open("static/face.png", "wb") as fh: fh.write(base64.decodebytes(request.form['file'].split("base64,")[1].encode())) return "200"
def unzip_b64_content(cls, b64_buffer, name=None, pwd=None) -> dict: buffer = b64_buffer if cls.is_base64_encoded(b64_buffer): buffer = base64.decodebytes(b64_buffer) return cls.extract_files(buffer, name=name, pwd=pwd)
def modifyRequest(self, req, resp):
    """Copies cookie-supplied credentials to the basic auth fields.

    Returns a flag indicating what the user is trying to do with
    cookies: ATTEMPT_NONE, ATTEMPT_LOGIN, or ATTEMPT_RESUME.  If
    cookie login is disabled for this request, raises
    CookieCrumblerDisabled.
    """
    # Cookie auth only applies to ordinary browser requests; WebDAV source
    # requests and non-browser methods fall back to basic auth.
    if not isinstance(req, HTTPRequest) or \
            req['REQUEST_METHOD'] not in ('HEAD', 'GET', 'PUT', 'POST') or \
            'WEBDAV_SOURCE_PORT' in req.environ:
        raise CookieCrumblerDisabled

    # attempt may contain information about an earlier attempt to
    # authenticate using a higher-up cookie crumbler within the
    # same request.
    attempt = getattr(req, '_cookie_auth', ATTEMPT_NONE)

    if attempt == ATTEMPT_NONE:
        if req._auth:
            # An auth header was provided and no cookie crumbler
            # created it. The user must be using basic auth.
            raise CookieCrumblerDisabled

        if self.pw_cookie in req and self.name_cookie in req:
            # Attempt to log in and set cookies.
            attempt = ATTEMPT_LOGIN
            name = req[self.name_cookie]
            pw = req[self.pw_cookie]
            # Build the "user:password" basic-auth token (base64).
            if six.PY2:
                ac = base64.encodestring('%s:%s' % (name, pw)).rstrip()
            else:
                ac = base64.encodebytes(
                    ('%s:%s' % (name, pw)).encode()).rstrip().decode()
            self._setAuthHeader(ac, req, resp)
            if req.get(self.persist_cookie, 0):
                # Persist the user name (but not the pw or session)
                expires = (DateTime() + 365).toZone('GMT').rfc822()
                resp.setCookie(self.name_cookie, name,
                               path=self.getCookiePath(),
                               expires=expires)
            else:
                # Expire the user name
                resp.expireCookie(self.name_cookie,
                                  path=self.getCookiePath())
            method = self.getCookieMethod('setAuthCookie',
                                          self.defaultSetAuthCookie)
            method(resp, self.auth_cookie, quote(ac))
            self.delRequestVar(req, self.name_cookie)
            self.delRequestVar(req, self.pw_cookie)

        elif self.auth_cookie in req:
            # Attempt to resume a session if the cookie is valid.
            # Copy __ac to the auth header.
            ac = unquote(req[self.auth_cookie])
            if ac and ac != 'deleted':
                # Only accept the cookie if it round-trips through base64;
                # anything else is not a valid basic-auth token.
                try:
                    if six.PY2:
                        base64.decodestring(ac)
                    else:
                        base64.decodebytes(ac.encode())
                except Exception:
                    # Not a valid auth header.
                    pass
                else:
                    attempt = ATTEMPT_RESUME
                    self._setAuthHeader(ac, req, resp)
                    self.delRequestVar(req, self.auth_cookie)
                    method = self.getCookieMethod('twiddleAuthCookie', None)
                    if method is not None:
                        method(resp, self.auth_cookie, quote(ac))

    # Record the outcome so nested crumblers don't repeat the work.
    req._cookie_auth = attempt
    return attempt
def import_attendance_apply(self):
    """Odoo wizard action: import hr.attendance records from the uploaded
    base64 file (``self.file``), either CSV or Excel (``self.import_type``).

    Column layout: col 0 = badge/barcode or employee id (``self.attendance_by``),
    col 1 = check-in, col 2 = check-out; columns 3+ are dynamic hr.attendance
    fields named in the header row (``field`` or ``field@m2o_lookup``).
    Returns the result of ``self.show_success_msg`` summarising imported vs
    skipped rows.
    """
    hr_attendance_obj = self.env['hr.attendance']
    ir_model_fields_obj = self.env['ir.model.fields']
    # perform import lead
    if self and self.file:
        # For CSV
        if self.import_type == 'csv':
            counter = 1
            skipped_line_no = {}
            row_field_dic = {}
            row_field_error_dic = {}
            try:
                file = str(base64.decodebytes(self.file).decode('utf-8'))
                myreader = csv.reader(file.splitlines())
                skip_header = True
                for row in myreader:
                    try:
                        if skip_header:
                            # Header row: resolve dynamic columns (3+) against
                            # stored hr.attendance fields.
                            skip_header = False
                            for i in range(3, len(row)):
                                name_field = row[i]
                                name_m2o = False
                                if '@' in row[i]:
                                    list_field_str = name_field.split('@')
                                    name_field = list_field_str[0]
                                    name_m2o = list_field_str[1]
                                search_field = ir_model_fields_obj.sudo().search([
                                    ("model", "=", "hr.attendance"),
                                    ("name", "=", name_field),
                                    ("store", "=", True),
                                ], limit=1)
                                if search_field:
                                    field_dic = {
                                        'name': name_field,
                                        'ttype': search_field.ttype,
                                        'required': search_field.required,
                                        'name_m2o': name_m2o
                                    }
                                    row_field_dic.update({i: field_dic})
                                else:
                                    row_field_error_dic.update(
                                        {row[i]: " - field not found"})
                            counter = counter + 1
                            continue
                        if row_field_error_dic:
                            # abort the whole import on unknown header fields
                            res = self.show_success_msg(
                                0, row_field_error_dic)
                            return res
                        vals = {}
                        if self.attendance_by == 'badge':
                            badge = False
                            if row[0] != '':
                                badge = self.env['hr.employee'].sudo(
                                ).search([('barcode', '=', row[0])], limit=1)
                                if badge:
                                    badge = badge.id
                                else:
                                    skipped_line_no[str(
                                        counter)] = " - Badge not found. "
                                    counter = counter + 1
                                    continue
                            check_in_time = None
                            if row[1] != '':
                                if row[1]:
                                    check_in_time = row[1]
                            else:
                                skipped_line_no[str(
                                    counter
                                )] = " - Check in Date and Time not found. "
                                counter = counter + 1
                                continue
                            check_out_time = None
                            if row[2] != '':
                                if row[2]:
                                    check_out_time = row[2]
                            else:
                                skipped_line_no[str(
                                    counter
                                )] = " - Check out Date and Time not found. "
                                counter = counter + 1
                                continue
                            vals.update({
                                'employee_id': badge,
                                'check_in': check_in_time,
                                'check_out': check_out_time,
                            })
                        elif self.attendance_by == 'employee_id':
                            employee_id = False
                            if row[0] != '':
                                employee_id = self.env['hr.employee'].sudo(
                                ).search([('id', '=', int(row[0]))], limit=1)
                                if employee_id:
                                    employee_id = employee_id.id
                                else:
                                    skipped_line_no[
                                        str(counter
                                            )] = " - Employee not found. "
                                    counter = counter + 1
                                    continue
                            check_in_time = None
                            if row[1] != '':
                                if row[1]:
                                    check_in_time = row[1]
                            else:
                                skipped_line_no[str(
                                    counter
                                )] = " - Check in Date and Time not found. "
                                counter = counter + 1
                                continue
                            check_out_time = None
                            if row[2] != '':
                                if row[2]:
                                    check_out_time = row[2]
                            else:
                                skipped_line_no[str(
                                    counter
                                )] = " - Check out Date and Time not found. "
                                counter = counter + 1
                                continue
                            vals.update({
                                'employee_id': employee_id,
                                'check_in': check_in_time,
                                'check_out': check_out_time,
                            })
                        # Dynamic columns: validate each value and merge into
                        # vals; any error skips the whole row.
                        is_any_error_in_dynamic_field = False
                        for k_row_index, v_field_dic in row_field_dic.items(
                        ):
                            field_name = v_field_dic.get("name")
                            field_ttype = v_field_dic.get("ttype")
                            field_value = row[k_row_index]
                            field_required = v_field_dic.get("required")
                            field_name_m2o = v_field_dic.get("name_m2o")
                            dic = self.validate_field_value(
                                field_name, field_ttype, field_value,
                                field_required, field_name_m2o)
                            if dic.get("error", False):
                                skipped_line_no[str(counter)] = dic.get(
                                    "error")
                                is_any_error_in_dynamic_field = True
                                break
                            else:
                                vals.update(dic)
                        if is_any_error_in_dynamic_field:
                            counter = counter + 1
                            continue
                        hr_attendance_obj.create(vals)
                        counter = counter + 1
                    except Exception as e:
                        skipped_line_no[str(
                            counter)] = " - Value is not valid " + ustr(e)
                        counter = counter + 1
                        continue
            except Exception:
                raise UserError(
                    _("Sorry, Your csv file does not match with our format"
                      ))
            if counter > 1:
                # -2 accounts for the header row and the final increment
                completed_attendance = (counter -
                                        len(skipped_line_no)) - 2
                res = self.show_success_msg(completed_attendance,
                                            skipped_line_no)
                return res

        # For Excel
        if self.import_type == 'excel':
            counter = 1
            skipped_line_no = {}
            row_field_dic = {}
            row_field_error_dic = {}
            try:
                wb = xlrd.open_workbook(
                    file_contents=base64.decodebytes(self.file))
                sheet = wb.sheet_by_index(0)
                skip_header = True
                for row in range(sheet.nrows):
                    try:
                        if skip_header:
                            # Header row: resolve dynamic columns (3+).
                            skip_header = False
                            for i in range(3, sheet.ncols):
                                name_field = sheet.cell(row, i).value
                                name_m2o = False
                                if '@' in sheet.cell(row, i).value:
                                    list_field_str = name_field.split('@')
                                    name_field = list_field_str[0]
                                    name_m2o = list_field_str[1]
                                search_field = ir_model_fields_obj.sudo(
                                ).search([
                                    ("model", "=", "hr.attendance"),
                                    ("name", "=", name_field),
                                    ("store", "=", True),
                                ], limit=1)
                                if search_field:
                                    field_dic = {
                                        'name': name_field,
                                        'ttype': search_field.ttype,
                                        'required': search_field.required,
                                        'name_m2o': name_m2o
                                    }
                                    row_field_dic.update({i: field_dic})
                                else:
                                    row_field_error_dic.update({
                                        sheet.cell(row, i).value:
                                        " - field not found"
                                    })
                            counter = counter + 1
                            continue
                        if row_field_error_dic:
                            res = self.show_success_msg(
                                0, row_field_error_dic)
                            return res
                        if self.attendance_by == 'badge':
                            badge = False
                            if sheet.cell(row, 0).value != '':
                                badge_int = int(sheet.cell(row, 0).value)
                                badge_str = str(badge_int)
                                badge = self.env['hr.employee'].search(
                                    [('barcode', '=', badge_str)],
                                    limit=1)
                                if badge:
                                    badge = badge.id
                                else:
                                    skipped_line_no[str(
                                        counter)] = " - Badge not found. "
                                    counter = counter + 1
                                    continue
                            check_in_time = None
                            if sheet.cell(row, 1).value != '':
                                # Excel stores datetimes as days since
                                # 1899-12-30; 25569 = offset to 1970-01-01.
                                float_date_time = sheet.cell(row, 1).value
                                seconds = (float_date_time - 25569) * 86400.0
                                check_in_time = datetime.datetime.utcfromtimestamp(
                                    seconds)
                            else:
                                skipped_line_no[str(
                                    counter
                                )] = " - Check in Date and Time not found. "
                                counter = counter + 1
                                continue
                            check_out_time = None
                            if sheet.cell(row, 2).value != '':
                                float_date_time = sheet.cell(row, 2).value
                                seconds = (float_date_time - 25569) * 86400.0
                                check_out_time = datetime.datetime.utcfromtimestamp(
                                    seconds)
                            else:
                                skipped_line_no[str(
                                    counter
                                )] = " - Check out Date and Time not found. "
                                counter = counter + 1
                                continue
                            vals = {
                                'employee_id': badge,
                                'check_in': check_in_time,
                                'check_out': check_out_time,
                            }
                            is_any_error_in_dynamic_field = False
                            for k_row_index, v_field_dic in row_field_dic.items(
                            ):
                                field_name = v_field_dic.get("name")
                                field_ttype = v_field_dic.get("ttype")
                                field_value = sheet.cell(row,
                                                         k_row_index).value
                                field_required = v_field_dic.get(
                                    "required")
                                field_name_m2o = v_field_dic.get(
                                    "name_m2o")
                                dic = self.validate_field_value(
                                    field_name, field_ttype, field_value,
                                    field_required, field_name_m2o)
                                if dic.get("error", False):
                                    skipped_line_no[str(
                                        counter)] = dic.get("error")
                                    is_any_error_in_dynamic_field = True
                                    break
                                else:
                                    vals.update(dic)
                            if is_any_error_in_dynamic_field:
                                counter = counter + 1
                                continue
                            hr_attendance_obj.create(vals)
                            counter = counter + 1
                        elif self.attendance_by == 'employee_id':
                            employee_id = False
                            if sheet.cell(row, 0).value != '':
                                employee_int = int(
                                    sheet.cell(row, 0).value)
                                employee_id = self.env[
                                    'hr.employee'].search(
                                        [('id', '=', employee_int)],
                                        limit=1)
                                if employee_id:
                                    employee_id = employee_id.id
                                else:
                                    skipped_line_no[
                                        str(counter
                                            )] = " - Employee not found. "
                                    counter = counter + 1
                                    continue
                            check_in_time = None
                            if sheet.cell(row, 1).value != '':
                                float_date_time = sheet.cell(row, 1).value
                                seconds = (float_date_time - 25569) * 86400.0
                                check_in_time = datetime.datetime.utcfromtimestamp(
                                    seconds)
                            else:
                                skipped_line_no[str(
                                    counter
                                )] = " - Check in Date and Time not found. "
                                counter = counter + 1
                                continue
                            check_out_time = None
                            if sheet.cell(row, 2).value != '':
                                float_date_time = sheet.cell(row, 2).value
                                seconds = (float_date_time - 25569) * 86400.0
                                check_out_time = datetime.datetime.utcfromtimestamp(
                                    seconds)
                            else:
                                skipped_line_no[str(
                                    counter
                                )] = " - Check out Date and Time not found. "
                                counter = counter + 1
                                continue
                            vals = {
                                'employee_id': employee_id,
                                'check_in': check_in_time,
                                'check_out': check_out_time,
                            }
                            is_any_error_in_dynamic_field = False
                            for k_row_index, v_field_dic in row_field_dic.items(
                            ):
                                field_name = v_field_dic.get("name")
                                field_ttype = v_field_dic.get("ttype")
                                field_value = sheet.cell(row,
                                                         k_row_index).value
                                field_required = v_field_dic.get(
                                    "required")
                                field_name_m2o = v_field_dic.get(
                                    "name_m2o")
                                dic = self.validate_field_value(
                                    field_name, field_ttype, field_value,
                                    field_required, field_name_m2o)
                                if dic.get("error", False):
                                    skipped_line_no[str(
                                        counter)] = dic.get("error")
                                    is_any_error_in_dynamic_field = True
                                    break
                                else:
                                    vals.update(dic)
                            if is_any_error_in_dynamic_field:
                                counter = counter + 1
                                continue
                            hr_attendance_obj.create(vals)
                            counter = counter + 1
                    except Exception as e:
                        skipped_line_no[str(
                            counter)] = " - Value is not valid " + ustr(e)
                        counter = counter + 1
                        continue
            except Exception:
                raise UserError(
                    _("Sorry, Your excel file does not match with our format"
                      ))
            if counter > 1:
                completed_attendance = (counter -
                                        len(skipped_line_no)) - 2
                res = self.show_success_msg(completed_attendance,
                                            skipped_line_no)
                return res
def send_png_data_to_slack(self, team_id, channel, target, png_data): png_file = Files.temp_file('.png') with open(png_file, "wb") as fh: fh.write(base64.decodebytes(png_data.encode())) return self.send_png_file_to_slack(team_id, channel, target, png_file)
def decode(self, session_data): """decodes the data to get back the session dict """ pickled = decodebytes(session_data) return pickle.loads(pickled)