def data_from_type(name, contenttype, content):
    """Write *content* to a temporary file, decoding it per *contenttype*.

    Understands three content types: 'raw' (written verbatim), 'base64'
    (base64-decoded), and 'url' (downloaded; file:// URLs read locally).
    Returns an open NamedTemporaryFile rewound to offset 0.  *name* is used
    only in the error message.  Raises oz.OzException.OzException for any
    other content type.  (Python 2 code: StringIO/urlparse modules.)
    """
    out = tempfile.NamedTemporaryFile()
    if contenttype == 'raw':
        out.write(content)
    elif contenttype == 'base64':
        # base64.decode reads from one file object and writes to another
        base64.decode(StringIO.StringIO(content), out)
    elif contenttype == 'url':
        url = urlparse.urlparse(content)
        if url.scheme == "file":
            # netloc + path rebuilds the local filesystem path of a file:// URL
            with open(url.netloc + url.path) as f:
                out.write("".join(f.readlines()))
        else:
            oz.ozutil.http_download_file(content, out.fileno(), False, None)
    else:
        raise oz.OzException.OzException("Type for %s must be 'raw', 'url' or 'base64'" % (name))
    # make sure the data is flushed to disk for uses of the file through
    # the name
    out.flush()
    out.seek(0)
    return out
def read(self, fd = sys.stdin):
    """Read this entity's body from *fd* into self.io, honouring the declared
    length and transfer encoding.

    application/octet-stream bodies are intentionally left on the stream.
    Raises NotImplementedError for transfer encodings other than binary or
    base64.
    """
    # The octet-streams are passed right back
    if self.type == "application/octet-stream":
        return
    if self.length:
        # Read exactly the amount of data we were told
        self.io = _smart_read(fd, self.length,
                              bufferSize=self.bufferSize,
                              progressCallback=self.progressCallback,
                              max_mem_size=self.max_mem_size)
    else:
        # Oh well, no clue; read until EOF (hopefully)
        self.io = _smart_total_read(fd)
    if not self.transfer or self.transfer == "binary":
        return
    elif self.transfer == "base64":
        import base64
        # swap the raw buffer out for a base64-decoded copy
        old_io = self.io
        old_io.seek(0, 0)
        self.io = SmartIO(max_mem_size=self.max_mem_size)
        base64.decode(old_io, self.io)
    else:
        raise NotImplementedError(self.transfer)
def _get_testdef_tar_repo(testdef_repo, tmpdir):
    """Extract the base64-encoded tar archive *testdef_repo* into
    tmpdir/tartestrepo and return that directory path.

    Extraction errors are logged, not raised; the temporary tar file is
    always removed.  (Python 2 code: StringIO module.)
    """
    tardir = os.path.join(tmpdir, 'tartestrepo')
    temp_tar = os.path.join(tmpdir, "tar-repo.tar")
    try:
        if not os.path.isdir(tardir):
            logging.info("Creating directory to extract the tar archive into.")
            os.makedirs(tardir)
        # testdef_repo carries the archive base64-encoded; decode it first
        encoded_in = StringIO.StringIO(testdef_repo)
        decoded_out = StringIO.StringIO()
        base64.decode(encoded_in, decoded_out)
        # The following two operations can also be done in memory
        # using cStringIO.
        # At the moment the tar file sent is not big, but that can change.
        with open(temp_tar, "w") as write_tar:
            write_tar.write(decoded_out.getvalue())
        with tarfile.open(temp_tar) as tar:
            tar.extractall(path=tardir)
    except (OSError, tarfile.TarError) as ex:
        logging.error("Error extracting the tar archive.\n" + str(ex))
    finally:
        # Remove the temporary created tar file after it has been extracted.
        if os.path.isfile(temp_tar):
            os.unlink(temp_tar)
    return tardir
def download_subtitles (subtitles_list, pos, zip_subs, tmp_sub_dir, sub_folder, session_id): #standard input
    """Download the subtitle at index *pos* from the Sublight service and
    base64-decode it into the zip file *zip_subs*.

    Waits out the service-imposed delay, showing an on-screen countdown
    notification each second.  Returns the standard XBMC subtitle-plugin
    triple (True, language, "").
    """
    import time
    subtitle_id = subtitles_list[pos][ "ID" ]
    language = subtitles_list[pos][ "language_name" ]
    sublightWebService = SublightWebService()
    ticket_id, download_wait = sublightWebService.GetDownloadTicket(session_id, subtitle_id)
    if ticket_id != "" :
        icon = os.path.join(__cwd__,"icon.png")
        if download_wait > 0 :
            # the service imposes a wait; count it down with notifications
            delay = int(download_wait)
            for i in range (int(download_wait)):
                line2 = "download will start in %i seconds" % (delay,)
                xbmc.executebuiltin("XBMC.Notification(%s,%s,1000,%s)" % (__scriptname__,line2,icon))
                delay -= 1
                time.sleep(1)
        subtitle_b64_data = sublightWebService.DownloadByID(session_id, subtitle_id, ticket_id)
        # write the base64 payload to a temp file, then decode it into the zip
        base64_file_path = os.path.join(tmp_sub_dir, "tmp_su.b64")
        base64_file = open(base64_file_path, "wb")
        base64_file.write( subtitle_b64_data )
        base64_file.close()
        base64_file = open(base64_file_path, "r")
        zip_file = open(zip_subs, "wb")
        base64.decode(base64_file, zip_file)
        base64_file.close()
        zip_file.close()
        return True,language, "" #standard output
def getLogsArchive(bridgeNode, bridgePort, nodeSet=set(), outputdir='/tmp', timeout=30):
    """Ask every node (via the bridge) for its log archive and unpack each
    reply under outputdir/<node>.

    Each reply is a base64-encoded gzipped tar archive.  Returns the tuple
    (all_nodes_answered, list_of_answering_nodes).
    NOTE(review): nodeSet has a mutable default, shared across calls; here
    it is only read or rebound, but confirm before ever mutating it.
    """
    if not nodeSet:
        log.info("Empty node set. Would query for just the bridge node.")
        nodeSet = set([bridgeNode.split('.')[0]])
    log.info("Node Set: %s" %(nodeSet))
    messaging = sendMessage(bridgeNode, bridgePort, list(nodeSet), 'daemon', 'getLogsArchive', {})
    result = recieveMessages(messaging, nodeSet, timeout)
    helpers.makeDir(outputdir)
    for node in result:
        nodeLogDir = os.path.join(outputdir, node)
        tardata = result[node]
        # decode the base64 payload into a scratch file
        scratch = tempfile.TemporaryFile()
        sp = cStringIO.StringIO(tardata)
        base64.decode(sp, scratch)
        sp.close()
        # now untar that into the output directory
        scratch.seek(0)
        tar = tarfile.open(fileobj=scratch, mode="r:gz")
        for m in tar.getmembers():
            tar.extract(m, nodeLogDir)
        tar.close()
    failedNodes = nodeSet - set(result.keys())
    return ((len(failedNodes) == 0), result.keys())
def CheckoutGitBundleForTrybot(repo, destination):
  """Fetch and check out a trybot Git bundle for *repo* into *destination*.

  For testing LLVM, Clang, etc. changes on the trybots, look for a Git
  bundle file created by llvm_change_try_helper.sh; if its base64 form
  exists, decode it, unbundle it and check out the recorded commit id.
  """
  bundle_file = os.path.join(NACL_DIR, 'pnacl', 'not_for_commit',
                             '%s_bundle' % repo)
  base64_file = '%s.b64' % bundle_file
  if os.path.exists(base64_file):
    # with-blocks close the handles even if decoding fails
    # (the original leaked both on error).
    with open(base64_file, 'r') as input_fh:
      with open(bundle_file, 'wb') as output_fh:
        base64.decode(input_fh, output_fh)
    subprocess.check_call(
        pynacl.repo_tools.GitCmd() + ['fetch'],
        cwd=destination
    )
    subprocess.check_call(
        pynacl.repo_tools.GitCmd() + ['bundle', 'unbundle', bundle_file],
        cwd=destination
    )
    commit_id_file = os.path.join(NACL_DIR, 'pnacl', 'not_for_commit',
                                  '%s_commit_id' % repo)
    # read the commit id without leaking the handle
    with open(commit_id_file, 'r') as commit_fh:
      commit_id = commit_fh.readline().strip()
    subprocess.check_call(
        pynacl.repo_tools.GitCmd() + ['checkout', commit_id],
        cwd=destination
    )
def test_decode(self):
    """base64.decode round-trips a known vector between file objects."""
    from cStringIO import StringIO
    source = StringIO("d3d3LnB5dGhvbi5vcmc=")
    sink = StringIO()
    base64.decode(source, sink)
    self.assertEqual(sink.getvalue(), "www.python.org")
def file_from_b64(b64data, outfile):
    """Decode the base64 string *b64data* and write the result to *outfile*.

    *b64data* is encoded to bytes using the module-level ENCODING before
    decoding.  The output handle is closed even if decoding raises (the
    original leaked it on error).
    """
    source = BytesIO(bytes(b64data, ENCODING))
    with open(outfile, 'w+b') as f:
        base64.decode(source, f)
def get_file(pad_name, outfile):
    """Fetch the content of the pad *pad_name* and base64-decode it into
    the file named *outfile*."""
    buf = StringIO.StringIO()
    buf.write(get_content(pad_name))
    buf.seek(0)
    sink = open(outfile, 'w+b')
    decode(buf, sink)
    sink.close()
def setLoginPayload(self, d=None):
    """Prepare the payload for the login authentication stage.

    Builds a SHA1 token over "username:password:salt_token" and returns
    (False, xml_bytes) on success, or (True, error_message) for an
    unsupported encryption scheme.  *d* must carry salt, token, password,
    username and encryption keys.
    """
    salt = decode(d["salt"])
    salt_token = decode(d["token"])
    if d["encryption"] == "plain":
        h = hashlib.sha1(d['password'])
        h.update(salt)
        # NOTE(review): the "******" literal looks like a redacted scheme
        # prefix (e.g. "{SSHA}") — confirm against the server-side check.
        sshaPassword = "******" + encode(h.digest() + salt)
    elif d["encryption"] == "ssha":
        sshaPassword = d['password']
    else:
        return (True, 'Unsupported encryption scheme: %s' % d["encryption"])
    payload = "%s:%s:%s" % (d["username"], sshaPassword, salt_token)
    h = hashlib.sha1(payload)
    h.update(salt_token)
    token = h.hexdigest()
    root = etree.Element('ocsmanager')
    token_element = etree.SubElement(root, 'token')
    token_element.text = token
    return (False, etree.tostring(root, xml_declaration=True, encoding="utf-8"))
def run(self, connection, args=None):
    """Extract the base64-encoded tar archive from the job parameters into
    the test overlay directory.

    Raises JobError when the archive cannot be extracted; the temporary
    tar file is always removed.
    """
    connection = super(TarRepoAction, self).run(connection, args)
    runner_path = os.path.join(self.data['test-definition']['overlay_dir'],
                               'tests', self.parameters['test_name'])
    temp_tar = os.path.join(self.data['test-definition']['overlay_dir'],
                            "tar-repo.tar")
    try:
        if not os.path.isdir(runner_path):
            self.logger.debug("Creating directory to extract the tar archive into.")
            os.makedirs(runner_path)
        # base64.decode() emits bytes, so the output buffer and the temp
        # file must both be binary: io.StringIO / text mode raise
        # TypeError on Python 3.
        encoded_in = io.StringIO(self.parameters['repository'])
        decoded_out = io.BytesIO()
        base64.decode(encoded_in, decoded_out)
        with open(temp_tar, "wb") as write_tar:
            write_tar.write(decoded_out.getvalue())
        with tarfile.open(temp_tar) as tar:
            tar.extractall(path=runner_path)
    except (OSError, tarfile.TarError) as ex:
        raise JobError("Error extracting the tar archive.\n" + str(ex))
    finally:
        # Remove the temporary created tar file after it has been extracted.
        if os.path.isfile(temp_tar):
            os.unlink(temp_tar)
    return connection
def verifyPassword(self, username, token_salt64, salt64, payload):
    """Check *payload* against a hash recomputed from the stored credentials.

    Returns (False, None) when authentication succeeds, otherwise
    (True, error_message).  Exits the process on an unsupported
    encryption scheme.
    """
    if username != self.username:
        return (True, 'Invalid Username/Password')
    salt = decode(salt64)
    token_salt = decode(token_salt64)
    # Recreate the payload and compare it
    if self.encryption == "plain":
        h = hashlib.sha1(self.password)
        h.update(salt)
        # NOTE(review): the "******" literal looks like a redacted scheme
        # prefix (e.g. "{SSHA}") — confirm against the client side.
        sshaPassword = "******" + encode(h.digest() + salt)
    elif self.encryption == "ssha":
        sshaPassword = self.password
    else:
        log.error("%s: Unsupported password encryption: %s", self.encryption)
        sys.exit()
    h = hashlib.sha1(str(username) + ':' + str(sshaPassword) + ':' + str(token_salt))
    h.update(token_salt)
    phash = h.hexdigest()
    # Final authentication check
    if phash == payload:
        return (False, None)
    return (True, 'Invalid Credentials')
def verify(cls,req):
    "called from registration email to complete the registration process"
    try:
        #check key
        # prepare key - need to strip whitespace and make sure the length
        # is a multiple of 4
        key = req.key.strip()
        if len(key) % 4:
            key = key + ('=' * (4 - len(key)%4))
        req.key = key
        try:
            # key decodes to "uid,id,pw"
            uid,id,pw=decode(req.key).split(',')
        except:
            uid,id,pw=decode(req.key+'=').split(',')  # bodge it... some browsers dont return a trailing '='
        # print '>>>>>',uid,id,pw
        self=cls.get(int(uid))
        if (self.id==id) and (self.pw==pw):
            if not self.stage: # not already verified, so ..
                req.__user__=id
                req.__pass__=pw
                self.create_permits()
                if self.Config.registration_method=='self':
                    self.validate_user(req) #create the login cookie
                    return req.redirect(self.url("view?message=%s" % lib.url_safe('your registration has been verified'))) #use redirect to force clean new login
                else:
                    return req.redirect(self.url("view?message=%s" % lib.url_safe('registration of "%s" has been verified' % id)))
    except:
        raise
    # reached when the key did not match the stored credentials
    return self.error('verification failure')
def base64Dec(inf, outf):
    """Base64-decode the file named *inf* into the file named *outf*.

    Both files are opened in binary mode: base64.decode() reads raw lines
    and writes raw bytes, so the original text-mode input ("r") breaks
    under Python 3.  The with-blocks also close both handles on error.
    """
    with open(inf, "rb") as src:
        with open(outf, "wb") as dst:
            base64.decode(src, dst)
def from_base64(cls, value):
    """Build an Attachment whose data is the base64-decoded *value*."""
    encoded = StringIO(value)
    decoded = StringIO()
    base64.decode(encoded, decoded)
    decoded.seek(0)
    return Attachment(data=decoded.read())
def writeFile(self, filename):
    """Base64-decode this resource's stored file payload into the file
    named by *filename*."""
    sink = open(filename, 'wb')
    encoded = StringIO(self.resource_file[Resource.FILE_KEY])
    base64.decode(encoded, sink)
    sink.close()
def setUp(self):
    """Materialise the base64-encoded fixture image as test.jpg inside the
    test library directory."""
    super(LibraryPhotoTest, self).setUp()
    target_path = os.path.join(self.library_path, "test.jpg")
    target = open(target_path, "wb")
    source = io.StringIO(self.TEST_IMAGE)
    base64.decode(source, target)
    target.close()
    source.close()
def _get_image_data(self, image_type):
    """Decode every BinData element of *image_type* into a numpy array of
    shape (channels, frames, height, width), stored under
    self.images[image_type]["ImageData"].
    """
    self.logger.info(
        "Reading data for image {}".format(self.active_image_number))
    tiffdata_elements = self.images[image_type]["BinDatas"]
    data_array = numpy.zeros(
        shape=(self.channels, self.frames, self.image_height,
               self.image_width),
        dtype=self.data_type)
    self.images[image_type]["ImageData"] = data_array
    for tiffdata_element_key in tiffdata_elements:
        # since ome does not group bindatas in channels we have to
        # guess which bindata elements are in which channel.
        # assuming that all frames from one channel are grouped
        # together so we have self.frames frames in each channel.
        # Once self.frames number of frames have been read then
        # switch to the next channel
        frame = tiffdata_element_key % self.frames
        # NOTE(review): "/" is integer division only on Python 2; on
        # Python 3 this would yield a float index — confirm target version.
        channel = tiffdata_element_key / self.frames
        tiffdata_element = tiffdata_elements[tiffdata_element_key]
        bin_attrib = tiffdata_element.attrib
        if self.bintagname == "BinData":
            compression = None
            if 'Compression' in bin_attrib:
                compression = bin_attrib['Compression']
            else:
                pass
            dtype = self.data_type
            # decode base64 data
            stringio_in = StringIO.StringIO(tiffdata_element.text)
            stringio_out = StringIO.StringIO()
            base64.decode(stringio_in, stringio_out)
            if compression:
                # payload is zlib-compressed after base64 decoding
                image_data = numpy.fromstring(
                    zlib.decompress(stringio_out.getvalue()),
                    dtype).astype('float32')
            else:
                image_data = numpy.fromstring(
                    stringio_out.getvalue(), dtype).astype('float32')
        # elif self.bintagname == "TiffData":
        #     ifd = int(bin_attrib["IFD"])
        #     self.pil_image.seek(ifd)
        #     image_data = numpy.array(self.pil_image.getdata(),'float')
        # Need to read image dimension from PixelAttribute as they are
        # different for different image types
        image_width = int(
            self.images[image_type]["PixelAttributes"]["SizeX"])
        image_height = int(
            self.images[image_type]["PixelAttributes"]["SizeY"])
        image_data.shape = (image_height, image_width)
        data_array[channel][frame] = image_data
    self.logger.info("\nRead %i channels\n%i frames in each \
channel\n%ix%i pixels in each frame\n%i MB for \
entire array\n" % (self.channels, self.frames, self.image_width,
                   self.image_height, data_array.nbytes / 1024 ** 2))
def __call__(self, path, target):
    """Decode C{path} into C{target} using the C{base64} module.

    base64.decode() operates on file objects, not path strings, so the
    source and destination are opened explicitly (the original passed the
    path strings straight through, which raises at runtime).
    """
    import base64
    cwd = os.getcwd()
    out = self._make_target_filename(path, target, self.src_ext)
    try:
        os.chdir(target)
        with open(path, 'rb') as src, open(out, 'wb') as dst:
            base64.decode(src, dst)
    finally:
        # always restore the working directory
        os.chdir(cwd)
def decode_file_complicated(source_fd, target_fd):
    """Base64-decode *source_fd* into a scratch buffer, then hex-decode each
    resulting line and write the binary lines to *target_fd*.

    Returns False when either file object is already closed, else None.

    Fixes over the original: the undefined name ``false`` (NameError),
    the missing rewind before reading the scratch file (the loop never
    saw any data), the Python-2-only ``file()`` builtin, and the
    leftover scratch file in the working directory.
    """
    import tempfile
    if source_fd.closed or target_fd.closed:
        return False
    with tempfile.TemporaryFile("w+b") as scratch:
        base64.decode(source_fd, scratch)
        # rewind: the original read from the write position and got nothing
        scratch.seek(0)
        # strip the newline before hex-decoding each line
        decoded = [binascii.a2b_hex(line.strip()) for line in scratch.readlines()]
    target_fd.writelines(decoded)
def test_decode(self):
    """base64.decode round-trips a vector; text-mode files are rejected."""
    from io import BytesIO, StringIO
    encoded = BytesIO(b'd3d3LnB5dGhvbi5vcmc=')
    decoded = BytesIO()
    base64.decode(encoded, decoded)
    self.assertEqual(decoded.getvalue(), b'www.python.org')
    # Non-binary files
    self.assertRaises(TypeError, base64.encode, StringIO('YWJj\n'), BytesIO())
    self.assertRaises(TypeError, base64.encode, BytesIO(b'YWJj\n'), StringIO())
    self.assertRaises(TypeError, base64.encode, StringIO('YWJj\n'), StringIO())
def zoneinfo(tmpdir_factory):
    """A fake zoneinfo tree built from base64-encoded tzfile blobs."""
    tmpdir = tmpdir_factory.mktemp('zoneinfo')
    zones = (('America/New_York', New_York),
             ('Australia/Lord_Howe', Lord_Howe))
    for zone, blob in zones:
        target = tmpdir.ensure(zone)
        with target.open('bw') as sink:
            base64.decode(io.BytesIO(blob), sink)
    with tmpdir.join('zone.tab').open('w') as f:
        f.write(ZONE_TAB)
    return tmpdir
def do_decode(self, inpath):
    """Read base64-encoded JSON from *inpath* and return (data, inpath).

    base64.decode() writes raw bytes, so the scratch buffer must be a
    BytesIO: the original text StringIO raises TypeError on Python 3.
    """
    from io import BytesIO
    decoded = BytesIO()
    with open(inpath, 'r') as infile:
        base64.decode(infile, decoded)
    decoded.seek(0)
    return json.load(decoded), inpath
def decodeBase64(infileName, mimeType="", tmpDir=None):
    """Decode the base64 file *infileName* into a fresh temp file and
    return the temp file's name.

    The temp file suffix comes from getFileExtension(mimeType); the caller
    owns (and must delete) the created file.  Binary modes are required:
    base64.decode() reads raw lines and writes raw bytes (the original
    text modes break under Python 3), and the with-blocks close the
    handles even when decoding raises (the original leaked them).
    """
    tmpFile = tempfile.NamedTemporaryFile(prefix="base64",
                                          suffix=getFileExtension(mimeType),
                                          dir=tmpDir, delete=False)
    tmpFile.close()
    with open(infileName, 'rb') as infile:
        with open(tmpFile.name, 'wb') as outfile:
            base64.decode(infile, outfile)
    return tmpFile.name
def _decrypt_file_symmetric(self, encrypted_file, plaintext_file, encryption_key):
    """Base64-decode *encrypted_file*, symmetrically decrypt it into
    *plaintext_file*, and return the checksum reported by decrypt().

    (Python 2 code: uses the ``file()`` builtin.)
    """
    base64_fp = file(encrypted_file)
    encrypted_fp = tempfile.TemporaryFile()
    base64.decode(base64_fp, encrypted_fp)
    # rewind so decrypt() reads the freshly written ciphertext
    encrypted_fp.flush()
    encrypted_fp.seek(0)
    plaintext_fp = file(plaintext_file, "wb")
    _, checksum = decrypt(encrypted_fp, plaintext_fp, encryption_key)
    plaintext_fp.close()
    encrypted_fp.close()
    base64_fp.close()
    return checksum
def read_manager_deployment_dump_if_needed(manager_deployment_dump):
    """Extract the base64-encoded tar.gz manager deployment dump into the
    workdir, unless the dump is empty or already extracted.

    Returns True when an extraction happened, False otherwise.
    """
    name = 'manager'
    if not manager_deployment_dump:
        return False
    if os.path.exists(os.path.join(_workdir(), name)):
        return False
    encoded = StringIO(manager_deployment_dump)
    encoded.seek(0)
    archive = BytesIO()
    base64.decode(encoded, archive)
    archive.seek(0)
    with tarfile.open(fileobj=archive, mode='r:gz') as tar:
        tar.extractall(_workdir())
    return True
def main(folder_from, folder_to, is_encode):
    """Recursively base64-encode (is_encode=True) or decode every file
    under *folder_from* into a mirrored tree at *folder_to*."""
    if not os.path.exists(folder_to):
        os.mkdir(folder_to)
    for entry in os.listdir(folder_from):
        src_path = os.path.join(folder_from, entry)
        dst_path = os.path.join(folder_to, entry)
        if os.path.isdir(src_path):
            # mirror subdirectories by recursing
            main(src_path, dst_path, is_encode)
            continue
        with open(src_path, 'rb') as src:
            with open(dst_path, 'wb') as dst:
                codec = base64.encode if is_encode else base64.decode
                codec(input=src, output=dst)
def read_manager_deployment_dump_if_needed(manager_deployment_dump):
    """Unpack the base64-encoded tar.gz deployment dump into the workdir.

    Skips the work (returning False) when the dump is empty or the
    environment directory already exists; returns True after extracting.
    """
    if not manager_deployment_dump:
        return False
    name = _ENV_NAME
    if os.path.exists(os.path.join(_workdir(), name)):
        return False
    source = StringIO(manager_deployment_dump)
    source.seek(0)
    raw = BytesIO()
    base64.decode(source, raw)
    raw.seek(0)
    with tarfile.open(fileobj=raw, mode='r:gz') as tar:
        tar.extractall(_workdir())
    return True
def _get_grafana_creds(self):
    """
    Pull kubernetes secret for grafana and return unencrypted username
    and password.

    Returns the tuple (user, pw) on success; on an ApiException the error
    is printed and None is returned implicitly.
    """
    api_instance = kubernetes.client.CoreV1Api()
    try:
        api_response = api_instance.read_namespaced_secret(
            "grafana", self.ns)
        # kubernetes secret values arrive base64-encoded
        user = decode(api_response.data["admin-user"])
        pw = decode(api_response.data["admin-password"])
        return user, pw
    except ApiException as e:
        print(
            "Exception when calling CoreV1Api->read_namespaced_secret: %s\n" % e)
def decodeBase64(self, infileName, mimeType=""):
    """Decode the base64 file *infileName* into a fresh temp file and
    return the temp file's name; the caller owns the created file.

    NamedTemporaryFile(delete=False) replaces the race-prone, deprecated
    tempfile.mktemp(); binary modes are required by base64.decode() under
    Python 3, and the with-blocks close the handles even on error.
    """
    tmp = tempfile.NamedTemporaryFile(prefix="base64",
                                      suffix=self.getFileExtension(mimeType),
                                      delete=False)
    tmp.close()
    with open(infileName, 'rb') as infile:
        with open(tmp.name, 'wb') as outfile:
            base64.decode(infile, outfile)
    return tmp.name
def encrypt_decrypt(item, encrypt=True):
    """
    Encrypt or decrypt the files with base64
    if encrypt is true, encrypt the files, else decrypt the files
    """
    print(item)
    # encrypted files carry a ".betty" suffix; decrypting strips those 6 chars
    dest_filename = item + ".betty" if encrypt is True else item[0:-6]
    with open(item, 'rb') as src:
        with open(dest_filename, 'wb') as dest:
            codec = base64.encode if encrypt == True else base64.decode
            codec(src, dest)
    # carry the source permissions over, then drop the source
    os.chmod(dest_filename, os.stat(item)[0])
    os.remove(item)
def get_response(self, sock):
    """Drain *sock* and return the reply's data payload.

    Replies are either the "rid:api:sig:<base64 json>" envelope or plain
    JSON.  On a truthy status the request id is removed from redis and the
    data returned; otherwise False.  (Python 2 code: print statements.)
    """
    dat = ""
    tmp = " "
    while True:
        tmp = str(sock.recv(1024))
        if tmp == "":  # peer closed the connection
            break
        dat = "%s%s" % (dat, tmp)
    if dat != "":
        try:
            rid,api,sig,resp = dat.split(":")
            d = json.loads(decode(resp))
        except ValueError:
            # not the colon-separated envelope; assume raw JSON
            print "> dat:",dat
            d = json.loads(dat)
        if d['status']:
            self.redis.delete(rid)
            return d['data']
    return False
def checkPassword(challenge_password, password):
    """Verify *password* against an SSHA-style challenge value
    (6-char scheme prefix followed by base64(digest + salt))."""
    blob = decode(challenge_password[6:])
    digest, salt = blob[:20], blob[20:]
    hasher = hashlib.sha1(password)
    hasher.update(salt)
    return digest == hasher.digest()
def get_response(self, sock):
    """Drain *sock* and return the reply's data payload (debug variant).

    Same protocol as the sibling implementation: replies are either the
    "rid:api:sig:<base64 json>" envelope or plain JSON; a truthy status
    removes the request id from redis and returns the data, otherwise
    False.  (Python 2 code: print statements.)
    """
    dat = ""
    tmp = " "
    print "> above while"
    while True:
        tmp = str(sock.recv(1024))
        if tmp == "":  # peer closed the connection
            break
        dat = "%s%s" % (dat, tmp)
    print "> after while"
    if dat != "":
        try:
            rid,api,sig,resp = dat.split(":")
            d = json.loads(decode(resp))
        except ValueError:
            # not the colon-separated envelope; assume raw JSON
            print "> dat:",dat
            d = json.loads(dat)
        if d['status']:
            self.redis.delete(rid)
            return d['data']
    return False
def get_file_content(github_id, path, ref="master"):
    """Return the decoded content of *path* at *ref* in the repository
    identified by *github_id*.

    A missing repository is surfaced as FileNotFoundError.
    """
    try:
        project = get_project(github_id)
    except RepositoryNotFoundError as err:
        raise FileNotFoundError(err)
    return decode(project.get_contents(path, ref).raw_data["content"])
def parse_input(self, content):
    """Parse the data as received from the web form and split it into rows.

    :param content: base64-encoded string
    :returns: list(dict(name: value)), sorted by the numeric row number
    """
    # Temp files keep memory usage flat for large uploads.
    with tempfile.TemporaryFile() as encoded:
        encoded.write(content)
        with tempfile.NamedTemporaryFile() as plain:
            encoded.seek(0)
            base64.decode(encoded, plain)
            plain.seek(0)
            rows = self._parse_input_decoded(plain)
    rows.sort(key=lambda row: int(row[u'numéro']))
    return rows
def _ecb_break_byte_at_a_time_sample(plaintext):
    """Oracle for the byte-at-a-time ECB break: ECB-encrypts
    plaintext || secret under the function's fixed secret key."""
    secret_str = (
        "Um9sbGluJyBpbiBteSA1LjAKV2l0aCBteSByYWctdG9wIGRvd24gc28gbXkg"
        "aGFpciBjYW4gYmxvdwpUaGUgZ2lybGllcyBvbiBzdGFuZGJ5IHdhdmluZyBq"
        "dXN0IHRvIHNheSBoaQpEaWQgeW91IHN0b3A/IE5vLCBJIGp1c3QgZHJvdmUg"
        "YnkK"
    )
    key = _ecb_break_byte_at_a_time_sample.secret_key
    # base64.b64decode decodes a string; base64.decode (the original call)
    # takes two *file* objects and raised TypeError here.
    return encrypt_ecb_128(plaintext + base64.b64decode(secret_str), key)
def GetTargetFromMail(): im = imaplib.IMAP4_SSL(Mail_Imap, Mail_ImapPort) # Verbindung aufbauen # falls gmail verwendet wird, muss dort Zugriff von unsicheren Apps # erlaubt werden, sonst muesste xauth2 implementiert werden im.login(Mail_User, Mail_Paswd) ### LOGIN im.select("INBOX") mailids = im.search(None, '(SUBJECT "Gesetzte Markierung")') # IDs liegen an zweiter Listenstelle, wiederum als Liste mailid_max = None for id in mailids[1][0].split(): # Maximale ID herausfinden mailid_max = id if mailid_max == None: return None mail_content = im.fetch(mailid_max, "(BODY[TEXT])") if re.search(r"base64", str(mail_content)) == "base64": # falls E-Mail base64-codiert ist mail_content = base64.decode(mail_content) print("base64") for id in mailids[1][0].split(): # Alle Mails mit den besagten IDs loeschen im.store(id, '+FLAGS', '\\Deleted') im.expunge() im.logout() ### LOGOUT shortlink_match = re.search('https://goo.gl.*"', str(mail_content)) # Shortlink auslesen shortlink = re.sub('"', '', shortlink_match.group(0)) # kuerzen des Stringteils des Matchobjekts coordinates = [0][0] coordinates = mdgps.GetGooglePosition(shortlink) return coordinates
def decode_id(encoded, secret):
    """Recover the original integer id by XOR-ing the decoded value of
    *encoded* with the (normalised) *secret*."""
    encoded_value = num_decode(decode(encoded))
    key = str(secret)
    if not key.isdigit():
        # non-numeric secrets are themselves encoded; normalise first
        key = num_decode(key)
    return encoded_value ^ key
def decrypt_payload(self, payload):
    """Repair the base64 padding of *payload*, decode it and parse the
    result as JSON; logs and returns the parsed object."""
    padding = '=' * (-len(payload) % 4)  # Fix incorrect padding of base64 string.
    content = decode((payload + padding).encode())
    payload = loads(content.decode("utf-8"))
    debug_log.info("Decrypted payload is:")
    debug_log.info(payload)
    return payload
def get_credentials(api_file=here+'/telegram_bot.private'):
    """
    Read the base64-encoded bot token from *api_file* and return it decoded
    as a UTF-8 string (first line of the file).
    """
    # with-block closes the file — the original leaked the handle
    with open(api_file, 'r') as fh:
        api_key = fh.read().strip().splitlines()
    bot_token = decode(api_key[0]).decode('utf-8')
    return bot_token
def decode_base64(s):
    """Base64-decode *s* and return the raw bytes.

    :param s: base64 text (str or ASCII bytes)
    :return: decoded bytes

    The original called base64.decode(str(s)), but base64.decode() takes
    two *file* objects and always raised TypeError here; b64decode is the
    string/bytes API.
    """
    return base64.b64decode(s)
def decryptthing(k,thing):
    """Base64-decode *thing*, split off the nonce, and print the AES-GCM
    decryption under key *k*.

    Layout assumption (review): bytes 0-1 are skipped, bytes 2-13 form a
    12-byte GCM nonce, the remainder is ciphertext — confirm against the
    encrypting side.  No authentication tag is verified here.
    """
    thing = decode(thing)
    n = 14
    iv = thing[ 2:n ]
    xcrypted = thing[ n: ]
    a=AES.new(k, AES.MODE_GCM, iv)
    print ( a.decrypt(xcrypted) )
def _check_password(self, challenge_password, password):
    """Check *password* against an SSHA-style challenge value
    (6-char scheme prefix followed by base64(digest + salt))."""
    raw = decode(challenge_password[6:])
    digest, salt = raw[:20], raw[20:]
    hasher = hashlib.sha1(password)
    hasher.update(salt)
    return digest == hasher.digest()
def processFile(file, thumbSize, baseEncoded): #log.debug('got file %s, dict: %s, test: %s' %(file, file.__dict__, isinstance(file, FieldStorageLike))) if isinstance(file, cgi.FieldStorage) or isinstance( file, FieldStorageLike): name = unixTs() ext = file.filename.rsplit('.', 1)[:0:-1] #ret: [FileHolder, PicInfo, Picture, Error] # We should check whether we got this file already or not # If we dont have it, we add it if ext: ext = ext[0].lstrip(os.sep).lower() else: # Panic, no extention found #ext = '' #return [False, False, False, _("Can't post files without extension")] raise NoExtension() # Make sure its something we want to have extParams = Extension.getExtension(ext) if not extParams or not extParams.enabled: #return [False, False, False, _(u'Extension "%s" is disallowed') % ext] raise ExtensionDisallowed(ext) relativeFilePath = h.expandName('%s.%s' % (name, ext)) localFilePath = os.path.realpath( os.path.join(meta.globj.OPT.uploadPath, relativeFilePath)) targetDir = os.path.dirname(localFilePath) #log.debug(localFilePath) #log.debug(targetDir) try: if not os.path.exists(targetDir): os.makedirs(targetDir) localFile = open(localFilePath, 'w+b') if not baseEncoded: shutil.copyfileobj(file.file, localFile) else: base64.decode(file.file, localFile) localFile.seek(0) md5 = hashlib.md5(localFile.read()).hexdigest() localFile.close() except Exception, e: log.error("Exception '%s' while saving file to '%s'" % (str(e), localFilePath)) raise CantWriteExc(str(e)) finally:
def index(url):
    """Append all posted form fields to log.txt, then redirect to the
    base64-decoded *url*.

    SECURITY(review): redirecting to a caller-supplied, decoded URL is an
    open-redirect vector — validate the target before shipping this.
    """
    f = open("log.txt", "a+")
    for key, value in request.form.items():
        f.write("{}: {}\n".format(key, value))
    f.write("==============\n")
    f.close()
    return redirect(decode(url))
def check_password(pass_hash, password):
    """Check password against SSHA hashed password.

    *pass_hash* is a 6-char scheme prefix followed by
    base64(sha1_digest + salt).
    """
    decoded = decode(pass_hash[6:])
    digest, salt = decoded[:20], decoded[20:]
    hasher = hashlib.sha1(password.encode('utf8'))
    hasher.update(salt)
    return hasher.digest() == digest
def decodeDescription(cred):
    """Decode the credential's short description and squeeze it into an
    uppercase 8.3-style name (get the most out of the 8.3 limitation)."""
    text = decode(cred['short_description']).decode('utf-8').upper()
    if len(text) <= 8:
        return text
    return text[:8] + '.' + text[8:11]
def decrypt_payload(self, payload):
    """Repair the base64 padding of *payload*, decode it, and JSON-parse
    the result twice (the payload is double-encoded)."""
    payload += '=' * (-len(payload) % 4)  # Fix incorrect padding of base64 string.
    raw = decode(payload.encode())
    return loads(loads(raw.decode("utf-8")))
def decodeBase64(infileName, mimeType="", tmpDir=None):
    """Decode the base64 file *infileName* into a fresh temp file under
    *tmpDir* and return the temp file's name; the caller owns the file.

    Binary modes are required: base64.decode() reads raw lines and writes
    raw bytes (the original text modes break under Python 3), and the
    with-blocks close the handles even when decoding raises.
    """
    tmpFile = tempfile.NamedTemporaryFile(
        prefix="base64",
        suffix=getFileExtension(mimeType),
        dir=tmpDir,
        delete=False)
    tmpFile.close()
    with open(infileName, 'rb') as infile:
        with open(tmpFile.name, 'wb') as outfile:
            base64.decode(infile, outfile)
    return tmpFile.name
def getvalue(self):
    """Return the file's contents, caching them in self.__sBuffer.

    First call: marks the cache as being filled ('' sentinel), recurses
    once to populate it, then downloads + decrypts the file and returns
    its text ('r' mode) or a decoded copy ('rb' mode).  Later calls hit
    the cache.

    NOTE(review): neither mF nor b64 is rewound after being written, so
    the final read() calls start at the write position and may return ''
    unless __mFile auto-rewinds — confirm __mFile semantics.
    """
    if self.__sBuffer == None:
        # sentinel: mark the cache as "being filled" and recurse to fill it
        self.__sBuffer = ''
        self.__sBuffer = self.getvalue()
        return self.__sBuffer
    elif len(self.__sBuffer) > 0:
        return self.__sBuffer
    # fetch from the cloud provider and decrypt into an in-memory file
    mF = self.__mFile()
    mF.write(self.__EncryptionProvider().decodeFile(
        self.__CloudProvider().getFileFromURL(self.name),
        self.__encryptionKey))
    if self.mode == 'r':
        return mF.read()
    if self.mode == 'rb':
        b64 = self.__mFile()
        base64.decode(mF, b64)
        return b64.read()
def __init__(self, filepath, name):
    """Load the *name* section of the YAML config at *filepath* into the
    account's connection attributes."""
    with open(filepath) as fh:
        section = safe_load(fh)[name]
    self._username = section['username']
    # the password is stored base64-encoded at rest
    self._password = decode(section['password']).decode('utf-8')
    self._address = section['address']
    outgoing = section['outgoing']
    self._host = outgoing['host']
    self._port = outgoing['port']
def breakpkg(pkg, path):
    """Unpack a base64-encoded package: decode *pkg* into a Python module
    defining ``arc`` (a dict of filename -> base64 data) and write every
    decoded member into *path*.

    Fixes over the original: hard-coded "tmpex\\" Windows separators are
    replaced with os.path.join, all file handles are closed via
    with-blocks, and the pointless bare-except alias is dropped.
    """
    with open(pkg, 'rb') as src, open("tmp.py", 'wb') as dst:
        base64.decode(src, dst)
    arc = __import__("tmp").arc
    tmpdir = "tmpex"
    try:
        os.mkdir(tmpdir)
    except OSError:
        pass  # scratch dir already exists
    for member in arc:
        stem = member.split(".")[0]
        b64_path = os.path.join(tmpdir, stem + ".b64")
        with open(b64_path, "w") as fh:
            fh.write(arc[member])
        with open(b64_path, 'rb') as src, \
                open(os.path.join(path, member), 'wb') as dst:
            base64.decode(src, dst)
    shutil.rmtree(tmpdir, ignore_errors=True)
    os.remove("tmp.py")
def index():
    """Append the base64-decoded "data" form field to the log file."""
    # NOTE(review): "log,txt" looks like a typo for "log.txt" — confirm
    # before changing, since the filename is runtime behaviour.
    log = open("log,txt", "ab+")
    payload = request.form.get("data")
    log.write(decode(payload))
    log.write(b"==================\n")
    log.close()
    return "Done."
def getSessionToken(self, payload):
    """Validate XML document and extract authentication token from the
    payload; returns None on validation failure or a missing token."""
    error, xml_data = validateDocXML(payload)
    if error is True:
        return None
    token = xml_data.find('token')
    return None if token is None else decode(token.text)
def read(self, name):
    """Return the raw bytes of the file entry called *name*.

    Entries with an inline 'content' value store it base64-encoded;
    other entries are read from disk relative to self.root.
    Raises Exception when no entry matches.

    The original returned base64.decode(f.content): base64.decode() is
    the two-file-object API, and the dict has no ``content`` attribute —
    b64decode of the already-fetched value is what works.
    """
    for entry in self.d['files']:
        if entry['name'] != name:
            continue
        value = entry.get('content')
        if value is not None:
            return base64.b64decode(value)
        with open(os.path.join(self.root, entry['path']), 'rb') as fh:
            return fh.read()
    raise Exception('no file named %s' % name)
def base64_decodefile(instr, outfile):
    r'''
    Decode a base64-encoded string and write the result to a file

    .. versionadded:: 2016.3.0

    CLI Example:

    .. code-block:: bash

        salt '*' hashutil.base64_decodefile instr='Z2V0IHNhbHRlZAo=' outfile='/path/to/binary_file'
    '''
    source = StringIO(instr)
    with salt.utils.files.fopen(outfile, 'wb') as sink:
        base64.decode(source, sink)
    return True
def breakpkg(pkg, path="."):
    """Unpack a base64-encoded JSON package: decode *pkg* into a JSON dict
    of filename -> base64 data and write every decoded member into *path*.

    Fixes over the original: hard-coded "tmpex\\" Windows separators are
    replaced with os.path.join, all file handles are closed via
    with-blocks, and the pointless bare-except alias is dropped.
    """
    path = os.path.abspath(path)
    with open(pkg, 'rb') as src, open("tmp", 'wb') as dst:
        base64.decode(src, dst)
    with open("tmp") as fh:
        arc = json.load(fh)
    tmpdir = "tmpex"
    try:
        os.mkdir(tmpdir)
    except OSError:
        pass  # scratch dir already exists
    for member in arc:
        stem = member.split(".")[0]
        b64_path = os.path.join(tmpdir, stem + ".b64")
        with open(b64_path, "w") as fh:
            fh.write(arc[member])
        with open(b64_path, 'rb') as src, \
                open(os.path.join(path, member), 'wb') as dst:
            base64.decode(src, dst)
    shutil.rmtree(tmpdir, ignore_errors=True)
    os.remove("tmp")