async def img(self, ctx, *search):
    """Look up an image matching *search* and post it to the channel.

    ``self.az.img`` returns ``(path, url)`` where *path* may be a filesystem
    path (str), a pre-built discord Embed, or a falsy value when nothing
    matched.  Archives (.zip/.cbz) are expanded and each member is sent as
    its own attachment; oversized images fall back to posting the URL.
    """
    async with ctx.typing():
        path, url = await self.bot.loop.run_in_executor(
            None, self.az.img, *search)
        # Only stat real filesystem paths: `path` may be a (truthy) Embed,
        # and os.stat() on an Embed raises TypeError.
        if isinstance(path, str) and path:
            # upload_limit is in MiB, st_size in bytes.
            size_ok = os.stat(path).st_size / 1024 / 1024 <= upload_limit
        else:
            size_ok = False
        logger.debug('img (%s) - %s', type(path), str(path))
        if not path:
            error = formatter.error(
                f'Could not find image matching: {search}')
            await ctx.send(error)
        elif not isinstance(path, str):
            # Non-str results are pre-built discord Embeds.
            await ctx.send(embed=path)
        elif path.rpartition('.')[2] in ('zip', 'cbz'):
            # Send each archive member individually; context managers close
            # the zip and member handles even if a send fails.
            with zipfile(path, 'r') as zf:
                for fl in zf.filelist:
                    with zf.open(fl.filename) as f:
                        await ctx.send(file=discord.File(f, fl.filename))
        elif path.rpartition('.')[2] in ('gif', 'png', 'jpg', 'jpeg') and size_ok:
            await ctx.send(file=discord.File(path))
        else:
            # Too large (or unknown extension): post the URL instead.
            await ctx.send(url)
def extractArchive(fileName):
    """Extract *fileName* into the current working directory.

    ``.tar.gz`` archives are handled by :mod:`tarfile`; every other
    extension is treated as a zip archive.
    """
    ext = ''.join(path(fileName).suffixes)
    if ext == ".tar.gz":
        # Context manager closes the tar handle (original leaked it).
        with tarfile.open(fileName) as archive:
            archive.extractall()
    else:
        with zipfile(fileName, 'r') as archive:
            archive.extractall()
def unzip(zipName, bname):
    """Extract *zipName* into directory *bname*.

    Directory entries are created explicitly; file members are extracted
    relative to *bname*.  The caller's working directory is restored even
    if extraction fails (the original left the process chdir'd on error,
    and leaked the zip handle).
    """
    cpath = getcwd()
    # Resolve before chdir so a relative zipName still opens correctly.
    with zipfile(path.realpath(zipName)) as z:
        chdir(bname)
        try:
            for f in z.namelist():
                if f.endswith('/'):
                    if not path.exists(f):
                        makedirs(f)
                else:
                    z.extract(f)
        finally:
            chdir(cpath)
def unzip(zipName, bname):
    """Unpack *zipName* into *bname*, then return to the starting directory.

    Directory members are created with makedirs; everything else is
    extracted relative to *bname*.
    """
    archive = zipfile(path.realpath(zipName))
    saved_dir = getcwd()
    chdir(bname)
    for member in archive.namelist():
        if not member.endswith('/'):
            archive.extract(member)
        elif not path.exists(member):
            makedirs(member)
    chdir(saved_dir)
def export_stream(model, **kwargs):
    """Serialize a Course *model* to an IMSCC zip held in a seekable stream.

    Raises NotImplementedError for any non-Course model.  Returns the
    stream rewound to position 0.
    """
    stream = BufferedWriter()
    if not isinstance(model, Course):
        raise NotImplementedError(
            'cannot export anything else than a course model ({} provided)'
            .format(model.__class__.__name__))  # was __name_: AttributeError
    # zipfile.ZipFile (not zipfile.zipfile) and explicit 'w' mode are
    # required to write into the stream; the with-block also flushes the
    # central directory before stream.seek(0).
    with zipfile.ZipFile(stream, 'w') as zfile:
        zfile.write(export_meta(model), IMSCC_MANIFEST_FILENAME)
        file_output(model, zfile)
        qtis = course_xmlqti_builder(model)  # we assume it is a course model
        # NOTE(review): identical call as `qtis`; presumably this should
        # build discussions instead — confirm against the builder's API.
        discussions = course_xmlqti_builder(model)
    stream.seek(0)
    return stream
def main():
    """Parse -f/-d options and try each dictionary word against the zip."""
    parser = optparse.OptionParser('usage%prog ' + '-f <zipfile> -d <dictionary>')
    parser.add_option('-f', dest='zname', type='string', help='specify zip file')
    parser.add_option('-d', dest='dname', type='string', help='specify dictionary file')
    (options, args) = parser.parse_args()
    # `is None or` instead of the original `(x == None) | (y == None)`:
    # bitwise | on comparison results is non-idiomatic and non-short-circuiting.
    if options.zname is None or options.dname is None:
        print(parser.usage)
        exit(0)
    zname = options.zname
    dname = options.dname
    zfile = zipfile.ZipFile(zname)  # ZipFile: zipfile.zipfile does not exist
    with open(dname) as passfile:  # close the wordlist when done
        for line in passfile:
            password = line.strip('\n')
            # Pass the callable itself; the original called extractfile()
            # immediately and handed Thread its return value as `target`.
            t = Thread(target=extractfile, args=(zfile, password))
            t.start()
import zipfile

# Create archive.zip containing the three text files from the current
# directory.  zipfile.ZipFile is the correct class name — the original
# zipfile.zipfile raises AttributeError; the with-block replaces the
# manual close().
with zipfile.ZipFile("archive.zip", "w") as zfile:
    zfile.write("file1.txt")
    zfile.write("file2.txt")
    zfile.write("file3.txt")
import os, shutil, zipfile, sys

# Version tag for the release archive: joined CLI args, or 'beta' if none.
if len(sys.argv) < 2:
    ver = 'beta'
else:
    ver = '-'.join(sys.argv[1:])

if not os.path.isdir('release'):
    os.mkdir('release')

# zipfile.ZipFile (capitalized) is the actual class; the with-block also
# guarantees the archive is finalized.
with zipfile.ZipFile('release/release-%s.zip' % ver, 'w') as package:
    package.write('LICENSE')
    package.write('README.md')
    for example in ['test.py', 'primes.py']:
        # Close the source handle (the original leaked it), then rewrite
        # the dev import to the packaged module path before storing.
        with open(example, 'rb') as src:
            data = src.read()
        data = data.replace(b'from bin import ComputeShader',
                            b'from modules import computeshader')
        package.writestr(example, data)
    package.write('bin/computeshader.pyd', 'modules/computeshader.pyd')
# NOTE(review): fragment is collapsed onto one line and truncated
# mid-statement (`while True: h = str(` has no closing paren), so it is not
# valid Python as shown.  It also uses Python-2-only raw_input and
# urllib.urlretrieve, and the `if h == h.lower()` check treats ANY answer
# without uppercase letters as "download tools" — presumably it was meant to
# compare against a specific choice; confirm intent before fixing.
# Code left byte-identical pending a verified rewrite.
print("Copying into miso..") os.system("xcopy /s /q ogiso miso") h = str( raw_input( "Do you plan using this in (b)asic mode or (c)reation mode? (B OR C)> " )) h = str(h) + "\n" con1.write(h) if h == h.lower(): print("Downloading tools..") urllib.urlretrieve("https://mgrich.github.io/storage/tools.zip", "temp/tools.zip") os.chdir("temp") print("Extracting..") from zipfile import ZipFile as zipfile zipfile('tools.zip').extractall() os.remove('tools.zip') os.chdir("..") print("Moving..") os.mkdir("tools") os.system("xcopy /s /q temp tools") os.system("rmdir /Q /S temp") os.chdir("config") con2 = open("create.conf", 'w+') print( "Done with tools.\nChanging to basic mode will not get rid of the tools." ) print("") os.chdir("..") while True: h = str(
# NOTE(review): this block is not valid Python as written and needs a full
# rewrite before it can run; code left byte-identical.  Observed defects:
#   - `if rv != 'OK'` appears twice with no colon (SyntaxError);
#   - `M.fetch(number '(RFC822)')` is missing a comma;
#   - `monitoriter.info("Todays date and time",%i)` is a syntax error;
#   - str.endswith is misspelled `endswitch`; `get_payload(Decode=True)`
#     should be `decode=True`; `zipfile.zipfile` should be zipfile.ZipFile;
#   - `received` is read before it is ever assigned;
#   - the tar.gz branch extracts via `zip_reff` instead of `tar`, and the
#     "failed to extract" logging/return runs unconditionally before the try;
#   - the second tar.gz elif duplicates the first and is unreachable.
def file_content(M): global MD5 global SHA1 global SHA256 global SH1512 global URL global sender global number global attachment_location global sampleFile_name global sql_light_file log.info('Looking for emails') rv, data = M.search(None,'All') if rv != 'OK' log.debug('No new emails have been found') return rv,data = M.fetch(number,'(RFC822)') #http://jkorpela.fi/rfc/822addr.html if rv != 'OK' log.debug('Nouthing') return log.info("searching for the most recent email") rv,data = M.fetch(number '(RFC822)') if rv != 'OK': log.debug("|Error finding the email sorry", number) return #gets email and makes it to a tring format print("making a string of the email") msg = data[0][1] raw_email_string = msg.decode('utf-8') email_message = email.message_from_string(raw_email_string) #https://docs.python.org/3/library/email.parser.html #header to stirng sender = re.findall('.*?\<(.*?)\>.*?', received) #https://docs.python.org/2/library/email.parser.html sender = sender[0] monitor.info("Email received from: %s", sender) i = datetime.now() #current date and time monitoriter.info("Todays date and time",%i) #walks over email header for attachments for part in email_message.walk(): if part.get('Content-Dispoition') is not None: if 'attachment;' in part.get('Content-Disposition'): log.info('No inline attachments') #downlaod lication for the email attachments download_dir ='/somewhere_fam' sampleFile_name = part.get_filename() attachment_location = os.path.join(download_dir,sampleFile_name) monitoriter.info("attachment Name:",%s,sampleFile_name) log.debug('Opening file content writng to the virtual machies folder',attachment_location) fp = open(attachment_location,'wb') fp.write(part.get_payload(Decode=True)) #https://docs.python.org/2/library/email.message.html fp.close() if attachment_location.endswitch((".7z")) log.info("checks if file has an attachment if so extract and paswrd it to stop detenation") zip_reff = zipfile.zipfile(attachment_location,'r') password = "******"
sampleFile_name = zip_reff.namelist() #https://docs.python.org/2/library/zipfile.html sampleFile_name = sampleFile_name[0] monitor.info("failed to extract") extraction_failture() return try: zip_reff.extract(member=sampleFile_name,path =download_dir,password ="******") except RuntimeError: log.debug("Extraction has failed") extraction_failture() return zip_reff.close() attachment_location = os.path.join(download_dir,sampleFile_name) elif attachment_location.endswitch(("tar.gz")) or attachment_location.endswitch(("tar")): log.info("if the file has bene comparessed it can be passed and extracted to the user") tar = tarfile.open(attachment_location,'r:') sampleFile_name = tar.get_filename() monitor.info("Extracted sample",sampleFile_name[0]) try: zip_reff.extract(number=sampleFile_name,path=download_dir,pwd="password") except RuntimeError: log.debug("Extracting file has failed") extraction_failture() return zip_reff.close() attachment_location = os.path.join(download_dir,sampleFile_name) elif attachment_location.endswitch(("tar.gz")) or attachment_location.endswitch(("tar")): log.info("extracting the file into a tar file") tar = tarfile.open(attachment_location,"r:") sampleFile_name = tar.get_filename() monitor.info("File Extratced name %s",sampleFile_name[0]) try: tar.extractall(download_dir) except RuntimeError: log.debug("Extracting file faied") extraction_failture() tar.close() attachment_location = os.path.join(download_dir,sampleFile_name[0]) cuckoo_submission() #goes to func where it is handeled correctly return
'''Problem 11: Write a python program zip.py to create a zip file. The program should take name of zip file as first argument and files to add as rest of the arguments.'''
import sys
import zipfile

# argv[1] names the archive; argv[2:] are the files to store in it.
directory = sys.argv[1]
# zipfile.ZipFile is the class — zipfile.zipfile raises AttributeError —
# and the with-block finalizes the archive (the original never closed it).
with zipfile.ZipFile(directory, 'w') as z:
    for fname in sys.argv[2:]:  # idiomatic slice instead of range(2, len(argv))
        z.write(fname)
# NOTE(review): this block is garbled and not valid Python as written; code
# left byte-identical.  Observed defects to address in a rewrite:
#   - `zipfile.zipfile` should be zipfile.ZipFile;
#   - `graph_def = tf.GraphDef` assigns the class, not an instance (missing ());
#   - `img = looctaves.append(hi)` looks like a fused `img = lo` +
#     `octaves.append(hi)`; `resize(low, hw)` uses undefined `low` (likely `lo`);
#   - the `for _ in range(iter_n)` gradient loop is duplicated, the first copy
#     never applying `g`;
#   - `render_deepdream(T(layer)[:,:,:,139] img0)` is missing a comma, and
#     `render_deepdream(tf.square)` drops the channel/objective arguments;
#   - the final print's string literal is split across the snippet boundary.
def main(): #Step 1 - downloading google's pre-trained neural network url = 'https://storage.googleapis.com/download.tensorflow.org/models/inception5h.zip' data_dir = '../data' model_name = os.path.split(url)[-1] local_zip_file = os.path.join(data_dir, model_name) if not os.path.exists(local_zip_file): #download model_url = urllib.request.urlopen(url) with open(local_zip_file, 'wb') as output: output.write(model_url.read()) #extract with zipfile.zipfile(local_zip_file, 'r') as zip_ref: zip_ref.extractall(data_dir) model_fn = 'tensorflow_inception_graph.pb' #Step 2 graph = tf.Graph() sess = tf.InteractiveSession(graph=graph) with tf.gfile.FastGFile(os.path.join(data_dir, model_fn), 'rb') as f: graph_def = tf.GraphDef graph_def.ParseFromString(f.read()) t_input = tf.placeholder(np.float32, name='input') #define input tensor imagenet_mean = 117.0 t_preprocessed = tf.expand_dims(t_input-imagenet_mean, 0) tf.import_graph_def(graph_def, {'input': t_preprocessed}) layers = [op.name for op in graph.get_operations() if op.type=='Conv2D' and 'import/' in op.name] feature_nums = [int(graph.get_tensor_by_name(name+':0').get_shape()[-1]) for name in layers] def render_deepdream(t_obj, img0=img_noise, iter_n = 10, step=1.5, octave_n=4, octave_scale=1.4): t_score =tf.reduce_mean(t_obj) #defining optimization objective t_grad = tf.gradients(t_score, t_input)[0] #splitting the image into a number of octaves img = img0 octaves = [] for _ in range(octave_n-1): hw = img.shape[:2] lo = resize(img, np.int32(np.float32(hw)/octave_scale)) hi = img-resize(low, hw) img = looctaves.append(hi) #generating details octave by octave for octave in range(octave_n): if octave>0: hi = octaves[-octave] img = resize(img, hi.shape[:2])+hi for _ in range(iter_n): g = calc_grad_tiled(img, t_grad) for _ in range(iter_n): g = calc_grad_tiled(img, t_grad) img += g*(step / (np.abs(g).mean()+1e-7)) #Step 5 - output deep dreamed image showarray(img/255.0) print('Number of layers', len(layers)) print('Total
number of feature channels', sum(feature_nums)) #Step 3 - Picking a layer to enhance the image layer = 'mixed4d_3x3_bottleneck_pre_relu' channel = 139 img0 = PIL.Image.open('pilatus800.jpg') img0 = np.float32(img0) #Step 4 - Applying gradient ascent to that layer render_deepdream(T(layer)[:,:,:,139] img0) #Higher level layer deep dream render_deepdream(tf.square)
# NOTE(review): duplicate/extended variant of the email-attachment handler;
# not valid Python as written, code left byte-identical.  Defects observed:
#   - `if rv != 'OK'` twice with no colon; `M.fetch(number '(RFC822)')`
#     missing a comma; `monitoriter.info(...,%i)` and
#     `monitoriter.info("attachment Name:",%s,...)` are syntax errors;
#   - `endswitch` for endswith, `Decode=True` for decode=True,
#     `zipfile.zipfile` for zipfile.ZipFile, `received` used unassigned;
#   - `log info(...)` missing the dot; `part.get_get_content_type()` is a
#     doubled method name;
#   - the second tar.gz elif is an unreachable duplicate and extracts via
#     `zip_reff` instead of `tar`;
#   - in the body-parsing section the `url is not None` branch logs and
#     submits md5 instead of the URL (copy/paste of the md5 branch).
def file_content(M): global MD5 global SHA1 global SHA256 global SH1512 global URL global sender global number global attachment_location global sampleFile_name global sql_light_file log.info('Looking for emails') rv, data = M.search(None, 'All') if rv != 'OK' log.debug('No new emails have been found') return rv, data = M.fetch(number, '(RFC822)') # http://jkorpela.fi/rfc/822addr.html if rv != 'OK' log.debug('Nothing') return log.info("searching for the most recent email") rv, data = M.fetch(number '(RFC822)') if rv != 'OK': log.debug("|Error finding the email sorry", number) return # gets email and makes it to a tring format print("making a string of the email") msg = data[0][1] raw_email_string = msg.decode('utf-8') email_message = email.message_from_string(raw_email_string) # https://docs.python.org/3/library/email.parser.html # header to stirng sender = re.findall('.*?\<(.*?)\>.*?', received) # https://docs.python.org/2/library/email.parser.html sender = sender[0] monitor.info("Email received from: %s", sender) i = datetime.now() #current date and time monitoriter.info("Todays date and time",%i) # walks over email header for attachments for part in email_message.walk(): if part.get('Content-Dispoition') is not None: if 'attachment;' in part.get('Content-Disposition'): log.info('No inline attachments') # downlaod lication for the email attachments download_dir ='/somewhere_fam' sampleFile_name = part.get_filename() attachment_location = os.path.join(download_dir,sampleFile_name) monitoriter.info("attachment Name:",%s,sampleFile_name) log.debug('Opening file content writng to the virtual machies folder',attachment_location) fp = open(attachment_location,'wb') fp.write(part.get_payload(Decode=True)) # https://docs.python.org/2/library/email.message.html fp.close() if attachment_location.endswitch((".7z")) log.info("checks if file has an attachment if so extract and paswrd it to stop detenation") zip_reff = zipfile.zipfile(attachment_location,'r') password =
"******" sampleFile_name = zip_reff.namelist() # https://docs.python.org/2/library/zipfile.html sampleFile_name = sampleFile_name[0] monitor.info("failed to extract") extraction_failture() return try: zip_reff.extract(member=sampleFile_name,path =download_dir,password ="******") except RuntimeError: log.debug("Extraction has failed") extraction_failture() return zip_reff.close() attachment_location = os.path.join(download_dir,sampleFile_name) elif attachment_location.endswitch(("tar.gz")) or attachment_location.endswitch(("tar")): log.info("if the file has bene comparessed it can be passed and extracted to the user") tar = tarfile.open(attachment_location,'r:') sampleFile_name = tar.get_filename() monitor.info("Extracted sample",sampleFile_name[0]) try: zip_reff.extract(number=sampleFile_name,path=download_dir,pwd="password") except RuntimeError: log.debug("Extracting file has failed") extraction_failture() return zip_reff.close() attachment_location = os.path.join(download_dir,sampleFile_name) elif attachment_location.endswitch(("tar.gz")) or attachment_location.endswitch(("tar")): log.info("extracting the file into a tar file") tar = tarfile.open(attachment_location,"r:") sampleFile_name = tar.get_filename() monitor.info("File Extratced name %s",sampleFile_name[0]) try: tar.extractall(download_dir) except RuntimeError: log.debug("Extracting file faied") extraction_failture() tar.close() attachment_location = os.path.join(download_dir,sampleFile_name[0]) cuckoo_submission() #goes to func where it is handeled correctly return #eats the main body of the email if part.get_get_content_type() == "text/plain": log info("makes body of the email readable") body = part.get_payload(Decode=True) #file hashes md5 = re.search(r'\b[0-9a-fA-F]{32}\b', body) # Regular Expression to select the 40 character long hexadecimal string (sha-1 Hash) sha1 = re.search(r'\b[0-9a-fA-F]{40}\b', body) # Regular Expression to select the 64 character long hexadecimal string (sha-256 Hash) sha256
= re.search(r'\b[0-9a-fA-F]{64}\b', body) # Regular Expression to select the 128 character long hexadecimal string (sha-512 Hash) sha512 = re.search(r'\b[0-9a-fA-F]{128}\b', body) # A regular expression to get any URL help within the body of the email url = re.search('(?<!<)http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+', body) if md5 is not None: log.info("MD5 hash contained within the email") md5 = md5.group(0) monitor.info("MD5 submitted: %s", md5) get_hash_report() return elif sha1 is not None: log.info("MD5 hash contained within the email") sha1 = sha1.group(0) monitor.info("sha1 submitted: %s", sha1) get_hash_report() return elif sha256 is not None: log.info("MD5 hash contained within the email") sha256 = sha256.group(0) monitor.info("MD5 submitted: %s", sha256) get_hash_report() return elif url is not None: log.info("MD5 hash contained within the email") md5 = md5.group(0) monitor.info("MD5 submitted: %s", md5) get_hash_report() return else: log.info("No hash, URL or attachment contained within the email") send_no_content()
# Open example.zip from the desktop test folder for inspection.
# NOTE(review): the archive handle is never closed (consider `with`) and the
# path is hard-coded to a Windows Administrator profile; `os` is imported
# but unused here — it may be used further down the file.
from zipfile import ZipFile as zipfile
from pathlib import Path as path
import os

p = path(r'C:\Users\Administrator\Desktop\test')
exampleZip = zipfile(p / 'example.zip')
def main():
    """Spawn a cracking thread for every word in the wordlist against pass.zip."""
    zFile = zipfile.ZipFile('pass.zip')  # ZipFile: zipfile.zipfile does not exist
    # NOTE(review): 'usr/share/wordlists.txt' is relative; presumably
    # '/usr/share/...' was intended — confirm before deploying.
    with open('usr/share/wordlists.txt', 'r') as wordlist:
        # The original iterated `wordlist.readlines` (the bound method, not
        # its result), which raises TypeError; iterate the file directly.
        for word in wordlist:
            t = Thread(target=crackFile, args=(zFile, word))
            t.start()
# NOTE(review): intricate git-sync + image-search command; code left
# byte-identical because exact statement order matters.  Issues to fix in a
# follow-up:
#   - `msg = f"navi auto add - {', '.join(unames)}: ..."` uses undefined
#     `unames` (NameError; the collected set is named `users`);
#   - `'{path} does not exist'` is missing the f-prefix, so the literal
#     braces are sent to chat;
#   - three bare `except:` blocks (one `except: pass`) swallow every error,
#     including the git sync failing silently;
#   - the zip branch closes `zf`/member handles manually with no try/finally,
#     leaking them if a send fails.
async def img(self, ctx, *search): if not os.path.exists(self.conf.get('path', '')): logger.debug('could not find images') await self.bot.say('{path} does not exist') return try: # load repo repo = Repo(self.conf.get('path', '')) loop = self.bot.loop author = Actor('navi', '*****@*****.**') remote = repo.remotes.origin users = set() logger.debug('loaded git info in image repo') # check for changed files logger.debug('getting users') for fname in repo.untracked_files: fname = os.path.join(self.conf.get('path', ''), fname) uname = getpwuid(stat(fname).st_uid).pw_name users.add(uname) logger.debug('found users: %s', ', '.join(users)) # commit changes if users or repo.untracked_files: logger.debug('adding files') await loop.run_in_executor(None, repo.index.add, repo.untracked_files) msg = f"navi auto add - {', '.join(unames)}: added files" logger.debug('commiting') run = lambda: repo.index.commit( msg, author=author, committer=author) await loop.run_in_executor(None, run) users = True # just in case # sync with remote logger.debug('pull') await loop.run_in_executor(None, remote.pull) if users: logger.debug('push') await loop.run_in_executor(None, remote.push) except: pass search = [re.sub(r'[^\w\./#\*-]+', '', i).lower() for i in search] search = dh.remove_comments(search) loop = asyncio.get_event_loop() try: f = loop.run_in_executor(None, azfind.search, self.conf['path'], search) path = await f except: path = '' self.prev_img[ctx.message.channel.id] = path if not path or not path.strip(): await self.bot.send_message( ctx.message.channel, "couldn't find anything matching: `{}`".format(search)) return try: url = path.replace(self.conf['path'], self.conf['path-rep']) logger.info(url) if url.rpartition('.')[2] in ('gif', 'png', 'jpg', 'jpeg'): try: em = discord.Embed() em.set_image(url=url) logger.debug(f'sending {str(em.to_dict())}') await self.bot.say(embed=em) except: await self.bot.say(url) elif url.rpartition('.')[2] in ('zip', 'cbz'): zf = zipfile(path, 'r') for fl
in zf.filelist: f = zf.open(fl.filename) await self.bot.send_file(ctx.message.channel, f, filename=fl.filename) f.close() zf.close() else: await self.bot.say(url) except: raise await self.bot.say('There was an error uploading the image, ' + \ 'but at least I didn\'t crash :p' )