def zip_file(dst_zip_file, src, src_name=None, compress=True, append=False):
    """Add a single file to a zip archive.

    :param dst_zip_file: path of the zip archive to create or extend
    :param src: path of the file to store
    :param src_name: archive name for the entry; defaults to basename(src)
    :param compress: deflate the entry when True, store it raw otherwise
    :param append: append to an existing archive instead of truncating it
    """
    mode = "a" if append else "w"
    compression = zipfile.ZIP_DEFLATED if compress else zipfile.ZIP_STORED
    if src_name is None:
        src_name = os.path.basename(src)
    # "with" guarantees the archive is closed even if write() raises;
    # the original open/close pair leaked the handle on error.
    with zipfile.ZipFile(dst_zip_file, mode, compression) as zf:
        zf.write(src, src_name)
def package(self):
    """Build the Windows distribution of the game.

    Fetches the cached love win32/win64 zip, extracts it, renames love.exe
    to <gamename>.exe, appends the game's .love archive onto the exe
    (standard LÖVE fused-executable trick), then zips the folder as
    <gamename>-win<bits>.zip in destdir.
    """
    # Get the package from the download cache.
    osname = "win%d" % self.windowsbits
    winzipname = self.cache.getFile(self.options['destdir'], osname=osname, version=self.options['loveversion'])
    winzippath = os.path.join(self.options['destdir'], winzipname)
    finaldirpath = os.path.join(self.options['destdir'], "%s-win%d" % (self.options['gamename'], self.windowsbits))
    exefilepath = os.path.join(finaldirpath, "love.exe")
    gameexepath = os.path.join(finaldirpath, "%s.exe" % self.options['gamename'])
    gamezippath = os.path.join(self.options['destdir'], "%s.love" % self.options['gamename'])
    finalpath = os.path.join(self.options['destdir'], "%s-win%d.zip" % (self.options['gamename'], self.windowsbits))
    # Remove old files if they exists
    logger.debug("Cleaning out old file: %s", finaldirpath)
    shutil.rmtree(finaldirpath, ignore_errors=True)
    try:
        logger.debug("Cleaning out old file: %s", finalpath)
        os.remove(finalpath)
    except FileNotFoundError:
        pass
    # Unzip the package; commonpath of the member names gives the
    # top-level folder inside the upstream zip.
    winfile = zipfile2.ZipFile(winzippath, 'r')
    windirpath = os.path.commonpath(winfile.namelist())
    windirpath = os.path.join(self.options['destdir'], windirpath)
    winfile.extractall(self.options['destdir'])
    winfile.close()
    # Move love.exe to game.exe
    os.rename(windirpath, finaldirpath)
    os.rename(exefilepath, gameexepath)
    # Concat love.exe and game.love (appending the .love zip to the exe
    # produces a self-contained fused game binary).
    logger.info("Making executable: %s", gameexepath)
    exefile = open(gameexepath, "ab")
    lovefile = open(gamezippath, "rb")
    exefile.write(lovefile.read())
    exefile.close()
    lovefile.close()
    # Write zipped file for distribution
    logger.info("Making zip file: %s", finalpath)
    finalzipfile = zipfile2.ZipFile(finalpath, 'w')
    finalzipfile.add_tree(finaldirpath, include_top=True)
    finalzipfile.close()
    os.remove(winzippath)
def package(self):
    """Build the macOS distribution of the game.

    Fetches the cached love macOS zip, extracts love.app (preserving unix
    permissions), renames it to <gamename>.app, copies the .love file into
    Contents/Resources, patches the Info.plist via editPlist, then zips the
    bundle as <gamename>-macOS.zip in destdir.
    """
    # Get the package from the download cache.
    osxfile = self.cache.getFile(self.options['destdir'], osname="mac", version=self.options['loveversion'])
    # Calculate paths
    osxpath = os.path.join(self.options['destdir'], osxfile)
    loveapp = os.path.join(self.options['destdir'], "love.app")
    gameapp = os.path.join(self.options['destdir'], "%s.app" % self.options['gamename'])
    gamezip = os.path.join(self.options['destdir'], "%s.love" % self.options['gamename'])
    finalpath = os.path.join(self.options['destdir'], "%s-macOS.zip" % self.options['gamename'])
    # Remove old files if they exists
    logger.debug("Cleaning out old file: %s", gameapp)
    shutil.rmtree(gameapp, ignore_errors=True)
    try:
        logger.debug("Cleaning out old file: %s", finalpath)
        os.remove(finalpath)
    except FileNotFoundError:
        pass
    # Unzip the package; PERMS_PRESERVE_ALL keeps the executable bits on
    # the .app binaries (plain zipfile would drop them).
    osxzipfile = zipfile2.ZipFile(osxpath, 'r')
    osxzipfile.extractall(self.options['destdir'], preserve_permissions=zipfile2.PERMS_PRESERVE_ALL)
    osxzipfile.close()
    os.rename(loveapp, gameapp)
    # Copy the love file into the app bundle
    logger.info("Making .app: %s", gameapp)
    shutil.copy(gamezip, os.path.join(gameapp, "Contents", "Resources"))
    self.editPlist(gameapp)
    # Write zipped file for distribution
    logger.info("Making zip file: %s", finalpath)
    finalzipfile = zipfile2.ZipFile(finalpath, 'w')
    finalzipfile.add_tree(gameapp, include_top=True)
    finalzipfile.close()
    # Clean up
    os.remove(osxpath)
def dictionary_zip(dictionary, zfile):
    """
    dictionary_zip runs a dictionary attack on a password protected
    zip file given a list of words

    :param dictionary: wordlist
    :type dictionary: string
    :param zfile: password protected zip file
    :type zfile: string
    """
    # initialize the zip file object
    zip_file = zipfile2.ZipFile(zfile)
    # get length of dictionary; read as binary so odd encodings don't abort
    # (the original leaked this handle via len(list(open(...))))
    with open(dictionary, "rb") as handle:
        word_count = sum(1 for _ in handle)
    # run attack
    with open(dictionary, "rb") as dictionary:
        # creates a progress bar for length of list
        for word in tqdm(dictionary, total=word_count, unit="word"):
            try:
                zip_file.extractall(pwd=word.strip())
            # Bug fix: the bare `except:` also swallowed KeyboardInterrupt /
            # SystemExit, making the attack unkillable from the keyboard;
            # Exception is enough to skip wrong-password failures.
            except Exception:
                continue
            else:
                print(f"Password found: {word.decode().strip()}")
                exit(0)
    print("Password not found, try a different list")
def build_installer_native(dir, nameprefix):
    """Build a self-extracting installer by appending a BZIP2 zip payload
    to the Installer.exe template.

    :param dir: build output directory holding the template and payload
        binaries (name kept for interface compatibility although it shadows
        the builtin)
    :param nameprefix: optional prefix for the produced installer file name
    :returns: path of the produced installer executable
    """
    installer_template_exe = os.path.join(dir, "Installer.exe")
    if nameprefix is not None:
        installer_exe = os.path.join(dir, "%s-install.exe" % nameprefix)
    else:
        installer_exe = os.path.join(dir, "SumatraPDF-install.exe")
    # NOTE(review): the original computed a per-prefix exe path here and then
    # unconditionally overwrote it — the payload exe is always the
    # no-MuPDF build; confirm the dead stores weren't meant to take effect.
    exe = os.path.join(dir, "SumatraPDF-no-MuPDF.exe")
    lib = os.path.join(dir, "libmupdf.dll")
    plugin = os.path.join(dir, "npPdfViewer.dll")
    font_name = "DroidSansFallback.ttf"
    font_path = os.path.join("mupdf", "fonts", "droid", font_name)  # relative to cwd
    ifilter = os.path.join(dir, "PdfFilter.dll")
    shutil.copy(installer_template_exe, installer_exe)
    # Write the marker where the (un)installer ends and the ZIP file begins.
    # Bug fix: the file is opened in binary append mode, so the marker must
    # be bytes — writing a str raises TypeError on Python 3.
    with open(installer_exe, "ab") as fo:
        fo.write(b"!uninst_end!")
    # "a" mode on a non-zip file appends a fresh archive after the marker.
    zf = zipfile.ZipFile(installer_exe, "a", zipfile.ZIP_BZIP2)
    zf.write(exe, "SumatraPDF.exe")
    zf.write(lib, "libmupdf.dll")
    zf.write(font_path, font_name)
    zf.write(plugin, "npPdfViewer.dll")
    zf.write(ifilter, "PdfFilter.dll")
    zf.close()
    print("Built installer at " + installer_exe)
    return installer_exe
def zip_folder(src, dst_file_path, with_root_folder, re_skip_list=None, fixed_modified_time=False):
    """Zip the directory tree under ``src`` into ``dst_file_path``.

    :param src: root directory to archive
    :param dst_file_path: path of the zip file to create
    :param with_root_folder: when True, archive names include the root
        folder itself; when False they are relative to ``src``
    :param re_skip_list: optional list of regex patterns; any archive name
        matching one of them is skipped
    :param fixed_modified_time: passed straight through as the 4th
        positional argument of ZipFile.write — see note below
    """
    zf = zipfile.ZipFile(dst_file_path, "w", zipfile.ZIP_DEFLATED)
    abs_src = os.path.abspath(src)
    for dirname, subdirs, files in os.walk(abs_src):
        for filename in files:
            absname = os.path.abspath(os.path.join(dirname, filename))
            if not with_root_folder:
                arcname = absname[len(abs_src) + 1:]
            else:
                arcname = absname[len(os.path.dirname(dirname)) + 1:]
            # LOGD(TAG, 'zipping %s as %s' % (os.path.join(dirname, filename), arcname)
            if re_skip_list:
                to_skip = False
                for re_skip in re_skip_list:
                    if re.search(re_skip, arcname):
                        to_skip = True
                        break
                if to_skip:
                    continue
            # Always skip macOS Finder metadata files.
            file_name = os.path.split(arcname)[-1]
            if file_name == '.DS_Store':
                continue
            # NOTE(review): stdlib ZipFile.write's 4th positional parameter is
            # compresslevel (3.7+), not a timestamp flag — this presumably
            # relies on a patched/custom `zipfile` module; confirm.
            zf.write(absname, arcname, None, fixed_modified_time)
    zf.close()
    LOGI(TAG, 'zip_folder: zipping %s finished!' % dst_file_path)
def unzip_mbz_file(mbz_filepath):
    """Unpack a Moodle .mbz backup into a sibling directory and return its path.

    Older mbz files are plain zip archives, newer ones are gzipped tarballs;
    libmagic is used to decide which. Returns the extraction directory path,
    or -1 when the archive type is unrecognized (kept for caller
    compatibility).
    """
    # Make folder to contain contents of unzipped mbz file, picking a
    # non-clashing name derived from the archive's basename.
    base_dir = os.path.dirname(mbz_filepath)
    mbz_filename, extension = os.path.splitext(os.path.basename(mbz_filepath))
    unzip_folder = mbz_filename
    i = 1
    while unzip_folder in os.listdir(base_dir):
        unzip_folder = "%s_%d" % (mbz_filename, i)
        i += 1
    fullpath_to_unzip_dir = os.path.join(base_dir, unzip_folder)
    if not os.path.exists(fullpath_to_unzip_dir):
        os.mkdir(fullpath_to_unzip_dir)
    # Sniff the actual archive type rather than trusting the extension.
    fileinfo = magic.from_file(mbz_filepath)
    if 'Zip archive data' in fileinfo:
        with zipfile2.ZipFile(mbz_filepath, 'r') as myzip:
            myzip.extractall(fullpath_to_unzip_dir)
    elif 'gzip compressed data' in fileinfo:
        # Context manager closes the tar handle even if extractall raises;
        # the original leaked it on error.
        with tarfile.open(mbz_filepath) as tar:
            tar.extractall(path=fullpath_to_unzip_dir)
    else:
        print("Can't figure out what type of archive file this is")
        return -1
    return fullpath_to_unzip_dir
def test_parse_jsons_from_folder():
    """Download a zip of example threat jsons from S3, unzip it under /tmp,
    and parse per-media detections out of each json via AvroAPI."""
    s3_client = boto3.client("s3")
    log.info("Downloading zipped jsons.")
    name = "threat_eg_jsons"
    tmp_filepath = "/tmp/" + name + ".zip"
    s3_client.download_file("cvapis-data", "jsons/" + name + ".zip", tmp_filepath)
    log.info("Zipped jsons downloaded.")
    with open(tmp_filepath, "rb") as f:
        log.info("Unzipping it.")
        # Close the archive when done — the original leaked the handle.
        with zipfile2.ZipFile(f) as z:
            for name_local in z.namelist():
                print(" Extracting file", name_local)
                z.extract(name_local, "/tmp/")
    jsons_path = "/tmp/" + name
    # Log-message fixes: missing space after "to" and "Parrsing" typo.
    log.info("Unzipped to " + jsons_path)
    log.info("Parsing jsons in " + jsons_path)
    all_jsons = os.listdir(jsons_path)
    dets_per_media = {}
    server_name = "ThreatClassifier"
    for ind, single_json_rel in enumerate(all_jsons):
        single_json = jsons_path + "/" + single_json_rel
        log.info("Parsing: " + single_json)
        avro_io = AvroIO()
        x = AvroAPI(avro_io.read_json(single_json))
        # Query detections produced by this server only.
        p = {}
        p["server"] = server_name
        props = [p]
        dets = x.get_detections_from_props(props)
        url = x.get_url()
        dets_per_media[url] = dets
        log.info("\n\n\n")
        log.info("url: " + url)
        log.info("dets: " + str(dets))
def Extract_Icon(Filename, ROMFile, TempFilename, Data):
    """Extract the icon bitmap and palette from a ROM header and save it
    as a PNG in the configured image path.

    :param Filename: archive (or plain ROM) path; dispatched on extension
    :param ROMFile: member name inside a .zip archive
    :param TempFilename: pre-extracted temp file used for .rar archives
    :param Data: ROM header bytes already read by the caller
    :returns: CRC string of the written PNG, or "" on any error
    """
    try:
        # Icon/banner offset is stored at header offset 0x68.
        ImageLoc = Read_Address(Data, 0x68)
        # NOTE(review): this constant and the slice bounds below look like
        # corrupted/concatenated offsets (e.g. "32 + 512 + ...") — verify
        # them against the original source before trusting this function.
        Size = ImageLoc + 3251232256256
        if os.path.splitext(Filename)[1].lower() == ".zip" and Use_Zip:
            File = zipfile2.ZipFile(Filename, "r")
            Data = File.read_size(ROMFile, Size)
        elif os.path.splitext(Filename)[1].lower() == ".7z" and Use_7Zip:
            # 7z data was already fully read by the caller; just truncate.
            Data = Data[0:Size]
        elif os.path.splitext(Filename)[1].lower() == ".rar" and Use_RAR:
            File = open(TempFilename, "rb")
            Data = File.read(Size)
        else:
            File = open(Filename, "rb")
            Data = File.read(Size)
        # Bitmap ('B' bytes) and palette ('H' 16-bit words) slices.
        Logo = array.array('B', Data[ImageLoc + 32:ImageLoc + 32512])
        BinPal = array.array('H', Data[ImageLoc + 32512:ImageLoc + 3251232])
        Icon = Get_Icon(Logo=Logo, BinPal=BinPal)
        Icon_Filename = os.path.join(
            Config.Config["Image_Path"],
            os.path.splitext(os.path.basename(Filename))[0] + ".png")
        Icon.save(Icon_Filename, "PNG")
        return Get_File_CRC(Icon_Filename)
    except:
        # Best-effort: any failure yields an empty CRC.
        return ""
def makedir(completed_file_path):
    """Create one folder per .docx feedback file, move each file into its
    folder, then zip the collected feedback folders into Feedback.zip.

    :param completed_file_path: directory containing the completed .docx files
    """
    log(whoami(), 'MAKEDIR START - Creating folder structure to feedback.zip')
    files = os.listdir(path=completed_file_path)
    for docxname in files:
        # Bug fix: str.strip('.docx') removes any leading/trailing '.', 'd',
        # 'o', 'c', 'x' characters (e.g. 'codex.docx' -> 'e'), mangling the
        # folder names; os.path.splitext drops exactly the extension.
        stem = os.path.splitext(docxname)[0]
        try:
            if not os.path.isfile(os.path.join(completed_file_path, stem)):
                os.mkdir(os.path.join(completed_file_path, stem))
            if os.path.isfile(os.path.join(completed_file_path, docxname)):
                move(os.path.join(completed_file_path, docxname),
                     os.path.join(completed_file_path, stem))
        except FileExistsError as msg:
            log(whoami(), '{}'.format(msg))
    collect_feedback_files(completed_file_path)
    # `path` is a module-level setting; Feedback.zip is built from every
    # '_file_' folder found under <path>/completed/.
    zf = zipfile2.ZipFile(os.path.join(path, 'Feedback.zip'), 'w', zipfile2.ZIP_DEFLATED)
    log(whoami(), 'ZIP: {}'.format(os.path.join(path, 'Feedback.zip')))
    folders_to_zip_feedback = search_dir(os.path.join(path, 'completed/'), '_file_')
    log(whoami(), 'folders_to_zip_feedback: {}'.format(folders_to_zip_feedback))
    for folder_path in folders_to_zip_feedback:
        try:
            zipdir(folder_path, zf)
        finally:
            log(whoami(), 'ZIP: {}'.format(zf.infolist()))
def attackZip(workerNum, fileName, passFile):
    """Try each candidate password in ``passFile`` against a zip archive.

    :param workerNum: worker id, used only in log output
    :param fileName: path of the password-protected archive
    :param passFile: iterable of candidate password strings
    """
    file = zipfile2.ZipFile(fileName)
    for attempt in passFile:
        try:
            password = "******" + str(workerNum) + " Password found: " + attempt
            file.extractall(pwd=attempt.encode("utf8"))
            print(password)
            # NOTE(review): f() is defined elsewhere — presumably signals
            # "stop all workers"; confirm.
            f()
        # Bug fix: the bare `except:` also swallowed SystemExit and
        # KeyboardInterrupt, so the worker could never be stopped;
        # Exception still covers wrong-password failures.
        except Exception:
            print("Worker-no: " + str(workerNum) + " Not matched with : " + attempt)
def unzip_file(dir_name, zip_file, dst_file_path):
    """Extract ``dir_name/zip_file`` into ``dst_file_path``.

    Creates the destination directory when missing.

    :raises ErrorMessage: when the archive does not exist
    """
    file_path = os.path.join(dir_name, zip_file)
    if not os.path.exists(file_path):
        raise ErrorMessage("Cannot find %s" % file_path)
    if not os.path.exists(dst_file_path):
        os.mkdir(dst_file_path)
    # "with" closes the archive even if extraction fails; the original
    # never closed it at all.
    with zipfile.ZipFile(file_path, "r") as zf:
        zf.extractall(dst_file_path)
    LOGD(TAG, 'unzip_files: unzipping %s finished!' % file_path)
def Get_CRC_or_Date(Filename):
    """Collect ROM entries from an archive, or from a bare ROM file.

    Dispatches on extension: .zip entries report their stored CRC, .7z
    entries their digest, .rar entries their crc; a bare ROM file reports
    its mtime instead of a CRC.

    :returns: tuple of parallel lists (CRC, Date, ROMFile); ([], [], [])
        on any error.
    """
    CRC = []
    Date = []
    ROMFile = []
    if os.path.splitext(Filename)[1].lower() == ".zip" and Use_Zip:
        try:
            FileIn = zipfile2.ZipFile(Filename, "r")
            for File in FileIn.infolist():
                # Only members whose extension is a configured ROM type.
                if os.path.splitext(File.filename)[1].lower() in Config.Config["ROM_Extensions"]:
                    ROMFile.append(File.filename)
                    CRC.append(CRC2Hex(File.CRC))
                    Date.append("")
        except:
            return ([], [], [])  #IGNORE:W0702
    elif os.path.splitext(Filename)[1].lower() == ".7z" and Use_7Zip:
        try:
            FileIn = open(Filename, "rb")
            archive = Archive7z(FileIn)
            Filenames = archive.getnames()
            for File in Filenames:
                if os.path.splitext(File)[1].lower() in Config.Config["ROM_Extensions"]:
                    ROMFile.append(File)
                    cf = archive.getmember(File)
                    CRC.append(CRC2Hex(cf.digest))
                    Date.append("")
        except:
            return ([], [], [])  #IGNORE:W0702
    elif os.path.splitext(Filename)[1].lower() == ".rar" and Use_RAR:
        try:
            for File in UnRAR.Archive(Filename).iterfiles():
                if os.path.splitext(File.filename)[1].lower() in Config.Config["ROM_Extensions"]:
                    ROMFile.append(File.filename)
                    CRC.append(CRC2Hex(File.crc))
                    Date.append("")
        except:
            return ([], [], [])  #IGNORE:W0702
    elif os.path.splitext(Filename)[1].lower() in Config.Config["ROM_Extensions"]:
        try:
            # Bare ROM file: no CRC available here, record the mtime instead.
            Date.append(
                time.strftime("%d/%m/%Y %I:%M:%S %p",
                              time.localtime(os.path.getmtime(Filename))).lower())
            ROMFile.append(os.path.basename(Filename))
            CRC.append("")
        except:
            return ([], [], [])  #IGNORE:W0702
    return (CRC, Date, ROMFile)
def build_installer_data(dir):
    """Pack the installer payload into <dir>/InstallerData.zip (BZIP2).

    Stores the no-MuPDF build as SumatraPDF.exe, the companion DLLs and
    uninstaller by their own names, and the Droid fallback font taken from
    the mupdf source tree relative to the current working directory.
    """
    archive_path = os.path.join(dir, "InstallerData.zip")
    with zipfile.ZipFile(archive_path, "w", zipfile.ZIP_BZIP2) as archive:
        archive.write(os.path.join(dir, "SumatraPDF-no-MuPDF.exe"), "SumatraPDF.exe")
        for component in ("libmupdf.dll", "npPdfViewer.dll", "PdfFilter.dll",
                          "PdfPreview.dll", "uninstall.exe"):
            archive.write(os.path.join(dir, component), component)
        font = "DroidSansFallback.ttf"
        archive.write(os.path.join("mupdf", "fonts", "droid", font), font)
def pull_net_data():
    """Fetch ssd-detector/net_data.zip from the cvapis-data S3 bucket and
    unpack every member under /tmp/."""
    log.info("Downloading net_data")
    s3_client = boto3.client("s3")
    log.info("Downloading model.")
    tmp_filepath = "/tmp/net_data.zip"
    s3_client.download_file("cvapis-data", "ssd-detector/net_data.zip", tmp_filepath)
    log.info("Model downloaded.")
    with open(tmp_filepath, "rb") as archive_stream:
        log.info("Unzipping it.")
        archive = zipfile2.ZipFile(archive_stream)
        for member in archive.namelist():
            print(" Extracting file", member)
            archive.extract(member, "/tmp/")
        log.info("Unzipped.")
def Get_NFO(ROM):
    """Return the NFO text for a ROM.

    Looks first for a pre-extracted <Image_Number>.nfo in the configured
    NFO path, then for a .nfo member inside the ROM's .zip/.7z/.rar
    archive. Returns "" when nothing is found or on any archive error.
    """
    NFO_Filename = os.path.join(Config.Config["NFO_Path"], "%04d.nfo" % ROM.Image_Number)
    if os.path.isfile(NFO_Filename):
        # Cached standalone NFO wins over anything inside the archive.
        File = open(NFO_Filename, "rt")
        Data = File.read()
        File.close()
        return Data
    elif os.path.splitext(ROM.Archive_File)[1].lower() == ".zip" and Use_Zip:
        try:
            File_In = zipfile2.ZipFile(ROM.Archive_File, "r")
            for File in File_In.infolist():
                if os.path.splitext(File.filename)[1].lower() == ".nfo":
                    Data = File_In.read(File.filename)
                    return Data
        except:
            pass
    elif os.path.splitext(ROM.Archive_File)[1].lower() == ".7z" and Use_7Zip:
        try:
            File_In = open(ROM.Archive_File, "rb")
            archive = Archive7z(File_In)
            for File in archive.filenames:
                if os.path.splitext(File)[1].lower() == ".nfo":
                    cf = archive.getmember(File)
                    Data = cf.read()
                    return Data
        except:
            pass
    elif os.path.splitext(ROM.Archive_File)[1].lower() == ".rar" and Use_RAR:
        try:
            # RAR members must be extracted to a temp file before reading.
            TempFilename = Create_Temp_Filename()
            for ArchiveFile in UnRAR.Archive(ROM.Archive_File).iterfiles():
                if os.path.splitext(ArchiveFile.filename)[1].lower() == ".nfo":
                    ArchiveFile.extract(TempFilename)
                    File = open(TempFilename, "rt")
                    Data = File.read()
                    File.close()
                    try:
                        os.unlink(TempFilename)
                    except:
                        pass
                    return Data
        except:
            pass
    return ""
def generate_user_pack(username):
    """Bundle a user's certificate, key and the shared VPN config files
    into <username>.zip.

    :returns: True on success; on any write failure the error is logged,
        the (partial) archive is closed, and False is returned.
    """
    bundle = zipfile2.ZipFile('%s.zip' % username, 'w')
    for member in ('%s.crt' % username,
                   '%s.key' % username,
                   './config/ca.crt',
                   './config/ta.key',
                   './config/client.ovpn'):
        try:
            bundle.write(member)
        except Exception as e:
            logger.error(e)
            logger.error('add %s error ...' % member)
            bundle.close()
            return False
    bundle.close()
    return True
def Fetch_Master_List():
    """Download the master XML archive and unpack the XML member into the
    configured local file.

    :returns: True on success, False on any download or unpack failure.
    """
    try:
        socket.setdefaulttimeout(30)
        response = urllib2.urlopen(Config.Config["Master_XML_URL"])
        payload = response.read()
    except:
        return False  #IGNORE:W0702
    try:
        archive = zipfile2.ZipFile(cStringIO.StringIO(payload))
        out_file = open(Config.Config["Master_XML_File"], "wt")
        out_file.write(archive.read(Config.Config["Master_XML_File"]))
        out_file.close()
        archive.close()
    except:
        return False  #IGNORE:W0702
    return True
def make_zip_to_staff(path):
    """Create one <staff member>.zip under ``path`` per entry in the
    module-level ``staff`` list, zipping that member's folders from the
    parallel module-level ``folders_to_zip`` list."""
    log(whoami(), 'folders_to_zip: {} {}'.format(folders_to_zip, len(folders_to_zip)))
    log(whoami(), 'staff: {} {}'.format(staff, len(staff)))
    pbar = tqdm(total=len(folders_to_zip))
    for idx, member in enumerate(staff):
        archive = zipfile2.ZipFile(path + '/' + member + '.zip', 'w', zipfile2.ZIP_DEFLATED)
        log(whoami(), 'folders_to_zip: {}'.format(folders_to_zip[idx]))
        for folder in folders_to_zip[idx]:
            pbar.update(1)
            time.sleep(1)
            try:
                zipdir(folder + '/', archive)
            finally:
                pass
        log(whoami(), 'Created: ' + member + '.zip')
        log(whoami(), '\t\tinfo: {}'.format(archive.infolist()))
        archive.close()
    pbar.close()
def zip_file_with_path_info_list(path_info_list, dst_file_path, debug=False, fixed_modified_time=False):
    """Zip an explicit list of files into ``dst_file_path``.

    :param path_info_list: dicts with 'full_path' (source file) and
        'path_in_zip' (archive name for that file)
    :param dst_file_path: path of the zip file to create
    :param debug: when True, dump every entry's ZipInfo fields after writing
    :param fixed_modified_time: passed as the 4th positional argument of
        ZipFile.write — see note below
    """
    zf = zipfile.ZipFile(dst_file_path, "w", zipfile.ZIP_DEFLATED)
    for path_info in path_info_list:
        full_path = path_info['full_path']
        path_in_zip = path_info['path_in_zip']
        # Silently skip macOS Finder metadata files.
        file_name = os.path.split(full_path)[-1]
        if file_name == '.DS_Store':
            continue
        assert_if_failed(os.path.exists(full_path), "The path (%s) doesn't exist!" % full_path)
        # NOTE(review): stdlib ZipFile.write's 4th positional parameter is
        # compresslevel (3.7+), not a timestamp flag — this presumably relies
        # on a patched/custom `zipfile` module; confirm.
        zf.write(full_path, path_in_zip, None, fixed_modified_time)
    zf.close()
    if debug:
        # infolist() stays readable after close(); entries are cached.
        for info in zf.infolist():
            LOGD(TAG, "===========================")
            LOGD(TAG, "filename:" + info.filename)
            LOGD(TAG, "date_time:" + str(info.date_time))
            LOGD(TAG, "compress_type:" + str(info.compress_type))
            # NOTE(review): on Python 3 ZipInfo.comment/extra are bytes, so
            # these two concatenations would raise TypeError — confirm the
            # intended runtime / zipfile variant.
            LOGD(TAG, "comment:" + info.comment)
            LOGD(TAG, "extra:" + info.extra)
            LOGD(TAG, "create_system:" + str(info.create_system))
            LOGD(TAG, "create_version:" + str(info.create_version))
            LOGD(TAG, "extract_version:" + str(info.extract_version))
            LOGD(TAG, "reserved:" + str(info.reserved))
            LOGD(TAG, "flag_bits:" + str(info.flag_bits))
            LOGD(TAG, "volume:" + str(info.volume))
            LOGD(TAG, "internal_attr:" + str(info.internal_attr))
            LOGD(TAG, "external_attr:" + str(info.external_attr))
            LOGD(TAG, "header_offset:" + str(info.header_offset))
            LOGD(TAG, "CRC:" + str(info.CRC))
            LOGD(TAG, "compress_size: " + str(info.compress_size))
            LOGD(TAG, "file_size:" + str(info.file_size))
    LOGD(TAG, 'zip_files: zipping %s finished!' % dst_file_path)
def brute_force_zip(plock_file, password_length, asci):
    """
    function uses brute-force techniques to gain access to password
    protected zip files

    :param plock_file: file with password protection
    :param password_length: possible length of password
    :param asci: string combination of ascii values
    :returns: the cracked password string, or None when no candidate worked
    """
    # initialize the Zipfile object
    zfile = zipfile2.ZipFile(plock_file)
    # iterates through all asci values and generates a progress bar showing
    # completion process
    for i in tqdm(range(1, (password_length + 1))):
        for letter in itertools.product(asci, repeat=i):
            password = ''.join(letter)
            try:
                zfile.extractall(pwd=password)
            except Exception:
                # wrong password (or undecryptable entry): try next candidate
                continue
            else:
                # Bug fix: the original kept looping after a successful
                # extraction and unconditionally printed "Password not found"
                # at the end; stop at the first hit and return it.
                print('Password found: {}'.format(password))
                return password
    print("Password not found")
    return None
def Get_File_Size(Filename, ROMFile, TempFilename):
    """Return the uncompressed size in bytes of ``ROMFile``.

    Dispatches on ``Filename``'s extension (.zip/.7z/.rar); for any other
    extension ``Filename`` itself is treated as the ROM and stat'ed
    directly. Returns 0 when anything goes wrong.

    :param TempFilename: unused here — presumably kept for signature parity
        with the sibling archive helpers; confirm.
    """
    Size = 0
    try:
        if os.path.splitext(Filename)[1].lower() == ".zip" and Use_Zip:
            File = zipfile2.ZipFile(Filename, "r")
            zi = File.getinfo(ROMFile)
            Size = zi.file_size
        elif os.path.splitext(Filename)[1].lower() == ".7z" and Use_7Zip:
            File = open(Filename, "rb")
            archive = Archive7z(File)
            cf = archive.getmember(ROMFile)
            Size = cf.size
        elif os.path.splitext(Filename)[1].lower() == ".rar" and Use_RAR:
            # Linear scan: UnRAR exposes no direct member lookup here.
            for ArchiveFile in UnRAR.Archive(Filename).iterfiles():
                if ArchiveFile.filename == ROMFile:
                    Size = ArchiveFile.size
        else:
            Size = os.path.getsize(Filename)
    except:
        # Best-effort: fall through with Size = 0 on any error.
        pass
    return Size
def Read_Data(Filename, ROMFile, Size=-1):
    """Read ROM bytes from a .zip/.7z/.rar archive or a plain file.

    :param Filename: archive (or plain ROM) path; dispatched on extension
    :param ROMFile: member name inside the archive
    :param Size: number of bytes to read, or -1 for the whole member
    :returns: tuple (OK, TempFilename, Data) — OK is False on any error;
        TempFilename is the temp file a .rar member was extracted to
        ("" otherwise), presumably for the caller to clean up; confirm.
    """
    TempFilename = ""
    Data = []
    OK = True
    try:
        if os.path.splitext(Filename)[1].lower() == ".zip" and Use_Zip:
            File = zipfile2.ZipFile(Filename, "r")
            if Size == -1:
                Data = File.read(ROMFile)
            else:
                # read_size is a zipfile2 extension for partial reads.
                Data = File.read_size(ROMFile, Size)
        elif os.path.splitext(Filename)[1].lower() == ".7z" and Use_7Zip:
            File = open(Filename, "rb")
            archive = Archive7z(File)
            cf = archive.getmember(ROMFile)
            Data = cf.read()
        elif os.path.splitext(Filename)[1].lower() == ".rar" and Use_RAR:
            # RAR members must be extracted to a temp file before reading.
            TempFilename = Create_Temp_Filename()
            for ArchiveFile in UnRAR.Archive(Filename).iterfiles():
                if ArchiveFile.filename == ROMFile:
                    ArchiveFile.extract(TempFilename)
                    File = open(TempFilename, "rb")
                    if Size == -1:
                        Data = File.read()
                    else:
                        Data = File.read(Size)
                    File.close()
        else:
            File = open(Filename, "rb")
            if Size == -1:
                Data = File.read()
            else:
                Data = File.read(Size)
    except:
        OK = False
    return (OK, TempFilename, Data)
#!/usr/bin/python
import zipfile2

# Dictionary attack on protected.zip: try each candidate password and
# report every one that successfully extracts the archive.
wordlist = ['111', '123', 'password123', '98124']
for candidate in wordlist:
    try:
        archive = zipfile2.ZipFile('protected.zip', 'r')
        archive.extractall(pwd=candidate)
        # Py3 fix: the original used a Python 2 `print` statement (a syntax
        # error here), called extractall unbound through the class, and
        # shadowed the builtin `zip` with its variable name.
        print('The Password is: ', candidate)
    except Exception:
        continue
import os
import urllib.request
import zipfile2
import gzip

# (url, local archive name, extraction subdirectory) for each third-party
# MATLAB evaluation package to fetch.
packages = [
    {'url': 'http://ecs.utdallas.edu/loizou/speech/composite.zip',
     'local': 'composite.zip', 'dir': 'evaluation/obj_evaluation'},
    {'url': 'http://www.ee.ic.ac.uk/hp/staff/dmb/voicebox/voicebox.html',
     'local': 'voicebox.zip', 'dir': 'evaluation/voicebox'},
    {'url': 'http://ceestaal.nl/stoi.zip',
     'local': 'stoi.zip', 'dir': 'evaluation/stoi'},
    {'url': 'https://www.mathworks.com/matlabcentral/mlc-downloads/downloads/submissions/19550/versions/1/download/zip',
     'local': 'rdir.zip', 'dir': 'rdir'},
]

for package in packages:
    src = package['url']
    dest = package['local']
    # Py3 fix: the original used Python 2 `print` statements (syntax errors
    # here) and `urllib.urlretrieve`, which moved to urllib.request in
    # Python 3. The unused `path` local was dropped.
    print("Downloading file %s to %s" % (src, dest))
    urllib.request.urlretrieve(src, dest)
    print("Unzipping file %s..." % (dest))
    outpath = 'matlab/' + package['dir']
    if not os.path.exists(outpath):
        print("Creating directory %s" % outpath)
        os.makedirs(outpath)
    try:
        with zipfile2.ZipFile(dest, 'r') as f:
            f.extractall(outpath)
    except Exception:
        print("Failed to extract file %s to %s" % (dest, outpath))
def home(request):
    """
    Controller for the app home page.

    Downloads WaterOneFlow archives for a hard-coded set of CUAHSI resource
    ids into the app workspace, then — when the 'analyze' form was POSTed —
    runs the recession extraction, writes result tables to template files,
    and builds the plot/table gizmos rendered by home.html.
    """
    gage_names = []
    select_gage_options_tuple = []  # (label, value) pairs for the gage dropdown
    # This is new
    temp_dir = RecessionAnalyzer.get_app_workspace().path
    #res_ids = request.GET.getlist('WofUri')
    # NOTE(review): resource ids are hard-coded instead of taken from the
    # query string (commented line above) — confirm this is intentional.
    res_ids = []
    res_ids.append('cuahsi-wdc-2017-04-03-30616779')
    res_ids.append('cuahsi-wdc-2017-04-03-30650403')
    res_ids.append('cuahsi-wdc-2017-04-03-30705857')
    for res_id in res_ids:
        # Fetch each resource's WaterOneFlow zip and dump its contents to
        # <workspace>/id/<res_id>.xml (each member overwrites the same path).
        url_zip = 'http://qa-webclient-solr.azurewebsites.net/CUAHSI/HydroClient/WaterOneFlowArchive/' + res_id + '/zip'
        r = requests.get(url_zip, verify=False)
        z = zipfile.ZipFile(io.BytesIO(r.content))
        file_list = z.namelist()
        for file in file_list:
            file_data = z.read(file)
            file_path = temp_dir + '/id/' + res_id + '.xml'
            with open(file_path, 'wb') as f:
                f.write(file_data)
        gage_name = getSite(res_id)
        gage_names.append(gage_name)
        select_gage_options_tuple.append((gage_name, gage_name))
    # New stuff ends here
    # Defaults used when the form has not been submitted yet.
    concave_initial = False
    nonlinear_fitting_initial = False
    rec_sense_initial = 1
    min_length_initial = 4
    antecedent_moisture_initial = 1
    lag_start_initial = 0
    #select_gage_options_initial = ['11476500']
    select_gage_options_initial = gage_names
    #select_gage_options_tuple = [('11476500', '11476500'), ('11477000', '11477000')]
    #select_gage_options_tuple = [(getSite(res_id), getSite(res_id))]
    abJson = ''
    seriesDict = {}
    scatter_plot_view = []
    line_plot_view = []
    context = {}
    gage_json = ''
    ab_stats = buildStatTable({'stats': []})
    submitted = False
    # Load the HUC-18 site table shipped with the app; keep stream sites only.
    sites = pd.read_csv(
        '/usr/lib/tethys/src/tethys_apps/tethysapp/recession_analyzer/public/huc_18.tsv',
        sep='\t', header=30, index_col=False, skiprows=[31])
    sites = sites[sites.site_tp_cd == 'ST']
    names = sites.station_nm
    values = [str(x) for x in list(sites.site_no)]
    text = [num + ' ' + name[0:20] for (num, name) in zip(values, names)]
    # NOTE(review): zip(...) objects passed straight into gizmo options —
    # this reads as Python 2 era code (zip returns a list there); confirm
    # the target runtime.
    gages_options_options = zip(text, values)
    gages_options_options_dict = dict(zip(values, text))
    # "Analyze recessions" button has been pressed: this stores a new set of
    # analysis parameters, performs the recession analysis, stores data in
    # dictionaries and creates a new dropdown box with user gages.
    if request.POST and 'analyze' in request.POST:
        # PRESERVE THE PREVIOUS STATE #
        gages_initial = request.POST.getlist("gages_input")
        start_initial = request.POST['start_input']
        stop_initial = request.POST['stop_input']
        if 'concave_input' in request.POST:
            concave_initial = True
        else:
            concave_initial = False
        if 'nonlinear_fitting_input' in request.POST:
            nonlinear_fitting_initial = True
        else:
            nonlinear_fitting_initial = False
        rec_sense_initial = request.POST['rec_sense_input']
        min_length_initial = request.POST['min_length_input']
        lag_start_initial = request.POST['lag_start_input']
        antecedent_moisture_initial = request.POST['antecedent_moisture_input']
        ########################################
        # Persist the POST to the user workspace, then reload it.
        # NOTE(review): text-mode pickle ('w'/'r') only works on Python 2 —
        # confirm the target runtime.
        app_workspace = RecessionAnalyzer.get_user_workspace(request.user)
        new_file_path = os.path.join(app_workspace.path, 'current_plot.txt')
        pickle.dump(request.POST, open(new_file_path[:-4] + '.p', 'w'))
        post = pickle.load(open(new_file_path[:-4] + '.p', 'r'))
        submitted = True
        gage_json = json.dumps(gage_names)
        start = post['start_input']
        stop = post['stop_input']
        rec_sense = post['rec_sense_input']
        min_length = post['min_length_input']
        nonlin_fit = post.get('nonlinear_fitting_input', False)
        min_length = float(min_length)
        selectivity = float(rec_sense) * 500
        # Run the recession extraction for every gage over [start, stop].
        sitesDict, startStopDict = recessionExtract(gage_names, res_ids, start,
                                                    stop, ante=10, alph=0.90,
                                                    window=3,
                                                    selectivity=selectivity,
                                                    minLen=min_length,
                                                    option=1,
                                                    nonlin_fit=nonlin_fit)
        abJson, abDict = createAbJson(sitesDict, gage_names)
        # Flatten per-gage recession parameters and flow series into
        # parallel columns for the result tables.
        a = []
        a0 = []
        b = []
        q = []
        g = []
        flow = np.array([])
        time = np.array([], dtype='<U10')
        gage_flow = []
        for gage in gage_names:
            a = a + abDict[gage]['a']
            a0 = a0 + abDict[gage]['a0']
            b = b + abDict[gage]['b']
            q = q + abDict[gage]['q']
            g = g + [str(gage)] * len(abDict[gage]['a'])
            flow2 = sitesDict[gage][gage].values
            flow = np.concatenate((flow, flow2), axis=0)
            time2 = sitesDict[gage].index.strftime('%Y-%m-%d')
            time = np.concatenate((time, time2), axis=0)
            gage_flow = gage_flow + [str(gage)] * len(sitesDict[gage][gage])
        dfinfo = np.array([g, a, a0, b, q])
        flow_info = np.array([gage_flow, time, flow])
        df = pd.DataFrame(data=np.transpose(dfinfo),
                          columns=['Gage', 'a', 'a0', 'b', 'q'])
        flow_df = pd.DataFrame(data=np.transpose(flow_info),
                               columns=['Gage', 'Time', 'Flow rate'])
        # Render both tables straight into template files wrapped with the
        # app's base-template block markers.
        new_file_path = "/usr/local/lib/tethys/src/tethys_apps/tethysapp/recession_analyzer/templates/recession_analyzer/flowdata.html"
        flow_df.to_html(new_file_path)
        newline = '{% extends "recession_analyzer/base.html" %}\n{% load tethys_gizmos %}\n{% block app_content %}'
        line_prepender(new_file_path, newline)
        newline = '{% endblock %}'
        line_appender(new_file_path, newline)
        new_file_path = "/usr/local/lib/tethys/src/tethys_apps/tethysapp/recession_analyzer/templates/recession_analyzer/dataframe.html"
        df.to_html(new_file_path)
        newline = '{% extends "recession_analyzer/base.html" %}\n{% load tethys_gizmos %}\n{% block app_content %}'
        line_prepender(new_file_path, newline)
        newline = '{% endblock %}'
        line_appender(new_file_path, newline)
        # FIXME: Throw error here if len(gage_names) == 0
        for gage in gage_names:
            # Build the alternating blue (non-recession) / orange (recession)
            # segments of the flow time-series plot for this gage.
            ts = sitesDict[gage]
            startStop = startStopDict[gage]
            startVec = startStop[0]
            endVec = startStop[1]
            flow = ts[gage]
            tsinds = ts.index
            series = []
            series.append({
                'name': ' ',
                'color': '#0066ff',
                'data': zip(flow[tsinds[0]:startVec[0]].index,
                            flow[tsinds[0]:startVec[0]])
            })
            series.append({
                'name': ' ',
                'color': '#ff6600',
                'data': zip(flow[startVec[0]:endVec[0]].index,
                            flow[startVec[0]:endVec[0]])
            })
            for i in np.arange(0, len(startVec) - 1):
                series.append({
                    'name': ' ',
                    'color': '#0066ff',
                    'data': zip(flow[endVec[i]:startVec[i + 1]].index,
                                flow[endVec[i]:startVec[i + 1]])
                })
                series.append({
                    'name': ' ',
                    'color': '#ff6600',
                    'data': zip(flow[startVec[i + 1]:endVec[i + 1]].index,
                                flow[startVec[i + 1]:endVec[i + 1]])
                })
            series.append({
                'name': ' ',
                'color': '#0066ff',
                'data': zip(flow[endVec[-1]:tsinds[-1]].index,
                            flow[endVec[-1]:tsinds[-1]])
            })
            seriesDict[gage] = series
            line_plot_view.append(
                buildFlowTimeSeriesPlot(series=seriesDict[gage], name=gage))
            # Scatter of the positive recession parameters (A0n vs Bn).
            avals = ts['A0n'][ts['A0n'] > 0].values
            bvals = ts['Bn'][ts['Bn'] > 0].values
            tuplelist = zip(avals, bvals)
            scatter_plot_view.append(
                buildRecParamPlot(tuplelist=tuplelist, name=gage))
        stats_dict = createStatsInfo(abJson)
        ab_stats = buildStatTable(stats_dict)
    # Form gizmos.
    # NOTE(review): gages_initial / start_initial / stop_initial are only
    # bound inside the POST branch above, so a plain GET would raise
    # NameError here — confirm the intended entry conditions for this view.
    gages_options = SelectInput(
        display_text='Select gage(s)',
        name='gages_input',
        multiple=True,
        options=gages_options_options,
        initial=[gages_options_options_dict[init] for init in gages_initial])
    start_options = DatePicker(name='start_input',
                               display_text='Start date',
                               autoclose=True,
                               format='yyyy-m-d',
                               start_date='01/01/1910',
                               initial=start_initial)
    stop_options = DatePicker(name='stop_input',
                              display_text='Stop date',
                              autoclose=True,
                              format='yyyy-m-d',
                              start_date='01/01/1910',
                              initial=stop_initial)
    concave_options = ToggleSwitch(name='concave_input',
                                   size='small',
                                   initial=concave_initial,
                                   display_text='Concave recessions')
    nonlinear_fitting_options = ToggleSwitch(name='nonlinear_fitting_input',
                                             display_text='Nonlinear fitting',
                                             size='small',
                                             initial=nonlinear_fitting_initial)
    min_length_options = RangeSlider(
        name='min_length_input',
        min=4,
        max=10,
        initial=min_length_initial,
        step=1,
        attributes={"onchange": "showValue(this.value,'min_length_initial');"})
    rec_sense_options = RangeSlider(
        name='rec_sense_input',
        min=0,
        max=1,
        initial=rec_sense_initial,
        step=0.01,
        attributes={"onchange": "showValue(this.value,'rec_sense_initial');"})
    antecedent_moisture_options = RangeSlider(
        name='antecedent_moisture_input',
        min=0,
        max=1,
        initial=antecedent_moisture_initial,
        step=0.01,
        attributes={
            "onchange": "showValue(this.value,'antecedent_moisture_initial');"
        })
    lag_start_options = RangeSlider(
        name='lag_start_input',
        min=0,
        max=3,
        initial=lag_start_initial,
        step=1,
        attributes={"onchange": "showValue(this.value,'lag_start_initial');"})
    select_gage_options = SelectInput(
        display_text='Select gage',
        name='gage_input',
        multiple=False,
        initial=select_gage_options_initial,
        options=select_gage_options_tuple,
        attributes={"onchange": "updatePlots(this.value);"})
    context.update({
        'rec_sense_initial': rec_sense_initial,
        'antecedent_moisture_initial': antecedent_moisture_initial,
        'lag_start_initial': lag_start_initial,
        'gage_json': gage_json,
        'min_length_initial': min_length_initial,
        'concave_options': concave_options,
        'nonlinear_fitting_options': nonlinear_fitting_options,
        'min_length_options': min_length_options,
        'submitted': submitted,
        'antecedent_moisture_options': antecedent_moisture_options,
        'lag_start_options': lag_start_options,
        'rec_sense_options': rec_sense_options,
        'line_plot_view': line_plot_view,
        'ab_stats': ab_stats,
        'scatter_plot_view': scatter_plot_view,
        'select_gage_options': select_gage_options,
        'abJson': abJson,
        'seriesDict': seriesDict,
        'gages_options': gages_options,
        'start_options': start_options,
        'stop_options': stop_options
    })
    return render(request, 'recession_analyzer/home.html', context)