Example #1
    def loadaster(self, fileaster=[]):
        """ load Aster files

        """

        # construct filename from prefix
        _fileaster = 'ASTGTM2_' + self.prefix + '_dem.tif'

        if fileaster == []:
            fileaster = pyu.getlong(_fileaster, os.path.join('gis', 'aster'))
        else:
            _fileaster = pyu.getshort(fileaster)

        # zip extraction
        ext = _fileaster.split('.')
        if ext[1] == 'zip':
            with zipfile.ZipFile(fileaster) as zf:
                for member in zf.infolist():
                    words = member.filename.split('/')
                    path = dest_dir
                    for word in words[:-1]:
                        drive, word = os.path.splitdrive(word)
                        head, word = os.path.split(word)
                        if word in (os.curdir, os.pardir, ''):
                            continue
                        path = os.path.join(path, word)
                    zf.extract(member, path)

        f = gdal.Open(fileaster)
        self.hgta = f.ReadAsArray()
Example #2
import zipfile, os

def backupToZip(folder):
    #make sure folder is absolute
    folder = os.path.abspath(folder)

    #figure out filename based on files that already exist
    num = 1
    while True:
        zipFilename = os.path.basename(folder) + '_' + str(num) + '.zip'
        if not os.path.exists(zipFilename):
            break
        num += 1

    #create zip file
    print(f'Creating {zipFilename}...')
    backupZip = zipfile.ZipFile(zipFilename, 'w')

    #walk folder tree and compress files in each folder
    for foldername, subfolders, filenames in os.walk(folder):
        print(f'Adding files in {foldername}...')
        #add current folder to zip file
        backupZip.write(foldername)

        #add all files in folder to zip file
        for filename in filenames:
            newBase = os.path.basename(folder) + '_'
            if filename.startswith(newBase) and filename.endswith('.zip'):
                continue  # don't back up backup zip files
            backupZip.write(os.path.join(foldername, filename))
    backupZip.close()
    print('Done.')


#backupToZip('C:\\delicious')
Example #3
def ExtractZipfiles(sourceDir, destinationDir):

  for filename in os.listdir(sourceDir):
    print(filename)
    with zipfile.ZipFile(os.path.join(sourceDir, filename)) as zf:
      name = zf.namelist()
      print(name)
      zf.extractall(destinationDir, name, b'password')  # pwd must be bytes
Example #4
def download_data(path):
    print('Downloading data . . .')
    url = "http://mattmahoney.net/dc/text8.zip"
    directory = os.path.dirname(path)
    if not os.path.exists(directory):
        os.makedirs(directory)
    urllib.request.urlretrieve(url, path)
    with zipfile.ZipFile(path) as zf:
        zf.extractall(path=directory)  # extract next to the downloaded archive
Example #5
def Archive_Controller(arg):

    CSV_Write()

    # Create archive directory for current time
    Current_time = datetime.now()
    directory = '/home/pi/datalogger/Archive/'
    if not os.path.exists(directory):
        os.makedirs(directory)

    # Create archive
    zf_name = str(Current_time) + '_Archive'

    # Write files to zip archive and compress
    try:
        with zipfile.ZipFile(zf_name, 'w') as zf:
            zf.write('UPS_DB.sql',
                     compress_type=zipfile.ZIP_STORED)  # Write sql database to zip file
            zf.write('UPS_Messages.log',
                     compress_type=zipfile.ZIP_STORED)  # Write log to zip file
            zf.write('UPS_DB.csv',
                     compress_type=zipfile.ZIP_STORED)  # Write csv file to zip file

    except:
        logger.error('Could not write files to zip archive')

    try:
        os.remove('UPS_Messages.log')  # Delete log file
        os.remove('UPS_DB.csv')  # Delete csv file
    except:
        logger.error('Could not delete log and csv files')

    try:

        conn = sqlite3.connect('UPS_DB.db')
        c = conn.cursor()
        c.execute("DELETE FROM UPS_DB WHERE Date <= date('now','-1 day')"
                  )  # Delete sql rows older than one day
        conn.commit()
        conn.close()
    except:
        logger.error('Could not update SQL database')
Example #6
    def config_file(self):

        if self.check_file():  # if the file content is not empty, configure the mail for this file
            reportpath = self.log.get_result_path(
            )  # where does self.log.get_result_path() come from?
            zippath = os.path.join(readConfig.proDir, "result", "test.zip")

            files = glob.glob(os.path.join(reportpath, "*"))  # glob matches files and directories
            '''e.g. filelist = glob.glob(r'./*.py') matches filenames such as './1.py', './2.py' '''

            f = zipfile.ZipFile(zippath, 'w', zipfile.ZIP_DEFLATED)
Example #7
 def unpack(ar, delete):
     """ Unpack a file and delete the original """
     print("Unpacking %s" % ar)
     if ar.endswith("tar"):
         tarfile.open(ar).extractall()
     elif ar.endswith("zip"):
         zipfile.ZipFile(ar, "r").extractall()
     else:
         print("Could not recognize file format of %s. Aborting unpack." % ar)
         return  # Skip the possible delete
     if delete: os.remove(ar)
Example #8
def load_file(filename):
    """ LOADING FILES """
    if filename in ['-', 'stdin']:
        filehandle = sys.stdin
    elif filename.split('.')[-1] == 'gz':
        filehandle = gzip.open(filename)
    elif filename.split('.')[-1] == 'bz2':
        filehandle = bz2.BZ2File(filename)
    elif filename.split('.')[-1] == 'zip':
        filehandle = zipfile.ZipFile(filename)
    else:
        filehandle = open(filename)
    return filehandle
Example #9
def zip():
    form = FORM(TABLE("", INPUT(_type="submit", _value="SUBMIT")))
    if form.accepts(request.vars):
        uploads_dir = os.path.join(os.getcwd(), 'applications',
                                   request.application, 'uploads')
        uploaded_files = os.listdir(uploads_dir)
        zipf = zipfile.ZipFile(os.path.join(uploads_dir, 'uploads_backup.zip'),
                               'w')
        for f in uploaded_files:
            try:
                zipf.write(os.path.join(uploads_dir, f))
            except:
                pass
        #return encode('rot13')
        zipf.close()
    return dict(form=form)
Example #10
def read_file(file_path, direct):
    '''Function to read in daily x data'''
    if os.path.exists(os.path.join(os.getcwd(), file_path)):
        station = pd.read_csv(file_path)
    else:
        zip_file = zipfile.ZipFile(file_path, 'r')
        zip_file.extractall(direct)
        station = pd.read_csv(file_path)

    station['date'] = pd.to_datetime(station['date'])
    station = station.sort_values(by='date')
    station.set_index('date', inplace=True)  # put date in the index
    station = station[station.index > '1984-09-29']  # removes days where there is no y-data
    station.replace('---', '0', inplace=True)
    try:
        station.drop(columns=['Unnamed: 0'], inplace=True)  # drop non-station columns
    except:
        pass

    return station
Example #11
def _uncompress_file(file_, delete_archive=True):
    """Uncompress files contained in a data_set.

    Parameters
    ----------
    file_: string
        path of file to be uncompressed.

    delete_archive: boolean, optional
        Whether or not to delete the archive once it is uncompressed.
        Default: True

    Notes
    -----
    This handles zip, tar, gzip and bzip files only.
    """
    print('extracting data from %s...' % file_)
    data_dir = os.path.dirname(file_)
    # We first try to see if it is a zip file
    try:
        if file_.endswith('.zip'):
            z = zipfile.ZipFile(file_)
            z.extractall(data_dir)
            z.close()
        elif file_.endswith('.gz'):
            z = gzip.GzipFile(file_)
            name = os.path.splitext(file_)[0]
            with open(name, 'wb') as f:
                f.write(z.read())
            z.close()
        elif file_.endswith('.txt'):
            pass
        else:
            tar = tarfile.open(file_, "r")
            tar.extractall(path=data_dir)
            tar.close()
        if delete_archive and not file_.endswith('.txt'):
            os.remove(file_)
        print('   ...done.')
    except Exception as e:
        print('error: ', e)
        raise
Example #12
def read_data():

    dataset_folder_path = 'data'
    dataset_filename = 'text8.zip'
    dataset_name = 'Text8 Dataset'

    if not isfile(dataset_filename):
        with DLProgress(unit="B",
                        unit_scale=True,
                        miniters=1,
                        desc=dataset_name) as pbar:
            urlretrieve('http://mattmahoney.net/dc/text8.zip',
                        dataset_filename, pbar.hook)

    if not isdir(dataset_folder_path):
        with zipfile.ZipFile(dataset_filename) as zip_ref:
            zip_ref.extractall(dataset_folder_path)

    with open("data/text8") as f:
        text = f.read()

    return text
Example #13
# pip3 install tensorflow_datasets
import os
import zipfile
from tensorflow import keras
import tensorflow_datasets as tfds

n = "data"
fn = n + ".zip"
filename = os.path.join(os.getcwd(), fn)
filepath = os.path.join(os.getcwd(), n)

if not os.path.isfile(filename):
    # ssl not working, just download using url
    url = "https://github.com/srihari-humbarwadi/datasets/releases/download/v0.1.0/" + n + ".zip"
    keras.utils.get_file(filename, url)

if not os.path.isdir(filepath):
    # note: the class is zipfile.ZipFile (zipfile has no Zipfile attribute); just extract it
    with zipfile.ZipFile(fn, "r") as z_fp:
        z_fp.extractall("./")

##### ##### ##### ##### ##### ##### ##### ##### ##### ##### ##### ##### ##### ##### #####

# Implementing utility functions
# Bounding boxes can be represented in multiple ways, the most common formats are:

# Storing the coordinates of the corners [xmin, ymin, xmax, ymax]
# Storing the coordinates of the center and the box dimensions [x, y, width, height]
# Since we require both formats, we will be implementing functions for converting between the formats.

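# As a minimal sketch of the two conversions described above (NumPy-based; the
# names convert_to_xywh / convert_to_corners are illustrative, not necessarily
# the functions used in the original tutorial):
import numpy as np


def convert_to_xywh(boxes):
    """Corner format [xmin, ymin, xmax, ymax] -> center format [x, y, width, height]."""
    boxes = np.asarray(boxes, dtype=np.float32)
    wh = boxes[..., 2:] - boxes[..., :2]    # width, height
    xy = boxes[..., :2] + wh / 2.0          # center coordinates
    return np.concatenate([xy, wh], axis=-1)


def convert_to_corners(boxes):
    """Center format [x, y, width, height] -> corner format [xmin, ymin, xmax, ymax]."""
    boxes = np.asarray(boxes, dtype=np.float32)
    half = boxes[..., 2:] / 2.0
    return np.concatenate([boxes[..., :2] - half, boxes[..., :2] + half], axis=-1)


# e.g. convert_to_xywh([[0., 0., 10., 20.]]) -> [[5., 10., 10., 20.]]
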

def swap_xy(boxes):
    """Swaps order the of x and y coordinates of the boxes.
Example #14
import zipfile

file_name = "test_zip.zip"
output_directory = "test_zip"
zip_file = zipfile.ZipFile(file_name, "r")
zip_file.extractall(output_directory)
zip_file.close()
Example #15
print(result)

# driver.close()
print("수집 완료")

# Create a folder named after the search keyword

if not os.path.isdir('./{}'.format(keyword)):
    print("Creating folder")
    os.mkdir('./{}'.format(keyword))

# Download
from urllib.request import urlretrieve
# to get the file extension right
for index, link in tqdm(enumerate(result)):
    start = link.rfind('.')
    end = link.rfind('&')
    filetype = link[start:end]  # e.g. '.png'

    urlretrieve(link, './{}/{}{}{}'.format(keyword, keyword, index, filetype))
print("Download complete")

# Compress
import zipfile
zip_file = zipfile.ZipFile('./{}.zip'.format(keyword), 'w')

for image in os.listdir('./{}'.format(keyword)):
    zip_file.write('./{}/{}'.format(keyword, image),
                   compress_type=zipfile.ZIP_DEFLATED)
zip_file.close()
print("압축 완료")
Example #16
import zipfile
import os

if __name__ == "__main__":
    zip = zipfile.Zipfile("cpss_ppt.zip", 'w')
    zip.write(os.path.join(folder, file),
              file,
              compress_type=zipfile.ZIP_DEFLATED)
    for folder, subfolders, files in os.walk('./python'):
        zip.write(files, compress_type=zipfile.ZIP_DEFLATED)
    zip.close()
Example #17
 def run(self):
     f = zipfile.ZipFile(self.outfile, 'w', zipfile.ZIP_DEFLATED)
     f.write(self.infile)
     f.close()
     print('Finished background zip of : ', self.infile)
Example #18
'''

setup= '''

func(100)

'''

import timeit

timeit.timeit(stmt=stmt, setup=setup, number=100000)

import zipfile

comp_file = zipfile.ZipFile('comp_file.zip', 'w')

import requests
import bs4

result = requests.get("http://example.com")

type(result)

result.text

import bs4

soup = bs4.BeautifulSoup(result.text,"lxml")

soup
Example #19
import glob
import zipfile

with zipfile.Zip("test.zip", "w") as z:
    z.write("test_dir")
    z.write("test_dir/test.txt")

with zipfile.Zip("test.zip", "w") as z:
    for f in glob.glob("test_dir/**", recursive=True):
        print(f)
        z.write(f)

with zipfile.Zipfile("test.zip", "r") as z:
    z.extractall("z2")

with zipfile.Zipfile("test.zip", "r") as z:
    with z.open("test_dir/test.txt") as f:
        print(f.read())
Example #20
urllib.request.urlretrieve(
    val_URL,
    '/Users/Owner/PycharmProjects/week4_coursera/img/validation-horse-or-human.zip'
)

# Un-zipping

# Training
local_zip = '/Users/Owner/PycharmProjects/week4_coursera/img/horse-or-human.zip'
zip_ref = zipfile.ZipFile(local_zip, 'r')
zip_ref.extractall(
    '/Users/Owner/PycharmProjects/week4_coursera/img/horse-or-human')
zip_ref.close()
os.remove(local_zip)
# Validation
val_local_zip = '/Users/Owner/PycharmProjects/week4_coursera/img/validation-horse-or-human.zip'
zip_ref = zipfile.ZipFile(val_local_zip, 'r')
zip_ref.extractall(
    '/Users/Owner/PycharmProjects/week4_coursera/img/validation-horse-or-human'
)
zip_ref.close()
os.remove(val_local_zip)

# Splitting directories

# Training
train_horse_dir = os.path.join(
    '/Users/Owner/PycharmProjects/week4_coursera/img/horse-or-human/horses')
train_human_dir = os.path.join(
    '/Users/Owner/PycharmProjects/week4_coursera/img/horse-or-human/humans')
# Validation
val_horse_dir = os.path.join(
Example #21
from mxnet import nd
from mxnet.gluon import nn
from mxnet.gluon.data import vision
import numpy as np
import pandas as pd
import datetime
import sys
import os
import utils

#unzip dataset
demo = False
#unzip little dataset
if demo:
	import zipfile
	for fin in ['train_tiny.zip', 'test_tiny.zip', 'trainLabels.csv.zip']:
		with zipfile.ZipFile('../data/kaggle_cifar10/' + fin, 'r') as zin:
			zin.extractall('../data/kaggle_cifar10')

#if dataset is .7z, use '7z x filename.7z'

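# A minimal sketch of running that 7z extraction from Python instead of the
# shell (assumptions: the 7z binary is on PATH, and 'train.7z' is just an
# illustrative archive name):
import subprocess
subprocess.run(['7z', 'x', '../data/kaggle_cifar10/train.7z',
                '-o../data/kaggle_cifar10'], check=True)
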
#reorganize the dataset
def reorg_cifar10_data(data_dir, label_file, train_dir, test_dir, input_dir, valid_ratio):
	#read label
	with open(os.path.join(data_dir, label_file), 'r') as f:
		#skip first line
		lines = f.readlines()[1:]
		tokens = [l.rstrip().split(',') for l in lines]
		idx_label = dict(((int(idx), label) for idx, label in tokens))
	labels = set(idx_label.values())

	num_train = len(os.listdir(os.path.join(data_dir, train_dir)))
Example #22
def install_stm32flash():
    if not os.path.exists(install_dir + "quantracker-master"):
        print("stm32flash needs to have Quantracker installed first")
        return False

    if platform.system() == 'Linux':
        if not os.path.exists(install_dir +
                              "quantracker-master/bin/stm32flash"):
            exn = "stm32flash"
            if not os.path.exists(exn):
                tarf = "stm32flash-0.4.tar.gz"
                if not os.path.exists(tarf):
                    url = "http://sourceforge.net/projects/stm32flash/files/stm32flash-0.4.tar.gz"
                    print("retrieving stm32flash ...")
                    try:
                        urllib.urlretrieve(url, tarf)
                    except:
                        print("Couldnt retrieve \"" + url +
                              "\". Are you connected to the internet? ")
                        return False
                try:
                    print("extracting stm32flash ...")
                    t = tarfile.open(tarf, 'r')
                    t.extractall()
                except:
                    print("Couldnt extract \"" + tarf +
                          " Possibly download corrupted or interrupted.")
                    print("Delete it and restart installer to retry")
                    return False
            try:
                print("building stm32flash ...")
                os.system("make -C " + exn)
            except:
                printf("unknown error in making stm32flash")
                return False
            if not os.path.exists(exn + "/stm32flash"):
                print("Failed to build stm32flash")
                return False
            try:
                print("installing stm32flash ...")
                os.rename(exn + "/stm32flash",
                          install_dir + "quantracker-master/bin/stm32flash")
                print("---[stm32flash installed]---")
                return True
            except:
                print("Couldnt rename \"" + exn + "\" to \"" + install_dir +
                      exn + "\". Check target directory status")
                return False

        else:
            print("found pre-existing stm32flash linux install")
            return True

    elif platform.system() == 'Windows':
        if not os.path.exists(install_dir +
                              "quantracker-master/bin/stm32flash.exe"):
            stm32flash_stub_path = "quantracker-master/bin/stm32flash_win.zip"
            stm32flash_path = install_dir + stm32flash_stub_path
            if not os.path.exists(stm32flash_path):
                print("cant find " + stm32flash_path)
                return False
            print("extracting stm32flash")
            try:
                z = zipfile.ZipFile(stm32flash_path)
                z.extractall(install_dir + "quantracker-master/bin/")
                print("---[stm32flash installed]---")
                return True
            except:
                print("Couldnt extract \"" + stm32flash_path +
                      "\" Possibly download corrupted or interrupted.")
                print("Delete quantracker and restart installer to retry")
                return False
        else:
            print("found pre-existing stm32flash Windows install")
            return True
    else:
        #shouldn't get here
        print("unknown OS .. quitting")
        exit(-1)
Example #23
import boto3
import io
import zipfile
import mimetypes

s3 = boto3.resource('s3')

portfolio_bucket = s3.Bucket('peter.rooke.portfolio')
build_bucket = s3.Bucket('project.build')

portfolio_zip = io.BytesIO()
build_bucket.download_fileobj('portfoliobuild.zip', portfolio_zip)

with zipfile.ZipFile(portfolio_zip) as myzip:
    for nm in myzip.namelist():
        obj = myzip.open(nm)
        portfolio_bucket.upload_fileobj(
            obj, nm, ExtraArgs={'ContentType': mimetypes.guess_type(nm)[0]})
        portfolio_bucket.Object(nm).Acl().put(ACL='public-read')
Example #24
import zipfile, os
from pathlib import Path

p = Path.home()

exampleZip = zipfile.ZipFile(p / 'example.zip')
exampleZip.namelist()
spamInfo = exampleZip.getinfo('spam.txt')
spamInfo.file_size
spamInfo.compress_size

print(
    f'Compressed file is {round(spamInfo.file_size / spamInfo.compress_size, 2)}x smaller!'
)
Example #25
model.summary()

# Get the Horse or Human dataset
!wget --no-check-certificate https://storage.googleapis.com/laurencemoroney-blog.appspot.com/horse-or-human.zip -O /tmp/horse-or-human.zip

# Get the Horse or Human Validation dataset
!wget --no-check-certificate https://storage.googleapis.com/laurencemoroney-blog.appspot.com/validation-horse-or-human.zip -O /tmp/validation-horse-or-human.zip 
  
from tensorflow.keras.preprocessing.image import ImageDataGenerator

import os
import zipfile

local_zip = '//tmp/horse-or-human.zip'
zip_ref = zipfile.ZipFile(local_zip, 'r')
zip_ref.extractall('/tmp/training')
zip_ref.close()

local_zip = '//tmp/validation-horse-or-human.zip'
zip_ref = zipfile.ZipFile(local_zip, 'r')
zip_ref.extractall('/tmp/validation')
zip_ref.close()

# Set up directories
train_horses_dir = os.path.join(train_dir, 'horses')
train_humans_dir = os.path.join(train_dir, 'humans')
validation_horses_dir = os.path.join(validation_dir, 'horses')
validation_humans_dir = os.path.join(validation_dir, 'humans')

train_horses_fnames = os.listdir(train_horses_dir)