Example #1
def zip():
    # assumes: from zipfile import ZipFile as zipf, ZIP_DEFLATED; import walkfile
    # open the archive for writing with DEFLATE compression
    fl = zipf('e:\\adikstools\\wf.zip', 'w', ZIP_DEFLATED)
    b = 'e:\\adikstools\\mysmsbackup\\'
    # walkfile.Walk() is a project helper; walk(b) returns the file paths to archive
    wal = walkfile.Walk()
    files = wal.walk(b)
    for i in files:
        fl.write(i)
    fl.close()
    print('completed')
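The walkfile.Walk helper above is project-specific. A minimal standard-library sketch of the same pattern (archive every file under a directory), assuming only os and zipfile and using hypothetical names zip_tree, src_dir and archive_path, could look like this:

import os
from zipfile import ZipFile, ZIP_DEFLATED

def zip_tree(src_dir, archive_path):
    # walk the tree and add each file, storing paths relative to src_dir
    with ZipFile(archive_path, 'w', ZIP_DEFLATED) as zf:
        for root, _dirs, names in os.walk(src_dir):
            for name in names:
                full = os.path.join(root, name)
                zf.write(full, arcname=os.path.relpath(full, src_dir))
    print('completed')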
Example #2
def check_for_files(self):
    # exists, rmfr, mkdirs and failure are helpers defined elsewhere in this project
    try:
        # remove the extracted files if they are there
        if exists(self.files['tempDir']):
            rmfr(self.files['tempDir'])
        # make the temp dir
        mkdirs(self.files['tempDir'])
        # unpack the patch archive into the temp dir
        if exists(self.files['patches']):
            with zipf(self.files['patches'], "r") as zipr:
                zipr.extractall(self.files['tempDir'])
    except Exception:
        failure()
Example #3
def webScrapping(self):
    #Method for web scraping
    #assumes: import requests; import pandas as pd; from io import BytesIO;
    #         from bs4 import BeautifulSoup as bs; from zipfile import ZipFile as zipf

    #Website URL
    website = 'https://simplemaps.com'
    url = 'https://simplemaps.com/resources/free-country-cities'

    ### Extracting 'US Cities' CSV Link ###

    #Accessing the webpage
    page = requests.get(url)
    page.raise_for_status()
    page_data = bs(page.content, 'html.parser')

    #Find the CSV link and store it in the variable 'csv_url'
    header_tags = page_data.find('ul', class_='nav nav-pills')
    header_ahref_list = header_tags.select('a')

    for row in header_ahref_list:
        if row.get_text().strip() == 'US Cities':
            temp_link = row['href']
            csv_url = website + temp_link
            break

    ### Extracting City Data ###

    #Accessing the webpage
    city_page = requests.get(csv_url)
    city_page.raise_for_status()
    city_page_data = bs(city_page.content, 'html.parser')

    #Creating the cities dataframe
    csv_tags = city_page_data.find('div', class_='modal-footer')
    csv_ahref_list = csv_tags.select('a')

    for row in csv_ahref_list:
        if row.get_text().strip() == 'Yes, proceed with download':
            temp_link = row['href']
            download_url = website + temp_link
            break

    #Handling the zip file
    csv_request = requests.get(download_url)
    csv_request.raise_for_status()
    zip_file = zipf(BytesIO(csv_request.content))

    city_df = pd.read_csv(zip_file.open('uscities.csv'))

    return city_df
Example #4
def test_read_silixa_zipped():
    files = [(data_dir_zipped_single_ended, 11387947.857184),
             (data_dir_zipped_double_ended, 19613502.26171),
             (data_dir_zipped_double_ended2, 28092965.5188),
             (data_dir_zipped_silixa_long, 2.88763942e+08)]

    for file, stsum in files:
        with zipf(file) as fh:
            ds = read_silixa_files(zip_handle=fh,
                                   timezone_netcdf='UTC',
                                   file_ext='*.xml',
                                   load_in_memory=True)
            np.testing.assert_almost_equal(ds.st.sum(), stsum, decimal=0)
            ds.close()
    pass
Example #5
def extract_all(zipfile, path='themes'):
    # makedirs and UnsafeZipException are helpers defined elsewhere in this project
    pwd = os.getcwd()
    makedirs(path)
    os.chdir(path)
    z = zipf(zipfile)
    namelist = z.namelist()
    # refuse to expand archives whose directory entries try to escape via '..'
    for f in namelist:
        if f.endswith('/') and '..' in f:
            raise UnsafeZipException('The zip file contains ".." and is '
                                     'not safe to expand.')
    # create directories first, then extract the regular files
    for f in namelist:
        if f.endswith('/'):
            makedirs(f)
        else:
            z.extract(f)
    z.close()
    os.chdir(pwd)
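A variant sketch of the same extraction without changing the working directory: ZipFile.extract accepts a target path and creates intermediate directories itself. This assumes only the standard library, uses the hypothetical name extract_all_to, and replaces the project's UnsafeZipException with a plain ValueError:

from zipfile import ZipFile

def extract_all_to(archive_path, path='themes'):
    # refuse any member whose name contains '..', then extract into 'path'
    with ZipFile(archive_path) as z:
        for name in z.namelist():
            if '..' in name:
                raise ValueError('unsafe member name: %r' % name)
            z.extract(name, path=path)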
Example #6
def test_read_silixa_zipped():
    files = [
        data_dir_zipped_single_ended,
        data_dir_zipped_double_ended,
        data_dir_zipped_double_ended2,
        data_dir_zipped_silixa_long,
        # data_dir_zipped_sensornet_single_ended
    ]
    for file in files:
        with zipf(file) as fh:
            ds = read_silixa_files(zip_handle=fh,
                                   timezone_netcdf='UTC',
                                   file_ext='*.xml',
                                   load_in_memory=True)

        assert ds._initialized

    pass
Example #7
from zipfile import ZipFile as zipf

# open the archive, extract a single member into the current directory, then close it
zip = zipf("test.zip", "r")
zip.extract("troj.apk", "./")
zip.close()
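A hedged variant of the same extraction using a context manager, so the archive is closed even if extract() raises:

from zipfile import ZipFile as zipf

with zipf("test.zip", "r") as archive:
    archive.extract("troj.apk", "./")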