def reduce_python(self):
    """Trim the bundled Python 2.7 tree and archive its stdlib as python27.zip.

    Prunes docs, binaries and static libraries from the dist tree, strips
    module sources, then zips what is left of lib/python2.7 while keeping
    config/ and site-packages/ on disk outside the archive.  The caller's
    working directory is restored on exit.
    """
    print("Reduce python")
    saved_cwd = os.getcwd()
    try:
        print("Remove files unlikely to be used")
        python_root = join(self.ctx.dist_dir, "root", "python")
        os.chdir(python_root)
        sh.rm("-rf", "share")
        sh.rm("-rf", "bin")
        os.chdir(join(python_root, "lib"))
        sh.rm("-rf", "pkgconfig")
        sh.rm("libpython2.7.a")
        os.chdir(join(python_root, "lib", "python2.7"))
        # strip compiled and source modules from the stdlib tree
        sh.find(".", "-iname", "*.pyc", "-exec", "rm", "{}", ";")
        sh.find(".", "-iname", "*.py", "-exec", "rm", "{}", ";")
        #sh.find(".", "-iname", "test*", "-exec", "rm", "-rf", "{}", ";")
        sh.rm("-rf", "wsgiref", "bsddb", "curses", "idlelib", "hotshot")
        sh.rm("-rf", sh.glob("lib*"))
        # now create the zip.
        print("Create a python27.zip")
        # drop build leftovers from config/ before setting it aside
        for build_leftover in ("libpython2.7.a", "python.o", "config.c.in",
                               "makesetup", "install-sh"):
            sh.rm("config/" + build_leftover)
        # keep config/ and site-packages/ out of the archive: park them one
        # level up, zip the rest, wipe it, then bring them back
        sh.mv("config", "..")
        sh.mv("site-packages", "..")
        sh.zip("-r", "../python27.zip", sh.glob("*"))
        sh.rm("-rf", sh.glob("*"))
        sh.mv("../config", ".")
        sh.mv("../site-packages", ".")
    finally:
        os.chdir(saved_cwd)
def package_zip(self, file_name, files):
    """Build (or reuse) a deployment zip from a list of file/dir specs.

    Each entry in ``files`` is a dict mapping a type ('file' or 'dir') to a
    path.  'file' entries are added relative to ``self.code_dir``; 'dir'
    entries are added with the directory itself as the archive root.
    Returns the path to the zip.  Packaging is skipped when the zip already
    exists and ``self.force_deploy`` is false.
    """
    zip_file, cwd = self.prepare_path(file_name)
    if os.path.isfile(zip_file) and not self.force_deploy:
        print("==> File {} exists, moving on.".format(zip_file))
        return zip_file
    print("==> Packaging the following: {}".format(file_name))
    for file_info in files:
        for file_type, name in file_info.items():
            if file_type == 'file':
                print("Adding file {} to {}".format(name, zip_file))
                os.chdir(self.code_dir)
                # fix: restore the working directory even if zip fails,
                # otherwise the process is left stranded in code_dir
                try:
                    sh.zip('-gr9', zip_file, name)
                finally:
                    os.chdir(cwd)
            elif file_type == 'dir':
                print("Adding directory contents of {} to {}".format(
                    name, zip_file))
                os.chdir(name)
                try:
                    sh.zip('-gr9', zip_file, '.')
                finally:
                    os.chdir(cwd)
            else:
                print("something went wrong")
    return zip_file
def reduce_python(self):
    """Shrink the bundled Python 2.7 runtime and pack its stdlib into python27.zip.

    Deletes docs/binaries/static libs from the dist tree, strips .py/.pyc
    sources, then zips the remaining stdlib into a single archive.  The
    caller's working directory is always restored.
    """
    print("Reduce python")
    oldpwd = os.getcwd()
    try:
        print("Remove files unlikely to be used")
        os.chdir(join(self.ctx.dist_dir, "root", "python"))
        sh.rm("-rf", "share")
        sh.rm("-rf", "bin")
        os.chdir(join(self.ctx.dist_dir, "root", "python", "lib"))
        sh.rm("-rf", "pkgconfig")
        sh.rm("libpython2.7.a")
        os.chdir(join(self.ctx.dist_dir, "root", "python", "lib", "python2.7"))
        # strip compiled and source modules
        # NOTE(review): removing both *.py and *.pyc assumes the needed
        # modules are provided elsewhere in the build — confirm.
        sh.find(".", "-iname", "*.pyc", "-exec", "rm", "{}", ";")
        sh.find(".", "-iname", "*.py", "-exec", "rm", "{}", ";")
        #sh.find(".", "-iname", "test*", "-exec", "rm", "-rf", "{}", ";")
        sh.rm("-rf", "wsgiref", "bsddb", "curses", "idlelib", "hotshot")
        sh.rm("-rf", sh.glob("lib*"))
        # now create the zip.
        print("Create a python27.zip")
        # remove build leftovers from config/ before archiving
        sh.rm("config/libpython2.7.a")
        sh.rm("config/python.o")
        sh.rm("config/config.c.in")
        sh.rm("config/makesetup")
        sh.rm("config/install-sh")
        # config/ and site-packages/ must stay unzipped: move them out, zip
        # everything else, wipe it, then move them back
        sh.mv("config", "..")
        sh.mv("site-packages", "..")
        sh.zip("-r", "../python27.zip", sh.glob("*"))
        sh.rm("-rf", sh.glob("*"))
        sh.mv("../config", ".")
        sh.mv("../site-packages", ".")
    finally:
        os.chdir(oldpwd)
def reduce_python(self):
    """Shrink the bundled Python 3.7 runtime and pack its stdlib into python37.zip.

    Removes binaries, build configuration leftovers and unused stdlib
    packages, precompiles the remaining modules to bytecode, then zips the
    stdlib (keeping config-3.7m-darwin/ and site-packages/ on disk).  The
    caller's working directory is always restored.
    """
    logger.info("Reduce python")
    oldpwd = os.getcwd()
    try:
        logger.info("Remove files unlikely to be used")
        os.chdir(join(self.ctx.dist_dir, "root", "python3"))
        # os.execve("/bin/bash", ["/bin/bash"], env=os.environ)
        sh.rm("-rf", "bin", "share")
        # platform binaries and configuration
        os.chdir(join(
            self.ctx.dist_dir, "root", "python3", "lib",
            "python3.7", "config-3.7m-darwin"))
        sh.rm("libpython3.7m.a")
        sh.rm("python.o")
        sh.rm("config.c.in")
        sh.rm("makesetup")
        sh.rm("install-sh")
        # cleanup pkgconfig and compiled lib
        os.chdir(join(self.ctx.dist_dir, "root", "python3", "lib"))
        sh.rm("-rf", "pkgconfig")
        sh.rm("-f", "libpython3.7m.a")
        # cleanup python libraries
        os.chdir(join(
            self.ctx.dist_dir, "root", "python3", "lib", "python3.7"))
        sh.rm("-rf", "wsgiref", "curses", "idlelib", "lib2to3", "ensurepip",
              "turtledemo", "lib-dynload", "venv", "pydoc_data")
        sh.find(".", "-path", "*/test*/*", "-delete")
        sh.find(".", "-name", "*.exe", "-type", "f", "-delete")
        sh.find(".", "-name", "test*", "-type", "d", "-delete")
        sh.find(".", "-iname", "*.pyc", "-delete")
        sh.find(".", "-path", "*/__pycache__/*", "-delete")
        sh.find(".", "-name", "__pycache__", "-type", "d", "-delete")
        # now precompile to Python bytecode
        hostpython = sh.Command(self.ctx.hostpython)
        shprint(hostpython, "-m", "compileall", "-f", "-b")
        # sh.find(".", "-iname", "*.py", "-delete")
        # some pycache are recreated after compileall
        sh.find(".", "-path", "*/__pycache__/*", "-delete")
        sh.find(".", "-name", "__pycache__", "-type", "d", "-delete")
        # create the lib zip
        logger.info("Create a python3.7.zip")
        # keep the config dir and site-packages out of the archive
        sh.mv("config-3.7m-darwin", "..")
        sh.mv("site-packages", "..")
        sh.zip("-r", "../python37.zip", sh.glob("*"))
        sh.rm("-rf", sh.glob("*"))
        sh.mv("../config-3.7m-darwin", ".")
        sh.mv("../site-packages", ".")
    finally:
        os.chdir(oldpwd)
def zip_package():
    """Zip the contents of src/ into curator-lambda.zip in the current directory.

    Returns:
        (file_name, zip_file): the archive's base name and absolute path.
    """
    cwd = os.getcwd()
    file_name = 'curator-lambda.zip'
    # os.path.join instead of manual '/' concatenation
    zip_file = os.path.join(cwd, file_name)
    os.chdir('src/')
    # fix: restore the working directory even if zipping fails; the original
    # left the process stranded inside src/ on error
    try:
        sh.zip('-r9', zip_file, '.')
    finally:
        os.chdir(cwd)
    return file_name, zip_file
def download(self):
    """ Copy it right from the source """
    #: Zip the src
    src_root = self.get_package_root()
    with current_directory(src_root):
        dst_root = join(self.ctx.packages_path, self.name)
        if not exists(dst_root):
            os.makedirs(dst_root)
        # fix: the original zipped twice to the identical destination —
        # join(dst_root, url) == join(packages_path, name, url) — doing the
        # work twice; a single call is sufficient
        sh.zip('-r', join(dst_root, self.url), 'src')
def reduce_python(self):
    """Shrink the bundled Python 3.8 runtime and pack its stdlib into python38.zip.

    Removes binaries, build configuration leftovers and unused stdlib
    packages, precompiles the remaining modules to bytecode, then zips the
    stdlib (keeping config-3.8-darwin/ and site-packages/ on disk).  Each
    step runs inside a `cd` context manager, so the working directory is
    restored after every block.
    """
    logger.info("Reduce python")
    logger.info("Remove files unlikely to be used")
    with cd(join(self.ctx.dist_dir, "root", "python3")):
        sh.rm("-rf", "bin", "share")
    # platform binaries and configuration
    with cd(join(
            self.ctx.dist_dir, "root", "python3", "lib",
            "python3.8", "config-3.8-darwin")):
        sh.rm(
            "libpython3.8.a",
            "python.o",
            "config.c.in",
            "makesetup",
            "install-sh",
        )
    # cleanup pkgconfig and compiled lib
    with cd(join(self.ctx.dist_dir, "root", "python3", "lib")):
        sh.rm("-rf", "pkgconfig", "libpython3.8.a")
    # cleanup python libraries
    with cd(join(
            self.ctx.dist_dir, "root", "python3", "lib", "python3.8")):
        sh.rm("-rf", "curses", "idlelib", "lib2to3", "ensurepip",
              "turtledemo", "lib-dynload", "venv", "pydoc_data")
        sh.find(".", "-path", "*/test*/*", "-delete")
        sh.find(".", "-name", "*.exe", "-type", "f", "-delete")
        sh.find(".", "-name", "test*", "-type", "d", "-delete")
        sh.find(".", "-iname", "*.pyc", "-delete")
        sh.find(".", "-path", "*/__pycache__/*", "-delete")
        sh.find(".", "-name", "__pycache__", "-type", "d", "-delete")
        # now precompile to Python bytecode
        hostpython = sh.Command(self.ctx.hostpython)
        shprint(hostpython, "-m", "compileall", "-f", "-b")
        # sh.find(".", "-iname", "*.py", "-delete")
        # some pycache are recreated after compileall
        sh.find(".", "-path", "*/__pycache__/*", "-delete")
        sh.find(".", "-name", "__pycache__", "-type", "d", "-delete")
        # create the lib zip
        logger.info("Create a python3.8.zip")
        # keep the config dir and site-packages out of the archive
        sh.mv("config-3.8-darwin", "..")
        sh.mv("site-packages", "..")
        sh.zip("-r", "../python38.zip", sh.glob("*"))
        sh.rm("-rf", sh.glob("*"))
        sh.mv("../config-3.8-darwin", ".")
        sh.mv("../site-packages", ".")
def backup(user, database, date_suffix, config_file, config_dir):
    """ Back up PG database, copy to S3 """
    # timestamp used to make the S3 key unique when date_suffix is set
    now = str(datetime.datetime.now())

    # backup
    if date_suffix:
        s3_file_name = f"{database}-{now}.psql"
    else:
        s3_file_name = f"{database}.psql"
    # -w: never prompt for a password (relies on .pgpass or trust auth)
    db_dump = sh.pg_dump("-U", f"{user}", "-w", f"{database}")

    # upload to S3
    # NOTE(review): aws_region, aws_profile, backup_bucket and tmp appear to
    # be module-level settings defined outside this block — confirm.
    s3simple = S3Simple(region_name=aws_region,
                        profile=aws_profile,
                        bucket_name=backup_bucket)
    s3simple.put_to_s3(key=s3_file_name, body=str(db_dump))

    # config file or dir?
    if config_file:
        # upload the config file under its base name
        file_parts = config_file.split('/')
        s3_name = file_parts[-1]
        s3simple.send_file_to_s3(local_file=config_file, s3_file=s3_name)
    if config_dir:
        # zip the whole config directory and upload the archive
        path_parts = config_dir.split('/')
        s3_name = 'backup' + '_'.join(path_parts) + '.zip'
        # assumes tmp ends with a path separator — TODO confirm
        zip_name = tmp + s3_name
        result = sh.zip("-r", zip_name, config_dir)
        s3simple.send_file_to_s3(local_file=zip_name, s3_file=s3_name)
def main(image_id, out_path, aws_access_key_id=None, aws_secret_access_key=None,
         aws_session_token=None, tiles=None, bands=None):
    """Download data from AWS open data and convert into SAFE format compatible with ESA tools"""
    # set environment variables to enable AWS access.
    # fix: os.environ values must be strings — the original unconditionally
    # assigned the None defaults, which raises TypeError; only set the
    # credentials that were actually supplied
    if aws_access_key_id is not None:
        os.environ['AWS_ACCESS_KEY_ID'] = aws_access_key_id
    if aws_secret_access_key is not None:
        os.environ['AWS_SECRET_ACCESS_KEY'] = aws_secret_access_key
    if aws_session_token is not None:
        os.environ['AWS_SESSION_TOKEN'] = aws_session_token

    # parse input args: comma-separated strings -> lists (None means "all")
    if tiles is not None:
        tile_list = list(map(str, tiles.split(',')))
    else:
        tile_list = None
    if bands is not None:
        band_list = list(map(str, bands.split(',')))
    else:
        band_list = None

    print("Compiling SAFE archive for {}...".format(image_id))
    product_request = AwsProductRequest(product_id=image_id, data_folder=out_path,
                                        safe_format=True, tile_list=tile_list,
                                        bands=band_list)
    product_request.save_data()

    # zip it all up
    # necessary because otherwise gbdx will drop empty folders and sen2cor will fail
    print("Zipping up SAFE archive")
    archive = os.path.join(out_path, '{}.SAFE'.format(image_id))
    # use sh to do this because the shutil.make_archive() function doesn't seem to work with SNAP
    sh.zip('-rm', archive.replace('.SAFE', '.zip'), os.path.basename(archive),
           '-4', _cwd=os.path.dirname(archive))
    print("Process completed successfully.")
def main(image_id, out_path, aws_access_key_id=None, aws_secret_access_key=None,
         aws_session_token=None):
    """Download data from AWS open data S3 and convert into SAFE format compatible with SNAP and PyroSAR"""
    print("Downloading files from S3")
    # archive is used below as the path of the downloaded <image_id>.SAFE tree
    archive = utils.download_s1_image(
        image_id, out_path, dry_run=False,
        aws_access_key_id=aws_access_key_id,
        aws_secret_access_key=aws_secret_access_key,
        aws_session_token=aws_session_token)

    # rename the files
    print("Renaming files")
    utils.rename_files(image_id, dir=os.path.join(archive, 'measurement'),
                       ext='tiff', prefix=None)
    utils.rename_files(image_id, dir=os.path.join(archive, 'annotation'),
                       ext='xml', prefix=None)
    utils.rename_files(image_id, dir=os.path.join(archive, 'annotation', 'calibration'),
                       ext='xml', prefix='noise-')
    utils.rename_files(image_id, dir=os.path.join(archive, 'annotation', 'calibration'),
                       ext='xml', prefix='calibration-')

    # zip it all up ('-m' makes zip delete the .SAFE tree after archiving)
    print("Zipping up SAFE archive")
    # use sh to do this because the shutil.make_archive() function doesn't seem to work with SNAP
    sh.zip('-rm', archive.replace('.SAFE', '.zip'), os.path.basename(archive),
           '-4', _cwd=os.path.dirname(archive))
    print("Process completed successfully.")
def save_as_new_datasource(self, dataset_title, parent=None, *args, **kwargs):
    """Materialize this query as a zipped datasource and register it.

    Renders the data as a dataframe, writes it to a temp directory with the
    foreign resource's driver, zips the result, and creates a new
    DataResource from the archive.  Temporary files are removed before the
    new resource is returned.
    """
    df = self.as_dataframe(*args, **kwargs)
    tempfile = uuid4()
    tempfile = os.path.join('/tmp', tempfile.hex)
    self.resource.foreign_resource.driver_instance.from_dataframe(df, tempfile, self.resource.foreign_resource.srs)
    sh.zip('-r', tempfile + '.zip', sh.glob(tempfile + "/*"))
    # fix: open in binary mode — a zip archive is binary data; text mode
    # corrupts the upload and raises decode errors on Python 3
    with open(tempfile + '.zip', 'rb') as input:
        ds = models.DataResource.objects.create(
            title = dataset_title,
            parent = parent if parent else self.resource.parent,
            resource_file = File(input)
        )
    os.unlink(tempfile + '.zip')
    shutil.rmtree(tempfile)
    return ds
def save_as_new_datasource(self, dataset_title, parent=None, *args, **kwargs):
    """Materialize this query as a zipped datasource and register it.

    Writes the dataframe out via the foreign resource's driver, zips the
    result, creates a DataResource from the archive, and cleans up the
    temporary files before returning.
    """
    df = self.as_dataframe(*args, **kwargs)
    tempfile = uuid4()
    tempfile = os.path.join('/tmp', tempfile.hex)
    self.resource.foreign_resource.driver_instance.from_dataframe(
        df, tempfile, self.resource.foreign_resource.srs)
    sh.zip('-r', tempfile + '.zip', sh.glob(tempfile + "/*"))
    # fix: zip archives are binary — open with 'rb' so the uploaded bytes
    # are not mangled by text-mode decoding
    with open(tempfile + '.zip', 'rb') as input:
        ds = models.DataResource.objects.create(
            title=dataset_title,
            parent=parent if parent else self.resource.parent,
            resource_file=File(input))
    os.unlink(tempfile + '.zip')
    shutil.rmtree(tempfile)
    return ds
# NOTE(review): Python 2 code (print statements, boto-style S3 API).
def deploy(name, fn=None, bucket='lambda_methods'):
    # Build a Lambda deployment package for `name`, upload it to S3 and
    # (re)create the Lambda function.  `fn`, when given, supplies the source
    # written to src/custom.py inside the package.
    print 'Preparing lambda method:', name
    orig_dir = sh.pwd().strip()
    dirname = '{}/{}'.format(orig_dir, name)
    zip_name = '{}/{}.zip'.format(dirname, name)
    # start from a clean working directory for this method
    if os.path.exists( dirname ):
        sh.rm('-rf', dirname)
    # cp skeleton project data
    sh.cp('-r', os.path.join(os.path.dirname(__file__), 'project'), dirname)
    base_zip = '{}/dist.zip'.format(dirname)
    if not os.path.exists(base_zip):
        # build native dependencies inside a manylinux container, then start
        # the archive from the build output
        _docker('--rm', '-v', '{}:/app'.format(dirname),
                'quay.io/pypa/manylinux1_x86_64', '/app/scripts/build.sh')
        sh.zip('-9', zip_name, '-j', '{}/README.md'.format(dirname))
        sh.cd(os.path.join(dirname, 'build'))
        sh.zip('-r9', zip_name, sh.glob('*'))
        sh.cd(dirname)
    else:
        sh.mv( base_zip, zip_name )
    if fn is not None:
        with open(os.path.join(dirname, 'src', 'custom.py'), 'w') as fh:
            fh.write(fn)
    # NOTE(review): reconstructed as unconditional (runs whether or not fn
    # was given) — confirm against the original layout.
    sh.cp(os.path.join(dirname, 'src', 'template.py'),
          os.path.join(dirname, 'src', '{}.py'.format(name)))
    sh.cd(os.path.join(dirname, 'src'))
    sh.zip('-r9', zip_name, sh.glob('*'))
    sh.cd(orig_dir)

    def percent_cb(complete, total):
        # boto upload progress callback: one dot per chunk
        sys.stdout.write('.')
        sys.stdout.flush()

    print 'Publishing zip file to S3', 's3://{}/{}.zip'.format(bucket, name)
    b = _s3conn.get_bucket(bucket)
    k = Key(b)
    k.key = '{}.zip'.format(name)
    k.set_contents_from_filename(zip_name, cb=percent_cb, num_cb=10)
    # best-effort delete of any existing function before re-creating it
    # NOTE(review): bare except silently hides all errors here
    try:
        _lambda.delete_function(FunctionName=name)
    except:
        pass
    # clear previously published artifacts for this method
    b = _s3conn.get_bucket('idaho-lambda')
    for key in b.list(prefix=name):
        key.delete()
    print 'Creating function'
    code = {'S3Bucket': bucket, 'S3Key': '{}.zip'.format(name)}
    handler = '{}.handler'.format(name)
    role = 'arn:aws:iam::523345300643:role/lambda_s3_exec_role'
    _lambda.create_function(FunctionName=name, Code=code, Role=role,
                            Handler=handler, Runtime='python2.7',
                            Timeout=60, MemorySize=1024)
# NOTE(review): this chunk begins inside the body of a per-file loop that is
# outside the visible span — f, fext, fab_files, assem_files,
# assembler_extensions and fab_add_files are defined there; the indentation
# below is a reconstruction.
fab_files.append(f)
assem_files.append(f)
if fext[1:] in assembler_extensions:
    assem_files.append(f)
if os.path.basename(f) in fab_add_files:
    fab_files.append(f)
    assem_files.append(f)

# Add any bom files to the assembler zip
boms = glob.glob("*bom*")
assem_files += boms

# archive names follow <project>_<rev>_to_<destination>_<date>
fab_filename = "{0}_{1}_to_fab_{2}".format(project_name, revision, project_date)
assem_filename = "{0}_{1}_to_assembler_{2}".format(project_name, revision, project_date)

print("Project name: {}".format(project_name))
print("Revision: {}".format(revision))
print("Date: {}".format(project_date))
print("Fab files: {}".format(" ".join(fab_files)))
print("Assembler files: {}".format(" ".join(assem_files)))

# no fab outputs means the CAM job has not produced anything yet
if len(fab_files) == 0:
    print("Nothing to do!")
    print("Maybe you should run the cam job")
    sys.exit(1)

# drop stale archives, then zip (the sh library flattens list arguments)
rm("-f", fab_filename)
rm("-f", assem_filename)
sh.zip(fab_filename, fab_files)
sh.zip(assem_filename, assem_files)
output_name = fab_zip.split("_to_fab")[0] + "_{}.zip".format(args.date) # Actually make the zip # Generate the folders we use to organize things mkdir(FAB_FOLDER) mkdir(ASSEM_FOLDER) mkdir(IMAGE_FOLDER) # Put the contents of the zip files in the folders # This way we don't have to replicate that logic unzip(fab_zip, "-d", FAB_FOLDER) unzip(assem_zip, "-d", ASSEM_FOLDER) # Put the images in the images folder for jpg in jpgs: cp(jpg, IMAGE_FOLDER) # Get the filenames for fab fab_files = glob.glob("{}/*".format(FAB_FOLDER)) assem_files = glob.glob("{}/*".format(ASSEM_FOLDER)) image_files = glob.glob("{}/*".format(IMAGE_FOLDER)) combined = [output_name] + schs + brds + pdfs + dxfs + infos + boms + fab_files + assem_files + image_files sh.zip(*combined) rm("-rf", FAB_FOLDER) rm("-rf", ASSEM_FOLDER) rm("-rf", IMAGE_FOLDER)
def zip(folder, zipfile):
    """Recursively zip the contents of `folder` into `zipfile` and return it.

    NOTE: the function name shadows the builtin zip(); kept for caller
    compatibility.
    """
    cwd = os.getcwd()
    sh.cd(folder)
    # fix: restore the original working directory even when zipping fails;
    # the original left the process inside `folder` on error
    try:
        sh.zip("-r", "-9", zipfile, os.listdir())
    finally:
        sh.cd(cwd)
    return zipfile
# NOTE(review): this chunk begins inside the body of a per-file loop that is
# outside the visible span — f, fext, fab_files, assem_files,
# assembler_extensions and fab_add_files are defined there; indentation is a
# reconstruction.
assem_files.append(f)
if fext[1:] in assembler_extensions:
    assem_files.append(f)
if os.path.basename(f) in fab_add_files:
    fab_files.append(f)
    assem_files.append(f)

# Add any bom files to the assembler zip
boms = glob.glob('*bom*')
assem_files += boms

# archive names follow <project>_<rev>_to_<destination>_<date>
fab_filename = '{0}_{1}_to_fab_{2}'.format(project_name, revision, project_date)
assem_filename = '{0}_{1}_to_assembler_{2}'.format(project_name, revision, project_date)

print('Project name: {}'.format(project_name))
print('Revision: {}'.format(revision))
print('Date: {}'.format(project_date))
print('Fab files: {}'.format(' '.join(fab_files)))
print('Assembler files: {}'.format(' '.join(assem_files)))

# no fab outputs means the CAM job has not produced anything yet
if len(fab_files) == 0:
    print('Nothing to do!')
    print('Maybe you should run the cam job')
    sys.exit(1)

# drop stale archives, then zip (the sh library flattens list arguments)
rm('-f', fab_filename)
rm('-f', assem_filename)
sh.zip(fab_filename, fab_files)
sh.zip(assem_filename, assem_files)
# Repack every zip in the current directory: extract it, rebuild an inner
# archive holding the .videofx and .lic files, then wrap the inner zip and
# the preview image back into an archive under the original name.
zips = sorted(glob.glob('./*.zip'))
for zip in zips:
    # strip the leading './' and the trailing '.zip'
    name = zip[2:-4]
    sh.unzip(zip)
    print(f"名字是{name}")
    # sh.zip(f"{name}zip.zip", "./*.videofx", "./*.lic")
    videofx_full_name = ""
    lic_full_name = ""
    pic_full_name = ""
    # locate the extracted component files by extension
    # NOTE(review): os.walk scans the whole tree but os.path.split(file)
    # keeps only the base name, so nested duplicates overwrite earlier hits.
    for root, dirs, files in os.walk("./"):
        for file in files:
            if os.path.splitext(file)[-1] == '.videofx':
                videofx_full_name = os.path.split(file)[1]
            if os.path.splitext(file)[-1] == '.lic':
                lic_full_name = os.path.split(file)[1]
            if os.path.splitext(file)[-1] == '.png':
                pic_full_name = os.path.split(file)[1]
            elif os.path.splitext(file)[-1] == '.jpg':
                pic_full_name = os.path.split(file)[1]
    print(videofx_full_name)
    print(lic_full_name)
    print(pic_full_name)
    # inner archive: effect + license
    sh.zip(f"{name}zip.zip", videofx_full_name, lic_full_name)
    sh.rm("-rf", lic_full_name)
    sh.rm("-rf", videofx_full_name)
    sh.rm("-rf", zip)
    # outer archive reuses the original zip's name
    sh.zip(f"{zip}", pic_full_name, f"{name}zip.zip")
    sh.rm("-rf", pic_full_name)
    sh.rm("-rf", f"{name}zip.zip")
# NOTE(review): this chunk opens inside a dict literal whose start
# (`extensions = {`, per the sibling script) lies outside the visible span;
# it maps Sunstone CAM output extensions to standard Gerber extensions.
    'top': 'GTL',
    'bot': 'GBL',
    'smt': 'GTS',
    'smb': 'GBS',
    'slk': 'GTO',
    'bsk': 'GBO',
    'oln': 'GKO',
    'L1': 'GTL',
    'L2': 'G2L',
    'L3': 'G3L',
    'L4': 'GBL',
    'drd': 'XLN'
}

print('Looking for gerber files created by Sunstone CAM')

# copy every recognized CAM output to an OSH-Park-named twin
new_files = []
for f in os.listdir('.'):
    fname, ext = os.path.splitext(f)
    if ext[1:] in extensions:
        new_name = '{}_oshpark.{}'.format(fname, extensions[ext[1:]])
        new_files.append(new_name)
        cp(f, new_name)

# optionally bundle the renamed files for upload
make_zip = query_yes_no(
    'Would you like to create a ZIP of the OSH Park Gerbers')
if make_zip and len(new_files) > 0:
    sh.zip('osh_park_gerbers.zip', *new_files)
# map Sunstone CAM output extensions to the Gerber extensions OSH Park expects
extensions = {
    "top": "GTL",
    "bot": "GBL",
    "smt": "GTS",
    "smb": "GBS",
    "slk": "GTO",
    "bsk": "GBO",
    "oln": "GKO",
    "L1": "GTL",
    "L2": "G2L",
    "L3": "G3L",
    "L4": "GBL",
    "drd": "XLN",
}

print("Looking for gerber files created by Sunstone CAM")

# copy every recognized CAM output to an OSH-Park-named twin
new_files = []
for f in os.listdir("."):
    fname, ext = os.path.splitext(f)
    if ext[1:] in extensions:
        new_name = "{}_oshpark.{}".format(fname, extensions[ext[1:]])
        new_files.append(new_name)
        cp(f, new_name)

# optionally bundle the renamed files for upload
make_zip = query_yes_no("Would you like to create a ZIP of the OSH Park Gerbers")
if make_zip and len(new_files) > 0:
    sh.zip("osh_park_gerbers.zip", *new_files)
# Add any bom files to the assembler zip boms = glob.glob('*bom*') assem_files += boms fab_filename = '{0}_{1}_to_fab_{2}'.format(project_name, revision, project_date) assem_filename = '{0}_{1}_to_assembler_{2}'.format(project_name, revision, project_date) stencil_filename = '{0}_{1}_to_stencil_{2}'.format(project_name, revision, project_date) print('Project name: {}'.format(project_name)) print('Revision: {}'.format(revision)) print('Date: {}'.format(project_date)) print('Fab files: {}'.format(' '.join(fab_files))) print('Assembler files: {}'.format(' '.join(assem_files))) print('Stencil files: {}'.format(' '.join(stencil_files))) if len(fab_files) == 0: print('Nothing to do!') print('Maybe you should run the cam job') sys.exit(1) rm('-f', fab_filename) rm('-f', assem_filename) rm('-f', stencil_filename) sh.zip(fab_filename, fab_files) sh.zip(assem_filename, assem_files) sh.zip(stencil_filename, stencil_files)
assem_files.append(f) # Add any bom files to the assembler zip boms = glob.glob('*bom*') assem_files += boms fab_filename = '{0}_{1}_to_fab_{2}'.format(project_name, revision, project_date) assem_filename = '{0}_{1}_to_assembler_{2}'.format(project_name, revision, project_date) stencil_filename = '{0}_{1}_to_stencil_{2}'.format(project_name, revision, project_date) print('Project name: {}'.format(project_name)) print('Revision: {}'.format(revision)) print('Date: {}'.format(project_date)) print('Fab files: {}'.format(' '.join(fab_files))) print('Assembler files: {}'.format(' '.join(assem_files))) print('Stencil files: {}'.format(' '.join(stencil_files))) if len(fab_files) == 0: print('Nothing to do!') print('Maybe you should run the cam job') sys.exit(1) rm('-f', fab_filename) rm('-f', assem_filename) rm('-f', stencil_filename) sh.zip(fab_filename, fab_files) sh.zip(assem_filename, assem_files) sh.zip(stencil_filename, stencil_files)
# NOTE(review): fragment — prj, assign_srs, command, args, kwargs,
# files_to_delete, response_url and abort_url are defined by earlier,
# unseen code.
prj.write(assign_srs)

# fix: the original was missing the closing parenthesis on sh.Command(...),
# a syntax error
cmd = sh.Command("/usr/bin/{command}".format(command=command))
cmd(*args, _out='stdout.txt', _err='stdout.err', **kwargs)

# clean up source files, because whatever's left in the directory we're going to post
for f in files_to_delete:
    os.unlink(f)

files_to_post = []
for f in os.listdir('.'):
    # fix: os.isdir does not exist — the check must be os.path.isdir
    if os.path.isdir(f):
        zfname = f.split('.')[0] + '.zip'
        sh.zip('-r', zfname, f)
        files_to_post.append(zfname)
    else:
        files_to_post.append(f)

# fix: open in binary mode for the multipart upload; text mode can corrupt
# the zipped artifacts
files_to_post = {"file_{n}".format(n=n): (f, open(f, 'rb'))
                 for n, f in enumerate(files_to_post)}
# fix: the response was never assigned, so the r.raise_for_status() calls
# below referenced an undefined name
r = requests.post(response_url, files=files_to_post)

# if for some reason the results URL doesn't work, let's say something
try:
    r.raise_for_status()
except Exception as e:
    requests.post(abort_url, data={ 'error_text': str(e) })
    r.raise_for_status() # in case the abort URL is effed
# Actually make the zip # Generate the folders we use to organize things mkdir(FAB_FOLDER) mkdir(ASSEM_FOLDER) mkdir(IMAGE_FOLDER) # Put the contents of the zip files in the folders # This way we don't have to replicate that logic unzip(fab_zip, '-d', FAB_FOLDER) unzip(assem_zip, '-d', ASSEM_FOLDER) # Put the images in the images folder for jpg in jpgs: cp(jpg, IMAGE_FOLDER) # Get the filenames for fab fab_files = glob.glob('{}/*'.format(FAB_FOLDER)) assem_files = glob.glob('{}/*'.format(ASSEM_FOLDER)) image_files = glob.glob('{}/*'.format(IMAGE_FOLDER)) combined = [output_name] + schs + brds + pdfs + dxfs + infos + boms + \ fab_files + assem_files + image_files sh.zip(*combined) rm('-rf', FAB_FOLDER) rm('-rf', ASSEM_FOLDER) rm('-rf', IMAGE_FOLDER)