def deploy_service(
        repoDeployPlanFilePath: str = 'deploy-repository.json',
        repoHost: str = None,
        targetHosts: list = None,
        application: str = None,
        services: list = None,
        tags: list = None,
        providerSettingsFile: str = None,
        initParamFile: str = None,
        ignoreCmdErr: bool = False,
        workdir: str = '/sardines/shoal',
        env: list = None,
):
    """Deploy application services onto the target hosts via the repository host.

    Builds a ``deploy_service.py`` command line, stages the optional
    init-parameter and provider-settings files into the repository container,
    and executes the command inside that container.

    :param repoDeployPlanFilePath: path of the repository deploy plan JSON file
    :param repoHost: name of the container hosting the repository
    :param targetHosts: hosts (accessed as root) the services are deployed to
    :param application: application name passed to the deploy script
    :param services: optional service names restricting the deployment
    :param tags: optional tags restricting the deployment
    :param providerSettingsFile: optional local provider-settings JSON file
    :param initParamFile: optional local init-parameters JSON file
    :param ignoreCmdErr: when True, command failures are ignored
    :param workdir: working directory inside the repository container
    :param env: environment entries for the command; defaults to PATH entries
    """
    # Cache running container instances by name so the repository container
    # can be looked up when copying files into it.
    containerCache = {}
    for inst in client.containers.list():
        containerCache[inst.name] = inst
    print('container instances have been cached')

    hoststr = ''
    for host in targetHosts:
        hoststr += 'root@' + host + ' '
    cmd = "deploy_service.py --repo-deploy-plan {} --hosts {} --application {}".format(
        repoDeployPlanFilePath, hoststr, application)
    # BUG FIX: was `services is not None or len(services) != 0`, which raised
    # TypeError (len(None)) whenever services was omitted; mirror the tags check.
    if services is not None and len(services) != 0:
        cmd = "{} --services {}".format(cmd, ' '.join(services))
    if tags is not None and len(tags) != 0:
        cmd = "{} --tags {}".format(cmd, ' '.join(tags))
    if initParamFile is not None:
        # Stage the init-parameters file inside the repository container;
        # time.time() makes the staged name unique per invocation.
        exec_cmd(repoHost, 'mkdir -p {}/deployments'.format(workdir),
                 ignoreCmdErr=ignoreCmdErr)
        targetFileName = '{}/deployments/initParams_{}_{}.json'.format(
            workdir, application, time.time())
        copy_to_container(containerCache[repoHost], initParamFile, targetFileName)
        cmd = "{} --init-parameters {}".format(cmd, targetFileName)
    if providerSettingsFile is not None:
        # Stage the provider-settings file the same way.
        exec_cmd(repoHost, 'mkdir -p {}/deployments'.format(workdir),
                 ignoreCmdErr=ignoreCmdErr)
        targetFileName = '{}/deployments/providerSettings_{}_{}.json'.format(
            workdir, application, time.time())
        copy_to_container(containerCache[repoHost], providerSettingsFile, targetFileName)
        cmd = "{} --providers {}".format(cmd, targetFileName)
    environment = env
    if env is None:
        environment = ['PATH=./node_modules/.bin', 'PATH=./bin']
    exec_cmd(repoHost, cmd, ignoreCmdErr=ignoreCmdErr, workdir=workdir,
             environment=environment)
def exec_pansharpen(image_pair, pansh_dstfp, args):
    """Orthorectify a pan/multi image pair, then pansharpen the multispectral image.

    Both source images are orthorectified into the working directory (and the
    destination directory), pansharpened with ``gdal_pansharpen.py``, given
    pyramids, and copied next to *pansh_dstfp*.

    :param image_pair: object with pan/mul source paths and intersection_geom
    :param pansh_dstfp: destination path of the pansharpened output
    :param args: parsed options; uses args.wd, args.outtype, args.stretch,
        args.epsg, args.resolution, args.save_temps
    """
    dstdir = os.path.dirname(pansh_dstfp)

    #### Get working dir
    if args.wd is not None:
        wd = args.wd
    else:
        wd = dstdir
    if not os.path.isdir(wd):
        try:
            os.makedirs(wd)
        except OSError:
            # best-effort: a parallel process may have created it already
            pass
    logger.info("Working Dir: %s" % wd)

    #### Identify name pattern
    print("Multispectral image: %s" % image_pair.mul_srcfp)
    print("Panchromatic image: %s" % image_pair.pan_srcfp)

    # NOTE: unused locals dem_arg and pansh_tempfp from the original were
    # removed; they were computed but never referenced.
    bittype = utils.get_bit_depth(args.outtype)
    pan_basename = os.path.splitext(image_pair.pan_srcfn)[0]
    mul_basename = os.path.splitext(image_pair.mul_srcfn)[0]
    pan_local_dstfp = os.path.join(wd, "{}_{}{}{}.tif".format(pan_basename, bittype, args.stretch, args.epsg))
    mul_local_dstfp = os.path.join(wd, "{}_{}{}{}.tif".format(mul_basename, bittype, args.stretch, args.epsg))
    pan_dstfp = os.path.join(dstdir, "{}_{}{}{}.tif".format(pan_basename, bittype, args.stretch, args.epsg))
    mul_dstfp = os.path.join(dstdir, "{}_{}{}{}.tif".format(mul_basename, bittype, args.stretch, args.epsg))
    pansh_local_dstfp = os.path.join(wd, "{}_{}{}{}_pansh.tif".format(mul_basename, bittype, args.stretch, args.epsg))
    pansh_xmlfp = os.path.join(dstdir, "{}_{}{}{}_pansh.xml".format(mul_basename, bittype, args.stretch, args.epsg))
    mul_xmlfp = os.path.join(dstdir, "{}_{}{}{}.xml".format(mul_basename, bittype, args.stretch, args.epsg))

    if not os.path.isdir(wd):
        os.makedirs(wd)

    #### Ortho pan
    logger.info("Orthorectifying panchromatic image")
    if not os.path.isfile(pan_dstfp) and not os.path.isfile(pan_local_dstfp):
        ortho_functions.process_image(image_pair.pan_srcfp, pan_dstfp, args,
                                      image_pair.intersection_geom)
    if not os.path.isfile(pan_local_dstfp) and os.path.isfile(pan_dstfp):
        shutil.copy2(pan_dstfp, pan_local_dstfp)

    #### Ortho multi
    logger.info("Orthorectifying multispectral image")
    if not os.path.isfile(mul_dstfp) and not os.path.isfile(mul_local_dstfp):
        ## If resolution is specified on the command line, assume it is intended
        ## for the pansharpened image, so the multispectral ortho is produced at
        ## 4x that resolution.
        ## BUG FIX: restore args.resolution afterwards -- the original mutated
        ## args permanently, compounding the 4x factor on every call.
        saved_resolution = args.resolution
        if args.resolution:
            args.resolution = args.resolution * 4.0
        try:
            ortho_functions.process_image(image_pair.mul_srcfp, mul_dstfp, args,
                                          image_pair.intersection_geom)
        finally:
            args.resolution = saved_resolution
    if not os.path.isfile(mul_local_dstfp) and os.path.isfile(mul_dstfp):
        shutil.copy2(mul_dstfp, mul_local_dstfp)

    #### Pansharpen
    logger.info("Pansharpening multispectral image")
    if os.path.isfile(pan_local_dstfp) and os.path.isfile(mul_local_dstfp):
        if not os.path.isfile(pansh_local_dstfp):
            cmd = 'gdal_pansharpen.py -co BIGTIFF=IF_SAFER -co COMPRESS=LZW -co TILED=YES "{}" "{}" "{}"'.format(pan_local_dstfp, mul_local_dstfp, pansh_local_dstfp)
            utils.exec_cmd(cmd)
    else:
        print("Pan or Multi warped image does not exist\n\t%s\n\t%s" % (pan_local_dstfp, mul_local_dstfp))

    #### Make pyramids
    if os.path.isfile(pansh_local_dstfp):
        cmd = 'gdaladdo "%s" 2 4 8 16' % (pansh_local_dstfp)
        utils.exec_cmd(cmd)

    ## Copy warped multispectral xml to pansharpened output
    shutil.copy2(mul_xmlfp, pansh_xmlfp)

    #### Copy pansharpened output
    if wd != dstdir:
        for local_path, dst_path in [
                (pansh_local_dstfp, pansh_dstfp),
                (pan_local_dstfp, pan_dstfp),
                (mul_local_dstfp, mul_dstfp)]:
            if os.path.isfile(local_path) and not os.path.isfile(dst_path):
                shutil.copy2(local_path, dst_path)

    #### Delete Temp Files
    wd_files = [pansh_local_dstfp, pan_local_dstfp, mul_local_dstfp]
    if not args.save_temps:
        if wd != dstdir:
            for f in wd_files:
                try:
                    os.remove(f)
                except Exception as e:
                    logger.warning('Could not remove %s: %s' % (os.path.basename(f), e))
def _parse_service_specs(services):
    """Group 'module:service[:version]' specs into {version: {module: [services]}}.

    Specs without at least a module and a service part are skipped; a '*'
    service means "all services of the module".  Returns {'*': {}} when no
    usable spec is given, so callers always have at least one version key.
    """
    versionDict = {}
    if services is not None and len(services) > 0:
        for service in services:
            # BUG FIX: the original did `service.split(':')[0]`, producing a
            # string and then indexing its *characters* (parts[1], parts[2])
            # instead of the colon-separated spec parts.
            parts = service.split(':')
            if len(parts) < 2:
                continue
            version = parts[2] if len(parts) > 2 else '*'
            moduleDict = versionDict.setdefault(version, {})
            moduleServiceList = moduleDict.setdefault(parts[0], [])
            if parts[1] != '*' and parts[1] not in moduleServiceList:
                moduleServiceList.append(parts[1])
    if len(versionDict) == 0:
        versionDict['*'] = {}
    return versionDict


def remove_service_runtimes(
        repoDeployPlanFilePath: str = 'deploy-repository.json',
        repoHost: str = None,
        targetHosts: list = None,
        application: str = None,
        services: list = None,
        tags: list = None,
        ignoreCmdErr: bool = False,
        workdir: str = '/sardines/shoal',
        env: list = None,
):
    """Remove service runtimes from the repository matching the given filters.

    Invokes ``manageRepository.js`` once per (version, module) combination
    parsed from *services*, optionally restricted by tags and target hosts.

    :param repoDeployPlanFilePath: path of the repository deploy plan JSON file
    :param repoHost: name of the container hosting the repository
    :param targetHosts: hosts whose runtimes are removed; None/empty means all
    :param application: application name; None or '*' means all applications
    :param services: 'module:service[:version]' specs; None/empty means all
    :param tags: optional tags restricting the removal
    :param ignoreCmdErr: when True, command failures are ignored
    :param workdir: working directory inside the repository container
    :param env: environment entries for the command; defaults to PATH entries
    """
    hoststr = '*'
    if targetHosts is not None and len(targetHosts) > 0:
        hoststr = ','.join('root@' + host for host in targetHosts)

    appstr = '*'
    if application is not None and application != '*':
        appstr = application

    versionDict = _parse_service_specs(services)

    for version in versionDict.keys():
        moduleDict = versionDict[version]
        if len(moduleDict.keys()) == 0:
            moduleDict['*'] = []
        for module in moduleDict.keys():
            moduleServiceList = moduleDict[module]
            cmd = './lib/manager/manageRepository.js --remove-service-runtimes --applications={}'.format(
                appstr)
            # '*' filters are omitted from the command line entirely.
            if module != '*':
                cmd = '{} --modules={}'.format(cmd, module)
            if version != '*':
                cmd = '{} --versions={}'.format(cmd, version)
            if len(moduleServiceList) > 0:
                cmd = '{} --services={}'.format(cmd, ','.join(moduleServiceList))
            if tags is not None and len(tags) > 0:
                cmd = '{} --tags={}'.format(cmd, ','.join(tags))
            if hoststr != '*':
                cmd = '{} --hosts={}'.format(cmd, hoststr)
            cmd = '{} {}'.format(cmd, repoDeployPlanFilePath)
            environment = env
            if env is None:
                environment = ['PATH=./node_modules/.bin', 'PATH=./bin']
            print('removing service runtimes using command:', cmd)
            exec_cmd(repoHost, cmd, ignoreCmdErr=ignoreCmdErr, workdir=workdir,
                     environment=environment)
# Dispatch for the remaining CLI actions (the opening `if` of this chain is
# outside this excerpt).
elif args.action == 'deploy-services':
    # --application is mandatory for a deployment.
    if not args.application:
        print("--application is required")
        sys.exit(1)
    else:
        deploy_service(args.repo_deploy_plan, args.repo_host, args.hosts,
                       args.application, args.services, args.tags,
                       args.provider_settings_file, args.init_parameters_file,
                       args.ignoreCmdErr, args.workdir, args.env)
        print("services have been deployed on hosts [{}]".format(' '.join(
            args.hosts)))
elif args.action == 'remove-service-runtimes':
    remove_service_runtimes(args.repo_deploy_plan, args.repo_host, args.hosts,
                            args.application, args.services, args.tags,
                            args.ignoreCmdErr, args.workdir, args.env)
elif args.action == "exec-cmd":
    # Run an arbitrary command on every target host.
    if args.cmd and args.hosts:
        for host in args.hosts:
            exec_cmd(host, args.cmd, workdir=args.workdir,
                     ignoreCmdErr=args.ignoreCmdErr, environment=args.env)
        print("Command [{}] has been executed on host [{}]".format(
            args.cmd, args.hosts))
else:
    # Unknown action: report it and exit non-zero.
    print('action [{}] is not supported'.format(args.action))
    sys.exit(1)
def calc_ndvi(srcfp, dstfp, args):
    """Compute an NDVI raster ((nir - red) / (nir + red)) from a 4- or 8-band image.

    The source image is copied into the working directory, processed block by
    block into a tiled/LZW GTiff, given pyramids, and copied (together with the
    source .xml metadata) next to *dstfp*.  Returns 1 on error, otherwise None.

    :param srcfp: path of the source multispectral image
    :param dstfp: path of the NDVI output image
    :param args: parsed options; uses args.wd, args.outtype, args.save_temps
    """
    # NDVI nodata value written to the output band
    ndvi_nodata = -9999
    # tolerance for floating point equality (guards division by ~zero)
    tol = 0.00001

    ## get basenames for src and dst files, get xml metadata filenames
    srcdir, srcfn = os.path.split(srcfp)
    dstdir, dstfn = os.path.split(dstfp)
    bn, ext = os.path.splitext(srcfn)
    src_xml = os.path.join(srcdir, bn + '.xml')
    dst_xml = os.path.join(dstdir, bn + '_ndvi.xml')

    #### Get working dir
    if args.wd is not None:
        wd = args.wd
    else:
        wd = dstdir
    if not os.path.isdir(wd):
        try:
            os.makedirs(wd)
        except OSError:
            # best-effort: a parallel process may have created it already
            pass
    logger.info("Working Dir: %s" % wd)

    print("Image: %s" % srcfn)

    ## copy source image to working directory
    srcfp_local = os.path.join(wd, srcfn)
    if not os.path.isfile(srcfp_local):
        shutil.copy2(srcfp, srcfp_local)

    ## open image and get band numbers (8-band or 4-band layouts)
    ds = gdal.Open(srcfp_local)
    if ds:
        bands = ds.RasterCount
        if bands == 8:
            red_band_num = 5
            nir_band_num = 7
        elif bands == 4:
            red_band_num = 3
            nir_band_num = 4
        else:
            logger.error("Cannot calculate NDVI from a {} band image: {}".format(bands, srcfp_local))
            return 1
    else:
        logger.error("Cannot open target image: {}".format(srcfp_local))
        return 1

    ## check for input data type - must be float or int (GDAL type codes 1-7)
    datatype = ds.GetRasterBand(1).DataType
    if datatype not in (1, 2, 3, 4, 5, 6, 7):
        logger.error("Invalid input data type {}".format(datatype))
        return 1

    ## get the raster dimensions
    nx = ds.RasterXSize
    ny = ds.RasterYSize

    ## open output file for write and copy proj/geotransform info
    if not os.path.isfile(dstfp):
        dstfp_local = os.path.join(wd, os.path.basename(dstfp))
        gtiff_options = ['TILED=YES', 'COMPRESS=LZW', 'BIGTIFF=IF_SAFER']
        driver = gdal.GetDriverByName('GTiff')
        out_ds = driver.Create(dstfp_local, nx, ny, 1,
                               gdal.GetDataTypeByName(args.outtype), gtiff_options)
        if out_ds:
            out_ds.SetGeoTransform(ds.GetGeoTransform())
            out_ds.SetProjection(ds.GetProjection())
            ndvi_band = out_ds.GetRasterBand(1)
            ndvi_band.SetNoDataValue(float(ndvi_nodata))
        else:
            logger.error("Couldn't open for write: {}".format(dstfp_local))
            return 1

        ## for red and nir bands, get band data, nodata values, and natural block size
        ## if NoData is None default it to zero.
        red_band = ds.GetRasterBand(red_band_num)
        if red_band is None:
            logger.error("Can't load band {} from {}".format(red_band_num, srcfp_local))
            return 1
        red_nodata = red_band.GetNoDataValue()
        if red_nodata is None:
            logger.info("Defaulting red band nodata to zero")
            red_nodata = 0.0
        (red_xblocksize, red_yblocksize) = red_band.GetBlockSize()

        nir_band = ds.GetRasterBand(nir_band_num)
        if nir_band is None:
            logger.error("Can't load band {} from {}".format(nir_band_num, srcfp_local))
            return 1
        nir_nodata = nir_band.GetNoDataValue()
        if nir_nodata is None:
            logger.info("Defaulting nir band nodata to zero")
            nir_nodata = 0.0
        (nir_xblocksize, nir_yblocksize) = nir_band.GetBlockSize()

        ## if different block sizes choose the smaller of the two
        xblocksize = min(red_xblocksize, nir_xblocksize)
        yblocksize = min(red_yblocksize, nir_yblocksize)

        ## number of x and y blocks to read/write (ceiling division)
        nxblocks = (nx + xblocksize - 1) // xblocksize
        nyblocks = (ny + yblocksize - 1) // yblocksize

        ## blocks loop
        for yblock in range(nyblocks):
            ## y offset for ReadAsArray
            yoff = yblock * yblocksize
            ## actual block y size (partial block at the bottom edge)
            block_ny = yblocksize if yblock < nyblocks - 1 else ny - yoff
            for xblock in range(nxblocks):
                ## x offset for ReadAsArray
                xoff = xblock * xblocksize
                ## actual block x size (partial block at the right edge)
                block_nx = xblocksize if xblock < nxblocks - 1 else nx - xoff

                ## read a block from each band
                red_array = red_band.ReadAsArray(xoff, yoff, block_nx, block_ny)
                nir_array = nir_band.ReadAsArray(xoff, yoff, block_nx, block_ny)

                ## mask pixels that are nodata in either band or where red+nir
                ## is within tol of zero (division by ~zero).
                ## BUG FIX: the original built this union through a nested tree
                ## of fragile `array != []` truth tests (deprecated ndarray
                ## truthiness) and used the `numpy.bool` alias removed in
                ## NumPy >= 1.24.  OR-ing an all-False mask is a no-op, so the
                ## plain union below is exactly equivalent.
                ## NOTE(review): nir_array + red_array can wrap for narrow
                ## integer dtypes, as in the original -- confirm input dtypes.
                red_mask = (red_array == red_nodata)
                nir_mask = (nir_array == nir_nodata)
                divzero_mask = abs(nir_array + red_array) < tol
                ndvi_mask = red_mask | nir_mask | divzero_mask
                valid = ~ndvi_mask

                ## declare ndvi array, init to nodata value
                ndvi_array = numpy.full_like(red_array, fill_value=ndvi_nodata,
                                             dtype=numpy.float32)

                ## cast bands to float for calc, release the source arrays
                red_asfloat = numpy.array(red_array, dtype=numpy.float32)
                red_array = None
                nir_asfloat = numpy.array(nir_array, dtype=numpy.float32)
                nir_array = None

                ## calculate ndvi on the valid pixels only
                if valid.any():
                    ndvi_array[valid] = numpy.divide(
                        numpy.subtract(nir_asfloat[valid], red_asfloat[valid]),
                        numpy.add(nir_asfloat[valid], red_asfloat[valid]))
                red_asfloat = None
                nir_asfloat = None

                ## scale and cast to int if outtype integer
                if args.outtype == 'Int16':
                    ndvi_scaled = numpy.full_like(ndvi_array, fill_value=ndvi_nodata,
                                                  dtype=numpy.int16)
                    if valid.any():
                        ndvi_scaled[valid] = numpy.array(ndvi_array[valid] * 1000.0,
                                                         dtype=numpy.int16)
                    ndvi_array = ndvi_scaled
                    ndvi_scaled = None

                ## write ndvi array block to output file
                ndvi_band.WriteArray(ndvi_array, xoff, yoff)
                ndvi_array = None

        ## flush and close datasets
        out_ds = None
        ds = None

        if os.path.isfile(dstfp_local):
            ## add pyramids
            cmd = 'gdaladdo "%s" 2 4 8 16' % (dstfp_local)
            utils.exec_cmd(cmd)
            ## copy to dst
            if wd != dstdir:
                shutil.copy2(dstfp_local, dstfp)

        ## copy xml to dst
        if os.path.isfile(src_xml):
            shutil.copy2(src_xml, dst_xml)
        else:
            logger.warning("xml {} not found".format(src_xml))

        ## Delete Temp Files
        temp_files = [srcfp_local]
        wd_files = [dstfp_local]
        if not args.save_temps:
            for f in temp_files:
                try:
                    os.remove(f)
                except Exception as e:
                    logger.warning('Could not remove %s: %s' % (os.path.basename(f), e))
            if wd != dstdir:
                for f in wd_files:
                    try:
                        os.remove(f)
                    except Exception as e:
                        logger.warning('Could not remove %s: %s' % (os.path.basename(f), e))