def validateInputs(self):
    """Validate the Maxent runner's inputs.

    Checks that the args CSV and input MDS exist, that every entry in the
    optional 'projectionlayers' argument is a directory, that the MDS is
    well formed, and that the output directory exists.  Initializes the
    logger if one was not supplied.

    Raises RuntimeError on the first problem found.
    """
    if not os.path.exists(self.argsCSV):
        raise RuntimeError(self, 'Input argsFile, ' + self.argsCSV +
                           ', could not be found on file system')

    if not os.path.exists(self.inputMDS):
        raise RuntimeError(self, 'Input MDS, ' + self.inputMDS +
                           ', could not be found on file system')

    # 'projectionlayers' is optional; default it to an empty string.
    # (was dict.has_key, which no longer exists in Python 3)
    if 'projectionlayers' not in self.args:
        self.args['projectionlayers'] = ''

    if self.args['projectionlayers'] != '':
        # Comma-separated list of directories; each must exist.
        for layer_dir in self.args['projectionlayers'].split(','):
            if not os.path.isdir(layer_dir):
                raise RuntimeError(self, "Input 'projectionlayers' must be a directory")

    if not utilities.isMDSFile(self.inputMDS):
        raise RuntimeError(self, 'Input MDS, ' + self.inputMDS +
                           ', does not appear to be formated as an MDS file.')

    if not os.path.exists(self.outputDir):
        raise RuntimeError(self, 'Output directory, ' + self.outputDir +
                           ', could not be found on file system')

    if self.logger is None:
        # BUG FIX: original referenced the undefined name `outDir` here;
        # the logger belongs in the validated output directory.
        self.logger = utilities.logger(self.outputDir, self.verbose)
    self.writetolog = self.logger.writetolog
def main(argv):
    """Command-line entry point for the raster format converter.

    Builds the option parser, wires up a FormatConverter with a logger,
    and converts the rasters referenced by the input file into the
    requested output format.
    """
    usage = "usage: options: -m --MDSFile -o --outputDir -f --format -v --verbose"
    description = "Converts all of the tif files specified in an MDS to ASCII format (or optionally other formats)"
    parser = OptionParser(usage=usage, description=description)
    parser.add_option("-v", dest="verbose", default=False, action="store_true",
                      help="the verbose flag causes diagnostic output to print.")
    parser.add_option("-i", "--inputfile", dest="input", help="")
    parser.add_option("-o", "--outputDir", dest="outputDir",
                      help="Output directory to save files in.")
    parser.add_option("-f", "--format", dest="format", default='asc',
                      help="The format to convert into. 'bil', 'img', 'tif', 'jpg', 'bmp', 'asc'")
    options = parser.parse_args(argv)[0]

    converter = FormatConverter()
    converter.verbose = options.verbose
    converter.logger = utilities.logger(options.outputDir, converter.verbose)
    converter.writetolog = converter.logger.writetolog
    converter.outputDir = options.outputDir
    converter.format = options.format
    converter.convertEnvironmentalLayers([options.input],
                                         options.outputDir, options.format)
def validateArgs(self):
    """Validate inputs for the raster format converter.

    Accumulates every problem found into `argProblem` and raises a single
    utilities.TrappedError at the end if anything was wrong; also creates
    the output directory and the logger when missing.
    """
    argProblem = ""

    if self.logger is None:
        # BUG FIX: original referenced the undefined name `outDir`;
        # use the configured output directory instead.
        self.logger = utilities.logger(self.outputDir, self.verbose)
    self.writetolog = self.logger.writetolog

    if os.path.isdir(self.inputDir):
        pass
    elif os.path.exists(self.MDSFile):
        # NOTE(review): `isMDSFile` is called unqualified here but as
        # utilities.isMDSFile elsewhere -- confirm it is imported directly
        # into this module's namespace.
        if not isMDSFile(self.MDSFile):
            argProblem += "The supplied MDS file, " + self.MDSFile + ", does not appear to be in the appropriate format."
    else:
        argProblem += "Neither an input Directory or MDS File was supplied."

    if not os.path.isdir(self.outputDir):
        try:
            os.mkdir(self.outputDir)
        except OSError:  # narrowed from a bare except
            argProblem += 'The supplied output directory, ' + self.outputDir + ", does not exist and could not be created."

    if self.format.lower() not in self.driverExt:
        argProblem += "The supplied format must be one of " + ", ".join(self.driverExt.keys())

    if argProblem:
        raise utilities.TrappedError("There was a problem with one or more of the inputs to RasterFormatConverter")
def check_zippermast(commander):
    """Query the zippermast over its serial port for the current height.

    Wakes the mast electronics (restoring the relay to its prior state
    afterwards), reads one response line, and returns the height field as
    a string of millimeters, or "NO RESPONSE" when no valid $PPSST
    sentence was received.
    """
    # Remember whether the mast relay was already powered so we can
    # restore it on exit.
    was_on = True
    if commander.relay_stats["zipmast_relay"] != "1":
        was_on = False
        commander.relay_on("zipmast_relay")
        time.sleep(.5)
    # Pulse the wake line.
    commander.relay_on("zipmast_wake")
    time.sleep(.5)
    commander.relay_off("zipmast_wake")

    commander.debug_logger(
        utilities.logger(
            NAME, "[STATUS] Attempting to query zippermast for height."))

    mast_response = "NO RESPONSE"
    # transmit pcmov_down to zippermast
    try:
        modem = serial.Serial(port=commander.physical_ports['zm_port'],
                              baudrate=38400, rtscts=True, timeout=1.0)
        commander.debug_logger(utilities.logger(NAME, "{}".format(modem)))
        modem.flushInput()
        modem.flushOutput()
        mast_response = modem.readline()
        commander.debug_logger(
            utilities.logger(
                NAME,
                "[STATUS] Zippermast response: {}".format(mast_response)))
        modem.flushInput()
        modem.flushOutput()
        modem.close()
    except Exception:  # narrowed from a bare except (kept broad: serial errors vary)
        commander.debug_logger(
            utilities.logger(NAME,
                             "[ERROR] Failed to write command to zippermast"))

    if not was_on:
        commander.relay_off("zipmast_relay")

    # BUG FIX: original used `is not ""` (an identity test, not equality)
    # and compared the split *list* to 3 (`mast_resps > 3`), which is
    # always True in Python 2.  Compare equality and the field count.
    if mast_response != "" and "$PPSST" in mast_response:
        mast_resps = mast_response.split(",")
        if len(mast_resps) > 3:
            commander.debug_logger(
                utilities.logger(NAME, "{}".format(mast_resps)))
            height_mm = mast_resps[2]
            return height_mm
    return "NO RESPONSE"
def validateArgs(self):
    """run a series of checks on our inputs

    Checks for:
        1) field data csv exists
        2) inputs csv exists
        3) each of the files listed in the inputs csv exists.
        4) if a propSurface file was supplied it exists
        5) if point count supplied its an integer >= 0
        6) output directory exists

    Raises RuntimeError on the first problem found.
    """
    #1) field data csv exists
    if self.fieldData != '' and not os.path.exists(self.fieldData):
        raise RuntimeError("Could not find supplied CSV file of fieldData provided. Please check input file: " + str(self.fieldData))

    #2) inputs csv exists
    if self.inputsCSV != '' and not os.path.exists(self.inputsCSV):
        raise RuntimeError("Could not find CSV file of inputs provided. Please check input file: " + str(self.inputsCSV))

    #3) each of the files listed in the inputs csv exists.
    if os.path.exists(self.inputsCSV):
        # `with` closes the handle (the original leaked it)
        with open(self.inputsCSV, 'r') as csv_file:
            reader = csv.reader(csv_file)
            next(reader)  # skip the header row
            missingFiles = [row[0] for row in reader
                            if not SpatialUtilities.isRaster(row[0])]
        if missingFiles:
            msg = "One or more of the files in the input covariate list CSV could not be identified as rasters by GDAL."
            msg += "\n ".join(missingFiles)
            raise RuntimeError(msg)

    #4) if a propSurface file was supplied it exists
    if self.probSurfacefName != '':
        if not SpatialUtilities.isRaster(self.probSurfacefName):
            raise RuntimeError("The supplied probability surface, " + self.probSurfacefName + ", does not appear to be a valid raster.")
        else:
            self.probSurface = SpatialUtilities.SAHMRaster(
                self.probSurfacefName)

    #6) if point count supplied its an integer greater than or equal to 0
    try:
        self.pointCount = int(self.pointCount)
    except (TypeError, ValueError):  # narrowed from a bare except
        raise RuntimeError("The supplied point count parameter, " + str(self.pointCount) + ", does not appear to be an integer ")
    if not self.pointCount >= 0:
        # BUG FIX: concatenating the (now int) pointCount raised TypeError
        # instead of this RuntimeError; also the message claimed
        # "greater than 0" while the check permits 0.
        raise RuntimeError("The supplied point count parameter, " + str(self.pointCount) + ", must be greater than or equal to 0")

    #7) output directory exists
    outDir = os.path.split(self.outputMDS)[0]
    if not os.path.exists(outDir):
        raise RuntimeError("The directory of the supplied MDS output file path, " + self.outputMDS + ", does not appear to exist on the filesystem")

    if self.logger is None:
        self.logger = utilities.logger(outDir, self.verbose)
    self.writetolog = self.logger.writetolog
def main(args_in):
    # Launch an R model script: args_in is the full command line handed to
    # subprocess.  Output is echoed to stdout/stderr and to a SAHM logfile
    # found via the "o=" argument; tiled outputs are mosaicked at the end.
    print "args used = ", args_in
    for arg in args_in:
        # NOTE(review): `logger` is re-initialized on every iteration, so a
        # logger created for an "o=" argument survives only if "o=" is the
        # last argument examined -- confirm this is intended.
        logger = False
        if arg.startswith("o="):
            outDir = arg[2:]
            # Walk up the supplied path until an existing directory is found.
            while not os.path.isdir(outDir):
                outDir = os.path.split(outDir)[0]
            print "outDir=", outDir
            logger = utilities.logger(os.path.join(outDir, "logfile.txt"), True)
    # if this is an ApplyModel we need to wait for the preceeding model to finish
    # up before launching R
    print "args_in[3]", args_in[3]
    if "EvaluateNewData.r" in args_in[3]:
        # The "ws=" argument points into the original model's workspace.
        inDir = [os.path.split(d[3:])[0]
                 for d in args_in if d.startswith("ws=")][0]
        while True:
            check = utilities.checkIfModelFinished(inDir)
            if check == "Error in model":
                sys.stderr.write("Error in original model that this ApplyModel needs")
                sys.exit("Error in original model could not apply model")
            elif check.startswith("Completed successfully"):
                # Give the original model a moment to finish writing files.
                time.sleep(5)
                break
    # Run R and capture both output streams.
    p = subprocess.Popen(args_in, stderr=subprocess.PIPE,
                         stdout=subprocess.PIPE)
    ret = p.communicate()
    print ret[0]#this sends it to the std out
    sys.stderr.write(ret[1])
    msg = ""
    if 'Error' in ret[1]:
        msg = "!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!"
        msg += "\n An error was encountered in the R script for this module."
        msg += "\n The R error message is below: \n"
        msg += ret[1]
        if logger:
            logger.writetolog(msg)
        sys.stderr.write(msg)
        print msg
        # Errors abort before GDAL setup / mosaicking.
        return
    elif 'Warning' in ret[1]:
        msg = "The R scipt returned the following warning(s). The R warning message is below - \n"
        msg += ret[1]
        if logger:
            logger.writetolog(msg)
        sys.stderr.write(msg)
    # NOTE(review): ret[1] was already written to stderr above, so warnings
    # appear twice -- confirm intent.
    sys.stderr.write(ret[1])
    setupGDAL()
    mosaicTiledOutputs(outDir)
    print "Finished successfully!"
def main(args_in):
    """
    Process commandline Arguments,
    Create an instance of PARC with the Variables,
    Kick off the parkFiles function of our PARC instance
    """
    # Process command-line args.
    usage = "usage: %prog [options] <template image> <input dir or list of input files>"
    description = "This application projects, aggregates, resamples, and clips imagery."
    parser = OptionParser(usage=usage, description=description)
    parser.add_option("-s", dest="source",
                      help="print the names of all known aggregation methods")
    parser.add_option("-c", dest="categorical")
    parser.add_option("-d", dest="dest", default="./",
                      help="directory in which to put processed images, defaults to current directory")
    parser.add_option("-v", dest="verbose", default=False, action="store_true",
                      help="the verbose flag causes diagnostic output to print")
    parser.add_option("-t", dest="template",
                      help="The template raster used for projection, origin, cell size and extent")
    parser.add_option("-r", dest="resampling",
                      help="The CSV containing the list of files to process. Format is 'FilePath, Categorical, Resampling, Aggreagtion")
    parser.add_option("-a", dest="aggregation", default=True,
                      help="'True', 'False' indicating whether to use multiple cores or not")
    options = parser.parse_args(args_in)[0]

    # Configure a PARC instance from the parsed options.
    parc_job = PARC()
    parc_job.verbose = options.verbose
    parc_job.template = options.template
    dest_dir = os.path.split(options.dest)[0]
    parc_job.outDir = dest_dir
    parc_job.logger = utilities.logger(dest_dir, parc_job.verbose)
    parc_job.writetolog = parc_job.logger.writetolog
    parc_job.template_params = parc_job.getRasterParams(options.template)

    # Process the single requested file.
    parc_job.parcFile(
        [options.source, options.categorical,
         options.resampling, options.aggregation],
        options.dest)
def authNewClient(self, clientFD):
    """Authenticate a newly connected client.

    Reads the auth message from the client socket, authenticates it against
    the DB under the mutex, sends the serialized result back, and closes the
    connection on failure.  Returns the authentication status.
    """
    # Code Section
    authMessage = clientFD.recv(1024)

    self.mutex.acquire()  # Lock Access to DB
    try:
        serverAuth = authentication.ServerAuth()  # Create auth instance
        result = serverAuth.authenticate(authMessage)  # Authenticate
    finally:
        # BUG FIX: release in a finally so the DB lock is never leaked
        # when authenticate() raises.
        self.mutex.release()  # Unlock Access to DB

    status = result["status"]
    self.send(clientFD, Utilities.serialize(result))
    if not status:
        self.closeConnection()
        Utilities.logger("Authentication failed")
    return status
def __init__(self, app):
    """Bind to the key database and make sure its unique text index exists."""
    self.db = PyMongo(app, uri=MONGO_URI + KEY_DB).db.db
    self.log = logger()
    # Empty collection means first run: create the unique text index over
    # the name / moodle / key fields.
    if self.db.count_documents({}) == 0:
        self.db.create_index(
            [('name', TEXT), ('moodle', TEXT), ('key', TEXT)],
            unique=True)
def raise_mast(commander, speed):
    """Start raising the zippermast at the given speed.

    When invoked on a box other than "zipbox", forwards the "rmt" command
    to the zippermast box instead of acting locally.
    """
    if commander.device != "zipbox":
        message = "[ERROR] Tried to execute 'raise_mast' from outside zippermast box. Attempting to forward."
        commander.debug_logger(utilities.logger(NAME, message))
        return commander.forward_command(
            commander.device_ip["zipbox"], commander.server_port, "rmt")

    zip_mast_pcmov(commander, "up", speed)
    return "MAST_UP"
def run(file_path, *args, **kwargs):
    """Report size and access/modify/change times for `file_path`.

    Returns a list of logger(...) entries, one per reported attribute.
    Extra *args/**kwargs are accepted and ignored (uniform plugin
    signature).
    """
    # Hoisted: one stat() call instead of four separate ones.
    st = os.stat(file_path)
    result = list()

    # rep file size
    result.append(logger("size", f"{st.st_size/1000}kb"))

    # last access time in seconds
    last_accessed = datetime.fromtimestamp(st.st_atime).ctime()
    result.append(logger("last accessed", last_accessed))

    # last modify in seconds
    last_modified = datetime.fromtimestamp(st.st_mtime).ctime()
    result.append(logger("last modify", last_modified))

    # last changed in seconds
    last_changed = datetime.fromtimestamp(st.st_ctime).ctime()
    result.append(logger("last changed", last_changed))

    return result
def lower_mast(commander, speed):
    """Start lowering the zippermast at the given speed.

    When invoked on a box other than "zipbox", forwards the "lmt" command
    to the zippermast box instead of acting locally.
    """
    if commander.device != "zipbox":
        message = "[ERROR] Tried to execute 'lower_mast' from outside zippermast box. Attempting to forward."
        commander.debug_logger(utilities.logger(NAME, message))
        return commander.forward_command(
            commander.device_ip["zipbox"], commander.server_port, "lmt")

    # send signal to zippermast relay - circuit closed
    zip_mast_pcmov(commander, "down", speed)
    return "MAST_DOWN"
def validateArgs(self):
    """
    Make sure the user sent us some stuff we can work with

    Validates the template raster (existence, readability, square pixels),
    the field-data CSV, the output directory, and the optional EPSG code,
    then initializes the logger.  Raises Exception/RuntimeError on the
    first problem found.
    """
    # Validate template image.
    if self.templatefName is None:
        raise Exception("template raster not provided (-t command line argument missing)")

    if not os.path.exists(self.templatefName):
        # BUG FIX: original built this message from self.template, which is
        # not assigned until the next statement and raised AttributeError.
        raise Exception("Template file, " + self.templatefName + ", does not exist on file system")

    self.template = SpatialUtilities.SAHMRaster(self.templatefName)
    if len(self.template.Error) != 0:
        print("There was a problem with the provided template: \n " +
              " " + "\n ".join(self.template.Error))
        raise Exception("There was a problem with the provided template: \n " +
                        " " + "\n ".join(self.template.Error))

    # Ensure the template has square pixels.
    if abs(abs(self.template.xScale) - abs(self.template.yScale)) > 1e-6:
        print("The template raster must have square pixels.")
        print("x pixel scale = " + str(self.template.xScale))
        print("y pixel scale = " + str(self.template.yScale))
        raise Exception("template image must have square pixels.")

    # Validate the CSV
    if self.csv is None:
        raise Exception("No csv provided")

    if not os.path.exists(self.csv):
        raise Exception("CSV file, " + self.csv + ", does not exist on file system")

    # make sure the directory the mds file is going into exists:
    outDir = os.path.split(self.output)[0]
    if not os.path.exists(outDir):
        raise RuntimeError("The directory of the supplied MDS output file path, " + self.output + ", does not appear to exist on the filesystem")

    if self.epsg:
        try:
            self.pointsSpatialRef = osr.SpatialReference()
            self.pointsSpatialRef.ImportFromEPSG(self.epsg)
        except Exception:  # narrowed from a bare except
            raise RuntimeError("The EPSG code provided, " + str(self.epsg) + ", is not known to the current installation of GDAL.")

    if self.logger is None:
        self.logger = utilities.logger(outDir, self.verbose)
    self.writetolog = self.logger.writetolog
def main(args_in):
    """
    Process commandline Arguments,
    Create an instance of PARC with the Variables,
    Kick off the parkFiles function of our PARC instance
    """
    # Process command-line args.
    stmt = "usage: %prog [options] <template image> <input dir or list of input files>"
    blurb = "This application projects, aggregates, resamples, and clips imagery."
    opt_parser = OptionParser(usage=stmt, description=blurb)
    opt_parser.add_option("-s", dest="source",
                          help="print the names of all known aggregation methods")
    opt_parser.add_option("-c", dest="categorical")
    opt_parser.add_option("-d", dest="dest", default="./",
                          help="directory in which to put processed images, defaults to current directory")
    opt_parser.add_option("-v", dest="verbose", default=False,
                          action="store_true",
                          help="the verbose flag causes diagnostic output to print")
    opt_parser.add_option("-t", dest="template",
                          help="The template raster used for projection, origin, cell size and extent")
    opt_parser.add_option("-r", dest="resampling",
                          help="The CSV containing the list of files to process. Format is 'FilePath, Categorical, Resampling, Aggreagtion")
    opt_parser.add_option("-a", dest="aggregation", default=True,
                          help="'True', 'False' indicating whether to use multiple cores or not")
    options = opt_parser.parse_args(args_in)[0]

    # Build and configure the PARC worker.
    worker = PARC()
    worker.verbose = options.verbose
    worker.template = options.template
    dest_dir = os.path.split(options.dest)[0]
    worker.outDir = dest_dir
    worker.logger = utilities.logger(dest_dir, worker.verbose)
    worker.writetolog = worker.logger.writetolog
    worker.template_params = worker.getRasterParams(options.template)

    worker.parcFile(
        [options.source, options.categorical,
         options.resampling, options.aggregation],
        options.dest)
def closer(arg_exit_code):
    """Close every tracked client socket and the listener, then exit."""
    global readers  # Just need the readers for closing the file descriptors

    # For any readers still around, close them down
    for sock in list(readers.keys()):
        try:
            peer = sock.getpeername()
            sock.close()
            util.logger("closer: rsocket for peer {%s} closed", peer)
        except Exception as err:
            util.logger(
                "closer: rsocket.close() error for peer {%s}, reason: {%s}",
                sock.getpeername(), repr(err))

    # Close down server/listener socket
    try:
        server.close()
    except Exception as err:
        util.logger("closer: server.close() error, reason: {%s}", repr(err))

    util.logger("closer: Bye bye")
    sys.exit(arg_exit_code)
def validateArgs(self):
    """
    Make sure the user sent us some stuff we can work with

    Validates the template raster (existence, readability via
    getRasterParams, square pixels), the CSV, and the output directory,
    then initializes the logger.  Raises on the first problem found.
    """
    # Validate template image.
    if self.template is None:
        raise Exception("template raster not provided (-t command line argument missing)")

    if not os.path.exists(self.template):
        raise Exception("Template file, " + self.template + ", does not exist on file system")

    self.templateParams = self.getRasterParams(self.template)
    if len(self.templateParams["Error"]) != 0:
        print("There was a problem with the provided template: \n " +
              " " + "\n ".join(self.templateParams["Error"]))
        raise Exception("There was a problem with the provided template: \n " +
                        " " + "\n ".join(self.templateParams["Error"]))

    # Ensure the template has square pixels.
    if abs(abs(self.templateParams["xScale"]) -
           abs(self.templateParams["yScale"])) > 1e-6:
        # BUG FIX: the diagnostics below referenced the undefined bare
        # names xScale/yScale; read them from templateParams.
        print("The template raster must have square pixels.")
        print("x pixel scale = " + str(self.templateParams["xScale"]))
        print("y pixel scale = " + str(self.templateParams["yScale"]))
        raise Exception("template image must have square pixels.")

    # Validate the CSV
    if self.csv is None:
        raise Exception("No csv provided")

    if not os.path.exists(self.csv):
        raise Exception("CSV file, " + self.csv + ", does not exist on file system")

    # make sure the directory the mds file is going into exists:
    outDir = os.path.split(self.output)[0]
    if not os.path.exists(outDir):
        raise RuntimeError("The directory of the supplied MDS output file path, " + self.output + ", does not appear to exist on the filesystem")

    if self.logger is None:
        self.logger = utilities.logger(outDir, self.verbose)
    self.writetolog = self.logger.writetolog
def validateArgs(self):
    """
    Make sure the user sent us some stuff we can work with

    Checks the template raster exists, loads cleanly, and has square
    pixels; checks the CSV and the output directory; initializes the
    logger.  Raises on the first problem found.
    """
    # Validate template image.
    if self.template is None:
        raise Exception("template raster not provided (-t command line argument missing)")
    if not os.path.exists(self.template):
        raise Exception("Template file, " + self.template + ", does not exist on file system")

    self.templateParams = self.getRasterParams(self.template)
    if len(self.templateParams["Error"]) != 0:
        print("There was a problem with the provided template: \n " +
              " " + "\n ".join(self.templateParams["Error"]))
        raise Exception("There was a problem with the provided template: \n " +
                        " " + "\n ".join(self.templateParams["Error"]))

    # Ensure the template has square pixels.
    if abs(abs(self.templateParams["xScale"]) -
           abs(self.templateParams["yScale"])) > 1e-6:
        print("The template raster must have square pixels.")
        # BUG FIX: these prints referenced undefined bare names
        # xScale/yScale; use the values from templateParams.
        print("x pixel scale = " + str(self.templateParams["xScale"]))
        print("y pixel scale = " + str(self.templateParams["yScale"]))
        raise Exception("template image must have square pixels.")

    #Validate the CSV
    if self.csv is None:
        raise Exception("No csv provided")
    if not os.path.exists(self.csv):
        raise Exception("CSV file, " + self.csv + ", does not exist on file system")

    #make sure the directory the mds file is going into exists:
    outDir = os.path.split(self.output)[0]
    if not os.path.exists(outDir):
        raise RuntimeError("The directory of the supplied MDS output file path, " + self.output + ", does not appear to exist on the filesystem")

    if self.logger is None:
        self.logger = utilities.logger(outDir, self.verbose)
    self.writetolog = self.logger.writetolog
def stop_lower_mast(commander):
    """Stop a lowering mast (speed 0) and confirm the height has settled.

    Retries up to 5 times until the commanded height and a fresh
    zippermast query agree to within 10 mm.  Forwards "slmt" when not
    running on the zippermast box.
    """
    if commander.device != "zipbox":
        # NOTE(review): the message below says 'lower_mast', not
        # 'stop_lower_mast' -- left unchanged, confirm intent.
        commander.debug_logger(
            utilities.logger(
                NAME,
                "[ERROR] Tried to execute 'lower_mast' from outside zippermast box. Attempting to forward."
            ))
        return commander.forward_command(commander.device_ip["zipbox"],
                                         commander.server_port, "slmt")
    response = "NO RESPONSE"
    second_check = ""
    fail_ctr = 0
    while not in_range(response, second_check, 10) and fail_ctr < 5:
        # Speed "0" == stop.
        response = zip_mast_pcmov(commander, "down", "0")
        # BUG FIX: original used `is not "NO RESPONSE"` -- an identity
        # comparison on a string literal, not an equality test.
        if response != "NO RESPONSE":
            commander.device_stats["mast"] = response
            time.sleep(0.1)
            second_check = check_zippermast(commander)
        else:
            fail_ctr += 1
    return "MAST_DOWN"
def validateArgs(self):
    """Check our input CSV files and parameters for expectations.

    Validates the field-data CSV, the inputs CSV (and every raster listed
    in it), the optional probability surface, the point count, and the
    output directory; initializes the logger.  Raises RuntimeError on the
    first problem found.
    """
    #check our CSV file for expectations
    if not os.path.exists(self.fieldData):
        raise RuntimeError("Could not find CSV file of fieldData provided. Please check input file: " + str(self.fieldData))

    if not os.path.exists(self.inputsCSV):
        raise RuntimeError("Could not find CSV file of inputs provided. Please check input file: " + str(self.inputsCSV))

    #check the input CSV file of inputs
    # `with` closes the handle (the original leaked it)
    with open(self.inputsCSV, 'r') as csv_file:
        reader = csv.reader(csv_file)
        next(reader)  # skip the header row
        missingfiles = [row[0] for row in reader
                        if not self.isRaster(row[0])]
    if missingfiles:
        msg = "One or more of the files in the input covariate list CSV could not be identified as rasters by GDAL."
        msg += "\n ".join(missingfiles)
        raise RuntimeError(msg)

    if self.probsurf != '':
        if not self.isRaster(self.probsurf):
            raise RuntimeError("The supplied probability surface, " + self.probsurf + ", does not appear to be a valid raster.")

    try:
        self.pointcount = int(self.pointcount)
    except (TypeError, ValueError):  # narrowed from a bare except
        raise RuntimeError("The supplied point count parameter, " + str(self.pointcount) + ", does not appear to be an integer")

    #make sure the directory the mds file is going into exists:
    outDir = os.path.split(self.outputMDS)[0]
    if not os.path.exists(outDir):
        raise RuntimeError("The directory of the supplied MDS output file path, " + self.outputMDS + ", does not appear to exist on the filesystem")

    if self.logger is None:
        self.logger = utilities.logger(outDir, self.verbose)
    self.writetolog = self.logger.writetolog
def remove_key(xsocket, err=None):
    """Drop a client socket from the reader/writer maps and tear it down.

    `err` is the exception (if any) that caused the client to leave; it
    selects the log message and suppresses the polite shutdown.
    Teardown failures are logged, never raised.
    """
    global readers
    global writers
    if err:
        util.logger("Client {%s} left unexpectedly, reason: {%s}",
                    xsocket.getpeername(), repr(err))
    else:
        util.logger("Client {%s} left politely", xsocket.getpeername())
    del readers[xsocket]
    if xsocket in writers:
        del writers[xsocket]
    try:
        if not err:
            # NOTE(review): socket.shutdown() normally requires a `how`
            # argument (e.g. socket.SHUT_RDWR); a bare call raises
            # TypeError, which lands in the handler below -- confirm.
            xsocket.shutdown()
        xsocket.close()
    except Exception as exc:
        # BUG FIX: original logged repr(err) -- the function argument,
        # often None -- instead of the exception actually raised here
        # (the documented twin of this function logs the caught one).
        util.logger("Socket shutdown or close failed, reason: {%s}",
                    repr(exc))
def closer(arg_exit_code):
    """
    Close any open client sockets.
    Close the server/listener socket.
    Exit to operating system with arg_exit_code.
    """
    global readers  # Just need the readers for closing the file descriptors

    # Shut down every client socket that is still being tracked.
    for client_sock in list(readers.keys()):
        try:
            remote = client_sock.getpeername()
            client_sock.close()
            util.logger("closer: rsocket for peer {%s} closed", remote)
        except Exception as exc:
            util.logger(
                "closer: rsocket.close() error for peer {%s}, reason: {%s}",
                client_sock.getpeername(), repr(exc))

    # Shut down the listener itself.
    try:
        server.close()
    except Exception as exc:
        util.logger("closer: server.close() error, reason: {%s}", repr(exc))

    util.logger("closer: Bye bye")
    sys.exit(arg_exit_code)
def __init__(self, filename=None, config=None, comment=None, debug=False):
    '''Initializer

    Parameters:
        filename: optional, configuration filename.
        config: optional, existing RTParser object whose dictionary is copied.
        comment: optional, a comment at the filename-level.
        debug: optional, when True the logger is set to level 10 (DEBUG).'''
    self._file = filename
    # True when `config` is another instance of this exact class; in that
    # case we copy its dictionary instead of starting a fresh one.
    copying = config is not None and type(config) == type(self)
    if copying:
        self.config = config.dictionary()
    else:
        self.config = RT_dict(comment=comment)
    self.logger = utilities.logger()
    if debug:
        self.logger.setLevel(10)
    self.logger.debug("Initializing {0}".format(self.__class__.__name__))
    # Read from disk only when a filename was given and we did not copy
    # an existing parser.
    if self._file is not None and not copying:
        self.logger.info("Reading config from {0}.".format(self._file))
        self.read()
    self.logger.debug("Created {0}".format(self.__class__.__name__))
def validateArgs(self):
    """Validate the CSV inputs and parameters for this run.

    Checks the field-data CSV, the inputs CSV and each raster it lists,
    the optional probability surface, the point-count parameter, and the
    output directory; sets up the logger.  Raises RuntimeError on the
    first problem found.
    """
    #check our CSV file for expectations
    if not os.path.exists(self.fieldData):
        raise RuntimeError("Could not find CSV file of fieldData provided. Please check input file: " + str(self.fieldData))

    if not os.path.exists(self.inputsCSV):
        raise RuntimeError("Could not find CSV file of inputs provided. Please check input file: " + str(self.inputsCSV))

    #check the input CSV file of inputs
    # close the file handle when done (the original leaked it)
    with open(self.inputsCSV, 'r') as csv_handle:
        rows = csv.reader(csv_handle)
        next(rows)  # discard the header row
        missingfiles = [r[0] for r in rows if not self.isRaster(r[0])]
    if missingfiles:
        msg = "One or more of the files in the input covariate list CSV could not be identified as rasters by GDAL."
        msg += "\n ".join(missingfiles)
        raise RuntimeError(msg)

    if self.probsurf != '':
        if not self.isRaster(self.probsurf):
            raise RuntimeError("The supplied probability surface, " + self.probsurf + ", does not appear to be a valid raster.")

    try:
        self.pointcount = int(self.pointcount)
    except (TypeError, ValueError):  # narrowed from a bare except
        raise RuntimeError("The supplied point count parameter, " + str(self.pointcount) + ", does not appear to be an integer")

    #make sure the directory the mds file is going into exists:
    outDir = os.path.split(self.outputMDS)[0]
    if not os.path.exists(outDir):
        raise RuntimeError("The directory of the supplied MDS output file path, " + self.outputMDS + ", does not appear to exist on the filesystem")

    if self.logger is None:
        self.logger = utilities.logger(outDir, self.verbose)
    self.writetolog = self.logger.writetolog
def remove_key(xsocket, err=None):
    """Forget a client socket and tear it down.

    Removes `xsocket` from the module-level readers and writers maps,
    then shuts the socket down (the polite shutdown is skipped after an
    error) and closes it.  Teardown failures are logged, never raised.
    """
    global readers
    global writers
    # getpeername() is needed exactly once on either logging path.
    peer = xsocket.getpeername()
    if err:
        util.logger("Client {%s} left unexpectedly, reason: {%s}",
                    peer, repr(err))
    else:
        util.logger("Client {%s} left politely", peer)
    del readers[xsocket]
    if xsocket in writers:
        del writers[xsocket]
    try:
        if not err:
            # NOTE(review): socket.shutdown() normally takes a `how`
            # argument (e.g. socket.SHUT_RDWR); the bare call raises
            # TypeError, caught below -- confirm intent.
            xsocket.shutdown()
        xsocket.close()
    except Exception as exc:
        util.logger("Socket shutdown or close failed, reason: {%s}",
                    repr(exc))
import requests as req
from config.moodle import *
from utilities import logger, ErrorAPI

# Module-level logger shared by the helpers in this module.
log = logger()


def res_handle(r):
    """Normalize a Moodle web-service HTTP response.

    Raises ErrorAPI for transport-level problems (non-200 status,
    non-JSON content type) and for an invalid token.  For other Moodle
    error codes, returns a {'status': ..., 'message': ...} dict; on
    success returns the decoded JSON body.
    """
    if r.status_code != 200:
        log.info(r.status_code, exc_info=True)
        raise ErrorAPI(500, r.status_code, 'moodle')
    if 'application/json' not in r.headers['content-type']:
        log.info(r.headers['content-type'], exc_info=True)
        raise ErrorAPI(500, 'incorrect content-type', 'moodle')

    res = r.json()
    # Moodle reports application-level failures inside the JSON body.
    if 'errorcode' in res and res['errorcode']:
        if res['errorcode'] == 'invalidtoken':
            raise ErrorAPI(401, 'invalid wstoken', 'moodle')
        err = {'status': 500, 'message': res['errorcode']}
        # Numeric error codes double as HTTP status codes.
        if res['errorcode'].isnumeric():
            err['status'] = int(res['errorcode'])
        if 'message' in res:
            err['message'] = res['message']
        # NOTE(review): this branch returns the error dict rather than
        # raising -- callers must check for it; confirm intent.
        return err
    return res
def send(self, clientFD, data):
    """Send `data` to the client, prefixed with its 4-byte length.

    The prefix is the payload length in network (big-endian) byte order
    so the receiver knows how many bytes to expect.
    """
    # Prefix each message with a 4-byte length (network byte order)
    framed = struct.pack('>I', len(data)) + data
    clientFD.sendall(framed)  # Send loop until all bytes successfully delivered
    # BUG FIX: the log message lacked a space after "Sent"
    # ("Sent123 bytes").  The count includes the 4-byte prefix, matching
    # what was actually written to the socket.
    Utilities.logger("Sent " + str(len(framed)) + " bytes")
def set_mast_height(commander, target_height):
    # Drive the zippermast to `target_height` (mm above MAST_MIN, as a
    # string).  Wakes the mast, reads the current height, clamps the
    # request into [MAST_MIN, MAST_MAX] by recursing with a corrected
    # target, then raises or lowers while polling until the height is
    # reached.  Returns the final height above MAST_MIN as a string.
    commander.relay_on("zipmast_relay")
    time.sleep(.1)
    # Pulse the wake line.
    commander.relay_on("zipmast_wake")
    time.sleep(.5)
    commander.relay_off("zipmast_wake")
    cur_ht = check_zippermast(commander)
    if "NO RESPONSE" not in cur_ht:
        commander.device_stats["mast"] = cur_ht
    # Absolute targets: user input is relative to the MAST_MIN offset.
    target_mm = int(target_height) + MAST_MIN
    cur_mm = int(commander.device_stats["mast"])
    # height change below granularity of movement; doing nothing
    if in_range(target_mm, cur_mm, 25):
        commander.debug_logger(
            utilities.logger(
                NAME,
                "[STATUS] User tried to set mast height to current mast height."
            ))
    # recurse with height set to zero - function will set to MAST_MIN
    elif target_mm < MAST_MIN:
        commander.debug_logger(
            utilities.logger(
                NAME,
                "[STATUS] User tried to set mast height below {} mm threshold."
                .format(MAST_MIN)))
        return set_mast_height(commander, str(0))
    # recurse with target height set to greatest acceptable input: (MAST_MAX - MAST_MIN)
    elif target_mm > MAST_MAX:
        commander.debug_logger(
            utilities.logger(
                NAME,
                "[STATUS] User tried to set mast height above {} mm threshold."
                .format(MAST_MAX)))
        return set_mast_height(commander, str(MAST_MAX - MAST_MIN))
    # input was within acceptable parameters; continue with raise mast
    elif target_mm > cur_mm:
        travel_mm = target_mm - cur_mm
        commander.debug_logger(
            utilities.logger(
                NAME, "[STATUS] Raising mast for {} mm".format(travel_mm)))
        # Short moves use the slow speed for finer control.
        if travel_mm < 100:
            mast_speed = "25"
        else:
            mast_speed = "100"
        raise_mast(commander, mast_speed)
        done = False
        fail_ctr = 0
        # Poll the mast until the target is reached or it stops answering
        # three times in a row.
        while not done and fail_ctr < 3:
            cur_ht = check_zippermast(commander)
            if "NO RESPONSE" not in cur_ht:
                commander.device_stats["mast"] = cur_ht
                fail_ctr = 0
            else:
                fail_ctr += 1
                continue
            if int(cur_ht) >= target_mm:
                done = True
            else:
                # Slow down for the final approach.
                if (target_mm - int(cur_ht)) < 100:
                    raise_mast(commander, "25")
                time.sleep(0.1)
        stop_raise_mast(commander)
    # input was within acceptable parameters, begin lower mast
    else:
        travel_mm = cur_mm - target_mm
        commander.debug_logger(
            utilities.logger(
                NAME, "[STATUS] Lowering mast for {} mm".format(travel_mm)))
        if travel_mm < 100:
            mast_speed = "25"
        else:
            mast_speed = "100"
        lower_mast(commander, mast_speed)
        done = False
        fail_ctr = 0
        while not done and fail_ctr < 3:
            cur_ht = check_zippermast(commander)
            if "NO RESPONSE" not in cur_ht:
                commander.device_stats["mast"] = cur_ht
                fail_ctr = 0
            else:
                # NOTE(review): unlike the raise branch above there is no
                # `continue` here, so float(cur_ht) below runs on
                # "NO RESPONSE" and would raise ValueError -- confirm.
                fail_ctr += 1
            if float(cur_ht) <= target_mm:
                done = True
            else:
                if (int(cur_ht) - target_mm) < 100:
                    lower_mast(commander, "25")
                time.sleep(0.25)
        stop_lower_mast(commander)
    commander.relay_off("zipmast_relay")
    # Report the final height relative to MAST_MIN, mirroring the input.
    return "{}".format(int(commander.device_stats["mast"]) - MAST_MIN)
def examine_certificate(conn, cert, errnum, depth, ok):
    """Log the client certificate's common name during TLS verification.

    Leaves the verification verdict (`ok`) untouched, so this callback
    only observes the chain -- it never changes the outcome.
    """
    cn = crypto.X509Name(cert.get_subject()).commonName
    util.logger("Received certificate from client CN={%s}, depth={%d}",
                cn, depth)
    return ok
def main(args_in):
    """
    Process commandline Arguments,
    Create an instance of PARC with the Variables,
    Kick off the parkFiles function of our PARC instance
    """
    # Process command-line args.
    usage = ("usage: %prog [options] <template image> <input dir or "
             "list of input files>")
    description = ("This application projects, aggregates, resamples, and clips"
                   " imagery.")
    parser = OptionParser(usage=usage, description=description)
    parser.add_option("-l", dest="listMethodFlag", default=False,
                      action="store_true",
                      help="print the names of all known aggregation methods")
    parser.add_option("-o", dest="out_dir", default="./",
                      help="directory in which to put processed images, defaults to current directory")
    parser.add_option("-v", dest="verbose", default=False,
                      action="store_true",
                      help="the verbose flag causes diagnostic output to print")
    parser.add_option("-t", dest="templateRaster",
                      help="The template raster used for projection, origin, cell size and extent")
    parser.add_option("-i", dest="inputs_CSV",
                      help="The CSV containing the list of files to process. Format is 'FilePath, Categorical, Resampling, Aggreagtion")
    parser.add_option("-m", dest="multicore", default=False,
                      action="store_true",
                      help="Flag indicating to use multiple cores")
    parser.add_option("-n", dest="ignoreNonOverlap", default=False,
                      action="store_true",
                      help="Flag indicating to use ignore non-overlapping area")
    options = parser.parse_args(args_in)[0]

    # Copy parsed options onto a fresh PARC instance and run it.
    job = PARC()
    job.verbose = options.verbose
    job.template = options.templateRaster
    job.out_dir = options.out_dir
    job.inputs_CSV = options.inputs_CSV
    job.multicores = options.multicore
    job.ignoreNonOverlap = options.ignoreNonOverlap
    job.logger = utilities.logger(
        os.path.join(job.out_dir, "logfile.txt"), True)
    job.parcFiles()
def signal_handler(arg_signal, arg_frame):
    # Termination-signal hook: log which signal arrived, then shut the
    # server down via closer() with exit code 86.  arg_frame (the
    # interrupted stack frame) is required by the signal API but unused.
    util.logger("signal_handler: Caught signal {%d}", arg_signal)
    closer(86)
def signal_handler(arg_signal, arg_frame):
    """
    Catch termination signals
    """
    # Log the signal number, then tear everything down via closer();
    # exit code 86 marks a signal-initiated shutdown.  arg_frame is the
    # interrupted stack frame (unused), per the signal.signal() contract.
    util.logger("signal_handler: Caught signal {%d}", arg_signal)
    closer(86)
flash(f'Here is your sentence:\t\t{sentence}') flash(f'Here is the word you want to replace:\t\t{word_to_replace}') flash( 'Here are suggestions for this word based on your previous emails:\t\t' + " ".join(final_answers)) flash('Here are the most similar words:\t\t' + " ".join([word[0] for word in word_embedding_output])) else: flash('Error: All the form fields are required. ') return render_template('hello.html', form=form) if __name__ == "__main__": utilities.logger() log = logging.getLogger('Enron_email_analysis.main') log.info("Welcome to the Personalized Thesaurus.") log.info( "ABOUT: This thesaurus recommends you the best word based on your previous emails and the most similar word." ) log.info( "Starting to reading in forward and backward probability pickle files") bigram_forward_probability = data.read_pickle_file( f'model_input_data/bigram_forward_probability.pkl') log.info("Successfully finished reading in 1/4 pickle files.") bigram_backward_probability = data.read_pickle_file( f'model_input_data/bigram_backward_probability.pkl') log.info("Successfully finished reading in 2/4 pickle files.") trigram_forward_probability = data.read_pickle_file(
def validateArgs(self):
    """
    Make sure the user sent us some stuff we can work with

    Checks the output directory, the template raster (existence,
    readability, square pixels) and every row of the inputs CSV.
    Validated rows are stored in self.inputs and mirrored into
    PARC_Files.csv in the output directory.  All problems found are
    accumulated and reported in a single TrappedError at the end.
    """
    if not os.path.exists(self.out_dir):
        raise utilities.TrappedError("Specified Output directory " +
                                     self.out_dir +
                                     " not found on file system")

    if not os.path.isdir(self.out_dir):
        raise utilities.TrappedError("Specified Output directory " +
                                     self.out_dir + " is not a directory")

    if self.logger is None:
        self.logger = utilities.logger(self.out_dir, self.verbose)
    self.writetolog = self.logger.writetolog

    #  Validate template image.
    if self.template is None:
        raise utilities.TrappedError("template raster not provided.")

    if not os.path.exists(self.template):
        raise utilities.TrappedError("Template file, " + self.template +
                                     ", does not exist on file system")

    self.templateRaster = SpatialUtilities.SAHMRaster(self.template)
    if len(self.templateRaster.Error) != 0:
        raise utilities.TrappedError(
            "There was a problem with the provided template: \n " + " " +
            "\n ".join(self.templateRaster.Error))

    #  Ensure the template has square pixels.
    if abs(
            abs(self.templateRaster.xScale) -
            abs(self.templateRaster.yScale)) > 1e-6:
        #  BUG FIX: the newlines here were written as "/n" instead of "\n".
        raise utilities.TrappedError(
            "template image must have square pixels."
            + "\n x pixel scale = " + str(abs(self.templateRaster.xScale))
            + "\n y pixel scale = " + str(abs(self.templateRaster.yScale)))

    #  Validate input rasters
    if not os.path.exists(self.inputs_CSV):
        raise utilities.TrappedError("Inputs CSV, " + self.inputs_CSV +
                                     ", does not exist on file system.")

    inputs_csv = csv.reader(open(self.inputs_CSV, 'r'))
    header = inputs_csv.next()  # skip the header row
    input_file_errors = ""

    #  PARC_Files.csv mirrors the input CSV but points at the files PARC
    #  will produce.  "wb" is required by the csv module on Python 2.
    output_csv = os.path.join(self.out_dir, "PARC_Files.csv")
    output = csv.writer(open(output_csv, "wb"))
    output.writerow([
        "PARCOutputFile", "Categorical", "Resampling", "Aggregation",
        "OriginalFile",
        os.path.abspath(self.template),
        os.path.abspath(self.out_dir)
    ])

    inputs = []  # short names seen so far, used to detect duplicates
    had_to_shrink = False
    for row in inputs_csv:
        input_file = row[0]
        input_just_file = os.path.splitext(os.path.split(input_file)[1])[0]

        #  An ESRI grid is referenced through its "hdr" file; treat the
        #  containing directory as the actual input.
        if input_just_file == "hdr":
            input_file = os.path.split(input_file)[0]
            row[0] = input_file
            input_just_file = os.path.split(input_file)[1]

        if input_just_file in inputs:
            input_file_errors += "\n PARC not currently set up to handle identically named inputs."
            input_file_errors += "\n\t" + input_just_file + " used multiple times"
        else:
            inputs.append(input_just_file)
            sourceRaster = SpatialUtilities.SAHMRaster(input_file)
            if len(sourceRaster.Error) > 0:
                input_file_errors += (" " + os.path.split(input_file)[1] +
                                      " had the following errors:\n" + " " +
                                      "\n ".join(sourceRaster.Error)) + "\n"
            else:
                if not self.ignoreNonOverlap and not self.image_covers_template(
                        sourceRaster):
                    input_file_errors += "\n Some part of the template image falls outside of " + input_just_file
                    input_file_errors += "\n template upper left = (" + str(
                        self.templateRaster.west) + ", " + str(
                            self.templateRaster.north) + ")"
                    input_file_errors += "\n template lower right = (" + str(
                        self.templateRaster.east) + ", " + str(
                            self.templateRaster.south) + ")"
                    #  NOTE(review): the transformed corner is computed but the
                    #  untransformed source coordinates are reported below --
                    #  presumably upperLeftX/Y were meant to be shown; confirm.
                    upperLeftX, upperLeftY = SpatialUtilities.transformPoint(
                        sourceRaster.west, sourceRaster.north,
                        sourceRaster.srs, self.templateRaster.srs)
                    input_file_errors += "\n image upper left = (" + \
                        str(sourceRaster.west) + ", " + str(sourceRaster.north) + ")"
                    input_file_errors += "\n image lower right = (" + str(
                        sourceRaster.east) + ", " + str(
                            sourceRaster.south) + ")"
                if self.ignoreNonOverlap:
                    #  if this input is smaller in any of the dimensions,
                    #  shrink the template extent to the common overlap
                    if not self.image_covers_template(sourceRaster):
                        self.shrink_template_extent(sourceRaster)
                        had_to_shrink = True

        #  Column 2: categorical flag, must be '0' (continuous) or '1'.
        if len(row) < 2 or not row[1] in ['0', '1']:
            self.writetolog(" " + os.path.split(input_file)[1] +
                            " categorical either missing or not 0 or 1:" +
                            "\n   Defaulting to 0 (continuous)")
            if len(row) < 2:
                row.append('0')
            else:
                row[1] = '0'

        #  Column 3: resampling method (case-insensitive match).
        resample_methods = [item.lower() for item in self.resample_methods]
        if len(row) < 3 or not row[2].lower() in resample_methods:
            #  NOTE(review): message always says 'Bilinear' even when the
            #  categorical default 'NearestNeighbor' is applied.
            self.writetolog(
                " " + os.path.split(input_file)[1] +
                " resample method either missing or not one of " +
                ", ".join(self.resample_methods) +
                "\n   Defaulting to 'Bilinear'")
            if row[1] == '0':
                default = 'Bilinear'
            else:
                default = 'NearestNeighbor'
            if len(row) < 3:
                row.append(default)
            else:
                row[2] = default

        #  Column 4: aggregation method (case-insensitive match).
        lower_agg_methods = [item.lower() for item in self.agg_methods]
        if len(row) < 4 or not row[3].lower() in lower_agg_methods:
            self.writetolog(
                " " + os.path.split(input_file)[1] +
                " aggregation method either missing or not one of " +
                ", ".join(self.agg_methods) + "\n   Defaulting to 'Mean'")
            if row[1] == '0':
                default = 'Mean'
            else:
                default = 'Majority'
            if len(row) < 4:
                row.append(default)
            else:
                row[3] = default

        self.inputs.append(row)

        #  also write the output row, reconfigured to our output file
        short_name = SpatialUtilities.getRasterShortName(row[0])
        file_name = os.path.abspath(
            os.path.join(self.out_dir, short_name + ".tif"))
        outputrow = [file_name] + row[1:4] + [
            os.path.abspath(row[0]),
            os.path.abspath(self.out_dir)
        ]
        output.writerow(outputrow)

    del output  # drop the writer so its underlying file is flushed/closed

    if had_to_shrink:
        #  Reproject/clip the template itself to the shrunken extent and
        #  patch the template path recorded in PARC_Files.csv (field 5).
        old_template = SpatialUtilities.SAHMRaster(self.template)
        new_template_fname = os.path.join(self.out_dir,
                                          os.path.split(self.template)[1])
        SpatialUtilities.intermediaryReprojection(
            old_template, self.templateRaster, new_template_fname,
            gdalconst.GRA_NearestNeighbour, True)
        self.template = new_template_fname
        self.templateRaster = SpatialUtilities.SAHMRaster(self.template)

        csv_path = os.path.join(self.out_dir, "PARC_Files.csv")
        #  BUG FIX: close the read handle before reopening the same file
        #  for writing (the original left it dangling).
        reader_f = open(csv_path)
        first_line, rest_of_lines = reader_f.readline(), reader_f.read()
        reader_f.close()
        writer_f = open(csv_path, "w")
        replacement_line = first_line.split(",")
        replacement_line[5] = new_template_fname
        replacement_line = ",".join(replacement_line)
        writer_f.write(replacement_line)
        writer_f.write(rest_of_lines)
        writer_f.close()

    if input_file_errors != "":
        self.writetolog(input_file_errors, False, False)
        raise utilities.TrappedError(
            "There was one or more problems with " +
            "your input rasters: \n" + input_file_errors)
import fitnessCalc, population, algorithm, utilities, logging

if __name__ == "__main__":
    #  Route the stdlib logging module through the project's logger setup.
    utilities.logger(logging)

    #  The target bit-string the genetic algorithm searches for.
    fitnessCalc.FitnessCalc.setSolutionStr(
        "1111000000000000000000000000000000000000000000000000000000001111")

    current_pop = population.Population(50, True)
    generation = 0
    max_fitness = fitnessCalc.FitnessCalc.getMaxFitness()
    best = current_pop.getFittest()
    objective = best.getFitness()

    #  Evolve until the fittest individual reaches the maximum fitness.
    while objective < max_fitness:
        generation += 1
        logging.info("Generation: %s ObjectiveValue: %s" % (generation, objective))
        logging.debug("\nPopulation:%s" % (current_pop,))
        logging.debug("Fittest in this population: %s\n" % (best,))
        current_pop = algorithm.Algorithm.evolvePopulation(current_pop)
        best = current_pop.getFittest()
        objective = best.getFitness()

    logging.info(
        "\nReached optimal solution (that was provided in the beginning) in Generation: %s"
        % (generation + 1,))
    logging.debug("\nPopulation:%s" % (current_pop,))
parser.add_argument('--sysConfig', default='ical_system_config.xml', help='Configuration file') parser.add_argument('--prodDefs', default='product_definitions', help='Product Definitions file') parser.add_argument('--cableLoss', default='ical_cable_loss.xml', help='Cable Loss (Main)') parser.add_argument('--cableLossDiv', default='ical_cable_loss.xml', help='Cable Loss (Div)') args = parser.parse_args() # Debug Logger debugLog = utilities.logger(args.trace, '', args.verbosity) # Factory Station and attributes fs = factoryStation.factoryStation( debugLog, args.test) # default factoryStation object fs.productName = 'i500_1720_att' fs.powerMeterVisaAddress = '1234' # Set options if args.xml == None: # set default values if no XML options = { 'sysConfig': args.sysConfig, 'prodDefs': args.prodDefs, 'cableLoss': args.cableLoss, 'cableLossDiv': args.cableLossDiv, }
def validateArgs(self):
    """
    Make sure the user sent us some stuff we can work with

    Checks the output directory, the template raster (existence,
    readability, square pixels) and every row of the inputs CSV.
    Validated rows are stored in self.inputs and mirrored into
    PARC_Files.csv in the output directory.  All problems found are
    accumulated and reported in a single TrappedError at the end.
    """
    if not os.path.exists(self.out_dir):
        raise utilities.TrappedError("Specified Output directory " + self.out_dir + " not found on file system")

    if not os.path.isdir(self.out_dir):
        raise utilities.TrappedError("Specified Output directory " + self.out_dir + " is not a directory")

    if self.logger is None:
        self.logger = utilities.logger(self.out_dir, self.verbose)
    self.writetolog = self.logger.writetolog

    # Validate template image.
    if self.template is None:
        raise utilities.TrappedError("template raster not provided.")

    if not os.path.exists(self.template):
        raise utilities.TrappedError("Template file, " + self.template + ", does not exist on file system")

    self.template_params = self.getRasterParams(self.template)
    # (modernized: '<>' is deprecated Python 2 syntax; '!=' is equivalent)
    if len(self.template_params["Error"]) != 0:
        raise utilities.TrappedError("There was a problem with the provided template: \n " +
                                     " " + "\n ".join(self.template_params["Error"]))

    # Ensure the template has square pixels.
    if abs(abs(self.template_params["xScale"]) -
           abs(self.template_params["yScale"])) > 1e-6:
        # BUG FIX: 'xScale'/'yScale' were referenced as bare names (NameError
        # whenever this branch fired); also "/n" was written instead of "\n".
        raise utilities.TrappedError("template image must have square pixels."
                                     + "\n x pixel scale = " + str(abs(self.template_params["xScale"]))
                                     + "\n y pixel scale = " + str(abs(self.template_params["yScale"])))

    # Validate input rasters
    if not os.path.exists(self.inputs_CSV):
        raise utilities.TrappedError("Inputs CSV, " + self.inputs_CSV + ", does not exist on file system.")

    inputsCSV = csv.reader(open(self.inputs_CSV, 'r'))
    header = inputsCSV.next()  # skip the header row
    strInputFileErrors = ""

    # PARC_Files.csv mirrors the input CSV but points at the files PARC
    # will produce.  "wb" is required by the csv module on Python 2.
    outputCSV = os.path.join(self.out_dir, "PARC_Files.csv")
    output = csv.writer(open(outputCSV, "wb"))
    output.writerow(["PARCOutputFile", "Categorical", "Resampling", "Aggregation",
                     "OriginalFile", os.path.abspath(self.template),
                     os.path.abspath(self.out_dir)])

    for row in inputsCSV:
        inputFile = row[0]
        sourceParams = self.getRasterParams(inputFile)
        if len(sourceParams["Error"]) > 0:
            strInputFileErrors += (" " + os.path.split(inputFile)[1] +
                                   " had the following errors:\n" + " " +
                                   "\n ".join(sourceParams["Error"])) + "\n"
        else:
            # (removed a stray 'pass' and a block of dead, commented-out
            # template-coordinate reporting code that lived here)
            if not self.ImageCoversTemplate(sourceParams):
                strInputFileErrors += ("\n Some part of the template image falls outside of " +
                                       os.path.split(inputFile)[1])
                strInputFileErrors += "\n template upper left = (" + str(self.template_params["gWest"]) + ", " + str(self.template_params["gNorth"]) + ")"
                strInputFileErrors += "\n template lower right = (" + str(self.template_params["gEast"]) + ", " + str(self.template_params["gSouth"]) + ")"
                strInputFileErrors += "\n image upper left = (" + str(sourceParams["gWest"]) + ", " + str(sourceParams["gNorth"]) + ")"
                strInputFileErrors += "\n image lower right = (" + str(sourceParams["gEast"]) + ", " + str(sourceParams["gSouth"]) + ")"

        # Column 2: categorical flag, must be '0' (continuous) or '1'.
        if len(row) < 2 or not row[1] in ['0', '1']:
            self.writetolog(" " + os.path.split(inputFile)[1] +
                            " categorical either missing or not 0 or 1:\n   Defaulting to 0 (continuous)")
            if len(row) < 2:
                row.append('0')
            else:
                row[1] = '0'

        # Column 3: resampling method (case-insensitive match).
        if len(row) < 3 or not row[2].lower() in [item.lower() for item in self.resample_methods]:
            # NOTE(review): message always says 'Bilinear' even when the
            # categorical default 'NearestNeighbor' is applied.
            self.writetolog(" " + os.path.split(inputFile)[1] +
                            " resample method either missing or not one of " +
                            ", ".join(self.resample_methods) +
                            "\n   Defaulting to 'Bilinear'")
            if row[1] == '0':
                default = 'Bilinear'
            else:
                default = 'NearestNeighbor'
            if len(row) < 3:
                row.append(default)
            else:
                row[2] = default

        # Column 4: aggregation method (case-insensitive match).
        if len(row) < 4 or not row[3].lower() in [item.lower() for item in self.agg_methods]:
            self.writetolog(" " + os.path.split(inputFile)[1] +
                            " aggregation method either missing or not one of " +
                            ", ".join(self.agg_methods) +
                            "\n   Defaulting to 'Mean'")
            if row[1] == '0':
                default = 'Mean'
            else:
                default = 'Majority'
            if len(row) < 4:
                row.append(default)
            else:
                row[3] = default

        self.inputs.append(row)

        # also write the output row, reconfigured to our output file
        fileName = self.getShortName(row[0])
        fileName = os.path.abspath(os.path.join(self.out_dir, fileName + ".tif"))
        outputrow = [fileName] + row[1:4] + [os.path.abspath(row[0]), os.path.abspath(self.out_dir)]
        output.writerow(outputrow)

    del output  # drop the writer so its underlying file is flushed/closed

    if strInputFileErrors != "":
        self.writetolog(strInputFileErrors)
        raise utilities.TrappedError("There was one or more problems with your input rasters: \n" + strInputFileErrors)
my_port = config.getint(SECTION_NAME, "my_port") session_timeout = config.getint(SECTION_NAME, "session_timeout") except Exception as err: util.oops("Trouble with config file {%s}, reason: {%s}", config_file, repr(err)) # Initialize context try: ctx = SSL.Context(SSL.SSLv23_METHOD) ctx.set_verify(SSL.VERIFY_PEER | SSL.VERIFY_FAIL_IF_NO_PEER_CERT, examine_certificate) # Demand a client certificate ctx.use_certificate_file(my_crt_file) # Provide a server certificate ctx.use_privatekey_file(my_key_file) # My private key ctx.load_verify_locations(ca_crt_file) # I trust this CA ctx.set_timeout(session_timeout) # Set session timeout value util.logger("SSL context initialized") except Exception as err: util.oops( "Could not initialize SSL context, reason: {%s} - see the HELP.txt file", repr(err)) # Set up server try: sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) server = SSL.Connection(ctx, sock) server.bind(("localhost", my_port)) server.listen(3) server.setblocking(0) # non-blocking except Exception as err: util.oops( "Cannot start listening for SSL connections to port {%d}, reason: {%s}",