def indexpattern_generate(start, end, raw=False, allraw=False):
    """Return the proper index pattern for queries to Elasticsearch on
    gracc.opensciencegrid.org.

    Narrowing the index pattern (rather than always using a general
    wildcard) improves query performance; this especially helps with
    reports.

    :param start: start of the query range (object with .year/.month,
        e.g. datetime or date)
    :param end: end of the query range (same type as start)
    :param bool raw: query the raw indices instead of the summary index
    :param bool allraw: query every raw index with a full wildcard
    :return str: the index pattern to pass to Elasticsearch
    """
    if allraw:
        return 'gracc.osg.raw-*'
    if not raw:
        # Default: the summarized index
        return 'gracc.osg.summary'

    # raw is True, allraw is False: narrow the raw pattern as far as the
    # date range allows.
    basepattern = 'gracc.osg.raw-'
    if start.year == end.year:
        basepattern += '{0}.'.format(start.year)
        if start.month == end.month:
            # Zero-pad the month to match the raw index naming scheme
            basepattern += '{0:02d}'.format(start.month)
        else:
            basepattern += '*'
    else:
        basepattern += '*'
    return basepattern
def sampleDistanceWithInterval(self):
    """Record distance readings for sampleLength seconds (when recording is
    active and storage has space), then reschedule itself to run again
    after sampleInterval seconds."""
    # Launch the next timer thread to record distance after the interval
    self.nextTimer = threading.Timer(self.sampleInterval, self.sampleDistanceWithInterval, ())
    self.nextTimer.start()
    if self.recordingLoopActive and self.storage.hasSpace():
        self.isRecording = True
        # The timer below ensures that, even when there is an error recording
        # distance, isRecording won't stay on: the pi has sampleLength + 15
        # seconds to finish recording.
        threading.Timer(self.sampleLength + 15, self.resetIsRecording, ()).start()
        try:
            self.logger.log("[DistanceSensor] started recording distance")
            end_time = time.time() + self.sampleLength
            # Sample repeatedly until the recording window elapses
            while time.time() < end_time:
                distance = self.readDistance()
                timestamp = TimeUtils.getTimestamp()
                output = "%s %f\n" % (timestamp, distance)
                self.distLogger.appendToFile(output)
            self.logger.log("[DistanceReader] recorded distance")
        except Exception as e:
            self.logger.logError("DistanceReader", "Error reading distance", e)
        self.isRecording = False
def makeBackup(self, newFilepath, piName):
    """Move the current log file into newFilepath under a timestamped name
    tagged with piName, and recreate an empty file in its place.

    No-op when no filepath is configured.
    """
    if self.filepath is None:
        return
    # Hold the lock so no thread writes to the file while we are moving it
    # and creating a new one
    with self.lock:
        backup_name = (self.filename.split(".")[0] + "_" +
                       TimeUtils.getTimestamp() + "_" + piName + ".txt")
        os.rename(os.path.join(self.filepath, self.filename),
                  os.path.join(newFilepath, backup_name))
        # Recreate the (now empty) active log file; the context manager
        # guarantees the handle is closed even if truncation fails
        with open(os.path.join(self.filepath, self.filename), "w+"):
            pass
def makeBackup(self, newFilepath, piName):
    """Archive the active file under newFilepath (timestamped and tagged
    with piName) and start a fresh, empty one.

    Does nothing when filepath is unset.
    """
    if self.filepath is None:
        return
    # Lock out writers while the file is being moved and replaced
    with self.lock:
        base = self.filename.split(".")[0]
        src = os.path.join(self.filepath, self.filename)
        dst = os.path.join(newFilepath,
                           base + "_" + TimeUtils.getTimestamp() + "_" + piName + ".txt")
        os.rename(src, dst)
        tempFile = open(os.path.join(self.filepath, self.filename), "w+")
        tempFile.close()
def __init__(self, report, config, start, end=None, verbose=False, raw=False,
             allraw=False, template=None, is_test=False, no_email=False,
             title=None, logfile=None, logfile_override=False, check_vo=False):
    """Set up a report run: parse the time range, pick the Elasticsearch
    index pattern, configure logging/email, and open the ES client.

    :param report: report type identifier, stored as self.report_type
    :param config: configuration object; its .config attribute is stored
    :param start: start time (parsed via self.parse_datetime)
    :param end: end time (parsed via self.parse_datetime)
    :param bool raw: query raw indices (see indexpattern_generate)
    :param bool allraw: query all raw indices with a wildcard
    :param template: report template, stored for later use
    :param bool is_test: test-mode flag
    :param bool no_email: suppress email sending
    :param title: NOTE(review): accepted but not stored here — confirm
        whether subclasses consume it
    :param logfile: log file path; defaults to 'reports.log' when falsy
    :param bool logfile_override: passed to get_logfile_path as override
    :param bool check_vo: when True, run the VO validity check
    """
    TimeUtils.__init__(self)
    self.header = []
    if config:
        self.config = config.config
    self.start_time = self.parse_datetime(start)
    self.end_time = self.parse_datetime(end)
    self.verbose = verbose
    self.no_email = no_email
    self.is_test = is_test
    self.template = template
    self.epochrange = None
    # Index pattern depends on the raw/allraw flags and the parsed range
    self.indexpattern = self.indexpattern_generate(raw, allraw)
    self.report_type = report
    if logfile:
        self.logfile = self.get_logfile_path(logfile, override=logfile_override)
    else:
        self.logfile = 'reports.log'
    if check_vo:
        self.__check_vo()
    # Order matters below: email info and logger are set up before the
    # Elasticsearch client is established
    self.email_info = self.__get_email_info()
    self.logger = self.__setupgenLogger()
    self.client = self.__establish_client()
def __init__(self, config_file, flush_cache=False, verbose=False):
    """Build the processor from a JSON configuration file.

    Wires up the error report, time utilities, GeoIP database, weather
    API client and the Redis-backed weather cache; optionally flushes
    the cache at startup.
    """
    # Load configuration first: everything below depends on it
    with open(config_file) as fh:
        config = json.load(fh)
    self.max_forecasts = []
    self.lines_processed = 0
    self.delimiter = '\t'
    self.verbose = verbose
    self.error_report = ErrorReport()
    self.time_utils = TimeUtils()
    self.geo_precision = config['log_processor']['geo_precision']
    self.geoip_db = open_database(config['geoip_db']['file'])
    self.weather_api = OpenWeatherMap(
        api_key=config['weather_api']['api_key'],
        units=config['weather_api']['units'])
    self.weather_cache = WeatherCache(
        host=config['cache']['host'],
        port=config['cache']['port'],
        redis_key_expiry_secs=config['cache']['key_expiry_secs'])
    if flush_cache:
        self.weather_cache.flush()
def samplePictureWithInterval(self):
    """Take one picture (when recording is active and storage has space),
    then reschedule this method to run again after sampleInterval seconds."""
    # Set up the next timer thread
    self.nextTimer = threading.Timer(self.sampleInterval, self.samplePictureWithInterval, ())
    self.nextTimer.start()
    if self.recordingLoopActive and self.storage.hasSpace():
        self.isRecording = True
        # The timer below ensures that, even when there is an error taking a
        # picture, isRecording won't stay on: the pi has 15 seconds to take
        # the picture
        threading.Timer(15, self.resetIsRecording, ()).start()
        timestamp = TimeUtils.getTimestamp()
        try:
            self.takePicture(self.saveFolder, timestamp + '.png')
            self.logger.log("[PictureReader] Took picture")
        except Exception as e:
            self.logger.logError("PictureReader", "Error taking picture", e)
        self.isRecording = False
def samplePictureWithInterval(self):
    """Capture a single picture and re-arm the interval timer."""
    # Re-arm the interval timer first so capture errors cannot stop the loop
    self.nextTimer = threading.Timer(self.sampleInterval, self.samplePictureWithInterval, ())
    self.nextTimer.start()
    # Guard clause: skip the capture entirely when not recording or full
    if not (self.recordingLoopActive == True and self.storage.hasSpace()):
        return
    self.isRecording = True
    # Safety net: resetIsRecording clears the flag after 15 seconds even if
    # taking the picture raises
    threading.Timer(15, self.resetIsRecording, ()).start()
    stamp = TimeUtils.getTimestamp()
    try:
        self.takePicture(self.saveFolder, stamp + '.png')
        self.logger.log("[PictureReader] Took picture")
    except Exception as e:
        self.logger.logError("PictureReader", "Error taking picture", e)
    self.isRecording = False
def sampleTemperatureWithInterval(self):
    """Record one temperature sample (when recording is active and storage
    has space), then reschedule itself after sampleInterval seconds."""
    # Launch next timer thread to record temperature after the interval
    self.nextTimer = threading.Timer(self.sampleInterval, self.sampleTemperatureWithInterval, ())
    self.nextTimer.start()
    if self.recordingLoopActive and self.storage.hasSpace():
        self.isRecording = True
        # The timer below ensures that, even when there is an error recording
        # temperature, isRecording won't stay on: the pi has 10 seconds to
        # record the temperature
        threading.Timer(10, self.resetIsRecording, ()).start()
        try:
            temperature = self.readTemperature()
            timestamp = TimeUtils.getTimestamp()
            output = "%s %s\n" % (timestamp, temperature)
            # Append the reading to the temperature file
            self.tempLogger.appendToFile(output)
            self.logger.log("[TemperatureReader] Recorded temperature")
        except Exception as e:
            self.logger.logError("TemperatureReader", "Error reading temperature", e)
        self.isRecording = False
def log(self, text):
    """Write a timestamped message to the console and, when a filepath is
    configured, append it to the log file as well."""
    # logText is printed to console and appended to the file
    logText = TimeUtils.getTimestamp() + " " + text
    print logText
    if self.filepath is not None:
        self.appendToFile(logText + "\n")
def initial_setup():
    """Configure INFO-level logging to a timestamped log file."""
    clock = TimeUtils()
    log_name = "log_" + str(clock.get_time())
    logging.basicConfig(filename=log_name, level=logging.INFO)
class WeatherLogProcessor(object):
    """Used for reading, processing a tsv log file.

    Reads web-log rows from a TSV file, geolocates each client IP,
    fetches (cache-first) tomorrow's forecast max temperature for that
    location, and can write the collected maxima out as a histogram TSV.
    """

    def __init__(self, config_file, flush_cache=False, verbose=False):
        # config_file: path to a JSON configuration file
        # flush_cache: when True, empty the weather cache at startup
        self.max_forecasts = []   # forecast maxima collected by process_tsv (sorted at its end)
        self.lines_processed = 0
        self.delimiter = '\t'
        with open(config_file) as conf_file:
            config = json.load(conf_file)
        self.error_report = ErrorReport()
        self.time_utils = TimeUtils()
        self.verbose = verbose
        self.geo_precision = config['log_processor']['geo_precision']
        self.geoip_db = open_database(config['geoip_db']['file'])
        self.weather_api = OpenWeatherMap(api_key=config['weather_api']['api_key'],
                                          units=config['weather_api']['units'])
        self.weather_cache = WeatherCache(host=config['cache']['host'],
                                          port=config['cache']['port'],
                                          redis_key_expiry_secs=config['cache']['key_expiry_secs']
                                          )
        if flush_cache:
            self.weather_cache.flush()

    def _round_to(self, val):
        """Returns a value rounded to the precision value"""
        # Round half-away-from-zero to the nearest multiple of geo_precision
        correction = 0.5 if val >= 0 else -0.5
        return int(val/self.geo_precision+correction) * self.geo_precision

    def get_tomorrows_max(self, latitude, longitude):
        """Returns the forecast max for tomorrow given latitude and longitude"""
        start_of_tomorrow = self.time_utils.get_start_of_tomorrow_utc()
        end_of_tomorrow = self.time_utils.get_end_of_tomorrow_utc()
        # Cache lookup first; a hit avoids an API round trip
        temperature = self.weather_cache.get_forecast_max(latitude=latitude,
                                                          longitude=longitude,
                                                          forecast_date=start_of_tomorrow)
        if temperature:
            # NOTE(review): truthiness check means a cached temperature of
            # exactly 0 is treated as a cache miss — confirm intended
            if self.verbose:
                print("retrieved from cache")
        else:
            temperature = self.weather_api.get_geo_max_temperature(latitude=latitude,
                                                                   longitude=longitude,
                                                                   start=start_of_tomorrow,
                                                                   end=end_of_tomorrow)
            if self.verbose:
                print("retrieved from api call")
            # Store for subsequent lookups of the same rounded location
            self.weather_cache.set_forecast_max(latitude=latitude,
                                                longitude=longitude,
                                                forecast_date=start_of_tomorrow,
                                                temperature=temperature)
        return temperature

    def process_tsv(self, input_file):
        """Loops through values in tsv looking in forecast max in cache first
        Then does a api call if value is not in cache"""
        with open(input_file) as tsvfile:
            reader = csv.reader(tsvfile, delimiter=self.delimiter)
            line_number = 0
            for row in reader:
                line_number = line_number + 1
                try:
                    temperature = None
                    # Column index 23 holds the client IP address
                    ip_address = row[23]
                    geoip_result = self.geoip_db.get(ip_address)
                    if geoip_result is None:
                        raise Exception("No entry found in geodb")
                    else:
                        # Round coordinates so nearby IPs share a cache entry
                        latitude = self._round_to(val=geoip_result['location']['latitude'])
                        longitude = self._round_to(val=geoip_result['location']['longitude'])
                        temperature = self.get_tomorrows_max(latitude=latitude,
                                                             longitude=longitude)
                        self.max_forecasts.append(temperature)
                except Exception as e:
                    # Per-line failures are collected, not fatal
                    self.error_report.add_error(error_str=str(e), line_number=line_number)
        if len(self.max_forecasts) == 0:
            raise Exception("No forecasts returned")
        self.lines_processed = line_number
        self.max_forecasts = sorted(self.max_forecasts)

    def create_histogram_tsv(self, number_of_buckets, output_file):
        """ Creates the histogram tsv file once all max temperatures is created"""
        # Never use more buckets than data points
        if len(self.max_forecasts) < number_of_buckets:
            number_of_buckets = len(self.max_forecasts)
        bucket_size = (self.max_forecasts[-1] - self.max_forecasts[0]) / number_of_buckets
        row_min = self.max_forecasts[0]
        row_max = self.max_forecasts[0] + bucket_size
        count = 0
        with open(output_file, 'w') as file:
            # NOTE(review): 'file' shadows the builtin of the same name
            filewriter = csv.writer(file,
                                    delimiter=self.delimiter,
                                    quoting=csv.QUOTE_MINIMAL,
                                    quotechar='|'
                                    )
            filewriter.writerow(['bucketMin', 'bucketMax', 'count'])
            # max_forecasts is sorted, so buckets are walked in step with data
            for forecast in self.max_forecasts:
                rounded_row_min = round(row_min, 2)
                rounded_row_max = round(row_max, 2)
                if forecast <= rounded_row_max:
                    count = count + 1
                else:
                    filewriter.writerow([rounded_row_min, rounded_row_max, count])
                    count = 1
                    row_min = row_max
                    row_max = row_max + bucket_size
                    # NOTE(review): only advances one bucket per forecast, so
                    # empty intermediate buckets are skipped — confirm intended
            # prints the last row
            filewriter.writerow([rounded_row_min, rounded_row_max, count])

    def print_error_report(self):
        """Print a summary of the errors collected during process_tsv."""
        errors = self.error_report.get_errors()
        if errors:
            print("ERROR REPORT:")
            for key, error in errors.items():
                print("ERROR TYPE: {}".format(key))
                print("count: {} out of {} lines processed".format(error['count'],
                                                                   self.lines_processed))
                print("error happened on lines: {}".format(error['lines']))
                print("")
        else:
            print("No errors found.")

    def close(self):
        """Close the GeoIP database handle."""
        self.geoip_db.close()
def run(self, x):
    """Sleep 20 seconds, then print self.n + 10 + x."""
    time.sleep(20)
    print self.n + 10 + x


if __name__ == '__main__':
    # Ad-hoc manual tests; most calls are kept commented out for reference.
    # print FileUtils.countRow('/home/deadend/code/java/basic/')
    # t = Task(2)
    # exe = TimeLimitExecutor(10, t.run, args=(3, ))
    # exe.execute()
    # FileUtils.backup('/tmp/zzc/', '/tmp/zzc_bak', '*2016.txt')
    # print TimeUtils.tsp2time(1472478350916)
    print TimeUtils.timedelta('20160901', 3)
    print TimeUtils.timedelta('201609', 5)
    # FileUtils.addRowIndex('/tmp/files', '|', 10000)
    # print FileUtils.countFilesRow('/tmp/zzc')
    # print FileUtils.countFilesRow('/tmp/zzc/a')
    # print FileUtils.merge('/tmp/zzc', 'abc', 'test*')
    # print FileUtils.split('/tmp/zzc/abc', 1024, 'test_', '.txt', 4)
    # Must be run from the hiveloader root directory
    # FileUtils.remove('/tmp/zzc', "b*")
    # FileUtils.rmHiddenFile('/tmp/zzc/')
    # print FileUtils.addExtension('/tmp/zzc/', '.txt', 'test*')
def archiveCheck(self):
    """Periodic archiving pass: when enough sound/picture files have
    accumulated, stop the recorders, tar up the files, delete the
    originals on success, restart the recorders, and reschedule itself.
    """
    if self.keepArchiving:
        self.isProcessing = True
        # Checking if there are enough files to archive
        shouldArchiveSound = self.shouldArchive(self.soundDirectory)
        shouldArchivePictures = self.shouldArchive(self.pictureDirectory)
        # If there aren't enough files to archive, we do nothing
        if not shouldArchiveSound and not shouldArchivePictures:
            self.nextTimer = threading.Timer(self.checkInterval, self.archiveCheck, ())
            self.nextTimer.start()
            self.logger.log("[ArchiveManager] not enough files to archive, exiting")
            self.isProcessing = False
            return
        # If there are enough files, we ask recordingManager to stop the
        # recording threads; if it couldn't, we enter this if statement and
        # do nothing (recording is restarted and we retry later)
        if not self.recordingManager.stopRecording():
            self.nextTimer = threading.Timer(self.checkInterval, self.archiveCheck, ())
            self.nextTimer.start()
            self.logger.log("[ArchiveManager] could not stop recordings, exiting")
            self.recordingManager.startRecording()
            self.isProcessing = False
            return
        # If there are enough sound files, attempt to archive the sound files
        if shouldArchiveSound:
            self.logger.log("[ArchiveManager] Attempting to archive sound")
            try:
                self.logger.log("[ArchiveManager] Making sound archive")
                # If makeArchive fails it returns a non-0 value and we will
                # not delete the old sound files
                if self.makeArchive(self.soundDirectoryName, self.soundDirectoryName + '_'+ TimeUtils.getTimestamp() + '_' +self.piName) == 0:
                    self.logger.log("[ArchiveManager] Removing old sound files")
                    # Tries to remove all .wav and .mp3 files from the folder.
                    # NOTE(review): os.system("sudo rm " + dir + "*.wav") is a
                    # shell string built from config paths — fragile if the
                    # path contains spaces/metacharacters; consider subprocess
                    # with a list. Also, os.system does not raise on failure,
                    # so these except blocks likely never fire — confirm.
                    try:
                        os.system("sudo rm " + self.soundDirectory + "*.wav")
                    except Exception as e:
                        self.logger.logError("ArchiveManager", "Error removing .wav files", e)
                    try:
                        os.system("sudo rm " + self.soundDirectory + "*.mp3")
                    except Exception as e:
                        self.logger.logError("ArchiveManager", "Error removing .mp3 files", e)
                else:
                    self.logger.log("[ArchiveManager] Sound archive was not created, exit code was not 0")
            except Exception as e:
                self.logger.logError("ArchiveManager", "Error making sound archive", e)
        # If there are enough picture files, attempt to archive them
        if shouldArchivePictures:
            self.logger.log("[ArchiveManager] Attempting to archive photos")
            try:
                self.logger.log("[ArchiveManager] Making photo archive")
                # If makeArchive fails it returns a non-0 value and we won't
                # delete the old photos
                if self.makeArchive(self.pictureDirectoryName, self.pictureDirectoryName+'_'+ TimeUtils.getTimestamp()+'_'+self.piName) == 0:
                    self.logger.log("[ArchiveManager] Removing old picture files")
                    # Removes all .png files (same os.system caveat as above)
                    try:
                        os.system("sudo rm " + self.pictureDirectory+"*.png")
                    except Exception as e:
                        self.logger.logError("ArchiveManager", "Error removing .png files", e)
                else:
                    # NOTE(review): message lacks the "[ArchiveManager]" prefix
                    # used by every other log call here
                    self.logger.log("Photo archive was not created, exit code was not 0")
            except Exception as e:
                self.logger.logError("ArchiveManager", "Error making picture archive", e)
        # Done archiving: tell the recordingManager to start recording again
        self.recordingManager.startRecording()
        # Set isProcessing to False to show that we are done archiving
        self.isProcessing = False
        # Launches the next archive timer thread
        self.nextTimer = threading.Timer(self.checkInterval, self.archiveCheck, ())
        self.nextTimer.start()
config.write(cfgFileNew) cfgFileNew.close() print "Autostart was set to on successfully!" elif decision is "2": config.set("General", "auto_start", "off") cfgFileNew = open(local_file_path + 'config', "w") config.write(cfgFileNew) cfgFileNew.close() print "Autostart was set to off successfully!" else: print "input not recognized!" elif arg == "-ac" or arg == "-au": piName = config.get("General", "pi_name") archiveSaveSoundName = config.get( "Saving", "sound_save_folder_name") + "_" + TimeUtils.getTimestamp( ) + "_" + piName + "_FORCED" archiveSavePhotoName = config.get( "Saving", "photo_save_folder_name") + "_" + TimeUtils.getTimestamp( ) + "_" + piName + "_FORCED" archiveSaveDir = os.path.join( config.get("Saving", "base_data_directory"), config.get("Saving", "backup_save_folder_name"), "") archiveSaveSoundPath = os.path.join(archiveSaveDir, archiveSaveSoundName) archiveSavePhotoPath = os.path.join(archiveSaveDir, archiveSavePhotoName) archiveSoundReadDir = os.path.join( config.get("Saving", "base_data_directory"),
cfgFileNew = open(local_file_path + 'config', "w") config.write(cfgFileNew) cfgFileNew.close() print "Autostart was set to on successfully!" elif decision is "2": config.set("General", "auto_start", "off") cfgFileNew = open(local_file_path + 'config', "w") config.write(cfgFileNew) cfgFileNew.close() print "Autostart was set to off successfully!" else: print "input not recognized!" elif arg == "-ac" or arg == "-au": piName = config.get("General", "pi_name") archiveSaveSoundName = config.get("Saving", "sound_save_folder_name") + "_" + TimeUtils.getTimestamp() + "_" + piName + "_FORCED" archiveSavePhotoName = config.get("Saving", "photo_save_folder_name") + "_" + TimeUtils.getTimestamp() + "_" + piName + "_FORCED" archiveSaveDir = os.path.join(config.get("Saving", "base_data_directory"),config.get("Saving", "backup_save_folder_name"),"") archiveSaveSoundPath = os.path.join(archiveSaveDir, archiveSaveSoundName) archiveSavePhotoPath = os.path.join(archiveSaveDir, archiveSavePhotoName) archiveSoundReadDir = os.path.join(config.get("Saving", "base_data_directory"), config.get("Saving", "sound_save_folder_name"), "") archivePhotoReadDir = os.path.join(config.get("Saving", "base_data_directory"), config.get("Saving", "photo_save_folder_name"), "") if arg == "-ac": ret = subprocess.call(["sudo", "tar", "-cvzf", archiveSaveSoundPath+".tar.gz","-C", archiveSoundReadDir, "."]) if ret == 0: os.system("sudo rm " + archiveSoundReadDir + "*") ret = subprocess.call(["sudo", "tar", "-cvzf", archiveSavePhotoPath+".tar.gz","-C", archivePhotoReadDir, "."])