def psutil_process(communication_queue, worker_id, worker_parameters):
    logging.basicConfig(
        filename=base_path + '/log/agent.log',
        level=logging.DEBUG,
        format='%(asctime)s.%(msecs)d %(levelname)s %(module)s - %(funcName)s: %(message)s',
        datefmt="%Y-%m-%d %H:%M:%S")
    tick = worker_parameters["tick"]
    warning = {}
    if tick > 0:
        tick = my_frequency(tick)
        beat.set_rate(tick)
        while beat.true():
            for proc in psutil.process_iter():
                try:
                    pinfo = proc.as_dict(attrs=['pid', 'name'])
                    pinfo = base64.b64encode(json.dumps(pinfo))
                except psutil.NoSuchProcess:
                    pass
                else:
                    message = {
                        "agent": host_name,
                        "function": "process",
                        "value": pinfo
                    }
                    communication_queue.put(json.dumps(message))
            beat.sleep()

def ufhloop():
    beat.set_rate(.1)
    while beat.true():
        schedule.run_pending()
        xrf.receive()
        # for d in iter(W1.devices):
        #     d.read()
        output.relays()  # calls room.control and outputs changes.
        # xiv.send()
        state.save()
        try:
            beat.sleep()
        except:
            print "no time to sleep"

def capture(directory):
    if not os.path.exists(directory):
        os.makedirs(directory)
    print "Screenshotting every %d seconds..." % screenshot_rate
    beat.set_rate(1.0 / screenshot_rate)
    index = 0
    while beat.true():
        try:
            now = datetime.now()
            timestamp = datetime.strftime(now, timestamp_format)
            filename = "%08d.jpg" % index
            path = os.path.join(directory, filename)
            subprocess.call(["scrot", "-q", str(scrot_quality), path])
            annotate_image(directory, filename, timestamp)
            index += 1
            beat.sleep()
        except KeyboardInterrupt:
            print "Encoding..."
            encode(directory)
            sys.exit(0)

def psutil_cpu_count(communication_queue, worker_id, worker_parameters):
    logging.basicConfig(
        filename=base_path + '/log/agent.log',
        level=logging.DEBUG,
        format='%(asctime)s.%(msecs)d %(levelname)s %(module)s - %(funcName)s: %(message)s',
        datefmt="%Y-%m-%d %H:%M:%S")
    tick = worker_parameters["tick"]
    warning = {}
    warning["cpu_count"] = 0
    warning["cpu_count_logical"] = 0
    if tick > 0:
        beat.set_rate(my_frequency(tick))
        while beat.true():
            # Physical CPUs
            try:
                pinfo = psutil.cpu_count(logical=False)
                if pinfo < worker_parameters["min_cpu"]:
                    message = {
                        "agent": host_name,
                        "function": "cpu_count",
                        "value": pinfo
                    }
                    communication_queue.put(json.dumps(message))
                    warning["cpu_count"] = 1
                    beat.set_rate(my_frequency(worker_parameters["tick_error"]))
                elif warning["cpu_count"] == 1:
                    logging.debug("in solved cpu_count")
                    warning["cpu_count"] = 0
                    message = {
                        "agent": host_name,
                        "function": "cpu_count",
                        "value": "solved"
                    }
                    communication_queue.put(json.dumps(message))
            except Exception:
                logging.error("Unexpected error in psutil_cpu_count - cpu_count: %s"
                              % (sys.exc_info()[0]))
            # Logical CPUs
            try:
                pinfo = psutil.cpu_count(logical=True)
                if pinfo < worker_parameters["min_logical_cpu"]:
                    message = {
                        "agent": host_name,
                        "function": "cpu_count_logical",
                        "value": pinfo
                    }
                    communication_queue.put(json.dumps(message))
                    warning["cpu_count_logical"] = 1
                    beat.set_rate(my_frequency(worker_parameters["tick_error"]))
                elif warning["cpu_count_logical"] == 1:
                    logging.debug("in solved cpu_count_logical")
                    warning["cpu_count_logical"] = 0
                    message = {
                        "agent": host_name,
                        "function": "cpu_count_logical",
                        "value": "solved"
                    }
                    communication_queue.put(json.dumps(message))
            except Exception:
                logging.error("Unexpected error in psutil_cpu_count - cpu_count_logical: %s"
                              % (sys.exc_info()[0]))
            beat.sleep()

logger = logging.getLogger(__name__)
logger.debug(opts)

# Data storage
sqlcon = sqlite3.connect('/var/lib/temperatur/temperatur.db')

i2c = SMBus(1)
thermo_luft = thermometer(i2caddr=0x19, name='Luft vorne')
thermo_vorlauf = thermometer(i2caddr=0x18, name='Vorlauf')
thermo_ruecklauf = thermometer(i2caddr=0x1e, name='Ruecklauf')

beat.set_rate(1.0/60)
while beat.true():
    temp_luft = thermo_luft.get_temp()
    temp_vorlauf = thermo_vorlauf.get_temp()
    temp_ruecklauf = thermo_ruecklauf.get_temp()
    temps = [1, temp_vorlauf, temp_ruecklauf, temp_luft, None, None, None]
    temps[0] = datetime.datetime.now()
    logger.debug("Write into db: {0}".format(temps))
    # convert (back) to tuple for sqlite3
    ttemps = tuple(temps)
    # Table:
    # cur.execute("CREATE TABLE Temperatur(Timestamp INT, TempVorlauf REAL, TempRuecklauf REAL, TempVorne REAL, TempHinten REAL, TempBoden REAL, TempLuft REAL)")
    with sqlcon:
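        # The original body of this `with` block is not shown above. A minimal
        # sketch of what it might do, assuming the seven-column Temperatur table
        # from the commented CREATE TABLE statement (an illustration, not the
        # original code); the with-block commits on success:
        sqlcon.execute("INSERT INTO Temperatur VALUES (?, ?, ?, ?, ?, ?, ?)", ttemps)
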
def run(self):
    while beat.true():
        self.do_speedtest()
        beat.sleep()

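# All of these snippets pace a loop with the same three calls: beat.set_rate(),
# beat.true() and beat.sleep(). A minimal, self-contained sketch of that pattern,
# assuming `beat` is importable as a module and that set_rate() takes a frequency
# in Hz (as the 1.0/60 and 0.027 calls elsewhere in this listing suggest);
# do_work() is a placeholder, not taken from any snippet here:
import beat  # assumed import; the snippets only use the name `beat`


def do_work():
    pass  # stand-in for whatever the loop should do on each tick


beat.set_rate(0.5)    # one tick every two seconds (0.5 Hz)
while beat.true():    # presumably stays True while the beat is running
    do_work()
    beat.sleep()      # presumably waits out the remainder of the current tick
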
consecFrames = 0
recFrames = 0
lastUploaded = datetime.datetime.now()
motionCounter = 0

cv2.namedWindow("Security Feed")
cv2.namedWindow("ctrl", cv2.WINDOW_NORMAL)
cv2.createTrackbar('1:Exit app', "ctrl", 0, 1, quit)
cv2.resizeWindow("ctrl", 300, 100)
cv2.moveWindow("ctrl", 500, 35)
cv2.moveWindow("Security Feed", 1024, 1024)

# capture frames from the camera
lastUploaded = datetime.datetime.now()
while beat.true():
    loopstarttime = datetime.datetime.now()
    ret, frame = cap.read()
    if not ret:
        continue
    timestamp = datetime.datetime.now()
    text = "Unoccupied"

    # resize the frame, convert it to grayscale, and blur it
    frame = imutils.resize(frame, width=500)

    # draw the text and timestamp on the frame
    ts = timestamp.strftime("%A %d %B %Y %H:%M:%S")
    cv2.putText(frame, ts, (10, frame.shape[0] - 10), cv2.FONT_HERSHEY_SIMPLEX,

def downloader():
    print("\n\n\n= = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = ")
    print("        Option 1: Download data from UN ComTrade        ")
    print("= = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = ")
    print("\nFor this option, you will have to:\n"
          "    Specify the level of aggregation wanted for the commodity data\n"
          "    Specify the first and last year to be downloaded")
    print("\n= = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = ")

    # Choose the aggregate level: 2-digit/4-digit/6-digit
    valid_choices = ["AG2", "AG4", "AG6"]
    AG = None
    while AG not in valid_choices:
        AG = raw_input("Choose the levels of aggregation for commodities [AG2, AG4, AG6]: ")

    # Choose the years:
    valid_years = range(1962, 2014)  # gives [1962-2013]
    year_s = None
    while year_s not in valid_years:
        year_s = int(raw_input("Choose the first year to be downloaded [1962-2013]: "))
    year_e = None
    while year_e not in valid_years:
        year_e = int(raw_input("Choose the last year to be downloaded [1962-2013]: "))

    for year in range(year_s, year_e + 1):
        dl_path = os.path.join('data', 'dl', AG)  # where the data from ComTrade will be downloaded
        if not os.path.exists(dl_path):
            os.makedirs(dl_path)
        add_res = os.path.join('data', 'add_res')  # where the additional files needed are stored
        ctrycodes = pd.read_excel(os.path.join(add_res, 'country_iso_codes.xls'))
        ctryrx = pd.read_csv(os.path.join(add_res, 'country_list_redux.csv'), sep='\t')
        ctryrx = pd.merge(ctryrx, ctrycodes, how='left', left_on='iso3', right_on='ISO3-digit Alpha')
        ctrys = ctryrx.loc[ctryrx['End Valid Year'] > 2009]
        ctrys = ctrys[['country', 'iso3', 'ctyCode']].drop_duplicates()

        error_list = []
        i = 0
        beat.set_rate(0.027)  # 100 requests/hour is roughly 0.027 requests/s
        while beat.true():
            try:
                ctry = ctrys.iloc[i]
            except:
                print '\nDownload of %d files completed' % i
                break
            print '\ndownloading', year, ctry['country'], '...'
            myfn = os.path.join(dl_path, "comtrade_EXtoWorld_%s_%s.csv" % (str(ctry['iso3']), str(year)))
            if os.path.exists(myfn):
                i += 1
                continue
            print 'Saving file in', myfn, '...'
            ctry_code = ctry['ctyCode']
            file_url = 'http://comtrade.un.org/api/get?max=50000&type=C&freq=A&cc=%s&px=HS&ps=%s&r=%s&p=0&rg=2&fmt=csv' % (str(AG), year, str(ctry_code))
            try:
                file_name = wget.download(file_url, out=myfn)
            except:
                print 'error for ', ctry['country']
                error_list.append(ctry_code)
            i += 1
            beat.sleep()

        # Redownload instantly the files with errors
        print 'Check for errors', '...'
        i = 0
        j = 0
        beat.set_rate(0.027)  # 100 requests/hour is roughly 0.027 requests/s
        while beat.true():
            try:
                ctry = ctrys.iloc[i]
            except:
                print '\nRedownload of %d files completed' % i
                break
            myfn = os.path.join(dl_path, "comtrade_EXtoWorld_%s_%s.csv" % (str(ctry['iso3']), str(year)))
            size = os.path.getsize(myfn)
            if size != 36:
                i += 1
                j = i - 1
                continue
            print '\nReplacing', year, ctry['country'], '...'
            os.remove(myfn)
            print 'Saving file in', myfn, '...'
            ctry_code = ctry['ctyCode']
            file_url = 'http://comtrade.un.org/api/get?max=50000&type=C&freq=A&cc=%s&px=HS&ps=%s&r=%s&p=0&rg=2&fmt=csv' % (str(AG), year, str(ctry_code))
            try:
                file_name = wget.download(file_url, out=myfn)
            except:
                print 'error for ', ctry['country']
            size = os.path.getsize(myfn)
            if size == 36:
                i -= 1  # still a 36-byte (empty) file: stay on this country and retry
            i += 1
            beat.sleep()

    # Cleaning the downloads
    # Get all files.
    files = os.listdir(dl_path)
    redo_list = []
    filename = []
    cat1 = []
    cat2 = []
    cat3 = []
    cat4 = []
    for f in files:
        location = os.path.join(dl_path, f)
        size = os.path.getsize(location)
        if size < 1000:
            if pd.read_csv(location).iloc[0, 0] == 'No data matches your query or your query is too complex. Request JSON or XML format for more information.':
                redo_list.append(f)
                filename.append(f[:-4])
                cat1.append(f[:-4].split('_')[0])
                cat2.append(f[:-4].split('_')[1])
                cat3.append(f[:-4].split('_')[2])
                cat4.append(f[:-4].split('_')[3])
                os.remove(location)

    deleted = pd.DataFrame(filename, columns=['filename'])
    deleted['source'] = cat1
    deleted['type'] = cat2
    deleted['country'] = cat3
    deleted['year'] = cat4

    # Save a report of the deleted files
    print("\nThere were %s empty files. They have been deleted automatically" % len(redo_list))
    fname = 'DeletedFiles_%s_%s.csv' % (str(AG), str(cat4[0]))
    # Check the folder exists
    dest = os.path.join('data', 'dl', 'dl_reports')
    if not os.path.exists(dest):
        os.makedirs(dest)
    # Save the file
    fdest = os.path.join(dest, fname)
    deleted.to_csv(fdest, sep='\t', index=False)
    print '\nSaving files in', fdest, '...'
    print("\nThe report DeletedFiles_%s.csv contains the information on the files that were empty and have been deleted." % str(cat4[0]))
    print '\nOperation complete.'
    raw_input("\nPress Enter to continue...")
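
# The 0.027 passed to beat.set_rate() in the download loops above corresponds to
# the 100-requests-per-hour limit mentioned in the comments, expressed as a
# frequency in Hz. A quick sanity check of that conversion:
requests_per_hour = 100
rate_hz = requests_per_hour / 3600.0  # about 0.0278 requests/s; 0.027 stays just under the limit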