def __init__(self):
    """Prepare the application home directory and the xapian store.

    Creates ``self.home`` if missing, then decides whether a full reindex
    is needed: either a NEEDSREINDEX marker file was left behind, or the
    xapian store directory does not exist yet.

    Raises:
        DBError: if the home directory cannot be created.
    """
    self.home = utils.get_home()
    try:
        os.stat(self.home)
    except OSError:
        # Home does not exist (or is unreadable) -- try to create it.
        try:
            os.mkdir(self.home)
        except OSError:
            # Py3-compatible raise (the original used the Py2-only
            # "raise DBError, msg" statement form).
            raise DBError("error creating directories: " + self.home)
    self._storeDir = os.path.join(self.home, "xapian_store")
    self.needs_index = False
    needs_reindex_flag = os.path.join(self._storeDir, "NEEDSREINDEX")
    # If the marker file exists, a previous run requested a reindex.
    if os.path.exists(needs_reindex_flag):
        self.needs_index = True
        try:
            os.remove(needs_reindex_flag)
        except OSError:
            logging.error("Error removing NEEDSREINDEX... check permisions inside %s" % self.home)
    if not os.path.exists(self._storeDir):
        os.mkdir(self._storeDir)
        self.needs_index = True
    self._quitting = False
    self._indexing = False
def set_media_dir(self, new_dir):
    """Set a new media directory.

    Returns (None, None) on no-op (new dir equals current one), otherwise
    (old_dir, remapped_dir) so db and player can be remapped to the new
    location.  When symlinks are available, the standard location
    ``~/media`` is kept as a symlink pointing at the real media dir.

    Raises:
        NoDir: if *new_dir* is not accessible with rwx permissions.
    """
    old_dir = self._media_dir
    if new_dir == old_dir:
        return None, None
    std_loc = os.path.join(utils.get_home(), 'media')
    # FIX: the flags must be OR'd together.  The original used '&':
    # os.F_OK is 0, so "F_OK & R_OK & W_OK & X_OK" == 0 and os.access()
    # only tested existence, never permissions.
    if not os.access(new_dir, os.F_OK | os.R_OK | os.W_OK | os.X_OK):
        raise NoDir("insufficient permissions to access %s" % new_dir)
    # Feature-probe for symlink support (not available everywhere).
    HAVE_SYMLINK = hasattr(os, "symlink")
    if HAVE_SYMLINK:
        if old_dir == std_loc:
            # Moving away from the standard location: move files out,
            # then turn std_loc into a symlink to the new dir.
            self._move_contents(std_loc, new_dir)
            self._media_dir = new_dir
            if os.path.islink(std_loc):
                os.remove(std_loc)
            else:
                os.rmdir(std_loc)
            os.symlink(new_dir, std_loc)
            return old_dir, std_loc
        elif new_dir == std_loc:
            # Moving back to the standard location: replace the
            # symlink (or recreate the dir) and move files in.
            self._media_dir = std_loc
            if os.path.islink(std_loc):
                os.remove(std_loc)
            else:
                os.rmdir(std_loc)
                os.mkdir(std_loc)
            self._move_contents(old_dir, std_loc)
            return old_dir, std_loc
        else:
            # Custom dir -> other custom dir: move contents and repoint
            # the std_loc symlink at the new dir.
            self._move_contents(old_dir, new_dir)
            self._media_dir = new_dir
            if os.path.islink(std_loc):
                os.remove(std_loc)
            else:
                os.rmdir(std_loc)
            os.symlink(new_dir, std_loc)
            return old_dir, std_loc
    else:
        # No symlink support: just move the files.
        self._move_contents(old_dir, new_dir)
        self._media_dir = new_dir
        return old_dir, new_dir
    # (the original had an unreachable "return None, None" here; removed)
def main():
    """Read every configured sensor once and store the measures.

    Expected to be called once per minute (e.g. from cron).  Local sensors
    are polled directly; ``remote:<host>`` sensors are copied from another
    PostgreSQL server; a local camera takes a picture every 15 minutes.
    """
    main_call_epoch = utils.epoch_now()
    log.info(" - Starting on " + socket.gethostname() +
             " ‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾")
    temp = 15  # default value for later calculation of speed of sound
    first_remote = True
    local_camera_name = None
    conn = db_module.get_conn()
    curs = conn.cursor()
    # name | priority | sensor_label | decimals | cumulative | unit | consolidated | sensor_type
    read_sensors_query = \
        "SELECT name, sensor_label, decimals, cumulative, unit," \
        " consolidated, sensor_type, filepath_last, filepath_data " \
        "FROM sensors " \
        " LEFT JOIN captures ON sensors.name = captures.sensor_name; "
    curs.execute(read_sensors_query)
    sensors = curs.fetchall()
    values = []
    for sensor in sensors:
        (sensor_name, sensor_label, decimals, cumulative, unit, consolidated,
         sensor_type, filepath_last, filepath_data) = sensor
        sensor_name = sensor_name.decode('ascii')
        measure = None
        # Below ifs to be replaced by function blocks and dictionary as described
        # at https://stackoverflow.com/questions/11479816/what-is-the-python-equivalent-for-a-case-switch-statement
        if sensor_type == "ignored":
            log.info("Sensor '" + sensor_name + "' -> -ignoring-")
            # measure = None  # kept as None
        elif sensor_type == "CPU_temp":
            measure = func.value_cpu_temp()
            # start_cpu_fan = __import__("start_cpu_fan")
            # if measure > 40: start_cpu_fan.start_cpu_fan()
            # if measure < 20: start_cpu_fan.stop_cpu_fan()
        elif sensor_type == "temperature":
            # Remember the external temperature: it is reused below to
            # compute the speed of sound for the distance sensor.
            temp = func.value_ext_temperature()
            measure = temp
        elif sensor_type == "pressure":
            measure = func.value_sealevelpressure()
        elif sensor_type == "luminosity":
            measure = func.value_luminosity()
        elif sensor_type == "distance":
            # Calculate speed (celerity) of sound from the temperature:
            measure = hc_sr04_lib_test.measure_distance(temp)
        elif sensor_type == "camera":
            # Picture is taken after the loop, only every 15 minutes.
            local_camera_name = sensor_name
        elif sensor_type.startswith("remote:"):
            # Another remote PostgreSQL contains the measures. Those not
            # "synchronised" will be copied.
            # if having ~Connection refused~ ~port 5432?~ issues then
            #  -> On remote server, in file postgresql.conf, set "listen_addresses = '*'"
            #  -> in file pg_hba.conf, add "host meteo pi 192.168.0.94/32 trust"
            #     (those 2 configuration files are usually in /etc/postgresql/11/main/ )
            # fixme the upper "trust" is not secured and should look for a decent unix authentication later...
            if first_remote:
                sleep(5)  # give time for very last value of remote sensors to be updated
                first_remote = False
            remote_server = sensor_type[7:]
            log.info("Sensor '" + sensor_name + "' -> reading values from " +
                     remote_server + "...")
            if unit == "picture":
                rsync_pictures_from_server(sensor, remote_server, conn)
            else:
                copy_values_from_server(sensor, remote_server, conn)
        else:
            log.info("Sensor '" + sensor_name + "' -> ERROR! Unable to interpret '" +
                     str(sensor_type) + "' as a sensor type! Skipped...")
            # measure = None  # kept as None
        if measure is not None:
            log.info("Sensor '" + sensor_name + "' -> " + str(measure))
            values.append("(" + str(utils.epoch_now()) + "," +
                          str(func.round_value_decimals(measure, decimals)) +
                          ", '" + sensor_name + "')")
        else:
            log.info("Sensor '" + sensor_name + "' -> No value")
    # end of for-loop on each sensor

    # Add values to database.
    # FIX: previously an empty 'values' list produced the malformed SQL
    # "INSERT ... VALUES ;", which failed on every run and was then queued
    # into failed_request for endless retries.  Skip the insert instead.
    if values:
        sql_insert = "INSERT INTO raw_measures(epochtimestamp, measure, sensor) VALUES " \
            + ",".join(values) + ";"
        log.info(str(sql_insert))
        try:
            curs.execute(sql_insert)
        except Exception as err:
            log.error("An Error occurred when trying to execute the upper request!")
            log.error(err)
            # Persist the failed statement so it can be replayed later.
            failed_request.append(sql_insert)
        finally:
            conn.commit()

    if local_camera_name is not None:
        is_camera_mult = is_multiple(main_call_epoch, 900)  # is True every 900 s / 15 min
        if is_camera_mult:
            log.info("Once every 15 minutes: Capture picture")
            picture_name = func.take_picture(local_camera_name)
            sql_update = \
                "UPDATE captures " + \
                "SET filepath_last = '" + picture_name + "' "
            data_limit = 512000  # 500 kiB. Below this value, image is considered without data (mostly black)
            destination_file = utils.get_home() + "/meteo/captures/" + picture_name
            if pathlib.Path(destination_file).stat().st_size > data_limit:
                sql_update = sql_update + " , filepath_data = '" + picture_name + "' "
            sql_update = sql_update + \
                "WHERE sensor_name = '" + local_camera_name + "';"
            # log.info(sql_update)  # for debugging...
            # NOTE(review): a standard DB-API cursor.execute() returns None,
            # in which case "result == 0" is never True and the INSERT
            # fallback below never runs; curs.rowcount may be intended.
            # TODO confirm against db_module's cursor implementation.
            result = curs.execute(sql_update)
            if result == 0:
                sql_insert = \
                    "INSERT INTO captures (sensor_name, filepath_last, filepath_data) " + \
                    "VALUES ('" + local_camera_name + "', '" + picture_name + "', '" + picture_name + "')"
                # log.info(sql_insert)  # for debugging...
                curs.execute(sql_insert)
            log.info("\tUpdated value for camera " + local_camera_name + "; committing...")
            conn.commit()

    failed_request.fix_previously_failed_requests(conn)

    if CONSOLIDATE_VAL:
        for sensor in sensors:
            # FIX: rows carry 9 columns (see read_sensors_query above); the
            # previous 4-element unpacking raised ValueError whenever this
            # block was enabled, and sensor_name was undecoded bytes.
            (sensor_name, _sensor_label, decimals, _cumulative, _unit,
             consolidated, sensor_type, _filepath_last, _filepath_data) = sensor
            sensor_name = sensor_name.decode('ascii')
            period = int(consolidated)
            sql_req = "SELECT MAX(epochtimestamp) FROM raw_measures WHERE sensor = '" + sensor_name + "';"
            curs.execute(sql_req)
            max_epoch_from_raw = curs.fetchall()[0][0]
            sql_req = "SELECT MAX(maxepochtime) FROM consolidated_measures WHERE sensor = '" + sensor_name + "';"
            curs.execute(sql_req)
            max_epoch_from_consolidated = curs.fetchall()[0][0]
            # Consolidate only when at least one full period of raw data
            # has accumulated past the last consolidated timestamp.
            if (max_epoch_from_consolidated is None) or (
                    max_epoch_from_consolidated + period) < max_epoch_from_raw:
                consolidate_from_raw(curs, sensor, period)

    # log.debug("closing cursor...")
    curs.close()
    # Close DB
    # log.debug("closing db...")
    conn.close()

    is_daily_run = is_multiple(main_call_epoch, 86400)  # 60x60x24 s = 1 day
    if is_daily_run:
        log.info("Midnight run: trigger the pictures sorting...")
        # launch_daily_jobs(main_call_epoch)
    else:
        log.debug("(not midnight run)")
    log.info(utils.iso_timestamp_now() + " - Terminates " + "_" * 47)
def rsync_pictures_from_server(local_sensor, remote_server_src, conn_local_dest):
    """Copy the latest picture of *local_sensor* from a remote server.

    Reads the remote 'captures' row; if a newer picture exists, fetches it
    via scp (key-based ssh auth), updates the local 'captures' row, then
    runs a catch-up rsync of the whole picture folder.

    Args:
        local_sensor: full 9-column row from the local sensors query.
        remote_server_src: hostname of the remote PostgreSQL/file server.
        conn_local_dest: open connection to the LOCAL database.
    """
    (sensor_name, sensor_label_dest, decimals_dest, cumulative_dest, unit_dest,
     consolidated_dest, sensor_type_dest, filepath_last_local,
     filepath_data_local) = local_sensor
    sensor_name = sensor_name.decode('ascii')
    log.info("\tpicture sensor '" + sensor_name + "'")
    try:
        conn_remote_src = db_module.get_conn(
            host=remote_server_src)  # Connect to REMOTE PostgreSQL DB
    except Exception as err:
        log.error("\tException: {0}".format(err))
        return
    try:
        curs_src = conn_remote_src.cursor()
        read_filepath_query = "SELECT filepath_last, filepath_data" \
                              " FROM captures" \
                              " WHERE sensor_name='" + sensor_name + "';"
        # log.debug("\tread_filepath_query = '" + read_filepath_query + "'")  # for debugging purpose
        curs_src.execute(read_filepath_query)
        rows = curs_src.fetchall()
    finally:
        # FIX: the remote connection was previously never closed (leak).
        conn_remote_src.close()
    # FIX: guard against a missing 'captures' row on the remote, which
    # previously raised IndexError on fetchall()[0].
    if not rows:
        log.error("\tNo 'captures' row found on remote for '" + sensor_name + "'.")
        return
    (filepath_last_src, filepath_data_src) = rows[0]
    log.debug("\t(..._last_src , ..._data_src ) = (\t'" + filepath_last_src +
              "',\t'" + filepath_data_src + "')")
    log.debug("\t(..._last_local, ..._data_local) = (\t'" + filepath_last_local +
              "',\t'" + filepath_data_local + "')")
    if filepath_last_src == filepath_last_local:
        log.info("\tNo new picture detected for '" + sensor_name + "'. rsync not required.")
        return
    # else: filepath_last_src != filepath_last_local:
    log.info("\tNew picture has been detected. Starting copying from '" +
             remote_server_src + "' to local...")
    config = utils.get_config()
    rsync_user = config.get('remote:' + remote_server_src, 'rsync_user', fallback="web")
    ssh_port = config.getint('remote:' + remote_server_src, 'ssh_port', fallback=22)
    # rsync connection relies on ssh connection. No password authentication is implemented here.
    # Authentication is done by keys:
    # Public key ~/.ssh/id_rsa.pub from local user should be added into
    # ~/.ssh/authorized_keys of remote source.
    # remote_server_src_regex = "\"[r]sync(.*)" + remote_server_src.replace(".", "\.") + "\""
    # rsync_already_running = subprocess.call(["ps", "-ef", "|", "grep", "-E", remote_server_src_regex], shell=True)
    # fixme Always return 0. To be fixed...
    # if rsync_already_running == 0:
    #     log.info("\tAnother rsync process is detected still running for regex " + remote_server_src_regex)
    #     return
    log.info("\tStarting file copy process (scp)...")
    destination_file = utils.get_home() + "/meteo/captures/" + filepath_last_src
    create_folders_if_required(destination_file)
    command = [
        "scp", "-P", str(ssh_port),
        rsync_user + "@" + remote_server_src + ":/home/pi/meteo/captures/" + filepath_last_src,
        destination_file
    ]
    cp_return_code = subprocess.call(command)
    log.info("\tscp terminated with return code " + str(cp_return_code))
    if cp_return_code == 0:
        log.info("\tUpdating local db with values from remote...")
        curs_dest = conn_local_dest.cursor()
        update_last_pictures_values = "UPDATE captures" \
            " SET filepath_last = '" + filepath_last_src + "'," \
            " filepath_data = '" + filepath_data_src + "'" \
            " WHERE sensor_name='" + sensor_name + "';"
        curs_dest.execute(update_last_pictures_values)
        conn_local_dest.commit()
        log.info("\tLocal db updated with ('" + filepath_last_src + "', '" +
                 filepath_data_src + "').")
    else:
        log.info("\tAn error occured, command was: " + str(command))
        # For more details, add verbosity to command with option '-v'
        # Return codes can be seen there: https://support.microfocus.com/kb/doc.php?id=7021696
    log.info("\tStarting rsync process..."
             )  # if remote remained offline, previous pictures may miss...
    rsync_return_code = subprocess.call([
        "rsync",
        "--recursive",  # -[r]avz
        "--archive",  # -r[a]vz
        "--verbose",  # -ra[v]z
        "--compress",  # -rav[z]
        "--size-only",
        "--perms",  # preserve permissions
        "--rsh", "ssh -p " + str(ssh_port),
        # "--time-limit", "1",  # not working on some distros (exemple: Synology NAS)
        "--timeout", "60",  # if network is not good, we prefer exit quickly and let next execution finishing.
        rsync_user + "@" + remote_server_src + ":/home/pi/meteo/captures/" + sensor_name + "/",
        utils.get_home() + "/meteo/captures/" + sensor_name + "/"
    ])
    if rsync_return_code != 0:
        log.error("\trsync returned code '" + str(rsync_return_code) +
                  "' different from success '0'...")
    else:
        log.info("\tLocal and remote folder are successfully synchronised.")
        # TODO set new values 'folder_synchronised' in table 'captures'
    return
import failed_request
import sensors_functions as func
from time import sleep

import utils
import hc_sr04_lib_test
import home_web.db_module as db_module

# Remote-debugging hook, kept for reference:
# path_to_pydevd = "home/pi/.local/bin/pydevd"  # found by 'find / -name pydevd'
# sys.path.append(path_to_pydevd)
# debugger_ip_address = "192.168.0.63"
# pydevd.settrace(debugger_ip_address, port=5678)

import logging

logging.basicConfig(filename=utils.get_home() + "/meteo/logfile.log",
                    level=logging.DEBUG,
                    format='%(asctime)s\t%(levelname)s\t%(name)s\t%(message)s')
log = logging.getLogger("periodical_sensor_reading.py")

# Feature flag: when True, raw measures get consolidated into
# 'consolidated_measures' at the end of each main() run.
CONSOLIDATE_VAL = False


def is_multiple(value, multiple):
    """Tell whether *value* (an epoch, in seconds) lands on a *multiple*-
    second boundary, with a 30 s tolerance.

    The 30 s margin matches the expected once-per-minute scheduling of the
    script, so a run triggered a few seconds late still matches its slot.
    """
    margin = 30  # 30 s is used as default (script should be launched once per minute)
    return int(value / margin) % (multiple / margin) == 0
#!/usr/bin/env python3 # -*- coding: utf-8 -*- import sys from math import pi import home_web.db_module as db_module import logging import sensors_functions import utils logging.basicConfig( filename=utils.get_home() + "/meteo/logfile.log", level=logging.DEBUG, format='%(asctime)s\t%(levelname)s\t%(name)s\t%(message)s') log = logging.getLogger("hc_sr04_lib_test.py") # todo Below variables should be stored in config file ~/.config/meteo.conf TRIGGER_PIN = 19 ECHO_PIN = 13 MAX_DISTANCE = 400 # Get really noisy over 3~4 meters UNIT = u'cm' def volume_water_tank(distance_cm): """Return volume in Litre from the height in centimeter, for a circular/conical water tank. Sensor measures the distance from a certain height to the surface of water, looking down.""" # Constants for the tank: r_max = 6.5 # Internal radius at very top of the tank, in dm d_min = 1.5 # Distance from the sensor to the very top of the tank, in dm r_base = 5.2 # Internal radius at the very bottom of the tank (base), in dm
def post_show_init(self):
    """Finish view initialization after the widget is shown.

    Wires HTML-widget callbacks, then — if the widget supports AJAX —
    starts a local TCP server on a free port (probing upward from
    PlanetView.PORT) in a daemon thread and builds AJAX-aware entry
    formatters; otherwise builds basic-progress formatters.
    """
    self._html_widget.post_show_init(self._scrolled_window)
    self._html_widget.connect('link-message', self.__link_message_cb)
    self._html_widget.connect('open-uri', self.__open_uri_cb)
    self._USING_AJAX = self._html_widget.is_ajax_ok()
    if self._USING_AJAX:
        logging.info("initializing ajax server")
        # Imported lazily: only needed on the AJAX path.
        import threading
        from ajax import EntryInfoServer, MyTCPServer
        store_location = os.path.join(
            self._db.get_setting(ptvDB.STRING,
                                 '/apps/penguintv/media_storage_location',
                                 ""),
            os.path.join(utils.get_home(), "media", "images"))
        # Port probing: try PlanetView.PORT, bump it on any bind failure,
        # and give up at 8050.
        # NOTE(review): if 8050 is reached, the loop breaks without ever
        # creating self._update_server, and the thread start below would
        # fail — presumably the starting PORT is far enough below 8050
        # that this never happens in practice; confirm.
        while True:
            try:
                if PlanetView.PORT == 8050:
                    break
                self._update_server = MyTCPServer.MyTCPServer(
                    ('', PlanetView.PORT), EntryInfoServer.EntryInfoServer,
                    store_location)
                break
            except:
                PlanetView.PORT += 1
                if PlanetView.PORT == 8050:
                    logging.warning(
                        "tried a lot of ports without success. Problem?")
        # Daemon thread so the server dies with the application.
        t = threading.Thread(None, self._update_server.serve_forever,
                             name="PTV AJAX Server Thread")
        t.setDaemon(True)
        t.start()
        self._ajax_url = "http://localhost:" + str(
            PlanetView.PORT) + "/" + self._update_server.get_key()
        self._entry_formatter = EntryFormatter.EntryFormatter(
            self._mm, False, True, ajax_url=self._ajax_url,
            renderer=self._renderer)
        self._search_formatter = EntryFormatter.EntryFormatter(
            self._mm, True, True, ajax_url=self._ajax_url,
            renderer=self._renderer)
    else:
        logging.info("not using ajax")
        self._ajax_url = None
        self._entry_formatter = EntryFormatter.EntryFormatter(
            self._mm, False, True, basic_progress=True,
            renderer=self._renderer)
        self._search_formatter = EntryFormatter.EntryFormatter(
            self._mm, True, True, basic_progress=True,
            renderer=self._renderer)
    self.display_item()
    self._html_dock.show_all()
#!/usr/bin/env python3 # -*- coding: utf-8 -*- import utils import logging logging.basicConfig( filename=utils.get_home() + "/meteo/logfile.log", level=logging.DEBUG, format='%(asctime)s\t%(levelname)s\t%(name)s\t%(message)s') log = logging.getLogger("failed_request.py") request_file = utils.get_home() + "/meteo/failed_request.sql" def append(request): if len(request) == 0: log.error("Dropping an empty line that should not have been there!") return with open(request_file, "a") as fappend: fappend.write(request + "\n") log.debug("Appending request to failed_request.sql: " + request) return def extract_first(): try: with open(request_file, 'r') as fin: data = fin.read().splitlines(True) except FileNotFoundError: log.warning("File '" + str(request_file) + "' was not found => no request to extract, returning None.")
def post_show_init(self):
    """Complete the view setup once the widget hierarchy is visible.

    Connects the HTML-widget signals, then either spins up the local AJAX
    entry-info server (daemon thread, free port probed upward from
    PlanetView.PORT) or falls back to basic-progress formatters.
    """
    self._html_widget.post_show_init(self._scrolled_window)
    self._html_widget.connect('link-message', self.__link_message_cb)
    self._html_widget.connect('open-uri', self.__open_uri_cb)
    self._USING_AJAX = self._html_widget.is_ajax_ok()
    if not self._USING_AJAX:
        # Fallback path: no local server, plain formatters.
        logging.info("not using ajax")
        self._ajax_url = None
        self._entry_formatter = EntryFormatter.EntryFormatter(
            self._mm, False, True, basic_progress=True,
            renderer=self._renderer)
        self._search_formatter = EntryFormatter.EntryFormatter(
            self._mm, True, True, basic_progress=True,
            renderer=self._renderer)
    else:
        logging.info("initializing ajax server")
        # Only needed on this branch, hence the local imports.
        import threading
        from ajax import EntryInfoServer, MyTCPServer
        media_root = self._db.get_setting(
            ptvDB.STRING, '/apps/penguintv/media_storage_location', "")
        store_location = os.path.join(
            media_root, os.path.join(utils.get_home(), "media", "images"))
        # Probe ports upward until the server binds, giving up at 8050.
        while True:
            try:
                if PlanetView.PORT == 8050:
                    break
                self._update_server = MyTCPServer.MyTCPServer(
                    ('', PlanetView.PORT),
                    EntryInfoServer.EntryInfoServer, store_location)
                break
            except:
                PlanetView.PORT += 1
                if PlanetView.PORT == 8050:
                    logging.warning(
                        "tried a lot of ports without success. Problem?")
        server_thread = threading.Thread(
            None, self._update_server.serve_forever,
            name="PTV AJAX Server Thread")
        server_thread.setDaemon(True)  # die together with the app
        server_thread.start()
        self._ajax_url = ("http://localhost:" + str(PlanetView.PORT) + "/"
                          + self._update_server.get_key())
        self._entry_formatter = EntryFormatter.EntryFormatter(
            self._mm, False, True, ajax_url=self._ajax_url,
            renderer=self._renderer)
        self._search_formatter = EntryFormatter.EntryFormatter(
            self._mm, True, True, ajax_url=self._ajax_url,
            renderer=self._renderer)
    self.display_item()
    self._html_dock.show_all()