class Main():
    """Application entry point: loads configuration, connects to S3,
    verifies that the configured bucket is reachable, then runs a sync."""

    def __init__(self, args):
        """Bootstrap the application.

        :param args: parsed command-line arguments (stored for later use).
        """
        self.args = args
        self.config = Config()
        self.config.load('s4config.toml')
        session = boto3.session.Session(**self.config['aws']['credentials'])
        self.s3 = session.resource('s3')
        # Check if bucket exists / is reachable before doing any work
        check = self.check_bucket_access(self.config['aws']['bucket']['name'])
        if check == BucketAccess.NotFound:
            print('Configured bucket does not exist.')
            exit(1)
        elif check == BucketAccess.Forbidden:
            print('Error: do not have permission to access configured bucket')
            print('The following are some known causes of this error:')
            print(' - User does not have access to S3')
            print(' - The bucket is owned by another account')
            exit(1)
        # Sync files
        self.sync()

    def sync(self):
        """Walk the configured sync root and list directories and files.

        Currently only prints the tree; actual upload logic is still TODO.
        """
        for root, dirs, files in os.walk(self.config['sync']['root']['path']):
            # TODO: add code to ignore any s4 configuration files
            print('%s:' % root)
            for dire in dirs:
                print(' d %s' % dire)
            for file in files:
                print(' f %s' % file)

    def check_bucket_access(self, bucketname):
        '''
        Check whether the bucket with the specified name is accessible.

        :param str bucketname: the name of the bucket to check.
        :returns BucketAccess: ``Accessible`` when the HEAD request succeeds,
            otherwise the ``BucketAccess`` member whose value matches the
            HTTP error code (e.g. 404 -> ``NotFound``, 403 -> ``Forbidden``).
        :raises botocore.exceptions.ClientError: when the error code does not
            map onto any ``BucketAccess`` member.
        '''
        try:
            self.s3.meta.client.head_bucket(Bucket=bucketname)
            return BucketAccess.Accessible
        except botocore.exceptions.ClientError as err:
            error_code = int(err.response['Error']['Code'])
            try:
                # BUGFIX: the original did `error_code in BucketAccess`, which
                # raises TypeError for a plain int on Python < 3.12, and then
                # returned the raw int rather than an enum member (while the
                # docstring claimed a bool). Convert the HTTP code to its
                # BucketAccess member so callers compare enum-to-enum.
                return BucketAccess(error_code)
            except ValueError:
                raise err
class MainWin(sqbrowser_xrc.xrcfrmMain):
    """Represents the main query browser window.

    Hosts the controls for picking a SQL/JSON query file and a data source,
    polls the query file for changes on a timer, and shows the results in a
    list control plus a text log.
    """

    # Path of the persisted application configuration, next to this module.
    configPath = os.path.join(os.path.dirname(__file__), "app.cfg")

    def __init__(self):
        """Build the window: bind control events, create the file-check timer
        and status bar, and restore the last used paths from the config.
        """
        sqbrowser_xrc.xrcfrmMain.__init__(self, None)
        self.SetSize((650, 500))
        # setup the control events
        self.btOpenSqlFile.Bind(wx.EVT_BUTTON, self.btOpenSqlFile_Click)
        self.btOpenSqlDb.Bind(wx.EVT_BUTTON, self.btOpenSqlDb_Click)
        self.btExecuteFile.Bind(wx.EVT_BUTTON, self.btExecuteFile_Click)
        self.btCommit.Bind(wx.EVT_BUTTON, self.btCommit_Click)
        # setup timers
        self._interval = 2  # seconds between file-change polls
        self.tmrCheckFile = wx.Timer(self)
        self.Bind(wx.EVT_TIMER, self.tmrCheckFile_Tick, self.tmrCheckFile)
        # file operation vars
        self.file = None
        self._lastCheckTime = None  # wx.DateTime of the last observed change
        # status bar
        self.CreateStatusBar()
        self.SetStatusText("SQBrowser")
        # setup sqlengine (created lazily when a SQL query is executed)
        self._sqlEngine = None
        # misc setup
        self.config = Config(self.configPath)
        self.config.load()
        self.StartTimer()
        # restore the last used db / query file paths, if any
        if self.config.db_path:
            self.txtSqlDb.SetValue(self.config.db_path)
        if self.config.src_path:
            self.txtSqlFile.SetValue(self.config.src_path)

    ## EVENTS METHODS

    def OnClose(self, evt):
        """Disconnect the SQL engine before the window closes.

        @remark: overriden from base class
        """
        if self._sqlEngine:
            self._sqlEngine.disconnect()
        # use default close action
        evt.Skip()

    def btOpenSqlFile_Click(self, evt):
        """Let the user pick the query file and persist the choice."""
        path = self._openFileDialog(msg="Select the query file")
        if not path:
            return
        self.config.src_path = path
        self.config.save()
        self.txtSqlFile.SetValue(path)

    def btOpenSqlDb_Click(self, evt):
        """Let the user pick the data source and persist the choice."""
        path = self._openFileDialog(msg="Select the data source")
        if not path:
            return
        self._clearLog()
        self.config.db_path = path
        self.config.save()
        self.txtSqlDb.SetValue(path)

    def tmrCheckFile_Tick(self, evt):
        """Timer tick: when monitoring is enabled, re-run the query file.

        The timer is stopped while the query runs so ticks do not pile up.
        """
        if self.chkMonitorFile.IsChecked():
            self.SetStatusText("Monitoring file...")
        else:
            self.SetStatusText("SQBrowser")
            return
        self.StopTimer()
        self._executeQueryFile()
        self.StartTimer()

    def btExecuteFile_Click(self, evt):
        """Run the query file immediately, even if it has not changed."""
        self._executeQueryFile(force_execute=True)

    def btCommit_Click(self, evt):
        """Commit pending changes on the SQL engine, if one is connected."""
        if not self._sqlEngine:
            return
        self._sqlEngine.commit()
        self._addLog("Changes committed...")

    ## MISC METHODS

    def _executeQueryFile(self, force_execute=False):
        """Runs the query specified by the opened query file.

        Skips execution when the file is unchanged unless force_execute is
        True. Queries starting with "$." are dispatched to the JSON runner,
        everything else goes through the SQL engine.
        """
        path = self.txtSqlFile.GetValue()
        if not os.path.isfile(path):
            self._addLog("Not a file: " + path)
            return
        dbPath = self.txtSqlDb.GetValue()
        hasChanged = self._checkFile(path)
        now = wx.DateTime().Now().Format()
        if hasChanged:
            self._addLogSplit()
            self._addLog("File Changed: " + now)
        elif not hasChanged:
            if not force_execute:
                return
        # get contents of query file
        contents = self._readContents(path)
        # get the query
        query = self._parseQuery(contents)
        # setup the engine, determine the query type
        isJson = query.lstrip()[:2] == "$."
        if isJson:
            pass  # JSON queries need no engine; a jsondb is opened per query
        else:
            self._sqlEngine = sqlbase.SqlBase(dbPath)
            if not self._sqlEngine.connect():
                self._addLog("Unable to connect to: " + dbPath)
                return
        self._addLogSplit()
        # exec all queries, semi-colon separated
        queries = [query]
        if EXEC_ALL(query) is not None:
            queries = query.split(";")
            # beseech user
            msg = "Run %d queries?" % len(queries)
            if wx.MessageBox(message=msg, style=wx.YES | wx.NO) == wx.NO:
                self._addLog("Multi-query operation aborted.")
                return
        self._addLog("Running %d queries..." % len(queries))
        if isJson:
            self._runJson(queries, dbPath)
        else:
            self._runSql(queries)

    def _runJson(self, queries, jsonPath):
        """Run each query against the JSON db at jsonPath, log the results."""
        for query in queries:
            db = jsondb.from_file(jsonPath)
            results = db.query(query).values()
            # output to log, for now
            self._addLog(pprint.pformat(results))

    def _runSql(self, queries):
        """Run each SQL query on the engine; display the last result set."""
        # execute the queries
        results = {}
        for query in queries:
            self._addLog(query.strip())
            results = self._sqlEngine.execute_query(query)
            if not results:
                # stop at the first failing query
                self._addLog("SQL ERROR: " + self._sqlEngine.last_error)
                return
            self._addLog(results["message"])
        ## display the result data in the table
        # insert columns
        self._rebuildColumns(results["columns"])
        # insert rows
        for row in results["rows"]:
            self._addRow(row)
        self._resizeColumns(results["columns"])

    def _parseQuery(self, src):
        """Parses the src to only show the target query

        @remark: it stops at the first 'query stop' placeholder
        @return string the query to execute
        """
        lines = src.split("\n")
        # determine whether any start marker appears at all
        hasQueryStart = False
        for start in QUERY_START:
            if src.find(start) >= 0:
                hasQueryStart = True
                break
        doStart = False
        query = ""
        for line in lines:
            sLine = line.strip()
            # if flag seen, start grabbing
            if not hasQueryStart or doStart:
                # if no starter, then grab until eof
                query += line + "\n"
            if sLine in QUERY_START:
                # start grabbing from next line on
                doStart = True
            if not hasQueryStart or doStart:
                if sLine in QUERY_STOP:
                    # done, return query, remove the query stop
                    query = query.replace(sLine, "")
                    break
        return query.strip()

    def _addLogSplit(self):
        """Adds an output separator to the log
        """
        count = 70
        self._addLog("-" * count)

    def _addLog(self, msg):
        """Adds a line of text to the output console
        """
        self.txtMessages.AppendText(msg + "\n")

    def _clearLog(self):
        """Clears the output log
        """
        self.txtMessages.Clear()

    def StartTimer(self):
        """(Re)start the file-check timer with the configured interval."""
        self.StopTimer()
        self.tmrCheckFile.Start(self._interval * 1000)

    def StopTimer(self):
        """Stop the file-check timer."""
        self.tmrCheckFile.Stop()

    def _openFileDialog(self, msg="Select the file", path=""):
        """Displays the open file dialog

        Returns the selected path, or an empty string when cancelled.
        """
        path = wx.FileSelector(message=msg, default_path=path, parent=self)
        return path

    def _rebuildColumns(self, columns):
        """Rebuilds the list ctrl

        @remarks: Deletes all columns and rows
        """
        # remove all cols and rows
        self.lstResults.ClearAll()
        # insert the columns into the header
        for col, text in enumerate(columns):
            self.lstResults.InsertColumn(col, text)

    def _resizeColumns(self, columns):
        """Resizes the columns
        """
        # only autosize when the column count is small enough to be cheap
        doAutoSize = len(columns) <= AUTOSIZE_COLUMNS
        # insert the columns into the header
        if doAutoSize:
            for col, text in enumerate(columns):
                self.lstResults.SetColumnWidth(col, wx.LIST_AUTOSIZE)

    def _addRow(self, row):
        """Append one result row to the list control."""
        count = self.lstResults.GetItemCount()
        idx = 0
        for col, col_value in enumerate(row):
            if col == 0:
                # the first cell creates the row (appended at the end)
                idx = self.lstResults.InsertStringItem(sys.maxint, str(count))
            self.lstResults.SetStringItem(idx, col, unicode(col_value))

    ## FILE CHECK METHODS

    def _checkFile(self, filePath):
        """Checks the modification date for the specified file path

        Returns True when the file was modified after the last check time.
        """
        if not os.path.isfile(filePath):
            return False
        hasFileChanged = False
        # get file info
        fileStats = os.stat(filePath)
        lastMod = time.localtime(fileStats[stat.ST_MTIME])
        # create a dictionary to hold file info
        file_info = {
            "fname": filePath,
            "fsize": fileStats[stat.ST_SIZE],
            # last modified
            "f_lm": time.strftime("%m/%d/%Y %I:%M:%S %p", lastMod),
            # last accessed
            "f_la": time.strftime("%m/%d/%Y %I:%M:%S %p",
                                  time.localtime(fileStats[stat.ST_ATIME])),
            # creation time
            "f_ct": time.strftime("%m/%d/%Y %I:%M:%S %p",
                                  time.localtime(fileStats[stat.ST_CTIME])),
        }
        # get the datetime object of the file modification time
        lastModTime = wx.DateTime()
        lastModTime.ParseDateTime(file_info["f_lm"])
        if self._lastCheckTime is None:
            self._lastCheckTime = wx.DateTime().Now()
        # make sure it is after the last checked time
        if lastModTime.IsLaterThan(self._lastCheckTime):
            hasFileChanged = True
            # get the last mod time
            self._lastCheckTime = lastModTime
        return hasFileChanged

    def _readContents(self, path):
        """Return the file contents decoded as UTF-8, or None if not a file."""
        if not os.path.isfile(path):
            return None
        # get the file contents
        with codecs.open(path, "r", encoding="utf-8") as fp:
            contents = fp.read()
        return contents
class Core(object):
    """Application core for SensorsHub.

    Sets up logging, creates the SQLite schema, loads configuration,
    language files and the updater, wires up the Jinja2 template engine
    and starts the CherryPy web server (blocking).
    """

    VERSION = 0.06

    def __init__(self):
        # Configure root logger with both a per-run file and console output
        logFormatter = logging.Formatter(fmt="[%(asctime)-15s][%(levelname)s] %(message)s",
                                         datefmt='%d.%m.%Y %H:%M:%S')
        log = logging.getLogger()
        log.setLevel(logging.DEBUG)
        # Pick the first unused logs/sensorshub_<date>_<n>.log file name
        fileName = "logs/" + "sensorshub_{}_".format(datetime.datetime.now().strftime("%d-%m-%Y")) + "{}.log"
        fileNum = 0
        if not os.path.isdir("logs"):
            os.mkdir("logs")
        while os.path.isfile(fileName.format(fileNum)):
            fileNum += 1
        fileHandler = logging.FileHandler(fileName.format(fileNum))
        fileHandler.setFormatter(logFormatter)
        log.addHandler(fileHandler)
        consoleHandler = logging.StreamHandler(sys.stdout)
        consoleHandler.setFormatter(logFormatter)
        log.addHandler(consoleHandler)

        logging.info("Starting SensorsHub version {}".format(self.VERSION))

        # Create database and tables
        with sqlite3.connect("db.sqlite") as conn:
            # SENSORS: table for sensors, one row per sensor.
            conn.execute(
                "CREATE TABLE IF NOT EXISTS sensors("
                "sid INTEGER PRIMARY KEY AUTOINCREMENT, "  # Sensor ID, must be unique
                "token TEXT, "        # Generated string for authentication
                "title TEXT, "        # Title of the sensor, e.g. Outside
                "description TEXT, "  # Description, e.g. ESP8266 temperature sensor
                "updated INTEGER, "   # Timestamp updated when new reading is received
                "status INTEGER DEFAULT 0"  # Current status (enabled, disabled)
                ")")
            # FIELDS: table for per-sensor fields.
            # BUGFIX: the original implicit string concatenation was missing
            # commas after "parent INTEGER", "type INTEGER" and "icon TEXT",
            # which silently merged those columns into one bogus column
            # definition, so the type/updated/unit columns were never created.
            conn.execute(
                "CREATE TABLE IF NOT EXISTS fields("
                "fid INTEGER PRIMARY KEY AUTOINCREMENT, "  # Field ID, must be unique
                "sid INTEGER, "        # Sensor ID to which this field belongs
                "parent INTEGER, "     # Parent fid, default None
                "type INTEGER, "       # Field type
                "updated INTEGER, "    # Updated time in timestamp format
                "value FLOAT, "        # Current field value
                "name TEXT, "          # Name of the field, e.g. temperature
                "display_name TEXT, "  # Human friendly name, e.g. Temperature
                "color TEXT, "         # Color (HEX) without hashtag, e.g. FF00AA
                "icon TEXT, "          # Font awesome icon, e.g. fa-sun-o
                "unit TEXT)")          # Unit of the field, e.g. °C
            # READINGS: table for readings; each reading references a sensor
            # (sid) and a field (fid), with a timestamp and the new value.
            conn.execute(
                """CREATE TABLE IF NOT EXISTS readings(sid INTEGER, fid INTEGER, updated INT, value FLOAT)""")
            # ACCOUNTS: uid / session token / unique login / hashed password /
            # last-login timestamp / email.
            conn.execute(
                """CREATE TABLE IF NOT EXISTS accounts(uid INTEGER PRIMARY KEY, session TEXT, user TEXT UNIQUE , password TEXT, lastlogin INTEGER, email TEXT)""")

        # Load config
        self.config = Config()
        self.config.load()

        # Load lang
        self.lang = Lang(self)
        self.lang.load()

        # Load updater; auto-update only when "update" was passed on the CLI
        self.updater = Updater(self)
        if self.updater.check_updates():
            if "update" in self.config.args:
                logging.info("Starting auto update")
                self.updater.update()
        else:
            logging.info("No updates available")

        # Create and read sensors from database
        self.sensors = Sensors()
        self.sensors.load()

        # Create and load accounts
        self.accounts = Accounts(self)

        # Load statistics
        self.statistics = Statistics(self)

        # Configure web template engine
        env = Environment(loader=FileSystemLoader('templates'))
        env.filters["to_json"] = lambda value: json.dumps(value)

        def format_datetime(value, format="%d.%m.%Y %H:%M"):
            """Template filter: render a timestamp or datetime; None -> "Never"."""
            if value is None:  # identity comparison (was `== None`)
                return "Never"
            else:
                try:
                    return datetime.datetime.fromtimestamp(value).strftime(format)
                except TypeError:
                    # value is already a datetime-like object
                    return value.strftime(format)

        env.filters["strftime"] = format_datetime

        # Configure web server
        cherrypy_config = {
            "server.socket_port": self.config.get("port"),
            "server.socket_host": self.config.get("host"),
            "checker.check_skipped_app_config": False,
            "log.screen": False,
            "log.access_file": '',
            "log.error_file": ''
        }
        cherrypy.config.update(cherrypy_config)
        cherrypy.tree.mount(WebRoot(self, env), "/", {
            "/static": {
                "tools.staticdir.root": os.getcwd(),
                "tools.staticdir.on": True,
                "tools.staticdir.dir": "static"
            }
        })
        cherrypy.tree.mount(WebSettings(self, env), "/settings", {})

        logging.info("Starting up web server at {}:{}".format(cherrypy_config["server.socket_host"],
                                                              cherrypy_config["server.socket_port"]))
        # Disable cherrypy loggers
        logging.getLogger("cherrypy").propagate = False
        #logging.getLogger("cherrypy.error").propagate = False
        logging.getLogger("cherrypy.access").propagate = False

        cherrypy.engine.signals.subscribe()
        cherrypy.engine.start()
        logging.info("Done loading")
        Statistics.snooper(1)
        cherrypy.engine.block()  # blocks until the server shuts down

    def get_client_ip(self):
        """Best-effort client IP from proxy headers.

        Checks X-Forwarded-For, X-Forwarded and Remote-Addr in that order,
        skipping loopback values; returns "0.0.0.0" when nothing usable is
        found.
        """
        headers = cherrypy.request.headers
        if "X-Forwarded-For" in headers and headers["X-Forwarded-For"] != "127.0.0.1":
            return headers["X-Forwarded-For"]
        if "X-Forwarded" in headers and headers["X-Forwarded"] != "127.0.0.1":
            return headers["X-Forwarded"]
        if "Remote-Addr" in headers and headers["Remote-Addr"] != "127.0.0.1":
            return headers["Remote-Addr"]
        return "0.0.0.0"
def setupConfig():
    """Build the application Config backed by config.json.

    The JSON storage is attached unconditionally; the file is only loaded
    when it already exists on disk, so a fresh install starts empty.
    """
    cfg = Config()
    storage = ConfigJson('config.json')
    cfg.setStorage(storage)
    if os.path.exists('config.json'):
        cfg.load()
    return cfg