def main(xmlConfig):
    """Drive the full model-build pipeline described by an XML config.

    xmlConfig -- XML configuration consumed by s.parseXMLConfig.

    Side effects: removes any pre-existing model tree named after the
    configured model, recreates models/<modelName>, then runs packet
    extraction, state-machine, grammar and model builds in sequence.
    """
    # Local imports: only needed for the model-tree cleanup below.
    import os
    import shutil

    SectionPrint("parseXMLconfig")
    c = s.parseXMLConfig(xmlConfig)
    SectionPrint("unpackArchives")
    unpackArchives()
    if (exists(c["modelName"])):
        SectionPrint("rm -r "+c["modelName"])
        # Portable stdlib replacement for subprocess.call(['rm', '-r', ...]);
        # handles both a directory tree and a plain file like `rm -r` does.
        if os.path.isdir(c["modelName"]):
            shutil.rmtree(c["modelName"])
        else:
            os.remove(c["modelName"])
    mkpath(join("models", c["modelName"]))
    SectionPrint("loadSettings "+c["modelName"])
    s.loadSettings(c["modelName"])
    SectionPrint("buildModelStructures")
    buildModelStructure()
    SectionPrint("extractPackets"+c["pcapFilename"])
    extractPackets(c["pcapFilename"], c["protoName"], c["modelName"],
                   c["keyword"], c["fields"], c["dissectorFilename"])
    SectionPrint("buildStateMachine"+c["modelName"])
    buildStateMachine(c["modelName"])
    # setupNS3Model(c["modelName"])
    # buildNS3Grammar(c["modelName"])
    SectionPrint("buildScapyGrammer "+c["modelName"])
    buildScapyGrammar(c["modelName"])
    SectionPrint("buildModels "+c["modelName"])
    buildModels(c["modelName"], c["transLayer"],
                {"remote": c["remote"], "local": c["local"], "gateway": c["gateway"]})
def __init__(self, parent=None, id=-1, title=version.__title__, pos=wx.DefaultPosition, size=(800,600), style=wx.DEFAULT_FRAME_STYLE):
    """Create a Frame instance.

    Builds the status bar, menus and main window, wires move/close/iconize
    and idle events, then restores the saved window position and size from
    the settings store (defaulting to -1 when no value was saved).
    """
    wx.Frame.__init__(self, parent, id, title, pos, size, style)
    settings.loadSettings()
    self.CreateStatusBar()
    self.SetStatusText('Version %s' % self.revision)
    self._createMenus()
    # Tracks whether the frame is currently minimized (see OnIconize).
    self.iconized = False
    self.Bind(wx.EVT_MOVE, self.OnMove)
    self.Bind(wx.EVT_CLOSE, self.OnClose)
    self.Bind(wx.EVT_ICONIZE, self.OnIconize)
    # Handle incoming comms and settings when the UI is idle
    self.Bind(wx.EVT_IDLE, self.OnIdle)
    self.BuildWindow()
    # Load saved location/size settings
    x = settings.get('win.main.pos.x', -1)
    y = settings.get('win.main.pos.y', -1)
    pos = wx.Point(int(x), int(y))
    h = settings.get('win.main.size.h', -1)
    w = settings.get('win.main.size.w', -1)
    # NOTE(review): wx.Size takes (width, height) but (h, w) is passed here —
    # this looks swapped; confirm against the code that saves these keys
    # before changing it (the save path may store them swapped too).
    size = wx.Size(int(h), int(w))
    self.SetSize(size)
    self.Move(pos)
def activateSettings(self):
    """Activate the latex-access settings stored in file.

    Loads the settings file referenced by ``self.filename``, caches the
    braille translator it selects, and activates the configured handlers.
    See settings.py for the full documentation of these calls.
    """
    configPath = os.path.expanduser(self.filename)
    settings.loadSettings(configPath)
    self.nemeth_translator = settings.brailleTableToUse()
    handlers = {
        "braille": self.nemeth_translator,
        "speak": self.speech_translator,
        "preprocessor": self.preprocessor,
    }
    return settings.activateSettings(handlers)
def getUserNameList(): ''' Get a list of users to download comment. Do not return users whose data was already processed. ''' settingsDict = settings.loadSettings() # Get a list of all users that we want to collect comment data. allUsersSet = set() userListFile = open(settingsDict['userListFile'], 'r') for line in userListFile: userName = line.strip() if len(userName) > 1: allUsersSet.add(userName) userListFile.close() # Get a list of users that we have already processed. downloadedUsersSubset = set() for fileName in os.listdir(settingsDict['userCommentsCsvDataDir']): subId = os.path.split(fileName)[1].split('_')[0] downloadedUsersSubset.add(subId) print '%d users in dataset.' % len(allUsersSet) print 'Downloaded comments from %d users.' % len(downloadedUsersSubset) print '%d users remaining.' % (len(allUsersSet) - len(downloadedUsersSubset)) # Return the difference. return allUsersSet.difference(downloadedUsersSubset)
def getUserList(userName): settingsDict = settings.loadSettings() datasetPath = settingsDict['dataset_path'] topStoriesIdList = getTopStoriesIdList() currentStory = 0 collectedUsers = set() while len(collectedUsers) < numberOfUsers and currentStory < len( topStoriesIdList): print 'I have %d users.' % len(collectedUsers) currentStory += 1 storyId = topStoriesIdList[currentStory] commentIdList = getCommentIdList(storyId) if commentIdList is None: continue else: for commentId in commentIdList: userName = getUserName(commentId) if userName is not None: collectedUsers.add(getUserName(commentId)) # Print collected users. outputFilePath = os.path.join(datasetPath, 'user_list.txt') outputFile = open(outputFilePath, 'w') for userName in collectedUsers: outputFile.write(userName + '\n') outputFile.close()
def getUserComments(userName): settingsDict = settings.loadSettings() datasetPath = settingsDict['dataset_path'] jsonPath = os.path.join(datasetPath, settingsDict['json_dir']) # Create directory for dataset if it does not exists. if not os.path.exists(jsonPath): os.makedirs(jsonPath) requestUrl = "https://hacker-news.firebaseio.com/v0/user/%s.json" % ( userName) req = urllib2.Request(requestUrl, None) try: httpResponse = urllib2.urlopen(req, timeout=10) userData = json.load(httpResponse) except: print 'Failed to download user comments.' return if 'submitted' in userData: submittedIdList = userData['submitted'] timestampList = [] for submissionId in submittedIdList: timestamp = getTimestampFromSubmission(submissionId) if timestamp is not None: timestampList.append(timestamp) userData['timestamps'] = timestampList outputFilePath = os.path.join(jsonPath, '%s.comments.json' % (userName)) outputFile = open(outputFilePath, 'w') outputFile.write(json.dumps(userData)) outputFile.close()
def downloadSubCommenters(subId, commentsPerRequest, apiAfter, apiCount, page): settingsDict = settings.loadSettings() redditUrl = requestUrlPattern % subId redditUrl += '?limit=%d' % commentsPerRequest if apiAfter != None: redditUrl += '&after=%s' % apiAfter if apiCount > 0: redditUrl += '&count=%d' % apiCount print redditUrl headerDict = { 'User-Agent': settingsDict['http']['userAgent']} req = urllib2.Request(redditUrl, None, headerDict) # Make the HTTP request try: httpResponse = urllib2.urlopen(req) jsonData = httpResponse.read() except urllib2.HTTPError as e: # If the request fails, ignore it. print(e.reason) return except: print 'Unknown error.' return False jsonFileName = jsonFileNamePattern % (subId, page) filePath = os.path.join(settingsDict['subCommentersJsonDataDir'], jsonFileName) localJsonFile = open(filePath, 'w') localJsonFile.write(jsonData) localJsonFile.close() # Sleep to prevent hitting Reddit API rate limit. time.sleep(2) return True
def main():
    """Run the frying bot forever: reload settings each cycle, process
    username mentions, then fry recent comments from the tracked subs."""
    # Subreddits tracked without a username mention.
    tracked = [
        'comedycemetery',
        'memes',
        'DeepFriedMemes',
        'nukedmemes',
        'ComedyNecrophilia',
        'dankmemes',
    ]
    # Log in once; the multireddit stream covers every tracked sub.
    reddit = login()
    sub = reddit.subreddit('+'.join(tracked))
    while True:
        # Settings are re-read every cycle so edits take effect live.
        settings_dict = settings.loadSettings('./settings.txt')
        check_mentions(reddit)
        try:
            recent = list(sub.comments(limit=settings_dict['max_comments']))
        except Exception as e:
            print(str(e))
            continue
        for item in recent:
            check(item)
        # All comments processed; pause before the next poll.
        time.sleep(settings_dict['check_delay'])
def publish(post_meta): doc_id = Post.url_friendly_text(post_meta["title"]) # Open the database couch = couchdb.Server() db = couch["mrvoxel_blog"] # Load the database settings blog_settings = settings.loadSettings(db) # Check to see if we have a valid category if not post_meta["category"] in blog_settings["blog_categories"]: raise ValueError("No such category: %s" % post_meta["category"]) print "checking for [%s]" % doc_id # Load the post (if it exists in our database) post = Post.load(db, doc_id) # If it exists, warn the user if post: raw = raw_input("This will replace an existing post of the same title...\nContinue? (y/N)") # if the existing post is published but we're trying to preview if (post["published"] == False) and post_meta["published"]: raise ValueError("Cannot yet preview posts that are already published") if raw != "y": print "Canceling publish." return None else: for k, v in post_meta.iteritems(): if k not in ["published", "title"]: print k post[k] = v print post """ post.markdown = mdtext post.html = html post.author = author post.timestamp = timestamp post.published = not preview post.title = title post.tags = tags post.category = category """ post.store(db) else: post = Post.create(**post_meta) print post["_id"] post.store(db) print post["_id"] return post["_id"]
def grab_screenshot(fullScreen = True, copyUrlIntoClipboard = False, userId = ''):
    """
    Grab a screenshot.

    Keyword arguments:
    string fullScreen -- true to capture the full screen, false to capture only the currently active window
    string copyUrlToClipboard -- true to copy the public URL to the clipboard

    Returns: the saved file name
    """
    # BUG FIX: load settings BEFORE reading any of them. The original built
    # fileName from settings.settings first and only then called
    # loadSettings(), so the prefix/format could come from stale or
    # missing values.
    settings.loadSettings()
    if fullScreen:
        image = ImageGrab.grab()
    else:
        image = ImageGrab.grab(get_current_active_window_placement())
    # Renamed from `time` so the local no longer shadows the time module.
    timestamp = datetime.datetime.now().strftime("%Y%m%d%H%M%S")
    fileName = '%s_%s.%s' % (settings.settings['filename_prefix'], timestamp,
                             ('png' if settings.settings['image_format'] == 'PNG' else 'jpg'))
    saveFolderPath = os.path.join(get_public_folder_path(),
                                  settings.settings['screenshot_save_directory'])
    saveLocation = os.path.join(saveFolderPath, fileName)
    # Resize the image (resize_value is a percentage string like "50%").
    if settings.settings['resize_image'] == '1':
        resizeValue = (float(settings.settings['resize_value'][:-1]) / 100)
        image = image.resize([int(size * resizeValue) for size in image.size],
                             Image.ANTIALIAS)
    # Save it
    if settings.settings['image_format'] == 'JPEG':
        try:
            quality = [value[0] for value in JPEG_QUALITY_CHOICES \
                       if value[1] == settings.settings['image_quality']][0]
            quality = int(quality)
        except IndexError:
            # Unknown quality label: fall back to maximum quality.
            quality = 100
        image.save(saveLocation, settings.settings['image_format'], quality = quality)
    else:
        image.save(saveLocation, settings.settings['image_format'], optimize = True)
    # Copy file URL to the clipboard
    if copyUrlIntoClipboard and userId != '':
        copy_url_to_clipboard(userId, fileName)
    return fileName
def get_public_folder_path():
    """Return the path of the Dropbox "Public" folder.

    Tries the auto-detected Dropbox location first; if that fails, falls
    back to a manually configured 'dropbox_directory' setting. Raises
    IOError when neither source yields a usable path.
    """
    try:
        dropbox_path = get_dropbox_path()
    except IOError:
        # No Dropbox installation detected — see whether the user pointed
        # us at the directory manually via settings.
        try:
            settings.loadSettings()
            dropbox_path = settings.settings['dropbox_directory']
        except KeyError:
            dropbox_path = None
    if not dropbox_path:
        raise IOError('Could not find Dropbox folder')
    return os.path.join(dropbox_path, 'Public')
def activateSettings():
    """Activate the latex-access settings stored in file.

    Searches the user config first, then the system-wide one, loads
    whichever exists (falling back to the system path when neither does),
    and activates the configured handlers. Consult settings.py for the
    details of these calls.
    """
    global n  # handle to the braille translator
    # Candidate configuration files, in priority order.
    configFilePaths = (os.path.expanduser("~/.latex-access"),
                       "/etc/latex-access.conf")
    # Default to the last candidate (matches the original loop's behavior
    # when no file exists), otherwise take the first one that does.
    configFile = configFilePaths[-1]
    for candidate in configFilePaths:
        if os.path.exists(candidate):
            configFile = candidate
            break
    settings.loadSettings(configFile)
    n = settings.brailleTableToUse()
    return settings.activateSettings({
        "braille": n,
        "speak": s,
        "preprocessor": p
    })
def createDataDirs():
    '''
    Create directories to save data if they do not exist.
    '''
    config = settings.loadSettings()
    # Both data directories get the same exists-then-create treatment.
    for dirKey in ('subCommentersCsvDataDir', 'subCommentersJsonDataDir'):
        path = config[dirKey]
        if not os.path.exists(path):
            os.makedirs(path)
def main():
    """Interactive console for the Steam/GOG/Galaxy database tools.

    Prints a menu in a loop and dispatches on the typed command until
    the user enters 'quit'.
    """
    settingStore = settings.loadSettings()
    settingStore2 = settings.processSettings(settingStore)
    # Menu lines printed before every prompt.
    menuLines = (
        " ",
        "________________________________________________________________",
        " ",
        "Type 'steamDB' to regenerate the steam drm free games list.",
        "Type 'steamdrm?' to check and see if a game has drm.",
        "Type 'gogDB' to regenerate the GOG games database.",
        "Type 'onGog' to check discounts and see if GOG has a game.",
        "Type 'galaxyDB' to generate the database for games in GOG Galaxy 2 DB",
        "Type 'galaxyquery' to see if you own the game and find out your stats.",
        "Type 'quit' to exit the tool.",
    )
    while True:
        for line in menuLines:
            print(line)
        text = input("Response: ")
        if text == "steamDB":
            drmList = steam.drmListOpen()
            print("parsed text file")
            cleanedList = steam.listCleaner(drmList)
            cleanedList = steam.listOrganize(cleanedList)
            print("data cleaned")
            createFile(cleanedList, "dataBases/SteamDRMFree.csv")
            print("database creation successful")
        elif text == "steamdrm?":
            fetchedSteamDB = openFile("dataBases/SteamDRMFree.csv")
            steam.exists(fetchedSteamDB)
        elif text == "gogDB":
            rawdb = gog.processGog()
            writeOut = gog.createRaw(rawdb)
            contents = gog.readRaw()
            cleanedFile = gog.cleaner(contents)
            createFile(cleanedFile, "dataBases/GOG.csv")
        elif text == "onGog":
            fetchedGogDB = openFile("dataBases/GOG.csv")
            gog.selectAction(fetchedGogDB)
        elif text == "galaxyDB":
            settings.generateDB()
        elif text == "galaxyquery":
            opened_data = openFile("dataBases/gameDB.csv")
            settings.queryDB(opened_data)
        elif text == "quit":
            print("Hope ya enjoyed!")
            break
        else:
            print("Not a valid input please try again.")
def initSettings(self):
    """Load persisted settings, mirror them into the UI checkboxes and
    subject list, and resume cascading if it was left enabled."""
    cfg = settings.loadSettings()
    self.settings = cfg
    self.builder.get_object("cbAutostart").set_active(cfg["autostart"])
    self.builder.get_object("cbAutocascade").set_active(cfg["autocascade"])
    debug("Got subjects from settings: %s" % str(cfg["cascSubjects"]))
    self.addSubjects(cfg["cascSubjects"])
    # Resume only when cascading was active AND auto-cascade is on.
    if cfg["cascading"] and cfg["autocascade"]:
        self.startCascading()
def initSettings(self):
    """Load persisted settings, push them into the UI widgets, and
    restart cascading when both flags call for it."""
    loaded = settings.loadSettings()
    self.settings = loaded
    for key, widget in (('autostart', 'cbAutostart'),
                        ('autocascade', 'cbAutocascade')):
        self.builder.get_object(widget).set_active(loaded[key])
    debug('Got subjects from settings: %s' % str(loaded['cascSubjects']))
    self.addSubjects(loaded['cascSubjects'])
    if loaded['cascading'] and loaded['autocascade']:
        self.startCascading()
def parseSubData(subId, page):
    """Parse one saved page of commenter JSON for submission subId.

    Returns (userList, apiAfter) where apiAfter is Reddit's pagination
    cursor, or (None, None) when the file is missing or unparseable.
    """
    settingsDict = settings.loadSettings()
    jsonFileName = jsonFileNamePattern % (subId, page)
    jsonFilePath = os.path.join(settingsDict['subCommentersJsonDataDir'],
                                jsonFileName)
    try:
        # `with` closes the handle even when json.load raises — the
        # original leaked the file descriptor on a parse error.
        with open(jsonFilePath, 'r') as jsonFile:
            jsonData = json.load(jsonFile)
    except Exception:
        # Missing/corrupt page: signal the caller to stop paginating.
        return None, None
    jsonCommentList = jsonData[1]['data']['children']
    apiAfter = jsonData[1]['data']['after']
    userList = []
    for jsonComment in jsonCommentList:
        processCommentJson(jsonComment, userList)
    return userList, apiAfter
def get_sub_commenters(subId): createDataDirs() settingsDict = settings.loadSettings() print 'Downloading comment data for submission %s.' % subId # Make the first request. numberOfRequests = 100 apiAfter = None apiCount = 0 page = 1 commentsPerRequest = int(settingsDict['http']['commentsPerRequest']) if downloadSubCommenters(subId, commentsPerRequest, apiAfter, apiCount, page): userList, apiAfter = parseSubData(subId, page) if userList is None: return None apiCount += len(userList) page += 1 else: return None # Download the remaining pages. maxRequests = int(settingsDict['http']['maxRequests']) while (page - 1) <= maxRequests and apiAfter != None: if downloadSubCommenters(subId, commentsPerRequest, apiAfter, apiCount, page): partialUserList, apiAfter = parseSubData(subId, page) if partialUserList is None: break userList = userList + partialUserList apiCount += len(userList) page += 1 else: break return list(set(userList))
def getUserCommentsFromUserList(): settingsDict = settings.loadSettings() datasetPath = settingsDict['dataset_path'] jsonPath = os.path.join(datasetPath, settingsDict['json_dir']) # Get list of users that we have to download. userListFilePath = os.path.join(datasetPath, 'user_list.txt') userListFile = open(userListFilePath, 'r') allUsers = set([line.strip() for line in userListFile]) userListFile.close() # Check for user data that we already downloaded. downloadedUsers = set([fname.split('.')[0] for fname in os.listdir(jsonPath)]) usersToDownload = allUsers.difference(downloadedUsers) print('Downloaed %d users. %d users to download.' % (len(downloadedUsers), len(usersToDownload))) return for line in userListFile: userName = line.strip() print 'Downloading data for user %s.' % (userName) get_user_comments.getUserComments(userName) userListFile.close()
def loadSettings(self):
    """Load the "quester" settings and cache them on this instance."""
    loaded = settings.loadSettings("quester")
    self.settings = loaded
def loadSettings(self):
    """Load the "grinder" settings and cache them on this instance."""
    loaded = settings.loadSettings("grinder")
    self.settings = loaded
# Module bootstrap: load settings, ensure the images/ directory exists,
# and read the cached refresh token.
import hashlib
import json
import os
import requests
import shutil
import sys
from settings import loadSettings, getSettingsMap

# Pixiv API endpoints.
AUTH_HOST = 'https://oauth.secure.pixiv.net'
SEARCH_HOST = 'https://app-api.pixiv.net'

# Downloaded images are stored under ./images.
if not os.path.isdir('images'):
    os.mkdir('images')

# Abort (after letting the user read the console) when settings.json
# cannot be loaded.
loadSuccess = loadSettings('settings.json')
if not loadSuccess:
    try:
        input('press ENTER to close')
    except:
        pass
    sys.exit(0)

settings = getSettingsMap()

# NOTE(review): `datetime` is used on the next line but is never imported
# in this chunk — add `import datetime` (or confirm it is imported in a
# part of the file not shown here).
local_time = datetime.datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%S+00:00')

# NOTE(review): this try-block is truncated at the end of the visible
# chunk — its except/finally clauses live beyond this excerpt.
try:
    with open('refresh_token.txt', encoding='utf-8') as refresh_token_file:
        refresh_token = refresh_token_file.read().strip()
# NOTE(review): this chunk starts mid-context — `recipients`, `p` (printer),
# `d` (drawing helpers) and `t` (text metrics) are defined elsewhere in the
# file; layout/indentation below is reconstructed from a collapsed source
# line and should be confirmed against the original file.
for name in recipients:
    print("Current Y Position: {}".format(p.CurrentPositionY), file=sys.stderr)
    print("Gift tag height: {}".format(d.giftTagHeight()), file=sys.stderr)
    print("Page height: {}".format(p.PageHeight), file=sys.stderr)
    # Start a new sheet when the next tag would not fit on this page.
    if (p.CurrentPositionY - d.giftTagHeight() < p.PageHeight):
        p.newSheet()
    d.giftTag(name, "o")
p.finishPrinting()

if __name__ == '__main__':
    Leds().all_off()
    print("Starting Program", file=sys.stderr)
    settings.loadSettings()
    t.updateMetrics(settings.getTextSize())
    # logo( 800 )
    # giftTags()
    # p.startPrinting()
    # t.printText( "the quick brown fox jumped over a lazy dog." )
    # p.finishPrinting()
    # testPaperFeed()
    # helloWorld()
    # p.calibrate()
    # time.sleep( 2 )
globalObjects["motd"] = "Message of the Day" # set general settings globalObjects["debug"] = True globalObjects["reed-switch-pin"] = 16 globalObjects["lock-object"] = physical.initServo( globalObjects["lock-servo-pin"], globalObjects["debug"]) globalObjects["fcgi-pipe"] = "/tmp/fcgi-pipe.sock" # start unlocked physical.unlock(globalObjects["lock-object"], globalObjects["debug"]) # import settings dictionary globalObjects["settings"] = settings.loadSettings("settings.json") globalObjects["authContext"] = auth.securityContext("auth.txt") # try to import pin number from settings.json if globalObjects["settings"].get("servo-pin-number") != None: globalObjects["lock-servo-pin"] = globalObjects["settings"][ "servo-pin-number"] # import debug preference from settings.json if globalObjects["settings"].get("debug") != None: globalObjects["debug"] = globalObjects["settings"]["debug"] # import reed switch pin number from settings.json if globalObjects["settings"].get("reed-switch-pin") != None: globalObjects["reed-switch-pin"] = globalObjects["settings"][
def buildProtocol(self, addr):
    """Build a Bot protocol instance for a new connection.

    Attaches this factory to the bot and applies the loaded settings
    before handing the protocol back to Twisted.
    """
    bot = Bot()
    bot.factory = self
    import settings
    settings.loadSettings(bot)
    return bot
def startProgram():
    """Load saved settings when a settings.dat file exists, then launch
    the main GUI."""
    haveSettingsFile = os.path.isfile("settings.dat")
    if haveSettingsFile:
        cfg.loadSettings()
    mainGui()
def main():
    """Scratch driver for the DISCHARGEDB pipeline.

    NOTE(review): this function reads like an interactive/experimental
    script (repeated reconnects, overwritten variables, `self = db`); it
    is documented as-is rather than restructured.
    """
    # Load settings
    filepath_settings = 'C:/DISCHARGEDB/code/data/settings.json'
    settings = initSettings()
    saveSettings(settings, filepath_settings)
    settings = fillSettingsTags(loadSettings(filepath_settings))
    # Downlaod new images from ag mednet
    discharge = DISCHARGEDB(database=settings['database'])
    #discharge.download_images(settings)
    #discharge.update_images(settings)
    discharge.truncateTable(tablename='dicom')
    discharge.update_dicom(settings)
    ### Update agmednet reports ###
    discharge = DISCHARGEDB(host="127.0.0.1",
                            port='3306',
                            user="******",
                            password="******",
                            database=settings['database'])
    discharge.update_agmednet_01(settings)
    discharge.update_agmednet_02(settings)
    discharge = DISCHARGEDB(host="127.0.0.1",
                            port='3306',
                            user="******",
                            password="******",
                            database=settings['database'])
    rs = discharge.truncateTable('agmednet_02')
    discharge.update_agmednet_02(settings)
    df = discharge.getTable('agmednet_02')
    df = discharge.getTable('agmednet_01')
    #### Execute sript #####
    discharge = DISCHARGEDB(host="127.0.0.1",
                            port='3306',
                            user="******",
                            password="******",
                            database=settings['database'])
    table = discharge.getTable('agmednet_01')
    discharge.truncateTable('agmednet_01')
    discharge.truncateTable('agmednet_02')
    discharge.connectSQL()
    table = discharge.getTable('agmednet_01')
    table = discharge.getTable('agmednet_01')
    # NOTE(review): `db` is not bound until further below — if executed
    # top-to-bottom this line raises NameError; confirm intended order.
    self = db
    mysql_path = 'mysql://' + self.user + ':' + self.password + '@' + self.host + '/' + self.database + '?charset=utf8'
    sqlEngine = create_engine(mysql_path)
    df = pd.read_sql("SELECT * FROM agmednet_01", sqlEngine)
    ### Reset autoincrement
    db = DISCHARGEDB(host="127.0.0.1",
                     port='3306',
                     user="******",
                     password="******",
                     database=settings['database'])
    db.connectSQL()
    db.resetAutoIncrement()
    #db.createDB()
    db.initDB(settings)
    db.executeScript(
        fip_script=
        'H:/cloud/cloud_data/Projects/DISCHARGEDB/src/scripts/set_primary_key.sql',
        replace=('TABLE_VAR', 'v_a06_docu_hosp'))
    result = db.executeSQL('SELECT * FROM dischargedb3.site;')
    db.sas7bdatTosql()
# NOTE(review): continuation of the scratch DISCHARGEDB script — `db`,
# `settings`, `SAS7BDAT`, `pd`, `create_engine` and `con` are defined
# elsewhere in the file; documented as-is, not restructured.
db.closeSQL()
filename = 'v_a01_fu_staff'
db = DISCHARGEDB(database=settings['database'])
db.connectSQL()
db.executeSQL('ALTER TABLE ' + filename + ' ADD PRIMARY KEY index')
command = "ALTER TABLE `dischargedb`.`v_a03_ses_staff` CHANGE COLUMN `index` `index` BIGINT NOT NULL ,"
cursor = db.db.cursor()
cursor.execute(command)
result = cursor.fetchall()
db = DISCHARGEDB(database=settings['database'])
db.connectSQL()
#command = "ALTER TABLE `dischargedb`.`v_a02_fu_questf_sub01` CHANGE COLUMN `index` `index` BIGINT NULL ,ADD PRIMARY KEY (`index`);;"
command = "ALTER TABLE dischargedb.v_a03_ses_staff CHANGE COLUMN index index BIGINT NOT NULL"
db.executeSQL(command)
##############################
# Inspect SAS column formats of a reading table.
reader = SAS7BDAT(
    'H:/cloud/cloud_data/Projects/DISCHARGEDB/data/tmp/ecrf/v_g02_ct_reading_a.sas7bdat',
    skip_header=False)
df1 = reader.to_data_frame()
for i in range(len(reader.columns)):
    f = reader.columns[i].format
    print('format:', f)
c = reader.columns[10]
fip = 'H:/cloud/cloud_data/Projects/DISCHARGEDB/data/tmp/ecrf/v_a01_fu_staff.sas7bdat'
df = pd.read_sas(fip, format='sas7bdat', encoding='iso-8859-1')
# NOTE(review): `con` is not defined in this chunk; this call would fail
# as written — confirm against the surrounding file.
df.to_sql(con=con,
          name='table_name_for_df',
          if_exists='replace',
          flavor='mysql')
mysql_path = 'mysql://*****:*****@localhost/?charset=utf8'
engine = create_engine(mysql_path, encoding="utf-8", echo=False)
# with engine.connect() as con:
#     con.execute("use dischargedb3; drop table if exists " + name + ";")
#     df = pd.read_excel(path)
#     df.to_sql(name, engine, index=False)
fip = 'H:/cloud/cloud_data/Projects/DISCHARGEDB/data/tables/sas/v_a02_fu_questf_sub01.sas7bdat'
df = pd.read_sas(fip, format='sas7bdat', encoding='iso-8859-1')
with engine.connect() as con:
    #con = engine.connect()
    con.execute("use dischargedb3;")
    df.to_sql('table6', engine, index=False)
df = pd.read_excel(
    'H:/cloud/cloud_data/Projects/DISCHARGEDB/data/tables/xlsx/discharge_ecrf_01092020.xlsx',
    sheet_name='Sheet1',
    index_col=0)
class application(object):
    """CherryPy application controlling a servo-driven door lock.

    State is read from GPIO via the `physical` module and exposed over
    HTTP endpoints (lock/unlock, message-of-the-day, status digests).
    """
    # application variables
    doorStatus = None          # last sensed door state (True == closed)
    lockStatus = None          # last commanded lock state
    messageContent = None      # current message-of-the-day text
    defaultMessage = None
    lockServoObject = None     # handle returned by physical.initServo
    lockServoPin = None
    reedSwitchPin = None
    doorButtonPin = None
    autolockEnable = None
    debugEnable = True
    # Parsed once at class-definition time from settings.json.
    settingList = settings.loadSettings("settings.json")

    def sensorUpdate(self):
        """Refresh doorStatus from the reed switch."""
        self.doorStatus = physical.readDoorStatus(self.reedSwitchPin,
                                                  self.debugEnable)

    def resetSensorData(self):
        """Clear all cached sensor/message state."""
        self.doorStatus = None
        self.lockStatus = None
        self.messageContent = None

    def engageLock(self, overrideSafety, debug):
        """Engage the lock; unless overrideSafety, only when the door is
        sensed closed."""
        self.sensorUpdate()
        if overrideSafety == True:
            physical.lock(self.lockServoObject, self.debugEnable)
            self.lockStatus = True
        else:
            if self.doorStatus == True:
                physical.lock(self.lockServoObject, self.debugEnable)
                self.lockStatus = True
            else:
                logger.log("Door is not closed, so lock will not be engaged")
                logger.log("Either enable override or close door")

    def disengageLock(self, overrideSafety, debug):
        """Disengage the lock unconditionally."""
        self.sensorUpdate()
        # overrideSafety is not needed but added for continuity
        physical.unlock(self.lockServoObject, self.debugEnable)
        self.lockStatus = False

    def doorEvent(self, channel):
        """GPIO callback for door-close events; autolocks after a delay."""
        # only allow door closing event actions if autolock is enabled
        if self.autolockEnable:
            logger.log(
                "Door closing event detected, sending lock engage in 3 seconds"
            )
            time.sleep(3)
            self.engageLock(overrideSafety=False, debug=self.debugEnable)
        else:
            logger.log(
                "Door closing event detected, but autolock is not enabled")

    def buttonEvent(self, channel):
        """GPIO callback for the door button; unlocks after a short delay."""
        logger.log(
            "Button press event detected, sending lock engage in 0.5 seconds")
        time.sleep(0.5)
        self.disengageLock(overrideSafety=True, debug=self.debugEnable)

    def assignSettings(self):
        """Copy parsed settings onto the instance and initialize hardware
        (servo, reed switch, button) plus their event callbacks."""
        # set application setting values from parsed settings file
        if self.settingList.get("debug") != None:
            self.debugEnable = self.settingList["debug"]
        if self.settingList.get("servo-pin-number") != None:
            self.lockServoPin = self.settingList["servo-pin-number"]
        if self.settingList.get("reed-switch-pin") != None:
            self.reedSwitchPin = self.settingList["reed-switch-pin"]
        if self.settingList.get("door-button-pin") != None:
            self.doorButtonPin = self.settingList["door-button-pin"]
        if self.settingList.get("default-message") != None:
            self.defaultMessage = self.settingList["default-message"]
        else:
            self.defaultMessage = "message not set"
        if self.settingList.get("enable-autolock") != None:
            self.autolockEnable = self.settingList["enable-autolock"]
        else:
            self.autolockEnable = False
        # fetch objects and initiallize
        if self.lockServoPin != None:
            self.lockServoObject = physical.initServo(self.lockServoPin,
                                                      self.debugEnable)
        # setup GPIO pins for read
        physical.initReedSwitch(self.reedSwitchPin, self.debugEnable)
        physical.initDoorButton(self.doorButtonPin, self.debugEnable)
        # enable event callback
        physical.enableDoorEvent(self.reedSwitchPin, self.doorEvent,
                                 self.debugEnable)
        physical.enableButtonEvent(self.doorButtonPin, self.buttonEvent,
                                   self.debugEnable)

    # http exposed functions
    @cherrypy.expose
    @cherrypy.tools.accept()
    def unlockdoor(self):
        """HTTP endpoint: disengage the lock (safety not overridden)."""
        logger.log("Door unlock signal received")
        self.disengageLock(overrideSafety=False, debug=self.debugEnable)

    @cherrypy.expose
    @cherrypy.tools.accept()
    def lockdoor(self):
        """HTTP endpoint: engage the lock (safety not overridden)."""
        logger.log("Door lock signal received")
        self.engageLock(overrideSafety=False, debug=self.debugEnable)

    @cherrypy.expose
    @cherrypy.tools.accept(media="text/plain")
    def setmessage(self, motdtext):
        """HTTP endpoint: update the message of the day; returns it as JSON."""
        if motdtext != None:
            self.messageContent = motdtext
        return json.dumps({"motd": self.messageContent})

    @cherrypy.expose
    @cherrypy.tools.accept()
    def toggleAutolock(self):
        """HTTP endpoint: flip the autolock flag."""
        logger.log("Recieved signal to toggle autolock")
        if self.autolockEnable:
            logger.log("Autolock is enabled - disabling...")
            self.autolockEnable = False
        else:
            logger.log("Autolock is disabled - enabling...")
            self.autolockEnable = True

    @cherrypy.expose
    def infodigest(self):
        """HTTP endpoint: JSON snapshot of door/lock/motd/autolock state."""
        # update sensors before assembling data
        self.sensorUpdate()
        infoArray = dict()
        infoArray["door-status"] = self.doorStatus
        infoArray["lock-status"] = self.lockStatus
        infoArray["motd"] = self.messageContent
        infoArray["autolock-enabled"] = self.autolockEnable
        # not implemented functions
        infoArray["light-status"] = None
        infoArray["temperature"] = None
        # return a formatted JSON reply
        # hopefully this works
        return json.dumps(infoArray)

    @cherrypy.expose
    def getdoorstatus(self):
        """HTTP endpoint: JSON with just door and lock status."""
        # update sensors before assembling data
        self.sensorUpdate()
        infoArray = dict()
        infoArray["door-status"] = self.doorStatus
        infoArray["lock-status"] = self.lockStatus
        return json.dumps(infoArray)

    @cherrypy.expose
    def getautolockstatus(self):
        """HTTP endpoint: JSON with the autolock flag."""
        return json.dumps({"autolock-enabled": self.autolockEnable})
import datetime
import pytz
import urllib.request
from urllib.parse import urlparse
from icalendar import Calendar, Event
import settings
import json
from collections import namedtuple

# Loaded once at import time; provides the saved calendar path below.
programSettings = settings.loadSettings()


####################################################
# MAIN()
####################################################
def main():
    """Ensure a calendar .ics exists (prompting for one if not configured),
    then load it and display the TODO list."""
    calendarPath = programSettings.getCalendarPath()
    if calendarPath == "":
        getICS()
    todo = Todo(loadICS())
    todo.displayTODO()
    return


####################################################
# Gets the link to the .ics file from the user and
# downloads the file
####################################################
# NOTE(review): getICS is truncated at the end of this chunk — the code
# that actually downloads `url` to `savePath` lies beyond this excerpt.
def getICS():
    url = input("Enter Canvas url for .ics file: ")
    savePath = 'resources/cal.ics'  #input("Enter to save file: ")