def run(self):
    atSettings = {}
    atSettings['fp'] = getAudioBasePath() + self._audioFileName
    atSettings['vol'] = 100
    atSettings['repeat'] = False
    atSettings['tName'] = "AudioThread_id_" + str(self._id)
    audioThread = at(**atSettings)

    # Use Id to mark entry as active
    Models(Db(dbc.MYSQL_DB).connection()).push(
        ModelType.FOR_ID_SET_ACTIVE_UPDATE_TS, self._id)

    # Start audio thread
    audioThread.start()
    AudioDbInterface.candidateAudioThreads.append(audioThread)
    logger("_INFO_", " Starting thread: ", audioThread.getName(),
           " state=", audioThread.isAlive())

    # Wait for completion
    audioThread.join()

    # Use Id to mark entry as inactive
    Models(Db(dbc.MYSQL_DB).connection()).push(
        ModelType.FOR_ID_UNSET_ACTIVE, self._id)
    AudioDbInterface.candidateAudioThreads.remove(audioThread)
    logger("_INFO_", " Thread ending: ", audioThread.getName(),
           " state=", audioThread.isAlive())
    return

def __fetchModel(self):
    if (self._appSettingsId > 0):
        self._appSettings = Models(Db(dbc.MYSQL_DB).connection()).fetch(
            ModelType.APP_SETTINGS_FOR_ID, self._appSettingsId)
    else:
        self._appSettings = Models(Db(dbc.MYSQL_DB).connection()).fetch(
            ModelType.APP_SETTINGS)
    self._settingsJson = json.loads(self._appSettings[0][dbc.KEY_SETTINGS])
    return

def __init__(self, platform_url: str, name: str, code: str):
    super().__init__(platform_url, name, code)
    self.domain = platform_url
    self.HTTPRequestHeaders = {}
    self.HTTPGetParameters = {}
    self.databaseHandler = Db()

def terminate(self):
    logger("_INFO_", "Terminating continuous playback")

    # Cancel existing callback
    if (self._futureTerminationCallback != None):
        self._futureTerminationCallback.cancel()

    # Update the database
    queryFrag = "'$.continuousPlayback.enabled', False"
    Models(Db(dbc.MYSQL_DB).connection()).push(
        ModelType.UPDATE_APP_SETTINGS_IN_PLACE, str(queryFrag))
    self._rerun = False
    return

def __init__(self,
             platform_url: str,
             name: str,
             code: str,
             sources: List[PlatformSource] = []):
    # Leave out until full conversion of scrapers.
    # if len(sources) == 0:
    #     raise ValueError("Expecting at least one source!")
    self.platform_url = platform_url
    self.name = name
    self.code = code
    self._sources = sources
    self._db = Db()
    self._collect_recovery = ScraperExceptionRecoveryStrategy(3)

def disableAmbientChannelAndUpdateSettings(ch):
    if (ch == 'ambientAudioChannel1'):
        queryFrag = "'$.continuousPlayback.ambience1', 'None'"
    elif (ch == 'ambientAudioChannel2'):
        queryFrag = "'$.continuousPlayback.ambience2', 'None'"
    else:
        logger("_INFO_", "Unsupported channel: ", ch)
        return

    # Update app settings in-place
    a = Models(Db(dbc.MYSQL_DB).connection()).push(
        ModelType.UPDATE_APP_SETTINGS_IN_PLACE, str(queryFrag))
    logger("_INFO_", "Settings updated in-place in database. Return: ", str(a))
    return

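# Illustrative note (an assumption, not confirmed by this code base): the queryFrag
# values passed with ModelType.UPDATE_APP_SETTINGS_IN_PLACE look like MySQL JSON
# path/value argument pairs, so the model presumably expands them into an in-place
# JSON update along the lines of
#
#     UPDATE <settings table> SET settings = JSON_SET(settings, '$.continuousPlayback.ambience1', 'None')
#
# where the table and column names are hypothetical.
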
def geocode(self):
    db = Db()
    locationset = db.session.query(InitiativeImport)\
        .filter(InitiativeImport.location.isnot(None))\
        .with_for_update().all()

    # Regex for postcodes written as `9999XX`
    pattern = r'\d{4}[A-Z]{2}'
    p = re.compile(pattern)

    for item in locationset:
        geocodeterm = item.location

        # Prepare item.location for city districts (stadsdelen)
        geocodeterm = geocodeterm.replace('Amsterdam Algemeen', 'Amsterdam')
        if geocodeterm.startswith('Stadsdeel'):
            geocodeterm = geocodeterm + ' Amsterdam'

        # Prepare item.location for nationwide (`landelijk`) initiatives
        if geocodeterm in ['Landelijk', 'Heel Nederland']:
            geocodeterm = 'Nederland'
        else:
            # Prepare item.location for postcodes: insert a space between the
            # digits and the letters (e.g. `1012AB` -> `1012 AB`) so the
            # geocoder can resolve them
            zipwithoutspace = p.findall(item.location)
            if len(zipwithoutspace) > 0:
                for hit in zipwithoutspace:
                    geocodeterm = geocodeterm.replace(
                        hit, hit[:4] + ' ' + hit[4:])

        match = self.geolocator.geocode(geocodeterm)
        if match is None:
            print("ERROR : " + geocodeterm + " niet gevonden")
            item.osm_address = "Niet gevonden"
        else:
            item.osm_address = match.address
            # Only write location if not already set.
            if item.latitude is None:
                item.latitude = match.latitude
                item.longitude = match.longitude
                print("SUCCESS: " + match.address)

        db.session.add(item)
        db.session.commit()
        time.sleep(1)  # Sleep so we don't overstretch the Nominatim API

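# Illustrative sketch (not part of the original source): the postcode handling in
# geocode() above re-inserts a space into Dutch postcodes written as `9999XX` so
# the geocoder can resolve them. The helper name and the sample input below are
# hypothetical; only the regex mirrors the code above.
def _demo_split_postcode(term):
    import re
    pattern = re.compile(r'\d{4}[A-Z]{2}')
    for hit in pattern.findall(term):
        # Insert a space between the digit and letter groups, e.g. 1012AB -> 1012 AB
        term = term.replace(hit, hit[:4] + ' ' + hit[4:])
    return term

# Example: _demo_split_postcode("Buurthulp 1012AB Amsterdam") -> "Buurthulp 1012 AB Amsterdam"
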
def __init__(self,
             platform_url: str,
             name: str,
             code: str,
             sources: List[PlatformSource] = []):
    # Leave out until full conversion of scrapers.
    # if len(sources) == 0:
    #     raise ValueError("Expecting at least one source!")
    self.platform_url: str = platform_url
    self.name: str = name
    self.code: str = code
    self._sources = sources
    self._db = Db()
    self._collect_recovery = ScraperExceptionRecoveryStrategy(3)
    self.limit: int = 0
    """Limits the iteration if a debugger is attached"""
    self._batch: ImportBatch

def getCandidateAudioFiles(appSettings, **kwargs):
    #print("\n--> ", inspect.stack()[0][3], " CALLED BY ", inspect.stack()[1][3])
    #2. TODO: Verify that the required time has passed since the last playback
    try:
        requestedChannels = kwargs['requestedChannels']
    except KeyError:
        requestedChannels = 1

    # Purge dead entries
    # Fetch active entries
    activeEntries = Models(Db(dbc.MYSQL_DB).connection()).fetch(
        ModelType.ACTIVE_ENTRIES)
    activeEntries = [d[dbc.KEY_ID] for d in activeEntries]

    #3. Verify that the number of active entries doesn't exceed the maximum allowed
    if (len(activeEntries) > 0):
        purged = purgeDeadEntries(60)
        if (purged > 0):
            logger("_INFO_", purged, "entries purged")
            activeEntries = Models(Db(dbc.MYSQL_DB).connection()).fetch(
                ModelType.ACTIVE_ENTRIES)
        logger("_INFO_", "Active entries:", activeEntries)
        logger("_INFO_", "Active/maxAllowed=", len(activeEntries), "/",
               appSettings.maxNumberOfAllowedSimultaneousChannels())
        if (len(activeEntries) >=
                appSettings.maxNumberOfAllowedSimultaneousChannels()):
            logger("_INFO_", "Channels saturated. Ignoring trigger. Exiting\n")
            return []

    #4. Compute the number of channels to implement
    if (requestedChannels == 1):
        numberOfChannels = 1
    elif (requestedChannels > 1):
        emptyChannels = appSettings.maxNumberOfAllowedSimultaneousChannels(
        ) - len(activeEntries)
        if (requestedChannels > emptyChannels):
            numberOfChannels = emptyChannels
        else:
            numberOfChannels = requestedChannels
            if (randomizeNumberOfChannels):
                numberOfChannels = random.randint(1, requestedChannels)
    else:
        logger("_ERROR_", "Unsupported requested number of channels:",
               str(requestedChannels))
        return []

    # Scope:
    # (1) Fetch all rows, sorted by last_updated ascending (oldest first)
    data = Models(Db(dbc.MYSQL_DB).connection()).fetch(
        ModelType.IDS_NAMES_AUDIOFILE_SORTED_BY_LAST_UPDATED_OLDEST_FIRST)
    # (2) Select one at random from the top 75% of that list
    #for d in data:
    #    print("New line")
    #    print(d[0])
    #    print(d[1])
    logger("_INFO_", "Total data rows: ", len(data))
    #print(type(data))
    #for d in data:
    #    print(type(d))
    #print(data)
    #print("\nJSON dumps:", json.dumps(data))

    allIds = [d[dbc.KEY_ID] for d in data]
    logger("_INFO_", allIds)

    eligibleLength = int(0.75 * len(data))
    candidates = []

    # Choose the first candidate at random
    candidates.append(data[random.randint(0, eligibleLength - 1)])
    logger("_INFO_", "CHOOSING 1st candidate:")
    logger(
        "_INFO_",
        "{:>4.4} {:32.32} {}".format(str(candidates[0][dbc.KEY_ID]),
                                     candidates[0][dbc.KEY_NAME],
                                     candidates[0][dbc.KEY_AUDIO_FILE]))
    data.remove(candidates[0])

    # Remove the last 25% (the most recently played entries)
    indicesToRemove = len(data) - eligibleLength
    for i in range(0, indicesToRemove):
        data.remove(data[len(data) - 1])

    logger("_INFO_", "Total number of channels to implement: ", numberOfChannels)
    if (numberOfChannels > 1):
        speciesConstrainedSet = []
        logger("_INFO_", "Limit to same species: ",
               appSettings.isBirdChoiceLimitedToSameSpecies())
        if (appSettings.isBirdChoiceLimitedToSameSpecies()):
            for d in data:
                if (d == candidates[0]):
                    print(d, " :Already exists. Skipping")
                    #data.remove(candidates[0])
                    continue
                elif (d[dbc.KEY_NAME] == candidates[0][dbc.KEY_NAME]):
                    speciesConstrainedSet.append(d)
            data = speciesConstrainedSet

        logger("_INFO_", "Curated candidate data set: size=", len(data))
        for element in data:
            logger(
                "_INFO_",
                "{:>4.4} {:32.32} {}".format(str(element[dbc.KEY_ID]),
                                             element[dbc.KEY_NAME],
                                             element[dbc.KEY_AUDIO_FILE]))

        if (numberOfChannels >= len(data)):
            logger(
                "_INFO_", "Number of channels to implement ", numberOfChannels,
                " is more than or equal to the data set at hand ", len(data))
            for d in data:
                candidates.append(d)
        else:
            for i in range(0, numberOfChannels - 1):
                logger("_INFO_", "\nSelecting for channel ", i + 2)
                randomlyChosenRowIdx = random.randint(0, len(data) - 1)
                logger(
                    "_INFO_", "Size of data:", len(data), " Chosen idx:",
                    randomlyChosenRowIdx, "id={} {} {}".format(
                        str(data[randomlyChosenRowIdx][dbc.KEY_ID]),
                        data[randomlyChosenRowIdx][dbc.KEY_NAME],
                        data[randomlyChosenRowIdx][dbc.KEY_AUDIO_FILE]))
                candidates.append(data[randomlyChosenRowIdx])
                # Remove that row to avoid duplication
                data.remove(data[randomlyChosenRowIdx])

    #logger("_INFO_", "Final candidate list: ")
    #candidateAudioFiles = []
    #for c in candidates:
    #    print(c)
    #    candidateAudioFiles.append(c[1])
    return candidates

def processUpstageSoundscape(ch, **kwargs):
    try:
        if (kwargs['terminate']):
            if (kwargs.get("terminate") == True):
                terminateSoundscapeAudioThread(ch)
                return
    except KeyError:
        logger("_INFO_", ch, " won't be terminated")

    atSettings = {}
    for key, value in kwargs.items():
        if (key == 'name'):
            if (value == 'None'):
                terminateSoundscapeAudioThread(ch)
                return
            else:
                d = Models(Db(dbc.MYSQL_DB).connection()).fetch(
                    ModelType.ID_FILE_FOR_NAME, value)[0]
                atSettings['fp'] = getAudioBasePath() + d[dbc.KEY_AUDIO_FILE]
        elif (key == 'endTime'):
            atSettings['terminateAt'] = value
        elif (key == 'vol'):
            atSettings['vol'] = value
        else:
            print("Unsupported key/value pair: ", str(key), ":", str(value))

    if (globals()[ch] == None or globals()[ch].isAlive() == False):
        # Start a new audio thread for this channel
        globals()[ch] = at(**atSettings)
        globals()[ch].start()
        # Update database if required
        logger("_INFO_", "audiothread started and waiting for completion")
        # Wait for completion
        globals()[ch].join()
        # Update database: appSettings
        logger("_INFO_", "Update appSettings here to reflect thread termination")
        disableAmbientChannelAndUpdateSettings(ch)
    else:
        # Update the existing audio thread in place
        for k, v in atSettings.items():
            if (k == 'fp'):
                try:
                    globals()[ch].changeFile(v)
                except:
                    logger("_ERROR_", "Fatal error: Could not change ", k, "on", ch)
            elif (k == 'vol'):
                try:
                    globals()[ch].changeVolume(v)
                except:
                    logger("_ERROR_", "Fatal error: Could not change ", k, "on", ch)
            elif (k == 'terminateAt'):
                try:
                    globals()[ch].setFutureTerminationTime(v)
                except:
                    logger("_ERROR_", "Fatal error: Could not change ", k, "on", ch)
            else:
                print("Unsupported AT key/value pair: ", str(k), ":", str(v))
    return

def purgeDeadEntries(seconds):
    return int(
        Models(Db(dbc.MYSQL_DB).connection()).push(
            ModelType.UNSET_ACTIVE_FOR_DEAD_ENTRIES, seconds))

def __init__(self):
    self.geo_locator = Nominatim(user_agent="code-for-nl-covid-19")
    self.db = Db()

def __init__(self, platform_url: str, name: str, code: str):
    self.platform_url = platform_url
    self.name = name
    self.code = code
    self._db = Db()

def save(self, jsonStr):
    Models(Db(dbc.MYSQL_DB).connection()).push(ModelType.APP_SETTINGS, jsonStr)
    self.refresh()
    return