def get_haar_points(self, haarCascade, method=co.cv.CV_HAAR_DO_CANNY_PRUNING):
    """
    Search for points matching the haarcascade selected.

    Arguments:
    - self: The main object pointer.
    - haarCascade: The selected cascade (path passed to cvLoadHaarClassifierCascade).
    - method: The search method to use. DEFAULT: co.cv.CV_HAAR_DO_CANNY_PRUNING.

    Returns a list with the matches (pairs of cvPoint corners), or implicitly
    None when nothing was detected.
    """

    cascade = co.cv.cvLoadHaarClassifierCascade( haarCascade, self.imgSize )

    if not cascade:
        # NOTE(review): this only logs the failure; execution continues with
        # an invalid cascade unless debug.exception itself aborts -- confirm.
        debug.exception( "ocvfw", "The Haar Classifier Cascade load failed" )

    # Detect on the pre-allocated downscaled frame for speed.
    co.cv.cvResize( self.img, self.small_img, co.cv.CV_INTER_LINEAR )
    co.cv.cvClearMemStorage( self.storage )

    points = co.cv.cvHaarDetectObjects( self.small_img, cascade, self.storage, 1.2, 2, method, co.cv.cvSize(20, 20) )

    if points:
        # Scale detections back up to full-size coordinates; each match is a
        # [top-left, bottom-right] pair of cvPoints.
        matches = [ [ co.cv.cvPoint( int(r.x*self.imageScale), int(r.y*self.imageScale)),
                      co.cv.cvPoint( int((r.x+r.width)*self.imageScale), int((r.y+r.height)*self.imageScale) )]
                    for r in points]
        debug.debug( "ocvfw", "cmGetHaarPoints: detected some matches" )
        return matches
    # NOTE(review): falls off the end (returns None) when nothing was
    # detected -- callers must handle a non-list result.
def get_haar_roi_points(self, haarCascade, rect, origSize=(0, 0), method=co.cv.CV_HAAR_DO_CANNY_PRUNING):
    """
    Search for points matching the haarcascade selected, restricted to a ROI.

    Arguments:
    - self: The main object pointer.
    - haarCascade: The selected cascade (path passed to cvLoadHaarClassifierCascade).
    - rect: Sub-rectangle of self.img to search in (passed to cvGetSubRect).
    - origSize: (x, y) offset added to every detected point. DEFAULT: (0, 0).
    - method: The search method to use. DEFAULT: co.cv.CV_HAAR_DO_CANNY_PRUNING.

    Returns a list with the matches (pairs of cvPoint corners), or implicitly
    None when nothing was detected.
    """

    cascade = co.cv.cvLoadHaarClassifierCascade( haarCascade, self.imgSize )

    if not cascade:
        debug.exception( "ocvfw", "The Haar Classifier Cascade load failed" )

    co.cv.cvClearMemStorage(self.storage)
    imageROI = co.cv.cvGetSubRect(self.img, rect)

    if cascade:
        points = co.cv.cvHaarDetectObjects( imageROI, cascade, self.storage,
                                            1.2, 2, method, co.cv.cvSize(20,20) )
    else:
        # NOTE(review): 'points' is never assigned on this branch, so the
        # 'if points:' test below raises UnboundLocalError unless
        # debug.exception aborts first -- confirm.
        debug.exception( "ocvfw", "The Haar Classifier Cascade load Failed (ROI)" )

    if points:
        # Translate ROI-relative detections by the origSize offset; each
        # match is a [top-left, bottom-right] pair of cvPoints.
        matches = [ [ co.cv.cvPoint( int(r.x+origSize[0]), int(r.y+origSize[1])),
                      co.cv.cvPoint( int(r.x+r.width+origSize[0]), int(r.y+r.height+origSize[1] ))]
                    for r in points]
        debug.debug( "ocvfw", "cmGetHaarROIPoints: detected some matches" )
        return matches
def watch(build_queue):
    """Poll the API for projects and check their repositories, forever.

    Each iteration fetches all projects, checks each project's Git
    repository for new commits on origin (handing them to
    handle_incoming_commits), and merges origin when new commits arrived.
    For brand-new projects (no known commits) the most recent
    <initial_nr_commits> commits are imported as well.

    Arguments:
    - build_queue: queue that receives projects whose commits need a build.

    Per-iteration errors are logged so one failing project does not kill
    the watcher loop.
    """
    while True:
        debug.set_prefix("repository_watcher")
        debug.message("Retrieving projects")
        try:
            projects = Api.get_projects()
            for project in projects:
                debug.message("Check repository status for project %s" % project["Name"])
                repository = Git(get_path(project), get_origin_url(project))
                commit_count = handle_incoming_commits(repository.check_for_new_commits_on_origin(),
                                                       project, repository, build_queue)
                if commit_count > 0:
                    repository.merge_origin()

                # Add <initial_nr_commits> commits if this is a new repository
                if project["Commits"] is None or len(project["Commits"]) == 0:
                    handle_incoming_commits(
                        repository.get_commits(registry.config["repositories"]["initial_nr_commits"]),
                        project, repository, build_queue)
        # Fix: Python-2-only "except X, e" syntax is a SyntaxError under
        # Python 3; use the "as" form.
        except ValueError as e:
            debug.exception("Error retrieving projects", e)
        except GitError as e:
            debug.exception("Error with Git repository", e)
def update(self, force=False) -> UpdateStatus:
    """Refresh the current game's data from the MLB stats API.

    Arguments:
    - force: fetch even if the rate limiter (__should_update) says to defer.

    Returns:
    - UpdateStatus.SUCCESS on a successful fetch,
    - UpdateStatus.FAIL on a networking/parsing error,
    - UpdateStatus.DEFERRED when no update was due.
    """
    if not (force or self.__should_update()):
        return UpdateStatus.DEFERRED

    self.starttime = time.time()
    try:
        debug.log("Fetching data for game %s", str(self.game_id))
        self._data = statsapi.get("game", {"gamePk": self.game_id, "fields": API_FIELDS})
        self._status = self._data["gameData"]["status"]

        # This is odd, but if a game is postponed then the 'game' endpoint
        # gets the rescheduled game, so pull the status from the schedule
        # for the original date instead.
        if self._data["gameData"]["datetime"]["officialDate"] > self.date:
            debug.log(
                "Getting game status from schedule for game with strange date!"
            )
            try:
                scheduled = statsapi.get(
                    "schedule",
                    {"gamePk": self.game_id, "sportId": 1, "fields": SCHEDULE_API_FIELDS})
                self._status = next(g["games"][0]["status"]
                                    for g in scheduled["dates"] if g["date"] == self.date)
            # Fix: was a bare "except:", which also swallows
            # SystemExit/KeyboardInterrupt.
            except Exception:
                debug.error("Failed to get game status from schedule")

        return UpdateStatus.SUCCESS
    # Fix: was a bare "except:".
    except Exception:
        debug.exception(
            "Networking Error while refreshing the current game data.")
        return UpdateStatus.FAIL
def write_pid(pid):
    """Write *pid* to the configured PID file; exit the process on failure.

    Arguments:
    - pid: process id (converted with str() before writing).
    """
    try:
        # The "with" block closes the file; the old explicit close() inside
        # it was redundant and has been removed.
        with open(registry.pid_file, "w") as pid_file:
            pid_file.write(str(pid))
    # Fix: Python-2-only "except OSError, e" syntax -> "as e".
    except OSError as e:
        debug.exception("Exception while writing PID file", e)
        sys.exit(1)
def get(self, key):
    """Return the config value for *key*, lazily loading the config first.

    When the key is missing (or self.data is unusable) the error is logged
    via debug.exception and None is returned implicitly.
    """
    if self.data is None:
        self.load()
    try:
        return self.data[key]
    # Fix: Python-2-only "except Exception, e" syntax -> "as e". Kept broad
    # deliberately: it logs KeyError as well as a still-unloaded self.data,
    # instead of crashing the caller.
    except Exception as e:
        debug.exception("Key does not exist in config: %s" % key, e)
def load(self):
    """Read and parse the YAML config file into self.data.

    Read/parse errors are logged and swallowed; self.data stays unchanged.
    """
    debug.message("Reading config")
    try:
        # Renamed local (was "file", shadowing the builtin).
        with open(self.config_file, "r") as config_fh:
            # SECURITY NOTE(review): yaml.load without an explicit Loader can
            # construct arbitrary Python objects; prefer yaml.safe_load if
            # the config file is not fully trusted.
            self.data = yaml.load(config_fh.read())
    # Fix: Python-2-only "except Exception, e" syntax -> "as e".
    except Exception as e:
        debug.exception("Exception while reading config", e)
def update(self, force=False) -> UpdateStatus:
    """Refresh division standings (regular season) or league brackets (postseason).

    Arguments:
    - force: fetch even if the rate limiter (__should_update) says to defer.

    Returns:
    - UpdateStatus.SUCCESS on a refresh,
    - UpdateStatus.FAIL when the API call raised,
    - UpdateStatus.DEFERRED when no update was due.
    """
    if not (force or self.__should_update()):
        return UpdateStatus.DEFERRED

    self.date = self.__parse_today()
    debug.log("Refreshing standings for %s", self.date.strftime("%m/%d/%Y"))
    self.starttime = time.time()
    try:
        if not self.is_postseason():
            season_params = {
                "standingsTypes": "regularSeason",
                "leagueId": "103,104",
                "hydrate": "division,team,league",
                "season": self.date.strftime("%Y"),
                "fields": API_FIELDS,
            }
            # Historical standings need an explicit date parameter.
            if self.date != datetime.today().date():
                season_params["date"] = self.date.strftime("%m/%d/%Y")

            # Fix: typo in local name (was "divisons_data").
            divisions_data = statsapi.get("standings", season_params)
            self.standings = [
                Division(division_data) for division_data in divisions_data["records"]
            ]
            if self.wild_cards:
                season_params["standingsTypes"] = "wildCard"
                wc_data = statsapi.get("standings", season_params)
                self.standings += [
                    Division(data, wc=True) for data in wc_data["records"]
                ]
        else:
            postseason_data = statsapi.get(
                "schedule_postseason_series",
                {
                    "season": self.date.strftime("%Y"),
                    "hydrate": "league,team",
                    "fields": "series,id,gameType,games,description,teams,home,away,team,isWinner,name",
                },
            )
            self.leagues["AL"] = League(postseason_data, "AL")
            self.leagues["NL"] = League(postseason_data, "NL")
    # Fix: was a bare "except:", which also swallows
    # SystemExit/KeyboardInterrupt.
    except Exception:
        debug.exception("Failed to refresh standings.")
        return UpdateStatus.FAIL
    else:
        return UpdateStatus.SUCCESS
def __init__(self, year: int):
    """Load the important season dates for *year* from the stats API.

    If the requested season has already ended, next season's dates are
    loaded instead. On failure, far-future placeholder dates are set so
    the rest of the app keeps working.
    """
    try:
        data = statsapi.get("season", {"sportId": 1, "seasonId": year})
        self.__parse_important_dates(data["seasons"][0], year)

        # Current season over? Show next season's dates instead.
        now = datetime.now()
        if year == now.year and self.season_ends_date < now:
            data = statsapi.get("season", {"sportId": 1, "seasonId": year + 1})
            self.__parse_important_dates(data["seasons"][0], year + 1)
    # Fix: was a bare "except:", which also swallows
    # SystemExit/KeyboardInterrupt.
    except Exception:
        debug.exception("Failed to refresh important dates")
        self.playoffs_start_date = datetime(3000, 10, 1)
        self.important_dates = [{"text": "None", "date": datetime(3000, 1, 1), "max_days": 1}]
def watch(queue):
    """Consume projects from *queue* and build each one, forever.

    Arguments:
    - queue: queue of projects to build (blocking get()).

    Per-iteration errors are logged so a single failed build does not
    kill the watcher loop.
    """
    while True:
        debug.set_prefix("build_watcher")
        try:
            project = queue.get()
            prepare_repository(project)
            build = Build(project)
            build.run()
        # Fix: Python-2-only "except X, e" syntax is a SyntaxError under
        # Python 3; use the "as" form.
        except ValueError as e:
            debug.exception("Error communicating with API", e)
        except GitError as e:
            debug.exception("Error with Git repository", e)
def update(self, force=False) -> UpdateStatus:
    """Fetch current conditions from OpenWeatherMap.

    Arguments:
    - force: fetch even if the rate limiter (__should_update) says to defer.

    Returns:
    - UpdateStatus.SUCCESS on a fresh reading,
    - UpdateStatus.DEFERRED when no update was due or the API key has been
      flagged as invalid,
    - UpdateStatus.FAIL on a connection problem (placeholder readings are
      substituted on a first-ever failure so rendering can continue).
    """
    # Rate limit: bail out unless forced or the refresh interval elapsed.
    if not (force or self.__should_update()):
        return UpdateStatus.DEFERRED

    debug.log("Weather should update!")
    self.starttime = time.time()

    # A previously rejected API key short-circuits all later attempts.
    if not self.apikey_valid:
        return UpdateStatus.DEFERRED
    debug.log("API Key hasn't been flagged as bad yet")

    try:
        current = self.client.weather_at_place(self.location).weather
        self.temp = current.temperature(self.temperature_unit).get("temp", -99)
        wind_info = current.wind(self.speed_unit)
        self.wind_speed = wind_info.get("speed", 0)
        self.wind_dir = wind_info.get("deg", 0)
        self.conditions = current.status
        self.icon_name = current.weather_icon_name
        debug.log(
            "Weather: %s; Wind: %s; %s (%s)",
            self.temperature_string(),
            self.wind_string(),
            self.conditions,
            self.icon_filename(),
        )
        return UpdateStatus.SUCCESS
    except pyowm.commons.exceptions.UnauthorizedError:
        debug.warning(
            "[WEATHER] The API key provided doesn't appear to be valid. Please check your config.json."
        )
        debug.warning(
            "[WEATHER] You can get a free API key by visiting https://home.openweathermap.org/users/sign_up"
        )
        self.apikey_valid = False
        return UpdateStatus.DEFERRED
    except pyowm.commons.exceptions.APIRequestError:
        debug.warning("[WEATHER] Fetching weather information failed from a connection issue.")
        debug.exception("[WEATHER] Error Message:")

        # Set some placeholder weather info if this is our first weather update
        if self.temp is None:
            self.temp = -99
        if self.wind_speed is None:
            self.wind_speed = -9
        if self.wind_dir is None:
            self.wind_dir = 0
        if self.conditions is None:
            self.conditions = "Error"
        if self.icon_name is None:
            self.icon_name = "50d"
        return UpdateStatus.FAIL
        # --- tail of the main render loop (enclosing function starts before
        # this chunk; starttime/rotate/data come from that scope) ---
        endtime = time.time()
        time_delta = endtime - starttime
        rotate_rate = data.config.rotate_rate_for_status(
            data.current_game.status())

        # Advance to the next game once the rotation interval has elapsed
        # and any scrolling text has finished.
        if time_delta >= rotate_rate and data.scrolling_finished:
            starttime = time.time()
            if rotate:
                data.advance_to_next_game()


def __render_main(matrix, data):
    # Thin wrapper: hand the matrix and data model to the main renderer.
    MainRenderer(matrix, data).render()


if __name__ == "__main__":
    # Check for led configuration arguments
    command_line_args = args()
    matrixOptions = led_matrix_options(command_line_args)

    # Initialize the matrix
    matrix = RGBMatrix(options=matrixOptions)
    try:
        config, _ = os.path.splitext(command_line_args.config)
        main(matrix, config)
    except:
        # NOTE(review): bare except -- also catches SystemExit and
        # KeyboardInterrupt; consider 'except Exception'.
        debug.exception("Untrapped error in main!")
        sys.exit(1)
    finally:
        # Always blank the LED matrix on the way out.
        matrix.Clear()
# ---------------------------------------------------------------------------- if __name__ == '__main__': debug.init(sys.stdout) sys.stdout = debug.out sys.stderr = debug.err debug.brf('brief') debug.out('normal') debug.vrb('verbose') debug.err('error') sys.stderr.write('Error message\n') try: 1/0 except Exception, e: debug.exception(e) def addGuiAppender(remotePort = 31337): """ Create and add UDP appender for GUI """ appender = Appender.UdpAppender() appender.Name = 'GuiUdpAppender' debugFilter = Filter.LevelMatchFilter() debugFilter.LevelToMatch = Core.Level.Debug debugFilter.AcceptOnMatch = True errorFilter = Filter.LevelMatchFilter() errorFilter.LevelToMatch = Core.Level.Error errorFilter.AcceptOnMatch = True appender.AddFilter(debugFilter)
from build import Build


def watch(queue):
    """Consume projects from *queue* and build each one, forever.

    Per-iteration errors are logged so a single failed build does not kill
    the watcher loop; unknown errors are caught last as a safety net.
    """
    while True:
        debug.set_prefix("build_watcher")
        try:
            project = queue.get()
            prepare_repository(project)
            build = Build(project)
            build.run()
        # NOTE(review): Python-2-only "except X, e" syntax throughout.
        except ValueError, e:
            debug.exception("Error communicating with API", e)
        except GitError, e:
            debug.exception("Error with Git repository", e)
        except Exception, e:
            debug.exception("Unknown error", e)
        # Throttle the loop between iterations.
        time.sleep(registry.config["build"]["check_interval"])


def prepare_repository(project):
    """Check out the commit recorded for *project* in its local repository."""
    debug.message("Checkout commit %s" % project["commit"], indent=1)
    repo = Git(repository.get_path(project), repository.get_origin_url(project))

    # Check out the correct commit and create a reference to it (deployed)
    repo.checkout_commit(project["commit"])
                                                       # --- tail of the repository watcher loop (enclosing function
                                                       # starts before this chunk) ---
                                                       repository, build_queue)
                if commit_count > 0:
                    repository.merge_origin()

                # Add <initial_nr_commits> commits if this is a new repository
                if project["Commits"] is None or len(project["Commits"]) == 0:
                    handle_incoming_commits(
                        repository.get_commits(registry.config["repositories"]["initial_nr_commits"]),
                        project, repository, build_queue)
        # NOTE(review): Python-2-only "except X, e" syntax throughout.
        except ValueError, e:
            debug.exception("Error retrieving projects", e)
        except GitError, e:
            debug.exception("Error with Git repository", e)
        except OSError, e:
            debug.exception("Error connecting to remote", e)
        except Exception, e:
            debug.exception("Unknown error", e)
        # Throttle the polling loop between iterations.
        time.sleep(registry.config["repositories"]["check_interval"])


def get_path(project):
    """Return the local filesystem path for *project*'s repository."""
    return os.path.join(registry.config["repositories"]["path"], project["FormattedName"])


def get_origin_url(project):
    """Build the origin URL for *project* (continues past this chunk).

    When the project carries origin credentials, the URL is split after the
    protocol so they can presumably be embedded -- the rest of the body is
    not visible here.
    """
    if project["OriginUsername"] is not None and project["OriginPassword"] is not None:
        # Split "<protocol>//" from the remainder of the URL.
        protocol = project["OriginUrl"][0:project["OriginUrl"].index("/") + 2]
        url = project["OriginUrl"][project["OriginUrl"].index("/") + 2:]