def getEventTeamsMatches(self):
    """Fetch event, team and match data from the configured scoring source.

    Dispatches to a source-specific fetcher based on the current division
    URI, then computes powerscores for the fetched teams.

    Returns:
        tuple: (event, teams, matches) dictionaries.
    """
    source_uri = self.getCurrentDivisionURI()

    if "api.ftcscores.com" in source_uri:
        # Specific logic for ftcscores.com
        event, teams, matches = self.getEventTeamsMatches_ftcscores()
    elif "theorangealliance.org/apiv2" in source_uri:
        # Specific logic for theorangealliance
        event, teams, matches = self.getEventTeamsMatches_toa()
    elif "worlds.pennfirst.org/cache/TeamInfo" in source_uri:
        event, teams, matches = self.getEventTeamsMatches_pennfirst()
    else:
        # Default logic - ftc scoring system
        event, teams, matches = self.getEventTeamsMatches_default()

    # Now update powerscores
    self.__calculatePowerScore(teams, matches)
    LOG.debug("powerscore calculated")
    return (event, teams, matches)
def refresh_token(self):
    """Exchange the stored refresh token for fresh credentials.

    Only one thread may refresh at a time; a concurrent caller waits for
    the in-flight refresh to finish and then reloads the saved identity.
    """
    LOG.debug('Refreshing token')
    if not identity_lock.acquire(blocking=False):
        # Someone is updating the identity wait for release
        with identity_lock:
            LOG.debug('Refresh is already in progress, waiting until done')
            time.sleep(1.2)
            os.sync()
            self.identity = IdentityManager.load(lock=False)
            LOG.debug('new credentials loaded')
        return
    try:
        fresh = self.send({
            "path": "auth/token",
            "headers": {
                "Authorization": "Bearer " + self.identity.refresh,
                "Device": self.identity.uuid
            }
        })
        IdentityManager.save(fresh, lock=False)
        LOG.debug('Saved credentials')
    except HTTPError as e:
        if e.response.status_code == 401:
            LOG.error('Could not refresh token, invalid refresh code.')
        else:
            raise
    finally:
        identity_lock.release()
def save_video(self, item, folder: str) -> bool:
    """Download a video submission into *folder* using youtube-dl.

    Args:
        item: reddit submission object exposing a ``url`` attribute.
            (The original ``str`` annotation was wrong — ``item.url``
            is accessed below, so an object is expected.)
        folder (str): destination directory for the download.

    Returns:
        bool: True when the youtube-dl subprocess exits successfully.
    """
    # Validate first so non-video posts don't produce a misleading
    # "Trying to download" log entry.
    if not is_video(item):
        return False

    LOG.info(f"[Video] Trying to download {item.url} to {folder}")
    return subprocess.call(['youtube-dl', item.url], cwd=folder) == 0
def is_paired(ignore_errors=True):
    """ Determine if this device is actively paired with a web backend

    Determines if the installation of Mycroft has been paired by the user
    with the backend system, and if that pairing is still active.

    Returns:
        bool: True if paired with backend
    """
    global _paired_cache
    if _paired_cache:
        # NOTE: This assumes once paired, the unit remains paired.  So
        # un-pairing must restart the system (or clear this value).
        # The Mark 1 does perform a restart on RESET.
        return True

    try:
        api = DeviceApi()
        api.get()  # raises on auth/network failure; return value unused
        # Paired when the stored identity carries a non-empty uuid.
        _paired_cache = api.identity.uuid is not None and \
            api.identity.uuid != ""
        return _paired_cache
    except HTTPError as e:
        if e.response.status_code == 401:
            return False
    except Exception as e:
        # Typo fixed: "infO" -> "info" (now matches check_remote_pairing).
        LOG.warning('Could not get device info: ' + repr(e))

    if ignore_errors:
        return False

    # Distinguish a backend outage from missing connectivity entirely.
    if connected():
        raise BackendDown
    raise InternetDown
def check_remote_pairing(ignore_errors):
    """Check that a basic backend endpoint accepts our pairing.

    Arguments:
        ignore_errors (bool): True if errors should be ignored when
            checking the pairing status; when False, a failure raises
            BackendDown/InternetDown (for HTTP errors) or re-raises the
            original exception.

    Returns:
        True if pairing checks out, otherwise False.
    """
    try:
        DeviceApi().get()
        return True
    except HTTPError as e:
        if e.response.status_code == 401:
            # Backend reachable but it rejected our credentials.
            return False
        error = e
    except Exception as e:
        error = e

    LOG.warning('Could not get device info: {}'.format(repr(error)))

    if ignore_errors:
        return False

    # Chain the original cause so callers can inspect it.
    if isinstance(error, HTTPError):
        if connected():
            raise BackendDown from error
        else:
            raise InternetDown from error
    else:
        raise error
def refresh_token(self):
    """Request new login credentials using the stored refresh token.

    A non-blocking lock attempt ensures only one refresh runs at a time;
    losing callers wait for the winner and reload the saved identity.
    """
    LOG.debug('Refreshing token')
    got_lock = identity_lock.acquire(blocking=False)
    if not got_lock:
        # Someone is updating the identity wait for release
        with identity_lock:
            LOG.debug('Refresh is already in progress, waiting until done')
            time.sleep(1.2)
            os.sync()
            self.identity = IdentityManager.load(lock=False)
            LOG.debug('new credentials loaded')
        return
    try:
        credentials = self.send({
            "path": "auth/token",
            "headers": {
                "Authorization": "Bearer " + self.identity.refresh
            }
        })
        IdentityManager.save(credentials, lock=False)
        LOG.debug('Saved credentials')
    except HTTPError as e:
        if e.response.status_code == 401:
            LOG.error('Could not refresh token, invalid refresh code.')
        else:
            raise
    finally:
        identity_lock.release()
def __init__(self, mycroft, client_id: str, client_secret: str,
             user_agent: str):
    """Create a praw Reddit client for this skill.

    Args:
        mycroft: skill instance used for spoken feedback.
        client_id (str): reddit API client id.
        client_secret (str): reddit API client secret.
        user_agent (str): user agent string for the reddit API.
    """
    self.mycroft = mycroft
    self.reddit = praw.Reddit(client_id=client_id,
                              client_secret=client_secret,
                              user_agent=user_agent)
    # Maximum number of submissions fetched per subreddit query.
    self.limit = 1000
    # SECURITY fix: the original logged client_secret in plain text;
    # credentials must never reach the logs.
    LOG.info(f"{client_id}, <client_secret redacted>, {user_agent}")
def render_image(self, image):
    """Clear the OLED, then draw *image* one pixel at a time via publish()."""
    self.publish("(oled:clear)")
    for x, y in image_to_pixels(image):
        message = f"(oled:pixel {x} {y})"
        LOG.info(message)
        self.publish(message)
def _on_connect(self, client, userdata, flags, rc):
    """paho-mqtt connect callback; a result code of 0 means success."""
    if rc != 0:
        LOG.error("MQTT Client could not connect to host {}".format(
            self.host))
        return
    LOG.info("MQTT Client connected successfully to host {}".format(
        self.host))
    self.connected = True
def send_json(socket, data):
    """Serialize *data*'s attributes as JSON and send it over *socket*.

    The payload is length-prefixed: the decimal byte length of the
    serialized JSON plus a newline, followed by the JSON itself.

    Args:
        socket: a connected socket object.
        data: object whose ``__dict__`` must be JSON-serializable
            (json.dumps raises TypeError/ValueError otherwise).
    """
    # Dead commented-out try/except removed; json.dumps already raises
    # a clear error for non-serializable data.
    serialized = json.dumps(data.__dict__)
    LOG.info(serialized)
    # send the length of the serialized data first
    socket.send(f'{len(serialized)}\n'.encode())
    # send the serialized data
    socket.sendall(serialized.encode())
def _on_message(self, client, userdata, message):
    """paho-mqtt message callback: dispatch the payload to its handler."""
    handler = self.registered_handlers.get(message.topic)
    if handler is None:
        LOG.error(
            "No handler was registered for message on topic {}".format(
                message.topic))
        return
    wrapped = Message(type='mqtt',
                      destination=str(message.topic).split('/'),
                      data=json.loads(str(message.payload)))
    handler(wrapped)
def get_reddit_replies(self, arg):
    """Return the hot submissions of subreddit *arg*.

    Args:
        arg (str): subreddit name.

    Returns:
        Iterable of submissions, or [] when the request fails.
    """
    try:
        result = self.reddit.subreddit(arg).hot(limit=self.limit)
        LOG.info("Getting replies")
    except Exception as e:
        LOG.info(f"Got zero replies {e}")
        result = []
    # Fix: the return was inside ``finally``, which silently swallows
    # any in-flight exception (including KeyboardInterrupt/SystemExit).
    return result
def save_image(self, item, folder: str) -> bool:
    """Download an image submission into *folder*.

    Args:
        item: reddit submission exposing ``url`` (the original ``str``
            annotation was wrong — an object with attributes is used).
        folder (str): destination directory.

    Returns:
        bool: True only when a new file was actually written.
    """
    if not is_image(item):
        return False

    filename = self.get_file_name(item)
    full_file_path = os.path.join(folder, filename)
    if os.path.exists(full_file_path):
        LOG.info(f"[File Exists] (unknown)")
        return False

    try:
        # Timeout added so a stalled download cannot hang the skill.
        response = requests.get(item.url, timeout=30)
        if not response.ok:
            LOG.info("Error downloading file.")
            return False
    except Exception as e:
        LOG.info(f"Error downloading path {e}")
        return False

    # Write before logging success so the log never claims a save that
    # did not happen.
    pathlib.Path(full_file_path).write_bytes(response.content)
    LOG.info(f"[File Saved] (unknown)")
    return True
def download_photos_and_videos(self, data_type, community: str,
                               video_folder: str, photos_folder: str,
                               max_images: int, max_videos: int) -> None:
    """Download up to *max_images*/*max_videos* posts from *community*.

    Walks the subreddit's posts, saving videos and/or images depending
    on *data_type*, and returns as soon as the requested quotas are met.
    """
    current_images = 0
    current_videos = 0
    try:
        community_posts = self.get_reddit_replies(community)
    except Exception as e:
        # NOTE(review): get_reddit_replies appears to catch its own
        # errors and return [], so this branch may be unreachable —
        # confirm before relying on the spoken error.
        self.mycroft.speak(
            f"Unable to reach reddit, verify the reddit skill configuration"
        )
        return
    for item in community_posts:
        try:
            # save_video/save_image return a bool; short-circuiting keeps
            # the counters under their quotas and skips needless work.
            if (data_type in [Reddit.DataTypes.ALL, Reddit.DataTypes.VIDEO]
                    and current_videos < max_videos
                    and self.save_video(item=item, folder=video_folder)):
                current_videos += 1
            if (data_type in [Reddit.DataTypes.ALL, Reddit.DataTypes.IMAGES]
                    and current_images < max_images
                    and self.save_image(item=item, folder=photos_folder)):
                current_images += 1
        except Exception as e:
            # Best-effort: one bad post must not abort the whole batch.
            LOG.info(f"Error trying to get {community}:")
            LOG.info(f"{e}")
        # Break early from the loop if we finished downloading things
        if data_type == Reddit.DataTypes.IMAGES and current_images >= max_images:
            return
        if data_type == Reddit.DataTypes.VIDEO and current_videos >= max_videos:
            return
        if (data_type == Reddit.DataTypes.ALL
                and current_videos >= max_videos
                and current_images >= max_images):
            return
def anime(self, anime_id):
    """Return details for *anime_id*, caching successful lookups.

    Returns:
        The cached anime object, or None if the API request failed.
    """
    if anime_id in self._animes:
        return self._animes[anime_id]

    def do_request():
        return self._jikan.anime(anime_id)

    try:
        self._animes[anime_id] = self._delay_request(do_request)
    except APIException as e:
        LOG.error(str.format('exception getting anime id:{}', anime_id))
        LOG.error(e)
        return None
    return self._animes[anime_id]
def forward():
    """Spin the stepper clockwise for durationFwd pulses, LED lit while moving."""
    sleep(.5)  # TODO: needs to be removed
    GPIO.output(DIR, GPIO.LOW)
    LOG.info("DIR set to LOW")
    LOG.info("Rotating CW")
    GPIO.output(LED, GPIO.HIGH)
    for _ in range(durationFwd):
        GPIO.output(PUL, GPIO.HIGH)
        sleep(delay)
        GPIO.output(PUL, GPIO.LOW)
        sleep(delay)
    GPIO.output(LED, GPIO.LOW)
    sleep(.5)  # TODO: needs to be removed
def create_client(protocol, settings):
    """Create the appropriate client for a specified protocol

    Args:
        protocol(str): protocol to be used
        settings(dict): skill settings object containing the connection
            parameters for the chosen protocol (docstring sentence was
            truncated in the original)

    Returns:
        Client: Client object, or None when the protocol is unsupported
    """
    if protocol == "mqtt":
        return MQTTClient(settings)
    else:
        LOG.error("Protocol {} not supported".format(protocol))
        return None
def reverse():
    """Spin the stepper counter-clockwise for durationBwd pulses, LED lit while moving."""
    sleep(.5)  # TODO: needs to be removed
    GPIO.output(DIR, GPIO.HIGH)
    LOG.info("DIR set to HIGH")
    LOG.info("Rotating CCW")
    GPIO.output(LED, GPIO.HIGH)
    for _ in range(durationBwd):
        GPIO.output(PUL, GPIO.HIGH)
        sleep(delay)
        GPIO.output(PUL, GPIO.LOW)
        sleep(delay)
    GPIO.output(LED, GPIO.LOW)
    sleep(.5)  # TODO: needs to be removed
def animelist_watching(self):
    """Return the user's 'watching' animelist, cached for five minutes.

    On API failure the previously cached value (possibly None) is
    returned unchanged.
    """
    cache_ttl = 5 * 60
    needs_refresh = (self._animelist_watching is None
                     or time.time() - self._animelist_watching_time > cache_ttl)
    if needs_refresh:
        def do_request():
            return self._jikan.user(username=self._mal_username,
                                    request='animelist',
                                    argument='watching')
        try:
            self._animelist_watching = self._delay_request(do_request)
            self._animelist_watching_time = time.time()
        except APIException as e:
            LOG.error('exception getting watchlist')
            LOG.error(e)
    return self._animelist_watching
def delete_skill_metadata(self, uuid):
    """ Delete the current skill metadata from backend

        TODO: Real implementation when method exists on backend
    Args:
        uuid (str): unique id of the skill
    """
    try:
        # Bug fix: the body referenced an undefined name ``skill_gid``
        # while the parameter is ``uuid``; the resulting NameError was
        # silently swallowed by the broad except below, so the DELETE
        # request was never actually issued.
        LOG.debug("Deleting remote metadata for {}".format(uuid))
        self.request({
            "method": "DELETE",
            "path": ("/" + self.identity.uuid + "/skill" +
                     "/{}".format(uuid))
        })
    except Exception as e:
        # Message fixed: the original read "cannot delete metadata
        # because this" (truncated/garbled).
        LOG.error("Cannot delete metadata: {}".format(e))
def send(self, params):
    """ Send request to mycroft backend.
    The method handles Etags and will return a cached response value
    if nothing has changed on the remote.

    Arguments:
        params (dict): request parameters

    Returns:
        Requests response object.
    """
    cache_key = (params.get('path'),
                 frozenset(params.get('query', {}).items()))
    etag = self.params_to_etag.get(cache_key)

    http_method = params.get("method", "GET")
    # Keep the original build_* call order — they may read/mutate
    # ``params`` in place (NOTE(review): confirm against build_* impls).
    headers = self.build_headers(params)
    body = self.build_data(params)
    json_payload = self.build_json(params)
    query_params = self.build_query(params)
    target_url = self.build_url(params)

    # For an introduction to the Etag feature check out:
    # https://en.wikipedia.org/wiki/HTTP_ETag
    if etag:
        headers['If-None-Match'] = etag

    response = requests.request(http_method, target_url,
                                headers=headers, params=query_params,
                                data=body, json=json_payload,
                                timeout=(3.05, 15))
    if response.status_code == 304:
        # Remote unchanged; serve the cached response object.
        LOG.debug('Etag matched. Nothing changed for: ' + params['path'])
        response = self.etag_to_response[etag]
    elif 'ETag' in response.headers:
        etag = response.headers['ETag'].strip('"')
        LOG.debug('Updating etag for: ' + params['path'])
        self.params_to_etag[cache_key] = etag
        self.etag_to_response[etag] = response

    return self.get_response(response)
def format_related(cls, abstract, query):
    """Trim a DuckDuckGo related-topic abstract into a spoken sentence.

    Strips trailing-ellipsis fragments, removes a parenthesized category
    found near the start and re-attaches it as "in <category>", and
    rewrites "<Name> <article> <desc>" into "<Name> is <desc>".

    Args:
        abstract (str): related-topic text from DuckDuckGo.
        query (str): the user's original query.

    Returns:
        str: cleaned sentence ending with punctuation.
    """
    LOG.debug('Original abstract: ' + abstract)
    ans = abstract

    if ans[-2:] == '..':
        # Truncated abstract: drop the trailing dots, an unfinished
        # final phrase, and dangling connective/-ing words.
        while ans[-1] == '.':
            ans = ans[:-1]
        phrases = ans.split(', ')
        first = ', '.join(phrases[:-1])
        last = phrases[-1]
        if last.split()[0] in cls.start_words:
            ans = first
        last_word = ans.split(' ')[-1]
        while last_word in cls.start_words or last_word[-3:] == 'ing':
            ans = ans.replace(' ' + last_word, '')
            last_word = ans.split(' ')[-1]

    category = None
    # Fix: raw string added — '\(' in a plain literal is an invalid
    # escape sequence (SyntaxWarning on modern Python). Pattern itself
    # is unchanged.
    match = re.search(r'\(([a-z ]+)\)', ans)
    if match:
        start, end = match.span(1)
        # Only treat the parenthetical as a category when it appears
        # close to the start of the answer.
        if start <= len(query) * 2:
            category = match.group(1)
            ans = ans.replace('(' + category + ')', '()')

    words = ans.split()
    for article in cls.articles:
        article = article.title()
        if article in words:
            index = words.index(article)
            if index <= 2 * len(query.split()):
                name, desc = words[:index], words[index:]
                desc[0] = desc[0].lower()
                ans = ' '.join(name) + cls.is_verb + ' '.join(desc)
                break
    if category:
        ans = ans.replace('()', cls.in_word + category)
    if ans[-1] not in '.?!':
        ans += '.'
    return ans
def respond(self, query):
    """Look up *query* on DuckDuckGo and build a spoken response.

    Preference order: instant answer, first abstract sentence, first
    related topic. Returns 0.0 for an empty query and None when nothing
    usable came back.
    """
    if len(query) == 0:
        return 0.0

    r = ddg.query(query)
    LOG.debug('Query: ' + str(query))
    LOG.debug('Type: ' + r.type)

    answer = r.answer
    if answer is not None and answer.text and "HASH" not in answer.text:
        # Direct instant answer available.
        return query + self.is_verb + answer.text + '.'
    if len(r.abstract.text) > 0:
        # Fall back to the first sentence of the abstract.
        return split_sentences(r.abstract.text)[0]
    if len(r.related) > 0 and len(r.related[0].text) > 0:
        first_related = split_sentences(r.related[0].text)[0]
        return self.format_related(first_related, query)
    return None
def upload_skills_data(self, data):
    """ Upload skills.json file. This file contains a manifest of installed
    and failed installations for use with the Marketplace.

    Arguments:
        data: dictionary with skills data from msm

    Raises:
        ValueError: if data is not a dict.
    """
    if not isinstance(data, dict):
        raise ValueError('data must be of type dict')

    _data = deepcopy(data)  # Make sure the input data isn't modified
    # Strip the skills.json down to the bare essentials
    to_send = {}
    if 'blacklist' in _data:
        to_send['blacklist'] = _data['blacklist']
    else:
        LOG.warning('skills manifest lacks blacklist entry')
        to_send['blacklist'] = []

    # Make sure skills doesn't contain duplicates (keep only last)
    if 'skills' in _data:
        skills = {s['name']: s for s in _data['skills']}
        to_send['skills'] = [skills[key] for key in skills]
    else:
        LOG.warning('skills manifest lacks skills entry')
        to_send['skills'] = []

    for s in to_send['skills']:
        # Remove fields the backend does not accept
        if 'update' in s:
            s.pop('update')

        # Finalize skill_gid with uuid if needed
        s['skill_gid'] = s.get('skill_gid', '').replace(
            '@|', '@{}|'.format(self.identity.uuid))

    # Bug fix: the path used an undefined module-level ``UUID`` name;
    # use the device uuid from the identity, consistent with the
    # skill_gid handling above.
    self.request({
        "method": "PUT",
        "path": "/" + self.identity.uuid + "/skillJson",
        "json": to_send
    })
def get_video_url(self, reddit_url: str) -> str:
    """Resolve a reddit/gfycat/youtube page URL to a direct video URL.

    Uses ``youtube-dl --get-url``; youtube links are capped at 480p.

    Args:
        reddit_url (str): URL of the page hosting the video.

    Returns:
        str: direct video URL, or "" when resolution fails.
    """
    try:
        # Bug fix: ``extra_params`` was only assigned for gfycat and
        # youtube URLs; any other host raised NameError, which the
        # broad except silently turned into "". Default to no extras.
        extra_params = []
        if "youtube" in reddit_url:
            extra_params = ["--format", "best[height<=480]"]

        command = ["youtube-dl", "--get-url", *extra_params, reddit_url]
        video_url = subprocess.check_output(command).decode("utf-8")
        LOG.info(f"Getting Video URL Video Url: {video_url}")
        return video_url
    except Exception as e:
        LOG.info(f"{e}")
        return ""
def download(self, data_type, communities: List[str], download_folder: str,
             max_images: int, max_videos: int) -> None:
    """Download media for every community in *communities*.

    Args:
        data_type: Reddit.DataTypes value selecting images, videos or all.
        communities (List[str]): subreddit names to fetch.
        download_folder (str): root folder for downloads.
        max_images (int): per-community image quota.
        max_videos (int): per-community video quota.
    """
    LOG.info(f"Inside of the reddit library {data_type}")
    # Cleanup: dropped the unused ``total`` and the unused ``idx`` that
    # was recomputed with an O(n) ``communities.index`` every iteration.
    for community in communities:
        LOG.info(f"Trying to get {community}")
        video_folder = folder(download_folder=download_folder,
                              subfolder="video", community=community)
        photos_folder = folder(download_folder=download_folder,
                               subfolder="images", community=community)
        self.download_photos_and_videos(data_type=data_type,
                                        community=community,
                                        video_folder=video_folder,
                                        photos_folder=photos_folder,
                                        max_images=max_images,
                                        max_videos=max_videos)
def refresh_token(self):
    """Fetch new login credentials using the stored refresh token.

    When another thread holds the identity lock, wait for it to finish
    and simply reload the credentials it saved.
    """
    LOG.debug('Refreshing token')
    if not identity_lock.acquire(blocking=False):
        # Someone is updating the identity wait for release
        with identity_lock:
            LOG.debug('Refresh is already in progress, waiting until done')
            time.sleep(1.2)
            os.sync()
            self.identity = IdentityManager.load(lock=False)
            LOG.debug('new credentials loaded')
        return
    try:
        fresh = self.send({
            "path": "auth/token",
            "headers": {
                "Authorization": "Bearer " + self.identity.refresh
            }
        })
        IdentityManager.save(fresh, lock=False)
        LOG.debug('Saved credentials')
    finally:
        identity_lock.release()
def stop():
    """Disable the stepper driver output."""
    LOG.info("Stopping stepper...")
    GPIO.output(ENA, GPIO.LOW)
except:
    # NOTE(review): bare except — presumably guards a GPIO import in the
    # (unseen) try block above this chunk, so non-Pi hosts skip hardware
    # setup. TODO: narrow to ImportError.
    is_imported = False
    pass

# BCM pin assignments for the stepper driver.
PUL = 17  # Driver pulse
DIR = 27  # Driver direction
ENA = 22  # Driver enable
LED = 18  # Indicator

if is_imported:
    # Configure all driver pins as outputs.
    # NOTE(review): the log labels below all say "PUL" — copy/paste; the
    # pins being configured are PUL, DIR, ENA and LED respectively.
    GPIO.setmode(GPIO.BCM)
    GPIO.setup(PUL, GPIO.OUT)
    LOG.info("PUL = GPIO 17")
    GPIO.setup(DIR, GPIO.OUT)
    LOG.info("PUL = GPIO 27")
    GPIO.setup(ENA, GPIO.OUT)
    LOG.info("PUL = GPIO 22")
    GPIO.setup(LED, GPIO.OUT)
    LOG.info("PUL = GPIO 18")
    LOG.info("GPIO Setup Complete.")

# Motion tuning constants.
durationFwd = 200  # Forward spin duration (full rotation 1600)
durationBwd = 200  # Reverse spin duration
delay = 0.0000002  # Delay between PUL.
def start():
    """Enable the stepper driver and kick off the run loop."""
    LOG.info("Starting stepper...")
    GPIO.output(ENA, GPIO.HIGH)
    run()
def cleanup():
    """Release all GPIO resources claimed by this module."""
    LOG.info("Cleanup for stepper...")
    GPIO.cleanup()