def create_user(session: Session, user: User) -> User:
    """
    Create a new user if username isn't already taken.

    :param session: SQLAlchemy database session.
    :type session: Session
    :param user: New user record to create.
    :type user: User
    :return: The newly created user, or the already-existing user with the same username.
    :raises IntegrityError: re-raised as its driver-level `.orig` cause.
    :raises SQLAlchemyError: on any other database failure.
    """
    try:
        existing_user = (
            session.query(User).filter(User.username == user.username).first()
        )
        if existing_user is None:
            session.add(user)  # Add the user
            session.commit()  # Commit the change
            LOGGER.success(f"Created user: {user}")
            # Re-query so the returned instance reflects DB-generated fields.
            return session.query(User).filter(User.username == user.username).first()
        LOGGER.warning(f"Users already exists in database: {existing_user}")
        # Return the already-persisted record directly instead of issuing a
        # redundant second query (consistent with create_post).
        return existing_user
    except IntegrityError as e:
        LOGGER.error(e.orig)
        raise e.orig
    except SQLAlchemyError as e:
        LOGGER.error(f"Unexpected error when creating user: {e}")
        raise e
def run_theater(strip):
    """
    Cycle color wipes and theater-chase animations until the stop flag is set.

    :param strip: LED strip object (assumed rpi_ws281x-style API — TODO confirm).
    """
    LOGGER.debug("running...")
    # Imported here, presumably to avoid a circular import with `control`.
    from control import get_stop_flag
    while not get_stop_flag():
        try:
            set_brightness_depending_on_daytime(strip)
            color_wipe_full(strip, Color(127, 0, 0))  # Red wipe
            # Re-check the stop flag between animations so a stop request is
            # honored mid-cycle rather than after a full pass.
            if not get_stop_flag():
                color_wipe_full(strip, Color(0, 127, 0))  # Green wipe
            if not get_stop_flag():
                color_wipe_full(strip, Color(0, 0, 127))  # Blue wipe
            if not get_stop_flag():
                color_wipe_full(strip, Color(127, 127, 127))  # White wipe
            if not get_stop_flag():
                theater_chase(strip, Color(127, 127, 127))  # White theater chase
            if not get_stop_flag():
                theater_chase(strip, Color(0, 0, 127))  # Blue theater chase
            if not get_stop_flag():
                theater_chase(strip, Color(0, 127, 0))  # Green theater chase
            if not get_stop_flag():
                theater_chase(strip, Color(127, 0, 0))  # Red theater chase
        except KeyboardInterrupt:
            LOGGER.warn("KeyboardInterrupt")
            exit()
        except Exception as e:
            LOGGER.error(f"Any error occurs: {e}")
            exit()
    # Stop requested: blank the strip before returning.
    clear(strip)
def run_clock3(stripe):
    """
    Render an analog-style clock on the LED ring until the stop flag is set.

    :param stripe: LED ring object (assumed 24 pixels — TODO confirm).
    """
    LOGGER.debug("running...")
    from control import get_stop_flag
    while not get_stop_flag():
        try:
            # set_brightness_depending_on_daytime returns the current time
            # as its first element.
            now = set_brightness_depending_on_daytime(stripe)[0]
            # Map wall-clock time onto LED indices: 2 LEDs per hour,
            # one LED per 2.5 minutes, one LED per 2.5 seconds.
            led_for_hour = int(int(now.hour) % 12 * 2)
            led_for_minute = int(now.minute // 2.5)
            leds_per_2500ms = int(round(now.second / 2.5))
            _dial(stripe)
            _seconds(leds_per_2500ms, stripe)
            _minute(led_for_minute, led_for_hour, stripe)
            _hour(led_for_hour, stripe)
            stripe.show()
            time.sleep(0.2)
            # When the seconds hand completes a full revolution, pause
            # briefly and wipe the ring before the next frame.
            if leds_per_2500ms == stripe.numPixels():
                time.sleep(1.3)
                clear(stripe)
        except KeyboardInterrupt:
            LOGGER.warn("KeyboardInterrupt.")
            exit()
        except Exception as e:
            LOGGER.error(f"Any error occurs: {e}")
            exit()
    clear(stripe)
def __init__(self, context=None):
    """
    Initialize the Bitbucket org management interface object.

    :param context: Optional explicit context; when omitted the context is
        derived from PARAMS['FOUNDATION'] via self._context_map.
    :raises InvalidFoundation: when the foundation key cannot be mapped and
        is not in the known-unmapped list.
    :raises ContextNotAvailable: when the chosen context is not offered by
        the remote fetcher.
    """
    self._org_url = PARAMS['BB_ORG_FETCHER_URL']
    self._cached_metadata = defaultdict(dict)
    self._remote_cache_timestamp = None
    self._bb_request_time_limit = int(PARAMS["BB_REQUEST_TIME_LIMIT"])
    # BUG FIX: compute the key outside the try block. Previously, if the
    # PARAMS lookup itself raised KeyError, the handler dereferenced an
    # unbound `context_key` and crashed with NameError. Now only the
    # _context_map lookup is guarded.
    context_key = PARAMS['FOUNDATION'].split('-')[1][:3]
    try:
        self._context = context or self._context_map[context_key]
    except KeyError:
        if context_key not in self._unmapped_contexts:
            LOGGER.error("Can't map foundation %s", context_key)
            raise InvalidFoundation
        self._context = None
    if self._context:
        available_contexts = self._context_list()
        if not available_contexts \
                or self._context not in available_contexts:
            LOGGER.error(
                "Context %s (foundation %s) not in context list %s",
                self._context, PARAMS['FOUNDATION'],
                ','.join(available_contexts)
                if available_contexts else "(no contexts)")
            raise ContextNotAvailable
    LOGGER.info("Initialize fetcher (context(s): %s)", self._context)
    super().__init__()
def get_boxoffice_data(movie: Movie) -> Optional[str]:
    """
    Get IMDB box office performance for a given film.

    :param Movie movie: IMDB movie object.
    :returns: Optional[str]
    """
    try:
        box_office = movie.data.get("box office", None)
        if box_office:
            parts = []
            budget = box_office.get("Budget", None)
            opening_week = box_office.get("Opening Weekend United States", None)
            gross = box_office.get("Cumulative Worldwide Gross", None)
            if budget:
                parts.append(f"BUDGET {budget}.")
            if opening_week:
                parts.append(f"OPENING WEEK {opening_week}.")
            if gross:
                parts.append(f"CUMULATIVE WORLDWIDE GROSS {gross}.")
            # Join whichever figures were present into one summary line.
            return " ".join(parts)
        LOGGER.warning(f"No IMDB box office info found for `{movie}`.")
    except KeyError as e:
        LOGGER.warning(
            f"KeyError when fetching box office info for `{movie}`: {e}")
    except Exception as e:
        LOGGER.error(
            f"Unexpected error when fetching box office info for `{movie}`: {e}"
        )
def create_instagram_preview(url: str) -> Optional[str]:
    """
    Generate link preview for Instagram post URLs.

    :param str url: Instagram post URL (image or video).
    :returns: Optional[str]
    """
    try:
        request_headers = {
            "Access-Control-Allow-Origin": "*",
            "Access-Control-Allow-Methods": "GET",
            "Access-Control-Allow-Headers": "Content-Type",
            "Access-Control-Max-Age": "3600",
            "User-Agent": "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:52.0) Gecko/20100101 Firefox/52.0",
        }
        resp = requests.get(url, headers=request_headers)
        soup = BeautifulSoup(resp.content, "html.parser")
        image_meta = soup.find("meta", property="og:image")
        # Without an og:image tag there is nothing to preview.
        if image_meta is None:
            return None
        image_url = image_meta.get("content")
        page_title = soup.find("title").get_text()
        return f"{image_url} {page_title}"
    except HTTPError as e:
        LOGGER.error(
            f"HTTPError while fetching Instagram URL `{url}`: {e.response.content}"
        )
    except Exception as e:
        LOGGER.error(f"Unexpected error while creating Instagram embed: {e}")
def google(rec, audio):
    """
    Use GOOGLE API to recognize the natural language.

    :rec: speech_recognition.Recognizer() The speech recognition engine.
    :audio: speech_recognition.AudioData() The audio from the end user.
    :returns: dict() meta-information: text transcript
    """
    meta = {}
    try:
        transcript = rec.recognize_google(audio)
        # TODO: this needs to be updated
        meta['text'] = transcript.split()
    except sp_rec.UnknownValueError:
        meta['error'] = 'Google Speech Recognizer could not understand audio'
        log.error('Google Speech Recognizer couldn\'t understand audio')
    except sp_rec.RequestError as exc:
        meta['error'] = (
            'Could not request results from Google Speech Recognition service; {0}'
            .format(exc))
        log.error(
            'Could not request results from Google Speech Recognition service; {0}'
            .format(exc))
    return meta
def upload(self):
    """
    Implements logic for uploading item after splitting it into 6M chunks
    and uploading each of them separately.

    :return: True when every chunk uploaded and the upload was closed
        successfully, False otherwise.
    """
    # BUG FIX: use a context manager so the file handle is released even
    # when a chunk upload fails mid-way (the original never closed it).
    with open(self.local_identifier) as f:
        chunk_index = 1
        # Split file into pieces and read sequentially.
        for piece in iter(lambda: f.read(self.CHUNK_SIZE), ''):
            if not self.upload_part(piece, chunk_index):
                return False
            chunk_index += 1
    kwargs = {"id": self.id, "client_options": self.client_options}
    res = FinishUpload(**kwargs).create()
    if not res.ok:
        log = "Failed closing upload for item id: {0}".format(self.id)
        LOGGER.error(log)
        return False
    log = "Successfully closed upload for item id: {0}".format(self.id)
    LOGGER.info(log)
    return True
def get_english_definition(word: str) -> str:
    """
    Fetch English Dictionary definition for a given phrase or word.

    :param str word: Word or phrase to fetch English definition for.
    :returns: str
    """
    try:
        response = "\n\n\n"
        dictionary = MultiDictionary()
        word_definitions = dictionary.meaning("en", word)
        # NOTE(review): this assumes word_definitions[0] holds the word types
        # and word_definitions[i + 1] the matching definition text — confirm
        # against PyMultiDictionary's return shape; `i + 1` can run past the
        # end when there are more types than definition entries.
        for i, word_type in enumerate(word_definitions[0]):
            definition = emojize(f":bookmark: {word_type}\n", use_aliases=True)
            definition += emojize(
                f":left_speech_bubble: {word_definitions[i + 1]}\n",
                use_aliases=True)
            # Blank line between entries (condition is always true here since
            # i is at most len - 1).
            if i < len(word_definitions[0]):
                definition += "\n"
            response += definition
        return response
    except Exception as e:
        LOGGER.error(
            f"Unexpected error when fetching English definition for `{word}`: {e}"
        )
        return emojize(":warning: mfer you broke bot :warning:",
                       use_aliases=True)
def ibm(rec, audio):
    """
    Use IBM API to recognize the natural language.

    :rec: speech_recognition.Recognizer() The speech recognition engine.
    :audio: speech_recognition.AudioData() The audio from the end user.
    :returns: dict() meta-information: text transcript
    """
    meta = {}
    try:
        transcript = rec.recognize_ibm(audio,
                                       username=creds['IBM_USERNAME'],
                                       password=creds['IBM_PASSWORD'])
        meta['text'] = transcript.split()
    except sp_rec.UnknownValueError:
        meta['error'] = 'IBM Speech to Text could not understand audio'
        log.error('IBM Speech to Text couldn\'t understand audio')
    except sp_rec.RequestError as exc:
        meta['error'] = (
            'Could not request results from IBM Speech to Text service; {0}'
            .format(exc))
        log.error(
            'Could not request results from IBM Speech to Text service; {0}'.
            format(exc))
    return meta
def tovala_counter(user_name: str) -> str:
    """
    Keep track of consecutive Tovalas.

    :param str user_name: Name of user reporting a Tovala sighting.
    :returns: str
    """
    try:
        # Bump this user's tally and refresh the 60-second streak window.
        r.hincrby("tovala", user_name, 1)
        r.expire("tovala", 60)
        sightings = r.hgetall("tovala")
        contributors = tally_tovala_sightings_by_user(sightings)
        session_total = total_tovala_sightings(sightings)
        highest_streak = get_current_total()
        LOGGER.success(
            f"Saved Tovala sighting to Redis: (tovala, {user_name})")
        return emojize(
            f"\n\n<b>:shallow_pan_of_food: {session_total} CONSECUTIVE TOVALAS!</b>\n{contributors}\n:keycap_#: Highest streak: {highest_streak}"
        )
    except RedisError as e:
        LOGGER.error(
            f"RedisError while saving Tovala streak from @{user_name}: {e}")
        return emojize(
            f":warning: my b @{user_name}, broughbert just broke like a littol BITCH :warning:",
            use_aliases=True,
        )
    except Exception as e:
        LOGGER.error(
            f"Unexpected error while saving Tovala streak from @{user_name}: {e}"
        )
        return emojize(
            f":warning: my b @{user_name}, broughbert just broke like a littol BITCH :warning:",
            use_aliases=True,
        )
def parse_upcoming_mlb_game(game: dict) -> Optional[str]:
    """
    Fetch upcoming MLB match-up scheduled for the current date.

    :param dict game: Dictionary response of an upcoming game summary.
    :returns: Optional[str]
    """
    try:
        home_name = game["teams"]["home"]["name"]
        away_name = game["teams"]["away"]["name"]
        away_stats, home_stats = get_team_stats(game)
        now = datetime.now(tz=TIMEZONE_US_EASTERN)
        # BUG FIX: the minute previously re-read index [0] (the hour), so
        # every game appeared to start exactly on the hour.
        hour_part, minute_part = game["time"].split(":")[:2]
        start_datetime = now.replace(
            hour=int(hour_part), minute=int(minute_part), second=0)
        start_time = start_datetime.strftime("%I:%M%p").lower()
        return emojize(
            f":baseball: <b>{away_name}</b> @ <b>{home_name}</b> \n"
            f":input_numbers: {away_stats} - {home_stats}\n"
            f":nine_o’clock: Today, {start_time}",
            use_aliases=True,
        )
    except ValueError as e:
        LOGGER.error(f"ValueError while parsing upcoming Phillies game: {e}")
    except Exception as e:
        LOGGER.error(
            f"Unexpected error while parsing upcoming Phillies game: {e}")
def run_rainbow(strip):
    """
    Animate a moving rainbow across the whole strip until the stop flag is set.

    :param strip: LED strip object (assumed rpi_ws281x-style API — TODO confirm).
    """
    LOGGER.debug("running...")
    from control import get_stop_flag
    while not get_stop_flag():
        try:
            set_brightness_depending_on_daytime(strip)
            # 5 full passes through the 256-position color wheel; the stop
            # flag is re-checked at every level so shutdown is responsive.
            for j in range(256 * 5):
                if not get_stop_flag():
                    for i in range(strip.numPixels()):
                        if not get_stop_flag():
                            # Spread the wheel across the strip, shifted by j.
                            strip.setPixelColor(
                                i,
                                wheel((int(i * 256 / strip.numPixels()) + j)
                                      & 255))
                    if not get_stop_flag():
                        strip.show()
                        time.sleep(.02)
        except KeyboardInterrupt:
            LOGGER.warn("KeyboardInterrupt")
            exit()
        except Exception as e:
            LOGGER.error(f"Any error occurs: {e}")
            exit()
    clear(strip)
def get_crypto(symbol: str) -> str:
    """
    Fetch crypto price and generate 60-day performance chart.

    :param str symbol: Crypto symbol to fetch prices for.
    :returns: str
    """
    try:
        # Chart helper returns a ready-to-send formatted string.
        return cch.get_chart(symbol)
    except HTTPError as e:
        LOGGER.error(
            f"HTTPError {e.response.status_code} while fetching crypto price for `{symbol}`: {e}"
        )
        return emojize(f":warning: omg the internet died AAAAA :warning:",
                       use_aliases=True)
    except Exception as e:
        LOGGER.error(
            f"Unexpected error while fetching crypto price for `{symbol}`: {e}"
        )
        return emojize(
            f":warning: jfc stop abusing the crypto commands u fgts, you exceeded the API limit :@ :warning:",
            use_aliases=True,
        )
def google_sound_cloud(rec, audio):
    """
    Use GOOGLE SOUND CLOUD API to recognize the natural language.

    :rec: speech_recognition.Recognizer() The speech recognition engine.
    :audio: speech_recognition.AudioData() The audio from the end user.
    :returns: dict() meta-information: text transcript
    """
    meta = {}
    try:
        transcript = rec.recognize_google_cloud(
            audio, credentials_json=creds['GOOGLE_CLOUD_SPEECH'])
        meta['text'] = transcript.split()
    except sp_rec.UnknownValueError:
        meta['error'] = 'Google Cloud Speech could not understand audio'
        log.error('Google Cloud Speech couldn\'t understand audio')
    except sp_rec.RequestError as exc:
        meta['error'] = (
            'Could not request results from Google Cloud Speech service; {0}'
            .format(exc))
        log.error(
            'Could not request results from Google Cloud speech Recognition service; {0}'
            .format(exc))
    return meta
def _request(self, url, json=True):
    """
    Send get request to the given url and handle errors.
    Return json if indicated else raw data.

    :param url: the target url to send the get request to
    :param json: true if json result required or raw data if false
    :return: request response (json or raw bytes), or None on any error
    """
    LOGGER.debug("Fetcher GET request: %s", url)
    retn = None
    try:
        rsp = requests.get(url, timeout=self._bb_request_time_limit)
    except HTTPError as err:
        LOGGER.error("HTTP request error (url %s): %s", url, err)
    except Exception as exn:
        LOGGER.error("Unknown error requesting from %s: %s", url, exn)
    else:
        if rsp.status_code == requests.codes.ok:
            # BUG FIX: requests.Response has no `.data` attribute, so the raw
            # branch previously raised AttributeError. `.content` is the raw
            # response body (bytes).
            retn = rsp.json() if json else rsp.content
        else:
            LOGGER.info("Error requesting from BB fetcher: %s", url)
            LOGGER.debug("Query error %d (%s): %s", rsp.status_code,
                         rsp.reason, rsp.text)
    return retn
def wiki_summary(query: str) -> str:
    """
    Fetch Wikipedia summary for a given query.

    :param str query: Query to fetch corresponding Wikipedia page.
    :returns: str
    """
    try:
        page = wiki.page(query)
        if not page.exists():
            return emojize(
                f":warning: bruh i couldnt find shit for `{query}` :warning:",
                use_aliases=True,
            )
        heading = page.title.upper()
        top_category = list(page.categories.values())[0].title.replace(
            "Category:", "Category: ")
        body = page.text
        # Trim disambiguation pages down to the primary meaning.
        if "disambiguation" in top_category and "Other uses" in body:
            body = body.split("Other uses")[0]
        return f"\n\n\n\n{heading}: {body[0:1500]}\n \n\n {top_category}"
    except Exception as e:
        LOGGER.error(
            f"Unexpected error while fetching wiki summary for `{query}`: {e}")
        return emojize(
            f":warning: BRUH YOU BROKE THE BOT WTF IS `{query}`?! :warning:",
            use_aliases=True,
        )
def bing(rec, audio):
    """
    Use Bing API to recognize the natural language.

    :rec: speech_recognition.Recognizer() The speech recognition engine.
    :audio: speech_recognition.AudioData() The audio from the end user.
    :returns: dict() meta-information: text transcript
    """
    meta = {}
    try:
        transcript = rec.recognize_bing(audio, key=creds['BING_KEY'])
        meta['text'] = transcript.split()
    except sp_rec.UnknownValueError:
        meta['error'] = 'Microsoft Bing Voice Recognition could not understand audio'
        log.error(
            'Microsoft Bing Voice Recognition couldn\'t understand audio')
    except sp_rec.RequestError as exc:
        meta['error'] = (
            'Could not request results from Microsoft Bing Voice Recognition service; {0}'
            .format(exc))
        log.error(
            'Could not request results from Microsoft Bing Voice Recognition service; {0}'
            .format(exc))
    return meta
def updateIdentSet(self):
    """
    Safely update the identifier set of the traductor.

    Processes every pending lazzyUpdate record under the instance lock:
    - empty idToUpdate: rebuild the whole identifier set from the sensors;
    - empty newState: add the single identifier when a matching sensor exists;
    - otherwise: forward the new state to the sensor via sendTrame.
    Each processed update is deleted afterwards.
    """
    for anUpdate in lazzyUpdate.objects:
        LOGGER.warn("id : {} || state : {}".format(anUpdate.idToUpdate, anUpdate.newState))
        if anUpdate.idToUpdate == "":
            with self.lock:
                self.identSet = set([])
                for lsensor in sensor.Sensor.objects:
                    self.identSet.add(lsensor.physic_id)
                    LOGGER.info(lsensor.physic_id)
                LOGGER.info("Traductor's set of captors updated")
        elif anUpdate.newState == "":
            with self.lock:
                # BUG FIX: the original tested a bare generator expression,
                # which is always truthy, so unknown identifiers were added
                # unconditionally. any() performs the intended membership test.
                if any(anUpdate.idToUpdate in things.physic_id
                       for things in sensor.Sensor.objects):
                    self.identSet.add(anUpdate.idToUpdate)
                    LOGGER.info("{} added".format(anUpdate.idToUpdate))
        else:
            # send a trame from a captor with a newState
            LOGGER.error("Sensor to update : {} ||new state : {}".format(anUpdate.idToUpdate, anUpdate.newState))
            self.sendTrame(anUpdate.idToUpdate, anUpdate.newState)
        anUpdate.delete()
        LOGGER.warn(" {} update GROS delete de : {} || {}".format(lazzyUpdate.objects.count(), anUpdate.idToUpdate, anUpdate.newState))
    return LOGGER.debug("nothing to update")
def create_post(session: Session, post: Post) -> Post:
    """
    Create a post.

    :param session: SQLAlchemy database session.
    :type session: Session
    :param post: Blog post to be created.
    :type post: Post
    :return: The newly created post, or the existing post with the same slug.
    :raises IntegrityError: re-raised as its driver-level `.orig` cause.
    :raises SQLAlchemyError: on any other database failure.
    """
    try:
        existing_post = session.query(Post).filter(Post.slug == post.slug).first()
        if existing_post is None:
            session.add(post)  # Add the post
            session.commit()  # Commit the change
            LOGGER.success(
                f"Created post {post} published by user {post.author.username}"
            )
            # Re-query so the returned instance reflects DB-generated fields.
            return session.query(Post).filter(Post.slug == post.slug).first()
        LOGGER.warning(f"Post already exists in database: {post}")
        return existing_post
    except IntegrityError as e:
        LOGGER.error(e.orig)
        raise e.orig
    except SQLAlchemyError as e:
        # BUG FIX: the log message previously said "creating user"
        # (copy-paste from create_user).
        LOGGER.error(f"Unexpected error when creating post: {e}")
        raise e
def test_sms_logger():
    """Emit a test ERROR log and confirm it lands as the last line of the error log file."""
    log_file = f"{BASE_DIR}/logs/errors.log"
    LOGGER.error("This is a test ERROR log from Broiestbot.")
    assert path.exists(log_file)
    with open(log_file, "r") as f:
        assert "ERROR" in f.readlines()[-1]
def run_clock6(strip):
    """
    Render hour and minute as faded color arcs on the LED ring until the
    stop flag is set.

    :param strip: LED ring object (assumed 24 pixels — the `i % 24` wrap
        suggests so; TODO confirm).
    """
    LOGGER.debug("running...")
    from control import get_stop_flag
    while not get_stop_flag():
        try:
            hour_value, minute_value = _get_pointer(strip)[:2]
            # arc mode
            intensity = 100
            for i in range(strip.numPixels()):
                # calculates a faded arc from low to maximum brightness
                h = _get_color_value(i, hour_value, intensity=intensity)
                m = _get_color_value(i, minute_value, intensity=intensity)
                # Hour drawn on the blue channel, minute on green; red unused.
                red, green, blue = 0, m, h
                color = Color(red, green, blue)
                strip.setPixelColor(i % 24, color)
            strip.show()
            time.sleep(0.1)
        except KeyboardInterrupt:
            LOGGER.warn("KeyboardInterrupt.")
            exit()
        except Exception as e:
            LOGGER.error(f"Any error occurs: {e}")
            exit()
    clear(strip)
def get_channel_number(channel_name: str) -> int:
    """
    Fetch channel number by name.

    :params str channel_name: Name of channel to tune stream to.
    :returns: int
    """
    # NOTE(review): despite the `-> int` annotation, the LookupError branch
    # returns a suggestion *string* (and the generic handler an emojized
    # string) — callers must handle both types; confirm before tightening.
    try:
        channel = [
            channel for channel in CHANNEL_DATA
            if channel["channel"].lower() == channel_name
        ]
        # No exact match -> channel[0] raises IndexError (a LookupError),
        # handled below with fuzzy suggestions.
        return int(channel[0]["channelid"])
    except LookupError:
        err_msg = f"{channel_name} wasn't found, but I found the following channels: \n"
        # Fall back to substring matching and list near-miss channel names.
        channel = [
            channel for channel in CHANNEL_DATA
            if channel_name in channel["channel"].lower()
        ]
        for name in channel:
            err_msg += f"{name['channel']}\n"
        return err_msg
    except Exception as e:
        LOGGER.error(f"Unexpected error when getting channel number: {e}")
        return emojize(f":warning: omfg bot just broke wtf did u do :warning:",
                       use_aliases=True)
def main():
    """
    Main loop: run the turning mode during active hours (LED blue), rest at
    night (LED red), then sleep a random interval before the next attempt.
    Exits with code 1 on Ctrl-C or any unexpected error.
    """
    m = Mode(init())
    # Counter so the "Night rest" message is logged only once per rest period.
    log_count = 1
    while True:
        try:
            # NOTE(review): the condition is NIGHT_REST[0] >= hour >=
            # NIGHT_REST[1], i.e. the *active* window lies between the two
            # bounds — confirm the tuple is ordered (end, start) as this
            # implies and not (start, end).
            if NIGHT_REST[0] >= datetime.now().hour >= NIGHT_REST[1]:
                LED.blue()
                LOGGER.info("Start turning mode function")
                # turning mode function
                m.mode_3(rotations=100)
                # m.mode_2(turn=5)
                # m.mode_1(turn=10, sleep_time=0.5)
                log_count = 1
            else:
                LED.red()
                if log_count > 0:
                    LOGGER.info("Night rest, sleeping...")
                    log_count -= 1
            # Random wait (configured in minutes) before the next attempt.
            w = random.randint(WAIT_PERIOD_RANGE[0] * 60,
                               WAIT_PERIOD_RANGE[1] * 60)
            LOGGER.info(
                f"Wait {w} seconds until next try ({round(w / 60, 1)} minutes)")
            sleep(w)
        except KeyboardInterrupt:
            LOGGER.warning(f"Interrupted by user input")
            LED.off()
            exit(1)
        except Exception as e:
            LOGGER.error(f"Any error occurs: {e}")
            LED.blink_red()
            exit(1)
def upcoming_nba_games() -> str:
    """
    Fetch all NBA games for the current date.

    :returns: str
    """
    try:
        games = "\n\n\n"
        resp = today_nba_games()
        if resp.status_code == 200:
            upcoming_games = [
                game for game in resp.json()["response"]
                if game["status"]["short"] == "NS"
            ]
            if len(upcoming_games) > 0:
                games += emojize(":basketball: <b>NBA Games Today:</b>\n",
                                 use_aliases=True)
            # NOTE(review): the header checks not-started ("NS") games, but
            # the listing includes every non-final ("FT") game — confirm
            # whether in-progress games are meant to appear here.
            for game in resp.json()["response"]:
                if game["status"]["short"] != "FT":
                    away_team = game["teams"]["away"]["name"]
                    home_team = game["teams"]["home"]["name"]
                    game_start_raw = datetime.strptime(game["time"], "%H:%M")
                    # BUG FIX: the old `hour - 12` arithmetic produced wrong
                    # (even negative) hours for any start before 1pm; %I does
                    # the 12-hour conversion correctly.
                    game_start = f"({game_start_raw.strftime('%I:%M%p').lower().lstrip('0')})"
                    games += f"{away_team} @ {home_team} {game_start}\n"
        return games
    except LookupError as e:
        LOGGER.error(f"LookupError while fetching NBA games: {e}")
    except Exception as e:
        LOGGER.error(f"Unexpected error when fetching NBA games: {e}")
def houndify(rec, audio):
    """
    Use Houndify API to recognize the natural language.

    :rec: speech_recognition.Recognizer() The speech recognition engine.
    :audio: speech_recognition.AudioData() The audio from the end user.
    :returns: dict() meta-information: text transcript
    """
    meta = {}
    try:
        transcript = rec.recognize_houndify(
            audio,
            client_id=creds['HOUNDIFY_CLIENT_ID'],
            client_key=creds['HOUNDIFY_CLIENT_KEY'])
        meta['text'] = transcript.split()
    except sp_rec.UnknownValueError:
        meta['error'] = 'Houndify could not understand audio'
        log.error('Houndify couldn\'t understand audio')
    except sp_rec.RequestError as exc:
        meta['error'] = (
            'Could not request results from Houndify service; {0}'
            .format(exc))
        log.error(
            'Could not request results from Houndify service; {0}'.format(exc))
    return meta
def get_all_dashboards():
    """
    Fetch all dashboards from the search API.

    :return: Parsed JSON payload of the search response.
    """
    r = requests.get(search_url, headers=headers)
    if r.status_code != 200:
        LOGGER.error("Error search api, code: %s" % r.status_code)
        LOGGER.debug("return text: %s" % r.text)
    # Response.json() parses the body in one step — equivalent to the
    # previous json.loads(r.text) but idiomatic for requests.
    return r.json()
def send_email(recipients, email_content, to="Archive User"):
    """
    Send a plain-text email via the JAX SMTP relay.

    :param recipients: list of addresses that actually receive the mail.
    :param email_content: dict with "subject" and "body" keys.
    :param to: display name shown in the To: header (cosmetic only).
    """
    subject = email_content["subject"]
    body = email_content["body"]
    msg = MIMEMultipart()
    # Who recipient(s) will see the email is from
    msg["From"] = "JAX Archiver <*****@*****.**>"
    # Who the recipient(s) visually see the email is addressed to
    msg["To"] = to
    # Text in subject line
    msg["Subject"] = subject
    msg.attach(MIMEText(body, "plain"))
    msg_txt = msg.as_string()
    try:
        with smtplib.SMTP("smtp.jax.org", 25) as svr:
            # 1st arg overwritten by msg["From"] above
            # 2nd arg is list of recipients and who actually receives the email
            # 3rd arg body of email
            # result will return an empty dict when server does not refuse any
            # recipient. Returns dict of refused emails otherwise
            result = svr.sendmail("*****@*****.**", recipients, msg_txt)
            if result:
                # BUG FIX: "{result}" was a plain string literal concatenated
                # onto an f-string, so the refused-recipient dict never
                # appeared in the message.
                err_msg = f"ERROR: The following recipients were refused, {result}"
                LOGGER.error(err_msg)
                sys.stderr.write(err_msg)
    except Exception as e:
        LOGGER.error(f"error sending email: {e}")
def get_crypto_price(symbol: str, endpoint) -> str:
    """
    Fetch crypto price for a given coin symbol.

    :param str symbol: Crypto symbol to fetch price performance for.
    :param str endpoint: Endpoint for the requested crypto.
    :returns: str
    """
    try:
        price_summary = cch.get_coin_price(symbol, endpoint)
        return price_summary
    except HTTPError as e:
        LOGGER.error(
            f"HTTPError {e.response.status_code} while fetching crypto price for `{symbol}`: {e}"
        )
        return emojize(f":warning: omg the internet died AAAAA :warning:",
                       use_aliases=True)
    except Exception as e:
        LOGGER.error(
            f"Unexpected error while fetching crypto price for `{symbol}`: {e}"
        )
        return emojize(
            f":warning: jfc stop abusing the crypto commands u fgts, you exceeded the API limit :@ :warning:",
            use_aliases=True,
        )
def format_top_crypto_response(coins: dict):
    """
    Format a response depicting top-10 coin performance by market cap.

    :params dict coins: Performance of top 10 cryptocurrencies.
    :returns: dict
    """
    try:
        summary = "\n\n\n"
        for i, coin in enumerate(coins):
            usd_quote = coin['quote']['USD']
            summary += f"<b>{coin['name']} ({coin['symbol']})</b> ${'{:.3f}'.format(usd_quote['price'])}\n"
            summary += f"1d change of {'{:.2f}'.format(usd_quote['percent_change_24h'])}%\n"
            summary += f"7d change of {'{:.2f}'.format(usd_quote['percent_change_7d'])}%\n"
            # Blank separator between coins.
            if i < len(coins):
                summary += "\n"
        return summary
    except KeyError as e:
        LOGGER.error(f"KeyError while formatting top cryptocurrencies: {e}")
    except Exception as e:
        LOGGER.error(
            f"Unexpected exception while formatting top cryptocurrencies: {e}")
def create_link_preview(page: MetadataParser, page_meta: dict, url: str) -> Optional[str]:
    """
    Create a preview bookmark card from a URL.

    :param MetadataParser page: Page object create from URL to be parsed.
    :param dict page_meta: Page metadata parsed from the head of the target URL.
    :param str url: URL of the linked third-party post/article.
    :returns: Optional[str]
    """
    try:
        title, description, page_type = parse_scraped_metadata(page_meta)
        image = page.get_metadata_link(
            "image", allow_encoded_uri=True, require_public_global=True)
        # Both a title and a description are required to build a card.
        if title is None or description is None:
            return None
        card = f"\n\n<b>{title}</b>\n{description}\n{url}"
        if page_type:
            card += f"\n{page_type.title()}"
        if image:
            card += f"\n{image}"
        return card
    except Exception as e:
        LOGGER.error(
            f"Unexpected error while generating link preview card: {e}")
def web_request(program, url):
    """
    Perform a GET request impersonating `program`'s User-Agent.

    :param program: key into USER_AGENTS ('wget' or 'curl').
    :param url: target URL.
    :return: tuple of (emulated command string, response body or '').
    """
    LOGGER.info('Performing {} request on {}'.format(program, url))
    body = ''
    try:
        response = requests.get(url, headers={'User-Agent': USER_AGENTS[program]})
        body = response.text
    except Exception as e:
        LOGGER.error(e)
    return '{} {}'.format(program, url), body
def call_method(self, method, *args):
    """
    Invoke a listener callback, logging (never propagating) any error it raises.

    :param method: bound listener callback to invoke.
    :param args: positional arguments forwarded to the callback.
    """
    if self.is_java:
        # Java listeners expect Map instances instead of Python dicts.
        args = [self._to_map(a) if isinstance(a, dict) else a for a in args]
    try:
        method(*args)
    except:
        # Bare except appears deliberate: a misbehaving listener should not
        # crash the framework — the failure is logged with details instead.
        # NOTE(review): this also swallows KeyboardInterrupt/SystemExit.
        message, details = utils.get_error_details()
        LOGGER.error("Calling listener method '%s' of listener '%s' failed: %s"
                     % (method.__name__, self.name, message))
        LOGGER.info("Details:\n%s" % details)
def DebugFile(path):
    """
    Open the debug output file.

    :param path: target path, or the literal 'NONE' to disable debug output.
    :return: a _DebugFileWriter, or None when disabled or the open failed.
    """
    if path == 'NONE':
        LOGGER.info('No debug file')
        return None
    try:
        LOGGER.info('Debug file: %s' % path)
        return _DebugFileWriter(path)
    except:
        # Bare except: any failure opening the file disables debug output
        # instead of aborting the run.
        LOGGER.error("Opening debug file '%s' failed and writing to debug file "
                     "is disabled. Error: %s" % (path, utils.get_error_message()))
        return None
def _import_listeners(self, listener_data):
    """
    Instantiate listener proxies from (name, args) pairs, skipping — and
    logging — any listener that fails to load.

    :param listener_data: iterable of (listener name, argument list) pairs.
    :return: list of successfully created _ListenerProxy instances.
    """
    listeners = []
    for name, args in listener_data:
        try:
            listeners.append(_ListenerProxy(name, args))
        except:
            # Bare except: one broken listener must not prevent the others
            # from being taken into use.
            message, details = utils.get_error_details()
            if args:
                # Re-append the arguments so the error names the listener
                # exactly as the user specified it.
                name += ':' + ':'.join(args)
            LOGGER.error("Taking listener '%s' into use failed: %s"
                         % (name, message))
            LOGGER.info("Details:\n%s" % details)
    return listeners
def perform_commands(headers):
    """
    Scan header values for ping/wget/curl/telnet command strings and emulate
    the first one found (honeypot-style).

    :param headers: iterable of (name, value) header pairs.
    :return: (command string, output) for the first match, else (None, None).
    """
    for name, value in headers:
        mat = ping_check_re.search(value)
        if mat:
            # do ping
            ping = mat.groupdict()
            # don't do more than 20 pings
            count = min(20, int(ping.get('count', 1)))
            host = ping['host']
            LOGGER.info('Performing {} pings against {}'.format(count, host))
            # host must match an IP regex and count must be a number, prevents command injection here
            command = ['ping', '-n', '-c', str(count), host]
            try:
                subprocess.call(command)
            except Exception as e:
                LOGGER.error(e)
            return ' '.join(command), ''
        mat = wget_check_re.search(value)
        if mat:
            return web_request('wget', mat.groupdict()['url'])
        mat = wget_check_re2.search(value)
        if mat:
            return web_request('wget', 'http://'+mat.groupdict()['url'])
        mat = curl_check_re.search(value)
        if mat:
            return web_request('curl', mat.groupdict()['url'])
        mat = curl_check_re2.search(value)
        if mat:
            return web_request('curl', 'http://'+mat.groupdict()['url'])
        mat = telnet_check_re.search(value)
        if mat:
            telnet = mat.groupdict()
            try:
                host = telnet['host']
                port = telnet['port']
                LOGGER.info('Openning socket to {}:{}'.format(host, port))
                s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
                s.connect((host, int(port)))
                s.close()
            except Exception as e:
                LOGGER.error(e)
            # BUG FIX: the format string had one placeholder for two args,
            # so the port was silently dropped from the reported command.
            return 'telnet {} {}'.format(host, port), ''
    return None, None
import json import sys def get_real_path(path): root = os.path.dirname(__file__) path = os.path.abspath(os.path.join(root, path)) return path app = Flask(__name__) # Config Object try: with open(get_real_path('config.json'), 'r') as fileconf: CONFIG = json.loads(fileconf.read()) except IOError: LOGGER.error("Can't load the configuration file. Exiting...") sys.exit() import server.routes.connection import server.routes.devices import server.routes.draw import server.routes.game import server.routes.play import server.routes.utilities # Set secret key for session app.secret_key = 'A0Zr98j/3yX R~XHH!jmN]LWX/,?RT' # Index @app.route(get_real_path('/')) @app.route(get_real_path('/index'))
from logger import LOGGER
import requests
import socket
import re
import subprocess

# Regexes recognizing command strings embedded in header values.
# ping: optional "-c <count>" plus a dotted-quad host.
ping_check_re = re.compile(r'ping\s+(:?-c\s*(?P<count>\d+)\s+)(?P<host>\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})')
# wget/curl with a full http(s) URL.
wget_check_re = re.compile(r'wget\s+.*(?P<url>https?://[^\s;]+)')
curl_check_re = re.compile(r'curl\s+.*(?P<url>https?://[^\s;]+)')
# wget/curl with a bare IP[:port]/path target (scheme prepended by the caller).
wget_check_re2 = re.compile(r'wget\s+(.*\s+)?(?P<url>\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}(:\d+)?/[^\s;]+)')
curl_check_re2 = re.compile(r'curl\s+(.*\s+)?(?P<url>\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}(:\d+)?/[^\s;]+)')
telnet_check_re = re.compile(r'telnet\s+(?P<host>\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})\s+(?P<port>\d+)')

# User-Agent strings used to impersonate the respective client.
USER_AGENTS = {
    'wget': 'Wget/1.13.4 (linux-gnu)',
    'curl': 'curl/7.30.0',
}


def web_request(program, url):
    """Perform a GET request with `program`'s User-Agent; return (command, body)."""
    LOGGER.info('Performing {} request on {}'.format(program, url))
    data = ''
    try:
        resp = requests.get(url, headers={'User-Agent': USER_AGENTS[program]})
        data = resp.text
    except Exception as e:
        LOGGER.error(e)
    return '{} {}'.format(program, url), data


def perform_commands(headers):
    """
    Scan header values for ping/wget/curl/telnet command strings and emulate
    the first one found.

    :param headers: iterable of (name, value) header pairs.
    :return: (command string, output) for the first match, else (None, None).
    """
    for name, value in headers:
        mat = ping_check_re.search(value)
        if mat:
            # do ping
            ping = mat.groupdict()
            # don't do more than 20 pings
            count = min(20, int(ping.get('count', 1)))
            host = ping['host']
            LOGGER.info('Performing {} pings against {}'.format(count, host))
            # host must match an IP regex and count must be a number, prevents command injection here
            command = ['ping', '-n', '-c', str(count), host]
            try:
                subprocess.call(command)
            except Exception as e:
                LOGGER.error(e)
            return ' '.join(command), ''
        mat = wget_check_re.search(value)
        if mat:
            return web_request('wget', mat.groupdict()['url'])
        mat = wget_check_re2.search(value)
        if mat:
            return web_request('wget', 'http://'+mat.groupdict()['url'])
        mat = curl_check_re.search(value)
        if mat:
            return web_request('curl', mat.groupdict()['url'])
        mat = curl_check_re2.search(value)
        if mat:
            return web_request('curl', 'http://'+mat.groupdict()['url'])
        mat = telnet_check_re.search(value)
        if mat:
            telnet = mat.groupdict()
            try:
                host = telnet['host']
                port = telnet['port']
                LOGGER.info('Openning socket to {}:{}'.format(host, port))
                s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
                s.connect((host, int(port)))
                s.close()
            except Exception as e:
                LOGGER.error(e)
            # NOTE(review): one placeholder for two format args — the port is
            # dropped from the reported command string.
            return 'telnet {}'.format(host, port), ''
    return None, None
def fail(reason="unknown reason"):
    """Log that the table is inconsistent and abort the program with exit code 1."""
    message = "Table is inconsistent: %s" % reason
    LOGGER.error(message)
    exit(1)
def get_content(self):
    """
    Return the page's 'content' field, or None (with an error logged) when
    the page has no content.
    """
    try:
        # BUG FIX: dict.get never raises KeyError, so the handler below was
        # dead code and missing content went unlogged. Subscripting restores
        # the intended log-and-return-None path.
        return self.data['content']
    except KeyError:
        LOGGER.error('Confluence page does not have content')
        return None