def fetch_existing_user(room_name: str, user: User, message: Message) -> Optional[ChatangoUser]:
    """
    Look up previously persisted metadata for a chatting user.

    :param str room_name: Chatango room.
    :param User user: User responsible for triggering command.
    :param Message message: User submitted message.

    :returns: Optional[ChatangoUser]
    """
    try:
        # Without an IP there is nothing unique enough to match on.
        if not message.ip:
            return None
        matching_users = session.query(ChatangoUser).filter(
            ChatangoUser.username == user.name.lower(),
            ChatangoUser.chatango_room == room_name,
            ChatangoUser.ip == message.ip,
        )
        return matching_users.first()
    except SQLAlchemyError as e:
        LOGGER.warning(
            f"SQLAlchemyError occurred while fetching metadata for {user.name}: {e}"
        )
    except Exception as e:
        LOGGER.warning(
            f"Unexpected error while attempting to save data for {user.name}: {e}"
        )
def get_full_gif_metadata(image: dict) -> str:
    """
    Build a formatted caption for a randomly selected gif.

    :param dict image: Dictionary containing a single gif response.

    :returns: str
    """
    try:
        raw_url = image["urls"]["sd"]
        image_url = raw_url.replace("-mobile", "").replace(".mp4", "-small.gif")
        likes = image["likes"]
        views = image["views"]
        tags = ", #".join(image["tags"])
        caption = f"\n\n\n{image_url}\n:thumbsup: Likes {likes}\n:eyes: Views {views}\n#{tags}"
        return emojize(caption, use_aliases=True)
    except Exception as e:
        LOGGER.warning(
            f"Unexpected error while fetching nsfw image for id `{image['id']}`: {e}"
        )
        return emojize(
            f":warning: dude u must b a freak cuz that just broke bot :warning:",
            use_aliases=True,
        )
def create_user(session: Session, user: User) -> User:
    """
    Create a new user if username isn't already taken.

    :param session: SQLAlchemy database session.
    :type session: Session
    :param user: New user record to create.
    :type user: User

    :return: Optional[User]
    :raises IntegrityError.orig: on constraint violation.
    :raises SQLAlchemyError: on any other database failure.
    """
    try:
        existing_user = (
            session.query(User).filter(User.username == user.username).first()
        )
        if existing_user is None:
            session.add(user)  # Add the user
            session.commit()  # Commit the change
            LOGGER.success(f"Created user: {user}")
            # Re-query so the returned record reflects DB-generated fields.
            return session.query(User).filter(User.username == user.username).first()
        # FIX: return the already-fetched record instead of issuing a
        # redundant second query (consistent with create_post); also fixes
        # the garbled "Users already exists" log message.
        LOGGER.warning(f"User already exists in database: {existing_user}")
        return existing_user
    except IntegrityError as e:
        LOGGER.error(e.orig)
        raise e.orig
    except SQLAlchemyError as e:
        LOGGER.error(f"Unexpected error when creating user: {e}")
        raise e
def get_boxoffice_data(movie: Movie) -> Optional[str]:
    """
    Summarize IMDB box office performance for a given film.

    :param Movie movie: IMDB movie object.

    :returns: Optional[str]
    """
    try:
        box_office = movie.data.get("box office", None)
        if box_office:
            parts = []
            budget = box_office.get("Budget", None)
            opening_week = box_office.get("Opening Weekend United States", None)
            gross = box_office.get("Cumulative Worldwide Gross", None)
            if budget:
                parts.append(f"BUDGET {budget}.")
            if opening_week:
                parts.append(f"OPENING WEEK {opening_week}.")
            if gross:
                parts.append(f"CUMULATIVE WORLDWIDE GROSS {gross}.")
            return " ".join(parts)
        LOGGER.warning(f"No IMDB box office info found for `{movie}`.")
    except KeyError as e:
        LOGGER.warning(
            f"KeyError when fetching box office info for `{movie}`: {e}")
    except Exception as e:
        LOGGER.error(
            f"Unexpected error when fetching box office info for `{movie}`: {e}"
        )
def create_post(session: Session, post: Post) -> Post:
    """
    Create a post.

    :param session: SQLAlchemy database session.
    :type session: Session
    :param post: Blog post to be created.
    :type post: Post

    :return: Post
    :raises IntegrityError.orig: on constraint violation.
    :raises SQLAlchemyError: on any other database failure.
    """
    try:
        existing_post = session.query(Post).filter(Post.slug == post.slug).first()
        if existing_post is None:
            session.add(post)  # Add the post
            session.commit()  # Commit the change
            LOGGER.success(
                f"Created post {post} published by user {post.author.username}"
            )
            return session.query(Post).filter(Post.slug == post.slug).first()
        else:
            LOGGER.warning(f"Post already exists in database: {post}")
            return existing_post
    except IntegrityError as e:
        LOGGER.error(e.orig)
        raise e.orig
    except SQLAlchemyError as e:
        # FIX: message previously said "creating user" (copy-paste from create_user).
        LOGGER.error(f"Unexpected error when creating post: {e}")
        raise e
def main(ctx):
    """
    Load datafactory configuration and verify the postgres Docker container runs.

    :param ctx: CLI context object; receives the parsed config and db credentials.
    """
    # GET DATAFACTORY CONFIGURATION
    with open('config.json') as json_data_file:
        config = json.load(json_data_file)

    # Test if postgres docker container is running.
    container_name = config['db_docker']['container_name']
    client = docker.from_env(timeout=10800)
    try:
        pg_container = client.containers.get(container_name)
        LOGGER.info('Found postgres container: %s' % container_name)
    # FIX: was a bare `except:` which also swallowed SystemExit/KeyboardInterrupt.
    except Exception:
        LOGGER.warning('Unable to find db container: %s' % container_name)
        exit()

    # Docker db credentials.
    dbconfig = DbConfig(pg_container, config['db_docker']['postgres_port'],
                        config['db_docker']['postgres_user'],
                        config['db_docker']['postgres_pwd'])
    ctx.obj = {
        'cfgjson': config,
        'dbconfig': dbconfig,
    }
def main():
    """Run the turning-device control loop until interrupted or an error occurs."""
    m = Mode(init())
    # Emit the "night rest" log only once per rest period (counts down to 0).
    log_count = 1
    while True:
        try:
            # NOTE(review): reads as NIGHT_REST[0] >= hour >= NIGHT_REST[1] being
            # the ACTIVE window for turning — confirm tuple ordering against config.
            if NIGHT_REST[0] >= datetime.now().hour >= NIGHT_REST[1]:
                LED.blue()
                LOGGER.info("Start turning mode function")
                # turning mode function
                m.mode_3(rotations=100)
                # m.mode_2(turn=5)
                # m.mode_1(turn=10, sleep_time=0.5)
                log_count = 1
            else:
                LED.red()
                if log_count > 0:
                    LOGGER.info("Night rest, sleeping...")
                    log_count -= 1
            # Sleep a random interval within the configured range (minutes -> seconds).
            w = random.randint(WAIT_PERIOD_RANGE[0] * 60, WAIT_PERIOD_RANGE[1] * 60)
            LOGGER.info(
                f"Wait {w} seconds until next try ({round(w / 60, 1)} minutes)")
            sleep(w)
        except KeyboardInterrupt:
            LOGGER.warning(f"Interrupted by user input")
            LED.off()
            exit(1)
        except Exception as e:
            LOGGER.error(f"Any error occurs: {e}")
            LED.blink_red()
            exit(1)
def get_top_crypto() -> str:
    """
    Fetch top 10 crypto coin performance.

    :returns: str
    """
    try:
        params = {"start": "1", "limit": "10", "convert": "USD"}
        headers = {
            "Accepts": "application/json",
            "X-CMC_PRO_API_KEY": COINMARKETCAP_API_KEY,
        }
        resp = requests.get(COINMARKETCAP_LATEST_ENDPOINT,
                            params=params,
                            headers=headers)
        # FIX: a non-200 response previously fell through and returned None
        # despite the declared `str` return type; raise so the existing
        # HTTPError handler produces the fallback message instead.
        resp.raise_for_status()
        coins = resp.json().get("data")
        return format_top_crypto_response(coins)
    except HTTPError as e:
        LOGGER.warning(
            f"HTTPError while fetching top coins: {e.response.content}")
        return emojize(
            f":warning: F**K the bot broke :warning:",
            use_aliases=True,
        )
    except Exception as e:
        LOGGER.warning(f"Unexpected exception while fetching top coins: {e}")
        return emojize(
            f":warning: F**K the bot broke :warning:",
            use_aliases=True,
        )
def load_filter():
    """
    Load the traffic filter definition from FILTER_FILE.

    :return: parsed filter structure, or None when the file is missing/invalid.
    """
    try:
        # FIX: `yaml.load(open(...))` leaked the file handle and omitted the
        # Loader argument (a TypeError on PyYAML >= 6, unsafe before that).
        with open(FILTER_FILE) as f:
            return yaml.safe_load(f)
    except Exception:
        LOGGER.warning('missing filter file or format incorrect')
        return None
def load_config():
    """
    Load the application configuration from CONFIG_FILE.

    :return: parsed config structure, or None when the file is missing/invalid.
    """
    try:
        # FIX: `yaml.load(open(...))` leaked the file handle and omitted the
        # Loader argument (a TypeError on PyYAML >= 6, unsafe before that).
        with open(CONFIG_FILE) as f:
            return yaml.safe_load(f)
    except Exception:
        LOGGER.warning('missing config file or format incorrect')
        return None
def load_traffic():
    """
    Load recorded traffic data from TRAFFIC_FILE.

    :return: parsed traffic structure, or None when the file is missing/invalid.
    """
    try:
        # FIX: `yaml.load(open(...))` leaked the file handle and omitted the
        # Loader argument (a TypeError on PyYAML >= 6, unsafe before that).
        with open(TRAFFIC_FILE) as f:
            return yaml.safe_load(f)
    except Exception:
        LOGGER.warning('missing traffic file or format incorrect')
        return None
def save_filter(ffilter):
    """
    Write the filter structure to FILTER_FILE as YAML.

    :param ffilter: filter structure to persist.
    :return: True on success, False on write error.
    """
    try:
        # FIX: the file handle was never closed; a context manager guarantees
        # the data is flushed to disk.
        with open(FILTER_FILE, 'w+') as f:
            yaml.dump(ffilter, f)
        return True
    except Exception:
        LOGGER.warning('error writing filter file')
        return False
def save_traffic(traffic):
    """
    Write traffic data to TRAFFIC_FILE as YAML.

    :param traffic: traffic structure to persist.
    :return: True on success, False on write error.
    """
    try:
        # FIX: the file handle was never closed; a context manager guarantees
        # the data is flushed to disk.
        with open(TRAFFIC_FILE, 'w+') as f:
            yaml.dump(traffic, f)
        return True
    except Exception:
        LOGGER.warning('error writing traffic')
        return False
def save_config(config):
    """
    Write the configuration to CONFIG_FILE as YAML.

    :param config: configuration structure to persist.
    :return: True on success, False on write error.
    """
    try:
        # FIX: the file handle was never closed; a context manager guarantees
        # the data is flushed to disk.
        with open(CONFIG_FILE, 'w+') as f:
            yaml.dump(config, f)
        return True
    except Exception:
        LOGGER.warning('error writing configuration')
        return False
def on_connection_closed(self, _unused_connection, reason):
    """
    Handle an unexpected or intentional connection closure.

    During a deliberate shutdown the ioloop is stopped; otherwise the
    closure is logged and a reconnect is triggered.
    """
    self._channel = None
    if not self._closing:
        LOGGER.warning('Connection closed, reconnect necessary: %s', reason)
        self.reconnect()
    else:
        self._connection.ioloop.stop()
def _context_list(self):
    """Fetch the list of contexts from the BB fetcher, warning when none exist."""
    endpoint = "{}/contexts/".format(self._org_url)
    contexts = self._request(endpoint)
    if not contexts:
        LOGGER.warning("No contexts available")
    return contexts
def run_callback():
    """Schedule the coroutine on the loop; route scheduling failures to the future."""
    try:
        task = asyncio.ensure_future(coro(*args, **kargs), loop=self.loop)
        task.add_done_callback(got_result)
    except Exception as exc:
        if not future.set_running_or_notify_cancel():
            # The future was already cancelled; just record the problem.
            LOGGER.warning("Exception on lost future: ", exc_info=True)
        else:
            future.set_exception(exc)
def on_channel_closed(self, channel, reason):
    """
    Handle the channel being closed by the broker.

    :param pika.channel.Channel channel: The closed channel
    :param Exception reason: why the channel was closed
    """
    LOGGER.warning('Channel %i was closed: %s', channel, reason)
    self._channel = None
    if self._stopping:
        return
    self._connection.close()
def ban_user(room: Room, message: Message) -> None:
    """
    Ban and delete chat history of a user.

    :param Room room: Chatango room object.
    :param Message message: User submitted message.

    :returns: None
    """
    LOGGER.warning(f"BANNED user: username={message.user.name} ip={message.ip}")
    offender = message.user
    room.clear_user(offender)
    room.ban_user(offender)
def generateWasm(self, namespace, bc, OUT_FOLDER, fileName, debug=True, generateOnlyBc=False):
    """
    Write LLVM bitcode to disk and compile it to a WASM binary plus a WAT dump.

    :param namespace: logical namespace passed through for logging context.
    :param bc: LLVM bitcode bytes to compile.
    :param OUT_FOLDER: output directory for the generated artifacts.
    :param fileName: base name (without extension) for the generated files.
    :param debug: when True, log the resulting WASM size and sha256.
    :param generateOnlyBc: when True, skip compilation and report on the bitcode only.
    :returns: tuple (sha256 hexdigest, byte size, artifact name, wat/bc name),
        or None when compilation fails (the traceback is logged).
    """
    llFileName = "%s/%s" % (OUT_FOLDER, fileName)
    if generateOnlyBc:
        # Hash and size of the raw bitcode only; nothing is compiled.
        hashvalue = hashlib.sha256(bc)
        return hashvalue.hexdigest(), len(
            bc), "%s.bc" % (fileName, ), "%s.bc" % (fileName, )
    # Persist the bitcode to a temp file that outlives this context (persist=True).
    with ContentToTmpFile(name="%s.bc" % llFileName, content=bc, ext=".bc",
                          persist=True) as TMP_WASM:
        tmpWasm = TMP_WASM.file
        try:
            # Compile the bitcode object into the .wasm binary.
            finalObjCreator = ObjtoWASM(namespace, debug=debug)
            finalObjCreator(args=["%s.wasm" % (llFileName, ), tmpWasm], std=None)
            # Produce the textual .wat disassembly alongside the binary.
            wat = WASM2WAT(namespace, debug=debug)
            wat(std=None, args=[
                "%s.wasm" % (llFileName, ), "%s.wat" % (llFileName, )
            ])
            # NOTE(review): file handle is opened without being closed — left
            # as-is here; consider a `with` block.
            finalStream = open("%s.wasm" % (llFileName, ), 'rb').read()
            hashvalue = hashlib.sha256(finalStream)
            if debug:
                LOGGER.warning(
                    namespace, "%s: WASM SIZE %s" % (
                        namespace,
                        len(finalStream),
                    ))
                LOGGER.warning(
                    namespace, "%s: WASM SHA %s" % (
                        namespace,
                        hashvalue.hexdigest(),
                    ))
            return hashvalue.hexdigest(), len(finalStream), "%s.wasm" % (
                llFileName, ), "%s.wat" % (llFileName, )
        except Exception as e:
            # Compilation failed: log the full traceback and fall through (None).
            LOGGER.error(namespace, traceback.format_exc())
def publish_message(self, message):
    """
    Publish a JSON-serialized message to the configured exchange.

    Increments the delivery counter and records it in self._deliveries so
    publisher confirms can be matched later.

    :param message: JSON-serializable payload (UUIDs handled by UUIDEncoder).
    """
    if self._channel is None or not self._channel.is_open:
        # FIX: the format string was missing its %s placeholder, so the
        # logging call raised a string-formatting error instead of logging.
        LOGGER.warning('Channel is not open, could not publish event %s',
                       json.dumps(message, cls=UUIDEncoder))
        return
    properties = pika.BasicProperties(content_type='application/json')
    self._channel.basic_publish(
        self.EXCHANGE, self.ROUTING_KEY,
        json.dumps(message, ensure_ascii=False, cls=UUIDEncoder), properties)
    self._message_number += 1
    self._deliveries.append(self._message_number)
    LOGGER.info('Published message # %i', self._message_number)
def on_connection_closed(self, _unused_connection, reason):
    """
    Handle the connection to RabbitMQ being closed.

    :param pika.connection.Connection connection: The closed connection obj
    :param Exception reason: exception representing reason for loss of connection.
    """
    self._channel = None
    if not self._stopping:
        LOGGER.warning('Connection closed, reopening in 5 seconds: %s', reason)
        self._connection.ioloop.call_later(5, self._connection.ioloop.stop)
    else:
        self._connection.ioloop.stop()
def max_tasks(self, args, total_tasks=0):
    """
    Determine how many concurrent jobs to run.

    Precedence: CLI --jobs, then the config's "jobs" entry, then the CPU
    count. When fewer tasks than cores exist, the task count wins. A job
    count above the core count is allowed but logged as a warning.
    """
    cpu_count = multiprocessing.cpu_count()
    jobs = self.config.getint("jobs")
    if args.jobs != 0:
        jobs = args.jobs
    if jobs == 0:
        jobs = cpu_count
    if 0 < total_tasks < cpu_count:
        return total_tasks
    if jobs > cpu_count:
        log.warning(
            f"Requesting {jobs} concurrent processing jobs is higher than current {cpu_count} core count "
        )
    return jobs
def run_advent(stripe):
    """
    Drive the advent/xmas LED animation loop until the global stop flag is set.

    :param stripe: LED strip object handed to the animation helpers.
    """
    LOGGER.debug("running...")
    from control import get_stop_flag
    # Emit the out-of-season warning only once (i counts down to 0).
    i = 1
    while not get_stop_flag():
        year = datetime.datetime.now().year
        if __is_advent_period(year):
            __advent_cycle(stripe)
        else:
            while i > 0:
                LOGGER.warning(f"Wrong period to show xmas/advent animation, "
                               f"it\'s {time.strftime('%A, %d.%B %Y')}!")
                i -= 1
            # Idle animation with the all-off color while out of season.
            theater_chase(stripe, Color(ZERO[0], ZERO[1], ZERO[2]))
    # Stop flag set: blank the strip before returning.
    clear(stripe)
def anonymize_csv_wrapper(input_csv, output_folder, anon_csv_name, hash_df, dataset):
    """
    Anonymize the first column of a CSV and write it to the output folder.

    :param input_csv: path of the CSV to anonymize.
    :param output_folder: directory for the anonymized output (created if missing).
    :param anon_csv_name: file name for the anonymized CSV.
    :param hash_df: hashing scheme selector, 'sha224' or 'md5'.
    :param dataset: unused here; kept for interface compatibility.
    :raises ValueError: when hash_df is not a supported scheme.
    """
    if not os.path.exists(output_folder):
        try:
            os.makedirs(output_folder)
            LOGGER.info('Output directory %s is created' % output_folder)
        except OSError:
            LOGGER.warning('Creation of the output directory %s failed' % output_folder)
    # FIX: an unsupported hash_df previously left hash_method unbound and
    # crashed later with NameError; fail fast with a clear error instead.
    if hash_df == 'sha224':
        hash_method = 'sha3'
    elif hash_df == 'md5':
        hash_method = 'md5'
    else:
        raise ValueError('Unsupported hash method: %s' % hash_df)
    output_path = os.path.join(output_folder, anon_csv_name)
    anonymize_csv(input_csv, output_path, columns=[0], method=hash_method)
    LOGGER.info('Anonymized csv is saved in %s' % output_path)
def set_language(lng=EN):
    """
    Set chat language, default = English.

    :param lng: language key (i.e. 'de')
    :return: None (set up global chat language)
    """
    global language
    if lng in assignment:
        language = lng
    else:
        LOGGER.warning(
            f"Language key \'{lng}\' not found, set default chat language!")
        language = EN
    LOGGER.info(
        f"Chat language was set to '{(assignment[language].get(NAME))}'.")
def check_file(self, file: str):
    """
    Check whether a source file is eligible for processing.

    C/C++ sources are always accepted. LLVM IR (.ll) files are accepted only
    when their target triple is wasm32-unknown-unknown. Everything else is
    rejected.

    :param file: path to the candidate file.
    :returns: True when the file can be processed, False otherwise.
    """
    program_name = file.split("/")[-1].split(".")[0]
    if file.endswith(".c") or file.endswith(".cpp"):
        return True
    if file.endswith(".ll"):
        # FIX: the original used `if not content.find(needle)`, which is True
        # only when the triple sits at index 0 (find returns -1 — truthy —
        # when absent), inverting the intended check. Also close the handle.
        with open(file, 'r') as fh:
            content = fh.read()
        if 'triple = "wasm32-unknown-unknown"' not in content:
            LOGGER.warning(
                program_name,
                "LL file triple is not targeting wasm32-unknown-unknown")
            return False
        return True
    return False
def row_to_dict(row, column_list):
    """
    Convert a query result row (tuple) into a dict.

    :param row: cursor row
    :param column_list: column name mapping
    :return: dict
    """
    LOGGER.debug("Convert query row to dict")
    if len(row) == len(column_list):
        result = dict(zip(column_list, row))
    else:
        # Length mismatch: log it and return an empty mapping.
        LOGGER.warning("WARNING: query row %d items, %d expected", len(row),
                       len(column_list))
        result = {}
    LOGGER.debug("row_to_dict returning dict length %d", len(result))
    return result
def setup_backend(self, db, table):
    """
    Configure the storage backend from a connection string.

    :param db: connection string, e.g. 'mongodb://...' or 'sqlite:///...'.
    :param table: table/collection name to write tweets to.
    """
    db_type = db.split(":")[0]
    if db_type == "mongodb":
        self.client = MongoClient(db)
        self.db = self.client.twitter
        self.table = self.db[table]
        self.backend = db_type
    elif db_type == "sqlite":
        self.db = dataset.connect(db)
        self.table = self.db[table]
        self.backend = db_type
    else:
        # Unable to parse connection string. FIX: the original message was
        # garbled ("is no a not supported back end").
        l.warning("{} is not a supported back end\nConnection string: {}".
                  format(db_type, db))
        sys.exit(1)
def __getitem__(self, index):
    """
    Sample `self.num_frames` crops from one random frame of video `index`.

    Falls back to a random other video when the indexed video has no frames.

    :param index: video index within the LMDB store.
    :returns: tensor of stacked, transformed crops.
    """
    with self.env.begin(write=False) as txn:
        # Frame count is stored as a UTF-8 string under a per-video key.
        length = int(
            txn.get(format_for_lmdb(index, 'length')).decode('utf-8'))
    if length < 1:
        LOGGER.warning(
            f'There is no frames in a video with an index {index}, '
            'so a random video will be used instead.')
        # Retry with a uniformly random substitute video.
        return self.__getitem__(random.randint(0, len(self) - 1))
    # Pick one frame uniformly at random from the video.
    selected_frame_index = random.randint(0, length - 1)
    with self.env.begin(write=False) as txn:
        img = get_img_from_lmdb(txn, index, selected_frame_index)
    transformed_frame = self.transform(img).unsqueeze_(0)
    # Crop the same frame num_frames times, stack, and apply the tensor transform.
    crops = self.tensor_transform(
        torch.cat([
            self.crop(transformed_frame) for i in range(self.num_frames)
        ]))
    return crops
#!/usr/bin/env python # -*- coding: utf-8 -*- from logger import LOGGER # apparraitra comme une ligne info dans le log LOGGER.info('Hello') # apparraitra comme une ligne warning dans le log dans le log LOGGER.warning('Testing %s', 'foo')