class Respondent(Resource):
    """REST resource exposing respondent documents stored in MongoDB."""
    #method_decorators = [auth]
    # If you want apply to some method use: {'post': [auth],'put': [auth]}

    def __init__(self):
        self.log = Logger()
        self.db = DB().client

    def get(self, name=None):
        """Return one respondent matched by firstName, or the whole collection.

        Responds 400 when nothing matches.
        """
        if name:
            typeGet = "GET ONE"
            respondents = self.db.respondents.find_one({"firstName": name})
        else:
            typeGet = "GET ALL"
            # Materialize the cursor: Cursor.count() was removed in PyMongo 4,
            # and a cursor object is always truthy, so a list is the only
            # reliable way to check for an empty result here.
            respondents = list(self.db.respondents.find({}))
        # Truthiness covers both cases: None from find_one, [] from find.
        if respondents:
            return jsonify(code=200, type=typeGet, data=dumps(respondents))
        else:
            return None, 400

    def post(self):
        """Demo endpoint exercising every log level; echoes the posted form."""
        self.log.info('This a example info')
        self.log.debug('This a example debug')
        self.log.silly(request.form)
        self.log.warn('This is a example warn')
        self.log.error('This is a example error')
        return request.form
class GameQueue:
    """In-memory FIFO matchmaking queue that assigns waiting players to games."""

    def __init__(self, min_size, max_size):
        self.log = Logger(log_level="DEBUG").get_logger()
        self.players = OrderedDict()  # insertion-ordered waiting players
        self.game_dict = {}           # player -> game_id once matched
        self.max_game_size = max_size
        self.min_game_size = min_size

    def add_player(self, player):
        """Append *player* to the back of the waiting queue."""
        self.players[player] = None

    def build_game(self, game_id):
        """Move up to max_game_size players (FIFO order) into *game_id*.

        Does nothing unless at least min_game_size players are waiting.
        """
        # TODO: add validation somewhere here to ensure players are still active
        current_size = len(self.players)
        if current_size >= self.min_game_size:
            for _ in range(min(self.max_game_size, current_size)):
                # popitem(last=False) pops the oldest entry -> FIFO fairness
                player = self.players.popitem(last=False)[0]
                self.game_dict[player] = game_id
                self.log.debug("Adding {} to game.".format(player))

    def queue_status(self, player):
        """Return the player's assigned game_id, or None if still queued."""
        # TODO: more fine grained checks
        # dict.get replaces the old `in self.game_dict.keys()` double lookup.
        return self.game_dict.get(player)
def crawl(module, url_queue):
    """Drain *url_queue*, spawning one CrawlThread per url handled by *module*.

    Exits once the queue stays empty past the timeout and only the main
    thread and the queue-feeder thread remain alive.
    """
    from queue import Empty  # local import: only needed for the timeout path

    Logger.info('Start Crawler')
    while True:
        try:
            url = url_queue.get(timeout=config.NESTED_CRAWL_TIMEOUT)
        except Empty:
            # Narrowed from a bare except: only the get() timeout should
            # trigger the shutdown check (a bare except also swallowed
            # SystemExit/KeyboardInterrupt).
            # Why 2 ? because its need to deduct by the main thread and queue thread,
            # You can comment out the enumerate() line to see what is going on
            #Logger.debug(str(enumerate()))
            if activeCount() <= 2:
                Logger.info('Break crawl')
                break
            else:
                Logger.debug('There are ' + str(activeCount() - 2) + ' threads left')
                continue
        #Spawn a new thread immediately after getting the url
        thread = Thread(target=crawl_thread, args=(url, module), name='CrawlThread')
        thread.start()
    Logger.info('Crawl done')
def crawl():
    """Crawl the Google landing page and push its title and meta content downstream."""
    Logger.debug('Hello google')
    # Parse the page into a soup object once, then reuse its <head> section.
    url = 'http://www.google.com'
    head = get_b_soup(url).head
    # Hand both extracted values to the output pipeline.
    Queues.send_to_output(head.title.text)
    Queues.send_to_output(url + head.meta['content'])
def crawl():
    """Fetch www.google.com and forward its title and meta content to the output queue."""
    Logger.debug('Hello google')
    page = get_b_soup('http://www.google.com')
    title_text = page.head.title.text
    meta_content = page.head.meta['content']
    # Emit the extracted pieces through the shared output pipeline.
    Queues.send_to_output(title_text)
    Queues.send_to_output('http://www.google.com' + meta_content)
def collect(url_queue):
    """Scan the Amazon base page for product links and feed them into *url_queue*."""
    Logger.debug('Hello amazon')
    # Parse the landing page once.
    page = get_b_soup(base_path)
    product_href = re.compile('/gp/product/')
    # REQUIRED: every discovered product url is handed off via url_queue.
    for anchor in page.findAll('a', href=product_href):
        url_queue.put(base_path + anchor['href'])
def crawl_thread(url, module):
    """Fetch *url*, parse it (using the module's optional ENCODING), and run module.crawl."""
    Logger.debug('Thread start')
    # Modules may declare an ENCODING attribute; default to autodetection.
    encoding = getattr(module, 'ENCODING', None)
    try:
        soup = get_b_soup(url, encoding=encoding)
        module.crawl(soup)
    except Exception:
        # Narrowed from a bare except so SystemExit/KeyboardInterrupt still
        # propagate; any crawl failure is logged and the thread ends early.
        Logger.error('Crawl error url: ' + url)
        Logger.error(traceback.format_exc())
        return
    Logger.debug('Thread done')
def crawl_thread(url, module):
    """Fetch *url*, parse it (using the module's optional ENCODING), and run module.crawl."""
    Logger.debug('Thread start')
    # Modules may declare an ENCODING attribute; default to autodetection.
    encoding = getattr(module, 'ENCODING', None)
    try:
        soup = get_b_soup(url, encoding=encoding)
        module.crawl(soup)
    except Exception:
        # Narrowed from a bare except so SystemExit/KeyboardInterrupt still
        # propagate; any crawl failure is logged and the thread ends early.
        Logger.error('Crawl error url: ' + url)
        Logger.error(traceback.format_exc())
        return
    Logger.debug('Thread done')
def execute_output():
    """Consume results from the output queue and log them until the queue fails."""
    Logger()  # initialise the logging backend for this process
    Logger.debug('Start Output Process')
    while True:
        try:
            result = Queues.get_output()
            Logger.info(result)
            #Your output logic go there
        except Exception:
            # Narrowed from a bare except: any queue failure (closed pipe,
            # empty timeout, ...) ends the loop as before, but SystemExit and
            # KeyboardInterrupt now propagate instead of being swallowed.
            break
    Logger.debug('End Output Process')
def execute_output():
    """Consume results from the output queue and log them until the queue fails."""
    Logger()  # initialise the logging backend for this process
    Logger.debug('Start Output Process')
    while True:
        try:
            result = Queues.get_output()
            Logger.info(result)
            #Your output logic go there
        except Exception:
            # Narrowed from a bare except: any queue failure (closed pipe,
            # empty timeout, ...) ends the loop as before, but SystemExit and
            # KeyboardInterrupt now propagate instead of being swallowed.
            break
    Logger.debug('End Output Process')
class Respondent(Resource):
    """REST resource exposing respondent documents stored in MongoDB."""
    #method_decorators = [auth]
    # If you want apply to some method use: {'post': [auth],'put': [auth]}

    def __init__(self):
        self.log = Logger()
        self.db = DB().client

    def get(self, name=None):
        """Return all respondents, or only those whose firstName equals *name*.

        Responds 400 when nothing matches.
        """
        self.log.info('example info')
        self.log.debug('example debug')
        self.log.silly(name)
        self.log.warn('example warn')
        self.log.error('example error')
        match = {}
        if name:
            match = {"firstName": name}
        # BUG FIX: find() returns a cursor, which is always truthy, so the
        # 400 branch below was unreachable. Materializing to a list makes
        # the emptiness check meaningful.
        respondents = list(self.db.respondents.find(match))
        if respondents:
            return jsonify(code=200, data=dumps(respondents))
        else:
            return None, 400
def crawl(module, url_queue):
    """Drain *url_queue*, spawning one CrawlThread per url handled by *module*.

    Exits once the queue stays empty past the timeout and only the main
    thread and the queue-feeder thread remain alive.
    """
    from queue import Empty  # local import: only needed for the timeout path

    Logger.info('Start Crawler')
    while True:
        try:
            url = url_queue.get(timeout=config.NESTED_CRAWL_TIMEOUT)
        except Empty:
            # Narrowed from a bare except: only the get() timeout should
            # trigger the shutdown check (a bare except also swallowed
            # SystemExit/KeyboardInterrupt).
            # Why 2 ? because its need to deduct by the main thread and queue thread,
            # You can comment out the enumerate() line to see what is going on
            #Logger.debug(str(enumerate()))
            if activeCount() <= 2:
                Logger.info('Break crawl')
                break
            else:
                Logger.debug('There are ' + str(activeCount() - 2) + ' threads left')
                continue
        #Spawn a new thread immediately after getting the url
        thread = Thread(target=crawl_thread, args=(url, module), name='CrawlThread')
        thread.start()
    Logger.info('Crawl done')
class QueueSystem:
    """SQLite-backed matchmaking queue: players are queued, claimed into
    games, and released back when a game fails to start."""

    def __init__(self):
        # NOTE(review): connection is created per-instance and never closed
        # here — presumably the instance lives for the process; confirm.
        self.db = sqlite3.connect('queue.db')
        self.log = Logger(log_level="DEBUG").get_logger()

    def build_game(self, min_players, max_players, game_id):
        """Claim up to *max_players* unclaimed players (oldest first) into
        *game_id*, but only if at least *min_players* are waiting."""
        cursor = self.db.cursor()
        # Unclaimed players, oldest queue_time first (FIFO fairness).
        cursor.execute("select player_id, queue_time, claimed, in_game, game_id from queue where claimed is 0 "
                       "order by queue_time")
        results = cursor.fetchall()
        queue_count = len(results)
        # Actual game size: everyone waiting, capped at max_players.
        player_count = min(queue_count, max_players)
        if queue_count >= min_players:
            for x in range(0, player_count):
                player_id = results[x][0]
                # Mark the row claimed and record which game (and how big)
                # it belongs to; player_count is later used by start_game.
                cursor.execute("update queue set claimed=?, game_id=?, player_count=? where player_id=?",
                               (1, game_id, player_count, player_id))
                self.log.debug("{} is being added to game_id {}".format(player_id, game_id))
            self.db.commit()

    def build_db(self):
        """Create the queue table (one-time setup)."""
        cursor = self.db.cursor()
        cursor.execute("create table queue(player_id text, queue_time int, claimed boolean, in_game boolean, "
                       "game_id int, player_count int)")
        self.db.commit()

    def dequeue_player(self, player_id):
        """Remove *player_id* from the queue entirely."""
        cursor = self.db.cursor()
        cursor.execute("delete from queue where player_id=?", (player_id,))
        self.log.debug("Removing player_id {} from the queue.".format(player_id))
        self.db.commit()

    def queue_player(self, player_id):
        """Insert *player_id* into the queue (no-op if already queued)."""
        current_time = time.time()
        cursor = self.db.cursor()
        # Guard against duplicate rows for the same player.
        cursor.execute("select player_id from queue where player_id=?", (player_id,))
        result = cursor.fetchone()
        if result is None:
            # claimed=0, in_game=0, no game assignment yet.
            cursor.execute("insert into queue values(?, ?, 0, 0, NULL, NULL)", (player_id, current_time))
            self.db.commit()
            self.log.debug("Queueing player_id {} at {}".format(player_id, current_time))

    def start_game(self, game_id):
        """Check whether every player claimed into *game_id* has confirmed
        (in_game set) and the roster size matches the recorded player_count;
        if not, release all of the game's players back into the queue."""
        cursor = self.db.cursor()
        cursor.execute("SELECT * from queue where game_id=?", (game_id,))
        game_ready = True
        player_list = cursor.fetchall()
        player_count = len(player_list)
        player_id_list = []
        # Row layout: 0=player_id, 3=in_game, 4=game_id, 5=player_count.
        for player in player_list:
            if player[3] == 0:
                game_ready = False
                self.log.debug("Game not ready because player_id {} has not confirmed the game".format(player[0]))
            elif player_count < player[5]:
                game_ready = False
                self.log.debug("Game not ready because {} does not match the required count of {}"
                               .format(player_count, player[5]))
            else:
                player_id_list.append(player[0])
        if game_ready:
            self.log.debug("game_id {} is ready to be started. players in game: {}"
                           .format(game_id, str(player_id_list)))
        else:
            # Game failed to assemble: unclaim everyone so they return to
            # the front of the queue (their original queue_time is kept).
            cursor.execute("SELECT * from queue where game_id=?", (game_id,))
            player_list = cursor.fetchall()
            for player in player_list:
                cursor.execute("update queue set claimed=0, game_id=NULL, player_count=NULL where player_id=?",
                               (player[0],))
                self.db.commit()
                self.log.debug("player_id {} is removed from game_id {} and placed back in the queue"
                               .format(player[0], player[4]))