def do_tweet(debug, tweet=None, script=None):
    """Post a tweet.

    If *script* is given, post it verbatim. Otherwise post *tweet* (a
    PendingTweet row); when *tweet* is None, take the oldest pending tweet,
    generating one first if the queue is empty.

    In debug mode nothing is sent to Twitter (and pending rows are kept);
    the message is only logged/printed.
    """
    if script is not None:
        msg = script
        if not debug:
            api = get_twitter_api()
            # fix: was `api.update_status(status=tweet.content)` —
            # `tweet` is None on this path; the script text is what
            # should be posted (the original also immediately overwrote
            # `msg = script` with a dead f-string mentioning `tweet`).
            api.update_status(status=script)
            msg = f'tweeted: {script}'
        log.info(msg)
        print(msg)
        return

    if tweet is None:
        # Oldest pending tweet first (FIFO).
        tweet = PendingTweet.query.order_by(PendingTweet.added_at.asc()).first()
        if not tweet:
            msg = 'has no pending tweet. generate..'
            print(msg)
            log.warn(msg)
            do_generate(debug, 1)
            tweet = PendingTweet.query.order_by(PendingTweet.added_at.asc()).first()

    if not debug:
        api = get_twitter_api()
        api.update_status(status=tweet.content)
        # Posted successfully: drop it from the pending queue.
        db.session.delete(tweet)
        db.session.commit()

    msg = f'tweeted: {tweet}'
    log.info(msg)
    print(msg)
def store_mutable_data_version( conf, data_id, ver ):
    """
    Locally store the version of a piece of mutable data,
    so we can ensure that its version is incremented on
    subsequent puts.

    conf: config dict with a 'metadata' directory path (loaded from
          config.get_config() when None)
    data_id: identifier of the mutable datum
    ver: version value to persist (written with str formatting)

    Return True if stored
    Return False if not
    """
    if conf is None:
        conf = config.get_config()

    if conf is None:
        log.warning("No config found; cannot store version for '%s'" % data_id)
        return False

    metadata_dir = conf['metadata']
    if not os.path.isdir( metadata_dir ):
        log.warning("No metadata directory found; cannot store version of '%s'" % data_id)
        return False

    # NOTE(review): "\x2f" is itself "/", so this replace is a no-op;
    # a literal backslash sequence (r"\x2f") was presumably intended — confirm.
    serialized_data_id = data_id.replace("/", "\x2f").replace('\0', "\\0")
    version_file_path = os.path.join( metadata_dir, serialized_data_id + ".ver")

    try:
        with open( version_file_path, "w+" ) as f:
            f.write("%s" % ver )
            return True
    except Exception as e:
        # fix: Python 2 `except Exception, e` is a SyntaxError on Python 3;
        # also use log.warning for consistency with the calls above.
        log.warning("Failed to store version of '%s' to '%s'" % (data_id, version_file_path))
        return False
def delete_mutable_data_version( conf, data_id ):
    """
    Locally delete the version of a piece of mutable data.

    conf: config dict with a 'metadata' directory path (loaded from
          config.get_config() when None)
    data_id: identifier of the mutable datum whose version file to remove

    Return True if deleted.
    Return False if not
    """
    if conf is None:
        conf = config.get_config()

    if conf is None:
        log.warning("No config found; cannot store version for '%s'" % data_id)
        return False

    metadata_dir = conf['metadata']
    if not os.path.isdir( metadata_dir ):
        log.warning("No metadata directory found; cannot store version of '%s'" % data_id)
        return False

    # NOTE(review): "\x2f" is itself "/", so this replace is a no-op;
    # a literal backslash sequence (r"\x2f") was presumably intended — confirm.
    serialized_data_id = data_id.replace("/", "\x2f").replace('\0', "\\0")
    version_file_path = os.path.join( metadata_dir, serialized_data_id + ".ver")

    try:
        os.unlink( version_file_path )
        return True
    except Exception as e:
        # fix: Python 2 `except Exception, e` is a SyntaxError on Python 3;
        # log.warning for consistency with the calls above.
        log.warning("Failed to remove version file '%s'" % (version_file_path))
        return False
async def on_command_error(self, ctx, exception):
    """Global command-error handler: reply with an embed describing the
    error, and let users react with ❔ within 15s to expand details."""
    log.info(exception)
    # Replace well-known exception types with a friendlier message string
    # (note: `exception` is rebound to a str in the first two branches).
    if isinstance(exception, commands.errors.MissingPermissions):
        exception = f'Sorry {ctx.message.author.name}, you don\'t have permissions to do that!'
    elif isinstance(exception, commands.errors.CheckFailure):
        exception = f'Sorry {ctx.message.author.name}, you don\'t have the necessary roles for that!'
    elif isinstance(exception, TimeoutError):
        # Timeouts are only logged — no embed is sent.
        log.warn(f'TimeoutError: {exception}')
        return
    # Red embed quoting the error inside a css code block.
    error_embed = discord.Embed(title='', timestamp=datetime.utcnow(), description=f'> ```css\n> {exception}```', color=discord.Color.from_rgb(200, 0, 0))
    error_embed.set_author(name='Woops!', icon_url=str(ctx.message.guild.icon_url))
    error_embed.set_footer(text=str(type(exception).__name__))
    error_message = await ctx.send(embed=error_embed)
    await error_message.add_reaction('❔')

    def check_reaction(reaction, user):
        # Accept a ❔ reaction from anyone except the bot itself.
        # NOTE(review): does not restrict to this message or the invoking
        # user — any ❔ anywhere the bot sees will match; confirm intended.
        return user != ctx.bot.user and str(reaction.emoji) == '❔'

    try:
        reaction, user = await ctx.bot.wait_for('reaction_add', timeout=15.0, check=check_reaction)
    except asyncio.TimeoutError:
        # Nobody asked for details in time: just clear the prompt reaction.
        await error_message.remove_reaction('❔', ctx.bot.user)
    else:
        await error_message.remove_reaction('❔', ctx.bot.user)
        # __doc__ of the exception class serves as the "details" text; for
        # the str-rebound branches above this is str.__doc__.
        if exception.__doc__:
            error_embed.add_field(name='Details', value=exception.__doc__, inline=False)
        #if exception.__cause__:
        #    error_embed.add_field(name='Cause', value=exception.__cause__ , inline=False)
        await error_message.edit(embed=error_embed)
def delete_mutable_data_version(conf, data_id):
    """
    Locally delete the version of a piece of mutable data.

    conf: config dict with a 'metadata' directory path (loaded from
          config.get_config() when None)
    data_id: identifier of the mutable datum whose version file to remove

    Return True if deleted.
    Return False if not
    """
    if conf is None:
        conf = config.get_config()

    if conf is None:
        log.warning("No config found; cannot store version for '%s'" % data_id)
        return False

    metadata_dir = conf['metadata']
    if not os.path.isdir(metadata_dir):
        log.warning(
            "No metadata directory found; cannot store version of '%s'" % data_id)
        return False

    # NOTE(review): "\x2f" is itself "/", so this replace is a no-op;
    # a literal backslash sequence (r"\x2f") was presumably intended — confirm.
    serialized_data_id = data_id.replace("/", "\x2f").replace('\0', "\\0")
    version_file_path = os.path.join(metadata_dir, serialized_data_id + ".ver")

    try:
        os.unlink(version_file_path)
        return True
    except Exception as e:
        # fix: Python 2 `except Exception, e` is a SyntaxError on Python 3;
        # log.warning for consistency with the calls above.
        log.warning("Failed to remove version file '%s'" % (version_file_path))
        return False
async def crawler(client, url_queue, archive):
    """Worker coroutine: fetch URLs from *url_queue* and append webarchive
    item dicts to archive['items'].

    Runs forever; the caller is expected to cancel it once the queue drains.
    Failures for a single URL are logged and the task moves on.
    """
    while True:
        url = await url_queue.get()
        try:
            log.debug("Crawling url: {}".format(url))
            # fix: copy the shared header template instead of mutating the
            # module-level ACCEPT_HEADERS dict in place — concurrent crawler
            # tasks were clobbering each other's Referer value.
            headers = dict(ACCEPT_HEADERS)
            headers['Referer'] = archive['top']
            response = await client.get(url, headers=headers)
            if response.status != 200:
                # Raised here so the shared except/log path below handles it.
                raise Exception(
                    "got response code other than 200 for url: {}".format(url))
            else:
                data = await response.read()
                content_type, params = parse_header(
                    response.headers['content-type'])
                # Optionally rewrite the recorded URL to a different domain.
                if CHANGE_DOMAIN_FROM and CHANGE_DOMAIN_TO:
                    wrUrl = url.replace(CHANGE_DOMAIN_FROM, CHANGE_DOMAIN_TO)
                else:
                    wrUrl = url
                item = {
                    "WebResourceData": data,
                    "WebResourceMIMEType": content_type,
                    "WebResourceURL": wrUrl
                }
                if 'charset' in params:
                    item['WebResourceTextEncodingName'] = params['charset']
                archive['items'].append(item)
        except Exception as exc:
            log.warn('Exception {}:'.format(exc), exc_info=True)
        finally:
            url_queue.task_done()
def is_word_type(file_path):
    """Return True iff *file_path* is an existing file whose suffix marks it
    as a Microsoft Word document (.doc / .docx)."""
    if not os.path.isfile(file_path):
        log.warn('s3_local_file not exists: %s' % file_path)
        return False
    # Suffix-based detection only; content is not inspected.
    if file_path.endswith(('.doc', '.docx')):
        log.debug('file_type:ms_word,file_path:%s' % file_path)
        return True
    return False
async def crawler(client, url_queue, archive):
    """Worker coroutine: fetch URLs from *url_queue*, append webarchive item
    dicts to archive['items'], and enqueue sub-resources (images, scripts,
    stylesheets, CSS url() references) discovered in HTML/CSS responses.

    archive['seen'] is used as the de-duplication set. Runs forever; single
    URL failures are logged and skipped.
    """
    while True:
        url = await url_queue.get()
        try:
            log.debug(url)
            # fix: copy the shared header template instead of mutating the
            # module-level ACCEPT_HEADERS dict in place — concurrent crawler
            # tasks were clobbering each other's Referer value.
            headers = dict(ACCEPT_HEADERS)
            headers['Referer'] = archive['top']
            response = await client.get(url, headers=headers)
            if response.status != 200:
                log.warn('BAD RESPONSE: {}: {}'.format(response.status, url))
            else:
                data = await response.read()
                content_type, params = parse_header(response.headers['content-type'])
                item = {
                    "WebResourceData": data,
                    "WebResourceMIMEType": content_type,
                    "WebResourceURL": url
                }
                if 'charset' in params:
                    item['WebResourceTextEncodingName'] = params['charset']
                # TODO: attempt to reproduce the way HTTP headers are stored (NSKeyedArchiver?)
                archive['items'].append(item)
                archive['seen'][url] = True
                if 'text/html' == content_type:
                    dom = html.fromstring(data)
                    patterns = ['//img/@src', '//img/@data-src',
                                '//img/@data-src-retina', '//script/@src',
                                "//link[@rel='stylesheet']/@href"]
                    for path in patterns:
                        for attr in dom.xpath(path):
                            log.debug("{}: {} {}".format(path, url, attr))
                            # fix: bind the discovered link to a new name —
                            # the original rebound `url`, so every subsequent
                            # attr was resolved against the previously
                            # discovered link instead of this page's URL.
                            link = unquote(urljoin(url, urldefrag(attr)[0]))
                            if link not in archive['seen']:
                                archive['seen'][link] = True
                                await url_queue.put(link)
                elif 'text/css' == content_type:
                    # TODO: nested @import and better path inference
                    for attr in getUrls(parseString(data)):
                        log.debug(attr)
                        # Same rebinding fix as the HTML branch above.
                        link = unquote(urljoin(url, urldefrag(attr)[0]))
                        if link not in archive['seen']:
                            archive['seen'][link] = True
                            await url_queue.put(link)
        except Exception as exc:
            log.warn('Exception {}:'.format(exc), exc_info=True)
        finally:
            url_queue.task_done()
async def connect_redis(loop=None):
    """Connect to a Redis server.

    REDIS_SERVER has the form "host:port[,name=value...]" where the optional
    parameters are db (int), password (str) and ssl (bool). Unparseable
    parameters are logged and skipped.

    loop: asyncio event loop; defaults to get_event_loop().
    Returns the awaited create_redis() connection.
    """
    if not loop:
        loop = get_event_loop()
    parts = REDIS_SERVER.split(",")
    address = tuple(parts[0].split(":"))
    rest = parts[1:]
    types = {'db': int, 'password': str, 'ssl': bool}
    params = {'loop': loop}
    for param in rest:
        try:
            name, value = param.split('=', 1)
            if name in types:
                # NOTE(review): bool("false") is True — any non-empty ssl
                # value enables SSL; confirm the intended encoding.
                params[name] = types[name](value)
        except ValueError:
            log.warning("Could not parse %s" % param)
            # fix: the original had a bare `next` here — an expression
            # referencing the builtin, i.e. a silent no-op; `continue`
            # is what was intended.
            continue
    return await create_redis(address, **params)
def load_mutable_data_version(conf, name, data_id, try_remote=True):
    """
    Get the version field of a piece of mutable data from the local
    metadata cache.

    conf: config dict with a 'metadata' directory path (loaded from
          config.get_config() when None)
    name: owner name (unused in the visible local-cache path)
    data_id: identifier of the mutable datum
    try_remote: kept for interface compatibility (unused here)

    Returns the integer version on success, None when no readable local
    version exists.
    """
    # try to get the current, locally-cached version
    if conf is None:
        conf = config.get_config()

    metadata_dir = None
    if conf is not None:
        metadata_dir = conf.get('metadata', None)

    if metadata_dir is not None and os.path.isdir(metadata_dir):
        # find the version file for this data
        # NOTE(review): "\x2f" is itself "/", so this replace is a no-op;
        # a literal backslash sequence (r"\x2f") was presumably intended.
        serialized_data_id = data_id.replace("/", "\x2f").replace('\0', "\\0")
        version_file_path = os.path.join(metadata_dir, serialized_data_id + ".ver")
        if os.path.exists(version_file_path):
            try:
                with open(version_file_path, "r") as f:
                    ver_txt = f.read()
                # success!
                return int(ver_txt.strip())
            except ValueError:
                # not an int (fix: Python 2 `except ValueError, ve` syntax)
                log.warning("Not an integer: '%s'" % version_file_path)
            except Exception as e:
                # can't read
                # fix: the original format string had two placeholders but
                # only one argument, raising TypeError inside the handler.
                log.warning("Failed to read '%s': %s" % (version_file_path, e))
    # No locally cached version found.
    return None
def part_upload(filepath: str, key: str) -> bool:
    """Upload *filepath* to S3 under *key* via multipart upload, pushing
    50 MB parts through a 10-process pool.

    Returns True on success; on any failure logs the exception, aborts the
    multipart upload, and returns False.
    """
    create_res: dict = s3_client.create_multipart_upload(Bucket=bucket, Key=key)
    upload_id = create_res['UploadId']
    log.info(upload_id)
    pool = Pool(processes=10)
    # fix: the original reused the name `res` for this (shadowing the API
    # response) and collected AsyncResults in a *set*, whose arbitrary
    # iteration order could complete the upload with parts out of
    # PartNumber order (S3 rejects unordered part lists). A list keeps
    # submission order, which is ascending by part number.
    pending: list = []
    parts: dict = {'Parts': []}
    try:
        # 'rb' suffices — the file is only read.
        with open(filepath, 'rb') as f:
            part_number: int = 1
            while True:
                data = f.read(50 * 1024 * 1024)  # 50mb each part
                if not data:
                    break
                pending.append(
                    pool.apply_async(func=upload,
                                     args=(data, upload_id, key, part_number)))
                part_number += 1
        pool.close()
        pool.join()
        for item in pending:
            # item.get() re-raises any exception from the worker, which
            # then falls into the abort path below.
            # NOTE(review): assumes upload() returns the S3 part dict
            # ({'ETag': ..., 'PartNumber': ...}) — confirm.
            parts['Parts'].append(item.get())
        s3_client.complete_multipart_upload(Bucket=bucket,
                                            Key=key,
                                            UploadId=upload_id,
                                            MultipartUpload=parts)
        return True
    except Exception as e:
        log.exception(e)
        log.warning("Abort %s" % abort_part_upload(key, upload_id))
        return False
def load_mutable_data_version( conf, name, data_id, try_remote=True ):
    """
    Get the version field of a piece of mutable data from the local
    metadata cache.

    conf: config dict with a 'metadata' directory path (loaded from
          config.get_config() when None)
    name: owner name (unused in the visible local-cache path)
    data_id: identifier of the mutable datum
    try_remote: kept for interface compatibility (unused here)

    Returns the integer version on success, None when no readable local
    version exists.
    """
    # try to get the current, locally-cached version
    if conf is None:
        conf = config.get_config()

    metadata_dir = None
    if conf is not None:
        metadata_dir = conf.get('metadata', None)

    if metadata_dir is not None and os.path.isdir( metadata_dir ):
        # find the version file for this data
        # NOTE(review): "\x2f" is itself "/", so this replace is a no-op;
        # a literal backslash sequence (r"\x2f") was presumably intended.
        serialized_data_id = data_id.replace("/", "\x2f").replace('\0', "\\0")
        version_file_path = os.path.join( metadata_dir, serialized_data_id + ".ver")
        if os.path.exists( version_file_path ):
            try:
                with open( version_file_path, "r" ) as f:
                    ver_txt = f.read()
                # success!
                return int( ver_txt.strip() )
            except ValueError:
                # not an int (fix: Python 2 `except ValueError, ve` syntax)
                log.warning("Not an integer: '%s'" % version_file_path )
            except Exception as e:
                # can't read
                # fix: the original format string had two placeholders but
                # only one argument, raising TypeError inside the handler.
                log.warning("Failed to read '%s': %s" % (version_file_path, e))
    # No locally cached version found.
    return None
def store_mutable_data_version(conf, data_id, ver):
    """
    Locally store the version of a piece of mutable data,
    so we can ensure that its version is incremented on
    subsequent puts.

    conf: config dict with a 'metadata' directory path (loaded from
          config.get_config() when None)
    data_id: identifier of the mutable datum
    ver: version value to persist (written with str formatting)

    Return True if stored
    Return False if not
    """
    if conf is None:
        conf = config.get_config()

    if conf is None:
        log.warning("No config found; cannot store version for '%s'" % data_id)
        return False

    metadata_dir = conf['metadata']
    if not os.path.isdir(metadata_dir):
        log.warning(
            "No metadata directory found; cannot store version of '%s'" % data_id)
        return False

    # NOTE(review): "\x2f" is itself "/", so this replace is a no-op;
    # a literal backslash sequence (r"\x2f") was presumably intended — confirm.
    serialized_data_id = data_id.replace("/", "\x2f").replace('\0', "\\0")
    version_file_path = os.path.join(metadata_dir, serialized_data_id + ".ver")

    try:
        with open(version_file_path, "w+") as f:
            f.write("%s" % ver)
            return True
    except Exception as e:
        # fix: Python 2 `except Exception, e` is a SyntaxError on Python 3;
        # log.warning for consistency with the calls above.
        log.warning("Failed to store version of '%s' to '%s'" %
                    (data_id, version_file_path))
        return False
def run(self, no_board=False):
    """Main game loop: exchange die rolls and moves with the external
    client over stdin/stdout, playing this player's best legal move set
    each turn.

    no_board: when True, skip emitting board/status update signals.
    """
    # I'm the 2nd player: the opponent has already moved, so apply their
    # opening moves before entering the loop.
    if self.my_id == 2:
        dice = read_line()
        moves = read_moves()
        # Update coin positions using these moves
        self.opponent.make_moves(moves, self.player)
        if not no_board:
            self.update_board.emit(self.coins)
            self.update_status.emit(self.opponent, moves)

    # Track whether the 2nd player is repeating
    opponent_repeating = False

    while True:
        log.warn(self.dump_state())

        # 2nd player is not repeating, so it is my turn!
        if not opponent_repeating:
            # Roll the die
            write_output("<THROW>")

            # Read die rolls from client (stdin)
            die_rolls = read_die()
            log.info("Received Roll: %s", die_rolls)

            # Save state so every candidate permutation is evaluated from
            # the same starting position.
            saved_positions = {
                name: coin.rel_pos
                for (name, coin) in self.coins.items()
            }

            # Store: [(possible_moves, benefit)]
            all_possible_moves = []

            # Consider all possible unique permutations of moves!
            for possible_rolls in set(permutations(die_rolls)):
                # Find all moves possible for this permutation of the rolls
                possible_moves, benefit = self.player.get_multiple_moves(
                    possible_rolls, self.opponent)

                # Use percent_complete & profits of each move
                # Add it to list
                if possible_moves:
                    all_possible_moves.append(
                        (possible_moves,
                         benefit + self.player.percent_complete -
                         self.opponent.percent_complete))

                # Reset state — get_multiple_moves mutates the coins'
                # relative positions while simulating.
                for name, coin in self.coins.items():
                    coin.rel_pos = saved_positions[name]

            # if no move possible
            if not all_possible_moves:
                moves = "NA"
            else:
                # Only keep possible moves of maximal length
                maximal = max(all_possible_moves, key=lambda t: len(t[0]))
                max_len = len(maximal[0])
                all_valid_moves = filter(lambda t: len(t[0]) == max_len,
                                         all_possible_moves)

                # Sort all_valid_moves based on benefit; play the best one
                moves = sorted(all_valid_moves, key=lambda t: t[1])[-1][0]

            # Play finally decided moves
            self.player.make_moves(moves, self.opponent)
            if not no_board:
                self.update_board.emit(self.coins)
                self.update_status.emit(self.player, moves)

            # Convert to a format that the external client understands.
            # NOTE(review): when moves == "NA" this joins the two characters
            # into "N<next>A" — presumably the client tolerates this; confirm.
            moves = "<next>".join(moves)

            # Send the moves to client (stdout)
            log.info("Sending Moves: %s", moves)
            write_output(moves)
        else:
            opponent_repeating = False

        # Now read in opponent's dice rolls & moves
        dice = read_line()

        # If the moves I played didn't result in a REPEAT
        # The opponent will now get another chance
        if dice != "REPEAT":
            moves = read_moves()
            # Opponent made a move that resulted in a REPEAT!
            # So the next turn won't be mine
            if moves[-1] == 'REPEAT':
                opponent_repeating = True
                # Remove "REPEAT" from moves list
                moves.pop()
            self.opponent.make_moves(moves, self.player)
            if not no_board:
                self.update_board.emit(self.coins)
                self.update_status.emit(self.opponent, moves)
if dice != "REPEAT": moves = read_moves() # Opponent made a move that resulted in a REPEAT! # So the next turn won't be mine if moves[-1] == 'REPEAT': opponent_repeating = True # Remove "REPEAT" from moves list moves.pop() self.opponent.make_moves(moves, self.player) if not no_board: self.update_board.emit(self.coins) self.update_status.emit(self.opponent, moves) if __name__ == '__main__': # Test Board states g = LudoGame() log.warn(g.dump_state()) st = """ Players: RED, YELLOW Coins: Y3_2, R3_4, Y1_8, R1_0, Y2_0, R0_9, R2_18, Y0_0 """ g.load_state(st) log.warn(g.dump_state())