Example #1
 def get_arena_teams(self, battlegroup, realm, site, get_characters=False, \
     ladders=[2,3,5], max_pages=None):
     '''Returns a list of arena teams as team objects. Setting get_characters
     to True will cause teams, their characters and their guilds to be
     downloaded. This cascading effect is very slow and should be used
     with caution.
     
     '''
     all_teams = []
     
     for ladder_number in ladders:
         try:
             source = self._download_url( \
                 WoWSpyderLib.get_arena_url(battlegroup, realm, site, \
                 ladder_number=ladder_number))
         except Exception, e:
             log.warning("Couldn't get arena page for ladder " + 
                 str(ladder_number) + ", continuing. ERROR: " + str(e))
             continue
         
         if not max_pages: 
             try:
                 max_pages = WoWSpyderLib.get_max_pages(source)
             except AttributeError, e:
                 # cflewis | 2009-04-22 | This means that
                 # this arena will be skipped, but it's better than
                 # crashing
                 max_pages = 0
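For orientation, a minimal usage sketch of get_arena_teams. The spider instance name and the battlegroup, realm and ladder arguments below are placeholders, not taken from the source.

# `spider` stands in for whatever object exposes get_arena_teams; the
# battlegroup, realm and ladder values are illustrative only.
teams = spider.get_arena_teams(u"Vindication", u"Cho'gall", u"us",
    get_characters=False, ladders=[2, 3, 5], max_pages=1)
for team in teams:
    log.debug("Found arena team: " + team.name)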
Example #2
    def _get_character_achievements(self, name, realm, site):
        urls = WoWSpyderLib.get_character_achievement_urls(name, realm, site)
        achievements = []

        for url in urls:
            source = self._download_url(url)
            achievements.append(self._parse_character_achievements( \
                StringIO.StringIO(source), name, realm, site))

        return WoWSpyderLib.merge(achievements)
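WoWSpyderLib.merge itself is not shown in these excerpts. Judging only by how it is called here (a list of per-page result lists goes in, one combined list comes out), a minimal stand-in could look like the sketch below; this is an assumption about its behaviour, not the library's actual implementation.

def merge(list_of_lists):
    # Hypothetical stand-in for WoWSpyderLib.merge: flatten the
    # per-page result lists into a single list.
    merged = []
    for sublist in list_of_lists:
        merged.extend(sublist)
    return merged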
Example #3
 def _get_character_statistics(self, name, realm, site):
     urls = WoWSpyderLib.get_character_statistics_urls(name, realm, site)
     statistics = []
     
     for url in urls:
         source = self._download_url(url)
         statistics.append(self._parse_character_statistics( \
             StringIO.StringIO(source), name, realm, site))
         
     return WoWSpyderLib.merge(statistics)
Example #4
    def _parse_guild_characters(self, name, realm, site):
        """Page through a guild, creating characters."""
        character_list = []

        log.debug(name + " " + realm + ": Downloading guild page")

        source = self._download_url( \
            WoWSpyderLib.get_guild_url(name, realm, site, page=1))

        character_list.append(self._parse_guild_file(StringIO.StringIO(source), site))

        return WoWSpyderLib.merge(character_list)
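Despite the "Page through a guild" docstring, only page 1 is fetched above. For comparison, here is a hedged sketch of what paging could look like, reusing WoWSpyderLib.get_max_pages from Example #1; the extra pages and the method name are assumptions, not code from the project.

def _parse_guild_characters_paged(self, name, realm, site):
    # Hypothetical paging variant, modelled on the arena-ladder loop in
    # Example #1. Assumes the guild roster XML carries the same page count
    # that get_max_pages() reads from the arena pages.
    character_list = []
    source = self._download_url(
        WoWSpyderLib.get_guild_url(name, realm, site, page=1))
    character_list.append(self._parse_guild_file(StringIO.StringIO(source), site))

    try:
        max_pages = WoWSpyderLib.get_max_pages(source)
    except AttributeError:
        max_pages = 1

    for page in range(2, max_pages + 1):
        source = self._download_url(
            WoWSpyderLib.get_guild_url(name, realm, site, page=page))
        character_list.append(self._parse_guild_file(StringIO.StringIO(source), site))

    return WoWSpyderLib.merge(character_list)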
Example #5
 def get_team(self, name, realm, site, size=None, get_characters=False, cached=False):
     """Returns a team object. Setting get_characters to True will
     cause characters in the team to be created at the same time. This is
     slower, but likely what you will want.
     
     Setting cached to True will return the cached version of the team, 
     if you're sure it's already in the database.
     
     """
     team = self._session.query(Team).get((name, realm, site))
     
     if cached and team:
         return team
     
     if not team and not size:
         raise NameError("No team on that PK, need size to create new team.")
         
     log.debug("Getting team...")
         
     # cflewis | 2009-04-02 | If downloading fails, the whole team
     # couldn't be found, so the exception should propagate up.
     source = self._download_url(\
         WoWSpyderLib.get_team_url(name, realm, site, size))
     team = self._parse_team(StringIO.StringIO(source), site, get_characters=get_characters)
     
     return team
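A usage sketch for get_team; `prober` and the team details are placeholders.

# First call creates (or refreshes) the team; size is required when the
# team is not in the database yet. Values are illustrative.
team = prober.get_team(u"Some Team", u"Cho'gall", u"us", size=5,
    get_characters=True)

# Later calls can short-circuit to the database row.
team = prober.get_team(u"Some Team", u"Cho'gall", u"us", cached=True)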
Example #6
    def get_guild_rank(self, guild_name, realm, site, character_name):
        """Returns the rank of a character (not specified by object)
        in a guild.
        
        """
        source = self._download_url( \
            WoWSpyderLib.get_guild_url(guild_name, realm, site, page=1))

        guild_rank_search = re.search("name=\"" + character_name + \
            "\".*rank=\"(\d*)\"", unicode(source, "utf-8"))
        if guild_rank_search:
            return int(guild_rank_search.group(1))

        raise IOError("No character in that guild")
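Because the method raises IOError when the character is not on the roster, callers will usually wrap it; a short sketch with placeholder names.

try:
    rank = prober.get_guild_rank(u"Some Guild", u"Cho'gall", u"us", u"Somecharacter")
except IOError:
    rank = None  # character is not in that guild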
Example #7
 def get_character(self, name, realm, site, cached=False, force_refresh=False):
     """Return a character object. This only stubs the guild, which means
     the guild won't be populated with characters."""
     log.debug("Getting character " + name + "...")
     
     character = self._session.query(Character).get((name, realm, site))
     
     # cflewis | 2009-04-11 | Check if a character is actually updated
     # on the armory. If not, return the database version anyway.
     if character and (cached or not character.is_updated_on_armory()) \
             and not force_refresh:
         return character
     
     source = self._download_url(\
         WoWSpyderLib.get_character_sheet_url(name, realm, site))
     character = self._parse_character(StringIO.StringIO(source), site)
         
     return character
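A usage sketch showing the cache-related paths; the instance and character names are placeholders.

# Default: return the database row, refreshing only if the armory reports
# a newer version of the character.
character = prober.get_character(u"Somecharacter", u"Cho'gall", u"us")

# Trust the database outright, or force a re-download, respectively.
character = prober.get_character(u"Somecharacter", u"Cho'gall", u"us", cached=True)
character = prober.get_character(u"Somecharacter", u"Cho'gall", u"us", force_refresh=True)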
Example #8
    def get_guild(self, name, realm, site, get_characters=False, cached=False):
        """Get a guild. Setting get_characters=False will disable the
        behavior that causes the guild characters to also be created. You
        may want to do this for speed increases.
        
        """
        if name is None or name == "": 
            return None

        guild = self._session.query(Guild).get((name, realm, site))

        if cached and guild:
            return guild

        # cflewis | 2009-04-02 | If the downloading fails, the whole guild
        # couldn't be found, so the exception should propagate up.
        source = self._download_url(\
            WoWSpyderLib.get_guild_url(name, realm, site))
        guild = self._parse_guild(StringIO.StringIO(source), site, get_characters=get_characters)

        return guild
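A usage sketch; the guild and realm names are placeholders.

# Fast path: a stub guild without its characters.
guild = prober.get_guild(u"Some Guild", u"Cho'gall", u"us", get_characters=False)

# Slow path: also spiders every character in the guild.
guild = prober.get_guild(u"Some Guild", u"Cho'gall", u"us", get_characters=True)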
Example #9
    def last_modified_on_armory(self):
        # cflewis | 2009-04-11 | It's safe to cache because there's no way
        # the lifetime of a character object will exceed that of an armory
        # refresh.
        try:
            if self._last_modified_on_armory is not None:
                return self._last_modified_on_armory
        except AttributeError:
            pass

        # Only hit the armory when there is no cached value yet.
        downloader = XMLDownloader.XMLDownloader()
        source = downloader.download_url(self.url)

        try:
            armory_date_string = re.search("lastModified=\"(.*?)\"", source).group(1)
        except Exception, e:
            log.debug("Couldn't find last modified, returning what I had")
            return self.last_modified

        self._last_modified_on_armory = \
            WoWSpyderLib.convert_last_modified_to_datetime(armory_date_string)
        log.debug("Saved last modified on armory as " + str(self._last_modified_on_armory))
        return self._last_modified_on_armory
Example #10
 def get_item(self, item_id, cached=True):
     """Returns an item object.
     
     Setting cached to True will return the cached version of the item, 
     if you're sure it's already in the database. This will fall through
     if the item wasn't found. Because items are immutable, caching
     is on by default.
     
     """
     item = self._session.query(Item).get(item_id)
     
     if cached and item:
         return item
         
     log.debug("Getting item...")
         
     # cflewis | 2009-04-02 | If downloading fails, the whole item
     # couldn't be found, so the exception should propagate up.
     source = self._download_url(WoWSpyderLib.get_item_url(item_id))
     item = self._parse_item(StringIO.StringIO(source))
     
     return item
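A usage sketch; the item ID is illustrative and `prober` is a placeholder for the object that exposes get_item.

# Items are immutable, so the cached row is preferred by default.
item = prober.get_item(12345)

# Force a fresh download and re-parse of the item XML.
item = prober.get_item(12345, cached=False)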
Example #11
 def url(self):
     return WoWSpyderLib.get_team_url(self.name, self.realm, self.site, \
             self.size)
Example #12
 def testGetEUArenaURLNonUnicode(self):
     eu_url = WoWSpyderLib.get_arena_url(self.eu_battlegroup, self.eu_realm, "eu")
     self.assertTrue(re.match("http://eu.wowarmory", eu_url))  
Example #13
                else:
                    guild_name = guild.name
        
        # cflewis | 2009-04-02 | Last modified continues to be weird and
        # I can't track this bug down!
        last_modified = None
        
        log.debug("Guilds done, working on last modified date...")

        try:
            last_modified_string = character_node.attributes["lastModified"].value
        except KeyError, e:
            log.warning("Couldn't get last modified date. ERROR: " + str(e))
            last_modified = None
        else:
            last_modified = WoWSpyderLib.convert_last_modified_to_datetime(last_modified_string)
            log.debug("Last modified date is " + str(last_modified))
            if last_modified.year < 2008:
                log.warning("Last modified year was broken, fixing it to this year")
                # cflewis | 2009-03-30 | The Armory has been returning strange
                # years intermittently, not replicable when I manually visit
                # the page. I'll set the year to what the current year is.
                last_modified = last_modified.replace(year=datetime.datetime.now().year)
            else:
                log.debug("Last modified year is " + str(last_modified.year) + " so continuing")
        
        log.debug("Character done, getting items...")
        
        items = []
        
        for i in range(0, 19):
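The year fix above relies on datetime.replace, which returns a new datetime rather than mutating in place; the result therefore has to be assigned back. A minimal illustration:

import datetime

d = datetime.datetime(1970, 3, 30, 12, 0)
d.replace(year=2009)      # result is discarded, d is unchanged
d = d.replace(year=2009)  # d is now datetime.datetime(2009, 3, 30, 12, 0)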
Example #14
 def testGetUSArenaURL(self):
     us_url = WoWSpyderLib.get_arena_url(self.us_battlegroup, \
         self.us_realm, u"us")
     self.assertTrue(re.match("http://www.wowarmory", us_url))
Example #15
             log.debug(battlegroup + " " + realm + \
                 ": Downloading arena page " + str(page) + " of " \
                 + str(max_pages))
             
             try:
                 source = self._download_url( \
                     WoWSpyderLib.get_arena_url(battlegroup, realm, site, page=page, \
                         ladder_number=ladder_number))
             except Exception, e:
                 log.warning("Couldn't get arena page, continuing... ERROR: " + str(e))
                 continue
             
             teams = self._parse_arena_file(StringIO.StringIO(source), site, get_characters=get_characters)
             all_teams.append(teams)
             
     return WoWSpyderLib.merge(all_teams)
         
 def _parse_arena_file(self, xml_file_object, site, get_characters=False):
     """Parse the XML of an arena page"""
     xml = minidom.parse(xml_file_object)
     team_nodes = xml.getElementsByTagName("arenaTeam")
     teams = []
     
     for team_node in team_nodes:
         name = team_node.attributes["name"].value
         realm = team_node.attributes["realm"].value
         size = team_node.attributes["size"].value
         
         try:
             team = self._tp.get_team(name, realm, site, size, get_characters=get_characters)
         except Exception, e:
Example #16
 def _get_character_talents(self, name, realm, site):
     source = self._download_url(\
         WoWSpyderLib.get_character_talents_url(name, realm, site))
     talents = self._parse_character_talents(StringIO.StringIO(source))
         
     return talents
Example #17
    def get_url_page(self, page_number):
        log.debug("Returning URL for " + self.name + "," + self.realm + "," + self.site)

        return WoWSpyderLib.get_guild_url(self.name, self.realm, self.site, \
            page=page_number)
Example #18
 def url(self):
     return WoWSpyderLib.get_character_sheet_url(self.name, self.realm, self.site)