def fetch_stock():
    """Fetch the Shenzhen Stock Exchange quote page for stock 000875 and
    push a Growl notification summarizing the quote table.

    Side effects: performs one HTTP GET, then sends a Growl notification
    to localhost via netgrowl. Returns None.
    """
    url = ("http://www.szse.cn/szseWeb/common/szse/webservice/zqhqpage.jsp"
           "?txtStockCode=000875&cycle=&actionid="
           "&randnum=0.8005739768343246")
    f = urllib.urlopen(url)
    try:
        resp = f.read()
    finally:
        # Fix: the original never closed the HTTP handle (resource leak).
        f.close()
    # The page is served as GBK; round-trip to UTF-8 bytes for BeautifulSoup.
    resp = resp.decode("gbk").encode('utf-8')
    if not resp:
        return
    soup = BeautifulSoup(resp)
    tables = soup.findAll("table", {'id': 'ZQHQ1_GridA'})
    assert len(tables) == 1
    table = tables[0]
    rows = table.findAll("tr")
    assert len(rows) == 9
    # omit the last row because there is nothing inside
    # NOTE(review): this keeps rows 0-6, dropping the last TWO of the nine
    # rows — confirm whether row 7 was meant to be kept.
    rows = rows[:7]
    messages = []
    for row in rows:
        tds = row.findAll('td')
        assert len(tds) == 2
        name = tds[0].string
        # Some cells wrap the value in a <font> tag instead of bare text.
        value = tds[1].string if tds[1].string else tds[1].font.string
        messages.append(name + " : " + value)
    if messages:
        message = "\n".join(messages)
        netgrowl.send_notify_by_growl(name="Stock",
                                      message=message.encode("utf-8"),
                                      host="localhost")
def test_attachment_parsing(self):
    """Parse the example attachment XML and compare the result against the
    expected dictionary."""
    parsed_bug_id = self._expected_example_attachment_parsing['bug_id']
    element = BeautifulSoup(self._example_attachment).find("attachment")
    attachment = Bugzilla()._parse_attachment_element(element, parsed_bug_id)
    self.assertTrue(attachment)
    self._assert_dictionaries_equal(
        attachment, self._expected_example_attachment_parsing)
def test_attachment_parsing(self):
    """Verify _parse_attachment_element extracts the expected fields from
    the example attachment."""
    bugzilla = Bugzilla()
    soup = BeautifulSoup(self._example_attachment)
    expected = self._expected_example_attachment_parsing
    attachment = bugzilla._parse_attachment_element(
        soup.find("attachment"), expected['bug_id'])
    self.assertTrue(attachment)
    self._assert_dictionaries_equal(attachment, expected)
def test_attachment_parsing(self):
    """Parse the example attachment and check every expected key/value pair,
    including that no extra keys are produced."""
    expected = self._expected_example_attachment_parsing
    soup = BeautifulSoup(self._example_attachment)
    attachment = Bugzilla()._parse_attachment_element(
        soup.find("attachment"), expected['bug_id'])
    self.assertTrue(attachment)
    # Make sure we aren't parsing more or less than we expect
    self.assertEquals(attachment.keys(), expected.keys())
    for key, expected_value in expected.items():
        self.assertEquals(
            attachment[key], expected_value,
            ("Failure for key: %s: Actual='%s' Expected='%s'" %
             (key, attachment[key], expected_value)))
def test_attachment_parsing(self):
    """Parse the example attachment with a Bugzilla wired to a populated
    committer list and verify every expected key/value pair."""
    committer_list = CommitterList(
        committers=[Committer('Test Two', '*****@*****.**')],
        reviewers=[Reviewer('Test One', '*****@*****.**')])
    bugzilla = Bugzilla(committers=committer_list)
    expected = self._expected_example_attachment_parsing
    attachment = bugzilla._parse_attachment_element(
        BeautifulSoup(self._example_attachment).find("attachment"),
        expected['bug_id'])
    self.assertTrue(attachment)
    for key, expected_value in expected.items():
        message = ("Failure for key: %s: Actual='%s' Expected='%s'" %
                   (key, attachment[key], expected_value))
        self.assertEquals(attachment[key], expected_value, message)
def get_golflink_details(self):
    """Fetch the player's handicap details from golflink.com.au and store
    them on self (exact_handicap, playing_handicap, handicap_status).

    Best-effort: any fetch or parse failure falls back to
    exact_handicap="n/a" / playing_handicap=None. No-op when the player
    has no golflink number.
    """
    if not self.golflink_no:
        return
    try:
        glpage = urlfetch.Fetch(
            "http://www.golflink.com.au/HandicapHistory.aspx?golflink_no="
            + self.golflink_no.golflink_no_for_golflink()).content
    # Fix: bare `except:` also swallowed SystemExit/KeyboardInterrupt.
    except Exception:
        glpage = None
    # Fix: compare to None with identity, not `!=`.
    if glpage is not None:
        soup = BeautifulSoup(glpage)
        try:
            self.exact_handicap = soup.find("div", id="exactHandicap").string
            self.playing_handicap = int(
                soup.find("div", id="playingHandicap").string)
        # Missing divs or a non-numeric value -> fall back to defaults.
        except Exception:
            self.exact_handicap = "n/a"
            self.playing_handicap = None
        # TODO - get status
        self.handicap_status = ""
def test_status_parsing(self):
    """Parse each row of the example one-box status table and compare every
    builder field against the expected parsings."""
    buildbot = BuildBot()
    soup = BeautifulSoup(self._example_one_box_status)
    status_table = soup.find("table")
    input_rows = status_table.findAll('tr')
    # Fix: use enumerate instead of `for x in range(len(...))` — the index
    # is only needed for the expected-parsing lookup and the message.
    for x, status_row in enumerate(input_rows):
        expected_parsing = self._expected_example_one_box_parsings[x]
        builder = buildbot._parse_builder_status_from_row(status_row)
        # Make sure we aren't parsing more or less than we expect
        self.assertEquals(builder.keys(), expected_parsing.keys())
        for key, expected_value in expected_parsing.items():
            self.assertEquals(
                builder[key], expected_value,
                ("Builder %d parse failure for key: %s: Actual='%s' Expected='%s'" %
                 (x, key, builder[key], expected_value)))
def test_status_parsing(self):
    """Check builder-status parsing of the example one-box table,
    row by row."""
    buildbot = BuildBot()
    table = BeautifulSoup(self._example_one_box_status).find("table")
    for x, status_row in enumerate(table.findAll('tr')):
        expected_parsing = self._expected_example_one_box_parsings[x]
        builder = buildbot._parse_builder_status_from_row(status_row)
        # Make sure we aren't parsing more or less than we expect
        self.assertEquals(builder.keys(), expected_parsing.keys())
        for key, expected_value in expected_parsing.items():
            failure = (
                "Builder %d parse failure for key: %s: Actual='%s' Expected='%s'"
                % (x, key, builder[key], expected_value))
            self.assertEquals(builder[key], expected_value, failure)
def test_attachment_parsing(self):
    """Parse the example attachment using a Bugzilla backed by a committer
    list, then verify every expected key/value pair."""
    reviewer = Reviewer('Test One', '*****@*****.**')
    committer = Committer('Test Two', '*****@*****.**')
    bugzilla = Bugzilla(committers=CommitterList(committers=[committer],
                                                 reviewers=[reviewer]))
    attachment_element = BeautifulSoup(self._example_attachment).find(
        "attachment")
    attachment = bugzilla._parse_attachment_element(
        attachment_element,
        self._expected_example_attachment_parsing['bug_id'])
    self.assertTrue(attachment)
    for key, expected_value in \
            self._expected_example_attachment_parsing.items():
        self.assertEquals(attachment[key], expected_value,
                          "Failure for key: %s: Actual='%s' Expected='%s'"
                          % (key, attachment[key], expected_value))
def get_golflink_details(self):
    """Fetch and store the player's handicap details from golflink.com.au.

    Writes self.exact_handicap, self.playing_handicap and
    self.handicap_status. Best-effort: any fetch or parse failure falls
    back to exact_handicap="n/a" / playing_handicap=None. No-op when the
    player has no golflink number.
    """
    if not self.golflink_no:
        return
    try:
        glpage = urlfetch.Fetch(
            "http://www.golflink.com.au/HandicapHistory.aspx?golflink_no="
            + self.golflink_no.golflink_no_for_golflink()).content
    # Fix: bare `except:` also swallowed SystemExit/KeyboardInterrupt.
    except Exception:
        glpage = None
    # Fix: compare to None with identity, not `!=`.
    if glpage is not None:
        soup = BeautifulSoup(glpage)
        try:
            self.exact_handicap = soup.find("div",
                                            id="exactHandicap").string
            self.playing_handicap = int(
                soup.find("div", id="playingHandicap").string)
        # Missing divs or a non-numeric value -> fall back to defaults.
        except Exception:
            self.exact_handicap = "n/a"
            self.playing_handicap = None
        # TODO - get status
        self.handicap_status = ""