def scrapy(self, driver):
    # Parse the rendered product page and drill down to the item info block
    bs = BeautifulSoup(driver.page_source, "html.parser")
    item = bs.find("div", id="product-intro").find("div", id="itemInfo")
    name = item.find("div", id="name").find("h1").text
    price = item.find("div", id="summary-price").find("strong", class_="p-price").text
    # Keep only the numeric portion of the price and discount strings
    price = re_price.search(price).group()
    discount = item.find("div", id="summary-price").find("span", class_="p-discount").text
    discount = re_price.search(discount).group()
    Log4Spider.dataLog("insert a shop", name, price, discount)
    self.db.shops.insert({"name": name, "price": price, "discount": discount})
    super().scrapy(driver)
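The snippet above relies on names defined elsewhere in the spider module: re_price, Log4Spider, and self.db. A minimal sketch of that assumed context (the regex pattern and the logging stand-in below are illustrative assumptions, not taken from the original project):

import re
from bs4 import BeautifulSoup  # parsing backend used by the method above

# Assumed: extracts the numeric part of a price string such as "1299.00"
re_price = re.compile(r"\d+(?:\.\d+)?")

# Assumed stand-in for the project's Log4Spider logging helper
class Log4Spider:
    @staticmethod
    def dataLog(*args):
        print("[data]", *args)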
Example #2
    def scrapy(self, driver):
        executor = self.app.executor  # renamed from "exec" to avoid shadowing the builtin

        # Alternative approach kept from the original: scroll each item into
        # view with ActionChains so lazily loaded prices render before parsing.
        #css_div = 'div[class="%s"] h3' % ('item-inner')
        #div_book = yield executor.submit(driver.find_elements_by_css_selector, css_div)
        #for h in div_book:
        #    ActionChains(driver).move_to_element(h).perform()
        pagesource = driver.page_source
        bs = BeautifulSoup(pagesource, "html.parser")
        eles = bs.findAll("div", {"class": "p-name"})
        prices = bs.findAll("div", {"class": "p-price"})
        for price in prices:
            i = price.find("i")
            if i:
                print(i.text)
        for ele, price in zip(eles, prices):
            Log4Spider.dataLog(ele.text, price.text)
        super().scrapy(driver)
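The name/price pairing above can be exercised without Selenium by feeding BeautifulSoup a static fragment of the listing markup. A minimal sketch (the HTML below is illustrative, not copied from the real page):

from bs4 import BeautifulSoup

html = '''
<div class="p-name">Book A</div><div class="p-price"><i>19.90</i></div>
<div class="p-name">Book B</div><div class="p-price"><i>25.00</i></div>
'''

bs = BeautifulSoup(html, "html.parser")
names = bs.find_all("div", {"class": "p-name"})
prices = bs.find_all("div", {"class": "p-price"})
for name, price in zip(names, prices):
    i = price.find("i")  # the numeric amount sits inside an <i> tag
    print(name.get_text(strip=True), i.text if i else price.get_text(strip=True))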
Example #4
def scrapy(self, driver):
    bs = BeautifulSoup(driver.page_source, "html.parser")
    item = bs.find("div", id="product-intro").find("div", id="itemInfo")
    name = item.find("div", id="name").find("h1").text
    price = item.find("div",
                      id="summary-price").find("strong",
                                               class_="p-price").text
    price = re_price.search(price).group()
    discount = item.find("div",
                         id="summary-price").find("span",
                                                  class_="p-discount").text
    discount = re_price.search(discount).group()
    Log4Spider.dataLog("insert a shop", name, price, discount)
    self.db.shops.insert({
        "name": name,
        "price": price,
        "discount": discount
    })
    super().scrapy(driver)
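If self.db is a pymongo database handle (an assumption, not confirmed by this listing), shops.insert(...) uses the legacy write API that pymongo 4 removed; the modern equivalent is insert_one. A minimal standalone sketch, assuming a local MongoDB instance and an illustrative database name:

from pymongo import MongoClient

client = MongoClient("mongodb://localhost:27017")  # assumed local MongoDB instance
db = client["spider"]                              # illustrative database name
db.shops.insert_one({"name": "example item", "price": "1299.00", "discount": "100.00"})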