Пример #1
0
def extract_results(search_word, condition=None):
    """Scrape up to 10 product results for *search_word* from the site.

    Args:
        search_word: Term to search for.
        condition: Optional condition filter. This site only lists used
            equipment, so ``"new"`` short-circuits to an empty list.

    Returns:
        A list of at most 10 ``Result`` objects whose price passed
        ``util.is_valid_price``; empty list if the page could not be
        fetched or parsed.
    """
    # The site carries no new equipment, so a "new" query can never match.
    if condition == "new":
        return []
    url = util.create_url(MAIN_URL, search_word, DELIMITER)
    try:
        soup = util.check_exceptions(url)
        product_grid = soup.find('div', class_='v-product-grid')
        total_equips = product_grid.find_all('div', class_='v-product')
    except Exception:
        # Fetch failed or the expected grid element is missing
        # (find() returned None). Treat either as "no results".
        # NOTE: narrowed from a bare `except:` so KeyboardInterrupt /
        # SystemExit are no longer swallowed.
        return []
    equips = []

    for equip in total_equips:
        title = equip.find(
            'a', class_='v-product__title productnamecolor colors_productname'
        ).find(text=True).strip()
        equipment = Result(title)
        equipment.url = equip.find('a', class_='v-product__img').get('href')
        # Image src on this site is protocol-relative; prefix a scheme.
        equipment.image_src = 'http:' + equip.find('img').get('src')
        price_text = equip.find(
            'div', class_='product_productprice').find_all(text=True)
        equipment.price = util.get_price(''.join(price_text))
        if util.is_valid_price(equipment.price):
            equips.append(equipment)
        if len(equips) >= 10:
            return equips
    return equips
Пример #2
0
def extract_results(item, condition=None):
    """Scrape up to 10 search results for *item*, filtered by condition.

    Args:
        item: Term to search for.
        condition: ``"new"`` restricts results to new listings; any other
            value returns used / refurbished / for-parts listings.

    Returns:
        A list of at most 10 ``Result`` objects whose price passed
        ``util.is_valid_price``; empty list if the page could not be
        fetched or the results table is missing.
    """
    # BUG FIX: the original built `specific_url` with the condition query
    # parameters but then fetched a plain URL without them, so the
    # condition filter was silently ignored. Fetch the filtered URL.
    if condition == "new":
        url = (util.create_url(MAIN_URL, item, DELIMITER)
               + "&condition=New,New%20or%20Used&adtype=998")
    else:
        url = (util.create_url(MAIN_URL, item, DELIMITER)
               + "&condition=Used,Refurbished,For%20Parts/Not%20Working,"
                 "New%20or%20Used&adtype=998")
    results = []
    # Check if the page has data.
    try:
        soup = util.check_exceptions(url)
        table = soup.find('tbody', class_='ResultsNewTable')
        rows = table.find_all('tr')
    except Exception:
        # Fetch failed or the results table is absent (find() returned
        # None). Either way there is nothing to extract.
        return []
    # Collect the first 10 valid results only.
    for row in rows:
        new_result = Result(row.find('a').get('title'))
        new_result.url = row.find('a').get('href')
        new_result.price = util.get_price(row.find_all('td')[4].contents[0])
        # The listing number embedded in the title determines the photo URL.
        number = util.get_price(new_result.title)
        new_result.image_src = ("https://photos.labx.com/labx/" + number
                                + "/" + number + "-0.jpg")
        if util.is_valid_price(new_result.price):
            results.append(new_result)
            if len(results) == 10:
                return results
    return results
Пример #3
0
def extract_results(search_word, condition=None):
    """Scrape up to 10 product results for *search_word* from the site.

    Args:
        search_word: Term to search for.
        condition: ``"new"`` appends the site's new-condition filter
            (``&Condition=5067``) to the search URL; otherwise no filter.

    Returns:
        A list of at most 10 ``Result`` objects whose price passed
        ``util.is_valid_price``; empty list if the page could not be
        fetched or parsed.
    """
    url = util.create_url(MAIN_URL, search_word, DELIMITER)
    url = url if condition != "new" else url + '&Condition=5067'
    try:
        soup = util.check_exceptions(url)
        product_grid = soup.find('div', class_='pagebody')
        total_equips = product_grid.find_all('div', class_='el')
    except Exception:
        # Fetch failed or the page body element is missing (find()
        # returned None). NOTE: narrowed from a bare `except:` so
        # KeyboardInterrupt / SystemExit are no longer swallowed.
        return []
    equips = []
    for equip in total_equips:
        # item_details bundles the generic device name, model and
        # manufacturer together; join them into one title string.
        items_details = equip.find('div',
                                   class_='item_details').find_all(text=True)
        title = ' '.join(items_details).strip()
        equipment = Result(title)
        equipment.url = equip.find('div', class_='image').find(
            'a', class_='item_number').get('href')
        equipment.image_src = equip.find('div',
                                         class_='image').find('img').get('src')
        # Prefer the span.price_element node when present; fall back to
        # the div.price text otherwise.
        price_element = equip.find('span', class_='price_element')
        if price_element is None:
            price_text = equip.find('div', class_='price').find(text=True)
        else:
            price_text = price_element.find(text=True)
        equipment.price = util.get_price(''.join(price_text))
        if util.is_valid_price(equipment.price):
            equips.append(equipment)
        if len(equips) >= 10:
            return equips
    return equips
Пример #4
0
def extract_results(search_word, condition=None):
    """Scrape up to 10 product results for *search_word* from the site.

    Each result link on the search page is followed to its product page
    to collect the title, image and price.

    Args:
        search_word: Term to search for.
        condition: Accepted for interface consistency with the sibling
            scrapers; this site exposes no condition filter, so the
            argument is ignored.

    Returns:
        A list of at most 10 ``Result`` objects whose price passed
        ``util.is_valid_price``; empty list if the search page could
        not be fetched or parsed.
    """
    url = util.create_url(MAIN_URL, search_word, DELIMITER)
    try:
        soup = util.check_exceptions(url)
        product_table = soup.find('table', class_='table_content')
        result_links = product_table.find_all('a')
    except Exception:
        # Fetch failed or the results table is missing (find() returned
        # None). NOTE: narrowed from a bare `except:` so
        # KeyboardInterrupt / SystemExit are no longer swallowed.
        return []

    equips = []
    for link in result_links:
        product_url = HOME_URL + link.get('href')
        # Use the response as a context manager so the HTTP connection
        # is closed promptly instead of leaking per result link.
        with urllib.request.urlopen(product_url) as response:
            product_page_content = BeautifulSoup(response, "html.parser")
        title = ''.join(
            product_page_content.find(
                'div',
                class_='product_left').find('h1').find_all(text=True)).strip()
        equipment = Result(title)
        equipment.url = product_url
        equipment.image_src = HOME_URL + product_page_content.find(
            'img', {
                "id": "big_product_img"
            }).get('src')
        equipment.price = util.get_price(
            product_page_content.find('div',
                                      class_='pr_price2').find(text=True))
        if util.is_valid_price(equipment.price):
            equips.append(equipment)
        if len(equips) >= 10:
            return equips
    return equips