Code example #1
0
def eden_robe(query):
	"""Scrape Eden Robe search results for *query* and persist each hit.

	Saves one QuerySet row per product found.  Returns None on success and
	False on any failure (network error, unexpected markup, DB error),
	matching the other brand scrapers in this module.
	"""
	try:
		link = "https://edenrobe.com/catalogsearch/result/?q=" + query
		# Timeout keeps a hung connection from blocking the caller forever.
		req = requests.get(link, timeout=10)
		soup = BeautifulSoup(req.text, 'lxml')

		titles = soup.find_all('a', {'class': 'product-item-link'})
		prices = soup.find_all('span', {'class': 'price'})
		images = [a['href'] for a in soup.find_all('a', {'class': 'MagicZoom'}, href=True)
				  if a['href'] != "#"]
		# Store the human-readable query ("red shirt"), not the URL-encoded
		# form ("red+shirt").  Hoisted out of the loop — it never changes.
		category = query.replace("+", " ")
		# zip() stops at the shortest list, so a product with a missing
		# price or image no longer raises IndexError and silently aborts
		# the whole scrape via the broad except below.
		for title_tag, price_tag, image in zip(titles, prices, images):
			title = title_tag.text.replace("\t", "").replace("\n", "")
			QuerySet(
				title=title,
				image_url=image,
				item_url=title_tag.get('href'),
				brand="Eden Robe",
				price=price_tag.text,
				category=category,
			).save()
	except Exception:
		# ConnectionError is a subclass of Exception, so one broad handler
		# is equivalent to the old (ConnectionError, Exception) tuple.
		# Kept returning False so existing callers' failure check works.
		return False
Code example #2
0
def outfitters(query):
	"""Scrape Outfitters search results for *query* and persist each hit.

	Saves one QuerySet row per product found.  Returns None on success and
	False on any failure (network error, unexpected markup, DB error).
	"""
	try:
		link = "https://outfitters.com.pk/search?type=product&q=" + query
		# Timeout keeps a hung connection from blocking the caller forever.
		req = requests.get(link, timeout=10)
		soup = BeautifulSoup(req.text, 'lxml')
		titles = soup.find_all('a', {'class': 'product-title'})
		prices = soup.find_all('span', {'class': 'money'})
		images = soup.find_all('img', {'class': 'images-one lazyautosizes lazyloaded'})
		links = [a['href'] for a in soup.find_all('a', {'class': 'product-title'}, href=True)
				 if a['href'] != "#"]
		# Store the human-readable query, hoisted out of the loop.
		category = query.replace("+", " ")
		# zip() stops at the shortest list, so a product with a missing
		# price/image/link no longer raises IndexError mid-scrape.
		for title_tag, price_tag, img_tag, item_url in zip(titles, prices, images, links):
			title = title_tag.text.replace("\t", "").replace("\n", "")
			image = img_tag.get('src')
			# Strip the cache-busting query string.  The old code only
			# removed one hard-coded "?v=1599216010" token, which broke
			# as soon as the site bumped its asset version.
			if image:
				image = image.split("?", 1)[0]
			QuerySet(
				title=title,
				image_url=image,
				item_url=item_url,
				brand="Outfitters",
				price=price_tag.text,
				category=category,
			).save()
	except Exception:
		# Exception already covers ConnectionError; keep returning False.
		return False
Code example #3
0
def focus(query):
	"""Scrape Focus Clothing search results for *query* and persist each hit.

	Saves one QuerySet row per product found.  Returns None on success and
	False on any failure (network error, unexpected markup, DB error).
	"""
	try:
		link = "https://focusclothing.pk/search?q=" + query
		# Timeout keeps a hung connection from blocking the caller forever.
		req = requests.get(link, timeout=10)
		soup = BeautifulSoup(req.text, 'html.parser')
		titles = soup.find_all('div', {'class': 'prod-title'})
		prices = soup.find_all('div', {'class': 'onsale'})
		# The real <img> for each product lives inside a <noscript> tag.
		images = soup.find_all('noscript')
		infos = soup.find_all('div', {'class': 'product-info'})
		# Store the human-readable query, hoisted out of the loop.
		category = query.replace("+", " ")
		# zip() stops at the shortest list, so a count mismatch between
		# the four parallel lists no longer raises IndexError mid-scrape.
		for title_tag, price_tag, noscript, info in zip(titles, prices, images, infos):
			QuerySet(
				title=title_tag.text,
				image_url=noscript.img.get('src'),
				item_url="https://focusclothing.pk" + info.a.get('href'),
				brand="Focus",
				price=price_tag.text,
				category=category,
			).save()
	except Exception:
		# Exception already covers ConnectionError; keep returning False.
		return False
Code example #4
0
def gulahmed(query):
	"""Scrape Gul Ahmed search results for *query* and persist each hit.

	Saves one QuerySet row per product found.  Returns None on success and
	False on any failure (network error, unexpected markup, DB error).
	"""
	try:
		link = "https://www.gulahmedshop.com/catalogsearch/result/?q=" + query
		# Timeout keeps a hung connection from blocking the caller forever.
		req = requests.get(link, timeout=10)
		soup = BeautifulSoup(req.text, 'lxml')
		titles = soup.find_all('a', {'class': 'product-item-link'})
		prices = soup.find_all('span', {'class': 'price'})
		images = soup.find_all('img', {'class': 'product-image-photo'})
		links = [a['href'] for a in soup.find_all('a', {'class': 'product-item-link'}, href=True)
				 if a['href'] != "#"]
		# Store the human-readable query, hoisted out of the loop.
		category = query.replace("+", " ")
		# zip() stops at the shortest list, so a product with a missing
		# price/image/link no longer raises IndexError mid-scrape.
		for title_tag, price_tag, img_tag, item_url in zip(titles, prices, images, links):
			title = title_tag.text.replace("\t", "").replace("\n", "")
			QuerySet(
				title=title,
				image_url=img_tag.get('src'),
				item_url=item_url,
				brand="Gul Ahmed",
				price=price_tag.text,
				category=category,
			).save()
	except Exception:
		# Exception already covers ConnectionError; keep returning False.
		return False
Code example #5
0
def jdot(query):
	"""Scrape Junaid Jamshed (J.) search results for *query* and persist each hit.

	Saves one QuerySet row per product found.  Returns None on success and
	False on any failure (network error, unexpected markup, DB error).
	"""
	try:
		link = "https://www.junaidjamshed.com/catalogsearch/result/?q=" + query
		# Timeout keeps a hung connection from blocking the caller forever.
		req = requests.get(link, timeout=10)
		soup = BeautifulSoup(req.text, 'lxml')
		titles = soup.find_all('a', {'class': 'product-item-link'})
		prices = soup.find_all('span', {'class': 'price'})
		images = soup.find_all('img', {'class': 'product-image-photo lazy'})
		# Store the human-readable query, hoisted out of the loop.
		category = query.replace("+", " ")
		# zip() stops at the shortest list, so a product with a missing
		# price or image no longer raises IndexError mid-scrape.
		for title_tag, price_tag, img_tag in zip(titles, prices, images):
			title = title_tag.text.replace("\t", "").replace("\n", "")
			QuerySet(
				title=title,
				# Lazy-loaded images keep the real URL in data-original.
				image_url=img_tag.get('data-original'),
				item_url=title_tag.get('href'),
				brand="Junaid Jamshed",
				price=price_tag.text,
				category=category,
			).save()
	except Exception:
		# Exception already covers ConnectionError; keep returning False.
		return False
Code example #6
0
def diners(query):
	"""Scrape Diners search results for *query* and persist each hit.

	Saves one QuerySet row per product found.  Returns None on success and
	False on any failure (network error, unexpected markup, DB error).
	"""
	try:
		link = "https://diners.com.pk/search?type=product&q=" + query
		# Timeout keeps a hung connection from blocking the caller forever.
		req = requests.get(link, timeout=10)
		soup = BeautifulSoup(req.text, 'lxml')
		titles = soup.find_all('a', {'class': 'product-title'})
		prices = soup.find_all('div', {'class': 'price-box'})
		images = soup.find_all('span', {'class': 'images-two'})
		# Store the human-readable query, hoisted out of the loop (the old
		# code also ran the same replace twice per iteration).
		category = query.replace("+", " ")
		# zip() stops at the shortest list, so a product with a missing
		# price or image no longer raises IndexError mid-scrape.
		for title_tag, price_tag, img_tag in zip(titles, prices, images):
			QuerySet(
				title=title_tag.span.text,
				image_url=img_tag.img.get('src'),
				item_url="https://diners.com.pk/" + title_tag.get('href'),
				# Price text is nested: price-box > div > span > span.
				price=price_tag.div.span.span.text,
				brand="Diners",
				category=category,
			).save()
	except Exception:
		# Exception already covers ConnectionError; keep returning False.
		return False
Code example #7
0
def breakout(query):
	"""Scrape Breakout search results for *query* and persist each hit.

	Saves one QuerySet row per product found.  Returns None on success and
	False on any failure (network error, unexpected markup, DB error).
	Debug prints (the full page soup, the image list, every item) were
	removed — they flooded stdout on every call.
	"""
	try:
		link = "https://breakout.com.pk/search?type=product&q=" + query
		# Timeout keeps a hung connection from blocking the caller forever.
		req = requests.get(link, timeout=10)
		soup = BeautifulSoup(req.text, 'html.parser')
		titles = soup.find_all('h2', {'class': 'pt-title prod-thumb-title-color'})
		prices = soup.find_all('div', {'class': 'pt-price'})
		images = soup.find_all('img', {'class': 'lazyload'})
		# Store the human-readable query, hoisted out of the loop.
		category = query.replace("+", " ")
		# zip() stops at the shortest list, so a product with a missing
		# price or image no longer raises IndexError mid-scrape.
		for title_tag, price_tag, img_tag in zip(titles, prices, images):
			QuerySet(
				title=title_tag.text.upper(),
				# Lazy-loaded images keep the real URL in data-src.
				image_url=img_tag.get('data-src'),
				item_url="https://breakout.com.pk" + title_tag.find('a').get('href'),
				brand="Breakout",
				price=price_tag.text,
				category=category,
			).save()
	except Exception:
		# Exception already covers ConnectionError; keep returning False.
		return False