def to_representation(self, android):
    """Build the public JSON payload for an *android* app model instance.

    The hashed ``id`` (via ``encode``) is exposed instead of the raw
    database primary key.
    """
    fields = (
        ('id', encode(android.id)),
        ('title', android.title),
        ('appURL', android.app_url),
        ('publisher', android.publisher),
        ('publisherURL', android.pub_url),
        ('icon', android.cover),
        ('price', android.price),
    )
    return dict(fields)
def to_representation(self, UWP):
    """Build the public JSON payload for a Microsoft Store (UWP) app.

    ``publisherURL`` is always empty for this store; ``appID`` carries the
    store's own app identifier. The DB primary key is hashed via ``encode``.
    """
    fields = (
        ('id', encode(UWP.id)),
        ('title', UWP.title),
        ('appURL', UWP.app_url),
        ('publisher', UWP.publisher),
        ('publisherURL', ''),
        ('icon', UWP.cover),
        ('price', UWP.price),
        ('appID', UWP.appid),
    )
    return dict(fields)
def to_representation(self, iOS):
    """Build the public JSON payload for an iOS App Store app.

    Includes the store ``appID`` and the list of supported ``devices`` in
    addition to the common fields; the DB primary key is hashed via
    ``encode``.
    """
    fields = (
        ('id', encode(iOS.id)),
        ('title', iOS.title),
        ('appURL', iOS.app_url),
        ('publisher', iOS.publisher),
        ('publisherURL', iOS.publisher_url),
        ('icon', iOS.cover),
        ('price', iOS.price),
        ('appID', iOS.appid),
        ('devices', iOS.devices),
    )
    return dict(fields)
def get(self, request, uwpid):
    """Return details for a Microsoft Store (UWP) app.

    ``uwpid`` is either a hashed DB primary key (decoded via ``decode``)
    or a raw Microsoft Store app id. Known apps are served from the DB
    through ``UWPSerializer``; unknown ones are scraped from the store
    page, persisted, and returned as a hand-built dict.

    Returns a DRF ``Response`` with ``{"results": [...]}``.
    """
    uwpid_hash = uwpid
    uwpid_decoded = str(decode(uwpid_hash))
    apps = UWP.objects.filter(id=uwpid_decoded)
    # if app already exists, then fetch details from DB
    if apps.exists() and uwpid_decoded != '0':
        # many=True: the serializer receives a queryset, not one instance.
        serializer = UWPSerializer(apps, many=True)
        data = {"results": serializer.data}
    # else scrape the data and save it in DB.
    # Also provides full details when only the store appid is provided.
    else:
        # Scraping the app details. NOTE(review): selectors below are tied
        # to the store's markup and will break if the page layout changes.
        url = 'https://www.microsoft.com/en-us/p/app/' + uwpid
        page = requests.get(url)
        soup = BeautifulSoup(page.text, 'html.parser')
        appid = uwpid  # appid
        title = soup.find('h1', {
            'id': 'DynamicHeading_productTitle'
        }).text  # title
        publisher = soup.find('span', {'role': 'text'}).text  # publisher
        icon = soup.find('meta', {'property': 'og:image'}).attrs['content']
        # Fix the protocol-relative URL and request a larger icon size.
        icon = icon.replace('//', 'http://').replace('w=120&h=120&q=60',
                                                     'w=200&h=200')  # icon
        catinfo = soup.find('a', {'class': 'c-hyperlink'})
        category = catinfo.text  # category
        category_url = 'https://www.microsoft.com' + str(
            catinfo.attrs['href'])  # category_url
        rating = soup.find_all('span', {'role': 'presentation'})
        ratingCount = rating[-1].text  # ratingCount
        ratingValue = rating[-2].text  # ratingValue
        sshots = soup.find_all(
            'img', {'class': 'lazyload f-screenshot-fixed-size'})
        # Strip the letterbox query string so screenshots come back full size.
        screenshots = []  # screenshots
        for shot in sshots:
            sshot = shot.attrs['data-src'].replace(
                '//', 'http://'
            ).replace(
                '?w=672&h=378&q=80&mode=letterbox&background=%23FFE4E4E4&format=jpg',
                '')
            screenshots.append(sshot)
        description = soup.find(
            'p', {
                'class': 'c-paragraph pi-product-description-text'
            }).text  # description
        price = soup.find('meta', {
            'itemprop': 'price'
        }).attrs['content']  # price
        publisherURL = ''
        # Persist the scraped app unless an identical row already exists.
        objs = UWP.objects.filter(title=title,
                                  publisher=publisher,
                                  app_url=url,
                                  cover=icon,
                                  price=price,
                                  appid=appid)
        if not objs.exists():
            app = UWP(title=title,
                      publisher=publisher,
                      app_url=url,
                      cover=icon,
                      price=price,
                      appid=appid)
            app.save()
            # Use the freshly saved instance's pk directly instead of
            # re-evaluating the (previously empty) queryset, which could
            # raise IndexError under a mismatch or race.
            row_id = app.id
        else:
            row_id = objs[0].id
        hashed_id = encode(row_id)
        keys = [
            'id', 'title', 'appid', 'publisher', 'publisherURL', 'icon',
            'category', 'category_url', 'ratingCount', 'ratingValue',
            'description', 'appURL', 'price', 'screenshots'
        ]
        values = [
            hashed_id, title, appid, publisher, publisherURL, icon, category,
            category_url, ratingCount, ratingValue, description, url, price,
            screenshots
        ]
        appdata = [dict(zip(keys, values))]
        data = {"results": appdata}
    return Response(data)
def userFeeds(request):
    """Render the current user's feed posts in 'userFeeds.html'.

    Optionally filters by the ``store`` GET parameter (converted with
    ``store_ltos``). Builds parallel lists per feed field, zips them into
    ``myFeed``, and renders with ``locals()`` — so every local variable
    here is visible to the template.
    """
    # For sorting of feeds
    store = request.GET.get('store', False)
    if store:
        store = store_ltos(store)
        # Getting feed requests from from DB
        objs = feeds.objects.filter(
            Q(store__icontains=store)
            & Q(author=str(request.user))).order_by('-id')
    else:
        objs = feeds.objects.filter(author=str(request.user)).order_by('-id')
    feedCount = objs.count()
    # NOTE(review): a Page object is built here but the loop below iterates
    # the full queryset, so pagination apparently only affects the template
    # via ``feedsCount`` — confirm against 'userFeeds.html'.
    paginator = Paginator(objs, 3)
    page = request.GET.get('page')
    feedsCount = paginator.get_page(page)
    # print(paginator)
    # Making Lists ready for data
    # NOTE(review): ``store`` is rebound from the GET-param string to this
    # list — intentional reuse, but order-sensitive.
    title = []
    appid = []
    category = []
    store = []
    content = []
    author = []
    created_at = []
    updated_at = []  # NOTE(review): never appended to below — likely vestigial.
    screenshots = []
    tags = []
    unique_hash = []
    app_data = []
    feed_id = []
    upvote_count = []
    downvote_count = []
    comment_count = []
    if objs.exists():
        for feed in objs:
            title.append(feed.title)
            appid.append(encode(feed.appid))
            # Replace the stored category code with a ready-to-render badge.
            if feed.category == "NR":
                feed.category = '<button class="btn badge-success btn-success btn-sm btn-round">NEW RELEASE</button>'
            elif feed.category == "DI":
                feed.category = '<button class="btn badge-warning btn-warning btn-sm btn-round">DISCOVER</button>'
            elif feed.category == "UP":
                feed.category = '<button class="btn badge-info btn-info btn-sm btn-round">UPDATED</button>'
            elif feed.category == "BF":
                feed.category = '<button class="btn badge-danger btn-danger btn-sm btn-round">BUGS & FIXES</button>'
            else:
                feed.category = '<button class="btn badge-dark btn-dark btn-sm btn-round">PRICE DROP</button>'
            category.append(feed.category)
            # Appended BEFORE the store_stol conversion below, so the list
            # holds the raw stored value while app_details gets the
            # converted one — order matters here.
            store.append(feed.store)
            content.append(feed.content)
            author.append(feed.author)
            created_at.append(feed.created_at)
            screenshots.append(feed.sshots)
            tags.append(feed.tags)
            unique_hash.append(feed.unique_hash)
            feed_id.append(encode_alpha(feed.id))
            feed.store = store_stol(feed.store)
            # Normalise missing vote/comment counts to the string '0'.
            if feed.upvote_count is None:
                feed.upvote_count = '0'
            if feed.downvote_count is None:
                feed.downvote_count = '0'
            if feed.comment_count is None:
                feed.comment_count = '0'
            upvote_count.append(feed.upvote_count)
            downvote_count.append(feed.downvote_count)
            comment_count.append(feed.comment_count)
            # Getting app data
            app = app_details(request, encode(feed.appid),
                              feed.store)  # app_details function
            app_data_temp = [
                app['title'], app['appURL'], app['publisher'],
                app['publisherURL'], app['price'], app['icon']
            ]
            app_data.append(app_data_temp)
            # end of app data
    # One tuple per feed; the template unpacks these positionally.
    myFeed = list(
        zip(title, appid, category, store, content, author, created_at,
            screenshots, tags, unique_hash, app_data, feed_id, upvote_count,
            downvote_count, comment_count))
    return render(request, 'userFeeds.html', locals())
def get(self, request, playid):
    """Return details for a Google Play (android) app.

    ``playid`` is either a hashed DB primary key (decoded via ``decode``)
    or a raw Play Store package id. Known apps are served from the DB
    through ``androidSerializer``; unknown ones are scraped from the Play
    Store page, persisted, and returned as a hand-built dict.

    Returns a DRF ``Response`` with ``{"results": [...]}``.
    """
    playid_hash = playid
    playid_decoded = str(decode(playid_hash))
    apps = android.objects.filter(id=playid_decoded)
    # if app already exists, then fetch details from DB
    if apps.exists() and playid_decoded != '0':
        # many=True: the serializer receives a queryset, not one instance.
        serializer = androidSerializer(apps, many=True)
        data = {"results": serializer.data}
    # else scrap the data and save it in DB.
    # Also provides full details when only google appid is provided.
    else:
        # Scraping the app details. NOTE(review): the CSS class names below
        # are obfuscated Play Store classes and break when Google reships
        # the page; positional indexing into ``additional`` is equally
        # fragile.
        page = requests.get(
            'https://play.google.com/store/apps/details?id=' + playid)
        soup = BeautifulSoup(page.text, 'html.parser')
        title = soup.find('h1', class_='AHFaub').text  # title
        appid = playid  # appid
        pub_cat = soup.find_all('a', class_='hrTbp R8zArc')
        publisher = pub_cat[0].text  # publisher
        publisher_url = soup.find(
            'a', class_='hrTbp R8zArc').attrs['href']  # publisher_url
        icon = soup.find('img', class_='T75of ujDFqe').attrs['src']  # icon
        category = pub_cat[1].text  # category
        reviews = soup.find('span', class_='AYi5wd TBRnV').text  # reviews
        # Extract the trailer URL by slicing the raw div markup between the
        # YouTube prefix and the following jsaction attribute.
        video = str(soup.find('div', class_='TdqJUe'))
        pos1 = video.find('https://www.youtube.com/')
        pos2 = video.find('" jsaction="')
        video = video[pos1:pos2]  # video
        des = soup.find('content')
        description = des.text  # description
        description_html = str(des).replace(
            '<content><div jsname="sngebd">', '').replace(
                '</content>', '').replace('</div>', '')  # description_html
        # Editor's-choice badge is present only for selected apps.
        ec = soup.find('span', class_='giozf')
        if ec is not None:
            editors_choice = 'True'  # editors_choice
        else:
            editors_choice = 'False'
        developer_id = str(publisher_url).replace(
            'https://play.google.com/store/apps/dev?id=', '')  # developer_id
        additional = soup.find_all('span', class_='htlgb')
        updated = additional[1].text  # updated
        size = additional[2].text  # size
        installs = additional[4].text  # installs
        current_version = additional[6].text  # current_version
        required_android_version = additional[
            8].text  # required_android_version
        dev = soup.find_all('a', class_='hrTbp ')
        developer_url = dev[1].attrs['href']  # developer_url
        developer_email = soup.find(
            'a', class_='hrTbp KyaTEc').text  # developer_email
        developer_address = additional[-1].text
        pos1 = str(developer_address).find('Privacy Policy')
        developer_address = developer_address[pos1:].replace(
            'Privacy Policy', '')  # developer_address
        url = 'https://play.google.com/store/apps/details?id=' + playid  # url
        price = soup.find_all('button')
        price = price[-1].text.replace('$0.00', '')  # price
        if price == '':
            price = 'Free'
        # Persist the scraped app unless an identical row already exists.
        objs = android.objects.filter(title=title,
                                      publisher=publisher,
                                      app_url=url,
                                      pub_url=publisher_url,
                                      cover=icon,
                                      price=price)
        if not objs.exists():
            app = android(title=title,
                          publisher=publisher,
                          app_url=url,
                          pub_url=publisher_url,
                          cover=icon,
                          price=price)
            app.save()
            # Use the freshly saved instance's pk directly instead of
            # re-evaluating the (previously empty) queryset, which could
            # raise IndexError under a mismatch or race.
            row_id = app.id
        else:
            row_id = objs[0].id
        hashed_id = encode(row_id)
        keys = [
            'id', 'title', 'appid', 'publisher', 'publisherURL', 'icon',
            'category', 'reviews', 'video', 'description',
            'description_html', 'editors_choice', 'developer_id', 'updated',
            'size', 'installs', 'current_version',
            'required_android_version', 'developer_url', 'developer_email',
            'developer_address', 'appURL', 'price'
        ]
        values = [
            hashed_id, title, appid, publisher, publisher_url, icon,
            category, reviews, video, description, description_html,
            editors_choice, developer_id, updated, size, installs,
            current_version, required_android_version, developer_url,
            developer_email, developer_address, url, price
        ]
        appdata = [dict(zip(keys, values))]
        data = {"results": appdata}
    return Response(data)