def check_googlePlayStore(webhook_url, version_prev):
    """Post a Discord webhook embed when the FGO Play Store version changes.

    Compares the currently published version against *version_prev* and
    returns the up-to-date version string (unchanged when nothing new).
    """
    info = app('com.aniplex.fategrandorder', lang='ja', country='jp')
    current = info["version"]
    if current == version_prev:
        return version_prev
    # Version changed: announce it via the Discord webhook.
    payload = {
        "username": "******",
        "avatar_url": avatar_url,
        "embeds": [{
            "title": "FGO アプリアップデート (Google Play Store)",
            "url": info["url"],
            "thumbnail": {"url": info["icon"]},
            "fields": [
                {"name": "バージョン", "value": current},
                {"name": "更新内容", "value": info['recentChanges']},
            ],
            "color": 5620992,
        }],
    }
    requests.post(webhook_url, json.dumps(payload),
                  headers={'Content-Type': 'application/json'})
    return current
def create_appcloud(app_id: str, lang: str, country: str):
    """Build a matplotlib figure with two word clouds for a Play Store app:
    one from its description, one from up to 500 of its reviews.

    Returns the matplotlib Figure (either subplot may be left empty when the
    corresponding text is missing).
    """
    app_info = google_play_scraper.app(app_id, lang=lang, country=country)
    try:
        fetched_reviews, _token = google_play_scraper.reviews(
            app_id, count=500, lang=lang, country=country)
    except IndexError:
        print('no reviews found')
        fetched_reviews = []
    # Bug fix: a review's 'content' can be None, which made ' '.join raise
    # TypeError — filter out empty/missing contents. The trailing 'test'
    # token keeps the string non-empty for WordCloud (as in the original).
    review_string = ' '.join(
        [r.get('content') for r in fetched_reviews if r.get('content')]) + 'test'
    fig, ax = plt.subplots(2, 1)
    try:
        desc_cloud = WordCloud(stopwords=stopwords[lang]).generate(
            app_info.get('description'))
        ax[0].imshow(desc_cloud, interpolation='bilinear')
        ax[0].axis("off")
        ax[0].set_title('App Description')
    except ValueError:
        # WordCloud raises ValueError when there are no words to plot.
        print('no content')
    try:
        review_cloud = WordCloud(stopwords=stopwords[lang]).generate(review_string)
        ax[1].imshow(review_cloud, interpolation='bilinear')
        ax[1].axis("off")
        ax[1].set_title('Comments')
    except ValueError:
        print('no content')
    return fig
def validate_appid(appid: str, country: str):
    """Validate an app id against its store and return its metadata.

    Returns a tuple ``(appname, store, publisher, category)``.
    Raises ValueError for an unrecognized store and Exception when the store
    lookup fails.
    """
    store = _guess_store(appid)
    # Fix: assert is stripped under `python -O`; raise explicitly instead.
    if store not in ["AppStore", "PlayStore"]:
        raise ValueError(f"Unknown store for appid: {appid}")
    if store == "AppStore":
        url = f"http://apps.apple.com/{country}/app/{appid}"
        res = requests.get(url)
        if res.status_code == 200:
            # Scrape the embedded JSON-LD fields out of the listing page.
            appname = re.search('(?<="name":").*?(?=")', res.text).group(0)
            publisher = re.search('(?<="author":).*("name":")(.*?)(?=")', res.text).group(2)
            category = re.search('(?<="applicationCategory":").*?(?=")', res.text).group(0)
            return appname, store, publisher, category
        else:
            raise Exception("Did not receive a valid response. Response code", res.status_code)
    if store == "PlayStore":
        try:
            appinfo = app(appid, country=country)
            appname = appinfo["title"]
            publisher = appinfo["developer"]
            category = appinfo["genre"]
            return appname, store, publisher, category
        # Fix: the original `except err as err` raised NameError (the name
        # `err` is evaluated before binding); catch Exception explicitly.
        except Exception as err:
            raise Exception("Did not receive a valid response.", err)
def scrape(apps_dataset_path, reviews_datase_path, show_debug: bool = False):
    """Scrape a fixed list of productivity apps from the Play Store.

    Writes app metadata to *apps_dataset_path* and the reviews (per score,
    per sort order) to *reviews_datase_path* as CSV.
    """
    app_packages = [
        "com.anydo",
        "com.todoist",
        "com.ticktick.task",
        "com.habitrpg.android.habitica",
        "cc.forestapp",
        "com.oristats.habitbull",
        "com.levor.liferpgtasks",
        "com.habitnow",
        "com.microsoft.todos",
        "prox.lab.calclock",
        "com.gmail.jmartindev.timetune",
        "com.artfulagenda.app",
        "com.tasks.android",
        "com.appgenix.bizcal",
        "com.appxy.planner",
    ]

    # --- app metadata ---
    app_infos = []
    for package in tqdm(app_packages):
        details = app(package, lang="en", country="us")
        del details["comments"]  # bulky and unused downstream
        app_infos.append(details)
    if show_debug:
        print_json(app_infos[0])
    app_infos_df = pd.DataFrame(app_infos)
    if show_debug:
        print(app_infos_df)
    app_infos_df.to_csv(apps_dataset_path, index=None, header=True)

    # --- reviews: for each score bucket, both sort orders ---
    app_reviews = []
    for package in tqdm(app_packages):
        for score in range(1, 6):
            for sort_order in [Sort.MOST_RELEVANT, Sort.NEWEST]:
                # Oversample the middle (3-star) bucket.
                batch, _ = reviews(
                    package,
                    lang="en",
                    country="us",
                    sort=sort_order,
                    count=300 if score == 3 else 150,
                    filter_score_with=score,
                )
                label = ("most_relevant"
                         if sort_order == Sort.MOST_RELEVANT else "newest")
                for entry in batch:
                    entry["sortOrder"] = label
                    entry["appId"] = package
                app_reviews.extend(batch)
    if show_debug:
        print(app_reviews[0])
    app_reviews_df = pd.DataFrame(app_reviews)
    app_reviews_df.to_csv(reviews_datase_path, index=None, header=True)
def tap_data(app_id_list):
    """Yield Play Store details for each app id (en/UK storefront), with the
    bulky fields removed."""
    unwanted = ('messages', 'screenshots', 'comments', 'histogram')
    for app_id in app_id_list:
        details = app(app_id, lang='en', country='uk')
        for key in unwanted:
            details.pop(key, None)  # drop if present; ignore otherwise
        yield details
def __init__(self, **kwargs):
    """Build the quarantine-tracker screen.

    Persists ``date,day_count,install_count`` in ``prev_details.txt``:
    the day counter is bumped once per calendar day, and the install count
    of the Canada COVID-19 app is refreshed from the Play Store when
    reachable (falling back to the cached value on connection failure).
    """
    super().__init__(**kwargs)
    today = str(datetime.date.today())
    if os.path.isfile("prev_details.txt"): # Checks to see if the file exists.
        with open("prev_details.txt", "r") as f:
            d = f.read().split(",")
            prev_date = d[0]
            if prev_date != today: # Checks if a day has passed.
                days = int(d[1]) + 1
            else:
                days = int(d[1])
            try:
                users = app('ca.gc.hcsc.canada.covid19').get("installs") # Finds how many people have the app.
            except ConnectionError: # For now just looks at the Canada COVID19 app.
                # Offline: reuse the install count cached in the file.
                users = d[2]
    else:
        # First run: no state file yet, seed with defaults.
        days = 0
        users = "100,000+"
    with open("prev_details.txt", "w") as f: # Writes new information to file.
        f.write(f"{today},{days},{users}")
    self.cols = 1 # This shows text on the screen.
    self.add_widget(Label(text=f"You are on day {days} of quarantine with us. Keep it up!", font_name="DejaVuSans"))
    self.add_widget(Image(source='logo.png'))
    self.add_widget(Label(text=f"You aren't alone! There are {users} people with you!", font_name="DejaVuSans"))
def getGoogleReview(appid):
    """Return the full Play Store details dict for *appid* (en/US storefront)."""
    return app(appid, lang='en', country='us')
def test_e2e_scenario_5(self):
    """Paid-app scenario: verify ``free``/``price`` fields not covered by
    scenarios 1~4."""
    details = app("com.choco.silentmode")
    self.assertFalse(details["free"])
    self.assertEqual(1.49, details["price"])
def get_gp_title(application_name):
    """Return the Play Store display title for *application_name*.

    Uses the module-level ``language`` and ``country`` settings.
    """
    return app(application_name, lang=language, country=country)['title']
def get_reviews_count():
    """Return the review count the Play Store reports for Tidal (en/US)."""
    details = app('com.aspiro.tidal', lang='en', country='us')
    return details["reviews"]
def get(self, request):
    """Handle GET: look up the app named in the ``Name`` query parameter and
    return its Play Store details serialized as JSON text."""
    package = request.query_params.get("Name")
    details = app(package, lang='en', country='us')
    # NOTE(review): the dict is dumped to a string before being wrapped in
    # Response, so clients receive a JSON-encoded *string* rather than an
    # object — preserved as-is for compatibility.
    return Response(json.dumps(details))
def import_app_info(app_id, **kwargs):
    """Fetch Play Store details for *app_id* and persist them via App.objects.

    Recognized kwargs: ``lang`` (default ``'en'``) and ``country``
    (default ``'us'``).
    """
    # NOTE(review): details are fetched and saved only when the app ALREADY
    # exists; if a "create if missing" import was intended, this condition
    # looks inverted — confirm against App.objects semantics.
    if App.objects.app_already_exists(app_id):
        details = app(
            app_id,
            lang=kwargs.get('lang', 'en'),
            country=kwargs.get('country', 'us'),
        )
        App.objects.save_app(details['appId'])
def persist_reviews_for_bundle_ids(session):
    """For every stored BundleId, fetch Play Store app data plus a small batch
    of reviews and persist both.

    Failures for an individual bundle id are logged and skipped so one bad
    listing cannot abort the whole run.
    """
    bundle_ids = session.query(BundleId).all()
    # Fix: the enumerate() index was never used; iterating the list directly
    # also lets tqdm infer the total for a proper progress bar.
    for bundle_id in tqdm(bundle_ids):
        try:
            app_data = app(bundle_id.native_id)
            app_row_id = persist_app_data(session, app_data)
            if app_row_id > 0:
                review_data = reviews_all(bundle_id.native_id, count=5)
                persist_review_data(session, app_row_id, review_data)
        except Exception as e:
            # Best-effort batch job: record the failure and continue.
            logging.exception(e)
def get_gp_title(application_name):
    """Getting application title from application code name.

    ``language`` and ``country`` come from module-level configuration.
    """
    info = app(application_name, lang=language, country=country)
    return info['title']
def test_happypath(self):
    """End-to-end sanity check of app() against a known Play Store listing."""
    result = app("com.sgn.pandapop.gp")
    self.assertEqual("com.sgn.pandapop.gp", result["appId"])
    self.assertEqual("Panda Pop! Bubble Shooter Saga & Puzzle Adventure", result["title"])
    self.assertEqual(
        "https://play.google.com/store/apps/details?id=com.sgn.pandapop.gp&hl=en&gl=us",
        result["url"],
    )
    # Fix: validators.url() returns a falsy ValidationFailure on bad input
    # instead of raising, so the bare call asserted nothing — check it.
    self.assertTrue(validators.url(result["icon"]))
def main():
    """Demo entry point: print a small NumPy array and its transpose, then
    dump Play Store details for a Russian book-catalog app."""
    matrix = np.array([[1, 2, 4], [3, 4, 5]])
    print(matrix)
    transposed = matrix.transpose()
    print(transposed)
    details = app('org.duosoft.books20century', lang='ru', country='ru')
    print(details)
def test_e2e_scenario_2(self):
    """Verify privacyPolicy plus the false/None ad- and IAP-related fields
    not covered by scenario 1."""
    details = app("com.google.android.calendar")
    self.assertEqual("http://www.google.com/policies/privacy", details["privacyPolicy"])
    self.assertIsNone(details["adSupported"])
    self.assertFalse(details["containsAds"])
    self.assertFalse(details["offersIAP"])
def getSimilarAppRecords(simApps):
    """Build a DataFrame with one detail row per similar app.

    *simApps* is an iterable of dicts whose ``'url'`` contains an ``id=``
    query parameter naming the Play Store package.
    """
    frames = []
    for entry in simApps:
        # Fix: the original shadowed the builtin `id` (and pre-bound it to a
        # throwaway list); use a descriptive local name instead.
        package_id = entry['url'].split("id=")[1]
        details = app(package_id)
        row = add_details_as_row(details)
        frames.append(pd.DataFrame([row]))
    # Fix: DataFrame.append is deprecated (removed in pandas 2.x) and was
    # quadratic; assemble the result with a single concat.
    return pd.concat(frames) if frames else pd.DataFrame()
async def playstore(ps_e):
    """ For .play command, fetch content from Play Store. """
    await ps_e.edit("`Finding...`")
    query = ps_e.pattern_match.group(1)
    try:
        res = app(query)
    except gpse.NotFoundError:
        # Unknown package: report and bail out.
        await ps_e.edit("Invalid package ID")
        return
    reply = ("**" + res["title"] + "**\n\nBy " + res["developer"]
             + "\n\nSummary: " + res['summary']
             + "\n\n[link](" + res["url"] + ")")
    await ps_e.edit(reply)
def test_e2e_scenario_3(self):
    """Verify the video / videoImage fields not covered by scenarios 1~2."""
    details = app("com.sgn.pandapop.gp")
    self.assertEqual(
        "https://www.youtube.com/embed/MaCigh28qQk?ps=play&vq=large&rel=0&autohide=1&showinfo=0",
        details["video"],
    )
    self.assertEqual("https://i.ytimg.com/vi/MaCigh28qQk/hqdefault.jpg", details["videoImage"])
def __init__(self, name_of_app, package_name, country, lang):
    """Scraper state for one Play Store app; fetches its metadata up front.

    Raises ValueError when any identifying parameter is empty/falsy.
    """
    # Guard clause: all four identifiers are required (De Morgan of the
    # original or-chain).
    if not (package_name and lang and country and name_of_app):
        raise ValueError("Invalid parameters passed for scrapping")
    self.name_of_app = name_of_app
    self.package_name = package_name
    self.lang = lang
    self.country = country
    info = app(self.package_name, self.lang, self.country)
    del info["comments"]  # bulky and unused
    Utils.print_json(info)
    self.app_info = info
    self.reviews = {}
async def playstore(ps_e):
    """ For .play command, fetch content from Play Store. """
    await ps_e.edit("`Finding...`")
    try:
        # Fix: indexing the split outside the try raised an unhandled
        # IndexError when .play was sent with no argument; fold it into the
        # existing invalid-input path.
        query = ps_e.text.split(" ")[1]
        res = app(query)
    except (IndexError, gpse.NotFoundError):
        await ps_e.edit("Invalid package ID")
        return
    if res["title"] is None:
        await ps_e.edit("Data error!")
        return
    await ps_e.edit("**"+res["title"]+"**\n\nBy "+res["developer"]+"\n\nSummary: "+res['summary']+"\n\n[link]("+res["url"]+")")
def get_app_details(package_id):
    """Get app details from the Play Store, falling back to search on failure."""
    try:
        logger.info('Fetching Details from Play Store: %s', package_id)
        details = app(package_id)
        for bulky in ('descriptionHTML', 'comments'):
            details.pop(bulky, None)
        # Strip markup so 'description' holds plain text only.
        details['description'] = BeautifulSoup(
            details['description'], features='lxml').get_text()
        details['error'] = False
        return details
    except Exception:
        # Deliberate best-effort fallback to the search-based lookup.
        return app_search(package_id)
def get_contact_information(app_id_list, lang='en', country='us'):
    """Collect developer/contact fields for each app id.

    Returns a dict mapping each app id to a *set* of pre-formatted
    ``"field: value"`` strings.
    """
    contact_fields = ('developer', 'developerId', 'developerEmail',
                      'developerWebsite', 'developerAddress',
                      'developerInternalID')
    result = {}
    for app_id in app_id_list:
        details = app(app_id, lang=lang, country=country)
        # NOTE(review): this literal builds a SET of strings, not a dict —
        # if key->value lookup was intended, a dict was probably meant here;
        # preserved as-is for compatibility with existing callers.
        result[app_id] = {
            field + ": " + str(details[field]) for field in contact_fields
        }
    return result
def do_stuff(file_path, output_path='./data.csv', errored_path='./errored.csv'):
    """Read app identifiers from an Excel sheet, look up each one's genre on
    the Play Store, and write results and failures to CSV.

    Parameters:
        file_path: path of the source .xlsx; identifiers are expected in the
            second, header-less column (pandas names it 'Unnamed: 1').
        output_path / errored_path: destination CSVs — new parameters that
            default to the original hard-coded locations, so existing
            callers are unaffected.
    """
    sheet = pd.read_excel(file_path)
    # The identifier column is the second one and has no header, so pandas
    # assigns the default name 'Unnamed: 1'.
    identifiers = sheet['Unnamed: 1']
    all_data = []
    errored = []
    err_count = 0
    for identifier in identifiers:
        try:
            result = app(
                identifier,
                lang='en',      # defaults to 'en'
                country='us'    # defaults to 'us'
            )
            # Fix: the original reused the name `data` for this record,
            # shadowing the DataFrame read above.
            all_data.append({
                "identifier": identifier,
                "genre": result['genre'],
                "genreId": result['genreId'],
            })
        except Exception as err:
            # Best-effort scrape: record the failure and keep going.
            errored.append({"identifier": identifier})
            err_count += 1
            print(f'[ERROR] {identifier} - {err}')
            print(f'[FAILED] {err_count} / {len(identifiers)}')
    # Saving data to file! :D
    pd.DataFrame(all_data).to_csv(output_path)
    pd.DataFrame(errored).to_csv(errored_path)
def get_google_play_info(package):
    """Fetch Play Store details for *package* and index them in Elasticsearch.

    Returns a status dict; any failure is reported in the result rather
    than raised.
    """
    try:
        details = app(package, lang='en', country='us')
        if details:
            es.index(index=settings.ELASTICSEARCH_GP_INDEX,
                     id=package, body=details)
            del details  # release the (potentially large) payload promptly
            return {'status': 'success', 'info': ''}
    except Exception:
        # Deliberate best-effort: fall through to the error status below.
        pass
    finally:
        gc.collect()
    return {'status': 'error',
            'info': f'Unable to retrieve Google Play details of {package}'}
def get_reviews(app_id):
    """Download up to ~50k reviews for *app_id* (en/IN storefront), paging via
    the continuation token, and save them to app_reviews/<title>__<id>.csv."""
    rows = []
    # First page: explicit options; later pages resume from the token.
    result, continuation_token = reviews(
        app_id,
        lang='en',
        country='in',
        sort=Sort.MOST_RELEVANT,
        count=200,
    )
    rows += result
    iteration = 0
    # Keep paging until the token is exhausted or ~50k reviews collected.
    while continuation_token.token and (len(rows) <= 50000):
        iteration += 1
        print(f"Iteration number : {iteration}")
        result, continuation_token = reviews(
            app_id, continuation_token=continuation_token)
        rows += result
        print(f"Total_reviews so far is {len(rows)}")
    df = pd.DataFrame(rows)
    details = app(app_id, lang='en', country='in')
    print(f"retrived total of {len(rows)} for app -> {details['title']}")
    app_reviews_folder = "app_reviews"
    if not os.path.isdir(app_reviews_folder):
        os.mkdir(app_reviews_folder)
    # Build a filesystem-safe filename from the title and package id.
    csv_file_name = (details['title'].replace(" ", "_").replace("/", "_")
                     + "__" + app_id.replace(".", "_"))
    csv_url = os.path.join(app_reviews_folder, csv_file_name + ".csv")
    df.to_csv(csv_url, index=False)
    print(f"saved reviews for {app_id} to {csv_url}")
def insert_daKanji(readme: str):
    """Fill the DaKanji placeholders in *readme* with live Play Store and
    GitHub statistics and return the substituted text."""
    # The Play Store occasionally returns score 0.0; retry until a real
    # rating comes back. NOTE(review): this loops indefinitely if the
    # listing genuinely has no score — consider a retry cap.
    result = {"score": 0.0}
    while result["score"] == 0.0:
        result = app(
            'com.DaAppLab.DaKanjiRecognizer',
            lang='en',      # defaults to 'en'
            country='us'    # defaults to 'us'
        )
    print("DaKanji:\n", "\tinstall:", result["installs"], "stars:",
          result["score"], "ratings:", result["ratings"])
    # put the playstore rating in the README
    readme = readme.replace(r"%DaKanjiAS%", "{:.2f}".format(result["score"]) + "⭐")
    # put the playstore downloads in the README
    readme = readme.replace(r"%DaKanjiAD%", result["installs"] + "️⬇️")
    # microsoft store rating
    readme = readme.replace("%DaKanjiDW%", "Download")
    # snap store rating
    readme = readme.replace("%DaKanjiDL%", "Download")
    # Mac app store rating
    readme = readme.replace("%DaKanjiDM%", "Download")
    # github mobile stars
    rest_api = "https://api.github.com/repos/CaptainDario/DaKanji-mobile"
    page = requests.get(rest_api).json()
    stars = page["stargazers_count"]
    readme = readme.replace(r"%DaKanjiMG%", str(stars) + "⭐")
    # github desktop stars on github
    rest_api = "https://api.github.com/repos/CaptainDario/DaKanji-desktop"
    page = requests.get(rest_api).json()
    stars = page["stargazers_count"]
    # Fix: this replace was duplicated in the original; the second call was a
    # no-op because the first had already consumed every placeholder.
    readme = readme.replace(r"%DaKanjiDG%", str(stars) + "⭐")
    return readme
def upload(x):
    """Scrape every package listed in category *x*'s CSV and POST its Play
    Store details to the InsertData endpoint.

    *x* selects the CSV: 1 -> category1.csv, 2 -> category2.csv, anything
    else -> category3.csv.
    """
    url1 = "https://nasfistsolutions.com/illuminate/InsertData.php?"
    request = "request=" + str(x)
    url = url1 + request
    col_list = ["Package"]  # to select a single column i.e. "Package" from csv file
    # Fix: use raw strings for the Windows paths — the originals relied on
    # '\F'/'\S' happening not to be recognized escape sequences.
    if x == 1:  # to select different csv according to its category in each iteration
        data = pd.read_csv(r"D:\FYP\ServerSide\category1.csv", usecols=col_list)
    elif x == 2:
        data = pd.read_csv(r"D:\FYP\ServerSide\category2.csv", usecols=col_list)
    else:
        data = pd.read_csv(r"D:\FYP\ServerSide\category3.csv", usecols=col_list)
    for y in data["Package"]:
        try:
            # fetching app data from playstore using google_play_scraper
            result = app(
                y,
                lang='en',      # defaults to 'en'
                country='us'    # defaults to 'us'
            )
            payload = {  # creating payload to send to server
                'title': result['title'],
                'description': result['description'],
                'installs': result['installs'],
                'score': result['score'],
                'ratings': result['ratings'],
                'reviews': result['reviews'],
                'price': result['price'],
                'genreId': result['genreId'],
                'icon': result['icon'],
                'size': result['size'],
                'url': result['url'],
                'released': result['released'],
                'version': result['version'],
                'summary': result['summary']
            }
            response = requests.post(url, payload)  # sending data to server
            print(response.text)
        # Fix: narrowed the bare `except:`, which also swallowed
        # KeyboardInterrupt/SystemExit.
        except Exception:
            print("not found")
def home(request):
    """Render the search page; on POST, fetch the requested app's Play Store
    details into the session and redirect to the result view.

    On lookup failure an error message is flashed and the form re-rendered.
    """
    url = 'home.html'
    if request.method == 'POST':
        app_id = request.POST['appid']
        try:
            result = app(
                app_id=app_id,
                lang='en',      # defaults to 'en'
                country='us'    # defaults to 'us'
            )
            request.session['result'] = result
            return redirect('/result')
        # Fix: narrowed the bare `except:` so KeyboardInterrupt/SystemExit and
        # programming errors are no longer silently treated as a bad id.
        except Exception:
            messages.error(request, "Invalid Id . Please check")
    return render(request, url)