def sortPeoples(self, startSort=0):
    """Filter the scanned accounts, keeping those whose bio or name matches the town and activity keywords."""
    agent = WebAgent()
    if startSort == 0:
        scaningPeoples = self.scaningPeoples
    else:
        scaningPeoples = self.scaningPeoples[startSort:]
    for people in scaningPeoples:
        try:
            account = Account(people)
            agent.update(account)
            biography = account.biography
            name = account.full_name
            infoAcc = biography + name
            print(people)
            if self.sortByTown(infoAcc.lower()) and self.sortByActivity(infoAcc.lower()):
                self.sortingPeoples.append(people)
            self.currentSortPeople += 1
        except Exception:
            # Typically a rate limit or connection failure: report it,
            # rotate the VPN connection and restart the sort.
            print('errorSort')
            self.offVpn()
            self.onVpn()
            self.playSortPeoples()
    return self.sortingPeoples
def get_account_info_json(username, num_of_posts):
    agent = None
    accountInfo = None
    posts = []
    try:
        accountInfo = AccountInfoEntity(username)
        agent = WebAgent()
        account = WebAgentAccount(username)
        agent.update(account)
        accountInfo.Bio = account.biography
        accountInfo.TotalMediaCount = account.media_count
        # get_media returns a (media_list, pointer) pair; keep only the list
        posts = agent.get_media(account, count=num_of_posts)[0]
    except Exception as e:
        print("error while getting account info:", e)
    if agent is None or accountInfo is None:
        return "Couldn't get account info"
    accountInfo.posts = _get_list_of_parsed_posts(agent, posts)
    accountInfo.parsedPostEntities = len(posts)
    accountInfoJson = jsonpickle.encode(accountInfo, unpicklable=False)
    return accountInfoJson
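# A minimal usage sketch, not part of the original source: it assumes this
# module is run directly and that AccountInfoEntity, WebAgentAccount and
# _get_list_of_parsed_posts are defined elsewhere in it; the username and
# post count are arbitrary examples.
if __name__ == "__main__":
    print(get_account_info_json("zuck", num_of_posts=3))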
def get_top_location_posts_json(locationId):
    agent = None
    parsedPosts = []
    try:
        agent = WebAgent()
        location = Location(locationId)
        agent.update(location)
        # get_media returns a (media_list, pointer) pair; keep only the list
        posts = agent.get_media(location, count=3)[0]
        parsedPosts = _get_list_of_parsed_posts(agent, posts, True)
    except Exception as e:
        print("error while getting top location posts:", e)
    top_location_posts_json = jsonpickle.encode(parsedPosts, unpicklable=False)
    return top_location_posts_json
def get_top_hashtag_posts_json(hashtag):
    agent = None
    parsedPosts = []
    try:
        agent = WebAgent()
        tag = Tag(hashtag)
        agent.update(tag)
        posts = agent.get_media(tag, count=2)[0]
        parsedPosts = _get_list_of_parsed_posts(agent, posts, True)
    except Exception as e:
        print("error while getting top hashtag posts:", e)
    topHashtagPostsJson = jsonpickle.encode(parsedPosts, unpicklable=False)
    return topHashtagPostsJson
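# A hedged usage sketch, not part of the original source: both helpers return
# a jsonpickle-encoded string. The location id is borrowed from the example
# script further below and the hashtag is an arbitrary placeholder.
if __name__ == "__main__":
    print(get_top_location_posts_json(17326249))
    print(get_top_hashtag_posts_json("travel"))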
import os
import time
import random
import argparse
import json

import pandas as pd

from instagram.entities import Media
from instagram.agents import WebAgent
from instagram.exceptions import InternetException
from requests.exceptions import ConnectionError

from datamining.files import DEFAULT_PATH

AG = WebAgent()


def get_post_likes(shortcode: str):
    """Collect likes from an Instagram post identified by its shortcode."""
    media = Media(shortcode)
    pointer = None
    for i in range(3):
        # Small random pause between requests to stay under rate limits.
        time.sleep(random.random() * 2)
        try:
            likes, pointer = AG.get_likes(media, pointer)
            # if pointer is None:
            #     return media.likes
        except (ConnectionError, InternetException) as e:
            # The original snippet breaks off here; a plausible handler
            # (assumption) is to report the error and retry on the next pass.
            print(e)
            continue
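# A hedged sketch, not in the original source: paging through all likers with
# the same (likes, pointer) protocol used above. The commented-out hint in
# get_post_likes suggests that a None pointer marks the last page.
def get_all_post_likes(shortcode: str):
    media = Media(shortcode)
    pointer = None
    all_likes = []
    while True:
        time.sleep(random.random() * 2)  # same politeness delay as above
        likes, pointer = AG.get_likes(media, pointer)
        all_likes.extend(likes)
        if pointer is None:
            return all_likes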
from instagram.agents import WebAgent


def agent():
    return WebAgent()
import instagram
from instagram.agents import WebAgent
from instagram import Account, Media, Location

agent = WebAgent()
account = Account("zuck")
loc = Location(17326249)

agent.update(account)
media = agent.get_media(loc, count=50)
print(media)
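# A hedged follow-up sketch, not in the original script: get_media returns a
# (media_list, pointer) pair, so passing the pointer back (as the tag scraper
# below does) fetches the next page of posts for the same location.
posts, pointer = agent.get_media(loc, count=50)
more_posts, _ = agent.get_media(loc, count=50, pointer=pointer)
print(len(posts) + len(more_posts))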
import urllib.request
import instagram
import threading
from instagram.agents import WebAgent
from instagram.entities import Tag
from instagram.entities import Location
from instagram.entities import Media

base_url = "http://instagram.com/p/"
recognition_url = "http://35.228.95.2:5000/calculate?url="

agent = WebAgent()
tag = Tag("kek")
agent.update(tag)

media_list = []
musician_list = []


def check_media(id) -> bool:
    resp = urllib.request.urlopen(recognition_url + base_url + id).read()
    print(resp)
    # urlopen().read() returns bytes, and identity comparison ("is") with a
    # string literal never matches, so compare by value against b"yes".
    if resp == b"yes":
        return True
    # musician_list.append(Musician( ... ))
    return False


def load_photos(ptr):
    medias = agent.get_media(tag, count=10, pointer=ptr)
    for media in medias[0]:
        # The original snippet is truncated here.
        ...