Example #1
def InstaImageScraper():
    imgScraper = insta.InstagramScraper(usernames=[insta_profiles[x]],
                                        maximum=number_last_photos,
                                        media_metadata=True, latest=True,
                                        media_types=['image'])
    imgScraper.scrape()
    print("image scraping is running, please wait 50 seconds.")
Example #2
def getPostsByEnglishName():
    result = {}
    instagram = instagram_scraper.InstagramScraper()
    user = instagram.get_user(INSTAGRAM_ACCOUNT)
    posts = instagram.query_media_gen(user)
    num_posts = 0
    for post in posts:
        caption = post['edge_media_to_caption']['edges'][0]['node']['text']
        english_name = getFirstHashtag(caption)
        likes = post['edge_media_preview_like']['count']
        comments = post['edge_media_to_comment']['count']
        thumbnail = post['thumbnail_resources'][0]
        result[english_name] = {
            'likes': likes,
            'num_comments': comments,
            'images': {
                'thumbnail': {
                    'url': thumbnail['src'],
                    'width': thumbnail['config_width'],
                    'height': thumbnail['config_height']
                }
            }
        }
        num_posts += 1
        # print('%s %d' % (getFirstHashtag(caption), likes))

    print('%d instagram posts' % num_posts)

    return result
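Example #2 also calls a getFirstHashtag helper that is not shown. A minimal sketch, assuming it simply returns the first hashtag in a caption (without the leading "#"), or None when there is none:

import re

def getFirstHashtag(caption):
    # Hypothetical helper, not from the original source: grab the first
    # "#word" token from the caption text.
    match = re.search(r'#(\w+)', caption or '')
    return match.group(1) if match else None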
Example #3
def scraper():
    #call('instagram-scraper ' + insta_profiles + ' -m ' + number_last_photos + ' -u 0 -p 0 -t none --media-metadata', shell=True)
    imgScraper = insta.InstagramScraper(usernames=[insta_profiles[x]],
                                        login_user="******",
                                        login_pass="******",
                                        maximum=number_last_photos,
                                        media_metadata=True,
                                        latest=True,
                                        media_types=['image'])
    imgScraper.scrape()
    # Take the last JSON image data and post the Instagram image, tags and description
    with open(insta_profiles[x] + '/' + insta_profiles[x] + '.json', 'r') as j:
        json_data = json.load(j)
        newstr = (json_data[0]["display_url"])
        imgUrl = newstr.split('?')[0].split('/')[-1]
        imgTags = (json_data[0]["tags"])
        # imgDescription = (json_data[0]["description"])
        # Execute instapy for the current Instagram user from the insta_profiles list
        call(
            'instapy -u yourusername -p yourpassword -f ./' +
            insta_profiles[x] + '/' + imgUrl +
            ' -t "#model #models #Modeling #modelo #modellife #modelling #modelagency #Modelos #modelphotography #modelsearch #ModelStatus #modelingagency #modelfitness #ModelsWanted #modelshoot #modella #modelmanagement #modelscout #modeltest #modelindonesia #modele #modelife #modelmayhem #modelgirl #modell #modelslife #modelkids #modelcall #modelpose #ModelBehavior"',
            shell=True)
        time.sleep(5)
        # Account number 2
        call(
            'instapy -u yourusername -p yourpassword -f ./' +
            insta_profiles[x] + '/' + imgUrl +
            ' -t "#model #models #Modeling #modelo #modellife #modelling #modelagency #Modelos #modelphotography #modelsearch #ModelStatus #modelingagency #modelfitness #ModelsWanted #modelshoot #modella #modelmanagement #modelscout #modeltest #modelindonesia #modele #modelife #modelmayhem #modelgirl #modell #modelslife #modelkids #modelcall #modelpose #ModelBehavior"',
            shell=True)
        print(imgUrl)

    print("scraped " + str(number_last_photos) + " from " + insta_profiles[x])
Example #4
def start_like_followersai():
    x = 0
    bot.api.get_self_username_info()
    profile_pic = bot.api.last_json["user"]["profile_pic_url"]
    followers = bot.api.last_json["user"]["follower_count"]
    following = bot.api.last_json["user"]["following_count"]
    media_count = bot.api.last_json["user"]["media_count"]
    number_last_photos = 1
    following_username = request.form['following_username']
    time_sleep = request.form['time_sleep']
    user_id = bot.get_user_id_from_username(following_username)
    # keep the follower id list separate from the "following" count fetched
    # above, so that count is still what gets passed to the template below
    follower_ids = bot.get_user_followers(user_id)
    for user in follower_ids:
        pusername = bot.get_username_from_user_id(user)
        imgScraper = instagram_scraper.InstagramScraper(usernames=[pusername],
                                                        maximum=number_last_photos,
                                                        media_metadata=True,
                                                        latest=True,
                                                        media_types=['image'])
        imgScraper.scrape()
        # Open the user's JSON file and, if a face is detected, like the image
        try:
            with open(pusername + '/' + pusername + '.json', 'r') as j:
                json_data = json.load(j)
                display_url = (json_data["GraphImages"][0]["display_url"])
                media_id = (json_data["GraphImages"][0]["id"])
                profile = (json_data["GraphImages"][0]["username"])
                imgUrl = display_url.split('?')[0].split('/')[-1]
                instapath = pusername + '/' + imgUrl
                detect = None
                try:
                    img = cv2.imread(instapath)
                    detector = MTCNN()
                    detect = detector.detect_faces(img)
                except Exception as e:
                    bot.logger.info(e)

                if not detect:
                    bot.logger.info("No Face Detected")
                else:
                    bot.logger.info("Face Detected")
                    bot.api.like(media_id)
                    bot.logger.info("liked " + display_url + " by " + profile + "\n")
                    x += 1
                    bot.logger.info("liked " + str(x) + " images" + "\n")
                    bot.logger.info("Sleeping")
                    time_sleep = int(time_sleep)
                    time.sleep(time_sleep)
        except Exception as ee:
            bot.logger.info(ee)



    return render_template("like_followersai.html", username=username,
                           profile_pic=profile_pic, followers=followers,
                           following=following, media_count=media_count)
Example #5
def InstaImageScraper():
    ''' Scrape image on profiles '''
    imgScraper = insta.InstagramScraper(usernames=profiles,
                                        maximum=number_last_photos,
                                        media_metadata=True,
                                        latest=True,
                                        media_types=['image'])
    imgScraper.scrape()

    print("Images has been scraped")
Example #6
def scraper():
    #  call('instagram-scraper ' + insta_profiles + ' -m ' + number_last_photos + ' -u 0 -p 0 -t none --media-metadata', shell=True)
    imgScraper = insta.InstagramScraper(usernames=[insta_profiles[x]],
                                        maximum=number_last_photos,
                                        media_metadata=True,
                                        latest=True,
                                        media_types=['none'])
    imgScraper.scrape()

    print("scraped " + str(number_last_photos) + " from " + insta_profiles[x])
Example #7
def start_like_followersai():
    number_last_photos = 1
    following_username = request.form['following_username']
    time_sleep = request.form['time_sleep']

    user_id = bot.get_user_id_from_username(following_username)
    following = bot.get_user_followers(user_id)
    for user in following:
        pusername = bot.get_username_from_user_id(user)
        imgScraper = instagram_scraper.InstagramScraper(
            usernames=[pusername],
            maximum=number_last_photos,
            media_metadata=True,
            latest=True,
            media_types=['image'])
        imgScraper.scrape()

        # Open the user's JSON file and, if a face is detected, like the image
        try:
            with open(pusername + '/' + pusername + '.json', 'r') as j:
                json_data = json.load(j)
                display_url = (json_data["GraphImages"][0]["display_url"])
                media_id = (json_data["GraphImages"][0]["id"])
                profile = (json_data["GraphImages"][0]["username"])
                imgUrl = display_url.split('?')[0].split('/')[-1]
                instapath = pusername + '/' + imgUrl
                img = cv2.imread(instapath)
                detector = MTCNN()
                detect = detector.detect_faces(img)

                if not detect:
                    print("no face detected")
                else:
                    bot.api.like(media_id)
                    print("liked " + display_url + " by" + profile)
                    print("=" * 30)
                    time_sleep = int(time_sleep)
                    time.sleep(time_sleep)

        except:
            pass

    return render_template("like_followersai.html",
                           username=username,
                           profile_pic=profile_pic,
                           followers=followers,
                           following=following,
                           media_count=media_count)
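Examples #4 and #7 depend on MTCNN().detect_faces() returning an empty list when no face is found. A small standalone check illustrating that behaviour; the image path is a placeholder:

import cv2
from mtcnn import MTCNN

img = cv2.imread("some_photo.jpg")   # placeholder path
if img is None:
    raise SystemExit("image not found")
faces = MTCNN().detect_faces(img)    # list of dicts with 'box', 'confidence', 'keypoints'
if not faces:
    print("no face detected")
else:
    print("found %d face(s), first box: %s" % (len(faces), faces[0]["box"]))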
Example #8
def scraper(profile, start, posts, username, passwd):
    imgScraper = insta.InstagramScraper(usernames=[profile],
                                        login_user=username,
                                        login_pass=passwd,
                                        maximum=start + posts - 1,
                                        media_metadata=True,
                                        latest=True,
                                        media_types=['image'])
    imgScraper.scrape()
    # Take the last JSON image data and post the Instagram image, tags and description
    with open(os.path.join(profile, profile + '.json'), 'r') as j:
        json_data = json.load(j)
        pics = json_data[start - 1:start + posts - 1]
        for pic in pics:
            newstr = (pic["display_url"])
            imgUrl = newstr.split('?')[0].split('/')[-1]
            cap = None
            try:
                cap = pic["edge_media_to_caption"]["edges"][-1]["node"][
                    "text"] + '\n'
                if caption is False:
                    cap = ""
                else:
                    print("Caption: " + cap)
            except:
                cap = ""
                print("No caption exists.")
            random.shuffle(tags)
            # Post the image with instapy using the caption and shuffled tags
            tagString = "#" + " #".join(tags[:min(30, len(tags)) - 1])
            call('instapy -u ' + username + ' -p ' + passwd + ' -f ./' +
                 profile + '/' + imgUrl + ' -t "' + cap + tagString + '"',
                 shell=True)
            print(imgUrl)
            time.sleep(delay)

    print("Scraped " + str(posts) + "(Post no. " + str(start) + ", Post no. " +
          str(start + posts - 1) + ") posts from " + profile)
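Example #8 also reads several module-level settings (tags, caption, delay) that are not shown. A minimal sketch of what they might look like, with placeholder values only:

# Assumed configuration for Example #8 (names taken from the code above,
# values here are placeholders).
tags = ["model", "models", "modeling", "photography", "portrait"]  # hashtag pool to shuffle
caption = True   # set to False to drop the scraped caption when reposting
delay = 60       # seconds to sleep between instapy calls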
        "Make sure you have all arguments: username, password, and tag genre. "
    )
    print("Example command: `python selenium_driver.py username password tag`")
    username = input("username: "******"password: "******"tag genre: ")
else:
    username = str(sys.argv[1])
    password = str(sys.argv[2])
    try:
        tag_genre = str(sys.argv[3])
    except:
        tag_genre = ""

# set up scraper
scraper = instagram_scraper.InstagramScraper()
scraper.setUp()
scraper.login(username, password)

tag = tag_dict.get(tag_genre, ["instagram"])

if tag_genre == "ad":
    insta_users = scraper.related_tags_search_paid_promo("ad")
    scraper.write_arr_to_file("ad", insta_users)
elif tag_genre == "emails":
    dirname = input("directory name: ")
    scraper.get_insta_profile_emails(dirname)
else:
    for tt in tag:
        insta_users = scraper.related_tags_search_by_likes(tt)
Example #10
def instascraper(bot, new_media_id, path=POSTED_MEDIAS):

    global x
    while x < len(insta_profiles):
        imgScraper = insta.InstagramScraper(usernames=[insta_profiles[x]],
                                            maximum=number_last_photos,
                                            media_metadata=True,
                                            latest=True,
                                            media_types=['image'])
        imgScraper.scrape()
        print("image scraping is running or not")
        try:
            # Open insta_profiles[x]'s scraped JSON file and take the first image's location
            with open(insta_profiles[x] + '/' + insta_profiles[x] + '.json',
                      'r') as j:
                json_data = json.load(j)
                newstr = (json_data[0]["display_url"])
                imgUrl = newstr.split('?')[0].split('/')[-1]
                global instapath
                instapath = insta_profiles[x] + '/' + imgUrl
                # Locate Face On image scraped
                image = face_recognition.load_image_file(instapath)
                face_locations = face_recognition.face_locations(image)
                # If no face located scrape the next profile
                if not face_locations:
                    print("There is no Face Detected scraping next profile")
                    x += 1
                    instascraper(bot, new_media_id, path)
                else:
                    print(
                        "There is a Face Detected scraping and posting this image"
                    )
                print(face_locations)
                print(instapath)
                # Append username info to the tsv file
                try:
                    f = open(f"{username}.tsv", "a+")
                    f.write(str(saveStats))
                    f.close()
                    f = open(f"{username}.tsv", "r")
                    last_line = f.readlines()[-2].replace("False", "")
                    print("Date - Time - Followers - Following - Posts")
                    print(last_line)
                    f.close()
                # Write the username tsv file if it does not exist yet
                except:
                    f = open(f"{username}.tsv", "w+")
                    f.write(str(saveStats))
                    f.close()
                    f = open(f"{username}.tsv", "r")
                    last_line = f.readlines()[-1]
                    print("Date - Time - Followers - Following - Posts")
                    print(last_line)
                    f.close()
            time.sleep(2)
            time.sleep(2)
            repost_best_photos(bot, users, args.amount)
            print("Posting Instagram")
            os.remove("posted_medias.txt")
            time.sleep(900)
        except:
            print("User is set to Private scraping next user")
        x += 1
Example #11
import instagram_scraper as insta
scraper = insta.InstagramScraper(hashtag='bangaloreairport',
                                 maximum=200,
                                 quiet=False,
                                 tag=True)
a = scraper.scrape_hashtag()
print(a)
Example #12
parser.add_argument('users', type=str, nargs='*', help='users')
args = parser.parse_args()

bot = Bot()
bot.login()

users = None
if args.users:
    users = args.users
elif args.file:
    users = utils.file(args.file).list

while x < len(insta_profiles):
    imgScraper = insta.InstagramScraper(usernames=[insta_profiles[x]],
                                        maximum=number_last_photos,
                                        media_metadata=True,
                                        latest=True,
                                        media_types=['image'])
    imgScraper.scrape()
    print("image scraping is running or not")

    try:
        with open(insta_profiles[x] + '/' + insta_profiles[x] + '.json',
                  'r') as j:
            json_data = json.load(j)
            newstr = (json_data[0]["display_url"])
            imgUrl = newstr.split('?')[0].split('/')[-1]
            instapath = insta_profiles[x] + '/' + imgUrl
            print(instapath)
        repost_best_photos(bot, users, args.amount)
        time.sleep(600)
    # assumed completion (the original snippet is cut off here): mirror Example #10
    except:
        print("User is set to Private scraping next user")
    x += 1
Example #13
    def instagram(update, context):
        scraper = instagram_scraper.InstagramScraper()
        insta_post = scraper.get_latest_instagram_post()

        context.bot.send_message(chat_id=update.message.chat_id,
                                 text=insta_post)
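The handler in Example #13 looks like a python-telegram-bot callback (it takes update and context and uses context.bot.send_message). A minimal wiring sketch, assuming python-telegram-bot v12/v13, a placeholder bot token, and that the handler is reachable at module scope; the original example does not show this part:

from telegram.ext import Updater, CommandHandler

updater = Updater(token="YOUR_BOT_TOKEN", use_context=True)  # placeholder token
updater.dispatcher.add_handler(CommandHandler("instagram", instagram))
updater.start_polling()
updater.idle()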