Example #1
0
def main():
    """Entry point: poll one stored user per site (chosen by the current
    minute's last digit) and, every 5 minutes, run a general keyword search."""
    minute = datetime.datetime.now().minute
    # Explicit floor division: '/' floors for ints on Python 2 but returns a
    # float on Python 3, which would break the list indexing below.
    # Note: at minute 0 this is -1, i.e. the last keyword in the list.
    five_min_interval = (minute // 5) - 1
    # Last digit of the minute selects which stored user to poll this run.
    one_digit_minute = int(str(minute)[-1])

    print("################################")
    print("#### OPSEC Search Bootstrap ####")
    print("################################")

    print("----- User Specific Search -----")
    print("Attempting site/user specific search")

    # Twitter
    try:
        user = twitter.get_users()[one_digit_minute]
        twitter.get_user_tweets(user)
    except IndexError:
        print("No Twitter user found at index " + str(one_digit_minute))

    # Reddit
    try:
        author = reddit.get_users()[one_digit_minute]
        reddit.get_user_comments(author)
    except IndexError:
        print("No Reddit user found at index " + str(one_digit_minute))

    # StackExchange
    try:
        account_id = stackexchange.get_users()[one_digit_minute]
        stackexchange.get_user_posts(account_id)
    except IndexError:
        print("No StackExchange user found at index " + str(one_digit_minute))


    print("-------- General search --------")
    # Keyword searches only run on 5-minute boundaries (rate limiting).
    if (minute % 5) == 0:
        print("Attempting general site search...")
        try:
            keyword = opsecHeader.get_user_keywords('all',
                                                'twitter')[five_min_interval]
            twitter.search_twitter(keyword)
        except IndexError:
            print("No twitter keyword at index " + str(five_min_interval))

        try:
            keyword = opsecHeader.get_user_keywords('all',
                                                'facebook')[five_min_interval]
            facebook.search_facebook(keyword)
        except IndexError:
            print("No facebook keyword at index " + str(five_min_interval))

        try:
            keyword = opsecHeader.get_user_keywords('all',
                                                'wordpress')[five_min_interval]
            wordpress.search_wordpress(keyword)
        except IndexError:
            print("No wordpress keyword at index " + str(five_min_interval))

    else:
        print("Minute not a multiple of 5, not attempting general site search...")
Example #2
0
    def get_post(self, account_id, site, user_id, content_type):
        """Pull a user's newest posts of one type from the Stack Exchange API,
        scrape each post's body from the site's HTML, store anything new, and
        e-mail when a watched keyword appears in the content.

        account_id   -- local account key used for keyword lookup and storage
        site         -- Stack Exchange site name, e.g. 'stackoverflow'
        user_id      -- numeric user id on that site
        content_type -- 'question', 'answer', or 'comment' (singular; the API
                        URL below appends the plural 's')
        """
        # Timestamp of the most recently stored post; the API call only
        # returns items created at/after this time (fromdate is inclusive).
        latest_epoch_time = self.get_latest_post(user_id, site, content_type)
        query_string = 'http://api.stackexchange.com/2.1/users/' + str(user_id) + '/' + str(content_type) + 's?fromdate=' + str(latest_epoch_time) + '&order=desc&sort=creation&site=' + site + '&key=' + self.api_key
        # Results are cached under a per-(site, user, type) key and re-read
        # below via read_results_json with the same key.
        opsecHeader.query_website_json(str(site) + str(user_id) + str(content_type), query_string)
        opsecHeader.write_last_checked_time('stackexchange')

        results = opsecHeader.read_results_json(str(site) + str(user_id) + str(content_type))
        items = results['items']
        for i in items:

            creation_date = i['creation_date']
            # Skip the item we already stored (fromdate being inclusive means
            # the latest stored post comes back in the result set).
            if(latest_epoch_time != creation_date):

                if(content_type == 'question'):
                    # Questions link straight to their page; grab the body div.
                    url = i['link']
                    html = urllib2.urlopen(url).read()
                    soup = BeautifulSoup(html)
                    dirty_content = soup.find('div', {'class': 'post-text', 'itemprop': 'description'})
                    content = ''.join(dirty_content.findAll(text=True))

                elif(content_type == 'answer'):
                    # Answers: use the /a/<id> shortlink, then locate the
                    # 'answer-<id>' div in the rendered page.
                    answer_id = i['answer_id']
                    url = "http://" + str(site) + ".com/a/" + str(answer_id)
                    html = urllib2.urlopen(url).read()
                    soup = BeautifulSoup(html)
                    answer_id = 'answer-' + str(answer_id)
                    div_content = soup.find('div', {'id': answer_id})
                    dirty_content = div_content.find('div', {'class': 'post-text'})
                    content = ''.join(dirty_content.findAll(text=True))

                elif(content_type == 'comment'):
                    # Comments: resolve the /q/<post> shortlink to the full
                    # question URL, then anchor to the specific comment.
                    comment_id = i['comment_id']
                    post_id = i['post_id']
                    short_url = 'http://' + str(site) + '.com/q/' + str(post_id)
                    long_url = str(urllib2.urlopen(short_url).geturl())
                    long_url = long_url.split("#")[0]
                    url = long_url + '#comment' + str(comment_id) + '_' + str(post_id)
                    html = urllib2.urlopen(url).read()
                    soup = BeautifulSoup(html)
                    comment_id_format = 'comment-' + str(comment_id)
                    try:  # Will fail if comments need to be loaded via AJAX
                        comment_tr = soup.find('tr', {'id': comment_id_format})
                        dirty_content = comment_tr.find('span', {'class': 'comment-copy'})
                        content = ''.join(dirty_content.findAll(text=True))
                    except AttributeError:
                        content = 'See website'

                # NOTE(review): if content_type is none of the three values
                # above, 'url' and 'content' are unbound here -> NameError.
                profile_image = i['owner']['profile_image']
                display_name = i['owner']['display_name']

                self.write_display_name(account_id, display_name)
                self.write_latest_post(account_id, user_id, site, content_type, creation_date, profile_image, url, content, display_name)

                # Alert on any watched keyword found in the scraped body.
                keywords = opsecHeader.get_user_keywords(account_id, 'stackexchange')
                for keyword in keywords:
                    if keyword in content:
                        opsecHeader.send_email(keyword, "Stack Exchange", display_name)
Example #3
0
def add_paste(title, paste_id, paste):
    keywords = opsecHeader.get_user_keywords('all', 'pastebin')
    for keyword in keywords:
        if keyword in paste:
            now = int(time.mktime(time.localtime()))
            sql = "INSERT INTO `pastebin` (`epoch_time`, `title`, `paste`, `pasteID`, `keyword`) VALUES (%s, %s, %s, %s, %s)"
            try:
                if(opsecHeader.cur.execute(sql, (now, title, paste, paste_id, keyword))):
                    opsecHeader.db.commit()
                    print "[+] Added."
            except:
                print '''[!] DB Problem (paste_id:%s) NOT inserted''' % (paste_id)
                print sys.exc_info()[0]
                return False
            opsecHeader.send_email(keyword, "Pastebin")
Example #4
0
    def get_user_tweets(self, user):
        screen_name = urllib2.quote(user)
        opsecHeader.write_last_checked_time('twitter')

        # See https://dev.twitter.com/docs/api/1/get/statuses/user_timeline
        tweet_since_date = str(self.get_latest_tweet(screen_name, None)[0])
        epoch_time_existing = self.get_latest_tweet(screen_name, None)[1]

        twitter_query_string = 'https://api.twitter.com/1.1/statuses/user_timeline.json?screen_name=' + screen_name + '&count=10'

        if tweet_since_date != '0':  # Twitter does not play nice with invalid since_id's
            twitter_query_string += '&since_id=' + tweet_since_date

        opsecHeader.query_website_oauth_json("twitterUserTweets", twitter_query_string, self.consumer_key, self.consumer_secret, self.access_token, self.access_token_secret)

        twitter_results = opsecHeader.read_results_json('twitterUserTweets')
        if twitter_results is not None:
            twitter_all_results = twitter_results
        else:
            twitter_all_results = None

        if not twitter_all_results:
            print "No results."
        else:
            for i in twitter_all_results:
                created_at = (i['created_at']).encode('utf-8')
                epoch_time_found = calendar.timegm((email.utils.parsedate(created_at)))
                if int(epoch_time_found) > int(epoch_time_existing):
                    twitter_id = (i['id'])
                    text = (i['text']).encode('utf-8')
                    from_user = (i['user']['screen_name']).encode('utf-8')
                    created_at = (i['created_at']).encode('utf-8')
                    profile_image_url_https = (i['user']['profile_image_url_https']).encode('utf-8')

                    try:
                        location = '?'
                        lat = i['geo']['coordinates'][0]
                        lng = i['geo']['coordinates'][1]
                        print("Got coordinates!")
                    except:
                        location, lat, lng = self.gen_geo(from_user)

                    self.write_tweet(twitter_id, from_user, text, created_at, '', location, lat, lng, epoch_time_found, profile_image_url_https)
                    keywords = opsecHeader.get_user_keywords(from_user, 'twitter')
                    for keyword in keywords:
                        if keyword in text:
                            opsecHeader.send_email(keyword, "Twitter", from_user)
Example #5
0
def add_paste(title, paste_id, paste):
    keywords = opsecHeader.get_user_keywords('all', 'pastebin')
    for keyword in keywords:
        if keyword in paste:
            now = int(time.mktime(time.localtime()))
            sql = "INSERT INTO `pastebin` (`epoch_time`, `title`, `paste`, `pasteID`, `keyword`) VALUES (%s, %s, %s, %s, %s)"
            try:
                if (opsecHeader.cur.execute(
                        sql, (now, title, paste, paste_id, keyword))):
                    opsecHeader.db.commit()
                    print "[+] Added."
            except:
                print '''[!] DB Problem (paste_id:%s) NOT inserted''' % (
                    paste_id)
                print sys.exc_info()[0]
                return False
            opsecHeader.send_email(keyword, "Pastebin")
Example #6
0
    def get_user_comments(self, user):
        """Fetch *user*'s recent Reddit activity from the overview feed,
        store items newer than the last one seen, and e-mail on
        watched-keyword matches in the comment body.
        """
        #http://www.reddit.com/dev/api

        user = urllib2.quote(user)

        reddit_query_string = 'http://www.reddit.com/user/' + user + '/overview.json'
        opsecHeader.query_website_json("reddit", reddit_query_string, opsecHeader.reddit_api_key)
        opsecHeader.write_last_checked_time('reddit')

        reddit_results = opsecHeader.read_results_json('reddit')
        try:
            reddit_all_results = reddit_results['data']['children']
        except KeyError:
            # Error payload without the expected shape -- treat as no results.
            reddit_all_results = None
        epoch_time_existing = self.get_latest_user_epoch(user)

        if not reddit_all_results:
            print "No results."
        else:
            for i in reddit_all_results:
                # created_utc arrives as a float like 1234567890.0; the [:-2]
                # slice drops the trailing '.0' to leave an integer string.
                # NOTE(review): breaks if the value is ever not *.0 -- confirm.
                epoch_time_found = str((i['data']['created_utc'])).encode('utf-8')[:-2]
                # Only store items newer than what we already have.
                if int(epoch_time_found) > int(epoch_time_existing):
                    # link_id carries the 't3_' kind prefix; [3:] strips it.
                    # Some item types lack these fields, hence the defaults.
                    try:
                        link_id = (i['data']['link_id']).encode('utf-8')[3:]
                    except KeyError:
                        link_id = ''
                    comment_id = (i['data']['id']).encode('utf-8')
                    author = (i['data']['author']).encode('utf-8')
                    try:
                        body = (i['data']['body']).encode('utf-8')
                    except KeyError:
                        body = ''
                    try:
                        link_title = (i['data']['link_title']).encode('utf-8')
                    except KeyError:
                        link_title = ''
                    subreddit = (i['data']['subreddit']).encode('utf-8')
                    permalink = 'http://www.reddit.com/r/' + subreddit + '/comments/' + link_id + '/' + urllib2.quote(link_title) + '/' + comment_id
                    self.write_latest_post(author, body, link_id, comment_id, link_title, subreddit, epoch_time_found, permalink)

                    # Alert on any watched keyword found in the body.
                    keywords = opsecHeader.get_user_keywords(author, 'reddit')
                    for keyword in keywords:
                        if keyword in body:
                            opsecHeader.send_email(keyword, "Reddit", author)
Example #7
0
def main():
    """Entry point: poll one stored user per site (chosen by the current
    minute's last digit) and, every 5 minutes, run a general keyword search."""
    current_minute = datetime.datetime.now().minute
    # Explicit floor division: '/' floors for ints on Python 2 but returns a
    # float on Python 3, which would break the list indexing below.
    # Note: at minute 0 this is -1, i.e. the last keyword in the list.
    five_min_interval = (current_minute // 5) - 1
    # Last digit of the minute selects which stored user to poll this run.
    one_digit_minute = int(str(current_minute)[-1])

    print(
        Color.YELLOW +
        "####################################################################################"
    )
    print(
        '''   ___  __   _____    __  _____     ___  ___  __    __  ___   _____  ___  ___  __
  /___\/ _\  \_   \/\ \ \/__   \   /___\/ _ \/ _\  /__\/ __\ /__   \/___\/___\/ /
 //  //\ \    / /\/  \/ /  / /\/  //  // /_)/\ \  /_\ / /      / /\//  ///  // /
/ \_// _\ \/\/ /_/ /\  /  / /    / \_// ___/ _\ \//__/ /___   / / / \_// \_// /
\___/  \__/\____/\_\ \/   \/     \___/\/     \__/\__/\____/   \/  \___/\___/\____/
                                                                                    '''
    )
    print("                     OSINT OPSEC Tool " + opsecHeader.version +
          " - By @hyprwired")
    print(
        "####################################################################################"
        + Color.ENDC)

    print(Color.HEADER + "[*] User Specific Search" + Color.ENDC)
    print("[-] Attempting site/user specific search...")
    print("[-] Trying user #" + str(one_digit_minute) + "...")

    # Twitter
    twitter = Twitter()
    user = twitter.get_user(one_digit_minute)
    if user is not None:
        twitter.get_user_tweets(user)
    else:
        print("[-] No Twitter user #" + str(one_digit_minute))

    # Reddit
    reddit = Reddit()
    author = reddit.get_user(one_digit_minute)
    if author is not None:
        reddit.get_user_comments(author)
    else:
        print("[-] No Reddit user #" + str(one_digit_minute))

    # StackExchange
    stack_exchange = StackExchange()
    account_id = stack_exchange.get_user(one_digit_minute)
    if account_id is not None:
        stack_exchange.get_user_posts(account_id)
    else:
        print("[-] No StackExchange user #" + str(one_digit_minute))

    print(Color.HEADER + "[*] General Search" + Color.ENDC)
    # Keyword searches only run on 5-minute boundaries (rate limiting).
    if (current_minute % 5) == 0:
        print("[-] Attempting general site search...")
        try:
            twitter_keyword = opsecHeader.get_user_keywords(
                'all', 'twitter')[five_min_interval]
            twitter.search_twitter(twitter_keyword)
        except IndexError:
            print("[-] No twitter keyword at index #: " +
                  str(five_min_interval))

        try:
            facebook_keyword = opsecHeader.get_user_keywords(
                'all', 'facebook')[five_min_interval]
            facebook = Facebook()
            facebook.search_facebook(facebook_keyword)
        except IndexError:
            print("[-] No Facebook keyword at index #: " +
                  str(five_min_interval))

        try:
            wordpress_keyword = opsecHeader.get_user_keywords(
                'all', 'wordpress')[five_min_interval]
            wordpress = Wordpress()
            wordpress.search_wordpress(wordpress_keyword)
        except IndexError:
            print("[-] No Wordpress keyword at index #: " +
                  str(five_min_interval))
    else:
        print(
            "[-] Minute not a multiple of 5, not attempting general site search to avoid throttling..."
        )