Example #1
import tweepy
from auth import get_api  # project helper that returns an authenticated API client


def get_by_search(query, count):
    # Collect up to `count` tweets matching `query` and return their raw JSON dicts
    api = get_api()
    tweets = tweepy.Cursor(api.search, q=query).items(count)

    tweets_list = []
    for tweet in tweets:
        tweets_list.append(tweet._json)
    return tweets_list
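
A minimal usage sketch for the function above, assuming auth.get_api() returns an authenticated tweepy client for a version where api.search is available; the query string and the printed JSON keys are illustrative only.

# Hedged usage sketch: the query and the printed fields are illustrative
results = get_by_search("python", 5)
for raw in results:
    # Each element is the raw JSON dict that tweepy stores on the Status object
    print(raw.get("id_str"), raw.get("text"))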
Example #2
	def run(self):
		print("Tweet printer: Starting thread")
		logging.info("Starting thread to tweet")
		Tweet.hadTweet = False

		global api
		api = auth.get_api()

		now = datetime.datetime.now()  # Get a datetime object containing the current date and time
		# Compute the time when the next tweet will be posted and format it so it is easy to read in the console
		next_time = (now + datetime.timedelta(0, Tweet.delay)).strftime("%H:%M:%S")
		Tweet.set_next_tweet_t(next_time)
		# Wait some seconds to avoid spam
		time.sleep(Tweet.delay)

		self.tweet()
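
The timing logic in run() can be illustrated on its own with the standard library; this sketch only mirrors the datetime arithmetic above and uses a hypothetical delay value in place of Tweet.delay.

import datetime

delay = 3600  # hypothetical stand-in for Tweet.delay, in seconds

now = datetime.datetime.now()
# timedelta(0, delay) means 0 days plus `delay` seconds
next_time = (now + datetime.timedelta(0, delay)).strftime("%H:%M:%S")
print("Next tweet at", next_time)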
Example #3
    def __init__(self, *args, **kwargs):
        GUI.__init__(self, *args, **kwargs)
        
        self.build_menu(categories.keys())
        self.build_content_frames()

        ### Grant access to the user's account
        api = auth.get_api(self)
        
        ### Retrieve OVH objects and apply content to application's frames
        for cat, path in categories.items():
            objects = Nodes(api, path)
            setattr(self, cat, objects)
            self.content_frames[cat].add_entries(objects)

        ### Show 'Domains' on top
        self.content_frames['domains'].show()
Example #4
	def run(self):
		global api
		global timer_dm

		api = auth.get_api()

		try:
			global favorites_list
			if len(favorites_list) == 0:
				temp_fav = api.favorites(count=200)
				for fav in temp_fav:
					favorites_list.append(fav.id)
				logging.info("Loaded the last " + str(len(favorites_list)) + " fav tweets")
		except Exception as e:
			print("Error while trying to get the last favorites_list tweets: ", e)
			logging.warning("Couldn't recover the last favorites_list tweets: " + str(e))

		if int(MDListener.lastID) == 0:
			print("Recovering the last DM... (because lastID=0)")
			logging.info("Starting DM Listener")
			try:
				# Recover the most recent DM and keep its ID; after the bot's first run, this branch is no longer used
				last_dm = api.list_direct_messages()[0]

				MDListener.lastID = last_dm.id
				print("LastID =", MDListener.lastID)
				logging.info("New last DM recovered: " + str(MDListener.lastID))
				self.save_last_dm()

			except Exception as e:
				print("Error while trying to get the newest DM: ", e)
				logging.warning("Couldn't recover the last DM: " + str(e))

			timer_dm = threading.Timer(MDListener.read_dm, self.search)
			logging.info("Starting timer to the first DM search")
			timer_dm.start()
		else:
			logging.info("Starting search")
			self.search()
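
If more than the most recent 200 favourites were needed, a hedged alternative, assuming the same tweepy version as the snippet above, is to page through them with tweepy.Cursor instead of a single api.favorites(count=200) call.

import tweepy

def load_favorite_ids(api, limit=1000):
    # Collect up to `limit` favourite tweet IDs across pages
    return [fav.id for fav in tweepy.Cursor(api.favorites).items(limit)]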
Example #5
import tweepy
from auth import get_api

api = get_api()

def get_friends_of(username):
    user = api.get_user(username)
    return user.friends()


for friend in get_friends_of("@richardadalton"):
    print(friend)
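
user.friends() returns only the first page of results; a hedged alternative, assuming the same tweepy version, is to paginate over api.friends with tweepy.Cursor.

# Hedged sketch: page through friends instead of taking only the first page
def get_all_friends_of(username, limit=500):
    return [user.screen_name
            for user in tweepy.Cursor(api.friends, screen_name=username).items(limit)]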
Example #6
import json
import tweepy
from auth import get_api

api = get_api()


def get_by_search(query, count):
    return tweepy.Cursor(api.search, q=query).items(count)


def get_my_timeline(count):
    return tweepy.Cursor(api.home_timeline).items(count)


tweets = get_by_search("Storm Brian", 10)

for tweet in tweets:
    print(tweet)
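
get_my_timeline is defined above but never exercised; a small hedged usage sketch, assuming the authenticated account has tweets in its home timeline:

# Hedged usage sketch for get_my_timeline
for tweet in get_my_timeline(5):
    # home_timeline yields tweepy Status objects, which expose .text
    print(tweet.text)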
Example #7
import db_utils
import auth
import time
import sys


api = auth.get_api()
with open('./candidates.txt', 'r') as f:
    candidates = f.read().split('\n')

with open('./states.csv', 'r') as f:
    states_raw = f.read().split('\n')
    states = dict()
    for state_line in states_raw:
        state_arr = state_line.split(',')
        state_data = {
            "name": state_arr[0],
            "longitude": state_arr[1],
            "latitude": state_arr[2],
            "radius": state_arr[3]
        }
        states[state_data["name"]] = state_data

def search_candidate_in_state(candidate, state_name):
    state = states[state_name]
    coordinates = state['longitude'] + ',' + state['latitude']
    queryTerms = {"q": candidate, "geocode": coordinates + "," + state['radius'] + "km", "count": 100}
    query = api.request('search/tweets', queryTerms).json()

    if 'errors' in query:
        if query['errors'][0]['code'] == 88:
            # Error code 88 means the rate limit was exceeded; the original snippet is truncated
            # here, so as a minimal fallback back off for the rate-limit window and return nothing
            time.sleep(15 * 60)
            return []

    return query.get('statuses', [])
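
A hedged usage sketch for search_candidate_in_state, assuming the truncated function returns the 'statuses' list as completed above; "Ohio" is a hypothetical state name that would need to match a row in states.csv.

# Hedged usage sketch: "Ohio" is a hypothetical state name from states.csv
if candidates:
    tweets = search_candidate_in_state(candidates[0], "Ohio")
    print("Found", len(tweets), "tweets")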
Example #8
def search_tweets(query, count):
    api = get_api()
    tweets = tweepy.Cursor(api.search, q=query).items(count)
    return [tweet._json for tweet in tweets]
Example #9
def load_new_tweet(url, from_fav=False):
    api = auth.get_api()

    try:
        id = re.split("/", url)[-1]
        if id.find("?") > 0:
            id = id[:id.find("?")]
        status = api.get_status(id, tweet_mode="extended")
        if id in mdlistener.favorites_list:
            return
        mdlistener.favorites_list.append(id)

        if not from_fav:
            api.create_favorite(id)
            print("Tweet with id:", id, "was faved")
            logging.info("Tweet with id: " + id + " was faved")
        else:
            print("Received fav with tweet id: " + str(id))
            logging.info("Received fav with tweet id: " + str(id))
            if 'user_mentions' in status.entities:
                if len(status.entities['user_mentions']) > 0:

                    print(
                        "This could be a reply, so it is not processed\n"
                    )
                    logging.info(
                        "This could be a reply, so it is not processed")
                    print(status.full_text)
                    logging.info("Tweet text: " + status.full_text)
                    print(
                        "------------------------------------------------------------"
                    )
                    return

        logging.info("Tweet text: " + status.full_text)

        print(status.full_text)

        full_real_text = status.full_text

        media_files = []
        download_names = []
        download = False

        if 'media' in status.entities:
            logging.info("Media found")
            download = True
            for photo in status.extended_entities['media']:
                if photo['type'] == 'photo':
                    media_files.append(photo['media_url'])
                    logging.info("Getting info of photo: " +
                                 photo['media_url'])
                else:
                    logging.info(
                        "Got something that is not a photo: skipping download")
                    download = False
                    break

            if download:
                full_real_text = full_real_text.rsplit("https://t.co", 1)[0]
                logging.info(
                    "Download option enabled: removing the last link (t.co)")

        if 'user_mentions' in status.entities:
            if len(status.entities['user_mentions']) > 0:
                logging.info("User mentions found")
            for user in status.entities['user_mentions']:
                to_remove = "@" + user['screen_name']
                full_real_text = full_real_text.replace(to_remove, "")
                logging.info("Removing: " + to_remove)

        if download is False:
            logging.info("Trying to insert without download")
            if from_fav:
                Data.access_list(mode=Data.insert_random,
                                 info=Data(full_real_text))
            else:
                Data.access_list(mode=Data.insert, info=Data(full_real_text))

        else:
            print("To download: ")
            try:
                os.mkdir("images")
                logging.info("Created images directory")
            except FileExistsError:  # If directory already exists
                pass

            for m in media_files:
                print(m)

            for media_file in media_files:
                name = wget.download(media_file)
                download_names.append(shutil.move(src=name, dst="images"))
                logging.info("Downloaded: " + name)

            print("\n")  # Add a extra line

            if from_fav:
                Data.access_list(mode=Data.insert_random,
                                 info=Data(full_real_text, download_names))
            else:
                Data.access_list(mode=Data.insert,
                                 info=Data(full_real_text, download_names))
            save()

        print("------------------------------------------------------------")

    except Exception as e:
        print("Error while trying to get the tweet:", e)
        logging.error(str(e))
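
A hedged usage sketch for load_new_tweet; the tweet URLs below are hypothetical placeholders, and the auth, mdlistener and Data modules used above are assumed to be importable.

# Hedged usage sketch: both URLs are hypothetical placeholders
load_new_tweet("https://twitter.com/someuser/status/1234567890?s=20")
load_new_tweet("https://twitter.com/someuser/status/1234567891", from_fav=True)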