def trends(channel='assignment1', scheduled=True):
    """Fetch the top 10 trending topics for the configured WOE ID and,
    when scheduled, post them to a Slack channel.

    Args:
        channel: Slack channel to post to when ``scheduled`` is True.
        scheduled: When True, also push the result via ``slack_client``.

    Returns:
        str: ``', \\n'``-joined list of up to 10 trend names.
    """
    from os import environ  # local import keeps this block self-contained

    # Bug fix: these four lines were commented out, leaving the credential
    # names undefined in this function; the sibling implementation of
    # trends() in this file loads them from the environment the same way.
    consumer_key = environ.get('consumer_key', None)
    consumer_secret = environ.get('consumer_secret', None)
    access_token = environ.get('access_token', None)
    access_token_secret = environ.get('access_token_secret', None)
    auth = OAuthHandler(consumer_key, consumer_secret)
    auth.set_access_token(access_token, access_token_secret)
    api = API(auth)
    # Where On Earth ID for Philippines is 1187115.
    # Where On Earth ID for Worldwide is 1.
    WOE_ID = 23424934
    trends = api.trends_place(WOE_ID)
    trends = json.loads(json.dumps(trends, indent=1))
    trendy = []
    for trend in trends[0]["trends"]:
        trendy.append(trend["name"])
    trending = ', \n'.join(trendy[:10])
    if scheduled:
        slack_client.api_call("chat.postMessage", channel=channel, text=trending)
    return trending
def __init__(self, auth, listener, **options):
    """Initialize the stream: store the auth handler and listener and read
    tuning parameters from **options, falling back to Twitter's documented
    reconnect/backoff defaults."""
    self.auth = auth
    self.listener = listener
    self.running = False
    self.daemon = options.get("daemon", False)
    self.timeout = options.get("timeout", 300.0)
    self.retry_count = options.get("retry_count")
    # values according to
    # https://developer.twitter.com/en/docs/tweets/filter-realtime/guides/connecting#reconnecting
    self.retry_time_start = options.get("retry_time", 5.0)
    self.retry_420_start = options.get("retry_420", 60.0)
    self.retry_time_cap = options.get("retry_time_cap", 320.0)
    self.snooze_time_step = options.get("snooze_time", 0.25)
    self.snooze_time_cap = options.get("snooze_time_cap", 16)
    # The default socket.read size. Default to less than half the size of
    # a tweet so that it reads tweets with the minimal latency of 2 reads
    # per tweet. Values higher than ~1kb will increase latency by waiting
    # for more data to arrive but may also increase throughput by doing
    # fewer socket read calls.
    self.chunk_size = options.get("chunk_size", 512)
    self.verify = options.get("verify", True)
    self.api = API()
    self.headers = options.get("headers") or {}
    self.new_session()  # defined elsewhere on this class
    self.body = None
    # Mutable backoff state, initialized from the configured start values.
    self.retry_time = self.retry_time_start
    self.snooze_time = self.snooze_time_step
    # Example: proxies = {'http': 'http://localhost:1080', 'https': 'http://localhost:1080'}
    self.proxies = options.get("proxies")
    self.host = options.get('host', 'stream.twitter.com')
def __init__(self, topic, api=None):
    """Create a producer wrapper for the given Kafka topic.

    Args:
        topic: Kafka topic name messages will be published to.
        api: Optional pre-built tweepy API; a default one is created when
            omitted (or falsy).
    """
    self.api = api or API()
    self.topic = topic
    # NOTE(review): broker address is hard-coded; consider parameterizing.
    self.producer = KafkaProducer(bootstrap_servers='localhost:9092')
    # NOTE(review): basicConfig mutates process-global logging state from
    # a constructor — confirm this is intentional.
    logging.basicConfig(level=logging.INFO)
    self.logger = logging.getLogger(__name__)
def create_tweepy_api_instance(self):
    """Create a Tweepy API instance on ``self.api`` from the handler in
    ``self.auth_handler``, blocking (and printing a notice) when the
    rate limit is reached."""
    self.api = API(self.auth_handler,
                   wait_on_rate_limit=True,
                   wait_on_rate_limit_notify=True)
def getInstance():
    """Return the process-wide tweepy API singleton, building it on first use.

    Security fix: the previous version committed real consumer/access
    secrets in source. Credentials are now read from the environment
    (TWITTER_CONSUMER_KEY, TWITTER_CONSUMER_SECRET, TWITTER_ACCESS_TOKEN,
    TWITTER_ACCESS_TOKEN_SECRET); a KeyError is raised when one is missing.
    The previously committed tokens should be revoked.
    """
    global twitter_api
    if not twitter_api:
        import os  # local import keeps this block self-contained
        auth = tweepy.OAuthHandler(os.environ['TWITTER_CONSUMER_KEY'],
                                   os.environ['TWITTER_CONSUMER_SECRET'])
        auth.set_access_token(os.environ['TWITTER_ACCESS_TOKEN'],
                              os.environ['TWITTER_ACCESS_TOKEN_SECRET'])
        twitter_api = API(auth_handler=auth,
                          host='api.twitter.com',
                          search_host='search.twitter.com',
                          # Layered cache: DB-backed cache fronted by a
                          # file cache, both without expiry.
                          cache=DBFileCache(
                              DBCache(timeout=-1,
                                      conn=DBSingleton.getInstance()),
                              FileCache("cache", timeout=-1),
                              timeout=-1),
                          secure=False,
                          api_root='/1',
                          search_root='',
                          retry_count=0,
                          retry_delay=0,
                          retry_errors=None,
                          parser=None)
    return twitter_api
def _get_api(self):
    "Initialize Tweepy API object with RateLimitHandler auth."
    auth = RateLimitHandler(self.consumer_key, self.consumer_secret)
    # NOTE(review): unpacking .values() into (key, secret) assumes each
    # value is a 2-tuple; if access_tokens maps key -> secret this should
    # be .items() — confirm against the code that builds this dict.
    for key, secret in self.access_tokens.values():
        auth.add_access_token(key, secret)
    # print 'Token pool size: %d' % len(auth.tokens)
    return API(auth)
def __init__(self, auth, listener, logger, **options):
    """Initialize the stream: store auth/listener/logger and read tuning
    parameters from **options, using Twitter's documented reconnect
    defaults."""
    self.auth = auth
    self.listener = listener
    self.running = False
    self.timeout = options.get("timeout", 300.0)
    self.retry_count = options.get("retry_count")
    # values according to https://dev.twitter.com/docs/streaming-apis/connecting#Reconnecting
    self.retry_time_start = options.get("retry_time", 5.0)
    self.retry_420_start = options.get("retry_420", 60.0)
    self.retry_time_cap = options.get("retry_time_cap", 320.0)
    self.snooze_time_step = options.get("snooze_time", 0.25)
    self.snooze_time_cap = options.get("snooze_time_cap", 16)
    self.buffer_size = options.get("buffer_size", 1500)
    if options.get("secure", True):
        self.scheme = "https"
    else:
        self.scheme = "http"
    self.api = API()
    self.headers = options.get("headers") or {}
    self.parameters = None
    self.body = None
    # Mutable backoff state, initialized from the configured start values.
    self.retry_time = self.retry_time_start
    self.snooze_time = self.snooze_time_step
    self.logger = logger
    self.logger.info('TOOLKIT STREAM: Stream initialized.')
    # Bug fix: `print '...'` is Python 2-only syntax; the call form below
    # prints the same single string on both Python 2 and 3.
    print('TOOLKIT STREAM: Stream initialized.')
def trends(channel='assignment1', scheduled=True):
    """Fetch the top 10 worldwide trending topics and, when scheduled,
    post them to a Slack channel.

    Args:
        channel: Slack channel to post to when ``scheduled`` is True.
        scheduled: When True, also push the result via ``slack_client``.

    Returns:
        str: ``', \\n'``-joined list of up to 10 trend names.
    """
    consumer_key = environ.get('consumer_key', None)
    consumer_secret = environ.get('consumer_secret', None)
    access_token = environ.get('access_token', None)
    access_token_secret = environ.get('access_token_secret', None)
    auth = OAuthHandler(consumer_key, consumer_secret)
    auth.set_access_token(access_token, access_token_secret)
    api = API(auth)
    # Where On Earth ID 1 selects worldwide trends.
    WOE_ID = 1
    trends = api.trends_place(WOE_ID)
    # (The previous json.dumps/json.loads round-trip was a no-op deep copy
    # of the already-parsed response and has been dropped.)
    trendy = [trend["name"] for trend in trends[0]["trends"]]
    trending = ', \n'.join(trendy[:10])
    if scheduled:
        slack_client.api_call("chat.postMessage", channel=channel, text=trending)
    # Bug fix: the scheduled path previously returned None implicitly;
    # return the trend list in both modes so callers can always use it.
    return trending
def __init__(self, auth, listener, **options):
    """Initialize the stream: store the auth handler and listener and read
    tuning parameters from **options, falling back to Twitter's documented
    reconnect/backoff defaults."""
    self.auth = auth
    self.listener = listener
    self.running = False
    self.timeout = options.get("timeout", 300.0)
    self.retry_count = options.get("retry_count")
    # values according to
    # https://dev.twitter.com/docs/streaming-apis/connecting#Reconnecting
    self.retry_time_start = options.get("retry_time", 5.0)
    self.retry_420_start = options.get("retry_420", 60.0)
    self.retry_time_cap = options.get("retry_time_cap", 320.0)
    self.snooze_time_step = options.get("snooze_time", 0.25)
    self.snooze_time_cap = options.get("snooze_time_cap", 16)
    # The default socket.read size. Default to less than half the size of
    # a tweet so that it reads tweets with the minimal latency of 2 reads
    # per tweet. Values higher than ~1kb will increase latency by waiting
    # for more data to arrive but may also increase throughput by doing
    # fewer socket read calls.
    self.chunk_size = options.get("chunk_size", 512)
    self.verify = options.get("verify", True)
    self.api = API()
    self.headers = options.get("headers") or {}
    self.new_session()  # defined elsewhere on this class
    self.body = None
    # Mutable backoff state, initialized from the configured start values.
    self.retry_time = self.retry_time_start
    self.snooze_time = self.snooze_time_step
def __init__(self, auth, listener, logger, project_id, collector_id, **options):
    """Initialize the stream for a specific project/collector: store
    auth/listener, read tuning options, and look up the project's config
    collection from the database."""
    self.auth = auth
    self.listener = listener
    self.running = False
    self.timeout = options.get("timeout", 300.0)
    self.retry_count = options.get("retry_count")
    # Reconnect/backoff defaults per Twitter's streaming guidance.
    self.retry_time_start = options.get("retry_time", 5.0)
    self.retry_420_start = options.get("retry_420", 60.0)
    self.retry_time_cap = options.get("retry_time_cap", 320.0)
    self.snooze_time_step = options.get("snooze_time", 0.25)
    self.snooze_time_cap = options.get("snooze_time_cap", 16)
    self.buffer_size = options.get("buffer_size", 1500)
    if options.get("secure", True):
        self.scheme = "https"
    else:
        self.scheme = "http"
    self.api = API()
    self.headers = options.get("headers") or {}
    self.parameters = None
    self.body = None
    # Mutable backoff state, initialized from the configured start values.
    self.retry_time = self.retry_time_start
    self.snooze_time = self.snooze_time_step
    # Resolve this project's config collection handle from the shared db.
    project = db.get_project_detail(project_id)
    self.project_config_db = db.connection[
        project['project_config_db']].config
    self.collector_id = collector_id
    self.logger = logger
    self.logger.info('TOOLKIT STREAM: Stream initialized.')
    # Bug fix: `print '...'` is Python 2-only syntax; the call form below
    # prints the same single string on both Python 2 and 3.
    print('TOOLKIT STREAM: Stream initialized.')
def get_verified_count():
    """Count the current user's followers and collect the verified ones.

    Reads the OAuth tokens and user id from the web ``session`` and pages
    through the follower list 200 at a time.

    Returns:
        tuple: ``(verified_count, follower_count, verified_users,
        exception)`` where ``verified_users`` is a list of
        ``{'handle', 'avatar'}`` dicts and ``exception`` is True when
        iteration aborted early (e.g. a rate-limit/HTTP error).
    """
    access_token = session['user_info']['access_token']
    access_secret = session['user_info']['access_secret']
    user_id = session['user_info']['user_id']
    auth = OAuthHandler(consumer_key, consumer_secret)
    auth.set_access_token(access_token, access_secret)
    twitter_api = API(auth_handler=auth)
    follower_count = 0
    verified_count = 0
    verified_users = []
    exception = False
    try:
        for item in Cursor(twitter_api.followers,
                           user_id=user_id, count=200).items():
            data = item._json
            if data['verified']:
                verified_count += 1
                verified_users.append({
                    'handle': data['screen_name'],
                    'avatar': data['profile_image_url_https'],
                })
            follower_count += 1
    # Bug fix: the original bare `except:` also swallowed SystemExit and
    # KeyboardInterrupt; catch Exception and keep the best-effort flag.
    except Exception:
        exception = True
    return verified_count, follower_count, verified_users, exception
def __init__(self, auth, listener, **options):
    """After base-class setup, immediately start streaming in the mode
    given by the 'stream_mode' config key ('filter' or 'sample');
    any other value is reported as an error."""
    super(TwicorderStream, self).__init__(auth, listener, **options)
    msg = 'Listener starting at {:%d %b %Y %H:%M:%S}'.format(datetime.now())
    utils.message('Info', msg)
    self.api = API(auth)
    # Cache state for id -> screen-name lookups (used elsewhere on the class).
    self._id_to_screenname_time = None
    self._id_to_screenname = {}
    stream_mode = self.config.get('stream_mode') or 'filter'
    if stream_mode == 'filter':
        # Filter parameters come from properties defined on this class.
        self.filter(
            follow=self.follow,
            track=self.track,
            locations=self.locations,
            stall_warnings=self.stall_warnings,
            languages=self.languages,
            encoding=self.encoding,
            filter_level=self.filter_level
        )
    elif stream_mode == 'sample':
        self.sample(
            languages=self.languages,
            stall_warnings=self.stall_warnings
        )
    else:
        utils.message('Error', 'stream_mode must be "filter" or "sample"')
def twitter_checkin(creds, sysinfo):
    """Tweet ``sysinfo`` as a status update using the supplied credentials.

    Args:
        creds: 4-tuple of (consumer_key, consumer_secret, access_token,
            access_token_secret).
        sysinfo: Text of the status to post.

    Returns:
        list: The single-element marker ``["twitter_checkin"]``.
    """
    c_key, c_secret, a_token, a_secret = creds
    handler = tweepy.OAuthHandler(c_key, c_secret)
    handler.set_access_token(a_token, a_secret)
    client = API(handler, wait_on_rate_limit=True)
    client.update_status(sysinfo)
    return ["twitter_checkin"]
def search_tweet(query):
    """Search Twitter for ``query`` and print the matching tweets."""
    client = API(get_oauth())
    # Run the search, then emit the results.
    found = get_tweets(client, query)
    output_tweets(found)
def filter_track():
    """Open a basic-auth streaming connection and track the term 'python'.

    Note: '<USERNAME>'/'<PASSWORD>' are placeholders and must be replaced
    with real credentials before this will connect.
    """
    track = ["python"]
    stream_auth = BasicAuthHandler('<USERNAME>', '<PASSWORD>')
    api = API()
    stream = Stream(stream_auth, MyStreamListener(api))
    # Bug fix: `print a, b` is Python 2-only syntax; the call form below
    # produces the same space-separated output on Python 3.
    print('start filter track ', ','.join(track))
    stream.filter(track=track)
def search_tweet(self, query):
    """Search Twitter for ``query`` and print the matching tweets."""
    client = API(self.get_oauth())
    # Run the search, then emit the results.
    found = self.get_tweets(client, query)
    self.output_tweets(found)
def _get_rate_limit_status(self, key, secret):
    """Return the rate-limit status for one access-token (key, secret) pair,
    authenticated with this object's consumer credentials."""
    handler = OAuthHandler(self.consumer_key, self.consumer_secret)
    handler.set_access_token(key, secret)
    return API(handler).rate_limit_status()
def __init__(self, access_token, access_token_secret, consumer_key, consumer_secret):
    """Set up the VADER sentiment analyser plus an authenticated,
    rate-limit-aware Tweepy client."""
    handler = OAuthHandler(consumer_key, consumer_secret)
    handler.set_access_token(access_token, access_token_secret)
    self.auth_handler = handler
    self.analyser = SentimentIntensityAnalyzer()
    self.api = API(self.auth_handler,
                   wait_on_rate_limit=True,
                   wait_on_rate_limit_notify=True)
def twitter_post_response(creds, message, user):
    """Send ``message`` as a direct message to ``user``.

    Args:
        creds: 4-tuple of (consumer_key, consumer_secret, access_token,
            access_token_secret).
        message: DM body text.
        user: Recipient identifier.

    Returns:
        bool: Always True.
    """
    c_key, c_secret, a_token, a_secret = creds
    handler = tweepy.OAuthHandler(c_key, c_secret)
    handler.set_access_token(a_token, a_secret)
    client = API(handler, wait_on_rate_limit=True)
    client.send_direct_message(user, message)
    return True
def __init__(self, streambot, api=None):
    """Store API and streambot references, and seed the ignore list with
    the bot's own account id so it never reacts to itself."""
    self.api = api if api else API()
    # needed ref to streambot so method can be called there
    self.streambot = streambot
    self.tw_bot_id = 841013993602863104
    self.ignored_users = [self.tw_bot_id]
def __init__(self, api=None):
    """Open a timestamped JSON output file for captured stream data."""
    self.api = api if api else API()
    self.n = 0
    self.prefix = 'twitter_stream_file'
    # Timestamp in the name keeps each run's output file distinct.
    filename = self.prefix + '@' + strftime("%y%m%d-%H%M%S") + '.json'
    self.output = open(filename, 'w')
    self.counter = 1
def setUp(self):
    """Build an authenticated API client with retries enabled; optionally
    start HTTP replay from the recorded fixture."""
    self.auth = create_auth()
    self.api = API(self.auth)
    self.api.retry_count = 2
    self.api.retry_delay = 5
    if use_replay:
        # Auth headers vary per run, so they are excluded from matching.
        key_filter = filter_headers_key(['Authorization'])
        start_replay('tests/record.json', headers_key=key_filter)
def __init__(self, api=None, dataD=None):
    """Store the API client and the Discord config section, loading the
    section from data.json when the caller did not supply one.

    Args:
        api: Optional pre-built tweepy API; default one created if falsy.
        dataD: Optional pre-parsed Discord config dict.
    """
    self.api = api or API()
    # Idiom fix: compare against None with `is`, not `==`.
    if dataD is None:
        with open('data.json') as data_file:
            datajson = json.load(data_file)
        self.dataD = datajson['Discord']
    else:
        self.dataD = dataD
def __init__(self, api=None, bucket=None, ban_list_path=None):
    """Wire up the API client, rate bucket and ban list, creating
    defaults for anything the caller did not supply."""
    self.api = api if api else API()
    self.bucket = bucket if bucket else Bucket(bucket_size=20)
    self.count = 0
    # An explicit path loads a persisted ban list; otherwise start empty.
    self.ban_list = BanList() if ban_list_path is None else BanList(ban_list_path)
def __init__(self, api=None, dataD=None):
    """Store the API client and the Discord config section, loading the
    section from data.json only when the caller did not supply one.

    Bug fix: the condition was inverted (``is not None``), which discarded
    a caller-supplied ``dataD`` (reloading from file instead) and stored
    None when it was omitted — the opposite of the sibling implementation
    of this constructor in this file.
    """
    self.api = api or API()
    if dataD is None:
        with open("data.json") as data_file:
            datajson = json.load(data_file)
        self.dataD = datajson["Discord"]
    else:
        self.dataD = dataD
def __init__(self, api = None):
    """Create (or reuse) a rate-limit-tolerant API client and open the
    SQLite engine used to persist collected tweets."""
    self.api = api if api else API(wait_on_rate_limit=True,
                                   wait_on_rate_limit_notify=True)
    self.reconnection_attemps = 0
    self.collected_tweets = 0
    # Engine for the tweet database; switch to
    # 'sqlite:///app/tweets.sqlite' when running inside the app layout.
    self.engine = create_engine('sqlite:///tweets.sqlite')
def authenticate(self):
    """Authenticate with Twitter's API.

    Lazily builds an OAuth handler from the stored credentials the first
    time it is called; subsequent calls reuse the cached ``self.api``.

    Returns:
        The Tweepy API instance, or None when no client exists and the
        credentials are incomplete.
    """
    # Idiom fix: test identity with `is None` rather than `== None`.
    if self.api is None:  # if not authenticated, do it now
        if (self.consumer_key and self.consumer_secret
                and self.access_token and self.access_secret):
            auth = tw.OAuthHandler(self.consumer_key, self.consumer_secret)
            auth.set_access_token(self.access_token, self.access_secret)
            self.api = API(auth)
            self.auth = auth
    return self.api
def filter_track():
    """Open a streaming connection that follows every scraper's user id."""
    q = Scrapers.all().filter('', '')
    follow = [s.uid for s in q]
    auth = OAuthHandler(consumer_key, consumer_secret)
    auth.set_access_token(access_token, access_token_secret)
    stream = Stream(auth, MyStreamListener())
    # Fix: removed an unused `api = API(auth)` local that was never read.
    stream.filter(follow=follow)
def __init__(self, api=None):
    """Open a timestamped CSV output file and write its header row."""
    self.api = api if api else API()
    self.current_number_of_tweets = 0
    # Name the file data + current date/time to avoid overwriting old runs.
    self.file = open('data' + time.strftime("%m%d%H%M") + '.csv', 'w')
    self.writer = csv.writer(self.file)
    header = ('user', 'location', 'coordinates', 'text', 'time',
              'hashtags', 'user mentions')
    self.writer.writerow(header)
def get_username(self):
    """Return the cached screen name, fetching it via verify_credentials
    on first use.

    Raises:
        TweepError: If the credentials cannot be verified.
    """
    # Fast path: already resolved.
    if self.username is not None:
        return self.username
    user = API(self).verify_credentials()
    if not user:
        raise TweepError("Unable to get username, invalid oauth token!")
    self.username = user.screen_name
    return self.username