def Getfrnd(self, user_id=None, screen_name=None, cursor=-1, count=200,
            skip_status=False, include_user_entities=False):
    """Fetch one page of a user's friends list (GET friends/list).

    Args:
        user_id: ID of the user whose friends to fetch (takes precedence
            over screen_name).
        screen_name: Screen name of the user whose friends to fetch.
        cursor: Pagination cursor; -1 requests the first page.
        count: Number of users per page (Twitter caps this at 200).
        skip_status: If True, omit each user's latest status from the
            response.
        include_user_entities: If True, include the user entities node.

    Returns:
        The parsed JSON response dict from Twitter.

    Raises:
        TwitterError: If neither user_id nor screen_name is given, or if
            Twitter returns an error payload.
    """
    url = '%s/friends/list.json' % (self.base_url)
    if not user_id and not screen_name:
        raise TwitterError(
            "Specify at least one of user_id or screen_name.")
    parameters = {'include_user_entities': include_user_entities}
    if user_id:
        parameters['user_id'] = user_id
    else:
        parameters['screen_name'] = screen_name
    # Bug fix: these were previously set only in the screen_name branch,
    # so lookups by user_id silently ignored cursor/count/skip_status.
    parameters['skip_status'] = skip_status
    parameters['cursor'] = cursor
    parameters['count'] = count
    resp = self._RequestUrl(url, 'GET', data=parameters)
    data = self._ParseAndCheckTwitter(resp.content.decode('utf-8'))
    return data
def _CheckForTwitterError(data): """Raises a TwitterError if twitter returns an error message. Args: data (dict): A python dict created from the Twitter json response Raises: (twitter.TwitterError): TwitterError wrapping the twitter error message if one exists. """ # Twitter errors are relatively unlikely, so it is faster # to check first, rather than try and catch the exception if 'error' in data: raise TwitterError(data['error']) if 'errors' in data: raise TwitterError(data['errors'])
def _RequestChunkedUpload(self, url, headers, data):
    """POST one chunk of a chunked media upload.

    Any transport-level failure from requests is re-raised as a
    TwitterError so callers only handle one exception type.
    """
    try:
        response = requests.post(url,
                                 headers=headers,
                                 data=data,
                                 auth=self.__auth,
                                 timeout=self._timeout,
                                 proxies=self.proxies)
    except requests.RequestException as e:
        raise TwitterError(str(e))
    return response
def _ParseAndCheckTwitter(self, json_data): """Try and parse the JSON returned from Twitter and return an empty dictionary if there is any error. This is a purely defensive check because during some Twitter network outages it will return an HTML failwhale page. """ try: data = json.loads(json_data) except ValueError: if "<title>Twitter / Over capacity</title>" in json_data: raise TwitterError({'message': "Capacity Error"}) if "<title>Twitter / Error</title>" in json_data: raise TwitterError({'message': "Technical Error"}) if "Exceeded connection limit for user" in json_data: raise TwitterError( {'message': "Exceeded connection limit for user"}) if "Error 401 Unauthorized" in json_data: raise TwitterError({'message': "Unauthorized"}) raise TwitterError({'Unknown error': '{0}'.format(json_data)}) self._CheckForTwitterError(data) return data
def _RequestStream(self, url, verb, data=None, session=None):
    """Request a stream of data.

    Args:
        url: The web location we want to retrieve.
        verb: Either POST or GET.
        data: A dict of (str, unicode) key/value pairs.

    Returns:
        A twitter stream, or 0 when verb is neither POST nor GET.
    """
    if session is None:
        session = requests.Session()
    # Connection settings shared by both verbs.
    stream_kwargs = dict(stream=True,
                         auth=self.__auth,
                         timeout=self._timeout,
                         proxies=self.proxies)
    if verb == 'POST':
        try:
            return session.post(url, data=data, **stream_kwargs)
        except requests.RequestException as e:
            raise TwitterError(str(e))
    if verb == 'GET':
        full_url = self._BuildUrl(url, extra_params=data)
        try:
            return session.get(full_url, **stream_kwargs)
        except requests.RequestException as e:
            raise TwitterError(str(e))
    return 0  # if not a POST or GET request
def Gettweetstest(self, q=None, result_type='mixed', count=200,
                  include_user_entities=False):
    """Page through search/tweets results, logging tweets to a file.

    Walks backwards through the timeline via max_id, writing each tweet
    to 'retweeterstest.twt' as '<screen_name><text>' per line, until no
    new tweets remain.

    Args:
        q: The search query string (required).
        result_type: One of 'mixed', 'recent' or 'popular'.
        count: Tweets requested per page.
        include_user_entities: If True, include the user entities node.

    Raises:
        TwitterError: If q is not given, or Twitter returns an error.
    """
    if not q:
        # Bug fix: the original message referred to user_id/screen_name,
        # which this search endpoint does not accept.
        raise TwitterError("You must specify a search query (q).")
    url = '%s/search/tweets.json' % (self.base_url)
    parameters = {
        'include_user_entities': include_user_entities,
        'q': q,
        'result_type': result_type,
        'count': count,
        'tweet_mode': 'extended',
    }
    max_id = None
    # Bug fix: 'with' closes the file on every exit path; the original
    # opened the handle and never closed it.
    with open('retweeterstest.twt', 'w', encoding='utf-8') as f:
        while True:
            if max_id:
                parameters['max_id'] = max_id
            resp = self._RequestUrl(url, 'GET', data=parameters)
            data = self._ParseAndCheckTwitter(
                resp.content.decode('utf-8'))
            statuses = data['statuses']
            if len(statuses) == 0:
                return
            for tweet in statuses:
                if tweet['id'] == max_id:
                    # Only the anchor tweet came back: pagination done.
                    if len(statuses) == 1:
                        return
                    else:
                        continue
                if max_id is None:
                    max_id = tweet['id']
                else:
                    max_id = min(max_id, tweet['id'])
                # Bug fix: tweet_mode='extended' payloads carry
                # 'full_text', not 'text'; fall back for compat mode.
                text = tweet.get('full_text', tweet.get('text', ''))
                f.write(tweet['user']['screen_name'] + text + '\n')
def Gettweets(self, q=None, result_type='mixed', count=200,
              include_user_entities=False):
    """Run a single search/tweets query and return the parsed response.

    Args:
        q: The search query string (required).
        result_type: One of 'mixed', 'recent' or 'popular'.
        count: Tweets requested per page.
        include_user_entities: If True, include the user entities node.

    Returns:
        The parsed JSON response dict from GET search/tweets.

    Raises:
        TwitterError: If q is not given, or Twitter returns an error.
    """
    if not q:
        # Bug fix: the original message referred to user_id/screen_name,
        # which this search endpoint does not accept.
        raise TwitterError("You must specify a search query (q).")
    url = '%s/search/tweets.json' % (self.base_url)
    parameters = {
        'include_user_entities': include_user_entities,
        'q': q,
        'result_type': result_type,
        'count': count,
    }
    resp = self._RequestUrl(url, 'GET', data=parameters)
    data = self._ParseAndCheckTwitter(resp.content.decode('utf-8'))
    return data
def GetUserss(self, user_id=None, screen_name=None,
              include_entities=False, return_json=False):
    """Look up a single user via GET users/show.

    Args:
        user_id: ID of the user to fetch (takes precedence over
            screen_name).
        screen_name: Screen name of the user to fetch.
        include_entities: If True, include the entities node.
        return_json: If True, return the raw parsed dict instead of a
            User instance.

    Returns:
        A User built from the response, or the raw dict when
        return_json is True.

    Raises:
        TwitterError: If neither user_id nor screen_name is given.
    """
    if not user_id and not screen_name:
        raise TwitterError(
            "Specify at least one of user_id or screen_name.")
    parameters = {'include_entities': include_entities}
    if user_id:
        parameters['user_id'] = user_id
    else:
        parameters['screen_name'] = screen_name
    url = '%s/users/show.json' % (self.base_url)
    resp = self._RequestUrl(url, 'GET', data=parameters)
    data = self._ParseAndCheckTwitter(resp.content.decode('utf-8'))
    if return_json:
        return data
    return User.NewFromJsonDict(data)
def _EncodeParameters(parameters): """Return a string in key=value&key=value form. Values of None are not included in the output string. Args: parameters (dict): dictionary of query parameters to be converted into a string for encoding and sending to Twitter. Returns: A URL-encoded string in "key=value&key=value" form """ if parameters is None: return None if not isinstance(parameters, dict): raise TwitterError("`parameters` must be a dict.") else: params = dict() for k, v in parameters.items(): if v is not None: if getattr(v, 'encode', None): v = v.encode('utf8') params.update({k: v}) return urlencode(params)
def __init__(self, consumer_key=None, consumer_secret=None,
             access_token_key=None, access_token_secret=None,
             application_only_auth=False, input_encoding=None,
             request_headers=None, cache=DEFAULT_CACHE, base_url=None,
             stream_url=None, upload_url=None, chunk_size=1024 * 1024,
             use_gzip_compression=False, debugHTTP=False, timeout=None,
             sleep_on_rate_limit=False, tweet_mode='compat', proxies=None):
    """Instantiate a new Api object.

    Args:
        consumer_key, consumer_secret: OAuth consumer credentials.
        access_token_key, access_token_secret: OAuth access token pair.
        application_only_auth: Use application-only (bearer) auth.
        input_encoding: Encoding of incoming parameter strings.
        request_headers: Extra HTTP headers for every request.
        cache: Cache implementation, or None to disable caching.
        base_url, stream_url, upload_url: API endpoint overrides;
            defaults target the public Twitter 1.1 endpoints.
        chunk_size: Media upload chunk size in bytes (default 1 MiB).
        use_gzip_compression: Request gzipped responses from Twitter.
        debugHTTP: Enable verbose HTTP-level debug logging.
        timeout: Per-request timeout in seconds.
        sleep_on_rate_limit: Sleep instead of failing when rate limited.
        tweet_mode: 'compat' or 'extended' tweet payloads.
        proxies: Dict of proxies handed to requests.

    Raises:
        TwitterError: If a consumer key is supplied without either
            application-only auth or a complete access token pair.
    """
    # check to see if the library is running on a Google App Engine
    # instance; see GAE.rst for more information
    if 'APPENGINE_RUNTIME' in os.environ:
        # Adapter ensures requests use app engine's urlfetch
        import requests_toolbelt.adapters.appengine
        requests_toolbelt.adapters.appengine.monkeypatch()
        # App Engine does not like this caching strategy, disable caching
        cache = None

    self.SetCache(cache)
    self._cache_timeout = Api.DEFAULT_CACHE_TIMEOUT
    self._input_encoding = input_encoding
    self._use_gzip = use_gzip_compression
    self._debugHTTP = debugHTTP
    self._shortlink_size = 19
    if timeout and timeout < 30:
        warnings.warn(
            "Warning: The Twitter streaming API sends 30s keepalives, "
            "the given timeout is shorter!")
    self._timeout = timeout
    self.__auth = None

    self._InitializeRequestHeaders(request_headers)
    self._InitializeUserAgent()
    self._InitializeDefaultParameters()

    self.rate_limit = RateLimit()
    self.sleep_on_rate_limit = sleep_on_rate_limit
    self.tweet_mode = tweet_mode
    self.proxies = proxies

    # Explicit is-None checks so an empty-string override is respected.
    if base_url is None:
        self.base_url = 'https://api.twitter.com/1.1'
    else:
        self.base_url = base_url

    if stream_url is None:
        self.stream_url = 'https://stream.twitter.com/1.1'
    else:
        self.stream_url = stream_url

    if upload_url is None:
        self.upload_url = 'https://upload.twitter.com/1.1'
    else:
        self.upload_url = upload_url

    self.chunk_size = chunk_size
    if self.chunk_size < 1024 * 16:
        # Bug fix: this warning string literal was split by a raw
        # newline in the source (a syntax error); reconstructed via
        # implicit string concatenation.
        warnings.warn(
            ("A chunk size lower than 16384 may result in too many "
             "requests to the Twitter API when uploading videos. You are "
             "strongly advised to increase it above 16384"))

    # A consumer key alone cannot authenticate anything: it needs either
    # app-only auth or a complete access token pair.
    if (consumer_key and not
            (application_only_auth or
             all([access_token_key, access_token_secret]))):
        raise TwitterError(
            {'message': "Missing oAuth Consumer Key or Access Token"})

    self.SetCredentials(consumer_key, consumer_secret, access_token_key,
                        access_token_secret, application_only_auth)

    if debugHTTP:
        try:
            import http.client as http_client  # python3
        except ImportError:
            import httplib as http_client  # python2
        http_client.HTTPConnection.debuglevel = 1
        # you need to initialize logging, otherwise you will not see
        # anything from requests
        logging.basicConfig()
        logging.getLogger().setLevel(logging.DEBUG)
        requests_log = logging.getLogger("requests.packages.urllib3")
        requests_log.setLevel(logging.DEBUG)
        requests_log.propagate = True

    self._session = requests.Session()
def _RequestUrl(self, url, verb, data=None, json=None, enforce_auth=True):
    """Request a url.

    Args:
        url: The web location we want to retrieve.
        verb: Either POST or GET.
        data: A dict of (str, unicode) key/value pairs.
        json: JSON-serializable body for POST requests without form data.
        enforce_auth: If True, require an authenticated Api instance and
            honor sleep_on_rate_limit before issuing the request.

    Returns:
        A requests.Response object, or 0 when verb is neither POST nor
        GET, or when a POST carries neither data nor json.
    """
    if enforce_auth:
        if not self.__auth:
            raise TwitterError(
                "The twitter.Api instance must be authenticated.")

        if url and self.sleep_on_rate_limit:
            limit = self.CheckRateLimit(url)

            if limit.remaining == 0:
                try:
                    # Sleep until the window resets, plus a 10s cushion;
                    # clamp at 0 in case the reset time already passed.
                    stime = max(int(limit.reset - time.time()) + 10, 0)
                    logger.debug(
                        'Rate limited requesting [%s], sleeping for [%s]',
                        url, stime)
                    time.sleep(stime)
                except ValueError:
                    pass

    if not data:
        data = {}

    if verb == 'POST':
        if data:
            if 'media_ids' in data:
                # media_ids go on the query string, not the form body.
                url = self._BuildUrl(
                    url, extra_params={'media_ids': data['media_ids']})
                resp = self._session.post(url,
                                          data=data,
                                          auth=self.__auth,
                                          timeout=self._timeout,
                                          proxies=self.proxies)
            elif 'media' in data:
                # Raw media is sent as a multipart file upload.
                resp = self._session.post(url,
                                          files=data,
                                          auth=self.__auth,
                                          timeout=self._timeout,
                                          proxies=self.proxies)
            else:
                resp = self._session.post(url,
                                          data=data,
                                          auth=self.__auth,
                                          timeout=self._timeout,
                                          proxies=self.proxies)
        elif json:
            resp = self._session.post(url,
                                      json=json,
                                      auth=self.__auth,
                                      timeout=self._timeout,
                                      proxies=self.proxies)
        else:
            resp = 0  # POST request, but without data or json
    elif verb == 'GET':
        # NOTE(review): this mutates the caller's dict when data was
        # passed in — confirm callers don't reuse the dict afterwards.
        data['tweet_mode'] = self.tweet_mode
        url = self._BuildUrl(url, extra_params=data)
        resp = self._session.get(url,
                                 auth=self.__auth,
                                 timeout=self._timeout,
                                 proxies=self.proxies)
    else:
        resp = 0  # if not a POST or GET request

    # Record the rate-limit headers from any real response so later
    # calls can consult self.rate_limit for this endpoint.
    if url and self.rate_limit and resp:
        limit = resp.headers.get('x-rate-limit-limit', 0)
        remaining = resp.headers.get('x-rate-limit-remaining', 0)
        reset = resp.headers.get('x-rate-limit-reset', 0)
        self.rate_limit.set_limit(url, limit, remaining, reset)

    return resp