def run(args, stdout=sys.stdout, stderr=sys.stderr):
    """Upload a media file to Twitter's upload endpoint.

    Args:
        args: Parsed command-line arguments; ``args.file`` is an open
            file object whose contents are uploaded, and
            ``args.additional_owners`` is an optional list of user IDs.
        stdout: Stream that receives the diagnostic size message and
            the API response.
        stderr: Stream for error output (currently unused).
    """
    raw_data = args.file.read()
    # Fix: the size message previously went to the real sys.stdout,
    # ignoring the injected `stdout` stream that output() below uses.
    print(f'file size = {len(raw_data) >> 20} MB', file=stdout)

    # Media uploads for images are limited to 5MB in file size,
    # and for videos are limited to 15MB. For chunked uploads,
    # the maximum chunk size is 5MB.
    #
    # MIME-types supported by this endpoint: PNG, JPEG, BMP, WEBP,
    # GIF, Animated GIF.
    kwargs = dict(media=raw_data)

    # A maximum of 100 additional owners may be specified.
    if args.additional_owners:
        kwargs['additional_owners'] = args.additional_owners

    # Uploaded media files will be available for use for 60 minutes
    # before they are flushed from the servers (if not associated
    # with a Tweet or Card).
    twhandler = twitter_instance(domain='upload.twitter.com')
    response = twhandler.media.upload(**kwargs)
    output(response, stdout)
def main(args=None):
    """Parse command-line arguments and upload a media file to Twitter.

    Args:
        args: Raw command-line arguments, or None to use ``sys.argv``.
    """
    parser = configure()
    parsed = parser.parse_args(args)

    media_bytes = parsed.file.read()
    print('file size = {} MB'.format(len(media_bytes) >> 20))

    # Media uploads for images are limited to 5MB in file size,
    # and for videos are limited to 15MB. For chunked uploads,
    # the maximum chunk size is 5MB.
    #
    # MIME-types supported by this endpoint: PNG, JPEG, BMP, WEBP,
    # GIF, Animated GIF.
    params = {'media': media_bytes}

    # A maximum of 100 additional owners may be specified.
    if parsed.additional_owners:
        params['additional_owners'] = parsed.additional_owners

    # Uploaded media files will be available for use for 60 minutes
    # before they are flushed from the servers (if not associated
    # with a Tweet or Card).
    handler = twitter_instance(domain='upload.twitter.com')
    output(handler.media.upload(**params))
def main():
    """Search tweets and write the results to standard output.

    Parses command-line arguments, then either issues a single
    ``search/tweets`` request or, when the ``full`` option is set,
    pages backwards through the timeline until no older tweets remain.

    Returns:
        None.
    """
    parser = configure()
    args = vars(parser.parse_args())
    logger = make_logger('twsearch')
    twhandler = twitter_instance()
    request = twhandler.search.tweets

    # Forward only the search parameters the user actually supplied.
    kwargs = {k:args[k] for k in (
        'q',
        'geocode',
        'lang',
        'locale',
        'result_type',
        'count',
        'until',
        'since_id',
        'max_id',
        'include_entities',)
        if (k in args) and (args[k] is not None)}

    results = None
    if args['full']:
        # Paged mode: request the maximum page size and walk backwards
        # by repeatedly lowering max_id below the oldest id seen.
        results = []
        kwargs['count'] = COUNT_MAX
        try:
            while True:
                response = request(**kwargs)
                #metadata = response['search_metadata']
                if 'statuses' in response and response['statuses']:
                    statuses = response['statuses']
                    #max_id = metadata['max_id']
                    # Statuses arrive newest-first, so the last entry
                    # carries the oldest id on this page.
                    since_id = statuses[-1]['id']
                else:
                    # No statuses in the response: nothing left to page.
                    logger.info("finished")
                    break
                logger.info('search.tweets params={}'.format(kwargs))
                results.append(response)
                # A short page means the timeline is exhausted.
                if len(statuses) < kwargs['count']:
                    logger.info("finished")
                    break
                # Exclude the oldest tweet already seen from the next page.
                kwargs['max_id'] = since_id - 1
                # Brief pause between requests to stay within rate limits.
                time.sleep(2)
        except TwitterHTTPError as ex:
            logger.info('{}'.format(ex))
            #raise
    else:
        # Single-request mode.
        logger.info('search.tweets params={}'.format(kwargs))
        results = request(**kwargs)
    output(results)
def run(args, stdout=sys.stdout, stderr=sys.stderr):
    """Search tweets and write the results to *stdout*.

    In ``full`` mode the search pages backwards through the timeline
    (lowering ``max_id`` each round) until no older tweets remain;
    otherwise a single request is issued.

    Args:
        args: Mapping of parsed command-line options.
        stdout: Stream that receives the search results.
        stderr: Stream used for log output.
    """
    logger = make_logger('twsearch', stderr)
    api = twitter_instance()
    search = api.search.tweets

    # Forward only the options the caller actually provided.
    accepted = (
        'q', 'geocode', 'lang', 'locale', 'result_type',
        'count', 'until', 'since_id', 'max_id', 'include_entities',
    )
    params = {name: args[name] for name in accepted
              if name in args and args[name] is not None}

    collected = None
    if args['full']:
        collected = []
        params['count'] = COUNT_MAX
        try:
            while True:
                page = search(**params)
                if not ('statuses' in page and page['statuses']):
                    logger.info("finished")
                    break
                tweets = page['statuses']
                # Newest-first ordering: the last entry is the oldest id.
                oldest_id = tweets[-1]['id']

                logger.info(f'search.tweets params={params}')
                collected.append(page)

                # A short page means the timeline is exhausted.
                if len(tweets) < params['count']:
                    logger.info("finished")
                    break

                # Exclude the oldest tweet already seen from the next page.
                params['max_id'] = oldest_id - 1
                time.sleep(2)
        except TwitterHTTPError as ex:
            logger.info('exception', exc_info=ex)
    else:
        logger.info(f'search.tweets params={params}')
        collected = search(**params)

    output(collected, stdout)
def run(args, stdout=sys.stdout, stderr=sys.stderr):
    """Write the current Twitter rate-limit status to *stdout*.

    Args:
        args: Parsed command-line arguments; ``args.resources`` names
            the resource families to query.
        stdout: Stream that receives the API response.
        stderr: Stream for error output (currently unused).
    """
    api = twitter_instance()

    # Get the current rate limits for methods belonging to the
    # specified resource families.
    status = api.application.rate_limit_status(resources=args.resources)
    output(status, stdout)
def run(args, stdout=sys.stdout, stderr=sys.stderr):
    """Search tweets and write the results to *stdout*.

    When ``args['full']`` is true, pages backwards through the search
    timeline (lowering ``max_id`` each round) until no older tweets
    remain; otherwise issues a single request.

    Args:
        args: Mapping of parsed command-line options.
        stdout: Stream that receives the search results.
        stderr: Stream used for log output.
    """
    logger = make_logger('twsearch', stderr)
    twhandler = twitter_instance()
    request = twhandler.search.tweets

    # Forward only the search parameters the user actually supplied.
    kwargs = {k: args[k] for k in (
        'q',
        'geocode',
        'lang',
        'locale',
        'result_type',
        'count',
        'until',
        'since_id',
        'max_id',
        'include_entities',)
        if (k in args) and (args[k] is not None)}

    results = None
    if args['full']:
        results = []
        kwargs['count'] = COUNT_MAX
        try:
            while True:
                response = request(**kwargs)
                if 'statuses' in response and response['statuses']:
                    statuses = response['statuses']
                    # Statuses arrive newest-first; the last entry is
                    # the oldest id on this page.
                    since_id = statuses[-1]['id']
                else:
                    logger.info("finished")
                    break

                # Fix: use lazy %-style logging arguments instead of an
                # f-string so the dict is only formatted when the record
                # is actually emitted; the message text is unchanged.
                logger.info('search.tweets params=%s', kwargs)
                results.append(response)

                # A short page means the timeline is exhausted.
                if len(statuses) < kwargs['count']:
                    logger.info("finished")
                    break

                # Exclude the oldest tweet already seen from the next page.
                kwargs['max_id'] = since_id - 1

                # Brief pause between requests to stay within rate limits.
                time.sleep(2)
        except TwitterHTTPError as ex:
            logger.info('exception', exc_info=ex)
    else:
        logger.info('search.tweets params=%s', kwargs)
        results = request(**kwargs)

    output(results, stdout)
def main(command_line=None):
    """Print the current Twitter rate-limit status.

    Args:
        command_line: Raw command line arguments.
    """
    args = configure().parse_args(command_line)

    # Get the current rate limits for methods belonging to the
    # specified resource families.
    handler = twitter_instance()
    status = handler.application.rate_limit_status(resources=args.resources)
    output(status)
def request_wrapper():
    """Output the response received from Twitter."""
    handler = twitter_instance()
    response = request(handler)()
    output(response)
def request_wrapper(stdout, stderr):
    """Send the prepared request and write Twitter's response to *stdout*."""
    handler = twitter_instance()
    response = request(handler)()
    output(response, stdout)