# --- Command-line wiring and scraper setup for the Domoblue system dump ---
parser.add_argument('-v', action="store_true", dest='verbose', default=False,
                    help="Verbose output for debugging (no progress)")

args = parser.parse_args()
outfile = args.outfile

# Proxy mapping is only populated when --proxy is supplied below.
proxies = {}

# Spoof a desktop Chrome UA so the target site serves the normal markup.
user_agent = 'Mozilla/5.0 (X11; Linux i686) AppleWebKit/535.19 (KHTML, like Gecko) Chrome/18.0.1025.168 Safari/535.19'

scraper = PyBikesScraper()
scraper.setUserAgent(user_agent)

# Skeleton of the system-definition document; "instances" is filled later.
sysdef = {
    "system": "domoblue",
    "class": "Domoblue",
    "instances": []
}

if args.proxy is not None:
    proxies['http'] = args.proxy
    scraper.setProxies(proxies)
    scraper.enableProxy()


def get_token(client_id):
    # Fetch the per-service auth token page and extract the token with
    # TOKEN_RE. A stale Referer from a previous request is dropped first.
    # NOTE(review): the visible chunk ends here without returning `token`;
    # the remainder of this function lies outside this chunk.
    if 'Referer' in scraper.headers:
        del scraper.headers['Referer']
    url = MAIN + TOKEN_URL.format(service=client_id)
    data = scraper.request(url)
    token = re.findall(TOKEN_RE, data)
# NOTE(review): this line is a whitespace-mangled near-duplicate of the chunk
# above (same argparse wiring, scraper setup, sysdef and proxy handling), and
# it cuts off mid-`get_token` at a body-less `if` whose suite continues outside
# this chunk — left byte-identical; confirm against the original file layout.
parser.add_argument('--proxy', metavar = "host:proxy", dest = 'proxy', default = None, help="Use host:port as a proxy for site calls") parser.add_argument('-v', action="store_true", dest = 'verbose', default = False, help="Verbose output for debugging (no progress)") args = parser.parse_args() outfile = args.outfile proxies = {} user_agent = 'Mozilla/5.0 (X11; Linux i686) AppleWebKit/535.19 (KHTML, like Gecko) Chrome/18.0.1025.168 Safari/535.19' scraper = PyBikesScraper() scraper.setUserAgent(user_agent) sysdef = { "system": "domoblue", "class": "Domoblue", "instances": [] } if args.proxy is not None: proxies['http'] = args.proxy scraper.setProxies(proxies) scraper.enableProxy() def get_token(client_id): if 'Referer' in scraper.headers: