# Bootstrap for the Domoblue system scraper: parse CLI arguments, configure
# the shared PyBikesScraper session (user agent, optional proxy), and define
# the helpers that fetch a per-service token and the station XML feed.
args = parser.parse_args()
outfile = args.outfile

proxies = {}
user_agent = 'Mozilla/5.0 (X11; Linux i686) AppleWebKit/535.19 (KHTML, like Gecko) Chrome/18.0.1025.168 Safari/535.19'
scraper = PyBikesScraper()
scraper.setUserAgent(user_agent)

# Skeleton of the system definition this script fills in and emits.
sysdef = {
    "system": "domoblue",
    "class": "Domoblue",
    "instances": [],
}

# Route all HTTP traffic through a proxy only when one was given on the CLI.
if args.proxy is not None:
    proxies['http'] = args.proxy
    scraper.setProxies(proxies)
    scraper.enableProxy()


def get_token(client_id):
    """Fetch the access token for *client_id* from the Domoblue site.

    Scrapes the token out of the token page's HTML with the TOKEN_RE
    regex and returns the first match.

    Side effect: sets the scraper's ``Referer`` header to the token page
    URL — presumably the server checks it on the follow-up XML request
    (TODO confirm against the Domoblue endpoint).

    Raises IndexError if no token is found on the page.
    """
    # Drop any Referer left over from a previous call before requesting
    # a fresh token page.
    if 'Referer' in scraper.headers:
        del scraper.headers['Referer']
    url = MAIN + TOKEN_URL.format(service=client_id)
    data = scraper.request(url)
    token = re.findall(TOKEN_RE, data)
    scraper.headers['Referer'] = url
    return token[0]


def get_xml(client_id):
    """Request the station XML feed for *client_id* using a fresh token."""
    token = get_token(client_id)
    url = MAIN + XML_URL.format(token=token, service=client_id)