def __init__(self, base_location=None, api_key=None, is_verbose=False,
             http_user=None, http_pass=None):
    if base_location is not None:
        self.base_location = base_location
    if api_key:
        self.api_key = api_key
    else:
        self.api_key = self._get_api_key_from_config()
    self.is_verbose = is_verbose
    if http_user and http_pass:
        password_mgr = HTTPPasswordMgrWithDefaultRealm()
        password_mgr.add_password(None, base_location, http_user, http_pass)
        handler = HTTPBasicAuthHandler(password_mgr)
        opener = build_opener(handler)
        install_opener(opener)
def run_dashboard(dashboard_name):
    xsl_filename = dashboard_name + "_report.xsl"
    feed_url = ("http://" + DTSERVER + "/rest/management/reports/create/" +
                dashboard_name + "?type=XML&format=XML+Export&filter=tf:" + TIMEFRAME)

    abspath = os.path.abspath(__file__)
    dname = os.path.dirname(abspath)
    os.chdir(dname)

    # Set up an HTTP request with username/password authentication
    try:
        # create a password manager
        password_mgr = HTTPPasswordMgrWithDefaultRealm()
        # Add the username and password.
        password_mgr.add_password(None, feed_url, USERNAME, PASSWORD)
        opener = build_opener(HTTPBasicAuthHandler(password_mgr))
        file = opener.open(feed_url)
    except URLError, e:
        print 'URLError: "%s"' % e
        raise
def get(self, uri, params={}, headers={}, with_status_code=False,
        timeout=10, user=None, password=None):
    data = None  # always none in GET
    if params:
        uri = "%s?%s" % (uri, urlencode(params))

    # SSL, user/password and basic
    # NOTE: currently don't manage ssl & user/password
    if uri.startswith('https://'):
        handler = HTTPSHandler(context=self.ssl_context)
    elif user and password:
        passwordMgr = HTTPPasswordMgrWithDefaultRealm()
        passwordMgr.add_password(None, uri, user, password)
        handler = HTTPBasicAuthHandler(passwordMgr)
    else:
        handler = HTTPHandler

    url_opener = build_opener(handler)

    req = Request(uri, data)
    req.get_method = lambda: 'GET'
    for (k, v) in headers.items():
        req.add_header(k, v)

    request = url_opener.open(req, timeout=timeout)
    response = request.read()
    status_code = request.code
    request.close()

    if not with_status_code:
        return response
    else:
        return (status_code, response)
def query(self, query, ts_start, ts_end):
    # target = 'summarize({},"{}","avg")'.format(
    #     query, '99year')  @TODO remove if not needed

    # build graphite url
    args = {
        '__auth_token': self.token,
        'target': query,
        'format': 'json',
        'from': ts_start,
        'until': ts_end,
    }
    url = '{}/render?'.format(self.url)
    for k, v in args.iteritems():
        print k
        print v
        url += '{}={}&'.format(quote(k), quote(v))
    logger.debug('Query URL is {}'.format(url))

    # Basic auth header
    password_mgr = HTTPPasswordMgrWithDefaultRealm()
    password_mgr.add_password(
        None,
        self.graphite_url,
        self.username,
        self.password,
    )
    auth_handler = HTTPBasicAuthHandler(password_mgr)

    # Ignore ssl cert check
    ctx = create_default_context()
    ctx.check_hostname = False
    ctx.verify_mode = CERT_NONE
    ssl_handler = HTTPSHandler(context=ctx)

    opener = build_opener(ssl_handler, auth_handler)
    install_opener(opener)

    result = json.loads(urlopen(url).read())
    return result
def json_for(cls, period):
    """Return the JSON-formatted WebTrends stats for the given period.

    Make one attempt to fetch and reload the data. If something fails,
    it's the caller's responsibility to retry.

    """
    auth_handler = HTTPBasicAuthHandler()
    auth_handler.add_password(realm=settings.WEBTRENDS_REALM,
                              uri=settings.WEBTRENDS_WIKI_REPORT_URL,
                              user=settings.WEBTRENDS_USER,
                              passwd=settings.WEBTRENDS_PASSWORD)
    opener = build_opener(auth_handler)
    start, end = period_dates()[period]
    url = (settings.WEBTRENDS_WIKI_REPORT_URL +
           '&start_period=%s&end_period=%s' % (start, end))
    try:
        # TODO: A wrong username or password results in a recursion depth
        # error.
        return opener.open(url).read()
    except IOError, e:
        raise StatsIOError(*e.args)
def remote(self, remote_base=None, username=None, password=None):
    """
    Configures remote access

    Parameters
    ----------
    remote_base : str
        base URL path for remote repository
    username : str
        user name for remote repository
    password : str
        password for remote repository
    """
    if remote_base is not None:
        self.remote_base = remote_base
    self._remote = True
    self.set_auth(username=username, password=password)
    if self.auth.ready():
        passman = HTTPPasswordMgrWithDefaultRealm()
        passman.add_password(None, self.remote_base,
                             self.auth.username, self.auth.password)
        authhandler = HTTPBasicAuthHandler(passman)
        opener = build_opener(authhandler)
        install_opener(opener)
def get(self, date):
    """ Get data """
    from lxml import html
    from urllib2 import (HTTPBasicAuthHandler, HTTPPasswordMgrWithDefaultRealm,
                         build_opener, urlopen)

    if not self.verify:
        import ssl
        ssl._create_default_https_context = ssl._create_unverified_context

    url = self.get_url(date)
    if self.auth:
        password_mgr = HTTPPasswordMgrWithDefaultRealm()
        password_mgr.add_password(None, url, self.auth['username'],
                                  self.auth['password'])
        handler = HTTPBasicAuthHandler(password_mgr)
        opener = build_opener(handler)
        src = opener.open(url).read()
    else:
        src = urlopen(url).read()
    return html.fromstring(src)
def login_stage(self):
    """
    Using the hardcoded username and password, attempts a Dekiwiki login
    and returns an authtoken.

    Example authtoken:
    252017_634285545555468650_3b7d87b75c5b0c0626ad8c9884e4398f
    """
    auth_url = self.__class__.auth_url
    username = self.__class__.username
    password = self.__class__.password
    password_mgr = HTTPPasswordMgrWithDefaultRealm()
    password_mgr.add_password(None, auth_url, username, password)
    handler = HTTPBasicAuthHandler(password_mgr)
    opener = build_opener(handler)
    resp = opener.open(auth_url)
    authtoken = resp.read()
    if not authtoken or not self.authtoken_re.match(authtoken):
        self.fail("Unable to retrieve an authtoken for user: %s password: %s "
                  "at %s, got %s" % (username, password, auth_url, str(authtoken)))
    return authtoken
from urllib2 import HTTPBasicAuthHandler, build_opener, install_opener

# Database settings
database_connect = "dbname='ridprod'"
kv1_database_connect = "dbname='kv1tmp'"
iff_database_connect = "dbname='ifftmp'"
pool_generation_enabled = False
import_arriva_trains = False

# Subscribe to https://ndovloket.nl/aanvragen/
#
# username and password can be found here:
# https://groups.google.com/forum/#!topic/ndovloket-meldingen/IxEPpXds_Qo

# NDOVLoket settings
ndovloket_url = "data.ndovloket.nl"
ndovloket_user = None
ndovloket_password = None

if not ndovloket_user or not ndovloket_password:
    print("Subscribe to https://ndovloket.nl/aanvragen/\n\n"
          "username and password can be found here:\n"
          "https://groups.google.com/forum/#!topic/ndovloket-meldingen/IxEPpXds_Qo")

auth_handler = HTTPBasicAuthHandler()
auth_handler.add_password(realm=ndovloket_url,
                          uri=ndovloket_url,
                          user=ndovloket_user,
                          passwd=ndovloket_password)
opener = build_opener(auth_handler)
install_opener(opener)
printinfo("Processing %s..." % filename) result = urlsplit(filename) scheme = result.scheme if not scheme: file = open(filename) elif result.username: if result.port: netloc = '%s:%s' % (result.hostname, result.port) else: netloc = result.hostname url = urlunsplit( (scheme, netloc, result.path, result.query, result.fragment)) password_mgr = HTTPPasswordMgrWithDefaultRealm() password_mgr.add_password(None, url, result.username, result.password) handler = HTTPBasicAuthHandler(password_mgr) opener = build_opener(handler) file = opener.open(url) else: file = urlopen(filename) input_document = odf_get_document(file) # Page page = odf_create_draw_page(name=u"page%d" % (i + 1), master_page=first_master_page) # Title Frame title_frame = get_title_frame(first_master_page) name = unicode(filename.split('/')[-1]) source = u"filesystem" if not scheme else scheme title_frame.set_text_content(u"%s (%s)" % (name, source)) page.append(title_frame) # Get info
def main():
    out = options["output"]
    wfs_url = options["url"]

    request_base = "REQUEST=GetFeature&SERVICE=WFS&VERSION=1.0.0"
    wfs_url += request_base

    if options["name"]:
        wfs_url += "&TYPENAME=" + options["name"]

    if options["srs"]:
        wfs_url += "&SRS=" + options["srs"]

    if options["maximum_features"]:
        wfs_url += "&MAXFEATURES=" + options["maximum_features"]
        if int(options["maximum_features"]) < 1:
            # GTC Invalid WFS maximum features parameter
            grass.fatal(_("Invalid maximum number of features"))

    if options["start_index"]:
        wfs_url += "&STARTINDEX=" + options["start_index"]
        if int(options["start_index"]) < 1:
            # GTC Invalid WFS start index parameter
            grass.fatal(_('Features begin with index "1"'))

    if flags["r"]:
        bbox = grass.read_command("g.region", flags="w").split("=")[1]
        wfs_url += "&BBOX=" + bbox

    if flags["l"]:
        wfs_url = options["url"] + "REQUEST=GetCapabilities&SERVICE=WFS&VERSION=1.0.0"

    tmp = grass.tempfile()
    tmpxml = tmp + ".xml"

    grass.debug(wfs_url)

    # Set user and password if given
    if options["username"] and options["password"]:
        grass.message(_("Setting username and password..."))
        if os.path.isfile(options["username"]):
            with open(options["username"]) as f:
                filecontent = f.read()
                user = filecontent.strip()
        elif options["username"] in os.environ:
            user = os.environ[options["username"]]
        else:
            user = options["username"]
        if os.path.isfile(options["password"]):
            with open(options["password"]) as f:
                filecontent = f.read()
                pw = filecontent.strip()
        elif options["password"] in os.environ:
            pw = os.environ[options["password"]]
        else:
            pw = options["password"]

        passmgr = HTTPPasswordMgrWithDefaultRealm()
        passmgr.add_password(None, wfs_url, user, pw)
        authhandler = HTTPBasicAuthHandler(passmgr)
        opener = build_opener(authhandler)
        install_opener(opener)

    # GTC Downloading WFS features
    grass.message(_("Retrieving data..."))
    try:
        inf = urlopen(wfs_url)
    except HTTPError as e:
        # GTC WFS request HTTP failure
        grass.fatal(
            _("The server couldn't fulfill the request.\nError code: %s") % e.code)
    except URLError as e:
        # GTC WFS request network failure
        grass.fatal(_("Failed to reach the server.\nReason: %s") % e.reason)

    outf = open(tmpxml, "wb")
    while True:
        s = inf.read()
        if not s:
            break
        outf.write(s)
    inf.close()
    outf.close()

    if flags["l"]:
        import shutil

        if os.path.exists("wms_capabilities.xml"):
            grass.fatal(_('A file called "wms_capabilities.xml" already exists here'))
        # os.move() might fail if the temp file is on another volume, so we copy instead
        shutil.copy(tmpxml, "wms_capabilities.xml")
        try_remove(tmpxml)
        sys.exit(0)

    grass.message(_("Importing data..."))
    try:
        grass.run_command("v.in.ogr", flags="o", input=tmpxml, output=out)
        grass.message(_("Vector map <%s> imported from WFS.") % out)
    except:
        grass.message(_("WFS import failed"))
    finally:
        try_remove(tmpxml)
def __init__(self, username, password, logger=None):
    password_mgr = HTTPPasswordMgrWithDefaultRealm()
    password_mgr.add_password(None, self.posturl, username, password)
    auth_handler = HTTPBasicAuthHandler(password_mgr)
    self.ua = UserAgent(handlers=[auth_handler])
    self.log = logger
def _setup_opener(self):
    password_manager = HTTPPasswordMgrWithDefaultRealm()
    password_manager.add_password(None, self.endpoint,
                                  self.username, self.password)
    auth_manager = HTTPBasicAuthHandler(password_manager)
    opener = build_opener(auth_manager)
    install_opener(opener)
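# Many snippets in this collection target Python 2's urllib2. A minimal,
# self-contained sketch of the same "install a global basic-auth opener"
# pattern on Python 3's urllib.request might look like the following; the
# endpoint and credentials below are placeholders, not values taken from
# any snippet above.
from urllib.request import (HTTPBasicAuthHandler, HTTPPasswordMgrWithDefaultRealm,
                            build_opener, install_opener, urlopen)


def setup_basic_auth_opener(endpoint, username, password):
    # Register the credentials for any realm on the endpoint, then install
    # the opener globally so later urlopen() calls send HTTP basic auth.
    password_manager = HTTPPasswordMgrWithDefaultRealm()
    password_manager.add_password(None, endpoint, username, password)
    install_opener(build_opener(HTTPBasicAuthHandler(password_manager)))


setup_basic_auth_opener("https://api.example.org/", "user", "secret")
response = urlopen("https://api.example.org/status")  # now authenticated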
def auth_Connection(url):
    passman = HTTPPasswordMgrWithDefaultRealm()  # creating a password manager
    passman.add_password(None, url, username, password)
    authhandler = HTTPBasicAuthHandler(passman)
    opener = build_opener(authhandler)
    install_opener(opener)
def __init__(self, api_url, username_provider):
    self._api_url = api_url
    self._username_provider = username_provider
    auth_handler = HTTPBasicAuthHandler(PasswordManager(username_provider))
    self._opener = build_opener(auth_handler)
def openURL(url_base, data, method='Get', cookies=None,
            username=None, password=None, timeout=30):
    '''Function to open URLs: a wrapper around urllib2.urlopen with additional
    checks for OGC service exceptions and URL formatting. Also handles cookies
    and simple username/password authentication.'''
    url_base.strip()
    lastchar = url_base[-1]
    if lastchar not in ['?', '&']:
        if url_base.find('?') == -1:
            url_base = url_base + '?'
        else:
            url_base = url_base + '&'

    if username and password:
        # Provide login information in order to use the WMS server
        # Create an OpenerDirector with support for Basic HTTP
        # Authentication...
        passman = HTTPPasswordMgrWithDefaultRealm()
        passman.add_password(None, url_base, username, password)
        auth_handler = HTTPBasicAuthHandler(passman)
        opener = urllib2.build_opener(auth_handler)
        openit = opener.open
    else:
        # NOTE: optionally set debuglevel>0 to debug HTTP connection
        # opener = urllib2.build_opener(urllib2.HTTPHandler(debuglevel=0))
        # openit = opener.open
        openit = urlopen

    try:
        if method == 'Post':
            req = Request(url_base, data)
            # set appropriate header if posting XML
            try:
                xml = etree.fromstring(data)
                req.add_header('Content-Type', "text/xml")
            except:
                pass
        else:
            req = Request(url_base + data)
        if cookies is not None:
            req.add_header('Cookie', cookies)
        u = openit(req, timeout=timeout)
    except HTTPError as e:
        # Some servers may set the http header to 400 if returning an OGC
        # service exception or 401 if unauthorised.
        if e.code in [400, 401]:
            raise ServiceException(e.read())
        else:
            raise e

    # check for service exceptions without the http header set
    if 'Content-Type' in u.info() and \
            u.info()['Content-Type'] in ['text/xml', 'application/xml']:
        # just in case 400 headers were not set, going to have to read the xml
        # to see if it's an exception report.
        # wrap the url stream in an extended StringIO object so it's re-readable
        u = RereadableURL(u)
        se_xml = u.read()
        se_tree = etree.fromstring(se_xml)
        serviceException = se_tree.find('{http://www.opengis.net/ows}Exception')
        if serviceException is None:
            serviceException = se_tree.find('ServiceException')
        if serviceException is not None:
            raise ServiceException(str(serviceException.text).strip())
        u.seek(0)  # return cursor to start of u
    return u
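# A hypothetical call of openURL() above against a basic-auth protected OGC
# endpoint; the URL and credentials are placeholders.
capabilities = openURL('http://example.org/wms',
                       'SERVICE=WMS&REQUEST=GetCapabilities',
                       method='Get',
                       username='wms_user', password='wms_secret',
                       timeout=30)
capabilities_xml = capabilities.read()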
def request(method, url, content_type, data=None, params=None, headers=None,
            cookies=None, auth=None, redirection=True, timeout=60):
    """Creates a new HTTP request and processes it.

    :param method: the type of request to be created (``GET`` or ``POST``)
    :type method: ``str``.
    :param url: the url of the request.
    :type url: ``str``.
    :param content_type: the content type to be returned (``raw`` or ``json``)
    :type content_type: ``str``.
    :param data: the data to be posted.
    :type data: ``any``.
    :param params: mapping of url parameters.
    :type params: :class:`dict`.
    :param headers: the headers of the request.
    :type headers: :class:`dict`.
    :param cookies: the cookies of the request.
    :type cookies: :class:`dict`.
    :param auth: the authentication information to be used.
    :type auth: :class:`dict`.
    :param redirection: a flag indicating whether redirection is allowed or not.
    :type redirection: ``boolean``.
    :param timeout: a timeout for the request.
    :type timeout: ``int``.
    :return: the content obtained from executing the request.
    :rtype: ``str`` or ``json``.
    """
    openers = []
    if not redirection:
        openers.append(NoRedirectHttpHandler())

    if auth:
        manager = HTTPPasswordMgrWithDefaultRealm()
        manager.add_password(None, url, auth['username'], auth['password'])
        openers.append(HTTPBasicAuthHandler(manager))

    opener = build_opener(*openers)
    install_opener(opener)

    headers = headers or {}
    if cookies:
        for cookie in cookies.keys():
            headers['Cookie'] = "{0}={1}".format(cookie, cookies[cookie])

    if 'user-agent' not in headers:
        headers['user-agent'] = 'Alfred-Workflow/1.17'

    encodings = [s.strip() for s in headers.get('accept-encoding', '').split(',')]
    if 'gzip' not in encodings:
        encodings.append('gzip')
    headers['accept-encoding'] = ', '.join(encodings)

    if method == 'POST' and not data:
        data = ''

    if data and isinstance(data, dict):
        data = urlencode(format_headers(data))

    headers = format_headers(headers)

    if isinstance(url, unicode):
        url = url.encode('utf-8')

    if params:
        scheme, netloc, path, query, fragment = urlparse.urlsplit(url)
        if query:
            url_params = urlparse.parse_qs(query)
            url_params.update(params)
            params = url_params
        query = urlencode(format_headers(params), doseq=True)
        url = urlparse.urlunsplit((scheme, netloc, path, query, fragment))

    try:
        response = urlopen(Request(url, data, headers), timeout=timeout)
        response_headers = response.info()
        content = response.read()
        if 'gzip' in response_headers.get('content-encoding', '') \
                or 'gzip' in response_headers.get('transfer-encoding', ''):
            content = unzip(content)
        if content_type.lower() == 'json':
            return json.loads(content, 'utf-8')
        return content
    except (HTTPError, URLError):
        send_notification('Workflow', 'Error while calling {0}'.format(url))
        if content_type.lower() == 'json':
            return {}
        return ''
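# A hypothetical use of request() above: fetch JSON from a basic-auth
# protected API. The URL, parameters and credentials are placeholders.
result = request('GET', 'https://api.example.org/items', 'json',
                 params={'page': 1},
                 auth={'username': 'api_user', 'password': 'api_secret'})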
def getRegexParsed(regexs, url, cookieJar=None, forCookieJarOnly=False,
                   recursiveCall=False, cachedPages={}, rawPost=False,
                   cookie_jar_file=None):
    # 0,1,2 = URL, regexOnly, CookieJarOnly
    # cachedPages = {}
    # print 'url',url
    doRegexs = re.compile('\$doregex\[([^\]]*)\]').findall(url)
    # print 'doRegexs',doRegexs,regexs
    setresolved = True
    for k in doRegexs:
        if k in regexs:
            # print 'processing ',k
            m = regexs[k]
            # print m
            cookieJarParam = False
            if 'cookiejar' in m:  # so either create or reuse existing jar
                # print 'cookiejar exists',m['cookiejar']
                cookieJarParam = m['cookiejar']
                if '$doregex' in cookieJarParam:
                    cookieJar = getRegexParsed(regexs, m['cookiejar'], cookieJar,
                                               True, True, cachedPages)
                    cookieJarParam = True
                else:
                    cookieJarParam = True
            # print 'm[cookiejar]',m['cookiejar'],cookieJar
            if cookieJarParam:
                if cookieJar is None:
                    # print 'create cookie jar'
                    cookie_jar_file = None
                    if 'open[' in m['cookiejar']:
                        cookie_jar_file = m['cookiejar'].split('open[')[1].split(']')[0]
                        # print 'cookieJar from file name',cookie_jar_file
                    cookieJar = getCookieJar(cookie_jar_file)
                    # print 'cookieJar from file',cookieJar
                    if cookie_jar_file:
                        saveCookieJar(cookieJar, cookie_jar_file)
                    # import cookielib
                    # cookieJar = cookielib.LWPCookieJar()
                    # print 'cookieJar new',cookieJar
                elif 'save[' in m['cookiejar']:
                    cookie_jar_file = m['cookiejar'].split('save[')[1].split(']')[0]
                    complete_path = os.path.join(profile, cookie_jar_file)
                    # print 'complete_path',complete_path
                    saveCookieJar(cookieJar, cookie_jar_file)

            if m['page'] and '$doregex' in m['page']:
                pg = getRegexParsed(regexs, m['page'], cookieJar,
                                    recursiveCall=True, cachedPages=cachedPages)
                if len(pg) == 0:
                    pg = 'http://regexfailed'
                m['page'] = pg

            if 'setcookie' in m and m['setcookie'] and '$doregex' in m['setcookie']:
                m['setcookie'] = getRegexParsed(regexs, m['setcookie'], cookieJar,
                                                recursiveCall=True,
                                                cachedPages=cachedPages)
            if 'appendcookie' in m and m['appendcookie'] and '$doregex' in m['appendcookie']:
                m['appendcookie'] = getRegexParsed(regexs, m['appendcookie'], cookieJar,
                                                   recursiveCall=True,
                                                   cachedPages=cachedPages)
            if 'post' in m and '$doregex' in m['post']:
                m['post'] = getRegexParsed(regexs, m['post'], cookieJar,
                                           recursiveCall=True,
                                           cachedPages=cachedPages)
                # print 'post is now',m['post']
            if 'rawpost' in m and '$doregex' in m['rawpost']:
                m['rawpost'] = getRegexParsed(regexs, m['rawpost'], cookieJar,
                                              recursiveCall=True,
                                              cachedPages=cachedPages,
                                              rawPost=True)
                # print 'rawpost is now',m['rawpost']
            if 'rawpost' in m and '$epoctime$' in m['rawpost']:
                m['rawpost'] = m['rawpost'].replace('$epoctime$', getEpocTime())
            if 'rawpost' in m and '$epoctime2$' in m['rawpost']:
                m['rawpost'] = m['rawpost'].replace('$epoctime2$', getEpocTime2())

            link = ''
            if m['page'] and m['page'] in cachedPages and not 'ignorecache' in m and forCookieJarOnly is False:
                # print 'using cache page',m['page']
                link = cachedPages[m['page']]
            else:
                if m['page'] and not m['page'] == '' and m['page'].startswith('http'):
                    if '$epoctime$' in m['page']:
                        m['page'] = m['page'].replace('$epoctime$', getEpocTime())
                    if '$epoctime2$' in m['page']:
                        m['page'] = m['page'].replace('$epoctime2$', getEpocTime2())
                    # print 'Ingoring Cache',m['page']
                    page_split = m['page'].split('|')
                    pageUrl = page_split[0]
                    header_in_page = None
                    if len(page_split) > 1:
                        header_in_page = page_split[1]

                    # if
                    # proxy = ProxyHandler({ ('https' ? proxytouse[:5]=="https":"http") : proxytouse})
                    # opener = build_opener(proxy)
                    # install_opener(opener)
                    # print 'getproxies',getproxies()
                    current_proxies = ProxyHandler(getproxies())

                    # print 'getting pageUrl',pageUrl
                    req = Request(pageUrl)
                    if 'proxy' in m:
                        proxytouse = m['proxy']
                        # print 'proxytouse',proxytouse
                        # getproxies= lambda: {}
                        if pageUrl[:5] == "https":
                            proxy = ProxyHandler({'https': proxytouse})
                            # req.set_proxy(proxytouse, 'https')
                        else:
                            proxy = ProxyHandler({'http': proxytouse})
                            # req.set_proxy(proxytouse, 'http')
                        opener = build_opener(proxy)
                        install_opener(opener)

                    req.add_header(
                        'User-Agent',
                        'Mozilla/5.0 (Windows NT 6.1; rv:14.0) Gecko/20100101 Firefox/14.0.1'
                    )
                    proxytouse = None

                    if 'referer' in m:
                        req.add_header('Referer', m['referer'])
                    if 'accept' in m:
                        req.add_header('Accept', m['accept'])
                    if 'agent' in m:
                        req.add_header('User-agent', m['agent'])
                    if 'x-req' in m:
                        req.add_header('X-Requested-With', m['x-req'])
                    if 'x-addr' in m:
                        req.add_header('x-addr', m['x-addr'])
                    if 'x-forward' in m:
                        req.add_header('X-Forwarded-For', m['x-forward'])
                    if 'setcookie' in m:
                        # print 'adding cookie',m['setcookie']
                        req.add_header('Cookie', m['setcookie'])
                    if 'appendcookie' in m:
                        # print 'appending cookie to cookiejar',m['appendcookie']
                        cookiestoApend = m['appendcookie']
                        cookiestoApend = cookiestoApend.split(';')
                        for h in cookiestoApend:
                            n, v = h.split('=')
                            w, n = n.split(':')
                            ck = cookielib.Cookie(version=0, name=n, value=v,
                                                  port=None, port_specified=False,
                                                  domain=w, domain_specified=False,
                                                  domain_initial_dot=False,
                                                  path='/', path_specified=True,
                                                  secure=False, expires=None,
                                                  discard=True, comment=None,
                                                  comment_url=None,
                                                  rest={'HttpOnly': None},
                                                  rfc2109=False)
                            cookieJar.set_cookie(ck)
                    if 'origin' in m:
                        req.add_header('Origin', m['origin'])
                    if header_in_page:
                        header_in_page = header_in_page.split('&')
                        for h in header_in_page:
                            n, v = h.split('=')
                            req.add_header(n, v)

                    if cookieJar is not None:
                        # print 'cookieJarVal',cookieJar
                        cookie_handler = HTTPCookieProcessor(cookieJar)
                        opener = build_opener(cookie_handler,
                                              HTTPBasicAuthHandler(),
                                              HTTPHandler())
                        opener = install_opener(opener)
                        # print 'noredirect','noredirect' in m
                        if 'noredirect' in m:
                            opener = build_opener(cookie_handler, NoRedirection,
                                                  HTTPBasicAuthHandler(),
                                                  HTTPHandler())
                            opener = install_opener(opener)
                    elif 'noredirect' in m:
                        opener = build_opener(NoRedirection,
                                              HTTPBasicAuthHandler(),
                                              HTTPHandler())
                        opener = install_opener(opener)

                    if 'connection' in m:
                        # print '..........................connection//////.',m['connection']
                        from keepalive import HTTPHandler
                        keepalive_handler = HTTPHandler()
                        opener = build_opener(keepalive_handler)
                        install_opener(opener)

                    # print 'after cookie jar'
                    post = None

                    if 'post' in m:
                        postData = m['post']
                        # if '$LiveStreamRecaptcha' in postData:
                        #     (captcha_challenge,catpcha_word,idfield)=processRecaptcha(m['page'],cookieJar)
                        #     if captcha_challenge:
                        #         postData=postData.replace('$LiveStreamRecaptcha','manual_recaptcha_challenge_field:'+captcha_challenge+',recaptcha_response_field:'+catpcha_word+',id:'+idfield)
                        splitpost = postData.split(',')
                        post = {}
                        for p in splitpost:
                            n = p.split(':')[0]
                            v = p.split(':')[1]
                            post[n] = v
                        post = urlencode(post)

                    if 'rawpost' in m:
                        post = m['rawpost']
                        # if '$LiveStreamRecaptcha' in post:
                        #     (captcha_challenge,catpcha_word,idfield)=processRecaptcha(m['page'],cookieJar)
                        #     if captcha_challenge:
                        #         post=post.replace('$LiveStreamRecaptcha','&manual_recaptcha_challenge_field='+captcha_challenge+'&recaptcha_response_field='+catpcha_word+'&id='+idfield)

                    link = ''
                    try:
                        if post:
                            response = urlopen(req, post)
                        else:
                            response = urlopen(req)
                        if response.info().get('Content-Encoding') == 'gzip':
                            from StringIO import StringIO
                            import gzip
                            buf = StringIO(response.read())
                            f = gzip.GzipFile(fileobj=buf)
                            link = f.read()
                        else:
                            link = response.read()

                        if 'proxy' in m and not current_proxies is None:
                            install_opener(build_opener(current_proxies))

                        link = javascriptUnEscape(link)
                        # print repr(link)
                        # print link  This just print whole webpage in LOG
                        if 'includeheaders' in m:
                            # link+=str(response.headers.get('Set-Cookie'))
                            link += '$$HEADERS_START$$:'
                            for b in response.headers:
                                link += b + ':' + response.headers.get(b) + '\n'
                            link += '$$HEADERS_END$$:'
                        # print link
                        response.close()
                    except:
                        pass
                    cachedPages[m['page']] = link
                    # print link
                    # print 'store link for',m['page'],forCookieJarOnly

                    if forCookieJarOnly:
                        return cookieJar  # do nothing
                elif m['page'] and not m['page'].startswith('http'):
                    if m['page'].startswith('$pyFunction:'):
                        val = doEval(m['page'].split('$pyFunction:')[1], '',
                                     cookieJar, m)
                        if forCookieJarOnly:
                            return cookieJar  # do nothing
                        link = val
                        link = javascriptUnEscape(link)
                    else:
                        link = m['page']

            if '$doregex' in m['expres']:
                m['expres'] = getRegexParsed(regexs, m['expres'], cookieJar,
                                             recursiveCall=True,
                                             cachedPages=cachedPages)

            if not m['expres'] == '':
                # print 'doing it ',m['expres']
                if '$LiveStreamCaptcha' in m['expres']:
                    val = askCaptcha(m, link, cookieJar)
                    # print 'url and val',url,val
                    url = url.replace("$doregex[" + k + "]", val)
                elif m['expres'].startswith('$pyFunction:') or '#$pyFunction' in m['expres']:
                    # print 'expeeeeeeeeeeeeeeeeeee',m['expres']
                    val = ''
                    if m['expres'].startswith('$pyFunction:'):
                        val = doEval(m['expres'].split('$pyFunction:')[1], link,
                                     cookieJar, m)
                    else:
                        val = doEvalFunction(m['expres'], link, cookieJar, m)
                    if 'ActivateWindow' in m['expres']:
                        return
                    if forCookieJarOnly:
                        return cookieJar  # do nothing
                    if 'listrepeat' in m:
                        listrepeat = m['listrepeat']
                        return listrepeat, eval(val), m, regexs, cookieJar
                    try:
                        url = url.replace(u"$doregex[" + k + "]", val)
                    except:
                        url = url.replace("$doregex[" + k + "]", val.decode("utf-8"))
                else:
                    if 'listrepeat' in m:
                        listrepeat = m['listrepeat']
                        ret = re.findall(m['expres'], link)
                        return listrepeat, ret, m, regexs
                    val = ''
                    if not link == '':
                        # print 'link',link
                        reg = re.compile(m['expres']).search(link)
                        try:
                            val = reg.group(1).strip()
                        except:
                            traceback.print_exc()
                    elif m['page'] == '' or m['page'] is None:
                        val = m['expres']
                    if rawPost:
                        # print 'rawpost'
                        val = quote_plus(val)
                    if 'htmlunescape' in m:
                        # val=unquote_plus(val)
                        try:
                            from HTMLParser import HTMLParser
                        except ImportError:
                            from html.parser import HTMLParser
                        val = HTMLParser().unescape(val)
                    try:
                        url = url.replace("$doregex[" + k + "]", val)
                    except:
                        url = url.replace("$doregex[" + k + "]", val.decode("utf-8"))
                    # print 'ur',url
                    # return val
            else:
                url = url.replace("$doregex[" + k + "]", '')

    if '$epoctime$' in url:
        url = url.replace('$epoctime$', getEpocTime())
    if '$epoctime2$' in url:
        url = url.replace('$epoctime2$', getEpocTime2())
    if '$GUID$' in url:
        import uuid
        url = url.replace('$GUID$', str(uuid.uuid1()).upper())
    if '$get_cookies$' in url:
        url = url.replace('$get_cookies$', getCookiesString(cookieJar))

    if recursiveCall:
        return url
    # print 'final url',repr(url)
    if url == "":
        return
    else:
        return url, setresolved
USERNAME = os.environ['DTUSER']
PASSWORD = os.environ['DTPASS']

xsl_filename = "report.xsl"
feed_url = ("http://" + DTSERVER + "/rest/management/reports/create/" +
            DASHBOARD + "?type=XML&format=XML+Export&filter=tf:" + TIMEFRAME)

abspath = os.path.abspath(__file__)
dname = os.path.dirname(abspath)
os.chdir(dname)

# Set up an HTTP request with username/password authentication
try:
    # create a password manager
    password_mgr = HTTPPasswordMgrWithDefaultRealm()
    # Add the username and password.
    password_mgr.add_password(None, feed_url, USERNAME, PASSWORD)
    opener = build_opener(HTTPBasicAuthHandler(password_mgr))
    file = opener.open(feed_url)
except URLError, e:
    print 'URLError: "%s"' % e
    raise

appdir = os.path.dirname(os.path.dirname(__file__))
xsl_file = os.path.join(appdir, "bin", xsl_filename)
out_dir = os.path.join(appdir, "log")
#print >> sys.stderr, "The XSL File", xsl_file

dom = ET.parse(file)
#print >> sys.stderr, "The Feed file", ET.tostring(dom, pretty_print=True)
def processEpisode(dir_to_process, org_NZB_name=None, status=None):
    # Default values
    host = "localhost"
    port = "8081"
    username = ""
    password = ""
    ssl = 0
    web_root = "/"

    default_url = host + ":" + port + web_root
    if ssl:
        default_url = "https://" + default_url
    else:
        default_url = "http://" + default_url

    # Get values from config_file
    config = RawConfigParser()
    config_filename = os.path.join(os.path.dirname(sys.argv[0]), "autoProcessTV.cfg")

    if not os.path.isfile(config_filename):
        print ("ERROR: " + config_filename + " doesn't exist")
        print ("copy /rename " + config_filename + ".sample and edit\n")
        print ("Trying default url: " + default_url + "\n")
    else:
        try:
            print ("Loading config from " + config_filename + "\n")

            with io.open(config_filename, "r") as fp:
                config.readfp(fp)

            # Replace default values with config_file values
            host = config.get("sickrage", "host")
            port = config.get("sickrage", "port")
            username = config.get("sickrage", "username")
            password = config.get("sickrage", "password")

            try:
                ssl = int(config.get("sickrage", "ssl"))
            except (NoOptionError, ValueError):
                pass

            try:
                web_root = config.get("sickrage", "web_root")
                if not web_root.startswith("/"):
                    web_root = "/" + web_root
                if not web_root.endswith("/"):
                    web_root += "/"
            except NoOptionError:
                pass
        except EnvironmentError:
            e = sys.exc_info()[1]
            print ("Could not read configuration file: " + str(e))
            # There was a config_file, don't use default values but exit
            sys.exit(1)

    params = {'quiet': 1, 'dir': dir_to_process}

    if org_NZB_name is not None:
        params['nzbName'] = org_NZB_name

    if status is not None:
        params['failed'] = status

    if ssl:
        protocol = "https://"
    else:
        protocol = "http://"

    url = protocol + host + ":" + port + web_root + "home/postprocess/processEpisode?" + urlencode(params)

    print ("Opening URL: " + url)

    try:
        password_mgr = HTTPPasswordMgrWithDefaultRealm()
        password_mgr.add_password(None, url, username, password)
        handler = HTTPBasicAuthHandler(password_mgr)
        opener = build_opener(handler)
        install_opener(opener)

        result = opener.open(url).readlines()
        for line in result:
            if line:
                print (line.strip())
    except IOError:
        e = sys.exc_info()[1]
        print ("Unable to open URL: " + str(e))
        sys.exit(1)
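# A hypothetical invocation of processEpisode() above, as a downloader's
# post-processing hook might call it; the directory and NZB name are
# placeholders.
processEpisode("/downloads/complete/Some.Show.S01E01",
               org_NZB_name="Some.Show.S01E01.nzb",
               status=0)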
def request(self, host, handler, request_body, verbose=0):
    self.verbose = verbose
    if self.proxy_enabled:
        if self.proxy_server.startswith('http://'):
            proxy_server = self.proxy_server[7:]
        else:
            proxy_server = self.proxy_server
        if useAuthOnProxy:
            self.proxyurl = 'http://{0}:{1}@{2}'.format(
                self.proxy_user, self.proxy_pass, proxy_server)
        else:
            self.proxyurl = proxy_server
    else:
        self.proxyurl = None

    puser_pass = None
    if self.proxyurl is not None:
        type, r_type = splittype(self.proxyurl)
        phost, XXX = splithost(r_type)
        if '@' in phost:
            user_pass, phost = phost.split('@', 1)
            if ':' in user_pass:
                self.proxy_user, self.proxy_pass = user_pass.split(':', 1)
                puser_pass = '******' % (unquote(self.proxy_user),
                                         unquote(self.proxy_pass))
                # puser_pass = base64.encodestring(
                #     '%s:%s' % (unquote(self.proxy_user), unquote(self.proxy_pass))).strip()
        proxies = {'http': 'http://%s' % phost, 'https': None}

    host = unquote(host)
    address = "http://%s%s" % (host, handler)

    request = Request(address, request_body)
    # request.add_data(request_body)
    request.add_header('User-agent', self.user_agent)
    request.add_header("Content-Type", "text/xml")

    # HTTP Auth
    password_mgr = HTTPPasswordMgrWithDefaultRealm()
    top_level_url = serverTopLevelURL
    password_mgr.add_password(None, top_level_url, httpAuthName, httpAuthPassword)
    handler = HTTPBasicAuthHandler(password_mgr)

    # Cookies
    cj = CookieJar()

    if puser_pass:
        # NTLM
        passman = HTTPPasswordMgrWithDefaultRealm()
        passman.add_password(None, serverTopLevelURL, self.proxy_user, self.proxy_pass)
        authNTLM = HTTPNtlmAuthHandler.HTTPNtlmAuthHandler(passman)
        request.add_header('Proxy-authorization', 'Basic ' + puser_pass)
        proxy_support = ProxyHandler(proxies)
        opener = build_opener(handler, proxy_support, HTTPCookieProcessor(cj), authNTLM)
    elif self.proxyurl:
        # Proxy without auth
        proxy_support = ProxyHandler(proxies)
        opener = build_opener(proxy_support, handler, HTTPCookieProcessor(cj))
    else:
        # Direct connection
        proxy_support = ProxyHandler({})
        opener = build_opener(proxy_support, handler, HTTPCookieProcessor(cj))

    install_opener(opener)
    response = urlopen(request, timeout=env_server.get_timeout())
    return self.parse_response(response)
def _post_url(self, event, url=None):
    "Posts a URL to delicious.com"
    title = self._get_title(url)

    con_re = re.compile(r'!n=|!')
    connection_body = con_re.split(event.sender['connection'])
    if len(connection_body) == 1:
        connection_body.append(event.sender['connection'])

    ip_re = re.compile(r'\.IP$|unaffiliated')
    if ip_re.search(connection_body[1]) != None:
        connection_body[1] = ''

    if ibid.sources[event.source].type == 'jabber':
        obfusc_conn = ''
        obfusc_chan = event.channel.replace('@', '^')
    else:
        at_re = re.compile(r'@\S+?\.')
        obfusc_conn = at_re.sub('^', connection_body[1])
        obfusc_chan = at_re.sub('^', event.channel)

    tags = u' '.join((event.sender['nick'], obfusc_conn, obfusc_chan, event.source))

    data = {
        'url': url.encode('utf-8'),
        'description': title.encode('utf-8'),
        'tags': tags.encode('utf-8'),
        'replace': 'yes',
        'dt': event.time.strftime('%Y-%m-%dT%H:%M:%SZ'),
        'extended': event.message['raw'].encode('utf-8'),
    }

    if self.service.lower() == 'delicious':
        service = ('del.icio.us API', 'https://api.del.icio.us')
    elif self.service.lower() == 'faves':
        service = ('Faves', 'https://secure.faves.com')
    else:
        log.error(u'Unknown social bookmarking service: %s', self.service)
        return

    auth_handler = HTTPBasicAuthHandler()
    auth_handler.add_password(service[0], service[1], self.username, self.password)
    opener = build_opener(auth_handler)

    posturl = service[1] + '/v1/posts/add?' + urlencode(data)

    try:
        resp = opener.open(posturl).read()
        if 'done' in resp:
            log.debug(u"Posted url '%s' to %s, posted in %s on %s "
                      u"by %s/%i (%s)", url, self.service, event.channel,
                      event.source, event.account, event.identity,
                      event.sender['connection'])
        else:
            log.error(u"Error posting url '%s' to %s: %s",
                      url, self.service, resp)
    except HTTPError, e:
        if e.code == 401:
            log.error(u"Incorrect password for %s, couldn't post", self.service)
def OP_RETURN_bitcoin_cmd(command, testnet, *args):  # more params are read from here
    if OP_RETURN_BITCOIN_USE_CMD:
        sub_args = [OP_RETURN_BITCOIN_PATH]

        if testnet:
            sub_args.append('-testnet')

        sub_args.append(command)

        for arg in args:
            sub_args.append(json.dumps(arg) if isinstance(arg, (dict, list, tuple)) else str(arg))

        raw_result = subprocess.check_output(sub_args).decode("utf-8").rstrip("\n")

        try:  # decode JSON if possible
            result = json.loads(raw_result)
        except ValueError:
            result = raw_result

    else:
        request = {
            'id': str(time.time()) + '-' + str(random.randint(100000, 999999)),
            'method': command,
            'params': args,
        }

        port = OP_RETURN_BITCOIN_PORT
        user = OP_RETURN_BITCOIN_USER
        password = OP_RETURN_BITCOIN_PASSWORD

        if not (len(port) and len(user) and len(password)):
            conf_lines = open(os.path.expanduser('~') + '/.bitcoin/bitcoin.conf').readlines()

            for conf_line in conf_lines:
                parts = conf_line.strip().split('=', 1)  # up to 2 parts

                if (parts[0] == 'rpcport') and not len(port):
                    port = int(parts[1])
                if (parts[0] == 'rpcuser') and not len(user):
                    user = parts[1]
                if (parts[0] == 'rpcpassword') and not len(password):
                    password = parts[1]

        if not len(port):
            port = 18332 if testnet else 8332

        if not (len(user) and len(password)):
            return None  # no point trying in this case

        url = 'http://' + OP_RETURN_BITCOIN_IP + ':' + str(port) + '/'

        try:
            from urllib2 import (HTTPPasswordMgrWithDefaultRealm, HTTPBasicAuthHandler,
                                 build_opener, install_opener, urlopen)
        except ImportError:
            from urllib.request import (HTTPPasswordMgrWithDefaultRealm, HTTPBasicAuthHandler,
                                        build_opener, install_opener, urlopen)

        passman = HTTPPasswordMgrWithDefaultRealm()
        passman.add_password(None, url, user, password)
        auth_handler = HTTPBasicAuthHandler(passman)
        opener = build_opener(auth_handler)
        install_opener(opener)

        raw_result = urlopen(url, json.dumps(request).encode('utf-8'),
                             OP_RETURN_NET_TIMEOUT).read()

        result_array = json.loads(raw_result.decode('utf-8'))
        result = result_array['result']

    return result
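# A hypothetical call of OP_RETURN_bitcoin_cmd() above, assuming a reachable
# bitcoind with the configured RPC credentials; 'getblockcount' is a standard
# bitcoind JSON-RPC method that returns the current block height.
height = OP_RETURN_bitcoin_cmd('getblockcount', False)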
def open_url(url, config, data=None, handlers=None):
    """Attempts to open a connection to a specified URL.

    @param url: URL to attempt to open
    @param config: SSL context configuration
    @type config: Configuration
    @param data: HTTP POST data
    @type data: str
    @param handlers: list of custom urllib2 handlers to add to the request
    @type handlers: iterable
    @return: tuple (
        returned HTTP status code or 0 if an error occurred,
        returned message or error description,
        response object)
    """
    debuglevel = 1 if config.debug else 0

    # Set up handlers for URL opener.
    if config.cookie:
        cj = config.cookie
    else:
        cj = cookielib.CookieJar()

    # Use a cookie processor that accumulates cookies when redirects occur so
    # that an application can redirect for authentication and retain both any
    # cookies for the application and the security system (c.f.,
    # urllib2.HTTPCookieProcessor which replaces cookies).
    cookie_handler = AccumulatingHTTPCookieProcessor(cj)

    if not handlers:
        handlers = []
    handlers.append(cookie_handler)

    if config.debug:
        http_handler = HTTPHandler(debuglevel=debuglevel)
        https_handler = HTTPSContextHandler(config.ssl_context, debuglevel=debuglevel)
        handlers.extend([http_handler, https_handler])

    if config.http_basicauth:
        # currently only supports http basic auth
        auth_handler = HTTPBasicAuthHandler(HTTPPasswordMgrWithDefaultRealm())
        auth_handler.add_password(realm=None, uri=url,
                                  user=config.httpauth[0],
                                  passwd=config.httpauth[1])
        handlers.append(auth_handler)

    # Explicitly remove proxy handling if the host is one listed in the value of
    # the no_proxy environment variable because urllib2 does use proxy settings
    # set via http_proxy and https_proxy, but does not take the no_proxy value
    # into account.
    if not _should_use_proxy(url, config.no_proxy):
        handlers.append(urllib2.ProxyHandler({}))
        log.debug("Not using proxy")
    elif config.proxies:
        handlers.append(urllib2.ProxyHandler(config.proxies))
        log.debug("Configuring proxies: %s" % config.proxies)

    opener = build_opener(*handlers, ssl_context=config.ssl_context)

    headers = config.headers
    if headers is None:
        headers = {}
    request = urllib2.Request(url, data, headers)

    # Open the URL and check the response.
    return_code = 0
    return_message = ''
    response = None
    try:
        response = opener.open(request)
        return_message = response.msg
        return_code = response.code
        if log.isEnabledFor(logging.DEBUG):
            for index, cookie in enumerate(cj):
                log.debug("%s : %s", index, cookie)
    except urllib2.HTTPError, exc:
        return_code = exc.code
        return_message = "Error: %s" % exc.msg
        if log.isEnabledFor(logging.DEBUG):
            log.debug("%s %s", exc.code, exc.msg)