def bootOE(net):
    "Start the LINC optical emulator within a mininet instance"
    LINCSwitch.opticalJSON = {}
    linkConfig = []
    devices = []
    # setting up the controllers for LINCSwitch class
    LINCSwitch.controllers = net.controllers

    for switch in net.switches:
        if isinstance(switch, OpticalSwitch):
            devices.append(switch.json())
        else:
            devices.append(LINCSwitch.switchJSON(switch))
    LINCSwitch.opticalJSON['devices'] = devices

    for link in net.links:
        if isinstance(link, LINCLink):
            linkConfig.append(link.json())
    LINCSwitch.opticalJSON['links'] = linkConfig

    info('*** Writing Topology.json file\n')
    topoJSON = LINCSwitch.makeTopoJSON()
    with open('Topology.json', 'w') as outfile:
        json.dump(topoJSON, outfile, indent=4, separators=(',', ': '))

    info('*** Converting Topology.json to linc-oe format (TopoConfig.json) file (no oecfg)\n')
    topoConfigJson = {}
    dpIdToName = {}

    topoConfigJson["switchConfig"] = LINCSwitch.getSwitchConfig(dpIdToName)
    topoConfigJson["linkConfig"] = LINCSwitch.getLinkConfig(dpIdToName)

    # Writing to TopoConfig.json
    with open('TopoConfig.json', 'w') as outfile:
        json.dump(topoConfigJson, outfile, indent=4, separators=(',', ': '))

    info('*** Creating sys.config...\n')
    output = quietRun('%s/config_generator TopoConfig.json %s/sys.config.template %s %s'
                      % (LINCSwitch.configGen, LINCSwitch.configGen,
                         LINCSwitch.controllers[0].ip, LINCSwitch.controllers[0].port),
                      shell=True)
    if output:
        error('***ERROR: Error creating sys.config file: %s\n' % output)
        return False

    info('*** Setting multiple controllers in sys.config...\n')
    searchStr = '\[{"Switch.*$'
    ctrlStr = ''
    for index in range(len(LINCSwitch.controllers)):
        ctrlStr += '{"Switch%d-Controller","%s",%d,tcp},' % (
            index, net.controllers[index].ip, net.controllers[index].port)
    replaceStr = '[%s]},' % ctrlStr[:-1]  # Cut off last comma
    sedCmd = 'sed -i \'s/%s/%s/\' sys.config' % (searchStr, replaceStr)
    output = quietRun(sedCmd, shell=True)

    info('*** Copying sys.config to linc-oe directory: ', output + '\n')
    output = quietRun('cp -v sys.config %s/rel/linc/releases/1.0/'
                      % LINCSwitch.lincDir, shell=True).strip('\n')
    info(output + '\n')

    info('*** Adding taps and bringing them up...\n')
    LINCSwitch.setupInts(LINCSwitch.getTaps())

    info('*** removing pipes if any\n')
    quietRun('rm /tmp/home/%s/linc-oe/rel/linc/*' % LINCSwitch.user, shell=True)

    info('*** Starting linc OE...\n')
    output = quietRun('%s/rel/linc/bin/linc start' % LINCSwitch.lincDir, shell=True)
    if output:
        error('***ERROR: LINC-OE: %s' % output + '\n')
        quietRun('%s/rel/linc/bin/linc stop' % LINCSwitch.lincDir, shell=True)
        return False

    info('*** Waiting for linc-oe to start...\n')
    LINCSwitch.waitStarted(net)

    info('*** Adding cross-connect (tap) interfaces to packet switches...\n')
    for link in net.links:
        if isinstance(link, LINCLink):
            if link.annotations['optical.type'] == 'cross-connect':
                for intf in [link.intf1, link.intf2]:
                    if not isinstance(intf, LINCIntf):
                        intfList = [intf.link.intf1, intf.link.intf2]
                        intfList.remove(intf)
                        intf2 = intfList[0]
                        intf.node.attach(LINCSwitch.findTap(intf2.node,
                                                            intf2.node.ports[intf2]))

    info('*** Waiting for all devices to be available in ONOS...\n')
    url = 'http://%s:8181/onos/v1/devices' % LINCSwitch.controllers[0].ip
    time = 0
    # Set up password authentication
    pw_mgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
    pw_mgr.add_password(None, url, LINCSwitch.restUser, LINCSwitch.restPass)
    handler = urllib2.HTTPBasicAuthHandler(pw_mgr)
    opener = urllib2.build_opener(handler)
    opener.open(url)
    urllib2.install_opener(opener)

    while True:
        response = json.load(urllib2.urlopen(url))
        devs = response.get('devices')

        # Wait for all devices to be registered and available
        if len(devs) == len(devices) and all(d['available'] for d in devs):
            break

        if time >= TIMEOUT:
            error('***ERROR: ONOS did not register devices within %s seconds\n' % TIMEOUT)
            break

        time += SLEEP_TIME
        sleep(SLEEP_TIME)

    info('*** Pushing Topology.json to ONOS\n')
    for index in range(len(LINCSwitch.controllers)):
        output = quietRun('%s/tools/test/bin/onos-topo-cfg %s Topology.json network/configuration/ &'
                          % (LINCSwitch.onosDir, LINCSwitch.controllers[index].ip),
                          shell=True)
        # successful output contains the two characters '{}'
        # if there is more output than this, there is an issue
        if output.strip('{}'):
            warn('***WARNING: Could not push topology file to ONOS: %s\n' % output)
def _create_basic_auth_handler(self):
    password_man = urllib2.HTTPPasswordMgrWithDefaultRealm()
    password_man.add_password(None, self._get_url(), self._username,
                              self._password)
    auth_handler = urllib2.HTTPBasicAuthHandler(password_man)
    return auth_handler
def call(self):
    user = self.user
    pwd = self.pwd
    portalUrl = self.portalUrl
    serverAdminUrl = self.serverAdminUrl
    folderList = [self.folder]
    headers = {"Content-type": "application/x-www-form-urlencoded",
               "Accept": "text/plain"}
    token = self.getToken()
    if token is None:
        raise ValueError('Unable to fetch token, token value is None!')
    else:
        serverAdminServicesUrl = serverAdminUrl + "/services"
        print token
        for folder in folderList:
            passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
            passmgr.add_password(None, serverAdminUrl, user, pwd)
            authNTLM = HTTPNtlmAuthHandler.HTTPNtlmAuthHandler(passmgr)
            opener = urllib2.build_opener(authNTLM)
            urllib2.install_opener(opener)
            try:
                catalog = json.load(urllib2.urlopen(
                    serverAdminServicesUrl + "/" + "?f=json&token=" + token))
                print "Root"
                if "error" in catalog:
                    return
                folders = catalog['folders']
                for folder in folders:
                    print folder
                    if "error" in catalog:
                        return
                    if folder == "printtool":
                        catalog = json.load(urllib2.urlopen(
                            serverAdminServicesUrl + "/" + folder +
                            "?f=json&token=" + token))
                        if "error" in catalog:
                            return
                        services = catalog["services"]
                        for service in services:
                            stop = "STOP"
                            start = "START"
                            params = urllib.urlencode({'token': token, 'f': 'json'})
                            gpServiceUrl = serverAdminServicesUrl + "/" + folder + \
                                "/" + service["serviceName"] + \
                                "." + service['type']
                            req = urllib2.Request(
                                gpServiceUrl + "/" + stop, params, headers=headers)
                            response = urllib2.urlopen(req)
                            if response.code == 200:
                                print "Start : %s" % time.ctime()
                                time.sleep(5)
                                print "End : %s" % time.ctime()
                                req = urllib2.Request(
                                    gpServiceUrl + "/" + start, params,
                                    headers=headers)
                                response = urllib2.urlopen(req)
                                if response.code != 200:
                                    raise IOError(
                                        'Unable to perform POST request to start the {0} service'.format(
                                            service["serviceName"]))
                            else:
                                raise IOError(
                                    'Unable to perform POST request to stop the {0} service!'.format(
                                        service["serviceName"]))
            except Exception as e:
                print e.args
                self.sendEmail(e.args)
def request_registration(self, repository):
    """
        Send out a registration request to a peer repository

        @param repository: a sync_repository row
    """
    db = current.db
    xml = current.manager.xml

    if not repository.url:
        return True
    _debug("S3Sync.request_registration(%s)" % (repository.url))

    # Construct the URL
    config = self.__get_config()
    proxy = repository.proxy or config.proxy or None
    url = "%s/sync/repository/register.xml?repository=%s" % \
          (repository.url, config.uuid)
    _debug("...send to URL %s" % url)

    username = repository.username
    password = repository.password

    # Generate the request
    import urllib2
    req = urllib2.Request(url=url)
    handlers = []
    if proxy:
        proxy_handler = urllib2.ProxyHandler({"http": proxy})
        handlers.append(proxy_handler)
    if username and password:
        import base64
        base64string = base64.encodestring('%s:%s' %
                                           (username, password))[:-1]
        req.add_header("Authorization", "Basic %s" % base64string)
        passwd_manager = urllib2.HTTPPasswordMgrWithDefaultRealm()
        passwd_manager.add_password(realm=None,
                                    uri=url,
                                    user=username,
                                    passwd=password)
        auth_handler = urllib2.HTTPBasicAuthHandler(passwd_manager)
        handlers.append(auth_handler)
    if handlers:
        opener = urllib2.build_opener(*handlers)
        urllib2.install_opener(opener)

    # Execute the request
    success = True
    remote = False
    try:
        f = urllib2.urlopen(req)
    except urllib2.HTTPError, e:
        result = self.log.FATAL
        remote = True  # Peer error
        code = e.code
        message = e.read()
        success = False
        try:
            message_json = json.loads(message)
            message = message_json.get("message", message)
        except:
            pass
def _get_opener(self, server_address):
    passman = urllib2.HTTPPasswordMgrWithDefaultRealm()
    passman.add_password(None, server_address,
                         self.username.get_text(),
                         hashlib.md5(self.password.get_text()).hexdigest())
    authhandler = urllib2.HTTPBasicAuthHandler(passman)
    return urllib2.build_opener(authhandler)
import urllib2
import cookielib
import sys
import config
import os

path = os.path.abspath(os.curdir)
survey = path + "/XML/codec_call_survey.xml"

# storing the Authentication token in a file in the OS vs. leaving it in the script
username = config.codec_username
password = config.codec_password

f = open(survey, 'r')
string = f.read()

location = str(sys.argv[1])
url = 'http://' + location + '/putxml'
param_data = string
#params = urllib.urlencode(param_data)

passman = urllib2.HTTPPasswordMgrWithDefaultRealm()
passman.add_password(None, url, username, password)
authhandler = urllib2.HTTPBasicAuthHandler(passman)
opener = urllib2.build_opener(authhandler)
opener.addheaders = [('Content-Type', 'text/xml')]
urllib2.install_opener(opener)
urllib2.urlopen(url, param_data)
def __pull(self, repository, task):
    """
        Outgoing pull

        @param repository: the repository (sync_repository row)
        @param task: the task (sync_task row)
    """
    ignore_errors = True
    manager = current.manager
    xml = manager.xml

    resource_name = task.resource_name
    prefix, name = resource_name.split("_", 1)
    _debug("S3Sync.__pull(%s, %s)" % (repository.url, resource_name))

    # Construct the URL
    config = self.__get_config()
    proxy = repository.proxy or config.proxy or None
    url = "%s/sync/sync.xml?resource=%s&repository=%s" % \
          (repository.url, resource_name, config.uuid)
    username = repository.username
    password = repository.password
    last_sync = task.last_sync

    # Get the target resource for this task
    resource = manager.define_resource(prefix, name)

    # Add msince and deleted to the URL
    if last_sync and task.update_policy not in ("THIS", "OTHER"):
        url += "&msince=%s" % xml.encode_iso_datetime(last_sync)
    url += "&include_deleted=True"
    _debug("...pull from URL %s" % url)

    response = None
    url_split = url.split("://", 1)
    if len(url_split) == 2:
        protocol, path = url_split
    else:
        protocol, path = "http", None

    # Prepare the request
    import urllib2
    req = urllib2.Request(url=url)
    handlers = []

    # Proxy handling
    if proxy:
        _debug("using proxy=%s" % proxy)
        proxy_handler = urllib2.ProxyHandler({protocol: proxy})
        handlers.append(proxy_handler)

    # Authentication handling
    if username and password:
        # Send auth data unsolicitedly (the only way with Eden instances):
        import base64
        base64string = base64.encodestring('%s:%s' %
                                           (username, password))[:-1]
        req.add_header("Authorization", "Basic %s" % base64string)
        # Just in case the peer does not accept that, add a 401 handler:
        passwd_manager = urllib2.HTTPPasswordMgrWithDefaultRealm()
        passwd_manager.add_password(realm=None,
                                    uri=url,
                                    user=username,
                                    passwd=password)
        auth_handler = urllib2.HTTPBasicAuthHandler(passwd_manager)
        handlers.append(auth_handler)

    # Install all handlers
    if handlers:
        opener = urllib2.build_opener(*handlers)
        urllib2.install_opener(opener)

    remote = False
    output = None
    response = None

    # Execute the request
    try:
        f = urllib2.urlopen(req)
    except urllib2.HTTPError, e:
        result = self.log.ERROR
        remote = True  # Peer error
        code = e.code
        message = e.read()
        try:
            # Sahana-Eden would send a JSON message,
            # try to extract the actual error message:
            message_json = json.loads(message)
            message = message_json.get("message", message)
        except:
            pass
        # Prefix as peer error and strip XML markup from the message
        # @todo: better method to do this?
        message = "<message>%s</message>" % message
        try:
            markup = etree.XML(message)
            message = markup.xpath(".//text()")
            if message:
                message = " ".join(message)
            else:
                message = ""
        except etree.XMLSyntaxError:
            pass
        output = xml.json_message(False, code, message, tree=None)
class Beaker:
    """Runtime support for Python code in Beaker."""

    session_id = ''
    core_url = '127.0.0.1:' + os.environ['beaker_core_port']

    _beaker_password_mgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
    _beaker_password_mgr.add_password(None, core_url, 'beaker',
                                      os.environ['beaker_core_password'])
    _beaker_url_opener = urllib2.build_opener(
        urllib2.HTTPBasicAuthHandler(_beaker_password_mgr),
        urllib2.ProxyHandler({}))

    def set4(self, var, val, unset, sync):
        args = {'name': var, 'session': self.session_id, 'sync': sync}
        if not unset:
            val = transform(val)
            args['value'] = json.dumps(val, cls=DataFrameEncoder)
        req = urllib2.Request(
            'http://' + self.core_url + '/rest/namespace/set',
            urllib.urlencode(args))
        try:
            conn = self._beaker_url_opener.open(req)
        except Exception:
            raise NameError("Server error, likely memory exceeded")
        reply = conn.read()
        if reply != 'ok':
            raise NameError(reply)

    def get(self, var):
        result = self._getRaw(var)
        if not result['defined']:
            raise NameError('name \'' + var +
                            '\' is not defined in notebook namespace')
        return transformBack(result['value'])

    def _getRaw(self, var):
        req = urllib2.Request('http://' + self.core_url +
                              '/rest/namespace/get?' +
                              urllib.urlencode({
                                  'name': var,
                                  'session': self.session_id
                              }))
        try:
            conn = self._beaker_url_opener.open(req)
        except Exception:
            raise NameError(
                "Server error, the 16MB autotranslation limit has been exceeded")
        result = json.loads(conn.read())
        return result

    def set_session(self, id):
        self.session_id = id

    def register_output(self):
        ip = IPython.InteractiveShell.instance()
        ip.display_formatter.formatters['application/json'] = MyJSONFormatter(
            parent=ip.display_formatter)

    def set(self, var, val):
        return self.set4(var, val, False, True)

    def unset(self, var):
        return self.set4(var, None, True, True)

    def isDefined(self, var):
        return self._getRaw(var)['defined']

    def createOutputContainer(self):
        return OutputContainer()

    def showProgressUpdate(self):
        return "WARNING: python language plugin does not support progress updates"

    def evaluate(self, filter):
        args = {'filter': filter, 'session': self.session_id}
        req = urllib2.Request(
            'http://' + self.core_url + '/rest/notebookctrl/evaluate',
            urllib.urlencode(args))
        conn = self._beaker_url_opener.open(req)
        result = json.loads(conn.read())
        return transformBack(result)

    def evaluateCode(self, evaluator, code):
        args = {
            'evaluator': evaluator,
            'code': code,
            'session': self.session_id
        }
        req = urllib2.Request(
            'http://' + self.core_url + '/rest/notebookctrl/evaluateCode',
            urllib.urlencode(args))
        conn = self._beaker_url_opener.open(req)
        result = json.loads(conn.read())
        return transformBack(result)

    def showStatus(self, msg):
        args = {'msg': msg, 'session': self.session_id}
        req = urllib2.Request(
            'http://' + self.core_url + '/rest/notebookctrl/showStatus',
            urllib.urlencode(args))
        conn = self._beaker_url_opener.open(req)
        result = conn.read()
        return result == "true"

    def clearStatus(self, msg):
        args = {'msg': msg, 'session': self.session_id}
        req = urllib2.Request(
            'http://' + self.core_url + '/rest/notebookctrl/clearStatus',
            urllib.urlencode(args))
        conn = self._beaker_url_opener.open(req)
        result = conn.read()
        return result == "true"

    def showTransientStatus(self, msg):
        args = {'msg': msg, 'session': self.session_id}
        req = urllib2.Request(
            'http://' + self.core_url + '/rest/notebookctrl/showTransientStatus',
            urllib.urlencode(args))
        conn = self._beaker_url_opener.open(req)
        result = conn.read()
        return result == "true"

    def getEvaluators(self):
        req = urllib2.Request('http://' + self.core_url +
                              '/rest/notebookctrl/getEvaluators?' +
                              urllib.urlencode({'session': self.session_id}))
        conn = self._beaker_url_opener.open(req)
        result = json.loads(conn.read())
        return transformBack(result)

    def getVersion(self):
        req = urllib2.Request('http://' + self.core_url +
                              '/rest/util/version?' +
                              urllib.urlencode({'session': self.session_id}))
        conn = self._beaker_url_opener.open(req)
        return transformBack(conn.read().decode())

    def getVersionNumber(self):
        req = urllib2.Request('http://' + self.core_url +
                              '/rest/util/getVersionInfo?' +
                              urllib.urlencode({'session': self.session_id}))
        conn = self._beaker_url_opener.open(req)
        result = json.loads(conn.read().decode())
        return transformBack(result['version'])

    def getCodeCells(self, filter):
        req = urllib2.Request('http://' + self.core_url +
                              '/rest/notebookctrl/getCodeCells?' +
                              urllib.urlencode({
                                  'session': self.session_id,
                                  'filter': filter
                              }))
        conn = self._beaker_url_opener.open(req)
        result = json.loads(conn.read())
        return transformBack(result)

    def setCodeCellBody(self, name, body):
        args = {'name': name, 'body': body, 'session': self.session_id}
        req = urllib2.Request(
            'http://' + self.core_url + '/rest/notebookctrl/setCodeCellBody',
            urllib.urlencode(args))
        conn = self._beaker_url_opener.open(req)
        result = conn.read()
        return result == "true"

    def setCodeCellEvaluator(self, name, evaluator):
        args = {
            'name': name,
            'evaluator': evaluator,
            'session': self.session_id
        }
        req = urllib2.Request(
            'http://' + self.core_url + '/rest/notebookctrl/setCodeCellEvaluator',
            urllib.urlencode(args))
        conn = self._beaker_url_opener.open(req)
        result = conn.read()
        return result == "true"

    def setCodeCellTags(self, name, tags):
        args = {'name': name, 'tags': tags, 'session': self.session_id}
        req = urllib2.Request(
            'http://' + self.core_url + '/rest/notebookctrl/setCodeCellTags',
            urllib.urlencode(args))
        conn = self._beaker_url_opener.open(req)
        result = conn.read()
        return result == "true"

    def __setattr__(self, name, value):
        if 'session_id' == name:
            self.__dict__['session_id'] = value
            return
        return self.set(name, value)

    def __getattr__(self, name):
        return self.get(name)

    def __contains__(self, name):
        return self.isDefined(name)

    def __delattr__(self, name):
        return self.unset(name)
def _make_opener(self):
    mgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
    mgr.add_password(None, self._uri, self.user, self.passwd)
    handler = urllib2.HTTPBasicAuthHandler(mgr)
    self.opener = urllib2.build_opener(handler)
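A hedged usage sketch for the opener-caching pattern above, assuming `client` is an instance of the surrounding class; the `/status` endpoint is invented for illustration:

# Hypothetical usage of _make_opener(): every request made through
# client.opener answers HTTP 401 challenges with the stored credentials.
client._make_opener()
response = client.opener.open(client._uri + '/status')  # assumed endpoint
print response.read()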
def fetch(url, cred, authdata, postlines, xc, tc, dest, nets, chans, timeout,
          retry_count, retry_wait, finished, lock, verbose):
    try:
        url_handlers = []

        if cred and url.post_qa() in cred:  # use static credentials
            query_url = url.post_qa()
            (user, passwd) = cred[query_url]
            mgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
            mgr.add_password(None, query_url, user, passwd)
            h = urllib2.HTTPDigestAuthHandler(mgr)
            url_handlers.append(h)

        elif authdata:  # use the pgp-based auth method if supported
            wadl_url = url.wadl()
            auth_url = url.auth()
            query_url = url.post_qa()

            try:
                fd = retry(urllib2.urlopen, wadl_url, None, timeout,
                           retry_count, retry_wait, verbose)

                try:
                    root = ET.parse(fd).getroot()
                    ns = "{http://wadl.dev.java.net/2009/02}"
                    el = "resource[@path='auth']"

                    if root.find(".//" + ns + el) is None:
                        raise AuthNotSupported

                finally:
                    fd.close()

                msg("authenticating at %s" % auth_url, verbose)

                try:
                    fd = retry(urllib2.urlopen, auth_url, authdata, timeout,
                               retry_count, retry_wait, verbose)

                    try:
                        resp = fd.read()
                        if isinstance(resp, bytes):
                            resp = resp.decode('utf-8')

                        if fd.getcode() == 200:
                            try:
                                (user, passwd) = resp.split(':')
                                mgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
                                mgr.add_password(None, query_url, user, passwd)
                                h = urllib2.HTTPDigestAuthHandler(mgr)
                                url_handlers.append(h)

                            except ValueError:
                                msg("invalid auth response: %s" % resp)
                                return

                            msg("authentication at %s successful"
                                % auth_url, verbose)

                        else:
                            msg("authentication at %s failed with HTTP status "
                                "code %d:\n%s" % (auth_url, fd.getcode(), resp))
                            query_url = url.post()

                    finally:
                        fd.close()

                except urllib2.HTTPError as e:
                    resp = e.read()
                    if isinstance(resp, bytes):
                        resp = resp.decode('utf-8')
                    msg("authentication at %s failed with HTTP status "
                        "code %d:\n%s" % (auth_url, e.code, resp))
                    query_url = url.post()

                except (urllib2.URLError, socket.error) as e:
                    msg("authentication at %s failed: %s" % (auth_url, str(e)))
                    query_url = url.post()

            except (urllib2.URLError, socket.error, ET.ParseError) as e:
                msg("reading %s failed: %s" % (wadl_url, str(e)))
                query_url = url.post()

            except AuthNotSupported:
                msg("authentication at %s is not supported" % auth_url,
                    verbose)
                query_url = url.post()

        else:  # fetch data anonymously
            query_url = url.post()

        opener = urllib2.build_opener(*url_handlers)

        i = 0
        n = len(postlines)

        while i < len(postlines):
            if n == len(postlines):
                msg("getting data from %s" % query_url, verbose)
            else:
                msg("getting data from %s (%d%%..%d%%)"
                    % (query_url,
                       100 * i / len(postlines),
                       min(100, 100 * (i + n) / len(postlines))),
                    verbose)

            postdata = (''.join((p + '=' + v + '\n')
                                for (p, v) in url.post_params()) +
                        ''.join(postlines[i:i + n]))

            if not isinstance(postdata, bytes):
                postdata = postdata.encode('utf-8')

            try:
                fd = retry(opener.open, query_url, postdata, timeout,
                           retry_count, retry_wait, verbose)

                try:
                    if fd.getcode() == 204:
                        msg("received no data from %s" % query_url)

                    elif fd.getcode() != 200:
                        resp = fd.read()
                        if isinstance(resp, bytes):
                            resp = resp.decode('utf-8')
                        msg("getting data from %s failed with HTTP status "
                            "code %d:\n%s" % (query_url, fd.getcode(), resp))
                        break

                    else:
                        size = 0

                        content_type = fd.info().get('Content-Type')
                        content_type = content_type.split(';')[0]

                        if content_type == "application/vnd.fdsn.mseed":
                            record_idx = 1

                            # NOTE: cannot use fixed chunk size, because
                            # response from single node mixes mseed record
                            # sizes. E.g., a 4096 byte chunk could contain 7
                            # 512 byte records and the first 512 bytes of a
                            # 4096 byte record, which would not be completed
                            # in the same write operation
                            while True:
                                # read fixed header
                                buf = fd.read(FIXED_DATA_HEADER_SIZE)
                                if not buf:
                                    break

                                record = buf
                                curr_size = len(buf)

                                # get offset of data (value before last,
                                # 2 bytes, unsigned short)
                                data_offset_idx = FIXED_DATA_HEADER_SIZE - 4
                                data_offset, = struct.unpack(
                                    b'!H',
                                    buf[data_offset_idx:data_offset_idx + 2])

                                if data_offset >= FIXED_DATA_HEADER_SIZE:
                                    remaining_header_size = data_offset - \
                                        FIXED_DATA_HEADER_SIZE

                                elif data_offset == 0:
                                    # This means that blockettes can follow,
                                    # but no data samples. Use minimum record
                                    # size to read following blockettes. This
                                    # can still fail if blockette 1000 is after
                                    # position 256
                                    remaining_header_size = \
                                        MINIMUM_RECORD_LENGTH - \
                                        FIXED_DATA_HEADER_SIZE

                                else:
                                    # Full header size cannot be smaller than
                                    # fixed header size. This is an error.
                                    msg("record %s: data offset smaller than "
                                        "fixed header length: %s, bailing "
                                        "out" % (record_idx, data_offset))
                                    break

                                buf = fd.read(remaining_header_size)
                                if not buf:
                                    msg("remaining header corrupt in record "
                                        "%s" % record_idx)
                                    break

                                record += buf
                                curr_size += len(buf)

                                # scan variable header for blockette 1000
                                blockette_start = 0
                                b1000_found = False

                                while blockette_start < remaining_header_size:
                                    # 2 bytes, unsigned short
                                    blockette_id, = struct.unpack(
                                        b'!H',
                                        buf[blockette_start:blockette_start + 2])

                                    # get start of next blockette (second
                                    # value, 2 bytes, unsigned short)
                                    next_blockette_start, = struct.unpack(
                                        b'!H',
                                        buf[blockette_start + 2:blockette_start + 4])

                                    if blockette_id == \
                                            DATA_ONLY_BLOCKETTE_NUMBER:
                                        b1000_found = True
                                        break

                                    elif next_blockette_start == 0:
                                        # no blockettes follow
                                        msg("record %s: no blockettes follow "
                                            "after blockette %s at pos %s" % (
                                                record_idx, blockette_id,
                                                blockette_start))
                                        break

                                    else:
                                        blockette_start = next_blockette_start

                                # blockette 1000 not found
                                if not b1000_found:
                                    msg("record %s: blockette 1000 not found,"
                                        " stop reading" % record_idx)
                                    break

                                # get record size (1 byte, unsigned char)
                                record_size_exponent_idx = blockette_start + 6
                                record_size_exponent, = struct.unpack(
                                    b'!B',
                                    buf[record_size_exponent_idx:
                                        record_size_exponent_idx + 1])

                                remaining_record_size = \
                                    2**record_size_exponent - curr_size

                                # read remainder of record (data section)
                                buf = fd.read(remaining_record_size)
                                if not buf:
                                    msg("cannot read data section of record "
                                        "%s" % record_idx)
                                    break

                                record += buf

                                # collect network IDs
                                try:
                                    net = record[18:20].decode('ascii').rstrip()
                                    sta = record[8:13].decode('ascii').rstrip()
                                    loc = record[13:15].decode('ascii').rstrip()
                                    cha = record[15:18].decode('ascii').rstrip()

                                except UnicodeDecodeError:
                                    msg("invalid miniseed record")
                                    break

                                year, = struct.unpack(b'!H', record[20:22])

                                with lock:
                                    nets.add((net, year))
                                    chans.add('.'.join((net, sta, loc, cha)))
                                    dest.write(record)

                                size += len(record)
                                record_idx += 1

                        elif content_type == "text/plain":
                            # this is the station service in text format
                            text = bytes()

                            while True:
                                buf = fd.readline()

                                if not buf:
                                    break

                                if buf.startswith(b'#'):
                                    tc.set_header(buf)

                                else:
                                    text += buf

                                size += len(buf)

                            with lock:
                                tc.combine(text)

                        elif content_type == "application/xml":
                            fdread = fd.read
                            s = [0]

                            def read(self, *args, **kwargs):
                                buf = fdread(self, *args, **kwargs)
                                s[0] += len(buf)
                                return buf

                            fd.read = read
                            et = ET.parse(fd)
                            size = s[0]

                            with lock:
                                xc.combine(et)

                        else:
                            msg("getting data from %s failed: unsupported "
                                "content type '%s'" % (query_url,
                                                       content_type))
                            break

                        msg("got %d bytes (%s) from %s"
                            % (size, content_type, query_url), verbose)

                    i += n

                finally:
                    fd.close()

            except urllib2.HTTPError as e:
                if e.code == 413 and n > 1:
                    msg("request too large for %s, splitting"
                        % query_url, verbose)
                    n = -(n // -2)

                else:
                    resp = e.read()
                    if isinstance(resp, bytes):
                        resp = resp.decode('utf-8')
                    msg("getting data from %s failed with HTTP status "
                        "code %d:\n%s" % (query_url, e.code, resp))
                    break

            except (urllib2.URLError, socket.error, ET.ParseError) as e:
                msg("getting data from %s failed: %s" % (query_url, str(e)))
                break

    finally:
        finished.put(threading.current_thread())
def _open_url(self, url):
    password_mgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
    password_mgr.add_password(None, self._routes.host, '', self._api_key)
    handler = urllib2.HTTPBasicAuthHandler(password_mgr)
    opener = urllib2.build_opener(handler)
    return opener.open(url)
def get_url(url, host, username, password, download_files_flag):
    """
    This function does a HTTP request of the given URL using the urllib2
    python library. Returns two values: [request, response]
    """
    global debug
    global verbose
    global auth
    # Vector to save time responses of each request. For now it is a global variable.
    global time_responses

    starttime = 0
    endtime = 0
    handler = ""

    try:
        starttime = time.time()
        url = encode_url(url)
        if debug:
            print 'Encoded URL: ' + url
        request = urllib2.Request(url)
        request.add_header('User-Agent',
                           'Mozilla/4.0 (compatible;MSIE 5.5; Windows NT 5.0)')

        if auth:
            password_manager = urllib2.HTTPPasswordMgrWithDefaultRealm()
            password_manager.add_password(None, host, username, password)
            handler = urllib2.HTTPBasicAuthHandler(password_manager)

        if not download_files_flag:
            # First we do a HEAD request to see the type of url we are going to crawl
            request.get_method = lambda: 'HEAD'
            if handler:
                opener_web = urllib2.build_opener(handler)
            else:
                opener_web = urllib2.build_opener()
            response = opener_web.open(request)

            # If it is a file, we don't get the content
            if 'text/html' not in response.headers.typeheader:
                opener_web.close()
                endtime = time.time()
                time_responses.append(endtime - starttime)
                return [request, response]

        request.get_method = lambda: 'GET'
        if handler:
            opener_web = urllib2.build_opener(handler)
        else:
            opener_web = urllib2.build_opener()
        response = opener_web.open(request)
        opener_web.close()
        endtime = time.time()
        time_responses.append(endtime - starttime)
        return [request, response]

    except urllib2.HTTPError, error_code:
        return [request, error_code.getcode()]
    except urllib2.URLError, error_code:
        error = error_code.args[0]
        return [request, error[0]]
def validate_apt_sources(self, url_validation, buildtype):
    slist = self.create_apt_sources_list()
    sources_lines = slist.split('\n')

    urls = []
    for l in sources_lines:
        l = re.sub(r'\[.*\] ', '', l)
        if l.startswith("deb copy:"):
            # This is a cdrom, we don't verify it
            pass
        elif l.startswith("deb-src copy:"):
            # This is a cdrom, we don't verify it
            pass
        elif l.startswith("deb ") or l.startswith("deb-src "):
            lsplit = l.split(" ")
            if lsplit[2].endswith('/'):
                s = "%s/%s" % (lsplit[1], lsplit[2])
            else:
                s = "%s/dists/%s/" % (lsplit[1], lsplit[2])
            urls.append(s + "Release")
            if url_validation == ValidationMode.CHECK_ALL:
                urls.append(s + lsplit[3] + "/source/Release")
                urls.append(s + lsplit[3] + "/binary-%s/Release" % buildtype)
            elif url_validation == ValidationMode.CHECK_BINARIES:
                urls.append(s + lsplit[3] + "/binary-%s/Release" % buildtype)

    if not self.prj:
        return

    if self.prj.has("mirror/primary_proxy"):
        os.environ["no_proxy"] = "10.0.2.2,localhost,127.0.0.1"
        proxy = self.prj.text("mirror/primary_proxy").strip().replace(
            "LOCALMACHINE", "10.0.2.2")
        os.environ["http_proxy"] = proxy
        os.environ["https_proxy"] = proxy

    passman = urllib2.HTTPPasswordMgrWithDefaultRealm()
    authhandler = urllib2.HTTPBasicAuthHandler(passman)
    opener = urllib2.build_opener(authhandler)
    urllib2.install_opener(opener)

    for u in urls:
        if '@' in u:
            t = u.split('@')
            if '://' in t[0]:
                scheme, auth = t[0].split('://')
                scheme = scheme + '://'
            else:
                scheme = ''
                auth = t[0]
            u = scheme + t[1]
            usr, passwd = auth.split(':')
            passman.add_password(None, u, usr, passwd)
        try:
            fp = urllib2.urlopen(u, None, 10)
            fp.close()
        except urllib2.URLError:
            raise ValidationError(
                ["Repository %s can not be validated" % u])
def processEpisode(dir_to_process, org_NZB_name=None, status=None):
    # Default values
    host = "localhost"
    port = "8081"
    username = ""
    password = ""
    ssl = 0
    web_root = "/"

    default_url = host + ":" + port + web_root
    if ssl:
        default_url = "https://" + default_url
    else:
        default_url = "http://" + default_url

    # Get values from config_file
    config = configparser.RawConfigParser()
    config_filename = os.path.join(os.path.dirname(sys.argv[0]),
                                   "autoProcessTV.cfg")

    if not os.path.isfile(config_filename):
        print ("ERROR: " + config_filename + " doesn't exist")
        print ("copy /rename " + config_filename + ".sample and edit\n")
        print ("Trying default url: " + default_url + "\n")
    else:
        try:
            print ("Loading config from " + config_filename + "\n")

            with io.open(config_filename, "r") as fp:
                config.readfp(fp)

            # Replace default values with config_file values
            host = config.get("sickrage", "host")
            port = config.get("sickrage", "port")
            username = config.get("sickrage", "username")
            password = config.get("sickrage", "password")

            try:
                ssl = int(config.get("sickrage", "ssl"))
            except (configparser.NoOptionError, ValueError):
                pass

            try:
                web_root = config.get("sickrage", "web_root")
                if not web_root.startswith("/"):
                    web_root = "/" + web_root
                if not web_root.endswith("/"):
                    web_root += "/"
            except configparser.NoOptionError:
                pass
        except EnvironmentError:
            e = sys.exc_info()[1]
            print ("Could not read configuration file: " + str(e))
            # There was a config_file, don't use default values but exit
            sys.exit(1)

    params = {'quiet': 1, 'dir': dir_to_process}

    if org_NZB_name is not None:
        params['nzbName'] = org_NZB_name

    if status is not None:
        params['failed'] = status

    if ssl:
        protocol = "https://"
    else:
        protocol = "http://"

    url = protocol + host + ":" + port + web_root + \
        "home/postprocess/processEpisode?" + urlencode(params)

    print ("Opening URL: " + url)

    try:
        password_mgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
        password_mgr.add_password(None, url, username, password)
        handler = HTTPBasicAuthHandler(password_mgr)
        opener = urllib2.build_opener(handler)
        urllib2.install_opener(opener)

        result = opener.open(url).readlines()
        for line in result:
            if line:
                print (line.strip())
    except IOError:
        e = sys.exc_info()[1]
        print ("Unable to open URL: " + str(e))
        sys.exit(1)
def bootOE(net, domain=None):
    """ Start the LINC optical emulator within a mininet instance

    This involves 1. converting the information stored in Linc* to
    configs for both LINC and the network config system, 2. starting
    Linc, 3. connecting cross-connects, and finally pushing the
    network configs to ONOS.

    Inevitably, there are times when we have OVS switches that
    should not be under the control of the controller in charge of
    the Linc switches. We hint at these by passing domain information.
    """
    LINCSwitch.opticalJSON = {}
    linkConfig = []
    devices = []
    # setting up the controllers for LINCSwitch class
    LINCSwitch.controllers = net.controllers

    for switch in net.switches:
        if domain and switch not in domain:
            continue
        if isinstance(switch, OpticalSwitch):
            devices.append(switch.json())
        elif isinstance(switch, OVSSwitch):
            devices.append(switchJSON(switch))
    LINCSwitch.opticalJSON['devices'] = devices

    for link in net.links:
        if isinstance(link, LINCLink):
            linkConfig.append(link.json())
    LINCSwitch.opticalJSON['links'] = linkConfig

    info('*** Writing Topology.json file\n')
    topoJSON = LINCSwitch.makeTopoJSON()
    with open('Topology.json', 'w') as outfile:
        json.dump(topoJSON, outfile, indent=4, separators=(',', ': '))

    info('*** Converting Topology.json to linc-oe format (TopoConfig.json) file (no oecfg)\n')

    topoConfigJson = {}
    topoConfigJson["switchConfig"] = getSwitchConfig(net.switches)
    topoConfigJson["linkConfig"] = getLinkConfig(net.links)

    # Writing to TopoConfig.json
    with open('TopoConfig.json', 'w') as outfile:
        json.dump(topoConfigJson, outfile, indent=4, separators=(',', ': '))

    info('*** Creating sys.config...\n')
    output = quietRun('%s/config_generator TopoConfig.json %s/sys.config.template %s %s'
                      % (LINCSwitch.configGen, LINCSwitch.configGen,
                         LINCSwitch.controllers[0].ip, LINCSwitch.controllers[0].port),
                      shell=True)
    if output:
        error('***ERROR: Error creating sys.config file: %s\n' % output)
        return False

    info('*** Setting multiple controllers in sys.config...\n')
    searchStr = '\[{"Switch.*$'
    ctrlStr = ''
    for index in range(len(LINCSwitch.controllers)):
        ctrlStr += '{"Switch%d-Controller","%s",%d,tcp},' % (
            index, net.controllers[index].ip, net.controllers[index].port)
    replaceStr = '[%s]},' % ctrlStr[:-1]  # Cut off last comma
    sedCmd = 'sed -i \'s/%s/%s/\' sys.config' % (searchStr, replaceStr)
    output = quietRun(sedCmd, shell=True)

    info('*** Copying sys.config to linc-oe directory: ', output + '\n')
    output = quietRun('cp -v sys.config %s/rel/linc/releases/1.0/'
                      % LINCSwitch.lincDir, shell=True).strip('\n')
    info(output + '\n')

    info('*** Adding taps and bringing them up...\n')
    setupInts(LINCSwitch.getTaps())

    info('*** removing pipes if any\n')
    quietRun('rm /tmp/home/%s/linc-oe/rel/linc/*' % LINCSwitch.user, shell=True)

    info('*** Starting linc OE...\n')
    output = quietRun('%s/rel/linc/bin/linc start' % LINCSwitch.lincDir, shell=True)
    if output:
        error('***ERROR: LINC-OE: %s' % output + '\n')
        quietRun('%s/rel/linc/bin/linc stop' % LINCSwitch.lincDir, shell=True)
        return False

    info('*** Waiting for linc-oe to start...\n')
    waitStarted(net)

    info('*** Adding cross-connect (tap) interfaces to packet switches...\n')
    for link in net.links:
        if isinstance(link, LINCLink) and link.isCrossConnect():
            for intf in [link.intf1, link.intf2]:
                if not isinstance(intf, LINCIntf):
                    intfList = [intf.link.intf1, intf.link.intf2]
                    intfList.remove(intf)
                    intf2 = intfList[0]
                    intf.node.attach(LINCSwitch.findTap(intf2.node,
                                                        intf2.node.ports[intf2]))

    info('*** Waiting for all devices to be available in ONOS...\n')
    url = 'http://%s:8181/onos/v1/devices' % LINCSwitch.controllers[0].ip
    time = 0
    # Set up password authentication
    pw_mgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
    pw_mgr.add_password(None, url, LINCSwitch.restUser, LINCSwitch.restPass)
    handler = urllib2.HTTPBasicAuthHandler(pw_mgr)
    opener = urllib2.build_opener(handler)
    opener.open(url)
    urllib2.install_opener(opener)

    # focus on just checking the state of devices we're interested in
    # expected devices availability map
    devMap = dict.fromkeys(map(lambda x: x['uri'], devices), False)
    while True:
        response = json.load(urllib2.urlopen(url))
        devs = response.get('devices')

        # update availability map
        for d in devs:
            if d['id'] in devMap:
                devMap[d['id']] = d['available']

        # Check if all devices we're interested in became available
        if all(devMap.viewvalues()):
            break

        if time >= TIMEOUT:
            error('***ERROR: ONOS did not register devices within %s seconds\n' % TIMEOUT)
            break

        time += SLEEP_TIME
        sleep(SLEEP_TIME)

    info('*** Pushing Topology.json to ONOS\n')
    for index in range(len(LINCSwitch.controllers)):
        output = quietRun('%s/onos-netcfg %s Topology.json &'
                          % (LINCSwitch.runPackDir,
                             LINCSwitch.controllers[index].ip), shell=True)
        # successful output contains the two characters '{}'
        # if there is more output than this, there is an issue
        if output.strip('{}'):
            warn('***WARNING: Could not push topology file to ONOS: %s\n' % output)
def open_url(url, data=None, headers=None, method=None, use_proxy=True,
             force=False, last_mod_time=None, timeout=10, validate_certs=True,
             url_username=None, url_password=None, http_agent=None,
             force_basic_auth=False):
    '''
    Fetches a file from an HTTP/FTP server using urllib2
    '''
    handlers = []

    # FIXME: change the following to use the generic_urlparse function
    #        to remove the indexed references for 'parsed'
    parsed = urlparse.urlparse(url)
    if parsed[0] == 'https' and validate_certs:
        if not HAS_SSL:
            raise NoSSLError('SSL validation is not available in your version of python. '
                             'You can use validate_certs=False, however this is unsafe and not recommended')

        # do the cert validation
        netloc = parsed[1]
        if '@' in netloc:
            netloc = netloc.split('@', 1)[1]
        if ':' in netloc:
            hostname, port = netloc.split(':', 1)
            port = int(port)
        else:
            hostname = netloc
            port = 443
        # create the SSL validation handler and
        # add it to the list of handlers
        ssl_handler = SSLValidationHandler(hostname, port)
        handlers.append(ssl_handler)

    if parsed[0] != 'ftp':
        username = url_username

        if username:
            password = url_password
            netloc = parsed[1]
        elif '@' in parsed[1]:
            credentials, netloc = parsed[1].split('@', 1)
            if ':' in credentials:
                username, password = credentials.split(':', 1)
            else:
                username = credentials
                password = ''

            parsed = list(parsed)
            parsed[1] = netloc

            # reconstruct url without credentials
            url = urlparse.urlunparse(parsed)

        if username and not force_basic_auth:
            passman = urllib2.HTTPPasswordMgrWithDefaultRealm()

            # this creates a password manager
            passman.add_password(None, netloc, username, password)

            # because we have put None at the start it will always
            # use this username/password combination for urls
            # for which `theurl` is a super-url
            authhandler = urllib2.HTTPBasicAuthHandler(passman)

            # create the AuthHandler
            handlers.append(authhandler)

        elif username and force_basic_auth:
            if headers is None:
                headers = {}

            headers["Authorization"] = "Basic %s" % base64.b64encode(
                "%s:%s" % (username, password))

    if not use_proxy:
        proxyhandler = urllib2.ProxyHandler({})
        handlers.append(proxyhandler)

    # pre-2.6 versions of python cannot use the custom https
    # handler, since the socket class is lacking this method
    if hasattr(socket, 'create_connection'):
        handlers.append(CustomHTTPSHandler)

    opener = urllib2.build_opener(*handlers)
    urllib2.install_opener(opener)

    if method:
        if method.upper() not in ('OPTIONS', 'GET', 'HEAD', 'POST', 'PUT',
                                  'DELETE', 'TRACE', 'CONNECT', 'PATCH'):
            raise ConnectionError('invalid HTTP request method; %s' % method.upper())
        request = RequestWithMethod(url, method.upper(), data)
    else:
        request = urllib2.Request(url, data)

    # add the custom agent header, to help prevent issues
    # with sites that block the default urllib agent string
    request.add_header('User-agent', http_agent)

    # if we're ok with getting a 304, set the timestamp in the
    # header, otherwise make sure we don't get a cached copy
    if last_mod_time and not force:
        tstamp = last_mod_time.strftime('%a, %d %b %Y %H:%M:%S +0000')
        request.add_header('If-Modified-Since', tstamp)
    else:
        request.add_header('cache-control', 'no-cache')

    # user defined headers now, which may override things we've set above
    if headers:
        if not isinstance(headers, dict):
            raise ValueError("headers provided to fetch_url() must be a dict")
        for header in headers:
            request.add_header(header, headers[header])

    urlopen_args = [request, None]
    if sys.version_info >= (2, 6, 0):
        # urlopen in python prior to 2.6.0 did not
        # have a timeout parameter
        urlopen_args.append(timeout)

    if HAS_SSLCONTEXT and not validate_certs:
        # In 2.7.9, the default context validates certificates
        context = SSLContext(ssl.PROTOCOL_SSLv23)
        context.options |= ssl.OP_NO_SSLv2
        context.options |= ssl.OP_NO_SSLv3
        context.verify_mode = ssl.CERT_NONE
        context.check_hostname = False
        urlopen_args += (None, None, None, context)

    r = urllib2.urlopen(*urlopen_args)
    return r
import urllib2

baseUrl = 'http://localhost:8080/api'
authinfo = urllib2.HTTPPasswordMgrWithDefaultRealm()
authinfo.add_password(None, baseUrl, 'admin', 'doublecloud')
handler = urllib2.HTTPBasicAuthHandler(authinfo)
client = urllib2.build_opener(handler)
# urllib2.install_opener(client)

page = baseUrl + '/ServiceInstance'
client.open(
    page,
    "{\"ip\": \"192.168.0.200\",\"username\": \"root\", \"password\":\"doublecloud\"}"
)

listvms = baseUrl + '/VirtualMachine'
f = client.open(listvms)
vms = f.read()
print vms
def getRssDataAsDict(self, url, username=None, password=None):
    result = {}
    translate = self.Base_translateString

    # no url, no feed to read
    if url in ('', None, 'None',):
        # no URL
        return {'status': -1}

    # use authentication or not?
    handlers = []
    if username is not None and password is not None:
        passman = urllib2.HTTPPasswordMgrWithDefaultRealm()
        passman.add_password(None, url, username, password)
        auth_handler = urllib2.HTTPBasicAuthHandler(passman)
        handlers.append(auth_handler)

    # set shorter timeouts and revert to the default at the end of the read
    default_timeout = socket.getdefaulttimeout()
    socket.setdefaulttimeout(60.0)
    try:
        d = feedparser.parse(url, handlers=handlers)
    finally:
        socket.setdefaulttimeout(default_timeout)

    if d.bozo and isinstance(d.bozo_exception, urllib2.URLError):
        # we have an URL error
        return {'status': -2}
    elif d.bozo:
        # some bozo exceptions can be ignored
        if not isinstance(d.bozo_exception,
                          (feedparser.CharacterEncodingOverride,)):
            return {'status': -5}

    if d.status == 401:
        return {'status': -3}
    elif d.status == 404:
        return {'status': -4}

    result['items'] = []
    # some feeds may not provide a logo
    if d.feed.get('image', None) is not None:
        result['logo'] = d.feed.image['href']
    result['title'] = d.feed.title
    result['link'] = d.feed.link
    for entry in d.entries:
        entry_dict = {}
        entry_dict['title'] = entry['title']
        entry_dict['link'] = entry['link']
        entry_dict['other_links'] = [x['href'] for x in entry['links']]
        entry_dict['md5'] = md5(entry['link']).hexdigest()
        entry_dict['content'] = entry.get('summary', '')
        entry_dict['date'] = entry.get('updated', None)
        entry_dict['img'] = [x['href'] for x in entry.get('enclosures', [])]
        entry_dict['updated_parsed'] = entry.get('updated_parsed', None)
        result['items'].append(entry_dict)

    # sort by date, newest first
    result['items'] = sorted(result['items'], key=lambda k: k['updated_parsed'])
    result['items'].reverse()
    result['status'] = 0
    return result
def main():

    # fail_json needs the module object, so create it before checking imports
    module = AnsibleModule(
        argument_spec=dict(
            subscription=dict(required=True),
            token=dict(required=True),
            room=dict(required=True),
            msg=dict(required=True),
            notify=dict(required=False,
                        choices=["56k", "bell", "bezos", "bueller",
                                 "clowntown", "cottoneyejoe", "crickets",
                                 "dadgummit", "dangerzone", "danielsan",
                                 "deeper", "drama", "greatjob", "greyjoy",
                                 "guarantee", "heygirl", "horn", "horror",
                                 "inconceivable", "live", "loggins",
                                 "makeitso", "noooo", "nyan", "ohmy",
                                 "ohyeah", "pushit", "rimshot", "rollout",
                                 "rumble", "sax", "secret", "sexyback",
                                 "story", "tada", "tmyk", "trololo",
                                 "trombone", "unix", "vuvuzela", "what",
                                 "whoomp", "yeah", "yodel"]),
        ),
        supports_check_mode=False
    )

    try:
        import urllib2
    except ImportError:
        module.fail_json(msg="urllib2 is required")

    try:
        import cgi
    except ImportError:
        module.fail_json(msg="cgi is required")

    subscription = module.params["subscription"]
    token = module.params["token"]
    room = module.params["room"]
    msg = module.params["msg"]
    notify = module.params["notify"]

    URI = "https://%s.campfirenow.com" % subscription
    NSTR = "<message><type>SoundMessage</type><body>%s</body></message>"
    MSTR = "<message><body>%s</body></message>"
    AGENT = "Ansible/1.2"

    try:
        # Setup basic auth using token as the username
        pm = urllib2.HTTPPasswordMgrWithDefaultRealm()
        pm.add_password(None, URI, token, 'X')

        # Setup Handler and define the opener for the request
        handler = urllib2.HTTPBasicAuthHandler(pm)
        opener = urllib2.build_opener(handler)

        target_url = '%s/room/%s/speak.xml' % (URI, room)

        # Send some audible notification if requested
        if notify:
            req = urllib2.Request(target_url, NSTR % cgi.escape(notify))
            req.add_header('Content-Type', 'application/xml')
            req.add_header('User-agent', AGENT)
            response = opener.open(req)

        # Send the message
        req = urllib2.Request(target_url, MSTR % cgi.escape(msg))
        req.add_header('Content-Type', 'application/xml')
        req.add_header('User-agent', AGENT)
        response = opener.open(req)
    except urllib2.HTTPError, e:
        if not (200 <= e.code < 300):
            module.fail_json(msg="unable to send msg: '%s', campfire api"
                                 " returned error code: '%s'" % (msg, e.code))
def cgiPost(host, port, username, password, uri, verbose, secure, args=None):
    """Post the request to the admin server.

    Admin server requires authentication, so we use the auth handler classes.
    NOTE: the url classes in python use the deprecated base64.encodestring()
    function, which truncates lines, causing Apache to give us a 400 Bad
    Request error for the Authentication string. So, we have to tell
    base64.encodestring() not to truncate."""
    args = args or {}
    prefix = 'http'
    if secure:
        prefix = 'https'
    hostport = host + ":" + port
    # construct our url
    url = '%s://%s:%s%s' % (prefix, host, port, uri)

    # tell base64 not to truncate lines
    savedbinsize = base64.MAXBINSIZE
    base64.MAXBINSIZE = 256

    # create the password manager - we don't care about the realm
    passman = urllib2.HTTPPasswordMgrWithDefaultRealm()

    # add our password
    passman.add_password(None, hostport, username, password)

    # create the auth handler
    authhandler = urllib2.HTTPBasicAuthHandler(passman)

    # create our url opener that handles basic auth
    opener = urllib2.build_opener(authhandler)

    # make admin server think we are the console
    opener.addheaders = [('User-Agent', 'Fedora-Console/1.0')]

    if verbose:
        print "requesting url", url
        sys.stdout.flush()

    exitCode = 1
    try:
        req = opener.open(url, urllib.urlencode(args))
        for line in req:
            if verbose:
                print line
            ary = line.split(":")
            if len(ary) > 1 and ary[0] == 'NMC_Status':
                exitCode = ary[1].strip()
                break
        req.close()
    # except IOError, e:
    #     print e
    #     print e.code
    #     print e.headers
    #     raise
    finally:
        # restore binsize
        base64.MAXBINSIZE = savedbinsize

    return exitCode
def __push(self, repository, task):
    """
        Outgoing push

        @param repository: a sync_repository row
        @param task: a sync_task row
    """
    manager = current.manager
    xml = manager.xml

    resource_name = task.resource_name
    prefix, name = resource_name.split("_", 1)
    _debug("S3Sync.__push(%s, %s)" % (repository.url, resource_name))

    # Construct the URL
    config = self.__get_config()
    proxy = repository.proxy or config.proxy or None
    url = "%s/sync/sync.xml?resource=%s&repository=%s" % \
          (repository.url, resource_name, config.uuid)
    username = repository.username
    password = repository.password

    strategy = task.strategy
    if strategy:
        url += "&strategy=%s" % ",".join(strategy)
    update_policy = task.update_policy
    if update_policy:
        url += "&update_policy=%s" % update_policy
    conflict_policy = task.conflict_policy
    if conflict_policy:
        url += "&conflict_policy=%s" % conflict_policy
    last_sync = task.last_sync
    if last_sync and update_policy not in ("THIS", "OTHER"):
        url += "&msince=%s" % xml.encode_iso_datetime(last_sync)
    else:
        last_sync = None
    _debug("...push to URL %s" % url)

    # Export the resource as S3XML
    prefix, name = task.resource_name.split("_", 1)
    resource = manager.define_resource(prefix, name, include_deleted=True)
    data = resource.export_xml(msince=last_sync)

    remote = False
    output = None

    if data:
        # Find the protocol
        url_split = url.split("://", 1)
        if len(url_split) == 2:
            protocol, path = url_split
        else:
            protocol, path = "http", None

        # Generate the request
        import urllib2
        req = urllib2.Request(url=url, data=data)
        req.add_header('Content-Type', "text/xml")

        handlers = []
        if proxy:
            # Proxy handling
            _debug("using proxy=%s" % proxy)
            proxy_handler = urllib2.ProxyHandler({protocol: proxy})
            handlers.append(proxy_handler)
        if username and password:
            # Authentication handling:
            # send auth credentials unsolicitedly
            import base64
            base64string = base64.encodestring('%s:%s' %
                                               (username, password))[:-1]
            req.add_header("Authorization", "Basic %s" % base64string)
            # Just in case the peer does not accept that
            # => add a 401 handler:
            passwd_manager = urllib2.HTTPPasswordMgrWithDefaultRealm()
            passwd_manager.add_password(realm=None,
                                        uri=url,
                                        user=username,
                                        passwd=password)
            auth_handler = urllib2.HTTPBasicAuthHandler(passwd_manager)
            handlers.append(auth_handler)

        # Install all handlers
        if handlers:
            opener = urllib2.build_opener(*handlers)
            urllib2.install_opener(opener)

        # Execute the request
        try:
            f = urllib2.urlopen(req)
        except urllib2.HTTPError, e:
            result = self.log.FATAL
            remote = True  # Peer error
            code = e.code
            message = e.read()
            try:
                # Sahana-Eden would send a JSON message,
                # try to extract the actual error message:
                message_json = json.loads(message)
                message = message_json.get("message", message)
            except:
                pass
            output = xml.json_message(False, code, message)
        except:
def CreateServer(server=None, conf=None, debug_queue=None):
    # create Server from config
    registered_servers = get_registered_servers()
    if server.type not in registered_servers:
        print 'Server type not supported: %s' % server.type
        return
    # give argument servername so CentreonServer could use it for initializing MD5 cache
    nagiosserver = registered_servers[server.type](conf=conf, name=server.name)
    nagiosserver.type = server.type
    nagiosserver.nagios_url = server.nagios_url
    nagiosserver.nagios_cgi_url = server.nagios_cgi_url
    nagiosserver.username = server.username

    if server.save_password or not server.enabled:
        nagiosserver.password = server.password
    else:
        pwdialog = GUI.PasswordDialog("Password for " + server.username +
                                      " on " + server.nagios_url + ": ")
        if pwdialog.password is None:
            nagiosserver.password = ""
        else:
            nagiosserver.password = pwdialog.password

    nagiosserver.use_proxy = server.use_proxy
    nagiosserver.use_proxy_from_os = server.use_proxy_from_os
    nagiosserver.proxy_address = server.proxy_address
    nagiosserver.proxy_username = server.proxy_username
    nagiosserver.proxy_password = server.proxy_password

    # access to thread-safe debug queue
    nagiosserver.debug_queue = debug_queue

    # use server-owned attributes instead of redefining them with every request
    nagiosserver.passman = urllib2.HTTPPasswordMgrWithDefaultRealm()
    nagiosserver.passman.add_password(None, server.nagios_url,
                                      server.username, server.password)
    nagiosserver.passman.add_password(None, server.nagios_cgi_url,
                                      server.username, server.password)
    nagiosserver.basic_handler = urllib2.HTTPBasicAuthHandler(
        nagiosserver.passman)
    nagiosserver.digest_handler = urllib2.HTTPDigestAuthHandler(
        nagiosserver.passman)
    nagiosserver.proxy_auth_handler = urllib2.ProxyBasicAuthHandler(
        nagiosserver.passman)

    if str(nagiosserver.use_proxy) == "False":
        # use empty proxyhandler
        nagiosserver.proxy_handler = urllib2.ProxyHandler({})
    elif str(server.use_proxy_from_os) == "False":
        # if the proxy from the OS is not used, an authenticated proxy handler
        # has to be added
        nagiosserver.passman.add_password(None, nagiosserver.proxy_address,
                                          nagiosserver.proxy_username,
                                          nagiosserver.proxy_password)
        nagiosserver.proxy_handler = urllib2.ProxyHandler({
            "http": nagiosserver.proxy_address,
            "https": nagiosserver.proxy_address
        })
        nagiosserver.proxy_auth_handler = urllib2.ProxyBasicAuthHandler(
            nagiosserver.passman)

    # create permanent urlopener for server to avoid memory leak with millions of openers
    nagiosserver.urlopener = BuildURLOpener(nagiosserver)

    # server's individual preparations for HTTP connections (for example cookie creation)
    if str(server.enabled) == "True":
        nagiosserver.init_HTTP()

    # debug
    if str(conf.debug_mode) == "True":
        nagiosserver.Debug(server=server.name, debug="Created server.")

    return nagiosserver
def request(method, url, params=None, data=None, headers=None, cookies=None,
            files=None, auth=None, timeout=60, allow_redirects=False,
            stream=False):
    """Initiate an HTTP(S) request. Returns :class:`Response` object.

    :param method: 'GET' or 'POST'
    :type method: unicode
    :param url: URL to open
    :type url: unicode
    :param params: mapping of URL parameters
    :type params: dict
    :param data: mapping of form data ``{'field_name': 'value'}`` or
        :class:`str`
    :type data: dict or str
    :param headers: HTTP headers
    :type headers: dict
    :param cookies: cookies to send to server
    :type cookies: dict
    :param files: files to upload (see below).
    :type files: dict
    :param auth: username, password
    :type auth: tuple
    :param timeout: connection timeout limit in seconds
    :type timeout: int
    :param allow_redirects: follow redirections
    :type allow_redirects: bool
    :param stream: Stream content instead of fetching it all at once.
    :type stream: bool
    :returns: Response object
    :rtype: :class:`Response`

    The ``files`` argument is a dictionary::

        {'fieldname' : { 'filename': 'blah.txt',
                         'content': '<binary data>',
                         'mimetype': 'text/plain'}
        }

    * ``fieldname`` is the name of the field in the HTML form.
    * ``mimetype`` is optional. If not provided, :mod:`mimetypes` will
      be used to guess the mimetype, or ``application/octet-stream``
      will be used.

    """
    # TODO: cookies
    socket.setdefaulttimeout(timeout)

    # Default handlers
    openers = []

    if not allow_redirects:
        openers.append(NoRedirectHandler())

    if auth is not None:  # Add authorisation handler
        username, password = auth
        password_manager = urllib2.HTTPPasswordMgrWithDefaultRealm()
        password_manager.add_password(None, url, username, password)
        auth_manager = urllib2.HTTPBasicAuthHandler(password_manager)
        openers.append(auth_manager)

    # Install our custom chain of openers
    opener = urllib2.build_opener(*openers)
    urllib2.install_opener(opener)

    if not headers:
        headers = CaseInsensitiveDictionary()
    else:
        headers = CaseInsensitiveDictionary(headers)

    if 'user-agent' not in headers:
        headers['user-agent'] = USER_AGENT

    # Accept gzip-encoded content
    encodings = [s.strip() for s in
                 headers.get('accept-encoding', '').split(',')]
    if 'gzip' not in encodings:
        encodings.append('gzip')

    headers['accept-encoding'] = ', '.join(encodings)

    # Force POST by providing an empty data string
    if method == 'POST' and not data:
        data = ''

    if files:
        if not data:
            data = {}
        new_headers, data = encode_multipart_formdata(data, files)
        headers.update(new_headers)
    elif data and isinstance(data, dict):
        data = urllib.urlencode(str_dict(data))

    # Make sure everything is encoded text
    headers = str_dict(headers)

    if isinstance(url, unicode):
        url = url.encode('utf-8')

    if params:  # GET args (POST args are handled in encode_multipart_formdata)
        scheme, netloc, path, query, fragment = urlparse.urlsplit(url)

        if query:  # Combine query string and `params`
            url_params = urlparse.parse_qs(query)
            # `params` take precedence over URL query string
            url_params.update(params)
            params = url_params

        query = urllib.urlencode(str_dict(params), doseq=True)
        url = urlparse.urlunsplit((scheme, netloc, path, query, fragment))

    req = urllib2.Request(url, data, headers)
    return Response(req, stream)
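A short usage sketch for the `request()` helper above, following the `files` layout documented in its docstring; the URL and the ('alice', 'secret') credentials are placeholders, not values from the source:

# Hypothetical call: POST a file with HTTP Basic auth, using the
# docstring's `files` dictionary layout.
resp = request('POST', 'http://example.com/upload',
               auth=('alice', 'secret'),
               files={'report': {'filename': 'blah.txt',
                                 'content': '<binary data>',
                                 'mimetype': 'text/plain'}})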
def __init__(self, user, password):
    passman = urllib2.HTTPPasswordMgrWithDefaultRealm()
    passman.add_password(None, self.get_url, user, password)
    passman.add_password(None, self.add_url, user, password)
    authhandler = urllib2.HTTPBasicAuthHandler(passman)
    self.url_opener = urllib2.build_opener(authhandler)
To create an opener, you can instantiate an OpenerDirector and then call
.add_handler on it.

4.2.2 Alternatively, you can use build_opener, a more convenient function for
creating opener objects that needs only a single call. install_opener makes an
opener the (global) default, which means that calls to urlopen will use the
opener you installed. Opener objects have an open method that can be used to
fetch urls directly, just like the urlopen function: in that case there is
usually no need to call install_opener.

4.2.3 To demonstrate creating and installing a handler, we will use
HTTPBasicAuthHandler (basic authentication).
'''
# -*- coding: utf-8 -*-
import urllib2

# create a password manager
password_mgr = urllib2.HTTPPasswordMgrWithDefaultRealm()

# add the username and password
top_level_url = "http://example.com/foo/"  # top_level_url can actually be a full URL
# if we know the realm, we can use it instead of "None"
#password_mgr.add_password(None, top_level_url, username, password)
password_mgr.add_password(None, top_level_url, 'weimin', 'lee123')

# create a new handler
handler = urllib2.HTTPBasicAuthHandler(password_mgr)

"""The default opener has handlers for the normal cases: ProxyHandler,
UnknownHandler, HTTPHandler, HTTPDefaultErrorHandler, HTTPRedirectHandler,
FTPHandler, FileHandler, HTTPErrorProcessor.
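Completing the tutorial above, a minimal sketch that installs the handler globally and fetches a page; the credentials reuse the example values already given, and the 'protected.html' path is a made-up placeholder:

# Build an opener around the basic-auth handler and install it as the
# global default, so plain urllib2.urlopen() calls use it from now on.
opener = urllib2.build_opener(handler)
urllib2.install_opener(opener)
result = urllib2.urlopen("http://example.com/foo/protected.html")  # assumed path
print result.read()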
def SHOWS(url):
    if __settings__.getSetting('proxy_use') == 'true':
        proxy_server = None
        proxy_type_id = 0
        proxy_port = 8080
        proxy_user = None
        proxy_pass = None
        try:
            proxy_server = __settings__.getSetting('proxy_server')
            proxy_type_id = __settings__.getSetting('proxy_type')
            proxy_port = __settings__.getSetting('proxy_port')
            proxy_user = __settings__.getSetting('proxy_user')
            proxy_pass = __settings__.getSetting('proxy_pass')
        except:
            pass
        passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
        proxy_details = 'http://' + proxy_server + ':' + proxy_port
        passmgr.add_password(None, proxy_details, proxy_user, proxy_pass)
        authinfo = urllib2.ProxyBasicAuthHandler(passmgr)
        proxy_support = urllib2.ProxyHandler({"http": proxy_details})
        opener = urllib2.build_opener(proxy_support, authinfo)
        urllib2.install_opener(opener)

    f = urllib2.urlopen(url)
    buf = f.read()
    buf = re.sub('&amp;', '&', buf)
    buf = re.sub('&middot;', '', buf)
    #print "BUF %s" % buf
    f.close()

    buf = buf.split('grid-list__item width--one-half width--custard--one-third')
    for p in buf:
        try:
            linkurl = re.compile('href="(.+?)"').findall(p)[0]
            #print linkurl
            image = re.compile('srcset="(.+?)"').findall(p)[0]
            if '?' in image:
                image = image.split('?')[0] + '?w=512&h=288'
            #print image
            name = re.compile('"tout__title complex-link__target theme__target">(.+?)</h3',
                              re.DOTALL).findall(p)[0].strip()
            #print name
            episodes = re.compile('"tout__meta theme__meta">(.+?)</p',
                                  re.DOTALL).findall(p)[0].strip()
            if 'mins' in episodes:
                episodes = re.compile('>(.+?)</',
                                      re.DOTALL).findall(episodes)[0].strip()
            #print episodes
            if 'day left' in episodes or 'days left' in episodes \
                    or episodes == '1 episode' or 'mins' in episodes:
                if not 'mins' in episodes:
                    linkurl = linkurl + '##'
                addDir2(name + ' - [COLOR orange]%s[/COLOR]' % episodes,
                        linkurl, 3, '', '', image, '', isFolder=False)
            else:
                if not 'no episodes' in episodes.lower():
                    addDir(name + ' - [COLOR orange]%s[/COLOR]' % episodes,
                           linkurl, 2, image)
        except:
            pass
    setView('tvshows', 'show')
def call_api(path):
    url = 'http://{0}:{1}/api/{2}'.format(hostname, port, path)
    password_mgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
    password_mgr.add_password(None, url, username, password)
    handler = urllib2.HTTPBasicAuthHandler(password_mgr)
    return json.loads(urllib2.build_opener(handler).open(url).read())
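`call_api()` above relies on module-level connection settings. A sketch of what those globals might look like, with placeholder values; the port and 'overview' path are styled after a RabbitMQ-like management API, purely as an assumption:

# Hypothetical module-level settings assumed by call_api()
hostname = 'localhost'
port = 15672
username = 'guest'
password = 'guest'

overview = call_api('overview')  # GET http://localhost:15672/api/overview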
def EPS(name, url):
    if __settings__.getSetting('proxy_use') == 'true':
        proxy_server = None
        proxy_type_id = 0
        proxy_port = 8080
        proxy_user = None
        proxy_pass = None
        try:
            proxy_server = __settings__.getSetting('proxy_server')
            proxy_type_id = __settings__.getSetting('proxy_type')
            proxy_port = __settings__.getSetting('proxy_port')
            proxy_user = __settings__.getSetting('proxy_user')
            proxy_pass = __settings__.getSetting('proxy_pass')
        except:
            pass
        passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
        proxy_details = 'http://' + proxy_server + ':' + proxy_port
        passmgr.add_password(None, proxy_details, proxy_user, proxy_pass)
        authinfo = urllib2.ProxyBasicAuthHandler(passmgr)
        proxy_support = urllib2.ProxyHandler({"http": proxy_details})
        opener = urllib2.build_opener(proxy_support, authinfo)
        urllib2.install_opener(opener)

    f = urllib2.urlopen(url)
    buf = f.read()
    buf = re.sub('&amp;', '&', buf)
    buf = re.sub('&middot;', '', buf)
    buf = re.sub('&#039;', '\'', buf)
    f.close()

    buf = buf.split('grid-list__item width--one-half width--custard--one-third')
    NAME = name.split('-')[0]
    uniques = []
    for p in buf:
        #print p
        try:
            linkurl = re.compile('href="(.+?)"').findall(p)[0]
            #print linkurl
            image = re.compile('srcset="(.+?)"').findall(p)[0]
            if '?' in image:
                image = image.split('?')[0] + '?w=512&h=288'
            name = re.compile('"tout__title complex-link__target theme__target.+?>(.+?)</h',
                              re.DOTALL).findall(p)[0].strip()
            if 'datetime' in name:
                name = NAME
            #episodes = re.compile('"tout__meta theme__meta">(.+?)</p', re.DOTALL).findall(p)[0].strip()
            description = re.compile('tout__summary theme__subtle">(.+?)</p',
                                     re.DOTALL).findall(p)[0].strip()
            #print description
            date = re.compile('datetime="(.+?)">', re.DOTALL).findall(p)[0]
            try:
                TIME = parse_Date(str(date), '%Y-%m-%dT%H:%MZ', '%H:%M%p')
                DATE = parse_Date(str(date), '%Y-%m-%dT%H:%MZ', '%d/%m/%Y')
                ADDDATE = '%s %s' % (DATE, TIME)
            except Exception as e:
                ADDDATE = ''
            #print date
            NAME = name  # + ' - ' + ADDDATE
            if ADDDATE not in uniques:
                uniques.append(ADDDATE)
                #xbmc.log(str(linkurl))
                addDir2(NAME + ' - ' + ADDDATE, linkurl, 3, date, name,
                        image, description, isFolder=False)
        except:
            pass
    setView('tvshows', 'episode')
def main(year):
    # The user credentials that will be used to authenticate access to the data
    username = "******"
    password = ""

    # The FULL url of the directory which contains the files you would like to bulk download
    url = "https://daacdata.apps.nsidc.org/pub/DATASETS/nsidc0116_icemotion_vectors_v3/data/north/grid/" + str(year) + "/"  # Example URL

    # Create a password manager to deal with the 401 response that is returned from
    # Earthdata Login
    password_manager = urllib2.HTTPPasswordMgrWithDefaultRealm()
    password_manager.add_password(None, "https://urs.earthdata.nasa.gov",
                                  username, password)

    # Create a cookie jar for storing cookies. This is used to store and return
    # the session cookie given to us by the data server (otherwise it will just
    # keep sending us back to Earthdata Login to authenticate). Ideally, we
    # should use a file based cookie jar to preserve cookies between runs. This
    # will make it much more efficient.
    cookie_jar = CookieJar()

    # Install all the handlers.
    opener = urllib2.build_opener(
        urllib2.HTTPBasicAuthHandler(password_manager),
        urllib2.HTTPHandler(debuglevel=1),    # Uncomment these two lines to see
        #urllib2.HTTPSHandler(debuglevel=1),  # details of the requests/responses
        urllib2.HTTPCookieProcessor(cookie_jar))
    urllib2.install_opener(opener)

    # Create and submit the requests. There are a wide range of exceptions that
    # can be thrown here, including HTTPError and URLError. These should be
    # caught and handled.

    #===========================================================================
    # Open a request to grab filenames within a directory. Printing is optional.
    #===========================================================================
    DirRequest = urllib2.Request(url)
    DirResponse = urllib2.urlopen(DirRequest)

    # Get the redirect url and append 'app_type=401'
    # to do basic http auth
    DirRedirect_url = DirResponse.geturl()
    DirRedirect_url += '&app_type=401'

    # Request the resource at the modified redirect url
    DirRequest = urllib2.Request(DirRedirect_url)
    DirResponse = urllib2.urlopen(DirRequest)

    DirBody = DirResponse.read()

    # Uses the HTML parser defined above to print the content of the directory containing data
    parser = MyHTMLParser()
    parser.feed(DirBody)
    Files = parser.dataList

    # Display the contents of the python list declared in the HTMLParser class
    # print Files  # Uncomment to print a list of the files

    #===========================================================================
    # Call the function to download all files in url
    #===========================================================================
    BatchJob(Files, cookie_jar, year, url)  # Comment out to prevent downloading to your working directory
def __init__(self, password_mgr=None):
    if password_mgr is None:
        password_mgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
    self.passwd = password_mgr
    self.add_password = self.passwd.add_password
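A hedged sketch of how this constructor pattern is typically consumed: because `add_password` is aliased onto the instance, credentials can be registered directly on the handler object, mirroring urllib2's own auth handlers. `MyAuthHandler` is an assumed subclass name, not from the source:

# Hypothetical subclass built on the __init__ above
h = MyAuthHandler()
h.add_password(None, 'http://example.com/', 'user', 'secret')
opener = urllib2.build_opener(h)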