def __init__(self):
    """Create RPC proxies: always one for the backend supervisor, and one
    for the monitoring supervisor only when its URI is configured."""
    self.backend_server = ServerProxy(BACKEND_SUPERVISOR_URI)
    # Monitoring is optional; leave the proxy unset when no URI is given.
    self.mon_server = ServerProxy(MON_SUPERVISOR_URI) if MON_SUPERVISOR_URI else None
# coding=utf-8
"""Smoke test: call a remote XML-RPC ``add`` method, then evaluate a psutil
memory probe on the same host over SSH."""
import psutil
from base_learning import SSh, getCmd
from xmlrpc.client import ServerProxy

if __name__ == '__main__':
    # XML-RPC round trip: ask the remote server to add two numbers.
    rpc = ServerProxy("http://172.23.122.11:8080")
    print(rpc.add(3, 4))

    # SSH into the same box and evaluate a psutil expression remotely.
    ssh = SSh('172.23.122.11', 22, '******', '******')
    res = ssh.ssh_con('psutil.virtual_memory().percent')
    print('res:', res)

# def getCpuRatio():
#plot (x=date) totaltickets(bar), closedtickets (line #cumulative flow?)
#totals
#per above
"""Append a daily snapshot of Trac ticket counts for milestone 2021.1.0 to a CSV."""
from xmlrpc.client import ServerProxy
from datetime import datetime, timedelta
import getpass

# FIX: the original used raw_input(), which does not exist on Python 3
# (this file already imports xmlrpc.client, so it targets Python 3).
uname = input("Trac username:")
# FIX: read the password without echoing it; getpass was imported but unused.
# NOTE(review): the original prompt/assignment lines were credential-masked in
# the source; reconstructed from the surrounding usage — confirm.
pwd = getpass.getpass("Trac (windows) password:")
date = datetime.now().strftime("%Y-%m-%d")
print(date)

# NOTE(review): embedding credentials in the URL leaks them into logs and
# shell history; consider an authenticated transport instead.
p = ServerProxy("https://" + uname + ":" + pwd + "@trac.psenterprise.com/login/rpc")

print("Milestone 2021.1.0...")
#milestone 7.1.0
new = str(len(p.ticket.query("milestone=2021.1.0&status=new&max=0")))
assigned = str(len(p.ticket.query("milestone=2021.1.0&status=assigned&max=0")))
reopened = str(len(p.ticket.query("milestone=2021.1.0&status=reopened&max=0")))
closed = str(len(p.ticket.query("milestone=2021.1.0&status=closed&max=0")))
total = str(len(p.ticket.query("milestone=2021.1.0&max=0")))

# FIX: context manager ensures the CSV handle is closed even on error.
with open('710.csv', 'a') as f:
    f.write(date + "," + new + "," + assigned + "," + reopened + "," + closed + "," + total + "\n")
def _create_anonymous_server_proxy(self, url_server, url_location):
    """Build an XML-RPC proxy for ``url_location`` resolved against
    ``url_server``, using a digest transport with no credentials attached."""
    import urllib.parse
    from xmlrpc.client import ServerProxy

    # Resolve the endpoint relative to the server base URL.
    endpoint = urllib.parse.urljoin(url_server, url_location)
    return ServerProxy(endpoint, transport=DigestTransport())
def sendNZB(nzb, proper=False):
    """
    Sends NZB to NZBGet client

    :param nzb: nzb object
    :param proper: True if this is a Proper download, False if not. Defaults to False
    :returns: True when NZBGet accepted the item, False on any failure
    """
    # Bail out early if NZBGet was never configured.
    if sickrage.app.config.nzbget.host is None:
        sickrage.app.log.warning(
            "No NZBGet host found in configuration. Please configure it.")
        return False

    dupe_key = ""
    dupe_score = 0
    addToTop = False
    nzbgetprio = 0
    category = sickrage.app.config.nzbget.category

    show_object = find_show(nzb.series_id, nzb.series_provider_id)
    if not show_object:
        return False

    if show_object.is_anime:
        category = sickrage.app.config.nzbget.category_anime

    # Credentials are embedded in the XML-RPC URL (basic-auth style).
    url = "%(protocol)s://%(username)s:%(password)s@%(host)s/xmlrpc" % {
        "protocol": 'https' if sickrage.app.config.nzbget.use_https else 'http',
        "host": sickrage.app.config.nzbget.host,
        "username": sickrage.app.config.nzbget.username,
        "password": sickrage.app.config.nzbget.password
    }

    nzbget_rpc_client = ServerProxy(url)

    # Probe connectivity by writing a log line on the NZBGet side.
    try:
        if nzbget_rpc_client.writelog(
                "INFO",
                "SiCKRAGE connected to drop of %s any moment now." % (nzb.name + ".nzb")):
            sickrage.app.log.debug("Successful connected to NZBGet")
        else:
            sickrage.app.log.warning(
                "Successful connected to NZBGet, but unable to send a message")
    except client.socket.error:
        # NOTE(review): this message spanned a wrapped line in the pasted
        # source; rejoined with a single space — confirm original wording.
        sickrage.app.log.warning(
            "Please check your NZBGet host and port (if it is running). NZBGet is not responding to this combination")
        return False
    except ProtocolError as e:
        if e.errmsg == "Unauthorized":
            sickrage.app.log.warning("NZBGet username or password is incorrect.")
        else:
            sickrage.app.log.warning("NZBGet Protocol Error: " + e.errmsg)
        return False

    # NOTE(review): duplicate lookup — find_show() was already called with the
    # same arguments above; consider removing one of the two.
    show_object = find_show(nzb.series_id, nzb.series_provider_id)
    if not show_object:
        return False

    # if it aired recently make it high priority and generate DupeKey/Score
    for episode_number in nzb.episodes:
        episode_object = show_object.get_episode(nzb.season, episode_number)
        if dupe_key == "":
            dupe_key = f"SiCKRAGE-{episode_object.show.series_provider_id.name}-{episode_object.show.series_id}"
        dupe_key += "-" + str(episode_object.season) + "." + str(
            episode_object.episode)
        if date.today() - episode_object.airdate <= timedelta(days=7):
            addToTop = True
            nzbgetprio = sickrage.app.config.nzbget.priority
        else:
            # Older episodes go to the backlog category instead.
            category = sickrage.app.config.nzbget.category_backlog
            if show_object.is_anime:
                category = sickrage.app.config.nzbget.category_anime_backlog

    # Higher quality wins NZBGet's duplicate resolution; Propers get a bump.
    if nzb.quality != Qualities.UNKNOWN:
        dupe_score = nzb.quality * 100
    if proper:
        dupe_score += 10

    nzbcontent64 = None
    if nzb.provider_type == SearchProviderType.NZBDATA:
        # Raw NZB payloads are shipped base64-encoded over XML-RPC.
        data = nzb.extraInfo[0]
        nzbcontent64 = standard_b64encode(data)

    sickrage.app.log.info("Sending NZB to NZBGet")
    sickrage.app.log.debug("URL: " + url)

    try:
        # Find out if nzbget supports priority (Version 9.0+), old versions
        # beginning with a 0.x will use the old command
        nzbget_version_str = nzbget_rpc_client.version()
        nzbget_version = try_int(
            nzbget_version_str[:nzbget_version_str.find(".")])
        if nzbget_version == 0:
            # Legacy API: append(name, category, addToTop, content)
            if nzbcontent64 is not None:
                nzbget_result = nzbget_rpc_client.append(
                    nzb.name + ".nzb", category, addToTop, nzbcontent64)
            else:
                if nzb.provider_type == SearchProviderType.NZB:
                    # Old API cannot take a URL, so fetch and encode ourselves.
                    try:
                        nzbcontent64 = standard_b64encode(WebSession().get(
                            nzb.url).text)
                    except Exception:
                        return False
                nzbget_result = nzbget_rpc_client.append(
                    nzb.name + ".nzb", category, addToTop, nzbcontent64)
        elif nzbget_version == 12:
            # v12: separate append (content) / appendurl (URL) signatures.
            if nzbcontent64 is not None:
                nzbget_result = nzbget_rpc_client.append(
                    nzb.name + ".nzb", category, nzbgetprio, False,
                    nzbcontent64, False, dupe_key, dupe_score, "score")
            else:
                nzbget_result = nzbget_rpc_client.appendurl(
                    nzb.name + ".nzb", category, nzbgetprio, False,
                    nzb.url, False, dupe_key, dupe_score, "score")
        # v13+ has a new combined append method that accepts both (url and content)
        # also the return value has changed from boolean to integer
        # (Positive number representing NZBID of the queue item. 0 and negative numbers represent error codes.)
        elif nzbget_version >= 13:
            nzbget_result = True if nzbget_rpc_client.append(
                nzb.name + ".nzb",
                nzbcontent64 if nzbcontent64 is not None else nzb.url,
                category, nzbgetprio, False, False, dupe_key, dupe_score,
                "score") > 0 else False
        else:
            # Any other version (9-11): priority-aware but no dupe params.
            if nzbcontent64 is not None:
                nzbget_result = nzbget_rpc_client.append(
                    nzb.name + ".nzb", category, nzbgetprio, False,
                    nzbcontent64)
            else:
                nzbget_result = nzbget_rpc_client.appendurl(
                    nzb.name + ".nzb", category, nzbgetprio, False, nzb.url)

        if nzbget_result:
            sickrage.app.log.debug("NZB sent to NZBGet successfully")
            return True
        else:
            sickrage.app.log.warning(
                "NZBGet could not add %s to the queue" % (nzb.name + ".nzb"))
            return False
    except Exception:
        sickrage.app.log.warning(
            "Connect Error to NZBGet: could not add %s to the queue" % (nzb.name + ".nzb"))
        return False
def update_server(self):
    """Refresh ``self.server`` with a proxy pointing at the address
    currently reported by ``get_data()``."""
    address = self.get_data()['server']
    self.server = ServerProxy('http://' + address)
from electrum.i18n import _ from electrum.wallet import Multisig_Wallet, Abstract_Wallet from electrum.util import bh2u, bfh from electrum.gui.qt.transaction_dialog import show_transaction, TxDialog from electrum.gui.qt.util import WaitingDialog if TYPE_CHECKING: from electrum.gui.qt import ElectrumGui from electrum.gui.qt.main_window import ElectrumWindow ca_path = certifi.where() ssl_context = ssl.create_default_context(purpose=ssl.Purpose.SERVER_AUTH, cafile=ca_path) server = ServerProxy('https://cosigner.electrum.org/', allow_none=True, context=ssl_context) class Listener(util.DaemonThread): def __init__(self, parent): util.DaemonThread.__init__(self) self.daemon = True self.parent = parent self.received = set() self.keyhashes = [] def set_keyhashes(self, keyhashes): self.keyhashes = keyhashes def clear(self, keyhash):
def __init__(self, email, apikey="", password=""):
    """Store credentials, normalise the e-mail address, and open an
    XML-RPC proxy against the account-specific API endpoint."""
    clean_email = sanitize_email(email)
    self.apikey = apikey
    self.password = password
    # Hash is computed on the normalised address, not the raw input.
    self.email = clean_email
    self.email_hash = md5_hash(clean_email)
    # API_URI is a class-level URL template keyed by the e-mail hash.
    self._server = ServerProxy(self.API_URI.format(self.email_hash))
from PyQt5.QtWidgets import QPushButton
from electrum_pac import bitcoin, util, keystore, ecc
from electrum_pac import transaction
from electrum_pac.plugin import BasePlugin, hook
from electrum_pac.i18n import _
from electrum_pac.wallet import Multisig_Wallet
from electrum_pac.util import bh2u, bfh
from electrum_pac.gui.qt.transaction_dialog import show_transaction
from electrum_pac.gui.qt.util import WaitingDialog
import sys
import traceback

# Shared XML-RPC endpoint used by all cosigner-pool participants.
# NOTE(review): created at import time at module level — verify intended.
server = ServerProxy('https://cosigner.electrum.org/', allow_none=True)


class Listener(util.DaemonThread):
    # Daemon thread that watches the cosigner server for messages addressed
    # to this wallet's key hashes.  NOTE(review): the class likely continues
    # beyond this chunk.

    def __init__(self, parent):
        util.DaemonThread.__init__(self)
        self.daemon = True
        self.parent = parent
        self.received = set()   # keyhashes already handled this session
        self.keyhashes = []     # hashes identifying this wallet's cosigner slots

    def set_keyhashes(self, keyhashes):
        # Replace the set of key hashes this listener watches.
        self.keyhashes = keyhashes

    def clear(self, keyhash):
        # Delete the queued payload for *keyhash* from the shared server.
        server.delete(keyhash)
def main():
    """Command-line dispatcher for the toy blockchain node.

    Parses argv with new_parser() and forwards each recognised sub-command
    to the local XML-RPC node at :9999.  NOTE(review): sub-command presence
    is tested with hasattr(); depending on how new_parser() declares
    defaults these attributes may always exist — verify.
    """
    parser = new_parser()
    args = parser.parse_args()
    s = ServerProxy("http://localhost:9999")
    if hasattr(args, 'height'):
        # Dump the chain from the requested height.
        block_data = s.print_chain(args.height)
        print(block_data)
    if hasattr(args, 'address'):
        balance = s.get_balance(args.address)
        print("%s balance is %d" % (args.address, balance))
    if hasattr(args, 'createwallet'):
        address = s.create_wallet()
        print('Wallet address is %s' % address)
    if hasattr(args, 'start'):
        start()
    if hasattr(args, 'printwallet'):
        wallets = s.print_all_wallet()
        print('Wallet are:')
        for wallet in wallets:
            print("\t%s" % wallet)
    if hasattr(args, 'genesis_block'):
        address = s.create_genesis_block()
        print('Genesis Wallet is: %s' % address)
    if hasattr(args, 'send_from') \
            and hasattr(args, 'send_to') \
            and hasattr(args, 'send_amount'):
        s.send(args.send_from, args.send_to, args.send_amount)
    # change
    if hasattr(args, 'start_mining'):  # change
        print("start mining...")  # change
        s.start_find()  # change
        print("after start_find")
    # 7.10
    if hasattr(args, 'print_txpool'):  # change
        txs = s.print_txpool()  # change
        print(type(txs[0]))  # dict
        i = 0
        for tx in txs:  # change
            i += 1
            print("transaction: ", tx)  # change
            print("")
        print(i)
    if hasattr(args, 'sort_txpool'):
        txs6, no = s.sort_txpool()
        # for i in range(len(txs6)):
        #     txs6[i] = Transaction.deserialize(txs6[i])
        #     print("transaction: ", type(txs6[i]))
        utxo_set = UTXOSet()
        txs6 = utxo_set.clear_transactions(txs6)
        print(txs6)
    if hasattr(args, 'alive'):
        # Walk the whole local chain accumulating per-user payoffs.
        # NOTE(review): nesting below reconstructed from a collapsed paste —
        # confirm the loop structure against the original file.
        chain_doc = []
        bc1 = BlockChain()
        last_blo = bc1.get_last_block()
        last_height = last_blo.block_header.height
        j = 0
        m_total_payoff = -11
        u_total_payoff = 11.33
        users = {}
        for i in range(0, last_height+1):
            j += 1
            blo = bc1.get_block_by_height(i)
            if blo:
                txs = blo._transactions
                for tx in txs:
                    if tx.ip:
                        u_total_payoff -= tx.amount
                        m_total_payoff += tx.amount
                        if tx.ip in users.keys():
                            users[tx.ip] += (1.33 - tx.amount - 0.05 * j)
                        else:
                            users[tx.ip] = (1.33 - tx.amount - 0.05 * j)
                print(blo.serialize())
                print("")
            else:
                print("problems in the docs")
                break
        print(j)
        print("m_total_payoff ", m_total_payoff)
        print("u_total_payoff ", u_total_payoff)
        for key in users:
            print("the user ", key, "'s pay off is ", users[key])
def setUp(self): """ Bring up the test environment """ # create our test user self.test_user1 = User.objects.create(username="******", first_name="Test", last_name="User1", email="*****@*****.**", password="******", is_staff=False, is_superuser=False) # self.test_user2 = User.objects.create(username="******", first_name="Test", last_name="User2", email="*****@*****.**", password="******", is_staff=False, is_superuser=False) self.rogue_user = User.objects.create(username="******", first_name="Rogue", last_name="User", email="*****@*****.**", password="******", is_staff=False, is_superuser=False) self.test_admin = User.objects.create(username="******", first_name="Admin", last_name="User", email="*****@*****.**", password="******", is_staff=True, is_superuser=True) self.test_blog = Blog.objects.create( title="Test User 1's Space", description="A blog for Test User 1. Slippery when wet!", owner=User.objects.get(username="******"), site=Site.objects.get_current()) self.test_category1 = Category.objects.create( title="Test Category 1", description="Category mean namely for testing", blog=self.test_blog) self.test_category2 = Category.objects.create(title="Test Category 2", description="", blog=self.test_blog) self.post = Post.objects.create( title="Test User 1 Post", body="This is some stuff.\n\nSome stuff, you know.", blog=self.test_blog, author=self.test_user1.author) self.post.save() # enable remote access for test_user1 self.test_user1.author.remote_access_enabled = True self.test_user1.author.save() # disable remote access for test_user2 self.test_user2.author.remote_access_enabled = False self.test_user2.author.save() self.rogue_user.author.remote_access_enabled = True self.rogue_user.author.save() self.test_admin.author.remote_access_enabled = True self.test_admin.author.save() self.s = ServerProxy('http://localhost:8000/xmlrpc/', transport=TestTransport(), verbose=0)
def __init__(self, ecosystem):
    """Initialize instance.

    Sets up the base fetcher, then opens an XML-RPC proxy against the
    ecosystem's configured fetch URL.
    """
    super(PypiReleasesFetcher, self).__init__(ecosystem)
    # The base class stores the ecosystem; its fetch_url is the RPC endpoint.
    rpc_endpoint = self.ecosystem.fetch_url
    self._rpc = ServerProxy(rpc_endpoint)
def sendNZB(nzb, proper=False):
    """
    Sends NZB to NZBGet client

    :param nzb: nzb object
    :param proper: True if a Proper download, False if not.
    :returns: True when NZBget accepted the item, False on any failure
    """
    if app.NZBGET_HOST is None:
        log.warning('No NZBget host found in configuration.'
                    ' Please configure it.')
        return False

    addToTop = False
    nzbgetprio = 0
    category = app.NZBGET_CATEGORY
    if nzb.series.is_anime:
        category = app.NZBGET_CATEGORY_ANIME

    # Credentials embedded basic-auth style in the XML-RPC URL.
    url = 'http{}://{}:{}@{}/xmlrpc'.format(
        's' if app.NZBGET_USE_HTTPS else '',
        app.NZBGET_USERNAME,
        app.NZBGET_PASSWORD,
        app.NZBGET_HOST)

    # Verify connectivity before attempting to queue anything.
    if not NZBConnection(url):
        return False

    nzbGetRPC = ServerProxy(url)

    dupekey = ''
    dupescore = 0
    # if it aired recently make it high priority and generate DupeKey/Score
    for cur_ep in nzb.episodes:
        if dupekey == '':
            # indexer 1/2 distinguish the metadata providers used for the key.
            if cur_ep.series.indexer == 1:
                dupekey = 'Medusa-' + text_type(cur_ep.series.indexerid)
            elif cur_ep.series.indexer == 2:
                dupekey = 'Medusa-tvr' + text_type(cur_ep.series.indexerid)
        dupekey += '-' + text_type(cur_ep.season) + '.' + text_type(
            cur_ep.episode)
        if datetime.date.today() - cur_ep.airdate <= datetime.timedelta(
                days=7):
            addToTop = True
            nzbgetprio = app.NZBGET_PRIORITY
        else:
            # Older episodes go to the backlog category instead.
            category = app.NZBGET_CATEGORY_BACKLOG
            if nzb.series.is_anime:
                category = app.NZBGET_CATEGORY_ANIME_BACKLOG

    # Higher quality wins NZBget's duplicate resolution; Propers get a bump.
    if nzb.quality != Quality.UNKNOWN:
        dupescore = nzb.quality * 100
    if proper:
        dupescore += 10

    nzbcontent64 = None
    if nzb.result_type == 'nzbdata':
        # Raw NZB payloads travel base64-encoded over XML-RPC.
        data = nzb.extra_info[0]
        nzbcontent64 = standard_b64encode(data).decode()

    log.info('Sending NZB to NZBget')
    log.debug('URL: {}', url)

    try:
        # Find out if nzbget supports priority (Version 9.0+),
        # old versions beginning with a 0.x will use the old command
        nzbget_version_str = nzbGetRPC.version()
        nzbget_version = try_int(
            nzbget_version_str[:nzbget_version_str.find('.')])
        if nzbget_version == 0:
            # Legacy API: append(name, category, addToTop, content)
            if nzbcontent64:
                nzbget_result = nzbGetRPC.append(nzb.name + '.nzb', category,
                                                 addToTop, nzbcontent64)
            else:
                if nzb.result_type == 'nzb':
                    if not nzb.provider.login():
                        return False
                    # TODO: Check if this needs exception handling
                    data = nzb.provider.session(nzb.url).content
                    if data is None:
                        return False
                    nzbcontent64 = standard_b64encode(data).decode()
                nzbget_result = nzbGetRPC.append(nzb.name + '.nzb', category,
                                                 addToTop, nzbcontent64)
        elif nzbget_version == 12:
            # v12: separate append (content) / appendurl (URL) signatures.
            if nzbcontent64 is not None:
                nzbget_result = nzbGetRPC.append(nzb.name + '.nzb', category,
                                                 nzbgetprio, False,
                                                 nzbcontent64, False, dupekey,
                                                 dupescore, 'score')
            else:
                nzbget_result = nzbGetRPC.appendurl(nzb.name + '.nzb',
                                                    category, nzbgetprio,
                                                    False, nzb.url, False,
                                                    dupekey, dupescore,
                                                    'score')
        # v13+ has a new combined append method that accepts both (url and
        # content) also the return value has changed from boolean to integer
        # (Positive number representing NZBID of the queue item. 0 and negative
        # numbers represent error codes.)
        elif nzbget_version >= 13:
            nzbget_result = nzbGetRPC.append(
                nzb.name + '.nzb',
                nzbcontent64 if nzbcontent64 is not None else nzb.url,
                category, nzbgetprio, False, False, dupekey, dupescore,
                'score') > 0
        else:
            # Any other version (9-11): priority-aware but no dupe params.
            if nzbcontent64 is not None:
                nzbget_result = nzbGetRPC.append(nzb.name + '.nzb', category,
                                                 nzbgetprio, False,
                                                 nzbcontent64)
            else:
                nzbget_result = nzbGetRPC.appendurl(nzb.name + '.nzb',
                                                    category, nzbgetprio,
                                                    False, nzb.url)

        if nzbget_result:
            log.debug('NZB sent to NZBget successfully')
            return True
        else:
            log.warning('NZBget could not add {name}.nzb to the queue',
                        {'name': nzb.name})
            return False
    except Exception:
        log.warning(
            'Connect Error to NZBget: could not add {name}.nzb to the'
            ' queue', {'name': nzb.name})
        return False
import sys
from util import estimate_pi
from statistics import mean
from math import pi
from time import time
# Import the synchronous version of ServerProxy
from xmlrpc.client import ServerProxy

# NOTE(review): `mean` and `pi` are imported but unused in this visible
# portion — presumably used below this chunk; confirm before removing.

# Create the proxy in a nice way so it gets closed when we are done.
with ServerProxy('http://localhost:9000') as proxy:
    # Ensure we got enough arguments coming in
    assert len(sys.argv) >= 2, "Must supply at least 1 argument.\n" + \
        "Usage: rpc_sync_pi_master.py N [argument2 ...]"
    # Split incoming arguments into the number of throws to use.
    # Note that sys.argv[0] is the name of the script itself.
    scriptname, N, *arguments = sys.argv
    # split the workload between ourselves and the remote
    # note: // is integer division
    N = int(N)
    N_remote = N // 2
    N_local = N - N_remote
    start_time = time()
    print(f"Requesting that slave estimate pi with {N_remote} throws.")
    pi_remote = proxy.estimate_pi(N_remote)
    print(f"Result of remote estimation: pi={pi_remote:.010f}")
    print(f"Master begins estimating pi with {N_local} throws.")
    pi_local = estimate_pi(N_local)
    print(f"Result of local estimation: pi={pi_local:.010f}")
from xmlrpc.client import ServerProxy
import sys


def help():
    """Print the command-line usage string."""
    print("Usage : remote_finger [-lmsp] user..")


if __name__ == '__main__':
    # Drop the script name so only finger arguments remain.
    sys.argv = sys.argv[1:]
    if not sys.argv:
        help()
        sys.exit(1)
    # Forward the argument list to the remote finger service and echo the reply.
    client = ServerProxy('http://localhost:8000')
    print(client.finger(sys.argv))
    sys.exit(0)
rospy.logerr("[cpu monitor] failed to get api of node %s (%s)" % (node, node_api)) continue ros_ip = node_api[7:] # strip http:// ros_ip = ros_ip.split(':')[0] # strip :<port>/ local_node = "localhost" in node_api or \ "127.0.0.1" in node_api or \ (this_ip is not None and this_ip == ros_ip) or \ subprocess.check_output("hostname").decode('utf-8').strip() in node_api if not local_node: ignored_nodes.add(node) rospy.loginfo("[cpu monitor] ignoring node %s with URI %s" % (node, node_api)) continue try: resp = ServerProxy(node_api).getPid('/NODEINFO') except: rospy.logerr("[cpu monitor] failed to get pid of node %s (api is %s)" % (node, node_api)) else: try: pid = resp[2] except: rospy.logerr("[cpu monitor] failed to get pid for node %s from NODEINFO response: %s" % (node, resp)) else: node_map[node] = Node(name=node, pid=pid) rospy.loginfo("[cpu monitor] adding new node %s" % node) for node_name, node in list(node_map.items()): if node.alive(): node.publish() else:
def getProxy(self, server_url):
    """Return a fresh XML-RPC client bound to *server_url*."""
    proxy = ServerProxy(server_url)
    return proxy
continue m = re.search(s, r.text) if m is not None: n = m.group(1) else: n = None print(result) first = 12345 r = "BZh91AY%26SY%94%3A%E2I%00%00%21%19%80P%81%11%00%AFg%9E%A0+%00hE%3DM%B5%23%D0%D4%D1%E2%8D%06%A9%FA%26S%D4%D3%21%A1%EAi7h%9B%9A%2B%BF%60%22%C5WX%E1%ADL%80%E8V%3C%C6%A8%DBH%2632%18%A8x%01%08%21%8DS%0B%C8%AF%96KO%CA2%B0%F1%BD%1Du%A0%86%05%92s%B0%92%C4Bc%F1w%24S%85%09%09C%AE%24%90" print(bz2.decompress(unquote_to_bytes(r.replace("+", " ")))) conn = ServerProxy("http://www.pythonchallenge.com/pc/phonebook.php") print(conn.phone("Leopold")) url = "http://www.pythonchallenge.com/pc/stuff/violin.php" msg = "the flowers are on their way" req = requests.get(url, headers = { "Cookie": "info=" + quote_plus(msg)}) print(req.text) # solve(first)
def get_node_by_addr(addr):
    """Return the ``Api`` handle of the node RPC server at host:port *addr*."""
    endpoint = 'http://' + addr
    proxy = ServerProxy(endpoint)
    # Attribute access on ServerProxy is lazy; no network traffic happens here.
    return proxy.Api
def connServerProxy(url, ProxyServer):
    # Build an XML-RPC client whose HTTP traffic is routed through *ProxyServer*.
    transport = ProxiedTransport(ProxyServer)
    # NOTE(review): set_proxy() is called with *url* even though the transport
    # was already constructed with ProxyServer — confirm against
    # ProxiedTransport's API whether this should receive the proxy address.
    transport.set_proxy(url)
    print("走代理")  # runtime log line kept verbatim ("going through proxy")
    return ServerProxy(url, transport=transport)
# DESC : # AUTHOR : Alex Stocks # VERSION : 1.0 # LICENCE : Apache License 2.0 # EMAIL : [email protected] # MOD : 2019-02-01 14:27 # FILE : multicall.py # ****************************************************** # import xmlrpc.client from xmlrpc.client import ServerProxy def testMulticall(proxy): marshalled_list = [ {'methodName': 'supervisor.getSupervisorVersion', 'params': []}, {'methodName': 'supervisor.getAPIVersion', 'params': []}, {'methodName': 'supervisor.getState', 'params': []}, {'methodName': 'supervisor.getPID', 'params': []}, ] supervisor_version, api_version, state, pid = proxy.system.multicall(marshalled_list) def testMethods(proxy): # methods = proxy.supervisor.listMethods() methods = proxy.system.listMethods() print("supervisord list methods:%s " % (methods)) if __name__ == '__main__': proxy = ServerProxy('http://192.168.11.110:19001') testMethods(proxy)
bootstrap_servers=['localhost:9092'], auto_offset_reset='earliest', enable_auto_commit=True, group_id='pollsystemTest5-group', value_deserializer=lambda x: loads(x.decode('utf-8'))) consumer.subscribe( ['pollelonresponsetopic']) consumer1.subscribe( ['polltoryresponsetopic']) consumer2.subscribe( ['launcherTopic']) consumer3.subscribe( ['anomalyTopic']) s = ServerProxy('http://*****:*****@cluster0.ibhol.mongodb.net/blueOrigin?retryWrites=true&w=majority") db = client.get_database('blueOrigin') db.payloads.delete_one({"satellite": "CATACOMBE"}) @given('Marseille un site où la pression du vent est actuellement normale') def step_impl(context): pass
fp.close()  # release the handle left open by the preceding download step

# Decompress the downloaded bz2 payload and show the embedded hint.
with bz2.open("my.bz2", "rb") as f:
    hint = f.read()
print(hint)
# This gave:
# b'is it the 26th already? call his father and inform him that "the flowers are on their way". he\'ll understand.'
# 26th? Mozart's father? Call him? Wat?
# Level 13 had code for calling using the RPC method. Let's copy the code and call Mozart.
from xmlrpc.client import ServerProxy, Error

with ServerProxy("http://www.pythonchallenge.com/pc/phonebook.php") as proxy:
    try:
        r = proxy.phone("Mozart")
    except Error as ev:
        print(f"Error happened: {ev}")
    print(f"Answer: {r}")
    # Answer: He is not the evil
    # We googled Mozart's father. His name was Leopold.
    try:
        r = proxy.phone("Leopold")
    except Error as ev:
        print(f"Error happened: {ev}")
    print(f"Answer: {r}")
    # Answer: 555-VIOLIN
# epoch to and from readable timestamp
timeformat = "%Y-%m-%dT%H:%M:%S"


def readable_to_epoch(string):
    """Parse a local-time timestamp in ``timeformat`` into epoch seconds."""
    return int(datetime.strptime(string, timeformat).timestamp())


def epoch_to_readable(epoch):
    """Format epoch seconds as a local-time timestamp using ``timeformat``."""
    # FIX: dropped a pointless f-string wrapper around strftime's result.
    return time.strftime(timeformat, time.localtime(epoch))

###

r2lab_url = "https://r2labapi.inria.fr/PLCAPI/"  # where to send API calls
proxy = ServerProxy(r2lab_url, allow_none=True)  # expected by all API calls


def build_api_auth(account, password):
    """Return the auth struct expected as first argument by all PLCAPI calls."""
    return {
        'Username': account,
        'AuthString': password,
        'AuthMethod': 'password',
    }


def print_future_leases(account, password):
    """Query the testbed for today's leases.

    NOTE(review): this chunk appears truncated — ``leases`` is fetched but
    not yet printed here; the remainder presumably follows in the file.
    """
    auth = build_api_auth(account, password)
    leases = proxy.GetLeases(auth, {'day': 0})
def add_new_user(self, username, email, image_filename, password):
    """Forward a user-creation request to the honeypot's XML-RPC service."""
    endpoint = f"http://{self.honeypot_ip}:50000"
    # Context manager ensures the underlying transport is closed after the call.
    with ServerProxy(endpoint, allow_none=True) as rpc:
        rpc.add_new_user(username, email, image_filename, password)
from electrum_acm import bitcoin, util
from electrum_acm import transaction
from electrum_acm.plugins import BasePlugin, hook
from electrum_acm.i18n import _
from electrum_acm.wallet import Multisig_Wallet
from electrum_acm.util import bh2u, bfh
from electrum_acm_gui.qt.transaction_dialog import show_transaction
import sys
import traceback

# Fixed location of the shared cosigner-pool relay server.
PORT = 12344
HOST = 'cosigner.electrum.org'
# Module-level XML-RPC proxy shared by the whole plugin.
server = ServerProxy('http://%s:%d' % (HOST, PORT), allow_none=True)


class Listener(util.DaemonThread):
    # Daemon thread that watches the relay server for transactions addressed
    # to this wallet's key hashes.  NOTE(review): the class likely continues
    # beyond this chunk.

    def __init__(self, parent):
        util.DaemonThread.__init__(self)
        self.daemon = True
        self.parent = parent
        self.received = set()   # keyhashes already processed this session
        self.keyhashes = []     # hashes identifying this wallet's cosigner slots

    def set_keyhashes(self, keyhashes):
        # Replace the set of key hashes this listener watches.
        self.keyhashes = keyhashes

    def clear(self, keyhash):
        # Remove the queued message for *keyhash* from the relay server.
        server.delete(keyhash)
def __init__(self, address):
    """Wrap *address* (host:port) in an XML-RPC proxy stored on ``self.server``."""
    self.server = ServerProxy("http://%s" % address, allow_none=True)
def getProxy(self, url):
    """Return an XML-RPC client for *url* that decodes dates and binary
    payloads into built-in Python types instead of xmlrpc wrapper classes."""
    proxy = ServerProxy(url, use_builtin_types=True)
    return proxy
def larch_server_cli():
    """command-line program to control larch XMLRPC server

    Parses argv for a port (-p), the -n/--next and -q/--quiet flags, and a
    single sub-command, then starts/stops/queries the server accordingly.
    """
    # FIX: help-text typos corrected ("avaialable" -> "available",
    # stray '<' after "server", "messaages" -> "messages").
    command_desc = """
command must be one of the following:
  start       start server on specified port
  stop        stop server on specified port
  restart     restart server on specified port
  next        start server on next available port (see also '-n' option)
  status      print a short status message: whether server is running on port
  report      print a multi-line status report
"""
    parser = ArgumentParser(description='run larch XML-RPC server',
                            formatter_class=RawDescriptionHelpFormatter,
                            epilog=command_desc)
    parser.add_argument("-p", "--port", dest="port", default='4966',
                        help="port number for remote server [4966]")
    parser.add_argument("-n", "--next", dest="next", action="store_true",
                        default=False,
                        help="show next available port, but do not start [False]")
    parser.add_argument("-q", "--quiet", dest="quiet", action="store_true",
                        default=False,
                        help="suppress messages [False]")
    parser.add_argument("command", nargs='?', help="server command ['status']")

    args = parser.parse_args()
    port = int(args.port)
    command = args.command or 'status'
    command = command.lower()

    def smsg(port, txt):
        # Status-message helper; silenced by -q/--quiet.
        if not args.quiet:
            print('larch_server port=%i: %s' % (port, txt))

    if args.next:
        # Report the next free port and exit without starting anything.
        port = get_next_port(port=port)
        print(port)
        sys.exit(0)

    server_state = test_server(port=port)

    if command == 'start':
        if server_state == CONNECTED:
            smsg(port, 'already running')
        elif server_state == NOT_IN_USE:
            spawn_server(port=port)
            smsg(port, 'started')
        else:
            smsg(port, 'port is in use, cannot start')

    elif command == 'stop':
        if server_state == CONNECTED:
            # Ask the running server to shut itself down over XML-RPC.
            ServerProxy('http://localhost:%d' % (port)).shutdown()
            smsg(port, 'stopped')

    elif command == 'next':
        port = get_next_port(port=port)
        spawn_server(port=port)
        smsg(port, 'started')

    elif command == 'restart':
        if server_state == CONNECTED:
            ServerProxy('http://localhost:%d' % (port)).shutdown()
            # Give the old process time to release the port before respawning.
            sleep(POLL_TIME)
            spawn_server(port=port)

    elif command == 'status':
        if server_state == CONNECTED:
            smsg(port, 'running')
            sys.exit(0)
        elif server_state == NOT_IN_USE:
            smsg(port, 'not running')
            sys.exit(1)
        else:
            smsg(port, 'port is in use by non-larch server')

    elif command == 'report':
        if server_state == CONNECTED:
            s = ServerProxy('http://localhost:%d' % (port))
            info = s.get_client_info()
            last_event = info.get('last_event', 0)
            last_used = ctime(last_event)
            serverid = int(info.get('pid_server', 0))
            serverport = int(info.get('port', 0))
            procid = int(info.get('pid', 0))
            appname = info.get('app', 'unknown')
            machname = info.get('machine', 'unknown')
            username = info.get('user', 'unknown')
            # Remaining lifetime = configured keepalive minus idle time so far.
            keepalive_time = info.get('keepalive_time', -1)
            keepalive_time += (last_event - time())
            keepalive_units = 'seconds'
            if keepalive_time > 150:
                keepalive_time = round(keepalive_time / 60.0)
                keepalive_units = 'minutes'
            if keepalive_time > 150:
                keepalive_time = round(keepalive_time / 60.0)
                keepalive_units = 'hours'
            print('larch_server report:')
            print(' Server Port Number = %s' % serverport)
            print(' Server Process ID = %s' % serverid)
            print(' Server Last Used = %s' % last_used)
            print(' Server will expire in %d %s if not used.' %
                  (keepalive_time, keepalive_units))
            print(' Client Machine Name = %s' % machname)
            print(' Client Process ID = %s' % str(procid))
            print(' Client Application = %s' % appname)
            print(' Client User Name = %s' % username)
        elif server_state == NOT_IN_USE:
            smsg(port, 'not running')
            sys.exit(1)
        else:
            smsg(port, 'port is in use by non-larch server')

    else:
        print("larch_server: unknown command '%s'. Try -h" % command)
def get(self):
    """
    :returns:: XMLRPC proxy for communicating with master, ``xmlrpc.client.ServerProxy``
    """
    # A new proxy is created per call; ServerProxy does not connect until used.
    master = ServerProxy(self.uri)
    return master