def processMessage(self, msg, fromAddr):
    """Process a message received from the network.

    HELO messages set our own identity (if still unknown) and refresh or
    add the sending peer in ``self.hosts``.  For any other message type
    the sender is taken from ``msg.origin``, falling back to
    ``msg.lastHop``.

    :param msg: a message object (e.g. ``message.HeloMessage``)
    :param fromAddr: ``(ip, port)`` tuple the datagram arrived from
    """
    if isinstance(msg, message.HeloMessage):
        # set your own ip if you dont know it
        if self.ip == "null":
            self.ip = msg.recipientIP
            self.key = Host.constructKey(self.ip, self.port)
            logging.info("You're now connected to a Hachat-network. Your key is " + self.key)
        senderIP = fromAddr[0]
        key = Host.constructKey(senderIP, msg.senderPort)
        logging.debug("received: HELO from " + str(key))
        if key in self.hosts:
            # if you know Host set status that Host had contact
            host = self.hosts[key]
            host.lastSeen = 1
            logging.debug(key + " already in hostlist - refreshing lastSeen")
        else:
            # add new host to hostlist
            self.addToHosts(key)
    # only accept Messages from Peers in self.hosts
    else:
        # BUGFIX: use "except ... as e" (valid on Python 2.6+ and required
        # on Python 3) instead of the removed "except Exception, e" form.
        try:
            sender = msg.origin
        except Exception:
            logging.debug("Msg needs origin, but doesn't have one " + str(msg))
            try:
                # overriding sender with lastHop
                sender = msg.lastHop
            except Exception:
                pass
class TestCases(unittest.TestCase):
    """Tests for ``Host``: construction, localhost detection, equality
    and real-IP lookup."""

    def setUp(self):
        # Loopback and remote hosts, built both from raw IPs and from
        # resolvable hostnames.
        self.localhost_ip = Host(ip="127.54.78.124")
        self.localhost_hostname = Host(hostname="localhost")
        self.remotehost_ip = Host(ip="192.0.0.1")
        self.remotehost_hostname = Host(hostname="www.google.com")

    def test_unknown_hostname(self):
        # An unresolvable hostname must be rejected with ValueError.
        self.assertRaises(ValueError, Host, hostname="unknownhostname")

    def test_wrong_args(self):
        # Constructing with neither ip nor hostname is an error.
        #self.assertRaises(ValueError, Host, ip=None, hostname=None)
        with self.assertRaises(ValueError):
            Host(ip=None, hostname=None)

    def test_localhost(self):
        self.assertTrue(self.localhost_ip.is_localhost())
        self.assertTrue(self.localhost_hostname.is_localhost())

    def test_remote_hosts(self):
        self.assertFalse(self.remotehost_ip.is_localhost())
        self.assertFalse(self.remotehost_hostname.is_localhost())

    def test_localhost_equality(self):
        # Two different 127.x.x.x addresses are expected to compare equal.
        self.assertEqual(Host(ip="127.0.0.1"), Host(ip="127.53.1.245"))

    def test_get_real_ip(self):
        # Resolving the real IP of a loopback host is not implemented;
        # a remote host's real IP is just its configured IP.
        #self.assertFalse(
        #    Host(ip=self.localhost_ip.get_real_ip()).is_localhost())
        self.assertRaises(NotImplementedError, self.localhost_ip.get_real_ip)
        self.assertEqual(
            self.remotehost_ip.ip, self.remotehost_ip.get_real_ip())
def test_glop(self):
    """Round-trip the bundled glop grammar through the CLI pretty-printer.

    Reads ``test_grammars/glop.g`` next to the host module and checks that
    ``-p`` (pretty-print) writes an identical grammar to ``new_glop.g``
    with exit code 0.
    """
    h = Host()
    glop_contents = h.read(h.join(h.dirname(h.path_to_host_module()),
                                  'test_grammars', 'glop.g'))
    files = {'glop.g': glop_contents}
    output_files = files.copy()
    # Pretty-printing an already-canonical grammar must be the identity.
    output_files['new_glop.g'] = glop_contents
    self.check_cmd(['-p', '-g', 'glop.g', '-o', 'new_glop.g'],
                   files=files, returncode=0, output_files=output_files)
def exportgroup_create(self, name, project, tenant, varray,
                       exportgrouptype, export_destination=None):
    '''
    Create the export group with the given name under a project.

    parameters:
        name               : Name of the export group.
        project            : Name of the project path.
        tenant             : Container tenant name.
        varray             : Virtual array name.
        exportgrouptype    : 'Cluster', 'Host' or 'Exclusive'.
        export_destination : Cluster/host the group is exported to.
    return
        returns with status of creation.
    '''
    # check for existance of export group.
    try:
        status = self.exportgroup_show(name, project, tenant)
    except SOSError as e:
        if e.err_code == SOSError.NOT_FOUND_ERR:
            # BUGFIX: compare against None with "is", not "==".
            if tenant is None:
                tenant = ""
            fullproj = tenant + "/" + project
            projuri = Project(self.__ipAddr, self.__port).project_query(fullproj)
            nhuri = VirtualArray(self.__ipAddr, self.__port).varray_query(varray)
            parms = {
                'name': name,
                'project': projuri,
                'varray': nhuri,
                'type': exportgrouptype
            }
            if exportgrouptype and export_destination:
                # CLEANUP: the try/except blocks that only re-raised the
                # same SOSError were no-ops and have been removed.
                if exportgrouptype == 'Cluster':
                    cluster_obj = Cluster(self.__ipAddr, self.__port)
                    cluster_uri = cluster_obj.cluster_query(export_destination, fullproj)
                    parms['clusters'] = [cluster_uri]
                elif exportgrouptype == 'Host':
                    host_obj = Host(self.__ipAddr, self.__port)
                    host_uri = host_obj.query_by_name(export_destination)
                    parms['hosts'] = [host_uri]
                # else: exportgrouptype == Exclusive
                # TODO: add code for initiator
            body = json.dumps(parms)
            (s, h) = common.service_json_request(self.__ipAddr, self.__port,
                                                 "POST", self.URI_EXPORT_GROUP,
                                                 body)
            o = common.json_decode(s)
            return o
        else:
            raise e
    if status:
        raise SOSError(SOSError.ENTRY_ALREADY_EXISTS_ERR,
                       "Export group with name " + name + " already exists")
def exportgroup_remove_host(self, exportgroupname, tenantname, projectname, hostlabels, sync):
    """Remove the given hosts from an export group.

    Resolves the export group and every host label to URIs, then posts a
    ``host_changes`` removal request, honouring the ``sync`` flag.
    """
    group_uri = self.exportgroup_query(exportgroupname, projectname, tenantname)
    host_lookup = Host(self.__ipAddr, self.__port)
    uris = [host_lookup.query_by_name(label, tenantname) for label in hostlabels]
    payload = {"host_changes": self._remove_list(uris)}
    response = self.send_json_request(group_uri, payload)
    return self.check_for_sync(response, sync)
class HostTest(unittest.TestCase):
    """Exercises Host: hostname resolution and per-storage block stats."""

    def setUp(self):
        self.host = Host('188.184.9.234:1000')
        # Five blocks spread over three storages: s1 x2, s2 x2, s3 x1.
        for storage in ('s1', 's1', 's2', 's2', 's3'):
            self.host.addBlock(storage)

    def testHostname(self):
        self.assertEqual(self.host.hostname, 'webrlb01.cern.ch')
        # An address that cannot resolve is kept verbatim.
        unresolved = Host('288.184.9.234:1000')
        self.assertEqual(unresolved.hostname, '288.184.9.234')

    def test_addBlock(self):
        for storage, expected in (('s1', 2), ('s2', 2), ('s3', 1)):
            self.assertEqual(self.host.storages[storage].blocks, expected)

    def test_blocksPerDiskAsString(self):
        self.assertEqual(self.host.blocksPerDiskAsString(), '1 2 2 ')

    def test_totalBlocks(self):
        self.assertEqual(self.host.totalBlocks(), 5)

    def test_avgBlocks(self):
        self.assertAlmostEqual(self.host.avgBlocks(), 1.66666, 4)
def exportgroup_create(self, name, project, tenant, varray, exportgrouptype, export_destination=None): """ This function will take export group name and project name as input and it will create the Export group with given name. parameters: name : Name of the export group. project: Name of the project path. tenant: Container tenant name. return returns with status of creation. """ # check for existance of export group. try: status = self.exportgroup_show(name, project, tenant) except SOSError as e: if e.err_code == SOSError.NOT_FOUND_ERR: if tenant is None: tenant = "" fullproj = tenant + "/" + project projObject = Project(self.__ipAddr, self.__port) projuri = projObject.project_query(fullproj) varrayObject = VirtualArray(self.__ipAddr, self.__port) nhuri = varrayObject.varray_query(varray) parms = {"name": name, "project": projuri, "varray": nhuri, "type": exportgrouptype} if exportgrouptype and export_destination: if exportgrouptype == "Cluster": cluster_obj = Cluster(self.__ipAddr, self.__port) try: cluster_uri = cluster_obj.cluster_query(export_destination, fullproj) except SOSError as e: raise e parms["clusters"] = [cluster_uri] elif exportgrouptype == "Host": host_obj = Host(self.__ipAddr, self.__port) try: host_uri = host_obj.query_by_name(export_destination) except SOSError as e: raise e parms["hosts"] = [host_uri] body = json.dumps(parms) (s, h) = common.service_json_request(self.__ipAddr, self.__port, "POST", self.URI_EXPORT_GROUP, body) o = common.json_decode(s) return o else: raise e if status: raise SOSError(SOSError.ENTRY_ALREADY_EXISTS_ERR, "Export group with name " + name + " already exists")
def __init__(self):
    """Create a randomly named host entity with a random point value."""
    import random
    from host import Host

    letters = ['s', 'w', 'a', 'e', 'l', 'v', 'm', 'i']
    # Position x may only draw from letters[0..x], giving a 7-letter name.
    picks = []
    for upper in range(1, len(letters)):
        picks.append(letters[random.randint(0, upper)])
    self._name = picks[0].capitalize() + ''.join(picks[1:])
    self._description = Host.getDescription()
    self._disappear = Host.getDisappearance()
    self._pointsGiven = random.randint(1, 10)
def __init__(self, name, mymac=(0,0,0,0,0,0), myip=(0,0,0,0), destip=(0,0,0,0), pkts=1):
    """Traffic-generating host.

    :param name: node name, forwarded to ``Host.__init__``
    :param mymac: 6-tuple MAC address of this host
    :param myip: 4-tuple IP address of this host
    :param destip: a single 4-tuple destination or a list of them
    :param pkts: number of packets this host should send
    """
    Host.__init__(self, name, mymac, myip)
    self.pkts_to_send = pkts
    self.pkts_sent = 0
    self.received_pkt_count = 0
    # Index into self.destip of the destination to use next.
    self.current_destip_idx = 0
    if isinstance(destip, list):
        # NOTE(review): on Python 3 this leaves a lazy map object rather
        # than a list (indexing would fail) -- confirm this file targets
        # Python 2.
        self.destip = map(lambda x: IpAddress(x), destip)
    else:
        self.destip = [IpAddress(destip)]
    self.flows_started = 0
    self.connections = {}
def add_host(self, name, port, healthcheck):
    """Register ``healthcheck`` for the host ``name:port``.

    If the host is already tracked, the healthcheck is attached to the
    stored instance and the host is reset; if that healthcheck is already
    attached, True is returned.  Unknown hosts get the healthcheck and are
    recorded in ``self.known_hosts`` with the current timestamp.

    NOTE(review): only the "healthcheck already attached" branch returns a
    value (True); every other path implicitly returns None -- confirm
    callers rely on that asymmetry.
    """
    h = Host(name,port)
    all_hosts = self.hosts
    if h in all_hosts:
        # Work on the stored Host instance, not the freshly built one.
        host = all_hosts[all_hosts.index(h)]
        if healthcheck not in host.healthchecks:
            host.add_healthcheck(healthcheck)
            host.reset()
        else:
            return True
    else:
        h.add_healthcheck(healthcheck)
        self.known_hosts[h] = datetime.datetime.now()
def main():
    """Entry point: install the exit handler, configure logging, serve on 80."""
    set_exit_handler(on_exit)

    # Verbose, timestamped debug logging.
    logging.basicConfig(
        level=logging.DEBUG,
        datefmt="%Y-%m-%d %H:%M:%S",
        format='%(asctime)s.%(msecs)d %(levelname)s %(module)s - %(funcName)s: %(message)s',
    )

    server = Host()
    server.start_server(80)
def run(self):
    """Thread body: scan the network for hosts every 30 seconds.

    The network address comes from ``-n``/``--network`` on the command
    line; otherwise it is guessed from the machine's IP (assuming a
    255.255.255.0 netmask).  Each newly discovered host gets its own
    ``Host`` thread, collected in ``self.threads``.
    """
    self.known_hosts = []
    if '-n' in argv:
        network_address = argv[argv.index('-n') + 1]
    elif '--network' in argv:
        network_address = argv[argv.index('--network') + 1]
    else:
        network_address = get_network_address()
    if not network_address:
        print("Cannot find network address... program will continue without network scanning!\n" +
              "If this trouble persist, try providing the network address in the launch command!\n" +
              "Press CTRL-C to terminate!")
        exit()
    while True:
        hosts = host_discovery(network_address)
        for host in hosts:
            if host not in self.known_hosts:
                self.known_hosts.append(host)
                print("Starting thread for host %s" % host)
                thread = Host(host)
                self.threads.append(thread)
                thread.start()
        for thread in self.threads:
            # BUGFIX: is_alive is a method; the original tested the bound
            # method object itself ("not thread.is_alive"), which is always
            # falsy-negative, so dead hosts were never pruned.
            if not thread.is_alive():
                self.known_hosts.remove(thread.info['ip'])
        sleep(30)
def __init__(self, key, access, cluster):
    """Connect to EC2 instance metadata, CloudWatch and a local redis.

    :param key: AWS access key id
    :param access: AWS secret access key
    :param cluster: name (identity) of the cluster master
    """
    try:
        url = "http://169.254.169.254/latest/meta-data/"
        public_hostname = urlopen(url + "public-hostname").read()
        zone = urlopen(url + "placement/availability-zone").read()
        # Drop the trailing AZ letter to get the region name.
        region = zone[:-1]
    except Exception:
        # BUGFIX: the bare "except:" also swallowed SystemExit and
        # KeyboardInterrupt; only ordinary errors (e.g. not running on
        # EC2) should trigger this abort.
        sys.exit("We should be getting user-data here...")
    # the name (and identity) of the cluster (the master)
    self.cluster = cluster
    self.redis = redis.StrictRedis(host='localhost', port=6379)
    endpoint = "monitoring.{0}.amazonaws.com".format(region)
    region_info = RegionInfo(name=region, endpoint=endpoint)
    self.cloudwatch = CloudWatchConnection(key, access, region=region_info)
    self.namespace = '9apps/redis'
    self.events = Events(key, access, cluster)
    # get the host, but without the logging
    self.host = Host(cluster)
    self.node = self.host.get_node()
def initialize(self):
    """Populate the host list from a subnet sweep, or register the single
    configured address when no subnet is set."""
    if not self.subnet:
        single = Host()
        single.ip = self.address
        self.add_host(single)
        return
    address_range = self.get_address_range(self.subnet)
    print('Size of network: ' + str(address_range.size()))
    self.alive_hosts = self.get_alive_hosts(address_range)
    for counter, ip in enumerate(self.alive_hosts):
        entry = Host()
        entry.ip = ip
        entry.id += str(counter)  # make the id unique per discovery index
        self.host_list.append(entry)
def post(self):
    """Handle a host-discovery POST from a switch.

    The switch name comes from the ``X-switch-name`` header; ip, mac,
    switch and switch_port come from the JSON body.  A known host is
    updated (HTTP 200), a new one created and announced via ``ready``
    (HTTP 201); either way the host's uuid is returned as JSON.
    """
    self.set_header("Content-Type", 'application/json; charset="utf-8"')
    switch = self.request.headers.get("X-switch-name")
    params = json.loads(self.request.body.decode())
    print "Discovered %s from %s with %s" % (params["ip"], switch, params["mac"])
    host = None
    # host already known
    if params["ip"] in self.topology.hosts.keys():
        self.set_status(200)
        host = self.topology.hosts[params["ip"]]
        host.update(mac=params["mac"], switch=params["switch"],
                    switch_port=params["switch_port"])
    # host just discovered
    else:
        self.set_status(201)
        host = Host(ip=params["ip"], mac=params["mac"],
                    switch=params["switch"],
                    switch_port=params["switch_port"])
        self.topology.hosts[params["ip"]] = host
        ready(self.topology)
    # return the uuid for later use
    result = {"uuid": str(host.uuid)}
    self.finish(json.dumps(result))
def current_host(config):
    """Return a Host implementation instance for the configured host type.

    Scans this package's modules for classes, indexes them by class name,
    and instantiates the one named by ``config['host']['type']``; falls
    back to the plain ``Host`` base class when no match is found.
    """
    hosts = {}
    import sys
    import inspect
    import pkgutil
    import importlib
    from host import Host
    # Try to get the current host as specified in the config file
    # Otherwise resort to the Default host
    h_config = config.get_global_config()['host']
    host_class = h_config['type']
    # Iterate through all the members of this class
    # NOTE(review): pkgutil.iter_modules expects a list of *paths*; passing
    # [__name__] (a dotted module name) only works if it happens to match a
    # directory relative to the CWD -- confirm this is intended.
    module_names = [name for _, name, _ in pkgutil.iter_modules([__name__])]
    for mod in module_names:
        importlib.import_module(__name__ + '.' + mod)
    for mod in module_names:
        for name, obj in inspect.getmembers(sys.modules[__name__ + '.' + mod]):
            if inspect.isclass(obj):
                hosts[obj.__name__] = obj
    if host_class in hosts:
        return hosts[host_class](h_config)
    else:
        host = Host(h_config)
        return host
def __init__(self, default_target: BuildTarget, hosts_filename: str,
             compressor: Compressor, parser: Parser):
    """Set up the build coordinator.

    Reads remote hosts from ``hosts_filename`` (one "address port" pair
    per line, '#'-prefixed lines skipped) and seeds the local-process
    host pools.

    :raises FileNotFoundError: if the hosts file does not exist.
    """
    self.compressor = compressor
    self.root_target = default_target
    self.hosts = []
    try:
        # Use a context manager so the file handle is always closed
        # (the original leaked it).
        with open(hosts_filename) as hosts_file:
            for line in hosts_file:
                line = line.strip()
                if line.startswith('#'):
                    continue
                parts = line.split()
                try:
                    self.hosts.append(Host(parts[0], parts[1]))
                except IndexError:
                    # BUGFIX: a line with fewer than two fields raises
                    # IndexError, not KeyError; the original clause never
                    # matched, so a malformed line crashed the constructor.
                    pass
    except FileNotFoundError:
        raise FileNotFoundError("Error! Hosts file cannot be found")
    self.active_hosts: List[Host] = [
        HostLocal() for _ in range(config.MAX_LOCAL_PROCESSES)
    ]
    self.available_hosts: List[Host] = [
        HostLocal() for _ in range(config.MAX_LOCAL_PROCESSES)
    ]
    self.busy_hosts: List[Host] = []
    self.lock = asyncio.Lock()
    self.host_cond_var = asyncio.Condition()
    self.parser = parser
def __init__(self, task_id, hostname, host, rank, task_conf,
             shared_dir=None, user_name="beeuser"):
    """Per-task worker bound to one host machine.

    :param task_id: identifier of the task this worker belongs to
    :param hostname: name used for status/reporting
    :param host: machine this worker runs on (wrapped in a Host helper)
    :param rank: rank of this worker within the task
    :param task_conf: task configuration mapping
    :param shared_dir: optional shared directory available on the host
    :param user_name: account used on the host machine
    """
    # Basic configurations
    self.__status = ""
    self.__hostname = hostname
    self.rank = rank
    self.master = ""
    # Job configuration
    self.task_id = task_id
    self.task_conf = task_conf
    # Host machine
    self.__node = host
    self.host = Host(host)
    # Shared resourced
    self.shared_dir = shared_dir
    self.user_name = user_name
    # Output color list
    self.__output_color_list = ["magenta", "cyan", "blue", "green",
                                "red", "grey", "yellow"]
    self.output_color = "cyan"
    self.error_color = "red"
def setUp(self):
    """Create the test host and spread five blocks over storages s1-s3."""
    self.host = Host('188.184.9.234:1000')
    for storage_name in ('s1', 's1', 's2', 's2', 's3'):
        self.host.addBlock(storage_name)
def main():
    """Fetch Curitiba hosts from CouchSurfing and dump them to usuarios.csv."""
    fetcher = DataFetcher("cookies.txt")
    # BUGFIX: the query string contained a mojibake "(R)ion=" where the raw
    # "&region=" parameter had been HTML-entity-decoded; restore it.
    data = fetcher.getItems(
        "https://www.couchsurfing.com/members/hosts?utf8=%E2%9C%93&search_query=Curitiba%2C+Brazil&latitude=-25.4244287&longitude=-49.2653819&country=Brazil&region=south-america&date_modal_dismissed=true&arrival_date=&departure_date=&num_guests=1&has_references=1&can_host%5Baccepting_guests%5D=1&last_login=Anytime&join_date=Anytime&gender=All&min_age=&max_age=&languages_spoken=&interests=&smoking=No+Preference&radius=10&keyword=&host_sort=Best+Match&button=&perPage=100",
        "h3", className="-name")
    # One Host per result: display name, id (path suffix), profile link.
    usuarios = [
        Host(u.a.string, u.a['href'][len("/users/"):], u.a['href'])
        for u in data
    ]
    # BUGFIX-adjacent cleanup: use a context manager so the CSV file is
    # closed even if a write fails.
    with open("usuarios.csv", "w") as arquivo:
        arquivo_usuarios = csv.DictWriter(arquivo,
                                          fieldnames=["nome", "id", "endereco"],
                                          lineterminator='\n')
        arquivo_usuarios.writeheader()
        for user in usuarios:
            arquivo_usuarios.writerow({
                'nome': user.nome,
                'id': user.id,
                'endereco': user.endereco.strip()
            })
def create_host(self, environment, immune_halftime=2):
    """
    This creates a host inside a specified environment.

    :param environment: the environment the host lives in
    :param immune_halftime: half-life of the host's immunity (default 2)
    :return: the newly created Host
    """
    from host import Host
    # BUGFIX: the created host was assigned to a local and discarded
    # (the method always returned None); return it so callers can use it.
    return Host(environment=environment, immune_halftime=immune_halftime)
def fileinfo(self, f):
    """Show info for file ``f``; inspecting emplrec0451.log prints a hint
    and advances the quest state from 2 to 3, then the base Host
    implementation runs as usual."""
    if f == "emplrec0451.log":
        output.openline()
        output.scr.addstr("looks like ol' 0451 here was supposed to be at SenseNet earlier.")
        if self.state == 2:
            self.state = 3
    return Host.fileinfo(self,f)
def __init__(self):
    """Build the main window: hardware model objects plus a three-page
    notebook (Hardware Monitor / Hardware Info / About)."""
    Gtk.Window.__init__(self)
    # Hardware model objects read by the notebook pages.
    self.host = Host()
    self.cpu = CPU()
    self.gpu_integrated = GPU_Integrated()
    self.gpu_discrete = GPU_Discrete()
    self.ssd = SSD()
    self.bat = Battery()
    self.notebook = Gtk.Notebook()
    self.add(self.notebook)
    # Each page is filled by its private helper before being appended.
    self.page_1 = Gtk.Box()
    self.__fill_page_1()
    self.notebook.append_page(self.page_1, Gtk.Label('Hardware Monitor'))
    self.page_2 = Gtk.Box()
    self.__fill_page_2()
    self.notebook.append_page(self.page_2, Gtk.Label('Hardware Info'))
    self.page_3 = Gtk.Box()
    self.__fill_page_3()
    self.notebook.append_page(self.page_3, Gtk.Label('About'))
def __init__(self, subnet, starting_host, ending_host):
    """Queue a scanner job for every host in the given octet range.

    :param subnet: dotted prefix, e.g. "192.168.1"
    :param starting_host: first host octet to scan
    :param ending_host: last host octet to scan (inclusive)
    """
    self.working_queue = multiprocessing.Queue()
    self.done_queue = multiprocessing.Queue()
    for host in xrange(starting_host, ending_host + 1):
        address = '%s.%d' % (subnet, host)
        # Add the host scanner to the working queue
        self.working_queue.put(Host(host, address))
def __init__(self, host=None):
    """Wrap a Host; a default one is created when none is supplied."""
    self._host = host if host else Host()
    self._parser = None
    self._buildenv = None
    self._device = None
def __init__(self):
    """Build the simulated map: randomly placed hosts plus a router that
    runs an initial Hello round over them."""
    self.width = WIDTH
    self.height = HEIGHT
    self.host_quantity = HOSTS_QUANTITY
    self.signal_range = SIGNAL_RANGE

    ## HOSTS INSTANTIATION ##
    print(f"\nGenerating positions for {HOSTS_QUANTITY} hosts in a map {WIDTH}x{HEIGHT} Km...", end=" ")
    positions = utils.generate_coordinates(HOSTS_QUANTITY, WIDTH, HEIGHT)
    # One host per generated coordinate, all sharing the same signal range.
    self.hosts = [Host(position, SIGNAL_RANGE) for position in positions]
    print(f"{HOSTS_QUANTITY} hosts were created.")

    ## ROUTER INSTANTIATION ##
    print("Creating router...", end=" ")
    router = Router(self.hosts)
    print("Router created.")

    # gives an list of near active hosts for each host
    print("Running Hello for each host...", end=" ")
    router.hello()
    print("Host Hello status complete.\n")
def getHostList(self, cidade, kwargs={}, output="hosts.csv", per_page=100):
    """Fetch the list of hosts for a city from the CouchSurfing search API
    and write them to a CSV file (nome, id, endereco, cidade).

    NOTE(review): the mutable default ``kwargs={}`` is shared across calls;
    it is unused here but confirm before relying on it.
    """
    with open(output, "w", encoding='utf-8') as arquivo:
        arquivo_usuarios = csv.DictWriter(arquivo,
                                          fieldnames=["nome","id","endereco","cidade"],
                                          lineterminator='\n')
        arquivo_usuarios.writeheader()
        # Two older URL variants (HTML-scraping V1 and an alternate API
        # query) were kept commented out here; removed for brevity.
        url = u"https://www.couchsurfing.com/api/web/users/search?controller=user_profiles&action=hosts&city={}&page=1&perPage={}&latitude={}&longitude={}&has_references=1&search_query={}".format(urllib.parse.quote(cidade.nome), per_page, cidade.latitude, cidade.longitude, urllib.parse.quote(cidade.endereco))
        print(cidade.nome)
        data = self.getJson(url)["users"]
        for user in data:
            # JSON API fields; the older BeautifulSoup extraction was:
            #   nome = user.find("span", "user-card__name").string
            #   id = user.find("a", "user-card__content")['href'][len("/people/"):]
            #   endereco = user.find("a", "user-card__content")['href']
            nome = user["publicName"]
            id = user["id"]
            endereco = user["profileLink"]
            host = Host(nome, id, endereco, cidade)
            print("{} {} {} {} ".format(host.nome, host.id, host.endereco, host.cidade.nome))
            arquivo_usuarios.writerow({"nome" : host.nome, "id" : host.id,
                                       "endereco" : host.endereco,
                                       "cidade" : host.cidade.nome})
def __init__(self, name, mymac=(0, 0, 0, 0, 0, 0), myip=(0, 0, 0, 0),
             destip=(0, 0, 0, 0), pkts=1):
    """Traffic-generating host.

    :param name: node name, forwarded to ``Host.__init__``
    :param mymac: 6-tuple MAC address of this host
    :param myip: 4-tuple IP address of this host
    :param destip: a single 4-tuple destination or a list of them
    :param pkts: number of packets this host should send
    """
    Host.__init__(self, name, mymac, myip)
    self.pkts_to_send = pkts
    self.pkts_sent = 0
    self.received_pkt_count = 0
    # Index into self.destip of the destination to use next.
    self.current_destip_idx = 0
    if isinstance(destip, list):
        # NOTE(review): on Python 3 this leaves a lazy map object rather
        # than a list -- confirm the interpreter this file targets.
        self.destip = map(lambda x: IpAddress(x), destip)
    else:
        self.destip = [IpAddress(destip)]
    self.flows_started = 0
    self.connections = {}
def get_arp_cache(self, host):
    """Get ARP table from host.

    Walks the 1.3.6.1.2.1.4.22.1.2 (ipNetToMedia physical-address) subtree
    for the host's interface; each entry yields a new Host whose IP is the
    OID suffix and whose MAC is the decoded value.
    """
    results = Crawler.snmp.walk(host.ip, '.1.3.6.1.2.1.4.22.1.2.' + str(host.interface))
    if results is not None:
        for result in results:
            for name, val in result:
                print(str(name) + ' = ' + hex_to_mac(val))
                if str(name).find('1.3.6.1.2.1.4.22.1.2.' + str(host.interface)) != -1:
                    new_host = Host()
                    # The OID portion after "<prefix>.<interface>." is the
                    # neighbour's IP address.
                    new_host.ip = str(name).split('1.3.6.1.2.1.4.22.1.2.' + str(host.interface) + '.',1)[1]
                    #print('new host ip: ' + new_host.ip)
                    new_host.mac = hex_to_mac(val)
                    #print('new host mac: ' + new_host.mac)
                    self.add_host(new_host)
def vcenterdatacenter_get_hosts(self, label, vcenter, tenantname, xml=False):
    """Retrieve the hosts of a vCenter datacenter (looked up by label)."""
    uri = self.vcenterdatacenter_query(label, vcenter, tenantname)
    request_uri = VcenterDatacenter.URI_DATACENTER_HOSTS.format(uri)
    (s, h) = common.service_json_request(self.__ipAddr, self.__port, "GET",
                                         request_uri, None, None, xml)
    from host import Host
    host_client = Host(self.__ipAddr, self.__port)
    payload = common.json_decode(s)
    return host_client.show(payload['host'])
def disconnect(self):
    """Handle disconnect; in state 1 the plot advances (state -> 2,
    "v0rt3x" joins localhost's users, conversation vortex2 is queued)
    before reporting success.  Otherwise defer to Host.disconnect."""
    if self.state == 1:
        self.state = 2
        global gamestate
        gamestate["localhost"].users.append("v0rt3x")
        queueconv(conversations["vortex2"])
        return True
    return Host.disconnect(self)
def message(self, u):
    """Handle messaging user ``u``; messaging "v0rt3x" in state 0 starts
    the vortex plot (state -> 1, conversation vortex1 queued, localhost
    pushed to state 2).  Otherwise defer to Host.message."""
    if self.state == 0 and u == "v0rt3x":
        self.state = 1
        queueconv(conversations["vortex1"])
        global gamestate
        gamestate["localhost"].state = 2
        return None
    return Host.message(self, u)
def get_list_host(response_query_str: List[Dict[Text, Any]]):
    """Convert raw query rows into a sorted, de-duplicated list of Hosts.

    Returns an empty list when the query yielded nothing.
    """
    if response_query_str is None:
        return []
    hosts = [
        Host(row['ip'], row['mac'], row['vendor'], row['date'],
             row['network'], row['description'], row['id'])
        for row in response_query_str
    ]
    return sorted(set(hosts))  # set() removes duplicates
def from_json(data):
    """Rebuild a HostList from an iterable of serialized host strings.

    Each element of ``data`` is a string holding a dict literal; it is
    evaluated and applied to a fresh Host, and '_id' is restored from the
    'id' key when present (a warning is logged otherwise).

    SECURITY NOTE(review): ``eval`` executes arbitrary code -- this is only
    safe if ``data`` is fully trusted; consider ``ast.literal_eval``.
    """
    hostlist = HostList()
    for l in data:
        # host = hostlist.create_host()
        host = Host()
        d = eval(l)
        host.update(d)
        if 'id' in d:
            host._id = int(d['id'])
        else:
            log.warning('No host ID in data')
        hostlist.append(host)
    return hostlist
def create_host(self):
    """Allocate the next sequentially-numbered Host, append and return it."""
    # with self.lock:
    self.count += 1
    new_host = Host(self.count)
    super(HostList, self).append(new_host)
    return new_host
def __init__(self, ipAddr, port):
    """Store the ViPR instance coordinates and build the Host REST helper.

    :param ipAddr: IP address of the ViPR instance
    :param port: port used for REST API requests
    """
    self.__ipAddr = ipAddr
    self.__port = port
    self.__hostObject = Host(ipAddr, port)
def test_host_class(self):
    """A fresh Host keeps its name, stamps last_updated with today's date,
    and starts with an empty software list."""
    host_name = "hostname"
    host = Host(host_name)
    self.assertTrue(host.name == host_name)
    # last_updated is stored as a parseable timestamp set at creation time.
    self.assertTrue(
        dateutil.parser.parse(host.last_updated).day == datetime.today().day)
    self.assertTrue(len(host.software) == 0)
def main(host=None, argv=None):
    """CLI entry point for the grammar tool.

    Handles --version, requires one of -c/-g, then either pretty-prints or
    compiles the grammar, or parses the input with it.  Returns a process
    exit code: 0 on success, 1 on error, 130 on Ctrl-C.
    """
    host = host or Host()
    args = parse_args(argv)
    if args.version:
        host.print_out(VERSION)
        return 0
    if args.grammar_cmd is None and args.grammar_file is None:
        host.print_err('must specify one of -c or -g')
        return 1
    try:
        grammar_txt, grammar_fname, err = grammar_from_args(host, args)
        if err:
            host.print_err(err)
            return 1
        if args.pretty_print or args.compile_grammar:
            if args.pretty_print:
                out, err = print_grammar(grammar_txt, grammar_fname)
            else:
                # Optionally inline the parser base class into the output.
                if args.inline_compiled_parser_base:
                    base = (host.read(host.dirname(__file__),
                                      'compiled_parser_base.py'))
                else:
                    base = None
                out, err = compile_grammar(grammar_txt, grammar_fname,
                                           args.compiled_parser_class_name,
                                           args.compiled_parser_package_name,
                                           base)
            if err:
                host.print_err(err)
                return 1
            # Write to -o if given, otherwise to stdout without a newline.
            if args.output:
                host.write(args.output, out)
            else:
                host.print_out(out, end='')
            return 0
        input_txt, input_fname, err = input_from_args(host, args)
        if err:
            host.print_err(err)
            return 1
        out, err = parse(grammar_txt, input_txt, grammar_fname, input_fname,
                         args.use_compiled_grammar_parser)
        if err:
            host.print_err(err)
        if out:
            if args.output:
                host.write(args.output, str(out))
            else:
                host.print_out(str(out), end='')
        return 0 if err is None else 1
    except KeyboardInterrupt:
        host.print_err('Interrupted, exiting ..')
        return 130  # SIGINT
def __get_hostdetails(self, dns_blockip):
    """
    Get Host Name and IP address for __dnsserver_blacklist and
    __dnsserver_whitelist
    """
    host = Host(dns_blockip)  # Resolve name/IP details for the DNS server
    logger.info(f'Hostname: {host.name}, IP Address: {host.ip}')
    # Setup the template strings for writing out to black/white list files
    [self.__dnsserver_blacklist, self.__dnsserver_whitelist] = self.__services.get_dnstemplatestr(host.name, host.ip)
def from_args(cls, host, args):
    """Constructs a Device from command line arguments.

    The device's network address is resolved via the `netaddr` tool,
    preferring the explicit --device argument and falling back to the
    cached "<build_dir>.device" file.  Raises RuntimeError when no device
    can be found and Host.ConfigError when the SSH config is missing.
    """
    netaddr_cmd = ['netaddr', '--fuchsia', '--nowait']
    default_device = '{}.device'.format(host.build_dir)
    if args.device:
        netaddr_cmd.append(args.device)
    elif os.path.exists(default_device):
        with open(default_device) as f:
            netaddr_cmd.append(f.read().strip())
    try:
        netaddr = host.zircon_tool(netaddr_cmd)
    except subprocess.CalledProcessError:
        raise RuntimeError('Unable to find device; try `fx set-device`.')
    device = cls(host, netaddr)
    if not host.build_dir:
        raise Host.ConfigError('Unable to find SSH configuration.')
    # SSH keys/config live in the build output directory.
    device.set_ssh_config(Host.join(host.build_dir, 'ssh-keys', 'ssh_config'))
    return device
def loadHostList(self, filename):
    """Load hosts from a CSV file with nome/id/endereco/cidade columns."""
    with open(filename, "r", encoding="utf-8") as arquivo:
        self.hosts = []
        for row in csv.DictReader(arquivo):
            self.hosts.append(Host(row["nome"], row["id"],
                                   row["endereco"], row["cidade"]))
        print("{} hosts loaded".format(len(self.hosts)))
def configure(self, config):
    """Fill in this host model's hardware description, then defer to
    Host.configure.

    The counts and port/eth/modem maps are only written when no port_map
    has been set yet (first-time configuration).
    """
    if not self.port_map:
        config['ncounters'] = 4
        config['nDIs'] = 4
        config['nrelays'] = 2
        config['ndallas_busses'] = 4
        config['nGPIOs'] = 1
        # Serial device mapping for the console and com1-com6.
        config['port_map'] = {'console':'/dev/ttyS0', 'com1':'/dev/ttyS8',
                              'com2':'/dev/ttyS9', 'com3':'/dev/ttySa',
                              'com4':'/dev/ttySb', 'com5':'/dev/ttySc',
                              'com6':'/dev/ttySd'}
        config['eth_map'] = {'eth0':'eth0', 'eth1':'eth1'}
        config['modem_map'] = {}
    Host.configure(self, config)
def delete(self, f):
    """Delete file ``f``; deleting onyx.lock also removes access.log,
    queues the onyxsensenet4 conversation and unlocks the disconnect
    command.  Any other file defers to the base Host.delete."""
    if f == "onyx.lock":
        self.removefile("access.log")
        self.removefile("onyx.lock")
        queueconv(conversations["onyxsensenet4"])
        unlockcommand("disconnect")
        return True
    else:
        return Host.delete(self, f)
def __init__(self, **kwargs): """Constructor Initialize object's private data. rpcdebug: Set RPC kernel debug flags and save log messages [default: ''] nfsdebug: Set NFS kernel debug flags and save log messages [default: ''] dbgname: Base name for log messages files to create [default: 'dbgfile'] tracename: Base name for trace files to create [default: 'tracefile'] notrace: Debug option so a trace is not actually started [default: False] tcpdump: Tcpdump command [default: '/usr/sbin/tcpdump'] messages: Location of file for system messages [default: '/var/log/messages'] tmpdir: Temporary directory where trace files are created [default: '/tmp'] """ # Arguments self.rpcdebug = kwargs.pop("rpcdebug", '') self.nfsdebug = kwargs.pop("nfsdebug", '') self.dbgname = kwargs.pop("dbgname", 'dbgfile') self.tracename = kwargs.pop("tracename", 'tracefile') self.notrace = kwargs.pop("notrace", False) self.tcpdump = kwargs.pop("tcpdump", c.NFSTEST_TCPDUMP) self.messages = kwargs.pop("messages", c.NFSTEST_MESSAGESLOG) self.tmpdir = kwargs.pop("tmpdir", c.NFSTEST_TMPDIR) self._nfsdebug = False Host.__init__(self) # Initialize object variables self.dbgidx = 1 self.dbgfile = '' self.traceidx = 1 self.tracefile = '' self.tracefiles = [] self.clients = [] self.traceproc = None self.nii_name = '' # nii_name for the client self.nii_server = '' # nii_name for the server
def __init__(self, filename, duration):
    """Load a network description from JSON and register every component.

    Links, hosts, routers and flows are created and registered in
    ``globals.idmapping`` keyed by their ids.

    :param filename: path to the JSON network description
    :param duration: simulation length in seconds (converted to timesteps
        using the global timestep ``globals.dt``)
    """
    self.filename = filename
    # the duration of the simulation, in number of timesteps
    self.duration = int(duration / globals.dt)

    # Import the network object parameters.
    # CLEANUP: the JSON handle gets its own name so later loop variables
    # no longer shadow it, and the dead "x = None" pre-assignments are gone.
    with open(self.filename) as config_file:
        network_objects = json.load(config_file)

    # Create links
    for l in network_objects['links']:
        link = Link(l['id'], l['connection1'], l['connection2'],
                    l['rate'], l['delay'], l['buffersize'],
                    l['track1'] == 1, l['track2'] == 1)
        globals.idmapping['links'][l['id']] = link

    # Create hosts
    for h in network_objects['hosts']:
        host = Host(h['id'], h['linkid'])
        globals.idmapping['hosts'][h['id']] = host

    # Create routers ([{}] marks "no routers" in this config format)
    if network_objects['routers'] != [{}]:
        for r in network_objects['routers']:
            # Resolve the already-registered links attached to the router.
            link_list = [globals.idmapping['links'][lin_id]
                         for lin_id in r['links']]
            router = Router(r['id'], link_list)
            globals.idmapping['routers'][r['id']] = router

    # Create flows, picking the congestion-control implementation.
    for flow_cfg in network_objects['flows']:
        if flow_cfg['congestion_control'] == 'reno':
            flow = Flow(flow_cfg['id'], flow_cfg['source'],
                        flow_cfg['destination'], flow_cfg['amount'],
                        flow_cfg['start'], flow_cfg['track'] == 1)
        else:
            flow = Flow_FAST(flow_cfg['id'], flow_cfg['source'],
                             flow_cfg['destination'], flow_cfg['amount'],
                             flow_cfg['start'], flow_cfg['track'] == 1)
        globals.idmapping['flows'][flow_cfg['id']] = flow
def cluster_get_hosts(self, label, tenantname):
    """Retrieve details of the hosts associated with a cluster."""
    cluster_uri = self.cluster_query(label, tenantname)
    (s, h) = common.service_json_request(
        self.__ipAddr, self.__port, "GET",
        Cluster.URI_CLUSTER_HOSTS.format(cluster_uri),
        None, None, False)
    from host import Host
    host_client = Host(self.__ipAddr, self.__port)
    decoded = common.json_decode(s)
    return host_client.show(decoded['host'])
def generate_executor(cls, type=0, **kwargs):
    """
    Produce executor by its type
    :param type: 0 - physical host, 1 - virtual machine
    :param kwargs: executor properties
    :return: instance of executor
    """
    # NOTE(review): only type == 0 is handled here; any other value falls
    # through and implicitly returns None -- confirm whether type 1 is
    # implemented elsewhere or should raise.
    if type == 0:
        from host import Host
        return Host(**kwargs)
def vcenter_get_hosts(self, label, tenantname):
    """Retrieve the hosts attached to a vCenter (looked up by label)."""
    vcenter_uri = self.vcenter_query(label, tenantname)
    request_uri = VCenter.URI_VCENTER_HOSTS.format(vcenter_uri)
    (s, h) = common.service_json_request(self.__ipAddr, self.__port, "GET",
                                         request_uri, None, None, False)
    from host import Host
    host_client = Host(self.__ipAddr, self.__port)
    decoded = common.json_decode(s)
    return host_client.show(decoded["host"])
def configure(self, config):
    """Fill in this host model's hardware description, then defer to
    Host.configure.

    The counts and port/eth/modem maps are only written when no port_map
    has been set yet (first-time configuration).
    """
    if not self.port_map:
        config['ncounters'] = 4
        config['nDIs'] = 4
        config['nrelays'] = 2
        config['ndallas_busses'] = 4
        config['nGPIOs'] = 1
        # Serial device mapping for the console and com1-com6.
        config['port_map'] = {
            'console': '/dev/ttyS0',
            'com1': '/dev/ttyS8',
            'com2': '/dev/ttyS9',
            'com3': '/dev/ttySa',
            'com4': '/dev/ttySb',
            'com5': '/dev/ttySc',
            'com6': '/dev/ttySd'
        }
        config['eth_map'] = {'eth0': 'eth0', 'eth1': 'eth1'}
        config['modem_map'] = {}
    Host.configure(self, config)
def vcenter_get_hosts(self, label, tenantname):
    '''
    Makes a REST API call to retrieve details of a
    vcenter based on its UUID
    '''
    # Resolve the vCenter, fetch its host list, then expand host details.
    uri = self.vcenter_query(label, tenantname)
    (s, h) = common.service_json_request(
        self.__ipAddr, self.__port,
        "GET", VCenter.URI_VCENTER_HOSTS.format(uri),
        None, None, False)
    from host import Host
    response = common.json_decode(s)
    return Host(self.__ipAddr, self.__port).show(response['host'])
def ReadFile(name, cnt, cert, delay, prec):
    """Parse a plain-text file of HTTP requests into Host objects.

    Expected format per request: a request line ("METHOD URL"), header
    lines ("Name: value"), one blank line, then an optional body; a second
    blank line terminates the request.  ``cnt`` and ``prec`` are forwarded
    to each Host; ``cert`` and ``delay`` are accepted for interface
    compatibility.

    :return: list of Host objects, one per request.
    """
    arr = []
    in_body = False  # False while reading headers, True once in the body
    # BUGFIX: the file handle was never closed; use a context manager.
    with open(name, "r") as f:
        for line in f:
            opt = line.split()
            if line == "\n" or len(opt) == 0:
                if not in_body:
                    # First blank line: switch from headers to body.
                    in_body = True
                else:
                    # Second blank line: the request is complete.
                    arr.append(Host(method, url, header, body, cnt, prec))
                    in_body = False
            elif opt[0] in ("GET", "POST", "HEAD", "OPTIONS", "PUT", "DELETE"):
                # Request line: start a new request.
                method = opt[0]
                url = opt[1]
                header = {}
                body = ""
            elif not in_body:
                # Header line: re-join the value fields with single spaces
                # (replaces the original manual index loop).
                header[opt[0].replace(":", "")] = " ".join(opt[1:])
            else:
                body += line
    # The last request has no trailing blank-line pair; flush it here.
    arr.append(Host(method, url, header, body, cnt, prec))
    return arr
def add_host(self, host_id, link, flows):
    """Adds a new host to the network.

    Args:
        host_id (str): The network address of the host.
        link (Link): The link that the host is connected to.
        flows (dict): All the flows that are going out of the host.

    NOTE(review): ``link`` and ``flows`` are currently unused in this
    body -- confirm whether wiring them to the Host was intended.
    """
    host = Host(self, host_id)
    self.nodes[host_id] = host
def __init__(self, device, pkg, tgt, output=None, foreground=False):
    """Fuzzer handle for package ``pkg`` / target ``tgt`` on ``device``.

    :param device: the device the fuzzer runs on (its host is reused here)
    :param pkg: fuzzer package name
    :param tgt: fuzz target name within the package
    :param output: results directory; defaults to
        test_data/fuzzing/<pkg>/<tgt>
    :param foreground: when True, run the fuzzer in the foreground
    """
    self.device = device
    self.host = device.host
    self.pkg = pkg
    self.tgt = tgt
    if output:
        self._output = output
    else:
        self._output = Host.join('test_data', 'fuzzing', self.pkg, self.tgt)
    self._foreground = foreground
def disconnect(self):
    """Handle disconnect with linklock plot logic: states 2 and 3 forbid
    disconnecting; state 4 advances to 5, queues conversation vortex6 and
    pushes localhost to state 8.  Other states defer to Host.disconnect."""
    if self.state == 2 or self.state == 3:
        error("User is affected by linklock. May not disconnect")
        return False
    elif self.state == 4:
        self.state = 5
        queueconv(conversations["vortex6"])
        global gamestate
        gamestate["localhost"].state = 8
        return True
    return Host.disconnect(self)
def create_host(self, ip_addr):
    """Return the host for ``ip_addr``, creating and registering a new one
    (with a unique id suffix) when the address has not been seen yet."""
    #print('CREATING NEW HOST WITH IP ' + str(ip_addr))
    if self.ip_exists(ip_addr):
        #print('\tIP ALREADY EXISTS')
        return self.get_host_by_ip(ip_addr)
    fresh = Host()
    fresh.id += str(self.host_counter)  # unique per-creation id suffix
    fresh.ip = ip_addr
    fresh.add_ip(ip_addr)
    self.host_counter += 1
    self.all_hosts.append(fresh)
    self.add_to_host_list(fresh)
    return fresh
def _rows(self):
    """Parse self.file_path into Host entries, skipping comment lines and
    rows with fewer than two fields; caches and returns the list."""
    self._hosts = []
    for row in self._readlines(self.file_path):
        # skip comments and invalid rows for now
        if row.startswith('#') or len(row.split()) < 2:
            continue
        fields = Hosts._process_row(row)  # (ipaddress, hostname, aliases, comments)
        self._hosts.append(Host(*fields))
    return self._hosts
def configure(self, config):
    """Fill in the 'megatron' host model's hardware description, then
    defer to Host.configure.

    The counts and port/eth maps are only written when no port_map has
    been set yet (first-time configuration).
    """
    #print 'configure megatron'
    if not self.port_map:
        config['ncounters'] = 4
        config['nAIs'] = 4
        config['nAOs'] = 3
        config['nDIs'] = 4
        config['nrelays'] = 2
        config['ndallas_busses'] = 2
        config['nCAN_busses'] = 1
        config['nGPIOs'] = 0
        # Serial device mapping for the console and com1-com6.
        config['port_map'] = {'console':'/dev/ttyS0', 'com1':'/dev/ttyS2',
                              'com2':'/dev/ttyS3', 'com3':'/dev/ttyS4',
                              'com4':'/dev/ttyS5', 'com5':'/dev/ttyS6',
                              'com6':'/dev/ttyS7'}
        config['eth_map'] = {'eth0':'eth0', 'eth1':'eth1'}
    Host.configure(self, config)
def addHost():
    """Interactively prompt for an IPv4 address and a note, then persist a
    new Host through the data-access layer."""
    print "Enter Host Ipv4 Address"
    ipadr = str(raw_input(prompt))
    print "Enter Comment/Note"
    note = str(raw_input(prompt))
    # Parse the dotted-quad string into the internal IP representation.
    ip4 = buildIp(ipadr)
    host = Host(ip4, note)
    dal = Dal()
    dal.addHost(host)
    dal.save()
def prepare_host(self, params):
    """Build a Host(1) whose hMin/hMax/hSum/hCount stats come from
    ``params`` in that order."""
    host = Host(1)
    host.hMin, host.hMax, host.hSum, host.hCount = (
        params[0], params[1], params[2], params[3])
    return host