def externaladdress(port):
    """
    Return a string listing the ways to connect to the machine running the game,
    for display on screen.
    Need to look into making this detect whether we are on wlan or ethernet.
    :param port:
    :return:
    """
    global LANIP  # declared before LANIP is read or assigned below
    lan_ip_for_message = None
    interfaces = netifaces.interfaces()
    if LANIP:
        lan_ip_for_message = LANIP
    else:
        for i in interfaces:
            if i == 'lo':
                continue
            if 'docker' in i:
                continue
            iface = netifaces.ifaddresses(i).get(netifaces.AF_INET)
            print(iface)
            if iface is not None:
                for j in iface:
                    if not lan_ip_for_message:
                        lan_ip_for_message = j['addr']
                        LANIP = j['addr']
    message = 'http://' + str(myip()) + ':' + str(port) + '/'
    if lan_ip_for_message:
        message += ' or ' + 'http://' + str(lan_ip_for_message) + ':' + str(port) + '/'
    return message
def __init__(self, filepath):
    self.filepath = filepath
    torrent_file = ''
    self.file = open(self.filepath, 'rb')
    for line in self.file:
        torrent_file += line  # not bounded by system memory/size of file
    self.file.close()
    self.torrent_file = bencode.bdecode(torrent_file)
    self.ID = "PT"
    self.VERSION = "0000"
    self.RESERVED = "00000000"
    self.PROTOCOL = "BitTorrent protocol"
    self.trackers = self.get_trackers()
    self.info_hash = self.get_info_hash()
    self.peer_id = self.gen_peer_id()
    self.length = self.get_length()
    self.left = self.length  # TODO: remove this and pertinent unit tests
    self.creation_date = self.get_creation_date()
    self.IP = ipgetter.myip()
    self.key = self.get_key()
    self.info_hash_hex = self.get_info_hash_hex()
    self.port = self.get_port()
    self.handshake = self.get_handshake()
    self.ipv6 = socket.has_ipv6
    self.timeout = socket.getdefaulttimeout()
def get_new_jobs(self, job_title, location, min_date=None):
    jobs = []
    max_job_chunks = 25
    search_start = 0
    indeed_params = {
        'q': job_title,
        'limit': max_job_chunks,
        'latlong': 1,
        'sort': 'date',
        'userip': ipgetter.myip(),
        'useragent': 'Python'}
    indeed_response = self.indeed_client.search(**indeed_params, l=location, start=search_start)
    job_chunk = indeed_response['results']
    total_jobs = indeed_response['totalResults']

    # Log
    sample_max_city_name_length = 35
    debug_log_string = 'Scraped location {:<' + str(sample_max_city_name_length) + '} found {:>3} jobs.'
    self.logger.debug(debug_log_string.format(location, len(job_chunk)))

    if job_chunk:
        while search_start < total_jobs and (
                not min_date or min_date < parser.parse(job_chunk[0]['date']).timestamp()):
            jobs.extend(job_chunk)
            search_start += indeed_params['limit']
            job_chunk = self.indeed_client.search(**indeed_params, l=location, start=search_start)['results']
    return jobs
def display_eedata(epd):
    w = epd.width
    h = epd.height
    myip = ipgetter.myip()

    # Uncomment this line to display the true external IP
    #ext_ip = 'External: ' + myip
    # Comment this line to remove the fake external IP
    ext_ip = 'External: 255.255.255.255'

    int_ip = get_lan_ip()
    int_ip = 'Internal: ' + int_ip
    #int_ip = int_ip.rstrip(5)

    f = open('/home/pi/speedtest-cron/speedtest.txt')
    lines = f.readlines()
    f.close()
    speedline1 = lines[-5].strip()
    speedline2 = lines[-4].strip()
    speedline3 = lines[-3].strip()
    speedline4 = lines[-2].strip()

    # initially set all white background
    image = Image.new('1', epd.size, WHITE)

    # prepare for drawing
    draw = ImageDraw.Draw(image)
    draw.rectangle((1, 1, w - 1, h - 1), fill=WHITE, outline=BLACK)
    draw.rectangle((2, 2, w - 2, h - 2), fill=WHITE, outline=BLACK)

    # text
    draw.text((5, 5), speedline1, fill=BLACK, font=text_font1)
    draw.text((10, 35), speedline2, fill=BLACK, font=text_font2)
    draw.text((10, 65), speedline3, fill=BLACK, font=text_font2)
    draw.text((10, 95), speedline4, fill=BLACK, font=text_font2)
    draw.text((10, 125), ext_ip, fill=BLACK, font=text_font2)
    draw.text((10, 155), int_ip, fill=BLACK, font=text_font2)

    # display image on the panel
    epd.display(image)
    epd.update()
def _wait_for_hololence(self):
    print 'looking for hololence on ip {} in port {}...'.format(
        self.address[0], self.address[1])
    while True:
        try:
            self._spinner()
            self.serversocket.sendto('ip: {}'.format(myip()), self.address)
            sleep(1)
            buf, address = self.serversocket.recvfrom(1024)
            if len(buf) > 0:
                print buf
                try:
                    if buf.lower() == conf.START:
                        break
                except Exception:
                    print traceback.format_exc()
                    continue
            sleep(0.5)
        except IOError as e:  # and here it is handled
            if e.errno == errno.EWOULDBLOCK:
                sleep(0.5)
                continue
        except:
            print traceback.format_exc()
            sleep(2)
            continue
    print 'Ready to go!'
def getAddress():
    # global ip address of device
    globalIp = ipgetter.myip()
    # local ip address of device
    localIp = socket.gethostbyname(socket.gethostname())
    output = 'Global_IP:' + globalIp + '//' + 'Local_Ip:' + localIp
    return output
def retranslateUi(self, MainWindow):
    MainWindow.setWindowTitle(_translate("MainWindow", "Network", None))
    self.enroll_btn.setText(_translate("MainWindow", "OK", None))
    self.external_network.setVisible(True)
    self.external_network.setText("External : " + str(ipgetter.myip()))
    self.modifier = self.labelModifier(self)
def send_email(attachment=[]):
    # Email sending routine
    sub_header = strftime("%Y-%m-%d %H:%M:%S", gmtime())
    msg = MIMEMultipart()  # build a MIMEMultipart object as the root container for a mail with attachments
    msg['From'] = sub_header
    msg['To'] = username
    msg['Subject'] = sub_header
    msgtext = ipgetter.myip()
    msg.attach(MIMEText(str(msgtext)))  # build a MIMEText object as the visible message body and attach it to the root container
    for attach in attachment:
        if os.path.exists(attach) == True:
            part = MIMEBase('application', 'octet-stream')
            part.set_payload(open(attach, 'rb').read())  # read in the file contents
            Encoders.encode_base64(part)  # base64-encode the file contents
            part.add_header('Content-Disposition',
                            'attachment; filename="{}"'.format(os.path.basename(attach)))  # set the attachment header
            msg.attach(part)
    smtp = smtplib.SMTP()
    smtp.connect(smtpserver, port)
    smtp.ehlo()
    smtp.starttls()
    smtp.set_debuglevel(1)
    smtp.login(username, password)
    smtp.sendmail(sender, receiver, msg.as_string())
    smtp.quit()
def getip(try_count, blacklist):
    "Function to return the current, external, IP address"
    good_ip = 0
    counter = 0
    pattern = re.compile(r"^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}$")
    # try up to config.try_count servers for an IP
    while (good_ip == 0) and (counter < try_count):
        # get an IP
        currip = ipgetter.myip()
        # check to see that it has a ###.###.###.### format
        if pattern.match(currip) and currip not in blacklist:
            good_ip = 1
            print("GetIP: Try %d: Good IP: %s" % (counter + 1, currip))
        else:
            if currip in blacklist:
                print("GetIP: Try %d: Bad IP (in Blacklist): %s" % (counter + 1, currip))
            else:
                print("GetIP: Try %d: Bad IP (malformed): %s" % (counter + 1, currip))
        # increment the counter
        counter = counter + 1
    #print ("My IP = %s\r\n" % currip)
    return currip
def __init__(self):
    import miniupnpc

    print('Finding router ip...')
    upnp = miniupnpc.UPnP()
    upnp.discoverdelay = 10
    upnp.discover()
    upnp.selectigd()
    self.local_host = upnp.lanaddr
    self.external_host = ipgetter.myip()
    self.local_port = 4455
    self.external_port = 43210
    while True:
        upnp.addportmapping(self.external_port, 'TCP', self.local_host, self.local_port, 'Vokiso', '')
        try:
            self.sock = mesh.MeshSocket(self.local_host, self.local_port,
                                        out_addr=(self.external_host, self.external_port))
        except OSError:
            self.local_port += 1
            self.external_port += 1
            continue
        break
    self.connected_event = Event()
    self.sock.once('connect')(lambda sock: self.connected_event.set())
    self.send_lock = Lock()
def getConguration():
    with open(db) as f:
        config["database"] = json.load(f)
    try:
        con = MySQLdb.connect(
            config.get("database").get("host"),
            config.get("database").get("user"),
            config.get("database").get("passwd"),
            config.get("database").get("db"),
            cursorclass=MySQLdb.cursors.DictCursor,
        )
        if db:
            cur = con.cursor()
            cur.execute(
                "SELECT * FROM `configurations` WHERE id = (SELECT active FROM `states` ORDER BY id DESC limit 1)"
            )
            config["fingerprint"] = cur.fetchone()
            config["session"] = shortuuid.uuid()
            config["remote_ip"] = ipgetter.myip()
            config["vpn_ip"] = ni.ifaddresses("tun0")[2][0]["addr"]
            print "Connected to " + str(config["vpn_ip"])
            config["fingerprint"]["amp_min"] = 10
            config["fingerprint"]["plot"] = 0
            config["verbose"] = False
            config["soundcard"] = {"chunksize": 8096, "channels": 1}
            return config
        else:
            print "Connection unsuccessful"
    except MySQLdb.Error, e:
        print "MySQL Error [%d]: %s" % (e.args[0], e.args[1])
        print "MySQL Error: %s" % str(e)
def getConguration():
    db = os.path.dirname(__file__) + "/conf/database.json"
    with open(db) as f:
        config['database'] = json.load(f)
    try:
        con = MySQLdb.connect(
            config.get('database').get('host'),
            config.get('database').get('user'),
            config.get('database').get('passwd'),
            config.get('database').get('db'),
            cursorclass=MySQLdb.cursors.DictCursor
        )
        if (db):
            cur = con.cursor()
            cur.execute(
                "SELECT * FROM `configurations` WHERE id = (SELECT active FROM `states` ORDER BY id DESC limit 1)")
            config['fingerprint'] = cur.fetchone()
            config['session'] = shortuuid.uuid()
            config['remote_ip'] = ipgetter.myip()
            config['vpn_ip'] = ni.ifaddresses('tun0')[2][0]['addr']
            config['fingerprint']['amp_min'] = 10
            config['fingerprint']['plot'] = 0
            config['verbose'] = False
            config['soundcard'] = {
                "chunksize": 8096,
                "channels": 1
            }
            return config
        else:
            print "Connection unsuccessful"
    except MySQLdb.Error, e:
        print "MySQL Error [%d]: %s" % (e.args[0], e.args[1])
        print "MySQL Error: %s" % str(e)
def ip_check():
    myip = ipgetter.myip()
    print('Running script from:', myip)
    if myip in ipcalc.Network('192.0.163.0/24'):  # using netmask /24 for Teksavvy
        return False
    return True
def do(self):
    # do your thing here
    # get ip
    ip = ipgetter.myip()
    with open('redecasd_info.json') as f:
        data = json.load(f)
    atual = data['redecasd_status']['origin']
    change = False
    if ip == '161.24.24.1':
        if atual != 'ITA':
            data['redecasd_status']['origin'] = 'ITA'
            change = True
    elif ip == '191.242.240.146':
        if atual != 'HORIZON':
            data['redecasd_status']['origin'] = 'HORIZON'
            change = True
    else:
        if atual != 'SEM REDE':
            data['redecasd_status']['origin'] = 'SEM REDE'
            change = True
    if change:
        with open('redecasd_info.json', 'w') as outfile:
            json.dump(data, outfile)
def update_domain():
    client = dns.Client(project=config['project_id'])
    zone = client.zone(config['zone_name'], config['dns_name'])
    ip = ipgetter.myip()
    changes = zone.changes()
    records = zone.list_resource_record_sets()
    for record in records:
        if record.name == config['dns_name']:
            if record.record_type == config['record_type'] and record.ttl == config['record_ttl'] and record.rrdatas[0] == ip:
                # no update needed
                print('No update needed for ' + config['dns_name'] + ' in zone ' + config['zone_name'] + ' rrdata: ' + record.rrdatas[0])
                return
            else:
                # delete out of date record before adding new
                print('Delete out of date record set ' + config['dns_name'] + ' in zone ' + config['zone_name'] + ' rrdata: ' + record.rrdatas[0])
                record_delete = zone.resource_record_set(record.name, record.record_type, record.ttl, record.rrdatas)
                changes.delete_record_set(record_delete)
            break
    print('Add record set ' + config['dns_name'] + ' in zone ' + config['zone_name'] + ' rrdata: ' + ip)
    record_set = zone.resource_record_set(config['dns_name'], config['record_type'], config['record_ttl'], [ip, ])
    changes.add_record_set(record_set)
    changes.create()  # API request
    while changes.status != 'done':
        print('Waiting for changes for ' + config['dns_name'] + ' in zone ' + config['zone_name'] + ' rrdata: ' + ip)
        time.sleep(10)  # or whatever interval is appropriate
        changes.reload()  # API request
    print('Change ' + config['dns_name'] + ' in zone ' + config['zone_name'] + ' rrdata: ' + ip + ' updated')
def main():
    ap = argparse.ArgumentParser(prog='ovh_dynhost',
                                 description='Updates the OVH DynHost entry to the current IP address.')
    ap.add_argument('-c', '--config', help='The configuration file location.')
    args = ap.parse_args()
    if args.config:
        config_file = args.config
    else:
        config_file = 'config.ini'
    cp = configparser.ConfigParser()
    if len(cp.read(config_file)) == 0:
        loge("Config file is missing or empty.")
        sys.exit(-1)
    for section in cp.sections():
        domain = section
        if cp.has_option(section, 'Username'):
            username = cp.get(domain, 'Username')
        if cp.has_option(section, 'Password'):
            password = cp.get(domain, 'Password')
        if cp.has_option(section, 'IP'):
            ip = cp.get(domain, 'IP')
        else:
            ip = ipgetter.myip()
        if not check_dns(domain, ip):
            update_dns(domain, ip, username, password)
        else:
            logi(domain + ": IP did not change.")
def main():
    parser = argparse.ArgumentParser(
        prog='ovh_dynhost',
        description='Updates the OVH DynHost entry to the current IP address.')
    parser.add_argument('domain', help='The FQDN to be updated.')
    parser.add_argument('user', help='The user id that has access to update the FQDN.')
    parser.add_argument('password', help='The password for the user id.')
    parser.add_argument(
        '-ip',
        help='Update to this ip address instead of the current external address.',
        default=ipgetter.myip(),
        metavar='x.x.x.x')
    args = parser.parse_args()

    domain = args.domain
    ip = args.ip
    user = args.user
    password = args.password

    if not check_dns(domain, ip):
        update_dns(domain, ip, user, password)
    else:
        sys.stdout.write("IP did not change.\n")
def main(): """ Update IP on route53 """ logger = createLogger() current_ip = ipgetter.myip() logger.info('Current IP: %s', current_ip) # Hosted Zone => Domain List domains_to_update = {'palmr.me.': ['palmr.me.', 'git.palmr.me.']} client = boto3.client('route53') # Get HostedZone IDs for hosted_zone in client.list_hosted_zones_by_name()['HostedZones']: if hosted_zone['Name'] in domains_to_update: logger.debug('Hosted Zone found: %s', hosted_zone['Name']) change_batch = {'Comment': 'Refreshing IP', 'Changes': []} for record_set in client.list_resource_record_sets(HostedZoneId=hosted_zone['Id'])['ResourceRecordSets']: if record_set['Name'] in domains_to_update[hosted_zone['Name']] and record_set['Type'] == 'A' and record_set['ResourceRecords'][0]['Value'] != current_ip: logger.info('Updating: %s :: old=%s new=%s', record_set['Name'], record_set['ResourceRecords'][0]['Value'], current_ip) change = {'Action': 'UPSERT', 'ResourceRecordSet': record_set} change['ResourceRecordSet']['ResourceRecords'][0]['Value'] = current_ip change_batch['Changes'].append(change) # Only apply changes if there were any if len(change_batch['Changes']) > 0: response = client.change_resource_record_sets(HostedZoneId=hosted_zone['Id'], ChangeBatch=change_batch) logger.info('Update Response: %s', response) else: logger.debug('Hosted Zone already up to date')
def experiment_tor(history):
    results_tor = open("results_tor.txt", "a")
    myIP = ipgetter.myip()
    my_Address = geolite2.lookup(socket.gethostbyname(myIP))
    for url in history:
        dest_Address = geolite2.lookup(socket.gethostbyname(url))
        if (dest_Address == None):
            print("Couldn't get location of ", url)
            continue
        url = 'https://www.' + url
        test = controller.get_circuits()
        for circuit in test:
            if (len(circuit.path) > 2):
                path = circuit.path
                circ = circuit
                break
        print path
        # test = path
        res_list = [controller.get_network_status(x[0]).address for x in path]  # Get ip addresses from fingerprints
        # print res_list
        locations_relay = [geolite2.lookup(x).location for x in res_list]  # Do lookups
        # print locations_relay
        locations = [my_Address.location, dest_Address.location] + locations_relay
        distance = totalDistance(locations)
        time = scan_head(controller, circ, url)
        if (time != -1):
            results_tor.write(str(distance) + "," + str(time))
def __init__(self, loaded_wallet):
    self.super_ips = ["18.222.145.60"]
    self.active_ips = []
    self.local_host = socket.gethostbyname(socket.gethostname())
    self.network_ip = ipgetter.myip()
    self.thread_master = []
    self.loaded_wallet = loaded_wallet
def experiment_smartor(history):
    results_smartor = open("results_smartor.txt", "a")
    relays = get_relays(controller)
    entry = relays[0]
    middle = relays[1]
    exit = relays[2]
    myIP = ipgetter.myip()
    my_Address = geolite2.lookup(socket.gethostbyname(myIP))
    for url in history:
        dest_Address = geolite2.lookup(socket.gethostbyname(url))
        if (dest_Address == None):
            print("Couldn't get location of ", url)
            continue
        # Get list of fingerprints for exit nodes
        exit_nodes = get_relays_fingerprint(num_relays, exit, dest_Address.location)
        entry_nodes = get_relays_fingerprint(num_relays, entry, my_Address.location)
        middleLocation = midpointCalculator(dest_Address.location, my_Address.location)
        middle_nodes = get_relays_fingerprint(num_relays, middle, my_Address.location)
        url = 'https://www.' + url
        path_with_locations = get_best_circuit(url, controller, entry_nodes, middle_nodes, exit_nodes, 10)
        if path_with_locations == -1:
            continue
        locations = [my_Address.location, dest_Address.location] + [x[1] for x in path_with_locations]
        distance = totalDistance(locations)
        best_path = [x[0] for x in path_with_locations]
        print("best path ", best_path)
        circuit_id = controller.new_circuit(best_path, await_build=True)
        test = controller.get_circuit(circuit_id)
        print 'Accessing url: ' + url
        get_page(url, controller, test, results_smartor, distance)
def first_start(prod=True):
    '''
    For first start: write the nmap config parameters to the DB table "nmap_config".
    '''
    engine = create_engine("mysql://root:@127.0.0.1/tokio")
    base.metadata.create_all(engine)
    base.metadata.bind = engine
    db_session = sessionmaker(bind=engine)
    session = db_session()
    session.query(ConfigNmap).delete()
    if prod:
        new_config_nmap = ConfigNmap(property='ext_ip', value=str(ipgetter.myip()))  # uncomment for prod
    else:
        new_config_nmap = ConfigNmap(property='ext_ip', value='127.0.0.1')
    session.add(new_config_nmap)
    port_config_nmap = ConfigNmap(property='ports1', value='20-443')
    session.add(port_config_nmap)
    port_config_nmap = ConfigNmap(property='ports2', value='1-1024')
    session.add(port_config_nmap)
    port_config_nmap = ConfigNmap(property='ports3', value='1-65565')
    session.add(port_config_nmap)
    session.commit()

    # debug. delete for prod
    print('SELECT * FROM config_nmap')
    s = session.query(ConfigNmap).all()
    for sa in s:
        print(sa.property, '=', sa.value)
def run(self):
    api_key = self.config_parser.get(self.section, 'api_key')
    dns_api = DreamhostDNS(api_key)
    domain = self.config_parser.get(self.section, 'domain')
    delay_str = self.config_parser.get(self.section, 'delay')
    delay = int(delay_str)
    last_ip = None
    while self.can_continue:
        current_ip = ipgetter.myip()
        records = dns_api.list(record=domain)
        record = records[0] if len(records) > 0 else None
        configured_ip = record.get('value', None) if record is not None else None
        if record is None or configured_ip is None:
            logging.info('ip needs to be created: %s', current_ip)
            response = dns_api.add(domain, current_ip, 'A')
            logging.info(response.text)
        elif configured_ip != current_ip:
            logging.info('ip needs to be updated: %s', configured_ip)
            result = dns_api.remove(record)
            response = dns_api.add(domain, current_ip, 'A')
            logging.info(response.text)
        else:
            logging.info('no need to update')
        last_ip = current_ip
        time.sleep(delay)
def _get_external_ip(self):
    '''
    Use the ipgetter library to pull our external IP from a random choice of 44 services.
    '''
    self.new_external_ip = ipgetter.myip()
    logger.debug('New External IP: %s', self.new_external_ip)
def send_email():
    current_ip = None
    if os.path.exists(file_path):
        current_ip = open(file_path, "r").read()
    new_ip = ipgetter.myip()
    if current_ip is None or new_ip != current_ip:
        print 'New IP detected {}'.format(new_ip)
        msg = MIMEMultipart()
        msg['Subject'] = subject
        msg['From'] = credentials.email_login['user']
        msg['To'] = credentials.email_login['user']
        msg.attach(MIMEText(new_ip, 'text'))
        server = smtplib.SMTP(host, port)
        server.ehlo()
        server.starttls()
        server.ehlo()
        server.login(credentials.email_login['user'], credentials.email_login['pass'])
        server.sendmail(credentials.email_login['user'], credentials.email_login['user'], msg.as_string())
        server.quit()
        print 'New IP sent'
        if os.path.exists(file_path):
            out_file = open(file_path, 'w+')
            out_file.write(new_ip)
            out_file.close()
            print 'New IP saved'
    else:
        print 'No changes in IP'
def music_playlist_url(server_id):
    global ext_ip
    try:
        if not ext_ip:
            ext_ip = ipgetter.myip()
    except Exception:
        ext_ip = "0.0.0.0"
    return "http://{}:{}/{}".format(ext_ip, config.get("http_port", 8080), server_id)
def getAddress():
    # global ip address of device
    globalIp = ipgetter.myip()
    # local ip address of device
    localIp = socket.gethostbyname(socket.gethostname())
    my_time = time.strftime('%Y-%m-%d %H:%M:%S')
    output = ('Global_IP:' + globalIp + '//Local_Ip:' + localIp +
              '//Time:' + my_time + '//Raspberry_Product_Key:' + getRaspberryPiID())
    return output
def get_ip_address():
    """
    Get the IP address of the current system.
    :param: None
    :return: None
    """
    ip = ipgetter.myip()
    print ip
def check_file():
    data = {}
    data['server_port'] = 8080
    data['content'] = "<center><h1>Haruki's WebServer cog for Red-DiscordBot</h1></center>"
    data['url'] = "{}:{}".format(ipgetter.myip(), data['server_port'])
    if not dataIO.is_valid_json('data/webserver/settings.json'):
        print('Creating settings.json...')
        dataIO.save_json('data/webserver/settings.json', data)
def __init__(self, bot):
    self.bot = bot
    bot.loop.create_task(self.make_webserver())
    self.server = None
    self.app = web.Application()
    self.handler = None
    self.dispatcher = {}
    self.settings = dataIO.load_json('data/webserver/settings.json')
    self.ip = ipgetter.myip()
def displayCreateGameMenu(self):
    self.ui.stackedWidget.setCurrentIndex(2)
    self.ui.gameLobbyMenu.hide()
    self.ui.createGameMenu.show()
    ipAddress = ipgetter.myip()
    self.ui.ipAddressLabel.setText(ipAddress)
    self.ui.createButton.clicked.connect(
        lambda: self.displayGameLobby("gameHost"))
    self.ui.cancelButton.clicked.connect(
        lambda: self.goBack(self.ui.createGameMenu, self.ui.gameLobbyMenu))
def getLoc():
    yoga = getoutput('termux-location -p network -r last')
    if 'not found' in yoga:
        import ipgetter
        ip = ipgetter.myip()
        return geocoder.ip(ip)
    else:
        import json
        data = json.loads(yoga)
        return geocoder.osm([data['latitude'], data['longitude']], method='reverse')
def main():
    # Get the list of arguments
    arg_list = sys.argv

    # Ensure sufficient arguments
    if len(arg_list) < 3:
        print('Usage: ./google_image_scraper <save_path> <keywords>...')
        sys.exit()

    # Get the location to store the images
    path = arg_list[1]

    # Join keywords with %20 to comply with Google API requirements
    keywords = '%20'.join(arg_list[2:])

    # The base url and path. Appends public IP as per Google API request
    URL = 'https://ajax.googleapis.com/ajax/services/search/images?v=1.0&q=' \
        + keywords + '&start=%d&userip=' + ipgetter.myip()
    PATH = os.path.join(path, arg_list[2] + '-dataset')

    # Check if directory already exists
    if not os.path.exists(PATH):
        os.makedirs(PATH)

    count = 0
    while count < 60:
        request = requests.get(URL % count)
        for image_info in json.loads(request.text)['responseData']['results']:
            url = image_info['unescapedUrl']
            try:
                image = requests.get(url)
            except requests.exceptions.ConnectionError:
                print('Could not download %s' % url)
                continue
            title = image_info['titleNoFormatting'].replace('/', '').replace('\\', '')
            file = open(os.path.join(PATH, '%s.jpg') % title, 'wb')  # binary mode so the JPEG bytes can be written
            try:
                Image.open(BytesIO(image.content)).save(file, 'JPEG')
            except IOError:
                print('Could not save %s' % url)
                continue
            finally:
                file.close()
        print(count)
        count = count + 4
        # Sleep to prevent IP blocking from Google
        time.sleep(1.5)
def onConnect(self, e):
    self.Logger('Connect Requested')
    self.out_ip = str(self.ip_box.GetValue()).replace(' ', '')
    local_ip = ipgetter.myip().replace('\'', '')
    self.Logger('Attempting connection to ' + self.out_ip)
    if self.SendData('Connection established with ' + local_ip):
        self.DisableDISCON(False)
        self.StatusBar.SetStatusText('Ready: Connected to ' + self.out_ip)
        self.update_chat('Connection established with ' + self.out_ip, 'system')
    else:
        self.update_chat('Failed to Connect to ' + self.out_ip, 'system')
def __init__(self, window, info):
    self.room, self.name = info[0], info[1]
    self.massege = ('my name is %s in %s' % (self.name, self.room)).encode()
    self.window = window
    self.data = sheet.GetIP('IP')
    self.client_data = sheet.GetIP('client')
    self.lan = self.get_LAN()
    self.port = randint(50000, 60000)
    self.wan = ipgetter.myip()
    self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    self.socket.bind((self.lan, self.port))
def get_networking(self):
    # Timing:
    # - 0.1b1: 23s
    # - 0.1b2:
    self.audit += "\n\n:: Networking Info ::\n"
    try:
        # /System/Library/PrivateFrameworks/Apple80211.framework/Versions/Current/Resources/airport -I | grep SSID
        ssid = subprocess.Popen(
            ('/System/Library/PrivateFrameworks/Apple80211.framework/Versions/Current/Resources/airport', '-I'),
            stdout=subprocess.PIPE)
        ssidPipe = subprocess.check_output(('grep', 'SSID'), stdin=ssid.stdout)
        self.audit += " SSID: " + ssidPipe.split()[3]
    except subprocess.CalledProcessError:
        self.audit += " SSID: N/A"

    # try:
    #     linkAuth = subprocess.Popen(('/System/Library/PrivateFrameworks/Apple80211.framework/Versions/Current/Resources/airport', '-I'), stdout=subprocess.PIPE)
    #     linkAuthPipe = subprocess.check_output(('grep', 'link'), stdin=linkAuth.stdout)
    #     self.audit += "\n Link Auth: " + linkAuthPipe.split()[2]
    # except subprocess.CalledProcessError:
    #     self.audit += "\n Link Auth: N/A"

    ipEN0 = subprocess.Popen(["ipconfig", "getifaddr", "en0"], stdout=subprocess.PIPE)
    ipEN0Value = ipEN0.communicate()[0].strip("\n")
    if ipEN0Value == "":
        self.audit += "\n IP Addresses:\n en0: N/A"
    else:
        self.audit += "\n IP Addresses:\n en0: " + ipEN0Value
        print(ipEN0Value)

    ipEN1 = subprocess.Popen(["ipconfig", "getifaddr", "en1"], stdout=subprocess.PIPE)
    ipEN1Value = ipEN1.communicate()[0].strip("\n")
    if ipEN1Value == "":
        self.audit += "\n en1: N/A"
    else:
        self.audit += "\n en1: " + ipEN1Value
        print(ipEN1Value)

    # extIP = subprocess.Popen(["curl", "ifconfig.me"], stdout=subprocess.PIPE)
    extIP = ipgetter.myip()
    self.audit += "\n External IP: " + extIP
    # extHost = subprocess.Popen(["curl", "ifconfig.me/host"], stdout=subprocess.PIPE)
    # self.audit += " External Host: " + extHost.communicate()[0]

    ethernetDNS = subprocess.Popen(["networksetup", "-getdnsservers", "Ethernet"], stdout=subprocess.PIPE)
    self.audit += "\n Ethernet DNS: " + ethernetDNS.communicate()[0].replace("\n", " ")

    wifiDNS = subprocess.Popen(["networksetup", "-getdnsservers", "Wi-Fi"], stdout=subprocess.PIPE)
    self.audit += "\n Wi-Fi DNS: " + wifiDNS.communicate()[0].replace("\n", " ")
def add(self, wallet):
    """
    Adds this client to the servers.

    wallet -- The wallet address used.
    """
    from agent.DNA import DNA
    from agent.CountryGetter import CountryGetter
    import ipgetter

    dna = DNA().json
    ip = ipgetter.myip()
    country = CountryGetter.get_country()
    self.server.add(ip, wallet, country, dna)
def header_generate():
    '''
    This function is needed by Instagram for security.
    Sign the request with the secret key and your IP address, so it can only be
    verified if you have both.
    '''
    ips = ipgetter.myip()
    print "my ip %r" % ips
    secret = client_secret
    signature = hmac.new(secret, ips, sha256).hexdigest()
    headers_sha256 = "|".join([ips, signature])
    return headers_sha256
def update(self):
    current_external_ip = ipgetter.myip()
    logging.info(
        'Updating - Current external IP = {0}'.format(current_external_ip))
    for dns_id in self._current_dns_ips:
        logging.debug(' {0} = {1}'.format(dns_id, self._current_dns_ips[dns_id]))
        if self._current_dns_ips[dns_id] != current_external_ip:
            logging.info(' Updating DNS entry for {0} to {1}'.format(dns_id, current_external_ip))
            self.call('put', 'dns/' + dns_id, {'content': current_external_ip})
            # Update cache
            self._current_dns_ips[dns_id] = current_external_ip
def get_external_ip_address():
    logging.info("Getting external IP address...")
    external_ip = None
    try:
        external_ip = ipgetter.myip()
    except:
        pass
    if external_ip:
        logging.info("The external IP address is {0}".format(external_ip))
    else:
        logging.error("Unable to identify the external IP. Do you have internet connectivity?")
    return external_ip
def send_monitor_data():
    # Serialize the metrics to JSON. The original assignment was truncated to a
    # bare dict followed by ", ensure_ascii='False')"; json.dumps is assumed
    # here, and ensure_ascii is passed as a real boolean.
    windows_usage = json.dumps({
        'os_name': platform.system(),  # string
        'os_version': platform.version(),  # string
        'cpu_model': platform.processor(),  # CPU Model
        'cpu_architecture': platform.architecture(),  # CPU Archi..
        'cpu_cores': psutil.cpu_count(),  # Number of cpus
        'cpu_threads': psutil.cpu_count(),  # Number of threads
        'cpu_percentage': psutil.cpu_percent(),  # CPU Perc
        'pid_running': len(psutil.pids()),  # Number of active PIDs
        'hostname': socket.gethostname(),  # string
        'internal_ip': socket.gethostbyname(socket.gethostname()),  # string
        'external_ip': ipgetter.myip(),  # string
        'ram_total': psutil.virtual_memory()[0],  # GB
        'ram_used': psutil.virtual_memory()[3],  # GB
        'ram_free': psutil.virtual_memory()[4],  # GB
        'ram_shared': psutil.virtual_memory()[2],  # GB
        'ram_available': psutil.virtual_memory()[1],  # GB
        'ram_buff': psutil.virtual_memory()[4],  # GB
        'swap_total': psutil.swap_memory()[0],  # GB
        'swap_used': psutil.swap_memory()[1],  # GB
        'swap_free': psutil.swap_memory()[2],  # GB
        'total_hdd': psutil.disk_usage('/')[0],  # All SDx partitions total in GB
        'used_hdd': psutil.disk_usage('/')[1],  # All SDx partitions usage in GB
        'available_hdd': psutil.disk_usage('/')[2],  # Free space in GB
        'uptime': datetime.datetime.fromtimestamp(time.time() - psutil.boot_time()).strftime('%H:%M'),  # Uptime in H:M format
    }, ensure_ascii=False)

    # Print value just for testing
    print(windows_usage)

    # Post data to server
    url = "http://localhost.monitorbeta.com/rest/v1/endpoint"
    headers = {'content-type': 'application/json'}
    r = post(url, data=windows_usage, headers=headers)

    # Print response just for testing
    print(r.text)
    sleep(5)

    # Recursive
    send_monitor_data()
def parse_metadata(cloud, vo):
    result = {'metadata': {}}
    result.update({'_timestamp': int(time.time())})
    result['metadata'].update({'ip': ipgetter.myip()})
    result['metadata'].update({'cloud': cloud})
    result['metadata'].update({'UID': generate_id()})
    result['metadata'].update({'VO': vo})
    result['metadata'].update({'osdist': commands.getoutput("lsb_release -d 2>/dev/null").split(":")[1][1:]})
    result['metadata'].update({'pyver': sys.version.split()[0]})
    result['metadata'].update({'cpuname': commands.getoutput("cat /proc/cpuinfo | grep '^model name' | tail -n 1").split(':')[1].lstrip()})
    result['metadata'].update({'cpunum': int(commands.getoutput("cat /proc/cpuinfo | grep '^processor' |wc -l"))})
    result['metadata'].update({'bogomips': float(commands.getoutput("cat /proc/cpuinfo | grep '^bogomips' | tail -n 1").split(':')[1].lstrip())})
    result['metadata'].update({'meminfo': float(commands.getoutput("cat /proc/meminfo | grep 'MemTotal:'").split()[1])})
    return result
def run():
    arguments = docopt.docopt(__doc__)
    print(arguments)
    if arguments['detectip']:
        print('Finding external IP address...')
        ip = ipgetter.myip()
        if ip:
            print(ip)
        else:
            print('Could not find external IP!')
        return

    match_function = make_match_function(arguments)
    url = arguments['<url>']
    target_parameter = arguments['<target_parameter>']
    parameters = arguments['<parameters>']
    oob_ip = arguments["--oob-ip"]
    oop_port = arguments["--oob-port"]
    shell = arguments['--shell']
    shell_cmd = arguments['--shellcmd']
    fast = arguments['--fast']
    stats = arguments['--stats']
    concurrency = arguments['--concurrency']
    method = arguments['--method']
    if concurrency:
        if not concurrency.isdigit():
            print('Error: Concurrency is not an integer', file=sys.stderr)
            return
        concurrency = int(concurrency)
    only_features = arguments['--features']
    body = arguments['--body']
    cookie = arguments['--cookie']
    loop = asyncio.get_event_loop()
    try:
        loop.run_until_complete(
            start_action(url, target_parameter, parameters, match_function,
                         oob_ip, oop_port, shell, shell_cmd, fast, stats,
                         concurrency, only_features, body, cookie, method))
    except KeyboardInterrupt:
        loop.stop()
def generate_data(traceroute, writer):
    logger.info('Backbone Tracer is tracing IPs..')
    source = ipgetter.myip()
    logger.info('My IP address is: %s' % source)
    with gzip.open(IN_FILENAME) as reader:
        offset = writer.count()
        itr = enumerate(skip_beginning(reader, offset))
        logger.info("Skipping past the first %d addresses." % offset)
        threads = []
        for i in range(10):
            thread = Thread(target=tracer, args=(source, itr))
            threads.append(thread)
            thread.start()
        for thread in threads:
            thread.join()
def encrypt_ip(public_key_path):
    """Encrypt a message using the user's public key and return a set of bytes
    representing the encrypted IP.

    :returns: The IP address of the machine calling this function, encrypted
    :rtype: bytes = b'...' a sequence of octets
    """
    # Open the public key - the public key of the client computer
    public_key = RSA.importKey(open(public_key_path).read())
    cipher = PKCS1_OAEP.new(public_key)
    message = ipgetter.myip()
    encrypted_message = cipher.encrypt(message.encode("utf-8"))
    return encrypted_message
def getIP():
    """ get the IP address """
    IP = ipgetter.myip()
    return IP

#db = connect()
#engine = db.connect()
#init_db(engine)

## TEST
#update()
#
#
#Base = declarative_base(metadata=MetaData(schema='public'))
#class Events(Base):
#    __tablename__ = 'events'
#    event_id = Column(Integer, primary_key=True, autoincrement=True, nullable=True)
#    event_name = Column(Text)
#    player1 = Column(JSON)
#    player2 = Column(JSON)
#
#SessionFactory = sessionmaker(engine)
#
#
#session = SessionFactory()
#u = Events(
#    #event_id=6,
#    event_name=events['event_name'],
#    player2={'name': 'test2'})
#session.add(u)
#session.commit()
#
#
#uu = session.query(Events).first()
#
#uu = session.query(Events).filter(
#    Events.player1[
#        ("name")
#    ].cast(Text) == 'test'
#).one()
def parse_metadata(id, cloud, vo):
    start_time = time.strftime('%Y-%m-%dT%H:%M:%SZ', time.localtime(int(os.environ['init_tests'])))
    result = {'metadata': {}}
    result.update({'_id': "%s_%s" % (id, start_time)})
    result.update({'_timestamp': start_time})
    result['metadata'].update({'ip': ipgetter.myip()})
    result['metadata'].update({'classification': os.environ['HWINFO']})
    result['metadata'].update({'cloud': cloud})
    result['metadata'].update({'UID': id})
    result['metadata'].update({'VO': vo})
    result['metadata'].update({'osdist': commands.getoutput("lsb_release -d 2>/dev/null").split(":")[1][1:]})
    result['metadata'].update({'pyver': sys.version.split()[0]})
    result['metadata'].update({'cpuname': commands.getoutput("cat /proc/cpuinfo | grep '^model name' | tail -n 1").split(':')[1].lstrip()})
    result['metadata'].update({'cpunum': int(commands.getoutput("cat /proc/cpuinfo | grep '^processor' |wc -l"))})
    result['metadata'].update({'bogomips': float(commands.getoutput("cat /proc/cpuinfo | grep '^bogomips' | tail -n 1").split(':')[1].lstrip())})
    result['metadata'].update({'meminfo': float(commands.getoutput("cat /proc/meminfo | grep 'MemTotal:'").split()[1])})
    return result
def main():
    parser = argparse.ArgumentParser(description=__description__)
    #parser.add_argument('-q', '--quiet', action='store_true',
    #                    help='quiet output')
    parser.add_argument('password')
    args = parser.parse_args()
    if args.password:
        fromaddr = '*****@*****.**'
        toaddrs = '*****@*****.**'
        IP = ipgetter.myip()
        msg = IP
        username = '******'
        password = args.password
        server = smtplib.SMTP_SSL('smtp.gmail.com', 465)
        server.ehlo()
        server.login(username, password)
        server.sendmail(fromaddr, toaddrs, msg)
        server.quit()
def update_upload(self):
    """Updates the upload capacity."""
    regex = r"[A-z0-9]+:(?:\W+[0-9]+){8}\W+([0-9]+)"
    import ipgetter
    import re
    ip = ipgetter.myip()
    filename = "/proc/net/dev"
    read = None
    with open(filename, "r") as f:
        read = f.read()
    if read is not None:
        matches = re.findall(regex, read)
        print("%s matches found." % str(len(matches)))
        total = 0L
        for upload in matches:
            total += long(upload)
        print('total is: %s' % str(total))
        self.server.update(ip, str(upload))
    else:
        print("Read of %s failed to produce any matches." % filename)
def run(self):
    while True:
        prev_ip = ""
        try:
            with open(prev_ip_f, "r") as f:
                prev_ip = f.readline().replace("\n", "")
        except IOError:
            pass
        ip = ipgetter.myip().replace("\n", "")
        if ip != prev_ip:
            try:
                with open(prev_ip_f, "w") as f:
                    f.write(ip)
            except IOError:
                pass
            self.mail([mail], "ip message", "your ip has changed: %s" % ip)
            prev_ip = ip
        if ip == "":
            time.sleep(empty_wait_t)
        else:
            time.sleep(wait_t)
def _parsePeers(self, response):
    """Return a list of peers for this torrent.

    Currently this function only works for the binary model tracker response.

    Args:
        response (dict): bdecoded tracker response

    Returns:
        peers (list): list of (ip, port) tuples, one for each peer and
            empty if no peers are found

    Raises:
        ManagerError if peers binary data is wrong len

    Spec:
        Peers value is a string consisting of multiples of 6 bytes.
        First 4 bytes are the IP address and last 2 bytes are the port
        number. All in network (big endian) notation.
    """
    myip = ipgetter.myip()
    # myip = MY_IP_ADDRESS  # TODO: Hardcoded for testing purposes
    peers = []
    if 'peers' not in response.keys():
        return peers
    binary_data = response['peers']
    if len(binary_data) % 6 != 0:
        raise ManagerError("Peers binary data is not a multiple of 6.")
    for i in xrange(len(binary_data) / 6):
        # each peer is 6 bytes
        (ip1, ip2, ip3, ip4, port) = struct.unpack('>4BH', binary_data[:6])
        ip_address = ".".join([str(ip1), str(ip2), str(ip3), str(ip4)])
        if ip_address != myip:
            peers.append((ip_address, port))
        binary_data = binary_data[6:]
    return peers
def compare():
    """Compare the current IP and the stored IP.

    Update the DynHost IP if different.
    """
    stored_ip = get_stored_ip()
    logger.info("Stored IP: %s", stored_ip)
    current_ip = ipgetter.myip()
    logger.info("Current IP: %s", current_ip)

    # Check if there is no difference between stored IP and current IP
    if not stored_ip or (stored_ip != current_ip):
        logger.info("DynHost IP updated! [New IP]")
        dynhost_ip = set_dynhost_ip(current_ip)
        if dynhost_ip:
            store_ip(current_ip)
        else:
            # This will force update next call
            store_ip('Error')
    # Set the DynHost IP every 15 minutes
    if (time.gmtime().tm_min % MIN_UPDATE_TIME) == 0:
        logger.info("DynHost IP updated! [15 min]")
        set_dynhost_ip(current_ip)
import socket
from settings import *
import ipgetter

sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.connect((TCP_IP, TCP_PORT))
sock.send(ipgetter.myip())
data = sock.recv(BUFFER_SIZE)
sock.close()
def get_wan_ip():
    IP = ipgetter.myip()
    return IP
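# A minimal sketch, not taken from any of the projects above, of the defensive
# pattern several of these snippets repeat: try ipgetter.myip() for the external
# address and fall back to a local-socket lookup if the web services fail.
# The fallback target 8.8.8.8:53, the default value, and the function name are
# illustrative assumptions, not part of any project shown here.
import socket

import ipgetter


def safe_external_ip(default='0.0.0.0'):
    """Return the external IP, else a LAN address, else `default`."""
    try:
        ip = ipgetter.myip()
        if ip:
            return ip
    except Exception:
        pass
    try:
        # Connecting a UDP socket sends no packets; it only selects the
        # outgoing interface, whose address getsockname() then reports.
        s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        s.connect(('8.8.8.8', 53))
        lan_ip = s.getsockname()[0]
        s.close()
        return lan_ip
    except OSError:
        return default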