def modified_run(self):
    # Reports an anonymous usage event to Google Analytics before running the
    # original command. `command_subclass`, `orig_run`, `platform`, `getnode`
    # and `__version__` come from the enclosing scope.
    import sys
    try:
        try:
            from urllib2 import HTTPHandler, build_opener
            from urllib2 import urlopen, Request
            from urllib import urlencode
        except ImportError:
            from urllib.request import HTTPHandler, build_opener
            from urllib.request import urlopen, Request
            from urllib.parse import urlencode

        os_ver = platform.system()
        py_ver = "_".join(str(x) for x in sys.version_info)
        now_ver = __version__.replace(".", "_")

        code = "os:{0},py:{1},now:{2}".format(os_ver, py_ver, now_ver)
        action = command_subclass.action
        cid = getnode()
        payload = {"v": "1", "tid": "UA-61791314-1", "cid": str(cid),
                   "t": "event", "ec": action, "ea": code}

        url = "http://www.google-analytics.com/collect"
        data = urlencode(payload).encode("utf-8")
        request = Request(url, data=data)
        request.get_method = lambda: "POST"
        connection = urlopen(request)
    except:
        pass
    orig_run(self)
def getMacAddress(self):
    mac = uuid.getnode()
    if uuid.getnode() == mac:
        mac = ':'.join('%02X' % ((mac >> 8 * i) & 0xff)
                       for i in reversed(xrange(6)))
    else:
        mac = 'UNKNOWN'
    return mac
def machine_id():
    # If uuid.getnode() fails to get a valid MAC address it returns a random
    # number. The function is run twice to check for this.
    id = uuid.getnode()
    if id == uuid.getnode():
        return id
    else:
        raise OSError('machine_id: No MAC-address found')
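The two snippets above call uuid.getnode() twice to detect its random fallback. A single call can also suffice: per RFC 4122, a randomly generated node ID always has the multicast bit set, which a real (unicast) hardware MAC never does. A minimal sketch of that check (the helper name is ours, not from the snippets above):

import uuid

def node_is_random(node=None):
    # RFC 4122: when uuid.getnode() falls back to a random 48-bit number,
    # it sets the multicast bit (least significant bit of the first octet,
    # i.e. bit 40); hardware MAC addresses are unicast and never have it set.
    if node is None:
        node = uuid.getnode()
    return bool((node >> 40) & 1)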
def getBaseInfo(self):
    # pythoncom.CoInitialize()
    try:
        info = {}
        info["infoid"] = str(uuid.uuid1())
        info["machineid"] = str(uuid.getnode())
        info["timestamp"] = time.strftime("%Y%m%d%H%M%S", time.localtime())
        info["appkey"] = self.AppKey
        hardware = {}
        hardware["cpu"] = self.getCpus()
        hardware["memory"] = self.getMemory()
        hardware["disk"] = self.getDrives()
        software = {}
        software["os"] = self.getOs()
        info["hardware"] = hardware
        info["software"] = software
        return info
    finally:
        # pythoncom.CoUninitialize()
        pass
def write_domogik_configfile(advanced_mode, intf):
    # read the sample config file
    newvalues = False
    config = configparser.RawConfigParser()
    config.read(['/etc/domogik/domogik.cfg.sample'])
    itf = ['bind_interface', 'interfaces']
    for sect in config.sections():
        info("Starting on section {0}".format(sect))
        if sect != "metrics":
            for item in config.items(sect):
                if item[0] in itf and not advanced_mode:
                    config.set(sect, item[0], intf)
                    debug("Value {0} in domogik.cfg set to {1}".format(item[0], intf))
                elif is_domogik_advanced(advanced_mode, sect, item[0]):
                    print("- {0} [{1}]: ".format(item[0], item[1])),
                    new_value = sys.stdin.readline().rstrip('\n')
                    if new_value != item[1] and new_value != '':
                        # need to write it to config file
                        config.set(sect, item[0], new_value)
                        newvalues = True
        # manage metrics section
        else:
            # set a unique id which is hardware dependent
            config.set(sect, "id", uuid.getnode())
            print("Set [{0}] : id = {1}".format(sect, uuid.getnode()))
            debug("Value id in domogik.cfg > [metrics] set to {0}".format(uuid.getnode()))
    # write the config file
    with open('/etc/domogik/domogik.cfg', 'wb') as configfile:
        ok("Writing the config file")
        config.write(configfile)
def test_getnode(self):
    node1 = uuid.getnode()
    self.assertTrue(0 < node1 < (1 << 48), '%012x' % node1)

    # Test it again to ensure consistency.
    node2 = uuid.getnode()
    self.assertEqual(node1, node2, '%012x != %012x' % (node1, node2))
def test_getnode(self):
    node1 = uuid.getnode()
    self.check_node(node1, "getnode1")

    # Test it again to ensure consistency.
    node2 = uuid.getnode()
    self.check_node(node2, "getnode2")

    self.assertEqual(node1, node2)
def register(request):
    if request.method == "POST":
        form = EntryForm(request.POST)
        if form.is_valid():
            entry = form.save(commit=False)
            entry.mac = ':'.join(re.findall('..', '%012x' % uuid.getnode()))
            entry.save()
            return redirect('registration.views.thanks')
    else:
        mac = ':'.join(re.findall('..', '%012x' % uuid.getnode()))
        return render(request, 'registration/register.html',
                      {'form': EntryForm(), 'mac': mac,
                       'var': Entry.objects.filter(mac=mac).exists()})
def gen_session_hash(user):
    salt = os.urandom(16)
    if PY3:
        # int.to_bytes() requires a length and byte order; getnode() is a
        # 48-bit value, hence 6 bytes. The timestamp is a float, so it is
        # stringified before encoding.
        key = b''.join([uuid.getnode().to_bytes(6, 'big'), user,
                        str(time()).encode()])
    else:
        key = b''.join([str(uuid.getnode()), user, str(time())])
    s = hashlib.sha1()
    s.update(key)
    s.update(salt)
    return s.digest()
def test_getnode(self):
    import sys
    print("""
    WARNING: uuid.getnode is unreliable on many platforms.
        It is disabled until the code and/or test can be fixed properly.""",
          file=sys.__stdout__)
    return

    node1 = uuid.getnode()
    self.check_node(node1, "getnode1")

    # Test it again to ensure consistency.
    node2 = uuid.getnode()
    self.check_node(node2, "getnode2")

    self.assertEqual(node1, node2)
def _get_key_default():
    'Uses uuid to get a system identifier.'
    mac_address = uuid.getnode()
    # in accordance with the RFC, the UUID module may return a random
    # number if unable to discover the machine's MAC address. this doesn't
    # make for a very good key.
    if mac_address == uuid.getnode():
        return str(mac_address)
    else:
        # this value is dependent on the computer's hostname. a weak
        # fallback, but better than nothing.
        import platform
        return os.environ.get('processor_identifier',
                              'OMG WHERE AM I') + ''.join(platform.uname())
def __init__(self): """Create an echo gadget. """ device_desc = usb_descriptors.DeviceDescriptor( idVendor=usb_constants.VendorID.GOOGLE, idProduct=usb_constants.ProductID.GOOGLE_ECHO_GADGET, bcdUSB=0x0200, iManufacturer=1, iProduct=2, iSerialNumber=3, bcdDevice=0x0100) feature = EchoCompositeFeature( endpoints=[(0, 4, 0x81, 0x01), (1, 5, 0x82, 0x02), (2, 6, 0x83, 0x03)]) super(EchoGadget, self).__init__(device_desc, [feature]) self.AddStringDescriptor(1, 'Google Inc.') self.AddStringDescriptor(2, 'Echo Gadget') self.AddStringDescriptor(3, '{:06X}'.format(uuid.getnode())) self.AddStringDescriptor(4, 'Interrupt Echo') self.AddStringDescriptor(5, 'Bulk Echo') self.AddStringDescriptor(6, 'Isochronous Echo') # Enable Microsoft OS Descriptors for Windows 8 and above. self.EnableMicrosoftOSDescriptorsV1(vendor_code=0x01) # These are used to force Windows to load WINUSB.SYS for the echo functions. self.SetMicrosoftCompatId(0, 'WINUSB') self.SetMicrosoftCompatId(1, 'WINUSB') self.SetMicrosoftCompatId(2, 'WINUSB') self.AddDeviceCapabilityDescriptor(usb_descriptors.ContainerIdDescriptor( ContainerID=uuid.uuid4().bytes_le))
def get_localmac():
    try:
        tmp = uuid.UUID(int=uuid.getnode()).hex[-12:]
        mac = ":".join([tmp[e:e + 2] for e in range(0, 11, 2)])
        return mac
    except Exception as e:
        print u"Failed to get the local MAC address. Reason: %s" % e
def gen_id():
    d = datetime.now()
    sec_since_epoc = str(int((d - d.utcfromtimestamp(0)).total_seconds()))

    # Getting the mac address for the computer
    mac = str(getnode())

    # Combine the 6 most significant digits of the mac address and the time
    return int(mac[0:6] + sec_since_epoc)
def send(self, msg):
    try:
        log = {}
        log['log'] = {}
        log['log']['metadata'] = {}
        log['log']['fields'] = {}
        log['log']['fields']['raw_msg'] = msg
        if self.whitelist:
            log['log']['field_whitelist'] = self.whitelist
        else:
            log['log']['field_whitelist'] = []
        try:
            mac_addr = getnode()
            mac_addr = ':'.join(("%012X" % mac_addr)[i:i + 2]
                                for i in range(0, 12, 2))
            log['log']['metadata']['loghost_mac'] = mac_addr
            log['log']['metadata']['loghost_ip'] = socket.gethostbyname(
                socket.gethostname())
        except:
            pass
        r = requests.post(self.url, json=log)
        if r.status_code == 200:
            return True
        else:
            return False
    except:
        return False
def getID(self):
    """ Returns a unique id for this module.

    Normally this is defined in the settings and has a well known value.
    This module ID is used, for example, when storing assets using multiple
    archivers on different systems: a given asset can then only be retrieved
    by asking a well-known archiver, so we need to store the archiver ID
    along with the asset in the database.

    If the ID is not defined in the settings, then a warning is issued, but
    a calculated ID is returned. The ID is calculated using a hash of the
    MAC address. The MAC address is retrieved using the uuid.getnode
    function; please note that this function does not always return the same
    value (refer to the python documentation:
    http://docs.python.org/library/uuid.html#uuid.getnode for further
    information).
    """
    if settings.module_id is not None:
        return settings.module_id

    # The ID was not defined in the settings, issue a warning and calculate
    # a (hopefully) fixed value.
    warnings.warn("Undefined module ID. Trying to guess one from the MAC"
                  " address.", RuntimeWarning)
    return hashlib.sha256(str(uuid.getnode())).hexdigest()
def handle_system_information(username, password):
    mac = uuid.getnode().__str__()
    system = rest_client.get_system_information(mac)
    system_name = None

    # Register a new System if this one isn't recognized
    if system is None:
        hostname = socket.gethostname()
        name_input = raw_input("What do you want to call this system? "
                               "For example Home, File Server, etc. [%s]: "
                               % hostname)
        name = name_input or hostname
        system_name = rest_client.register_system(RegisterSystem(
            name, mac, hostname, __version__))
        if system_name:
            print("Registered a new system " + name)
        else:
            return (None, None)

    # Login with this new system
    access_token = rest_client.login_user(LoginForm(username, password, mac))
    if access_token is None:
        print("Failed to login with system.")
        return (None, None)

    # If this system is already registered
    if system is not None:
        system_name = system.name
        print("Welcome back! Looks like this box is already registered as " +
              system.name + ".")

    return (access_token, system_name)
def __init__(self):
    super(EarthReaderApp, self).__init__(application_id=APP_ID)
    self.session = Session('er-gtk-{0:x}'.format(uuid.getnode()))
    self.repository = FileSystemRepository(
        '/home/dahlia/Dropbox/Earth Reader')  # FIXME
    self.stage = Stage(self.session, self.repository)
    self.connect('activate', self.on_activate)
def _get_mac(self):
    """ Try to get a unique identifier.
    Some providers may change the MAC on stop/start, so the EC2-style
    instance id is preferred. Use a low timeout to speed up agent start
    when there is no meta-data url.
    """
    uuid = None
    try:
        import urllib
        socket.setdefaulttimeout(2)
        urlopen = urllib.urlopen("http://169.254.169.254/latest/meta-data/instance-id")
        socket.setdefaulttimeout(10)
        for line in urlopen.readlines():
            if "i-" in line:
                # keep the instance id itself; hex() on the string here
                # would raise a TypeError
                uuid = line.strip()
        urlopen.close()
    except:
        pass

    # Use network mac if no instance id was found
    if not uuid:
        from uuid import getnode
        uuid = getnode()
    return uuid
def get_mac_address():
    """ Get the MAC address """
    node = uuid.getnode()
    mac = uuid.UUID(int=node).hex[-12:]
    return mac
def from_datetime(self, dt):
    """
    generates a UUID for a given datetime

    :param dt: datetime
    :type dt: datetime
    :return:
    """
    global _last_timestamp
    epoch = datetime(1970, 1, 1, tzinfo=dt.tzinfo)
    offset = epoch.tzinfo.utcoffset(epoch).total_seconds() if epoch.tzinfo else 0
    timestamp = (dt - epoch).total_seconds() - offset

    node = None
    clock_seq = None

    nanoseconds = int(timestamp * 1e9)
    # 0x01b21dd213814000 is the number of 100-ns intervals between the
    # UUID epoch 1582-10-15 and the Unix epoch 1970-01-01.
    timestamp = int(nanoseconds // 100) + 0x01b21dd213814000

    if clock_seq is None:
        import random
        clock_seq = random.randrange(1 << 14)  # instead of stable storage
    time_low = timestamp & 0xffffffff
    time_mid = (timestamp >> 32) & 0xffff
    time_hi_version = (timestamp >> 48) & 0x0fff
    clock_seq_low = clock_seq & 0xff
    clock_seq_hi_variant = (clock_seq >> 8) & 0x3f
    if node is None:
        node = getnode()
    return pyUUID(fields=(time_low, time_mid, time_hi_version,
                          clock_seq_hi_variant, clock_seq_low, node),
                  version=1)
def mac(self):
    """
    Get the MAC address of the current container.
    """
    imac = getnode()
    mac = ':'.join(("%012X" % imac)[i:i + 2] for i in range(0, 12, 2))
    return mac.lower()
def save_pangenome_and_report(self, genome_refs, orthologs):
    self.log_line("Saving pangenome object")
    output_obj_name = self.params["output_pangenome_id"]
    pangenome = {"genome_refs": genome_refs, "id": output_obj_name,
                 "name": output_obj_name, "orthologs": orthologs,
                 "type": "orthomcl"}
    input_ws_objects = []
    if "input_genomeset_ref" in self.params and \
            self.params["input_genomeset_ref"] is not None:
        input_ws_objects.append(self.params["input_genomeset_ref"])
    if "input_genome_refs" in self.params and \
            self.params["input_genome_refs"] is not None:
        for genome_ref in self.params["input_genome_refs"]:
            if genome_ref is not None:
                input_ws_objects.append(genome_ref)
    self.provenance[0]["input_ws_objects"] = input_ws_objects
    self.provenance[0]["description"] = \
        "Orthologous groups construction using OrthoMCL tool"
    info = self.ws.save_objects({
        "workspace": self.params["output_workspace"],
        "objects": [{"type": "KBaseGenomes.Pangenome",
                     "name": output_obj_name,
                     "data": pangenome,
                     "provenance": self.provenance}]})[0]
    pangenome_ref = str(info[6]) + "/" + str(info[0]) + "/" + str(info[4])
    report = "Input genomes: " + str(len(genome_refs)) + "\n" + \
             "Output orthologs: " + str(len(orthologs)) + "\n"
    report_obj = {"objects_created": [{"ref": pangenome_ref,
                                       "description": "Pangenome object"}],
                  "text_message": report}
    report_name = "orthomcl_report_" + str(hex(uuid.getnode()))
    report_info = self.ws.save_objects({
        "workspace": self.params["output_workspace"],
        "objects": [{"type": "KBaseReport.Report", "data": report_obj,
                     "name": report_name, "meta": {}, "hidden": 1,
                     "provenance": self.provenance}]})[0]
    return {"pangenome_ref": pangenome_ref,
            "report_name": report_name,
            "report_ref": str(report_info[6]) + "/" + str(report_info[0]) +
                          "/" + str(report_info[4])}
def initialize_urandom():
    """
    This function and web2py_uuid follow from the following discussion:
    http://groups.google.com/group/web2py-developers/browse_thread/thread/7fd5789a7da3f09

    At startup web2py computes a unique ID that identifies the machine by adding
    uuid.getnode() + int(time.time() * 1e6)

    This is a 48-bit number. It converts the number into 16 8-bit tokens.
    It uses this value to initialize the entropy source ('/dev/urandom') and
    to seed random.

    If os.urandom() is not supported, it falls back to using random and
    issues a warning.
    """
    node_id = uuid.getnode()
    microseconds = int(time.time() * 1e6)
    ctokens = [((node_id + microseconds) >> ((i % 6) * 8)) % 256
               for i in range(16)]
    random.seed(node_id + microseconds)
    try:
        os.urandom(1)
        try:
            # try to add process-specific entropy
            frandom = open('/dev/urandom', 'wb')
            try:
                frandom.write(''.join(chr(t) for t in ctokens))
            finally:
                frandom.close()
        except IOError:
            # works anyway
            pass
    except NotImplementedError:
        logger.warning(
            """Cryptographically secure session management is not possible on your system because
your system does not provide a cryptographically secure entropy source.
This is not specific to web2py; consider deploying on a different operating system.""")
    return ctokens
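A quick standalone illustration of the token derivation above, with made-up values: the shift distance cycles through (i % 6) * 8, so only six byte positions of the 48-bit sum are ever sampled and the token sequence repeats with period 6.

node_id = 0x123456789abc          # hypothetical uuid.getnode() result
microseconds = 1234567890123456   # hypothetical int(time.time() * 1e6)
ctokens = [((node_id + microseconds) >> ((i % 6) * 8)) % 256
           for i in range(16)]
assert ctokens[0] == ctokens[6] == ctokens[12]  # period-6 repetition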
def __flush__(self):
    if DEBUG: print "class Experiment_Sync_Group, func __flush__"
    lsx = open(file_locations.file_locations['last_xtsm'][uuid.getnode()]
               + "last_xtsm.xtsm", "w")
    lsx.write(self.last_successful_xtsm)
    lsx.close()
    self.compiled_xtsm.flush()
    self.compiled_xtsm.filestream.__flush__()
def merge_sorted_orig(arrs):
    """
    merges a list of pre-sorted 64-bit floating point (numpy) arrays,
    discarding duplicate items, and returns the merged array.

    equivalent to numpy.unique(numpy.concatenate(arrs)) for pre-sorted
    arrays without self-duplicates, but ~6x faster.

    this routine calls a precompiled dll library function
    """
    if type(arrs) != type([]):
        arrs = [arrs]
    num_arr = len(arrs)
    lens = (ctypes.c_ulong * (len(arrs) + 1))()
    totsiz = 0
    for arr in range(len(arrs)):
        if len(arrs[arr].shape) != 1:
            arrs[arr] = arrs[arr].flatten()
        #arrs[arr]=numpy.asarray(arrs[arr],dtype=numpy.float64).astype(numpy.float64)
        lens[arr] = arrs[arr].size
        totsiz += lens[arr]
    outarr = numpy.empty(totsiz + 1, dtype=numpy.float64)
    arrs.append(outarr)
    ctypes_arrays = [numpy.ctypeslib.as_ctypes(array) for array in arrs]
    pointer_ar = (ctypes.POINTER(ctypes.c_longdouble) * len(arrs))(*ctypes_arrays)
    ctypes_lens = ctypes.POINTER(ctypes.c_uint32)(lens)
    ctypes_arrnum = ctypes.c_uint16(len(arrs))
    # the dll path is looked up per-machine, keyed by uuid.getnode()
    ctypes.CDLL(file_locations.file_locations['repasint_dll'][uuid.getnode()]).merge_sorted_drop_dup(
        pointer_ar, ctypes_arrnum, ctypes_lens)
    cc = (outarr[1:ctypes_lens[num_arr]])
    del arrs[-1]
    #pdb.set_trace()
    return cc
def get_boxid():
    mac = hex(uuid.getnode()).encode('ascii')
    #return base64.b64encode(hashlib.sha512(mac).hexdigest())
    return base64.b64encode(hashlib.sha512(mac).hexdigest().encode('ascii'))
def setUID():
    logging.debug("setUID Function Initiated")
    import uuid
    user = str(os.getuid())
    system = str(uuid.getnode())
    output = hasher(user + system)
    return output
def handle_exception(cls, exc):
    (etype, evalue, tb), canvas = exc
    exception = traceback.format_exception_only(etype, evalue)[-1].strip()
    stacktrace = ''.join(traceback.format_exception(etype, evalue, tb))

    def _find_last_frame(tb):
        while tb.tb_next:
            tb = tb.tb_next
        return tb

    frame = _find_last_frame(tb)
    err_module = '{}:{}'.format(
        frame.tb_frame.f_globals.get('__name__', frame.tb_frame.f_code.co_filename),
        frame.tb_lineno)

    def _find_widget_frame(tb):
        while tb:
            if isinstance(tb.tb_frame.f_locals.get('self'), OWWidget):
                return tb
            tb = tb.tb_next

    widget_module, widget, frame = None, None, _find_widget_frame(tb)
    if frame:
        widget = frame.tb_frame.f_locals['self'].__class__
        widget_module = '{}:{}'.format(widget.__module__, frame.tb_lineno)

    # If this exact error was already reported in this session,
    # just warn about it
    if (err_module, widget_module) in cls._cache:
        QMessageBox(QMessageBox.Warning, 'Error Encountered',
                    'Error encountered{}:<br><br><tt>{}</tt>'.format(
                        ' in widget <b>{}</b>'.format(widget.name) if widget else '',
                        stacktrace.replace('\n', '<br>').replace(' ', '&nbsp;')),
                    QMessageBox.Ignore).exec()
        return

    F = cls.DataField
    data = OrderedDict()
    data[F.EXCEPTION] = exception
    data[F.MODULE] = err_module
    if widget:
        data[F.WIDGET_NAME] = widget.name
        data[F.WIDGET_MODULE] = widget_module
    if canvas:
        filename = mkstemp(prefix='ows-', suffix='.ows.xml')[1]
        # Prevent excepthook printing the same exception when
        # canvas tries to instantiate the broken widget again
        with patch('sys.excepthook', lambda *_: None):
            canvas.save_scheme_to(canvas.current_document().scheme(), filename)
        data[F.WIDGET_SCHEME] = filename
        with open(filename) as f:
            data['_' + F.WIDGET_SCHEME] = f.read()
    data[F.VERSION] = VERSION_STR
    data[F.ENVIRONMENT] = 'Python {} on {} {} {} {}'.format(
        platform.python_version(), platform.system(), platform.release(),
        platform.version(), platform.machine())
    data[F.MACHINE_ID] = str(uuid.getnode())
    data[F.STACK_TRACE] = stacktrace

    cls(data=data).exec()
def sysinfo():
    '''
    A function to get the uuid, mac address, system name, system version
    and user name.
    '''
    global SysInfo
    import platform, getpass, hashlib
    from uuid import getnode
    log = baselog.getChild('sysinfo')
    sysname = platform.system()
    sysver = platform.release()
    username = getpass.getuser()
    macadd = '{0:012X}'.format(getnode())
    macadd = ':'.join([macadd[i:i + 2] for i in range(0, len(macadd), 2)])
    idstr = "{0} {1} {2} {3}".format(macadd, sysname, sysver, username)
    hashid = hashlib.md5(idstr).hexdigest()
    SysInfo["hash"] = hashid
    SysInfo["mac"] = macadd
    SysInfo["sys"] = sysname
    SysInfo["ver"] = sysver
    SysInfo["user"] = username
    log.debug('''-----sys info-------
%s''', dictdisp(SysInfo))
    return idstr
def get_mac_address():
    import uuid
    mac = uuid.UUID(int=uuid.getnode()).hex[-12:]
    return ":".join([mac[e:e + 2] for e in range(0, 11, 2)])
def get_mac_addr():
    from uuid import getnode
    mac = getnode()
    mac = ''.join(("%012X" % mac)[i:i + 2] for i in range(0, 12, 2))
    return mac
def setup_tv_conection(app):
    global mymac
    global myip
    global tvip
    global tvappstring
    global remotename

    if mymac is None:
        from uuid import getnode
        mymac = iter(hex(getnode())[3:14])
        mymac = '-'.join(a + b for a, b in zip(mymac, mymac)).upper()

    if myip is None:
        myip = socket.gethostbyname(socket.gethostname())

    if None in (tvip, tvappstring):
        print 'Trying to find a Samsung TV on the network please wait....'
        found_tvs = find(3)

        if not found_tvs:
            raise RuntimeError(
                'No Samsung TVs could be located. Is the TV powered off!?')

        def tv_data():
            return [
                'Model: ' + found_tv['model'],
                'IP Address: ' + found_tv['ip'],
                'Device ID: ' + found_tv['device_id'],
                'Production Series: ' + found_tv['series'],
                'Production Year: ' + str(found_tv['year']),
                'Panel Size: ' + str(found_tv['size']),
                'Panel Type: ' + found_tv['type'],
                'Panel Format: ' + found_tv['resolution']
            ]

        if tvappstring is not None:
            tv_model = tvappstring.split('.')[1].upper()
            for found_tv in found_tvs:
                if tv_model == found_tv['model']:
                    break
            else:
                raise RuntimeError(
                    'TV with model number {0} not found. TV off?!?'.format(
                        tv_model))
        elif tvip is None:
            if len(found_tvs) == 1:
                print 'Found 1 TV'
                found_tv = found_tvs[0]
                print '\n'.join(tv_data())
            else:
                print 'Found {0} TVs'.format(len(found_tvs))
                for i, found_tv in enumerate(found_tvs):
                    print str(i + 1) + ')'
                    print '   ', '\n    '.join(tv_data())
                index = int(
                    raw_input('Enter the number for the TV you wish to use: '))
                found_tv = found_tvs[index - 1]
            tvip = found_tv['ip']
        else:
            for found_tv in found_tvs:
                if found_tv['ip'] == tvip:
                    break
            else:
                raise RuntimeError(
                    'TV at IP Address {0} was not found. Possibly off!?'.format(
                        tvip))

        tvappstring = "iphone.{0}.iapp.samsung".format(found_tv['model'])

    if remotename is None:
        remotename = raw_input('Enter the name you want '
                               'to use for this connection: ')

    app.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    app.sock.connect((tvip, 55000))
    ipencoded = base64.b64encode(myip)
    macencoded = base64.b64encode(mymac)

    messagepart1 = chr(0x64) + chr(0x00) + chr(
        len(ipencoded)) + chr(0x00) + ipencoded + chr(
        len(macencoded)) + chr(0x00) + macencoded + chr(
        len(base64.b64encode(remotename))) + chr(
        0x00) + base64.b64encode(remotename)
    part1 = chr(0x00) + chr(len(tvappstring)) + chr(0x00) + tvappstring + chr(
        len(messagepart1)) + chr(0x00) + messagepart1
    app.sock.send(part1)

    messagepart2 = chr(0xc8) + chr(0x00)
    part2 = chr(0x00) + chr(len(tvappstring)) + chr(0x00) + tvappstring + chr(
        len(messagepart2)) + chr(0x00) + messagepart2
    app.sock.send(part2)
    return
def connect(dsn=None, database=None, user=None, password=None, timeout=None,
            login_timeout=15, as_dict=None, appname=None, port=None,
            tds_version=tds_base.TDS74, autocommit=False, blocksize=4096,
            use_mars=False, auth=None, readonly=False, load_balancer=None,
            use_tz=None, bytes_to_unicode=True, row_strategy=None,
            failover_partner=None, server=None, cafile=None,
            validate_host=True, enc_login_only=False,
            disable_connect_retry=False, pooling=False, use_sso=False):
    """
    Opens connection to the database

    :keyword dsn: SQL server host and instance: <host>[\\<instance>]
    :type dsn: string
    :keyword failover_partner: secondary database host, used if primary is not accessible
    :type failover_partner: string
    :keyword database: the database to initially connect to
    :type database: string
    :keyword user: database user to connect as
    :type user: string
    :keyword password: user's password
    :type password: string
    :keyword timeout: query timeout in seconds, default 0 (no timeout)
    :type timeout: int
    :keyword login_timeout: timeout for connection and login in seconds, default 15
    :type login_timeout: int
    :keyword as_dict: whether rows should be returned as dictionaries instead of tuples.
    :type as_dict: boolean
    :keyword appname: Set the application name to use for the connection
    :type appname: string
    :keyword port: the TCP port to use to connect to the server
    :type port: int
    :keyword tds_version: Maximum TDS version to use, should only be used for testing
    :type tds_version: int
    :keyword autocommit: Enable or disable database level autocommit
    :type autocommit: bool
    :keyword blocksize: Size of block for the TDS protocol, usually should not be used
    :type blocksize: int
    :keyword use_mars: Enable or disable MARS
    :type use_mars: bool
    :keyword auth: An instance of authentication method class, e.g. Ntlm or Sspi
    :keyword readonly: Allows you to enable read-only mode for the connection, only supported
      by MSSQL 2012; earlier versions will ignore this parameter
    :type readonly: bool
    :keyword load_balancer: An instance of load balancer class to use, if not provided will
      not use load balancer
    :keyword use_tz: Provides timezone for naive database times, if not provided date and time
      will be returned in naive format
    :keyword bytes_to_unicode: If true single byte database strings will be converted to unicode
      Python strings, otherwise will return strings as ``bytes`` without conversion.
    :type bytes_to_unicode: bool
    :keyword row_strategy: strategy used to create rows, determines type of returned rows,
      can be custom or one of: :func:`tuple_row_strategy`, :func:`list_row_strategy`,
      :func:`dict_row_strategy`, :func:`namedtuple_row_strategy`, :func:`recordtype_row_strategy`
    :type row_strategy: function of list of column names returning row factory
    :keyword cafile: Name of the file containing trusted CAs in PEM format, if provided will
      enable TLS
    :type cafile: str
    :keyword validate_host: Host name validation during TLS connection is enabled by default,
      if you disable it you will be vulnerable to MitM type of attack.
    :type validate_host: bool
    :keyword enc_login_only: Allows you to scope TLS encryption only to an authentication
      portion. This means that anyone who can observe traffic on your network will be able
      to see all your SQL requests and potentially modify them.
    :type enc_login_only: bool
    :keyword use_sso: Enables SSO login, e.g. Kerberos using SSPI on Windows and kerberos
      package on other platforms. Cannot be used together with auth parameter.
    :returns: An instance of :class:`Connection`
    """
    if use_sso and auth:
        raise ValueError('use_sso cannot be used with auth parameter defined')
    login = _TdsLogin()
    login.client_host_name = socket.gethostname()[:128]
    login.library = "Python TDS Library"
    login.user_name = user or ''
    login.password = password or ''
    login.app_name = appname or 'pytds'
    login.port = port
    login.language = ''  # use database default
    login.attach_db_file = ''
    login.tds_version = tds_version
    if tds_version < tds_base.TDS70:
        raise ValueError('This TDS version is not supported')
    login.database = database or ''
    login.bulk_copy = False
    login.client_lcid = lcid.LANGID_ENGLISH_US
    login.use_mars = use_mars
    login.pid = os.getpid()
    login.change_password = ''
    login.client_id = uuid.getnode()  # client mac address
    login.cafile = cafile
    login.validate_host = validate_host
    login.enc_login_only = enc_login_only
    if cafile:
        if not tls.OPENSSL_AVAILABLE:
            raise ValueError(
                "You are trying to use encryption but pyOpenSSL does not work, you probably "
                "need to install it first")
        login.tls_ctx = tls.create_context(cafile)
        if login.enc_login_only:
            login.enc_flag = PreLoginEnc.ENCRYPT_OFF
        else:
            login.enc_flag = PreLoginEnc.ENCRYPT_ON
    else:
        login.tls_ctx = None
        login.enc_flag = PreLoginEnc.ENCRYPT_NOT_SUP

    if use_tz:
        login.client_tz = use_tz
    else:
        login.client_tz = pytds.tz.local

    # that will set:
    # ANSI_DEFAULTS to ON,
    # IMPLICIT_TRANSACTIONS to OFF,
    # TEXTSIZE to 0x7FFFFFFF (2GB) (TDS 7.2 and below), TEXTSIZE to infinite (introduced in TDS 7.3),
    # and ROWCOUNT to infinite
    login.option_flag2 = tds_base.TDS_ODBC_ON

    login.connect_timeout = login_timeout
    login.query_timeout = timeout
    login.blocksize = blocksize
    login.readonly = readonly
    login.load_balancer = load_balancer
    login.bytes_to_unicode = bytes_to_unicode

    if server and dsn:
        raise ValueError("Both server and dsn shouldn't be specified")

    if server:
        warnings.warn("server parameter is deprecated, use dsn instead",
                      DeprecationWarning)
        dsn = server

    if load_balancer and failover_partner:
        raise ValueError(
            "Both load_balancer and failover_partner shouldn't be specified")
    if load_balancer:
        servers = [(srv, None) for srv in load_balancer.choose()]
    else:
        servers = [(dsn or 'localhost', port)]
        if failover_partner:
            servers.append((failover_partner, port))

    parsed_servers = []
    for srv, port in servers:
        host, instance = _parse_server(srv)
        if instance and port:
            raise ValueError("Both instance and port shouldn't be specified")
        parsed_servers.append((host, port, instance))

    if use_sso:
        spn = "MSSQLSvc@{}:{}".format(parsed_servers[0][0], parsed_servers[0][1])
        from . import login as pytds_login
        try:
            login.auth = pytds_login.SspiAuth(spn=spn)
        except ImportError:
            login.auth = pytds_login.KerberosAuth(spn)
    else:
        login.auth = auth

    login.servers = _get_servers_deque(tuple(parsed_servers), database)

    # unique connection identifier used to pool connection
    key = (
        dsn,
        login.user_name,
        login.app_name,
        login.tds_version,
        login.database,
        login.client_lcid,
        login.use_mars,
        login.cafile,
        login.blocksize,
        login.readonly,
        login.bytes_to_unicode,
        login.auth,
        login.client_tz,
        autocommit,
    )

    conn = Connection()
    conn._use_tz = use_tz
    conn._autocommit = autocommit
    conn._login = login
    conn._pooling = pooling
    conn._key = key

    assert row_strategy is None or as_dict is None, \
        'Both row_strategy and as_dict were specified, you should use either one or another'
    if as_dict is not None:
        conn.as_dict = as_dict
    elif row_strategy is not None:
        conn._row_strategy = row_strategy
    else:
        conn._row_strategy = tuple_row_strategy  # default row strategy

    conn._isolation_level = 0
    conn._dirty = False
    from .tz import FixedOffsetTimezone
    conn._tzinfo_factory = None if use_tz is None else FixedOffsetTimezone
    if disable_connect_retry:
        conn._try_open(timeout=login.connect_timeout)
    else:
        conn._open()
    return conn
def get_mac_address():
    node = uuid.getnode()
    mac = uuid.UUID(int=node).hex[-12:]
    return mac
def getMyMac():
    from uuid import getnode
    node = getnode()
    return ':'.join(
        [hex(node >> i & 0xff)[2:] for i in reversed(range(0, 48, 8))])
def _getmac():
    mac = uuid.getnode()
    # a randomly generated node ID has the multicast bit (bit 40) set;
    # fall back to node() in that case
    if (mac >> 40) % 2:
        mac = node()
    # hash through uuid5 to get a stable 16-byte value either way
    return uuid.uuid5(uuid.NAMESPACE_DNS, str(mac)).bytes
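Since uuid.uuid5() is a deterministic hash of its inputs, the 16-byte value returned above is stable across calls and processes for the same node; a quick standalone check:

import uuid

mac = uuid.getnode()
key1 = uuid.uuid5(uuid.NAMESPACE_DNS, str(mac)).bytes
key2 = uuid.uuid5(uuid.NAMESPACE_DNS, str(mac)).bytes
assert key1 == key2 and len(key1) == 16  # deterministic 16-byte machine key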
from ansible import constants as C
from ansible import context
from ansible.errors import AnsibleError, AnsibleOptionsError
from ansible.module_utils.six import iteritems, string_types, PY3
from ansible.module_utils._text import to_native, to_text
from ansible.module_utils.common._collections_compat import MutableMapping, MutableSequence
from ansible.parsing.splitter import parse_kv


ADDITIONAL_PY2_KEYWORDS = frozenset(("True", "False", "None"))

_MAXSIZE = 2 ** 32
cur_id = 0
node_mac = ("%012x" % uuid.getnode())[:12]
random_int = ("%08x" % random.randint(0, _MAXSIZE))[:8]


def get_unique_id():
    global cur_id
    cur_id += 1
    return "-".join([
        node_mac[0:8],
        node_mac[8:12],
        random_int[0:4],
        random_int[4:8],
        ("%012x" % cur_id)[:12],
    ])
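For reference, the IDs produced by get_unique_id() are UUID-shaped 8-4-4-4-12 strings: the first two groups come from the host MAC, the next two from the per-process random value, and the tail is the incrementing counter. A hypothetical session (all values made up):

>>> get_unique_id()   # with node_mac = '0123456789ab', random_int = 'cdef0123'
'01234567-89ab-cdef-0123-000000000001'
>>> get_unique_id()
'01234567-89ab-cdef-0123-000000000002'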
def getMac():
    node = uuid.getnode()
    mac = uuid.UUID(int=node).hex[-12:]
    return mac
def save_read_group(self, ctx, params):
    """
    :param params: instance of type "save_read_group_params"
       (save_read_group() ** ** Method for adding Reads objects to a
       ReadsSet) -> structure: parameter "workspace_name" of String,
       parameter "output_readset_name" of String, parameter
       "input_reads_list" of list of String, parameter "desc" of String
    :returns: instance of type "save_read_group_output" -> structure:
       parameter "report_name" of String, parameter "report_ref" of String
    """
    # ctx is the context object
    # return variables are: returnVal
    #BEGIN save_read_group
    console = []
    invalid_msgs = []
    #self.log(console,'Running save_read_group with params=')
    #self.log(console, "\n"+pformat(params))
    report = ''
    # report = 'Running KButil_Add_Genomes_to_GenomeSet with params='
    # report += "\n"+pformat(params)

    #### do some basic checks
    #
    if 'workspace_name' not in params:
        raise ValueError('workspace_name parameter is required')
    if 'desc' not in params:
        raise ValueError('desc parameter is required')
    if 'input_reads_list' not in params:
        raise ValueError('input_reads_list parameter is required')
    #if 'input_readsset_name' not in params:
    #    raise ValueError('input_readsset_name parameter is optional')
    if 'output_readset_name' not in params:
        raise ValueError('output_readset_name parameter is required')

    # Build GenomeSet
    #
    elements = dict()

    savereadssetparams = {}
    savereadssetparams['workspace_name'] = params['workspace_name']
    savereadssetparams['output_object_name'] = params['output_readset_name']
    readsetdata = {}
    if (params['desc'] is not None):
        readsetdata['description'] = params['desc']
    readsetdata['items'] = []

    # add new reads
    for reads_name in params['input_reads_list']:
        readssetitem = {}
        readssetitem['ref'] = params['workspace_name'] + '/' + reads_name
        readssetitem['label'] = ''
        readsetdata['items'].append(readssetitem)
    savereadssetparams['data'] = readsetdata

    # load the method provenance from the context object
    #
    #self.log(console,"Setting Provenance")  # DEBUG
    provenance = [{}]
    if 'provenance' in ctx:
        provenance = ctx['provenance']
    # add additional info to provenance here, in this case the input data object reference
    try:
        prov_defined = provenance[0]['input_ws_objects']
    except:
        provenance[0]['input_ws_objects'] = []
    for reads_name in params['input_reads_list']:
        provenance[0]['input_ws_objects'].append(
            params['workspace_name'] + '/' + reads_name)
    provenance[0]['service'] = 'ReadGroupEditor'
    provenance[0]['method'] = 'save_read_group'

    # Save output object
    #
    #if len(invalid_msgs) == 0:
    #    self.log(console,"Saving ReadsSet")
    set_api = SetAPI(os.environ["SDK_CALLBACK_URL"])
    set_api._service_ver = "dev"
    set_api.save_reads_set_v1(savereadssetparams)

    # build output report object
    #
    #self.log(console,"BUILDING REPORT")  # DEBUG
    if len(invalid_msgs) == 0:
        #self.log(console,"reads in output set "+params['output_readset_name']+": "+str(len(elements.keys())))
        report += 'reads in output set ' + params['output_readset_name'] + \
                  ': ' + str(len(elements.keys())) + "\n"
        reportObj = {
            'objects_created': [{
                'ref': params['workspace_name'] + '/' + params['output_readset_name'],
                'description': 'save_read_group'
            }],
            'text_message': report
        }
    else:
        report += "FAILURE:\n\n" + "\n".join(invalid_msgs) + "\n"
        reportObj = {'objects_created': [], 'text_message': report}

    reportName = 'save_read_group_report_' + str(hex(uuid.getnode()))
    ws = workspaceService(self.workspaceURL, token=ctx['token'])
    report_obj_info = ws.save_objects({
        'workspace': params['workspace_name'],
        'objects': [{
            'type': 'KBaseReport.Report',
            'data': reportObj,
            'name': reportName,
            'meta': {},
            'hidden': 1,
            'provenance': provenance
        }]
    })[0]

    # Build report and return
    #
    #self.log(console,"BUILDING RETURN OBJECT")
    returnVal = {
        'report_name': reportName,
        'report_ref': str(report_obj_info[6]) + '/' + str(report_obj_info[0]) +
                      '/' + str(report_obj_info[4]),
    }
    #self.log(console,"save_read_group DONE")
    #END save_read_group

    # At some point might do deeper type checking...
    if not isinstance(returnVal, dict):
        raise ValueError('Method save_read_group return value ' +
                         'returnVal is not type dict as required.')
    # return the results
    return [returnVal]
import random
import sys
# import _strptime
import time

# Define global variables
gh_username = "******"
gh_password = "******"
gh_repo = "BlackHatPython_chapter7"
gh_remote = "data/"

# We generate a trojan_id. If this id is not available in our github config folder,
# we will resort to using the default config file for this trojan.
# We define some other constants related to the trojan
trojan_id = base64.b64encode(
    (platform.node() + "-" + hex(getnode())).encode()).decode("utf-8")
trojan_id_default = base64.b64encode("default".encode()).decode("utf-8")
trojan_config_file_path = "config/{}.json".format(trojan_id)
trojan_default_config_file_path = "config/{}.json".format(trojan_id_default)
trojan_module_folder_path = "modules/"
trojan_output_file_name = datetime.utcnow().isoformat() + "-" + trojan_id
trojan_output_file_path = "data/" + trojan_output_file_name
trojan_output_file_contents = ""
task_queue = Queue()


# GitImporter Class to allow import of our custom python modules from github
class GitImporter(object):
    def __init__(self):
        self.module_code = None
from web4note.secret import SECRET_APP_KEY, HOME_MAC, WORK_MAC

if sys.platform.startswith('win'):
    db_prefix = 'sqlite:///'
else:
    db_prefix = 'sqlite:////'

app = Flask(__name__, instance_relative_config=True)
app.config.from_mapping(
    SECRET_KEY=SECRET_APP_KEY,
    SESSION_TYPE='filesystem',
)

if uuid.getnode() == HOME_MAC:
    note_dir = "D:/Share/Note4Web"  # home
    new_note_dir = "D:/Share/notebook"
elif uuid.getnode() == WORK_MAC:
    note_dir = "E:/Share/Note4Web"  # work
    new_note_dir = "E:/Share/notebook"
    # note_dir = "E:/Share/test/bb"  # work test
    # new_note_dir = "E:/Share/test/aa"
else:
    note_dir = new_note_dir = ""
    print("Not one of my machines:", uuid.getnode())

NOTEROOT = note_dir
NEWNOTEDIR = new_note_dir
#TEMPLIST = "./index.json"

from web4note import views
def generateID():
    val = str(uuid.getnode())
    # stretch the node id through 128 rounds of SHA-1
    for i in xrange(0, 128):
        val = hashlib.sha1(val).hexdigest()
    return val
)

def _get(key, default=None):
    v = getattr(pykitconfig, key, default)
    logger.debug('set pykit config: {key}={v}'.format(key=key, v=v))
    return v


uid = _get('uid')
gid = _get('gid')
log_dir = _get('log_dir')
cat_stat_dir = _get('cat_stat_dir')
iostat_stat_path = _get('iostat_stat_path', '/tmp/pykit-iostat')
zk_acl = _get('zk_acl')  # (('xp', '123', 'cdrwa'), ('foo', 'bar', 'rw'))
zk_auth = _get('zk_auth')  # ('digest', 'xp', '123')
zk_hosts = _get('zk_hosts', '127.0.0.1:21811')
zk_lock_dir = _get('zk_lock_dir', 'lock/')
zk_node_id = _get('zk_node_id', '%012x' % uuid.getnode())
zk_record_dir = _get('zk_record_dir', 'record/')
zk_tx_dir = _get('zk_tx_dir', 'tx/')
zk_seq_dir = _get('zk_seq_dir', 'seq/')

rp_cli_nwr = _get('rp_cli_nwr', (3, 2, 2))
rp_cli_ak_sk = _get('rp_cli_ak_sk', ('access_key', 'secret_key'))

ec_block_port = _get('ec_block_port', 6000)

inner_ip_patterns = _get('inner_ip_patterns', [
    '^172[.]1[6-9].*',
    '^172[.]2[0-9].*',
    '^172[.]3[0-1].*',
    '^10[.].*',
    '^192[.]168[.].*'
])
def createHID(prefix="00000000"):
    hid = "HID:" + prefix
    id = uuid.uuid1(uuid.getnode())
    return hid + id.hex
def get_mac_addr():
    """ Get the MAC address """
    return uuid.UUID(int=uuid.getnode()).hex[-12:].lower()
def get_uuid():
    import uuid
    return uuid.UUID(int=uuid.getnode()).hex[-12:]
# -*- coding: utf-8 -*-
# by Roman Vishnevsky aka.x0x01 @ gmail.com

import urllib2
import json
import hashlib
import uuid
import lcddriver
from datetime import datetime

# developer API key, obtained under "My data / My API keys" after
# registering with the project. Replace with your own!
api_key = 'qP61RWrR78gHT'

# generate a unique application ID
app_id = str(uuid.getnode())
md5_app_id = hashlib.md5(app_id).hexdigest()

# build the JSON body
data = {
    'cmd': 'sensorsOnDevice',
    'id': 1222,
    'uuid': md5_app_id,
    'api_key': api_key,
    'lang': 'en'
}

# exception handler
try:
    # request
    request = urllib2.Request('http://narodmon.ru/api', json.dumps(data))
TIMEOUT = CONFIG.get('plexapi.timeout', 30, int)
X_PLEX_CONTAINER_SIZE = CONFIG.get('plexapi.container_size', 100, int)
X_PLEX_ENABLE_FAST_CONNECT = CONFIG.get('plexapi.enable_fast_connect', False, bool)

# Plex Header Configuration
X_PLEX_PROVIDES = CONFIG.get('header.provides', 'controller')
X_PLEX_PLATFORM = CONFIG.get('header.platform', CONFIG.get('header.platorm', uname()[0]))
X_PLEX_PLATFORM_VERSION = CONFIG.get('header.platform_version', uname()[2])
X_PLEX_PRODUCT = CONFIG.get('header.product', PROJECT)
X_PLEX_VERSION = CONFIG.get('header.version', VERSION)
X_PLEX_DEVICE = CONFIG.get('header.device', X_PLEX_PLATFORM)
X_PLEX_DEVICE_NAME = CONFIG.get('header.device_name', uname()[1])
X_PLEX_IDENTIFIER = CONFIG.get('header.identifier', str(hex(getnode())))
BASE_HEADERS = reset_base_headers()

# Logging Configuration
log = logging.getLogger('plexapi')
logfile = CONFIG.get('log.path')
logformat = CONFIG.get(
    'log.format', '%(asctime)s %(module)12s:%(lineno)-4s %(levelname)-9s %(message)s')
loglevel = CONFIG.get('log.level', 'INFO').upper()
loghandler = logging.NullHandler()
if logfile:  # pragma: no cover
    logbackups = CONFIG.get('log.backup_count', 3, int)
    logbytes = CONFIG.get('log.rotate_bytes', 512000, int)
    loghandler = RotatingFileHandler(os.path.expanduser(logfile), 'a',
def createAD():
    ad = "AD:10000000"
    id = uuid.uuid1(uuid.getnode())
    return ad + id.hex
print("*** Core freq: " + str(os.environ.get('RESIN_HOST_CONFIG_core_freq'))) if os.environ.get("RESIN_HOST_CONFIG_dtoverlay") != None: print("*** UART mode: " + str(os.environ.get('RESIN_HOST_CONFIG_dtoverlay'))) # Check if the correct environment variables are set print("*******************") print("*** Configuration:") print("*******************") if os.environ.get("GW_EUI") == None: # The FFFE should be inserted in the middle (so xxxxxxFFFExxxxxx) my_eui = format(uuid.getnode(), '012x') my_eui = my_eui[:6] + GWID_PREFIX + my_eui[6:] my_eui = my_eui.upper() else: my_eui = os.environ.get("GW_EUI") print("GW_EUI:\t" + my_eui) if os.environ.get("ACCOUNT_SERVER_DOMAIN") == None: account_server_domain = "account.thethingsnetwork.org" else: account_server_domain = os.environ.get("ACCOUNT_SERVER_DOMAIN") # Define default configs description = os.getenv('GW_DESCRIPTION', "") placement = ""
def get_user_id():
    node = uuid.getnode()
    mac = uuid.UUID(int=node).hex[-12:]
    uid = hashlib.md5(mac).hexdigest()
    return uid
def get_pin_and_cookie_name(app):
    """Given an application object this returns a semi-stable 9 digit pin
    code and a random key.  The hope is that this is stable between
    restarts to not make debugging particularly frustrating.  If the pin
    was forcefully disabled this returns `None`.

    Second item in the resulting tuple is the cookie name for remembering.
    """
    pin = os.environ.get('WERKZEUG_DEBUG_PIN')
    rv = None
    num = None

    # Pin was explicitly disabled
    if pin == 'off':
        return None, None

    # Pin was provided explicitly
    if pin is not None and pin.replace('-', '').isdigit():
        # If there are separators in the pin, return it directly
        if '-' in pin:
            rv = pin
        else:
            num = pin

    modname = getattr(app, '__module__',
                      getattr(app.__class__, '__module__'))

    try:
        # `getpass.getuser()` imports the `pwd` module,
        # which does not exist in the Google App Engine sandbox.
        username = getpass.getuser()
    except ImportError:
        username = None

    mod = sys.modules.get(modname)

    # This information only exists to make the cookie unique on the
    # computer, not as a security feature.
    probably_public_bits = [
        username,
        modname,
        getattr(app, '__name__', getattr(app.__class__, '__name__')),
        getattr(mod, '__file__', None),
    ]

    # This information is here to make it harder for an attacker to
    # guess the cookie name.  They are unlikely to be contained anywhere
    # within the unauthenticated debug page.
    private_bits = [
        str(uuid.getnode()),
        get_machine_id(),
    ]

    h = hashlib.md5()
    for bit in chain(probably_public_bits, private_bits):
        if not bit:
            continue
        if isinstance(bit, text_type):
            bit = bit.encode('utf-8')
        h.update(bit)
    h.update(b'cookiesalt')

    cookie_name = '__wzd' + h.hexdigest()[:20]

    # If we need to generate a pin we salt it a bit more so that we don't
    # end up with the same value and generate out 9 digits
    if num is None:
        h.update(b'pinsalt')
        num = ('%09d' % int(h.hexdigest(), 16))[:9]

    # Format the pincode in groups of digits for easier remembering if
    # we don't have a result yet.
    if rv is None:
        for group_size in 5, 4, 3:
            if len(num) % group_size == 0:
                rv = '-'.join(num[x:x + group_size].rjust(group_size, '0')
                              for x in range(0, len(num), group_size))
                break
        else:
            rv = num

    return rv, cookie_name
def get_src_mac():
    return ':'.join(['{:02x}'.format((uuid.getnode() >> i) & 0xff)
                     for i in range(0, 8 * 6, 8)][::-1])
def baodao(self, evt):
    # Handler for the check-in button
    xuehao = self.tc1.GetValue()    # get the student ID
    xingming = self.tc2.GetValue()  # get the student name
    serverIP = self.tc3.GetValue()
    if not re.match('^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}$', serverIP):
        dlg = wx.MessageDialog(None, 'Invalid server IP address', 'Sorry',
                               wx.YES_DEFAULT)
        if dlg.ShowModal() == wx.ID_YES:
            self.Close()
        return
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    try:
        sock.connect((serverIP, 16109))
    except Exception as e:
        dlg = wx.MessageDialog(None, 'Roll call is not open right now',
                               'Sorry', wx.YES_DEFAULT)
        if dlg.ShowModal() == wx.ID_YES:
            self.Close()
        return

    # Get the client MAC address; MAC+IP ensures each computer can only
    # answer roll call once per class
    import uuid
    node = uuid.getnode()
    macHex = uuid.UUID(int=node).hex[-12:]
    mac = []
    for i in range(len(macHex))[::2]:
        mac.append(macHex[i:i + 2])
    mac = ''.join(mac)

    sock.sendall(','.join((xuehao, xingming, mac)).encode())
    data = sock.recv(1024).decode()
    if data.lower() == 'ok':
        # Check-in succeeded
        sock.close()
        # Save the student ID, name and server IP so the form can be
        # filled in automatically next time
        path = os.getenv('temp')
        filename = path + '\\' + 'info.txt'
        with open(filename, 'w') as fp:
            fp.write(','.join((xuehao, xingming, serverIP)))
        dlg = wx.MessageDialog(None, xuehao + ',' + xingming +
                               ' checked in successfully', 'Congratulations',
                               wx.YES_DEFAULT)
        if dlg.ShowModal() == wx.ID_YES:
            self.Close()
        return
    elif data.lower() == 'repeat':
        sock.close()
        dlg = wx.MessageDialog(None, xuehao + ' duplicate check-in is not allowed',
                               'Sorry', wx.YES_DEFAULT)
        if dlg.ShowModal() == wx.ID_YES:
            self.Close()
        return
    elif data.lower() == 'notmatch':
        sock.close()
        dlg = wx.MessageDialog(None, xuehao + ' student ID and name do not match',
                               'Sorry', wx.YES_DEFAULT)
        if dlg.ShowModal() == wx.ID_YES:
            self.Close()
        return
    elif data.lower() == 'daidianming':
        sock.close()
        dlg = wx.MessageDialog(None, xuehao + ' checking in for someone else is'
                               ' not allowed; this is a warning',
                               'Sorry', wx.YES_DEFAULT)
        if dlg.ShowModal() == wx.ID_YES:
            self.Close()
        return
def getStationMAC(self):  # STATION_MAC
    import uuid
    mac = uuid.UUID(int=uuid.getnode()).hex[-12:].upper()
    self._data['STATION_MAC'] = ':'.join(
        [mac[i:i + 2] for i in range(0, 11, 2)])
from berry.selectors import LightSelect
from berry.client import BerryClient, get_widget, get_selector
from berry.utilities import CLIENT_PORT, LOG_LEVEL

logging.getLogger().setLevel(LOG_LEVEL)

if __name__ == '__main__':
    # Get the port number to run this client on
    try:
        port = int(sys.argv[1])
    except:
        # Default port
        port = CLIENT_PORT

    # Create GUID
    guid = getnode().__str__() + str(port)

    # Get berry from config
    berry = get_widget(guid=guid)

    # Initialize the client
    client = BerryClient(berry=berry, port=port)

    # Start debug input mode thread
    threading.Thread(target=client.input_loop, daemon=True).start()

    # Start thread for selector, if needed
    if berry.live:
        # Get selector
        selector = get_selector(client)
        selector.setup()
def filter_contigs(self, ctx, params):
    # ctx is the context object
    # return variables are: returnVal
    #BEGIN filter_contigs

    # Print statements to stdout/stderr are captured and available as the method log
    print('Starting filter contigs method.')

    # Step 1 - Parse/examine the parameters and catch any errors
    # It is important to check that parameters exist and are defined, and that nice error
    # messages are returned to the user
    if 'workspace' not in params:
        raise ValueError('Parameter workspace is not set in input arguments')
    workspace_name = params['workspace']
    if 'contigset_id' not in params:
        raise ValueError('Parameter contigset_id is not set in input arguments')
    contigset_id = params['contigset_id']
    if 'min_length' not in params:
        raise ValueError('Parameter min_length is not set in input arguments')
    min_length_orig = params['min_length']
    min_length = None
    try:
        min_length = int(min_length_orig)
    except ValueError:
        raise ValueError('Cannot parse integer from min_length parameter (' +
                         str(min_length_orig) + ')')
    if min_length < 0:
        raise ValueError('min_length parameter shouldn\'t be negative (' +
                         str(min_length) + ')')

    # Step 2 - Download the input data
    # Most data will be passed to your method by its workspace name. Use the workspace to
    # pull that data (or in many cases, subsets of that data). The user token is used to
    # authenticate with the KBase data stores and other services.
    # DO NOT PRINT OUT OR OTHERWISE SAVE USER TOKENS
    token = ctx['token']
    wsClient = workspaceService(self.workspaceURL, token=token)
    try:
        # Note that results from the workspace are returned in a list, and the actual data
        # is saved in the 'data' key. So to get the ContigSet data, we get the first element
        # of the list, and look at the 'data' field.
        contigSet = wsClient.get_objects([{'ref': workspace_name + '/' + contigset_id}])[0]['data']
    except:
        exc_type, exc_value, exc_traceback = sys.exc_info()
        lines = traceback.format_exception(exc_type, exc_value, exc_traceback)
        orig_error = ''.join('    ' + line for line in lines)
        raise ValueError('Error loading original ContigSet object from workspace:\n' + orig_error)

    print('Got ContigSet data.')

    # Step 3 - Actually perform the filter operation, saving the good contigs to a new list
    good_contigs = []
    n_total = 0
    n_remaining = 0
    for contig in contigSet['contigs']:
        n_total += 1
        if len(contig['sequence']) >= min_length:
            good_contigs.append(contig)
            n_remaining += 1
    # replace the contigs in the contigSet object in local memory
    contigSet['contigs'] = good_contigs
    print('Filtered ContigSet to ' + str(n_remaining) + ' contigs out of ' + str(n_total))

    # Step 4 - Save the new ContigSet back to the Workspace
    # When objects are saved, it is important to always set the Provenance of that object.
    # The basic provenance info is given to you as part of the context object. You can add
    # additional information to the provenance as necessary. Here we keep a pointer to the
    # input data object.
    provenance = [{}]
    if 'provenance' in ctx:
        provenance = ctx['provenance']
    # add additional info to provenance here, in this case the input data object reference
    provenance[0]['input_ws_objects'] = [workspace_name + '/' + contigset_id]

    obj_info_list = None
    try:
        obj_info_list = wsClient.save_objects({
            'workspace': workspace_name,
            'objects': [{
                'type': 'KBaseGenomes.ContigSet',
                'data': contigSet,
                'name': contigset_id,
                'provenance': provenance
            }]
        })
    except:
        exc_type, exc_value, exc_traceback = sys.exc_info()
        lines = traceback.format_exception(exc_type, exc_value, exc_traceback)
        orig_error = ''.join('    ' + line for line in lines)
        raise ValueError('Error saving filtered ContigSet object to workspace:\n' + orig_error)

    info = obj_info_list[0]
    # Workspace Object Info is a tuple defined as-
    # absolute ref = info[6] + '/' + info[0] + '/' + info[4]
    # 0 - obj_id objid - integer valued ID of the object
    # 1 - obj_name name - the name of the data object
    # 2 - type_string type - the full type of the data object as: [ModuleName].[Type]-v[major_ver].[minor_ver]
    # 3 - timestamp save_date
    # 4 - int version - the object version number
    # 5 - username saved_by
    # 6 - ws_id wsid - the unique integer valued ID of the workspace containing this object
    # 7 - ws_name workspace - the workspace name
    # 8 - string chsum - md5 of the sorted json content
    # 9 - int size - size of the json content
    # 10 - usermeta meta - dictionary of string keys/values of user set or auto generated metadata
    print('saved ContigSet:' + pformat(info))

    # Step 5 - Create the Report for this method, and return the results
    # Create a Report of the method
    report = 'New ContigSet saved to: ' + str(info[7]) + '/' + str(info[1]) + '/' + str(info[4]) + '\n'
    report += 'Number of initial contigs:      ' + str(n_total) + '\n'
    report += 'Number of contigs removed:      ' + str(n_total - n_remaining) + '\n'
    report += 'Number of contigs in final set: ' + str(n_remaining) + '\n'

    reportObj = {
        'objects_created': [{
            'ref': str(info[6]) + '/' + str(info[0]) + '/' + str(info[4]),
            'description': 'Filtered Contigs'
        }],
        'text_message': report
    }

    # generate a unique name for the Method report
    reportName = 'filter_contigs_report_' + str(hex(uuid.getnode()))
    report_info_list = None
    try:
        report_info_list = wsClient.save_objects({
            'id': info[6],
            'objects': [{
                'type': 'KBaseReport.Report',
                'data': reportObj,
                'name': reportName,
                'meta': {},
                'hidden': 1,  # important! make sure the report is hidden
                'provenance': provenance
            }]
        })
    except:
        exc_type, exc_value, exc_traceback = sys.exc_info()
        lines = traceback.format_exception(exc_type, exc_value, exc_traceback)
        orig_error = ''.join('    ' + line for line in lines)
        raise ValueError('Error saving filtered ContigSet object to workspace:\n' + orig_error)
    report_info = report_info_list[0]

    print('saved Report: ' + pformat(report_info))

    returnVal = {
        'report_name': reportName,
        'report_ref': str(report_info[6]) + '/' + str(report_info[0]) + '/' + str(report_info[4]),
        'new_contigset_ref': str(info[6]) + '/' + str(info[0]) + '/' + str(info[4]),
        'n_initial_contigs': n_total,
        'n_contigs_removed': n_total - n_remaining,
        'n_contigs_remaining': n_remaining
    }
    print('returning:' + pformat(returnVal))
    #END filter_contigs

    # At some point might do deeper type checking...
    if not isinstance(returnVal, dict):
        raise ValueError('Method filter_contigs return value ' +
                         'returnVal is not type dict as required.')
    # return the results
    return [returnVal]
import os
import struct
import sys
import types
import uuid
from abc import ABCMeta
from itertools import product

from exploit import WmiExploiter, Ms08_067_Exploiter, SmbExploiter, RdpExploiter, SSHExploiter, ShellShockExploiter, \
    SambaCryExploiter, ElasticGroovyExploiter, Struts2Exploiter
from network import TcpScanner, PingScanner, SMBFinger, SSHFinger, HTTPFinger, MySQLFinger, ElasticFinger, \
    MSSQLFinger

__author__ = 'itamar'

GUID = str(uuid.getnode())

EXTERNAL_CONFIG_FILE = os.path.join(
    os.path.abspath(os.path.dirname(sys.argv[0])), 'monkey.bin')


def _cast_by_example(value, example):
    """
    a method that casts a value to the type of the parameter given as example
    """
    example_type = type(example)

    if example_type is str:
        return os.path.expandvars(value).encode("utf8")
    elif example_type is tuple and len(example) != 0:
        if value is None or value == tuple([None]):
            return tuple()
def get_host_info():
    import uuid
    hw_addr = hex(uuid.getnode())[2:]
    hw_addr = ':'.join([hw_addr[i:i + 2]
                        for i in range(0, len(hw_addr), 2)]).lower()
    ip_addr = subprocess.check_output(['hostname', '-I']).decode('utf-8').split()[0]
    return ip_addr, hw_addr