def test_resource_uuid(self):
    uuid_str = '536e28c2017e405e89b25a1ed777b952'
    self.assertEqual(uuid_str, common_utils.resource_uuid(uuid_str))
    # Exact 64 length string.
    uuid_str = ('536e28c2017e405e89b25a1ed777b952'
                'f13de678ac714bb1b7d1e9a007c10db5')
    resource_id_namespace = common_utils.RESOURCE_ID_NAMESPACE
    transformed_id = uuid.uuid5(resource_id_namespace, uuid_str).hex
    self.assertEqual(transformed_id, common_utils.resource_uuid(uuid_str))
    # Non-ASCII character test.
    non_ascii_ = 'ß' * 32
    transformed_id = uuid.uuid5(resource_id_namespace, non_ascii_).hex
    self.assertEqual(transformed_id, common_utils.resource_uuid(non_ascii_))
    # This input is invalid because its length is more than 64.
    invalid_input = 'x' * 65
    self.assertRaises(ValueError, common_utils.resource_uuid, invalid_input)
    # 64 length unicode string, to mimic what is returned from mapping_id
    # backend.
    uuid_str = six.text_type('536e28c2017e405e89b25a1ed777b952'
                             'f13de678ac714bb1b7d1e9a007c10db5')
    resource_id_namespace = common_utils.RESOURCE_ID_NAMESPACE
    if six.PY2:
        uuid_str = uuid_str.encode('utf-8')
    transformed_id = uuid.uuid5(resource_id_namespace, uuid_str).hex
    self.assertEqual(transformed_id, common_utils.resource_uuid(uuid_str))
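A minimal sketch of a helper consistent with the assertions above; this is not keystone's actual resource_uuid, and the namespace constant below is a placeholder:

import uuid

RESOURCE_ID_NAMESPACE = uuid.UUID('4f1dd144-8cfb-4be9-b7d2-0b51af9cf8f6')  # placeholder, not keystone's value

def resource_uuid(value):
    """Return UUID-like strings untouched; namespace everything else (max 64 chars)."""
    if len(value) > 64:
        raise ValueError('Length of value cannot exceed 64 characters')
    try:
        uuid.UUID(value)
        return value
    except ValueError:
        return uuid.uuid5(RESOURCE_ID_NAMESPACE, value).hex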
def __init__(self, inputs, outputs=OP_N_TO_1, tags=None):
    """
    :param inputs: list of uuids the operator examines
    """
    self.inputs = inputs
    self.tags = tags
    self._has_pending = False
    self._pending = [None] * len(inputs)
    uuids = map(operator.itemgetter('uuid'), inputs)
    # auto-construct output ids if requested
    if outputs == OP_N_TO_1:
        self.outputs = [util.dict_all(inputs)]
        self.outputs[0]['uuid'] = reduce(lambda x, y: str(uuid.uuid5(y, x)),
                                         map(uuid.UUID, sorted(uuids)),
                                         self.name)
    elif outputs == OP_N_TO_N:
        self.outputs = copy.deepcopy(inputs)
        for i, uid in enumerate(map(lambda x: str(uuid.uuid5(x, self.name)),
                                    map(uuid.UUID, uuids))):
            self.outputs[i]['uuid'] = uid
    else:
        self.outputs = copy.deepcopy(outputs)
def create_channel(name, description="", editors=None, language="en", bookmarkers=None, viewers=None, public=False): domain = uuid.uuid5(uuid.NAMESPACE_DNS, name) node_id = uuid.uuid5(domain, name) channel, _new = Channel.objects.get_or_create(pk=node_id.hex) channel.name = name channel.description = description channel.language_id = language channel.public = public channel.deleted = False editors = editors or [] bookmarkers = bookmarkers or [] viewers = viewers or [] for e in editors: channel.editors.add(e) for b in bookmarkers: channel.bookmarked_by.add(b) for v in viewers: channel.viewers.add(v) channel.save() channel.main_tree.get_descendants().delete() channel.staging_tree and channel.staging_tree.get_descendants().delete() return channel
def generate(self, plist_path): """ Generates a boilerplate Safari Bookmarks plist at plist path. Raises: CalledProcessError if creation of plist fails. """ subprocess.check_call(["touch", plist_path]) contents = dict( Children=list(( dict( Title="History", WebBookmarkIdentifier="History", WebBookmarkType="WebBookmarkTypeProxy", WebBookmarkUUID=str(uuid.uuid5(uuid.NAMESPACE_DNS, "History")), ), dict( Children=list(), Title="BookmarksBar", WebBookmarkType="WebBookmarkTypeList", WebBookmarkUUID=str(uuid.uuid5(uuid.NAMESPACE_DNS, "BookmarksBar")), ), dict( Title="BookmarksMenu", WebBookmarkType="WebBookmarkTypeList", WebBookmarkUUID=str(uuid.uuid5(uuid.NAMESPACE_DNS, "BookmarksMenu")), ), )), Title="", WebBookmarkFileVersion=1, WebBookmarkType="WebBookmarkTypeList", WebBookmarkUUID=str(uuid.uuid5(uuid.NAMESPACE_DNS, "")), ) plistlib.writePlist(contents, plist_path)
def register(self,boy,girl): encodeUtil = EncodeUtil() boyname = boy['username'] girlname = girl['username'] nowtime = datetime.datetime.now() boy['id'] = uuid.uuid5(uuid.NAMESPACE_DNS,uuid.uuid1().get_hex()).get_hex() boy['lover'] = girlname boy['time'] = nowtime boy['password'] = encodeUtil.md5hash(boy['password']) boy['birthday'] = self.InvalidDateTime boy['nickname'] = boyname boy['sex'] = '1' girl['id'] = uuid.uuid5(uuid.NAMESPACE_DNS,uuid.uuid1().get_hex()).get_hex() girl['lover'] = boyname girl['time'] = nowtime girl['password'] = encodeUtil.md5hash(girl['password']) girl['birthday'] = self.InvalidDateTime girl['nickname'] = girlname girl['sex'] = '2' if self.is_user_exist(boyname): return -1 if self.is_user_exist(girlname): return -2 self.uda.insert_user_info(boy) self.uda.insert_user_info(girl) return 0
def make_uuid(self, firmware_name):
    """Deterministically generates a uuid from this connection.
    Used by firmware drivers if the firmware doesn't specify one
    through other means."""
    namespace = uuid.uuid5(uuid.NAMESPACE_DNS, socket.getfqdn())
    return uuid.uuid5(namespace, firmware_name + "@" + self.__port)
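A self-contained sketch of the same two-level derivation with a hypothetical port and firmware name; the result is stable across calls as long as the host's FQDN does not change:

import socket
import uuid

def firmware_uuid(port, firmware_name):
    # Namespace the host first, then the firmware@port pair under it.
    host_ns = uuid.uuid5(uuid.NAMESPACE_DNS, socket.getfqdn())
    return uuid.uuid5(host_ns, firmware_name + "@" + port)

# Repeated calls with the same inputs yield the same UUID.
assert firmware_uuid("/dev/ttyACM0", "marlin") == firmware_uuid("/dev/ttyACM0", "marlin")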
def hw_connect_event(self, device):
    namespace = uuid.uuid5(uuid.NAMESPACE_DNS, socket.getfqdn())
    device.uuid = uuid.uuid5(namespace, device.hw_info + "." + device.driver)
    printer = None
    self.__device_lock.acquire()
    try:
        if self.printers.has_key(device.uuid):
            printer = self.printers[device.uuid]
        else:
            driver_cpath = os.path.join("hardware/drivers", device.driver, "config.json")
            with open(driver_cpath, "r") as config_file:
                config = json.load(config_file)
            printer_cpath = os.path.join("settings", str(device.uuid) + ".json")
            with open(printer_cpath, "w") as config_file:
                json.dump(config, config_file)
            printer = VoxelpressPrinter(device.uuid)
    except IOError:
        print "Config file could not be opened."
        print "Either:", driver_cpath, "or", printer_cpath
    except ValueError:
        print "Config file contained invalid json..."
        print "Probably", driver_cpath
    if printer:
        self.devices[device.hw_path] = device
        print "New device attached:"
        print " driver:", device.driver
        print " hwid:", device.hw_path
        print " uuid:", device.uuid
        printer.on_connect(device)
    self.__device_lock.release()
def small_uuid(name=None):
    if name is None:
        uuid = _uu.uuid4()
    elif "http" not in name.lower():
        uuid = _uu.uuid5(_uu.NAMESPACE_DNS, name)
    else:
        uuid = _uu.uuid5(_uu.NAMESPACE_URL, name)
    return SmallUUID(int=uuid.int)
def _set_system_uuid(self): # start by creating the liota namespace, this is a globally unique uuid, and exactly the same for any instance of liota self.liotaNamespace = uuid.uuid5(uuid.NAMESPACE_URL, 'https://github.com/vmware/liota') log.info(str('liota namespace uuid: ' + str(self.liotaNamespace))) # we create a system uuid for the physical system on which this instance is running # we hash the interface name with the mac address in getMacAddrIfaceHash to avoid collision of # mac addresses across potential different physical interfaces in the IoT space systemUUID.__UUID = uuid.uuid5(self.liotaNamespace, self._getMacAddrIfaceHash()) log.info('system UUID: ' + str(systemUUID.__UUID))
def _set_groups(self, proj_dic): # each group needs to have own filter with UUID proj_dic['source_groups'] = {} proj_dic['include_groups'] = {} for key in SOURCE_KEYS: for group_name, files in proj_dic[key].items(): proj_dic['source_groups'][group_name] = str(uuid.uuid5(uuid.NAMESPACE_URL, group_name)).upper() for k,v in proj_dic['include_files'].items(): proj_dic['include_groups'][k] = str(uuid.uuid5(uuid.NAMESPACE_URL, k)).upper()
def get_or_create_allocation_source(api_allocation): try: source_name = "%s" % (api_allocation['project'], ) source_id = api_allocation['id'] compute_allowed = int(api_allocation['computeAllocated']) except (TypeError, KeyError, ValueError): raise TASAPIException( "Malformed API Allocation - Missing keys in dict: %s" % api_allocation ) payload = { 'allocation_source_name': source_name, 'compute_allowed': compute_allowed, 'start_date': api_allocation['start'], 'end_date': api_allocation['end'] } try: created_event_key = 'sn=%s,si=%s,ev=%s,dc=jetstream,dc=atmosphere' % ( source_name, source_id, 'allocation_source_created_or_renewed' ) created_event_uuid = uuid.uuid5( uuid.NAMESPACE_X500, str(created_event_key) ) created_event = EventTable.objects.create( name='allocation_source_created_or_renewed', uuid=created_event_uuid, payload=payload ) assert isinstance(created_event, EventTable) except IntegrityError: # This is totally fine. No really. This should fail if it already exists and we should ignore it. pass try: compute_event_key = 'ca=%s,sn=%s,si=%s,ev=%s,dc=jetstream,dc=atmosphere' % ( compute_allowed, source_name, source_id, 'allocation_source_compute_allowed_changed' ) compute_event_uuid = uuid.uuid5( uuid.NAMESPACE_X500, str(compute_event_key) ) compute_allowed_event = EventTable.objects.create( name='allocation_source_compute_allowed_changed', uuid=compute_event_uuid, payload=payload ) assert isinstance(compute_allowed_event, EventTable) except IntegrityError: # This is totally fine. No really. This should fail if it already exists and we should ignore it. pass source = AllocationSource.objects.get(name__iexact=source_name) return source
def get_id(self, entry):
    if 'entryUUID' in entry:
        return entry['entryUUID.0']
    if 'uidNumber' in entry:
        return uuid.uuid5(LDAP_USER_UUID, entry['uidNumber.0'])
    if 'gidNumber' in entry:
        return uuid.uuid5(LDAP_GROUP_UUID, entry['gidNumber.0'])
    return uuid.uuid4()
def get_uuid(s='default', bit=0): try: bit = int(bit) except ValueError: bit = 0 if bit: return ''.join(random.sample(uuid.uuid5(uuid.uuid4(), s).get_hex(), bit)) else: return uuid.uuid5(uuid.uuid4(), s).get_hex()
def __init__(self, module, name, alias, full_path=None, is_our=True): # from base.module import some_function as sf self.what = name # e.g. some_function self.alias = alias # e.g. sf self.who = full_path # name of current module self.which_module = module # e.g base.module self.is_our = is_our # defines whether this code is from project or from 3rd party resources self._extra = (self.which_module + "." + self.what).lstrip('.') if self.who is not None: generated_id = uuid.uuid5(uuid.NAMESPACE_OID, self.who).get_hex() else: generated_id = uuid.uuid5(uuid.NAMESPACE_OID, self._extra).get_hex() self.id = generated_id
def generate_session_id(context): """ Generates a new session id. """ membership = getToolByName(context, 'portal_membership') # anonymous users get random uuids if membership.isAnonymousUser(): return uuid.uuid4() # logged in users get ids which are predictable for each plone site namespace = uuid.uuid5(root_namespace, str(getSite().id)) return uuid.uuid5(namespace, str(membership.getAuthenticatedMember().getId()))
def uuid(self, name = None, pad_length = 22): """ Generate and return a UUID. If the name parameter is provided, set the namespace to the provided name and generate a UUID. """ if name is None: uuid = _uu.uuid4() elif 'http' not in name.lower(): uuid = _uu.uuid5(_uu.NAMESPACE_DNS, name) else: uuid = _uu.uuid5(_uu.NAMESPACE_URL, name) return self.encode(uuid, pad_length)
def __mapIDToBE(self, xmlelem): """Maps a Redmine id to BE uuid""" if self.element.tag=="journal": self.__commentType=1 self.redmine_id=int(self.element.attrib['id']) self.parentParser.journalids[self.redmine_id]=self ret=uuid.uuid5(self.uuid_namespace, str(self.redmine_id)) elif self.element.tag=="issue": self.__commentType=0 self.redmine_id=int(self.element.find("id").text) ret=uuid.uuid5(self.parentIssue.uuid_namespace, str(self.redmine_id)) else: raise Exception("Unknown comment type '"+self.element.tag+"'") return ret
def test_01_update(self, side_effect): uuid1 = '601d3b48-a44f-40f3-aa7a-35da4a10a099' uuid2 = '0be7d422-1635-11e7-a83f-68f728db19d3' hashed_uuid1 = str(uuid.uuid5(uuid.UUID(uuid1), USER_NAME)) hashed_uuid2 = str(uuid.uuid5(uuid.UUID(uuid2), USER_NAME)) #We add an existin connection to trigger an Update method self.settings.AddConnection( dbus.Dictionary({ 'connection': dbus.Dictionary({ 'id': 'test connection', 'uuid': hashed_uuid1, 'type': '802-11-wireless'}, signature='sv'), '802-11-wireless': dbus.Dictionary({ 'ssid': dbus.ByteArray('The_SSID'.encode('UTF-8'))}, signature='sv') }) ) ca = NetworkManagerConfigAdapter() ca.bootstrap(self.TEST_UID) ca.update(self.TEST_UID, self.TEST_DATA) path1 = self.settings.GetConnectionByUuid(hashed_uuid1) path2 = self.settings.GetConnectionByUuid(hashed_uuid2) conns = self.settings.ListConnections() self.assertEqual(len(conns), 2) self.assertIn(path1, conns) self.assertIn(path2, conns) conn1 = dbus.Interface(self.dbus_con.get_object(MANAGER_IFACE, path1), 'org.freedesktop.NetworkManager.Settings.Connection') conn2 = dbus.Interface(self.dbus_con.get_object(MANAGER_IFACE, path2), 'org.freedesktop.NetworkManager.Settings.Connection') conn1_sett = conn1.GetSettings() conn2_sett = conn2.GetSettings() self.assertEqual(conn1_sett['connection']['uuid'], hashed_uuid1) self.assertEqual(conn2_sett['connection']['uuid'], hashed_uuid2) self.assertEqual(conn1_sett['connection']['permissions'], ['user:%s:' % USER_NAME,]) self.assertEqual(conn2_sett['connection']['permissions'], ['user:%s:' % USER_NAME,]) self.assertEqual(conn1_sett['user']['data']['org.fleet-commander.connection'], 'true') self.assertEqual(conn1_sett['user']['data']['org.fleet-commander.connection.uuid'], uuid1) self.assertEqual(conn2_sett['user']['data']['org.fleet-commander.connection'], 'true') self.assertEqual(conn2_sett['user']['data']['org.fleet-commander.connection.uuid'], uuid2)
def test_uuid5(self): equal = self.assertEqual # Test some known version-5 UUIDs. for u, v in [ (uuid.uuid5(uuid.NAMESPACE_DNS, "python.org"), "886313e1-3b8a-5372-9b90-0c9aee199e5d"), (uuid.uuid5(uuid.NAMESPACE_URL, "http://python.org/"), "4c565f0d-3f5a-5890-b41b-20cf47701c5e"), (uuid.uuid5(uuid.NAMESPACE_OID, "1.3.6.1"), "1447fa61-5277-5fef-a9b3-fbc6e44f4af3"), (uuid.uuid5(uuid.NAMESPACE_X500, "c=ca"), "cc957dd1-a972-5349-98cd-874190002798"), ]: equal(u.variant, uuid.RFC_4122) equal(u.version, 5) equal(u, uuid.UUID(v)) equal(str(u), v)
def uuid(name=None):
    """
    Generate and return a UUID.

    If the name parameter is provided, set the namespace to the provided
    name and generate a UUID.
    """
    # If no name is given, generate a random UUID.
    if name is None:
        uuid = _uu.uuid4()
    elif "http" not in name:
        uuid = _uu.uuid5(_uu.NAMESPACE_DNS, name)
    else:
        uuid = _uu.uuid5(_uu.NAMESPACE_URL, name)
    return encode(uuid)
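The name-to-UUID helpers above (small_uuid, the uuid method, and this function) share the same dispatch: no name yields a random uuid4, a name mentioning "http" is treated as a URL, anything else as a DNS name. A standalone sketch of that dispatch, without the project-specific encode step:

import uuid as _uu

def name_to_uuid(name=None):
    # No name: random UUID; URL-ish name: URL namespace; otherwise DNS namespace.
    if name is None:
        return _uu.uuid4()
    namespace = _uu.NAMESPACE_URL if "http" in name.lower() else _uu.NAMESPACE_DNS
    return _uu.uuid5(namespace, name)

print(name_to_uuid("example.org"))          # stable, DNS-namespaced
print(name_to_uuid("http://example.org/"))  # stable, URL-namespaced
print(name_to_uuid())                       # random each call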
def get_device_id(self, network):
    """Return a unique DHCP device ID for this host on the network."""
    # There could be more than one dhcp server per network, so create
    # a device id that combines host and network ids
    host_uuid = uuid.uuid5(uuid.NAMESPACE_DNS, socket.gethostname())
    return 'dhcp%s-%s' % (host_uuid, network.id)
def test_resource_non_ascii_chars(self): # IDs with non-ASCII characters will be UUID5 namespaced value = u'ß' * 32 if six.PY2: value = value.encode('utf-8') expected_id = uuid.uuid5(common_utils.RESOURCE_ID_NAMESPACE, value).hex self.assertEqual(expected_id, common_utils.resource_uuid(value))
def mk_uuid(namespace, seed):
    # NB: we want to have the GUID strings be repeatable, so generate them
    # from a repeatable seed
    from uuid import uuid5, UUID
    guid = uuid5(UUID(namespace), seed)
    return "%s" % str(guid).upper()  # MSVS uses upper-case strings for GUIDs
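Usage-wise, the same namespace string and seed always reproduce the same upper-case GUID, which is what keeps regenerated project files stable; the namespace value below is an arbitrary example:

from uuid import uuid5, UUID

ns = "f19577f9-3a29-4bd5-a3d2-d3ad6a4a87d0"   # arbitrary example namespace
a = str(uuid5(UUID(ns), "mylib.vcxproj")).upper()
b = str(uuid5(UUID(ns), "mylib.vcxproj")).upper()
assert a == b    # repeatable, so project files can be regenerated safely
print(a)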
def amend_package(self, package): if len(package['name']) > 100: package['name'] = package['name'][:100] if not package['groups']: package['groups'] = [] #copy autor to author quelle = {} if 'contacts' in package['extras']: quelle = filter(lambda x: x['role'] == 'autor', package['extras']['contacts'])[0] if not package['author'] and quelle: package['author'] = quelle['name'] if not package['author_email']: if 'email' in quelle: package['author_email'] = quelle['email'] if not "spatial-text" in package["extras"].keys(): package["extras"]["spatial-text"] = 'Bayern 09' for r in package['resources']: r['format'] = r['format'].upper() #generate id based on OID namespace and package name, this makes sure, #that packages with the same name get the same id package['id'] = str(uuid.uuid5(uuid.NAMESPACE_OID, str(package['name'])))
def get_dhcp_agent_device_id(network_id, host):
    # Split host so as to always use only the hostname and
    # not the domain name. This will guarantee consistency
    # whether a local hostname or an fqdn is passed in.
    local_hostname = host.split('.')[0]
    host_uuid = uuid.uuid5(uuid.NAMESPACE_DNS, str(local_hostname))
    return 'dhcp%s-%s' % (host_uuid, network_id)
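A quick check of the hostname-splitting behaviour with a made-up host name and network id: the FQDN and the bare hostname map to the same device id, which is the consistency the comment asks for.

import uuid

net_id = 'a1b2c3d4-e5f6-7890-abcd-ef0123456789'   # hypothetical network id

def device_id(host):
    local_hostname = host.split('.')[0]
    return 'dhcp%s-%s' % (uuid.uuid5(uuid.NAMESPACE_DNS, local_hostname), net_id)

# fqdn and bare hostname resolve to the same device id
assert device_id('compute-01.example.com') == device_id('compute-01')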
def GUID(namespace, solution, data):
    """
    Generates GUID in given namespace, for given solution (bkl project),
    with given data (typically, target ID).
    """
    g = uuid.uuid5(namespace, '%s/%s' % (str(solution), str(data)))
    return str(g).upper()
def _build_port_dict(self): return {'admin_state_up': True, 'device_owner': 'neutron:{0}'.format( constants.LOADBALANCER), 'device_id': str(uuid.uuid5(uuid.NAMESPACE_DNS, str(self.conf.host))), portbindings.HOST_ID: self.conf.host}
def post(self, template_variables = {}): template_variables = {} if(not "avatar" in self.request.files): template_variables["errors"] = {} template_variables["errors"]["invalid_avatar"] = [u"请先选择要上传的头像"] self.get(template_variables) return user_info = self.current_user user_id = user_info["uid"] avatar_name = "%s" % uuid.uuid5(uuid.NAMESPACE_DNS, str(user_id)) avatar_raw = self.request.files["avatar"][0]["body"] avatar_buffer = StringIO.StringIO(avatar_raw) avatar = Image.open(avatar_buffer) # crop avatar if it's not square avatar_w, avatar_h = avatar.size avatar_border = avatar_w if avatar_w < avatar_h else avatar_h avatar_crop_region = (0, 0, avatar_border, avatar_border) avatar = avatar.crop(avatar_crop_region) avatar_96x96 = avatar.resize((96, 96), Image.ANTIALIAS) avatar_48x48 = avatar.resize((48, 48), Image.ANTIALIAS) avatar_32x32 = avatar.resize((32, 32), Image.ANTIALIAS) usr_home = os.path.expanduser('~') print usr_home avatar_96x96.save(usr_home+"/www/tuila/static/avatar/b_%s.png" % avatar_name, "PNG") avatar_48x48.save(usr_home+"/www/tuila/static/avatar/m_%s.png" % avatar_name, "PNG") avatar_32x32.save(usr_home+"/www/tuila/static/avatar/s_%s.png" % avatar_name, "PNG") result = self.user_model.set_user_avatar_by_uid(user_id, "%s.png" % avatar_name) template_variables["success_message"] = [u"用户头像更新成功"] # update `updated` updated = self.user_model.set_user_base_info_by_uid(user_id, {"updated": time.strftime('%Y-%m-%d %H:%M:%S')}) self.get(template_variables)
def AddServer(self): root = '/com/intel/dLeynaServer/server/%d' % (self.next_server_id,) # Pre-process the items tree anchoring paths to the new root object items = copy.deepcopy(ITEMS) for item in items: item['Path'] = dbus.ObjectPath(item['Path'].replace('{root}', root)) item['Parent'] = dbus.ObjectPath(item['Parent'].replace('{root}', root)) for item in items: path = item['Path'] self.AddObject(path, 'org.gnome.UPnP.MediaObject2', {}, []) obj = get_object(path) obj.items = items if path == root: item['FriendlyName'] = 'Mock Server <#{0}>'.format(self.next_server_id) item['UDN'] = str(uuid.uuid5(uuid.UUID('9123ef5c-f083-11e2-8000-000000000000'), str(self.next_server_id))) obj.AddTemplate("dleynamediadevice.py", filter_properties (item, MEDIA_DEVICE_PROPERTIES)) obj.AddTemplate("dleynamediaobject.py", filter_properties(item, MEDIA_OBJECT2_PROPERTIES)) obj.AddTemplate("dleynamediacontainer.py", filter_properties(item, MEDIA_CONTAINER2_PROPERTIES)) self.servers.append(root) self.EmitSignal(MAIN_IFACE, 'FoundServer', 'o', [root]) self.next_server_id += 1 return path
def doParseSubPage(self, subPageUrl):
    if not Login.login():
        assert False, "Fail to login"
    page = Page()
    try:
        page.url = subPageUrl
        page.uniqueID = str(uuid.uuid5(uuid.NAMESPACE_URL, page.url))
        subPage = Utils.urlGet(subPageUrl)
        # imgList = Utils.getAttrList(subPage, 'img[src*="imgroom"]', CrawlConstants.ATTR_SRC)
        # page.imgSrcList.extend(imgList)
        # imgList = Utils.getAttrList(subPage, 'img[src*="imgur"]', CrawlConstants.ATTR_SRC)
        # page.imgSrcList.extend(imgList)
        imgList = Utils.getAttrList(subPage, CrawlConstants.SUB_PAGE_IMG_CSS, CrawlConstants.ATTR_SRC)
        if imgList is None or len(imgList) == 0:
            print("Cannot find image in " + subPageUrl)
            return page
        page.imgSrcList.extend(list(set(imgList)))
    except Exception as e:
        print("Exception: " + str(e))
    wx.CallAfter(Utils.publish, "parseSubPage", page)
def yaml_prep_water(scene_dir, original_yml): """ Prepare individual wofs directory containing L8/S2/S1 cog water products. """ # scene_name = scene_dir.split('/')[-2][:26] scene_name = scene_dir.split('/')[-2] print("Preparing scene {}".format(scene_name)) print("Scene path {}".format(scene_dir)) # find all cog prods prod_paths = glob.glob(scene_dir + '*water*.tif') # print ( 'paths: {}'.format(prod_paths) ) # for i in prod_paths: print ( i ) # date time assumed eqv for start and stop - this isn't true and could be # pulled from .xml file (or scene dir) not done yet for sake of progression t0 = parse( str( datetime.strptime(original_yml['extent']['center_dt'], '%Y-%m-%d %H:%M:%S'))) # print ( t0 ) t1 = t0 # print ( t1 ) # name image product images = { prod_path.split('_')[-1][:9]: { 'path': str(prod_path.split('/')[-1]) } for prod_path in prod_paths } print(images) # trusting bands coaligned, use one to generate spatial bounds for all projection, extent = get_geometry( os.path.join(str(scene_dir), images['watermask']['path'])) # extent = print(projection, extent) new_id = str(uuid.uuid5(uuid.NAMESPACE_URL, f"{scene_name}_water")) return { 'id': new_id, 'processing_level': original_yml['processing_level'], 'product_type': "mlwater", 'creation_dt': str(datetime.today().strftime('%Y-%m-%d %H:%M:%S')), 'platform': { 'code': original_yml['platform']['code'] }, 'instrument': { 'name': original_yml['instrument']['name'] }, 'extent': { 'coord': original_yml['extent']['coord'], 'from_dt': str(t0), 'to_dt': str(t1), 'center_dt': str(t0 + (t1 - t0) / 2) }, 'format': { 'name': 'GeoTiff' }, 'grid_spatial': { 'projection': projection }, 'image': { 'bands': images }, 'lineage': { 'source_datasets': original_yml['lineage']['source_datasets'], } }
def uuidForLayer(layer): return str(uuid.uuid5(uuid.NAMESPACE_DNS, layer.source()))
# Copyright 2015 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. # Each CLSID is a hash of the current version string salted with an # arbitrary GUID. This ensures that the newly installed COM classes will # be used during/after upgrade even if there are old instances running # already. # The IDs are not random to avoid rebuilding host when it's not # necessary. import uuid import sys if len(sys.argv) != 4: print """Expecting 3 args: <daemon_controller_guid> <rdp_desktop_session_guid> <version>""" sys.exit(1) daemon_controller_guid = sys.argv[1] rdp_desktop_session_guid = sys.argv[2] version_full = sys.argv[3] # Output a GN list of 2 strings. print '["' + \ str(uuid.uuid5(uuid.UUID(daemon_controller_guid), version_full)) + '", ' print '"' + \ str(uuid.uuid5(uuid.UUID(rdp_desktop_session_guid), version_full)) + '"]'
def _get_set_type_id(cls, basetype_id): return uuid.uuid5(s_types.TYPE_ID_NAMESPACE, 'set-of::' + str(basetype_id))
def make(self, key):
    for word in ('Habenula', 'Hippocampus', 'Hypothalamus', 'Hypophysis'):
        self.insert1(
            dict(key, word=word, item_id=uuid.uuid5(key['topic_id'], word)))
def get_url_cache_path(cls, url): return cls.get_cache_path(str(uuid5(NAMESPACE_URL, url)))
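Since the path is derived from uuid5(NAMESPACE_URL, url), the same URL always maps to the same cache file name, so earlier downloads can be found again without keeping an index. A standalone sketch with an assumed cache directory:

import os
from uuid import uuid5, NAMESPACE_URL

CACHE_DIR = "/tmp/url-cache"   # assumed location

def url_cache_path(url):
    return os.path.join(CACHE_DIR, str(uuid5(NAMESPACE_URL, url)))

print(url_cache_path("https://example.org/data.json"))
print(url_cache_path("https://example.org/data.json"))  # identical path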
} # Load data from stdin (generated by enry) enry_data = json.load(sys.stdin) # Now add in UASTs for supported_lang in PARSER_MAP.keys(): if supported_lang in enry_data: for src_file in enry_data[supported_lang]: # Start timing parse parse_start_time = datetime.datetime.now() # Generate a UUID to use uid = str( uuid.uuid5( uuid.NAMESPACE_URL, "https://{}/{}?at={}&branch={}".format( sys.argv[1], src_file, sys.argv[3], sys.argv[2]))) # Args for popen pargs = [ "bblfsh-cli", "-a", PARSER_MAP[supported_lang][0], "-l", PARSER_MAP[supported_lang][1], '-m', 'native', "--v2", './{}'.format(src_file) ] print(' '.join(pargs), file=sys.stderr) # Invoke bblfsh-cli (and handle possible failures) UAST = {} GOOD_PARSE = False info = {}
def handle(self): if not self.user.student_id: return self.reply_text(self.get_message('bind_account')) currentTime = datetime.datetime.now().timestamp() if self.is_event('CLICK'): event_key = self.view.event_keys['book_header'] event_key += self.input['EventKey'][len(event_key):] if self.is_event_click(event_key): act_id = self.input['EventKey'][ len(self.view.event_keys['book_header']):] activity = self.get_activity(act_id) if not activity: return self.reply_text('对不起,没有该项活动') if currentTime < activity.book_start.timestamp(): return self.reply_text('对不起,还未开放抢票') if currentTime > activity.book_end.timestamp(): return self.reply_text('对不起,抢票已经截止') #check if the student has book a ticket: if Ticket.objects.filter(student_id=self.user.student_id, activity=activity, status=Ticket.STATUS_VALID): return self.reply_text('一个人只能抢一张票哦 ^口..口^') if Ticket.objects.filter(student_id=self.user.student_id, activity=activity, status=Ticket.STATUS_USED): return self.reply_text('您已经抢过该活动的票且使用过') #lock!lock!lock! ticket = self.book_ticket(act_id) activity = self.get_activity(act_id) if ticket: return self.reply_single_news({ 'Title': activity.name, 'Description': '抢票成功', 'Url': ticket, 'PicUrl': activity.pic_url, }) else: return self.reply_text('没有多的票了!请自行尝试劝退抢到票的朋友们~') else: return False elif self.is_text('抢票'): query = self.input['Content'][3:] activity = Activity.objects.filter(key=query).first() if not activity: activity = Activity.objects.filter(name=query).first() if not activity: return self.reply_text('对不起,没有该项活动') if currentTime < activity.book_start.timestamp(): return self.reply_text('对不起,还未开放抢票') if currentTime > activity.book_end.timestamp(): return self.reply_text('对不起,抢票已经截止') if Ticket.objects.filter(student_id=self.user.student_id, activity=activity, status=Ticket.STATUS_VALID): return self.reply_text('一个人只能抢一张票哦 ^口..口^') if Ticket.objects.filter(student_id=self.user.student_id, activity=activity, status=Ticket.STATUS_USED): return self.reply_text('您已经抢过该活动的票且使用过') if activity.remain_tickets == 0: return self.reply_text('没有多的票了!请自行尝试劝退抢到票的朋友们~') unique_id = uuid.uuid5( uuid.NAMESPACE_DNS, self.user.student_id + activity.name + str(currentTime)) Ticket.objects.create(student_id=self.user.student_id, unique_id=unique_id, activity=activity, status=Ticket.STATUS_VALID) activity.remain_tickets -= 1 activity.save() ticket = self.url_ticket(unique_id) return self.reply_single_news({ 'Title': activity.name, 'Description': '抢票成功', 'Url': ticket, 'PicUrl': activity.pic_url, })
def get_client_id(request):
    parts = [request.META.get(key, '') for key in FINGERPRINT_PARTS]
    return uuid5(UUID_NAMESPACE, '_'.join(parts))
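A hedged standalone version of the same idea; the fingerprint parts and the namespace UUID below are illustrative assumptions, not the project's real constants:

from uuid import uuid5, UUID

UUID_NAMESPACE = UUID('2f6c8e7a-5b1d-4a3e-9c0f-1d2e3f405060')  # assumed value
FINGERPRINT_PARTS = ('HTTP_USER_AGENT', 'HTTP_ACCEPT_LANGUAGE', 'REMOTE_ADDR')  # assumed keys

def client_id(meta):
    # Join whatever fingerprint parts are present into one stable uuid5.
    parts = [meta.get(key, '') for key in FINGERPRINT_PARTS]
    return uuid5(UUID_NAMESPACE, '_'.join(parts))

meta = {'HTTP_USER_AGENT': 'Mozilla/5.0', 'REMOTE_ADDR': '203.0.113.7'}
print(client_id(meta))   # same meta dict always gives the same id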
def uuid5(self, namespace, name, pad_to_length=None): if pad_to_length is None: pad_to_length = self.uuid_pad_len return num_to_string( _uuid.uuid5(namespace, name).int, self.alphabet, self.alphabet_len, pad_to_length)
async def ids(params: RequestParams) -> IDs: """Returns yours new IDs.""" try: cnt = await get_counters() except ConnectionError: logger.warning('Reconnecting to counter service...') await asyncio.sleep(0.01) cnt = await get_counters() counters = Counters( counter=cnt.counter, daily=cnt.daily, hourly=cnt.hourly, minutely=cnt.minutely, ) uuids = UUIDs( uuid1=uuid.uuid1(params.uuid_node if params.uuid_node else ( randbits(48) | (1 << 40))), uuid3=uuid.uuid3( params.uuid_namespace, params.uuid_name if params.uuid_name else token_urlsafe(TOKENS_SIZE)), uuid4=uuid.uuid4(), uuid5=uuid.uuid5( params.uuid_namespace, params.uuid_name if params.uuid_name else token_urlsafe(TOKENS_SIZE)), ) now = datetime.utcnow() date = now.year * 10000 + now.month * 100 + now.day time = now.hour * 10000 + now.minute * 100 + now.second time = Time( iso=now.isoformat(), timestamp=now.timestamp(), date=date, time=time, datetime=date * 1000000 + time, year=now.year, month=now.month, day=now.day, hour=now.hour, minute=now.minute, second=now.second, microsecond=now.microsecond, ) rnd = Random( urlsafe=token_urlsafe(TOKENS_SIZE), hex=token_hex(TOKENS_SIZE), integer=randbelow(params.random_max_int), bits=randbits(RANDBITS_SIZE), ) return IDs( counters=counters, uuid=uuids, time=time, random=rnd, )
def cache_key_mangler(key):
    """Construct an opaque cache key."""
    if six.PY2:
        key = key.encode('utf-8')
    return uuid.uuid5(CACHE_NAMESPACE, key).hex
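Arbitrarily long cache keys collapse to fixed-length 32-character hex strings, and equal keys deliberately collide. A Python 3-only usage sketch; CACHE_NAMESPACE is generated here only because its actual value is not shown above:

import uuid

CACHE_NAMESPACE = uuid.uuid4()   # the real module uses a fixed constant

def cache_key_mangler(key):
    """Construct an opaque cache key."""
    return uuid.uuid5(CACHE_NAMESPACE, key).hex

k = cache_key_mangler('tokens/' + 'x' * 300)
assert len(k) == 32
assert k == cache_key_mangler('tokens/' + 'x' * 300)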
import datetime import random import string from urllib import request from uuid import uuid5, NAMESPACE_X500 UUID = lambda x: str( uuid5( NAMESPACE_X500, str(x) + str(datetime.datetime.now()) + ''.join( random.sample(string.ascii_letters + string.digits, 8)))) def get_price_min(code): # 最低价 url_base = "http://qt.gtimg.cn/q=" if code[0] == '6': url = url_base + 'sh' + code else: url = url_base + 'sz' + code req = request.Request(url) response = request.urlopen(req) page = response.read().decode('gbk') data = page.split('~') return data[48] def getprice(code): # 股票价格 url_base = "http://qt.gtimg.cn/q=s_" if code[0] == '6': url = url_base + 'sh' + code else:
def main(options): if hasattr(options, 'vendor_domain') and options.vendor_domain: if len(options.vendor_domain.split('.')) < 2: LOG.critical('"{0}" is not a valid domain name.'.format( options.vendor_domain)) return 1 options.vendor_id = str( uuid.uuid5(uuid.NAMESPACE_DNS, options.vendor_domain)) vendorId = uuid.UUID(options.vendor_id) if hasattr(options, 'model_name') and options.model_name: options.class_id = str(uuid.uuid5(vendorId, options.model_name)) classId = uuid.UUID(options.class_id) cert_required = True certFile = None if options.certificate: cert_required = False certFile = options.certificate.name elif hasattr(options, 'force') and options.force: cert_required = True else: try: options.certificate = open(defaults.certificate, 'rb') options.private_key = open(defaults.certificateKey, 'rb') cert_required = False LOG.warning('{} and {} already exist, not overwriting.'.format( defaults.certificate, defaults.certificateKey)) except: cert_required = True if cert_required: cmd = [ 'cert', 'create', '-o', defaults.certificate, '-K', defaults.certificateKey ] country = '' state = '' locality = '' organization = '' commonName = '' if hasattr(options, 'vendor_domain') and options.vendor_domain: commonName = options.vendor_domain validity = defaults.certificateDuration if not options.quiet: print( 'A certificate has not been provided to init, and no certificate is provided in {cert}' .format(cert=defaults.certificate)) print( 'Init will now guide you through the creation of a certificate.' ) print() print( 'This process will create a self-signed certificate, which is not suitable for production use.' ) print() print( 'In the terminology used by certificates, the "subject" means the holder of the private key that matches a certificate.' ) country = input('In which country is the subject located? ') state = input( 'In which state or province is the subject located? ') locality = input( 'In which city or region is the subject located? ') organization = input( 'What is the name of the subject organization? ') commonName = '' if hasattr(options, 'vendor_domain') and options.vendor_domain: commonName = input( 'What is the common name of the subject organization? [{}]' .format(options.vendor_domain)) or options.vendor_domain else: commonName = input( 'What is the common name of the subject organization? ') validity = input( 'How long (in days) should the certificate be valid? [{}]'. format(defaults.certificateDuration )) or defaults.certificateDuration try: os.makedirs(defaults.certificatePath) except os.error: # It is okay if the directory already exists. 
If something else went wrong, we'll find out when the # create occurs pass cmd = [ 'cert', 'create', '-o', defaults.certificate, '-K', defaults.certificateKey, '-V', str(validity) ] if country: cmd += ['-C', country] if state: cmd += ['-S', state] if locality: cmd += ['-L', locality] if organization: cmd += ['-O', organization] if commonName: cmd += ['-U', commonName] cert_opts = MainArgumentParser().parse_args(cmd).options rc = cert.main(cert_opts) if rc: sys.exit(1) options.certificate = open(defaults.certificate, 'rb') LOG.info('Certificate written to {}'.format(defaults.certificate)) options.private_key = open(defaults.certificateKey, 'rb') LOG.info('Private key written to {}'.format(defaults.certificateKey)) # Write the settings settings = { 'default-certificates': [{ 'file': options.certificate.name }], 'signing-script': options.signing_script, 'private-key': options.private_key.name, 'classId': str(classId), 'vendorId': str(vendorId), 'vendorDomain': options.vendor_domain, 'modelName': options.model_name } with open(defaults.config, 'w') as f: f.write(json.dumps(settings, sort_keys=True, indent=4)) LOG.info('Default settings written to {}'.format(defaults.config)) try: writeUpdateDefaults(options) except ValueError as e: LOG.critical('Error setting defaults: {}'.format(e.message)) return 1 cloud_settings = {} if hasattr(options, 'server_address') and options.server_address: cloud_settings['host'] = options.server_address if hasattr(options, 'api_key') and options.api_key: cloud_settings['api_key'] = options.api_key if cloud_settings: with open(defaults.cloud_config, 'w') as f: f.write(json.dumps(cloud_settings, sort_keys=True, indent=4)) LOG.info('Cloud settings written to {}'.format( defaults.cloud_config)) sys.exit(0)
def to_uuid(string): return str(uuid.uuid5(UUID_NAMESPACE_ANSIBLE, str(string)))
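Repeated calls with the same input produce the same UUID string, which makes this useful for stable identifiers derived from names; the namespace constant below is an assumed value, not necessarily Ansible's real one:

import uuid

UUID_NAMESPACE_ANSIBLE = uuid.UUID('361e6d51-faec-5c0f-8d2f-9c4c0b4a2e61')  # assumed value

def to_uuid(string):
    return str(uuid.uuid5(UUID_NAMESPACE_ANSIBLE, str(string)))

assert to_uuid('db01.example.com') == to_uuid('db01.example.com')
print(to_uuid('db01.example.com'))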
def uuid(self): return uuid5(NAMESPACE_URL, self.api.base_url)
def post_battle(i, results, s_flag, t_flag, m_flag, sendgears, debug, ismonitor=False): '''Uploads battle #i from the provided results dictionary.''' ############# ## PAYLOAD ## ############# payload = {'agent': 'splatnet2statink', 'agent_version': A_VERSION, 'automated': 'yes'} agent_variables = {'upload_mode': "Monitoring" if ismonitor else "Manual"} payload["agent_variables"] = agent_variables bn = results[i]["battle_number"] ver4 = True if "version" in results[i] and results[i]["version"] == 4 else False principal_id = results[i]["player_result"]["player"]["principal_id"] namespace = uuid.UUID(u'{73cf052a-fd0b-11e7-a5ee-001b21a098c2}') name = "{}@{}".format(bn, principal_id) payload["uuid"] = str(uuid.uuid5(namespace, name)) ################## ## LOBBY & MODE ## ################## lobby = results[i]["game_mode"]["key"] if lobby == "regular": # turf war payload["lobby"] = "standard" payload["mode"] = "regular" elif lobby == "gachi": # ranked solo payload["lobby"] = "standard" payload["mode"] = "gachi" elif lobby == "league_pair": # league pair payload["lobby"] = "squad_2" payload["mode"] = "gachi" elif lobby == "league_team": # league team payload["lobby"] = "squad_4" payload["mode"] = "gachi" elif lobby == "private": # private battle payload["lobby"] = "private" payload["mode"] = "private" elif lobby == "fes_solo": # splatfest pro / solo payload["lobby"] = "fest_pro" if ver4 else "standard" payload["mode"] = "fest" # ["fes_mode"]["key"] == "fes.result.challenge" elif lobby == "fes_team": # splatfest normal / team payload["lobby"] = "fest_normal" if ver4 else "squad_4" payload["mode"] = "fest" # ["fes_mode"]["key"] == "fes.result.regular" ########## ## RULE ## ########## rule = results[i]["rule"]["key"] if rule == "turf_war": payload["rule"] = "nawabari" elif rule == "splat_zones": payload["rule"] = "area" elif rule == "tower_control": payload["rule"] = "yagura" elif rule == "rainmaker": payload["rule"] = "hoko" elif rule == "clam_blitz": payload["rule"] = "asari" ########### ## STAGE ## ########### stage = int(results[i]["stage"]["id"]) payload["stage"] = "#{}".format(stage) ############ ## WEAPON ## ############ weapon = int(results[i]["player_result"]["player"]["weapon"]["id"]) payload["weapon"] = "#{}".format(weapon) ############ ## RESULT ## ############ result = results[i]["my_team_result"]["key"] # victory, defeat if result == "victory": payload["result"] = "win" elif result == "defeat": payload["result"] = "lose" ########################## ## TEAM PERCENTS/COUNTS ## ########################## try: my_percent = results[i]["my_team_percentage"] their_percent = results[i]["other_team_percentage"] except KeyError: pass # don't need to handle - won't be put into the payload unless relevant try: my_count = results[i]["my_team_count"] their_count = results[i]["other_team_count"] except: pass mode = results[i]["type"] # regular, gachi, league, fes if mode == "regular" or mode == "fes": payload["my_team_percent"] = my_percent payload["his_team_percent"] = their_percent elif mode == "gachi" or mode == "league": payload["my_team_count"] = my_count payload["his_team_count"] = their_count if my_count == 100 or their_count == 100: payload["knock_out"] = "yes" else: payload["knock_out"] = "no" ################ ## TURF INKED ## ################ turfinked = results[i]["player_result"]["game_paint_point"] # without bonus if rule == "turf_war": if result == "victory": payload["my_point"] = turfinked + 1000 # win bonus else: payload["my_point"] = turfinked else: payload["my_point"] = turfinked 
################# ## KILLS, ETC. ## ################# kill = results[i]["player_result"]["kill_count"] k_or_a = results[i]["player_result"]["kill_count"] + results[i]["player_result"]["assist_count"] special = results[i]["player_result"]["special_count"] death = results[i]["player_result"]["death_count"] payload["kill"] = kill payload["kill_or_assist"] = k_or_a payload["special"] = special payload["death"] = death ########### ## LEVEL ## ########### level_before = results[i]["player_result"]["player"]["player_rank"] level_after = results[i]["player_rank"] star_rank = results[i]["star_rank"] payload["level"] = level_before payload["level_after"] = level_after payload["star_rank"] = star_rank ########## ## RANK ## ########## try: # udemae not present in all modes rank_after = results[i]["udemae"]["name"].lower() # non-null after playing first solo battle rank_before = results[i]["player_result"]["player"]["udemae"]["name"].lower() rank_exp_after = results[i]["udemae"]["s_plus_number"] rank_exp = results[i]["player_result"]["player"]["udemae"]["s_plus_number"] except: # based on in-game, not app scoreboard, which displays --- (null rank) as separate than C- rank_after, rank_before, rank_exp_after, rank_exp = None, None, None, None # e.g. private battle where a player has never played ranked before if rule != "turf_war": # only upload if ranked payload["rank_after"] = rank_after payload["rank"] = rank_before payload["rank_exp_after"] = rank_exp_after payload["rank_exp"] = rank_exp try: if results[i]["udemae"]["is_x"]: # == true. results[i]["udemae"]["number"] should be 128 payload["x_power_after"] = results[i]["x_power"] # can be null if not played placement games if mode == "gachi": payload["estimate_x_power"] = results[i]["estimate_x_power"] # team power, approx payload["worldwide_rank"] = results[i]["rank"] # goes below 500, not sure how low (doesn't exist in league) # top_500 from crown_players set in scoreboard method except: pass ##################### ## START/END TIMES ## ##################### try: elapsed_time = results[i]["elapsed_time"] # apparently only a thing in ranked except KeyError: elapsed_time = 180 # turf war - 3 minutes in seconds payload["start_at"] = results[i]["start_time"] payload["end_at"] = results[i]["start_time"] + elapsed_time payload["duration"] = elapsed_time ################### ## SPLATNET DATA ## ################### payload["private_note"] = "Battle #{}".format(bn) payload["splatnet_number"] = bn if mode == "league": payload["my_team_id"] = results[i]["tag_id"] payload["league_point"] = results[i]["league_point"] payload["my_team_estimate_league_point"] = results[i]["my_estimate_league_point"] payload["his_team_estimate_league_point"] = results[i]["other_estimate_league_point"] if mode == "gachi": payload["estimate_gachi_power"] = results[i]["estimate_gachi_power"] gender = results[i]["player_result"]["player"]["player_type"]["style"] payload["gender"] = gender species = results[i]["player_result"]["player"]["player_type"]["species"][:-1] payload["species"] = species ############################ ## SPLATFEST TITLES/POWER ## ############################ https://github.com/fetus-hina/stat.ink/blob/master/doc/api-2/post-battle.md#fest_title-fest_title_after if mode == "fes": title_before = results[i]["player_result"]["player"]["fes_grade"]["rank"] title_after = results[i]["fes_grade"]["rank"] fest_exp_after = results[i]["fes_point"] # present in pro, 0 in normal payload["fest_power"] = results[i]["fes_power"] # universal system pre-ver.4. 
now present in both pro & normal but hidden in normal payload["my_team_estimate_fest_power"] = results[i]["my_estimate_fes_power"] payload["his_team_estimate_fest_power"] = results[i]["other_estimate_fes_power"] payload["my_team_fest_theme"] = results[i]["my_team_fes_theme"]["name"] payload["his_team_fest_theme"] = results[i]["other_team_fes_theme"]["name"] payload["fest_title"] = translate_fest_rank[title_before] payload["fest_title_after"] = translate_fest_rank[title_after] payload["fest_exp_after"] = fest_exp_after points_gained = 0 if ver4: # in ver.4, everything got multiplied x10... multiplier = 10 else: multiplier = 1 # TURF INKED EXP if results[i]["player_result"]["game_paint_point"] >= 200: points_gained += 1 * multiplier if results[i]["player_result"]["game_paint_point"] >= 400: points_gained += 1 * multiplier # +20 total (post-ver.4) # WIN BONUS EXP if result == "victory": # https://github.com/frozenpandaman/splatnet2statink/issues/52#issuecomment-414609225 if results[i]["other_estimate_fes_power"] < 1400: points_gained += 3 * multiplier elif 1400 <= results[i]["other_estimate_fes_power"] < 1700: points_gained += 4 * multiplier elif 1700 <= results[i]["other_estimate_fes_power"] < 1800: points_gained += 5 * multiplier elif 1800 <= results[i]["other_estimate_fes_power"] < 1900: points_gained += 6 * multiplier elif results[i]["other_estimate_fes_power"] >= 1900: points_gained += 7 * multiplier if ver4: synergy_mult = results[i]["uniform_bonus"] if synergy_mult > 1: points_gained *= synergy_mult # SPECIAL CASE - KING/QUEEN MAX if title_before == 4 and title_after == 4 and fest_exp_after == 0: payload["fest_exp"] = 0 # already at max, no exp gained # SPECIAL CASE - CHAMPION (999) TO KING/QUEEN elif title_before == 3 and title_after == 4: # fes_point == 0 should always be true (reached max). if reaching max *exactly*, # then fest_exp = 999 - points_gained. if curtailed rollover, no way to know # e.g. even if user got +70, max (999->0) could have been reached after, say, +20 payload["fest_exp"] = None else: if title_before == title_after: # within same title fest_rank_rollover = 0 elif title_before == 0 and title_after == 1: # fanboy/girl (100) to fiend (250) fest_rank_rollover = 10 * multiplier elif title_before == 1 and title_after == 2: # fiend (250) to defender (500) fest_rank_rollover = 25 * multiplier elif title_before == 2 and title_after == 3: # defender (500) to champion (999) fest_rank_rollover = 50 * multiplier payload["fest_exp"] = fest_rank_rollover + fest_exp_after - points_gained # avoid mysterious, fatal -1 case... if payload["fest_exp"] and payload["fest_exp"] < 0: payload["fest_exp"] = 0 else: # not splatfest title_before = None # required to set for scoreboard param ##################### ## SPLATFEST VER.4 ## ##################### if ver4 and mode == "fes": # indiv. 
& team fest_powers in above section payload["my_team_win_streak"] = results[i]["my_team_consecutive_win"] payload["his_team_win_streak"] = results[i]["other_team_consecutive_win"] if results[i]["event_type"]["key"] == "10_x_match": payload["special_battle"] = "10x" elif results[i]["event_type"]["key"] == "100_x_match": payload["special_battle"] = "100x" total_clout_after = results[i]["contribution_point_total"] # after payload["total_clout_after"] = total_clout_after if lobby == "fes_team": # normal try: payload["my_team_nickname"] = results[i]["my_team_another_name"] except: pass try: payload["his_team_nickname"] = results[i]["other_team_another_name"] except: pass # synergy bonus if synergy_mult == 0: # always 0 in pro synergy_mult = 1.0 payload["synergy_bonus"] = synergy_mult # max 2.0 # clout clout = results[i]["contribution_point"] # in pro, = his_team_estimate_fest_power # in normal, = turfinked (if victory: +1000) -> = int(round(floor((clout * synergy_bonus) + 0.5))) payload["clout"] = clout payload["total_clout"] = total_clout_after - clout # before ################ ## SCOREBOARD ## ################ if YOUR_COOKIE != "" or debug: # requires online (or battle json). if no cookie, don't do - will fail mystats = [mode, rule, result, k_or_a, death, special, weapon, level_before, rank_before, turfinked, title_before, principal_id, star_rank, gender, species] if filename == None: payload = set_scoreboard(payload, bn, mystats, s_flag) else: payload = set_scoreboard(payload, bn, mystats, s_flag, results[0]) ################## ## IMAGE RESULT ## ################## if debug: url = "https://app.splatoon2.nintendo.net/api/share/results/{}".format(bn) share_result = requests.post(url, headers=app_head, cookies=dict(iksm_session=YOUR_COOKIE)) if share_result.ok: image_result_url = share_result.json().get("url") if image_result_url: image_result = requests.get(image_result_url, stream=True) if image_result.ok: if not s_flag: # normal scoreboard payload["image_result"] = BytesIO(image_result.content).getvalue() else: players = [0] * 8 # in case battles are < 8 people. mark missing player-positions my_missing = 4 - (len(payload["splatnet_json"]["my_team_members"]) + 1) their_missing = 4 - len(payload["splatnet_json"]["other_team_members"]) for u, v in zip(list(range(4)), list(range(3, -1, -1))): if my_missing >= u+1: # 1, 2, 3, 4 players[v] = None # from back of my team's half for u, v in zip(list(range(4)), list(range(7, -3, -1))): if their_missing >= u+1: players[v] = None for p in range(len(payload["players"])): # by default, covers all non-me names. # could be modified, e.g. 
in quad squads only cover enemy names try: is_player_me = payload["players"][p]["is_me"] except: is_player_me = None lowest_zero = players.index(0) # fill in 0s (uninits) with values players[lowest_zero] = is_player_me if result == "defeat": # enemy team is on top players = players[4:] + players[:4] scoreboard = blackout(image_result.content, players) bytes_result = BytesIO() scoreboard.save(bytes_result, "PNG") payload["image_result"] = bytes_result.getvalue() if sendgears: # if most recent url_profile = "https://app.splatoon2.nintendo.net/api/share/profile" if stage == 9999: # fav_stage can't be Shifty Station stages_ints = [k for k in translate_stages.keys() if k != 9999 and isinstance(k, int)] fav_stage = random.choice(stages_ints) else: fav_stage = stage settings = {'stage': fav_stage, 'color': translate_profile_color[random.randrange(0, 6)]} share_result = requests.post(url_profile, headers=app_head, cookies=dict(iksm_session=YOUR_COOKIE), data=settings) if share_result.ok: profile_result_url = share_result.json().get("url") if profile_result_url: profile_result = requests.get(profile_result_url, stream=True) if profile_result.ok: payload["image_gear"] = BytesIO(profile_result.content).getvalue() ########## ## GEAR ## ########## https://github.com/fetus-hina/stat.ink/blob/master/doc/api-2/post-battle.md#gears-structure headgear_id = results[i]["player_result"]["player"]["head"]["id"] clothing_id = results[i]["player_result"]["player"]["clothes"]["id"] shoes_id = results[i]["player_result"]["player"]["shoes"]["id"] payload["gears"] = {'headgear': {'secondary_abilities': []}, 'clothing': {'secondary_abilities': []}, 'shoes': {'secondary_abilities': []}} payload["gears"]["headgear"]["gear"] = "#{}".format(headgear_id) payload["gears"]["clothing"]["gear"] = "#{}".format(clothing_id) payload["gears"]["shoes"]["gear"] = "#{}".format(shoes_id) ############### ## ABILITIES ## ############### https://github.com/fetus-hina/stat.ink/blob/master/doc/api-1/constant/ability.md headgear_subs, clothing_subs, shoes_subs = ([-1,-1,-1] for i in range(3)) for j in range(3): try: headgear_subs[j] = results[i]["player_result"]["player"]["head_skills"]["subs"][j]["id"] except: headgear_subs[j] = '-1' try: clothing_subs[j] = results[i]["player_result"]["player"]["clothes_skills"]["subs"][j]["id"] except: clothing_subs[j] = '-1' try: shoes_subs[j] = results[i]["player_result"]["player"]["shoes_skills"]["subs"][j]["id"] except: shoes_subs[j] = '-1' headgear_main = results[i]["player_result"]["player"]["head_skills"]["main"]["id"] clothing_main = results[i]["player_result"]["player"]["clothes_skills"]["main"]["id"] shoes_main = results[i]["player_result"]["player"]["shoes_skills"]["main"]["id"] payload["gears"]["headgear"]["primary_ability"] = translate_ability.get(int(headgear_main), "") payload["gears"]["clothing"]["primary_ability"] = translate_ability.get(int(clothing_main), "") payload["gears"]["shoes"]["primary_ability"] = translate_ability.get(int(shoes_main), "") for j in range(3): payload["gears"]["headgear"]["secondary_abilities"].append(translate_ability.get(int(headgear_subs[j]), "")) payload["gears"]["clothing"]["secondary_abilities"].append(translate_ability.get(int(clothing_subs[j]), "")) payload["gears"]["shoes"]["secondary_abilities"].append(translate_ability.get(int(shoes_subs[j]), "")) ############# ## DRY RUN ## ############# if t_flag: # -t provided payload["test"] = "dry_run" # works the same as 'validate' for now #************** #*** OUTPUT *** #************** if debug: print("") 
print(json.dumps(payload).replace("'", "\'")) # adding support for a custom key? add to custom_key_exists() method, and # to "main process" section of monitor_battles, too. and the docs/wiki page of course elif lobby == "private" and custom_key_exists("ignore_private", True): if m_flag != -1: # monitoring mode pass else: print("Battle #{}: skipping upload based on ignore_private key.".format(i+1)) else: try: os.mkdir("Battles") except: pass f = open(os.path.join("Battles", str(i) + ".txt"), "w"); f.write(json.dumps(payload).replace("'", "\'")); f.close();
def id(self): seed = str(' '.join((self.method, self.url))) return str(uuid5(self.collection.uuid, seed))
def yaml_prep_landsat(scene_dir): """ Prepare individual L8 scene directory containing L8 cog products converted from ESPA-ordered L1T scenes. """ # scene_name = scene_dir.split('/')[-2][:26] scene_name = split_all(scene_dir)[-2] logging.info(f"Preparing scene {scene_name}") logging.info(f"Scene path {scene_dir}") # find all cog prods prod_paths = glob.glob(scene_dir + '*.tif') # print ( 'paths: {}'.format(prod_paths) ) # for i in prod_paths: print ( i ) logging.info(prod_paths) # date time assumed eqv for start and stop - this isn't true and could be # pulled from .xml file (or scene dir) not done yet for sake of progression t0 = parse(find_l8_datetime(scene_dir)) # get polorisation from each image product (S2 band) images = { band_name_landsat(prod_path): { 'path': str(split_all(prod_path)[-1]) } for prod_path in prod_paths } logging.info(images) # trusting bands coaligned, use one to generate spatial bounds for all projection, extent = get_geometry( os.path.join(str(scene_dir), images['blue']['path'])) # parse esa l2a prod metadata file for reference scene_genesis = glob.glob(scene_dir + '*.xml')[0] if os.path.exists(scene_genesis): scene_genesis = os.path.basename(scene_genesis) else: scene_genesis = ' ' new_id = str(uuid.uuid5(uuid.NAMESPACE_URL, scene_name)) platform_code = "" instrument_name = "" if "LE08_" in scene_name: logging.info(f"{scene_name} detected as landsat 8") platform_code = "LANDSAT_8" instrument_name = "OLI" elif "LE07_" in scene_name: logging.info(f"{scene_name} detected as landsat 7") platform_code = "LANDSAT_7" instrument_name = "ETM" elif "LT05_" in scene_name: logging.info(f"{scene_name} detected as landsat 5") platform_code = "LANDSAT_5" instrument_name = "TM" elif "LT04_" in scene_name: logging.info(f"{scene_name} detected as landsat 4") platform_code = "LANDSAT_4" instrument_name = "TM" else: raise Exception(f"Unknown platform {scene_name}") return { 'id': new_id, 'processing_level': "espa_l2a2cog_ard", 'product_type': "optical_ard", 'creation_dt': str(datetime.today().strftime('%Y-%m-%d %H:%M:%S')), 'platform': { 'code': platform_code }, 'instrument': { 'name': instrument_name }, 'extent': create_metadata_extent(extent, t0, t0), 'format': { 'name': 'GeoTiff' }, 'grid_spatial': { 'projection': projection }, 'image': { 'bands': images }, 'lineage': { 'source_datasets': scene_genesis, } }
def id(self): return str(uuid5(self.collection.uuid, str(self.tag)))
def save(self, *args, **kwargs): if self.uuid == NIL: self.uuid = uuid5(settings.NAMESPACE_HOST_UUID, self.user.username) super().save(*args, **kwargs)
def urn(value, namespace='url'): if isinstance(value, unicode): value = value.encode('utf-8') return unicode(uuid5(_namespaces[namespace], value).urn)
def get_device_id(self): return str(uuid.uuid5(uuid.NAMESPACE_DNS, socket.gethostname()))
def get_uuid(self, *args, **kwargs): assert self.user is not None and self.user.id is not None, "User ID required for get_uuid" assert self.exercise_id is not None, "Exercise ID required for get_uuid" namespace = uuid.UUID(self.user.id) return uuid.uuid5(namespace, self.exercise_id.encode("utf-8")).hex
def gen_uuid(name): return str(uuid.uuid5(uuid_namespace, str(name)))
def add(self, topic):
    """add a new topic with its UUID"""
    self.insert1(
        dict(topic_id=uuid.uuid5(top_level_namespace_id, topic),
             topic=topic))
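Together with the make() snippet a few entries above, this forms a two-level scheme: topics get uuid5 ids under a fixed top-level namespace, and items get uuid5 ids under their topic's id. A standalone sketch of that chaining with an arbitrary top-level namespace:

import uuid

top_level_namespace_id = uuid.UUID('00000000-0000-0000-0000-000000000000')  # arbitrary example

topic = 'Neuroanatomy'
topic_id = uuid.uuid5(top_level_namespace_id, topic)
item_id = uuid.uuid5(topic_id, 'Hippocampus')

# Both levels are deterministic, so re-running produces the same ids.
assert item_id == uuid.uuid5(uuid.uuid5(top_level_namespace_id, topic), 'Hippocampus')
print(topic_id, item_id)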
def __init__(self, name: str) -> None: self.uuid = uuid.uuid5(NAMESPACE_ID, name) self.name = name self.coupons = set() self.balance = Balance()
#!/usr/bin/python
# -*- coding: UTF-8 -*-

import uuid

name = "test_name"
# uuid3/uuid5 take a uuid.UUID object as the namespace, not a plain string,
# so derive a namespace UUID from the string first
namespace = uuid.uuid5(uuid.NAMESPACE_DNS, "test_namespace")

print uuid.uuid1()
# see the Python docs for the variants that take arguments
print uuid.uuid3(namespace, name)
print uuid.uuid4()
print uuid.uuid5(namespace, name)
def modifyMMD(myxml, checkId=False, collections=None, thredds=False): # Modify the XML generated with information from THREDDS #print('Parsing XML') #myxml = ET.parse(os.path.join(dstdir,outfile)) ns_map = {'mmd': "http://www.met.no/schema/mmd", 'gml': "http://www.opengis.net/gml"} myroot = myxml.getroot() # Check and potentially modify identifier if checkId: mynode = myxml.find("./mmd:metadata_identifier", myroot.nsmap) #print(mynode.text, ds.url.replace('catalog.xml?dataset=','')) # If ID is not a UUID, replace with a newly generated UUID if mynode is not None: try: uuidver = uuid.UUID(mynode.text).version except ValueError: print("\tNot containing an UUID, replacing identifier.") try: mynode.text = str(uuid.uuid5(uuid.NAMESPACE_URL, ds.url.replace('catalog.xml?dataset=',''))) except TypeError as e: print(e) else: try: mynode = ET.Element("{http://www.met.no/schema/mmd}metadata_identifier") mynode.text = str(uuid.uuid5(uuid.NAMESPACE_URL, ds.url.replace('catalog.xml?dataset=',''))) except TypeError as e: print(e) try: myroot.insert(0, mynode) except Exception as e: print(e) # Add metadata_status mynode = myxml.find("./mmd:metadata_status", myroot.nsmap) if mynode is None: mynode = ET.Element("{http://www.met.no/schema/mmd}metadata_status") mynode.text = 'Active' myroot.insert(4, mynode) # Add collections, ADC to all and additional if needed # More checking needed mynode = myxml.find("./mmd:collection", myroot.nsmap) if mynode is None: mynode = ET.Element("{http://www.met.no/schema/mmd}collection") mynode.text = 'ADC' myroot.insert(3,mynode) if collections: for el in collections.split(','): mynode = ET.Element("{http://www.met.no/schema/mmd}collection") mynode.text = el.strip() myroot.insert(3,mynode) # Add iso_topic_category # Most datasets belong to this, quick hack for now mynode = ET.Element("{http://www.met.no/schema/mmd}iso_topic_category") mynode.text = 'Not available' myroot.insert(8, mynode) # Check and potentially modify activity_type mynode = myxml.find("./mmd:activity_type",namespaces=myroot.nsmap) if mynode is None: mynode = ET.Element("{http://www.met.no/schema/mmd}activity_type") mynode.text = 'Not available' myroot.insert(9, mynode) # Check and potentially modify operational_status mynode = myxml.find("./mmd:operational_status",namespaces=myroot.nsmap) if mynode is None: mynode = ET.Element("{http://www.met.no/schema/mmd}operational_status") mynode.text = 'Not available' myroot.insert(9, mynode) # Check and potentially modify license # mynode = myxml.find("./mmd:use_constraing",namespaces=myroot.nsmap) # if mynode is not None: # mynode.text = 'Not available' if thredds: # Add related_information related_information = ET.Element( "{http://www.met.no/schema/mmd}related_information") related_information_resource = ET.SubElement(related_information, '{http://www.met.no/schema/mmd}resource') related_information_resource.text = ds.url.replace('xml','html') related_information_type = ET.SubElement(related_information, '{http://www.met.no/schema/mmd}type') related_information_type.text = 'Dataset landing page' related_information_description = ET.SubElement(related_information, '{http://www.met.no/schema/mmd}description') related_information_description.text = 'Dataset landing page' myroot.insert(-1,related_information) # Add data_access (not done automatically) data_access = ET.Element( '{http://www.met.no/schema/mmd}data_access') data_access_resource = ET.SubElement(data_access, '{http://www.met.no/schema/mmd}resource') data_access_resource.text = ds.download_url() 
data_access_type = ET.SubElement(data_access, '{http://www.met.no/schema/mmd}type') data_access_type.text = 'HTTP' data_access_description = ET.SubElement(data_access, '{http://www.met.no/schema/mmd}description') data_access_description.text = 'Direct download of datafile' myroot.insert(-1,data_access) data_access = ET.Element( '{http://www.met.no/schema/mmd}data_access') data_access_resource = ET.SubElement(data_access, '{http://www.met.no/schema/mmd}resource') data_access_resource.text = ds.opendap_url() data_access_type = ET.SubElement(data_access, '{http://www.met.no/schema/mmd}type') data_access_type.text = 'OPeNDAP' data_access_description = ET.SubElement(data_access, '{http://www.met.no/schema/mmd}description') data_access_description.text = 'OPeNDAP access to dataset' myroot.insert(-1,data_access) data_access = ET.Element( '{http://www.met.no/schema/mmd}data_access') data_access_resource = ET.SubElement(data_access, '{http://www.met.no/schema/mmd}resource') data_access_resource.text = ds.wms_url() data_access_type = ET.SubElement(data_access, '{http://www.met.no/schema/mmd}type') data_access_type.text = 'OGC WMS' data_access_description = ET.SubElement(data_access, '{http://www.met.no/schema/mmd}description') data_access_description.text = 'OGC WMS GetCapabilities URL' myroot.insert(-1,data_access) return(myxml)
def _get_union_type_id(self, union_type): base_type_id = ','.join( str(c.id) for c in union_type.children(self.schema)) return uuid.uuid5(s_types.TYPE_ID_NAMESPACE, base_type_id)