def test_datetime_to_uuid(self):
    """Insert two TimeUUID columns and verify datetime-bounded slicing.

    Builds a timeline of datetimes around two inserts, then checks that
    column_start/column_finish (given as datetimes) select the expected
    columns from the TimeUUID-comparator column family.
    """
    cf_time = TestTimeUUIDs.cf_time
    key = 'key1'
    timeline = []
    timeline.append(datetime.now())
    time1 = uuid1()
    col1 = {time1: '0'}
    cf_time.insert(key, col1)
    time.sleep(1)
    timeline.append(datetime.now())
    time2 = uuid1()
    col2 = {time2: '1'}
    cf_time.insert(key, col2)
    time.sleep(1)
    timeline.append(datetime.now())
    cols = {time1: '0', time2: '1'}
    # Open-ended slices should return both columns.
    assert_equal(cf_time.get(key, column_start=timeline[0]), cols)
    assert_equal(cf_time.get(key, column_finish=timeline[2]), cols)
    # Fully bounded slice returns both columns.
    # (The original asserted this twice; the duplicate was removed.)
    assert_equal(cf_time.get(key, column_start=timeline[0], column_finish=timeline[2]), cols)
    # Tight bounds isolate each column.
    assert_equal(cf_time.get(key, column_start=timeline[0], column_finish=timeline[1]), col1)
    assert_equal(cf_time.get(key, column_start=timeline[1], column_finish=timeline[2]), col2)
    cf_time.remove(key)
def createVmResource(image):
    """Build a stopped Vm resource backed by one copy-on-write disk image.

    :param image: id of the disk image backing the VM's single disk
    :return: a populated Vm instance
    """
    # Boot disk: ephemeral, zero-capacity, cloned copy-on-write from `image`.
    boot_disk = Disk()
    boot_disk.flavor = "some-disk-flavor"
    boot_disk.id = str(uuid.uuid1())
    boot_disk.persistent = False
    boot_disk.new_disk = True
    boot_disk.capacity_gb = 0
    boot_disk.image = DiskImage()
    boot_disk.image.id = image
    boot_disk.image.clone_type = CloneType.COPY_ON_WRITE
    boot_disk.flavor_info = Flavor()
    boot_disk.flavor_info.name = "some-disk-flavor"
    boot_disk.flavor_info.cost = []

    # The VM itself: stopped, with a cpu/memory quota cost on its flavor.
    vm = Vm()
    vm.id = str(uuid.uuid1())
    vm.flavor = "some-vm-flavor"
    vm.state = State.STOPPED
    vm.flavor_info = Flavor()
    vm.flavor_info.name = "some-vm-flavor"
    vm.flavor_info.cost = [
        QuotaLineItem("vm.cpu", "1", QuotaUnit.COUNT),
        QuotaLineItem("vm.memory", "0.5", QuotaUnit.GB),
    ]
    vm.disks = [boot_disk]
    return vm
def setUp(self):
    """Reset the test database and seed one normal user and one moderator."""
    # clean DB — each test starts from empty collections.
    db.users.drop()
    db.events.drop()
    db.groups.drop()

    def _create_user(name, roles):
        # Helper consolidating the (previously duplicated) user creation:
        # username == password, email is <name>@localhost, fresh activation
        # key, and the account is pre-activated.
        activation_key = unicode(uuid.uuid1())
        user = User.create(username=name,
                           password=name,
                           roles=roles,
                           signup_email=u'%s@localhost' % name,
                           activation_key=activation_key,
                           active=True)
        self.assertNotEqual(user, None)
        return user

    # create normal user
    _create_user(u'test', [u'authenticated'])
    # create a moderator
    _create_user(u'mod', [u'authenticated', u'moderator'])
def build_plentry_adds(playlist_id, song_ids):
    """Create playlist-entry add mutations, chaining entries via client ids.

    :param playlist_id: id of the playlist receiving the entries
    :param song_ids: ordered song ids to append
    :return: list of {"create": ...} mutation dicts
    """
    mutations = []
    prev_id = None
    cur_id = str(uuid1())
    next_id = str(uuid1())
    last_index = len(song_ids) - 1
    for i, song_id in enumerate(song_ids):
        details = {
            "clientId": cur_id,
            "creationTimestamp": "-1",
            "deleted": False,
            "lastModifiedTimestamp": "0",
            "playlistId": playlist_id,
            # All Access tracks (ids starting with "T") use source 2.
            "source": 2 if song_id.startswith("T") else 1,
            "trackId": song_id,
        }
        # Link each entry to its neighbours by client id.
        if i > 0:
            details["precedingEntryId"] = prev_id
        if i < last_index:
            details["followingEntryId"] = next_id
        mutations.append({"create": details})
        prev_id, cur_id, next_id = cur_id, next_id, str(uuid1())
    return mutations
def test():
    """Exercise zstack-cli login/logout prompts for admin and two fresh accounts."""
    import uuid
    # Admin login: the CLI prompt must echo the logged-in account name.
    login_output = login_cli_by_account('admin', 'password')
    if login_output.find('%s >>>' % ('admin')) < 0:
        test_util.test_fail('zstack-cli is not display correct name for logined account: %s' % (login_output))
    # First throwaway account; password is the sha512 digest of the random name.
    account_name1 = uuid.uuid1().get_hex()
    account_pass1 = hashlib.sha512(account_name1).hexdigest()
    test_account1 = test_account.ZstackTestAccount()
    test_account1.create(account_name1, account_pass1)
    test_obj_dict.add_account(test_account1)
    # NOTE(review): login passes the account *name* as password, not
    # account_pass1 — presumably login_cli_by_account hashes internally; confirm.
    login_output = login_cli_by_account(account_name1, account_name1)
    if login_output.find('%s >>>' % (account_name1)) < 0:
        test_util.test_fail('zstack-cli is not display correct name for logined account: %s' % (login_output))
    # Second throwaway account.
    account_name2 = uuid.uuid1().get_hex()
    account_pass2 = hashlib.sha512(account_name2).hexdigest()
    test_account2 = test_account.ZstackTestAccount()
    test_account2.create(account_name2, account_pass2)
    test_obj_dict.add_account(test_account2)
    # NOTE(review): test_account_uuid2 is assigned but never used afterwards.
    test_account_uuid2 = test_account2.get_account().uuid
    login_output = login_cli_by_account(account_name2, account_name2)
    if login_output.find('%s >>>' % (account_name2)) < 0:
        test_util.test_fail('zstack-cli is not display correct name for logined account %s' % (login_output))
    # After logout the prompt should show the anonymous '- >>>' marker.
    logout_output = logout_cli()
    if logout_output.find('- >>>') < 0:
        test_util.test_fail('zstack-cli is not display correct after logout: %s' % (login_output))
    # Cleanup: delete both accounts and unregister them from the tracker.
    test_account1.delete()
    test_account2.delete()
    test_obj_dict.rm_account(test_account1)
    test_obj_dict.rm_account(test_account2)
def test_user_tests(self):
    """Verify per-user scheduled / in-progress / completed test listings."""
    m = self.model

    def mock_test_definition(title, cluster='bdplab', user='******'):
        # Minimal stand-in for a real test-definition payload
        # (the default user value was redacted upstream).
        return {'title': title, 'cluster': cluster, 'user': user}

    # Schedule five tests across three users; all start out 'scheduled'.
    test1 = m.schedule_test(uuid.uuid1(), 'ryan', 'bdplab', mock_test_definition('1'))
    test2 = m.schedule_test(uuid.uuid1(), 'ryan', 'bdplab', mock_test_definition('2'))
    test3 = m.schedule_test(uuid.uuid1(), 'bob', 'bdplab', mock_test_definition('3'))
    test4 = m.schedule_test(uuid.uuid1(), 'mary', 'bdplab', mock_test_definition('4'))
    test5 = m.schedule_test(uuid.uuid1(), 'mary', 'bdplab', mock_test_definition('5'))
    # Scheduled listings are returned most-recent-first.
    self.assertEquals([r['title'] for r in m.get_user_scheduled_tests('ryan')], ['2','1'])
    self.assertEquals([r['title'] for r in m.get_user_scheduled_tests('bob')], ['3'])
    self.assertEquals([r['title'] for r in m.get_user_scheduled_tests('mary')], ['5','4'])
    # Moving a test to in_progress removes it from the scheduled listing
    # and surfaces it in the in-progress listing.
    m.update_test_status(test1, 'in_progress')
    m.update_test_status(test4, 'in_progress')
    self.assertEquals([r['title'] for r in m.get_user_scheduled_tests('ryan')], ['2'])
    self.assertEquals([r['title'] for r in m.get_user_scheduled_tests('bob')], ['3'])
    self.assertEquals([r['title'] for r in m.get_user_scheduled_tests('mary')], ['5'])
    self.assertEquals([r['title'] for r in m.get_user_in_progress_tests('ryan')], ['1'])
    self.assertEquals([r['title'] for r in m.get_user_in_progress_tests('mary')], ['4'])
    # Completion is likewise tracked per user.
    m.update_test_status(test2, 'completed')
    m.update_test_status(test5, 'completed')
    self.assertEquals([r['title'] for r in m.get_user_completed_tests('ryan')], ['2'])
    self.assertEquals([r['title'] for r in m.get_user_completed_tests('mary')], ['5'])
def recv_announce(self, expiration_time=None, traversal_id=None):
    """Inject an Announcement message carrying a freshly generated node guid.

    :param expiration_time: forwarded to recv_msg unchanged
    :param traversal_id: traversal identifier; a new uuid is generated when
        a falsy value is given
    :return: whatever recv_msg returns for the announcement
    """
    msg = message.Announcement()
    self.guid = str(uuid.uuid1())
    msg.sender_id = self.guid
    # Preserve the original truthiness check: any falsy traversal_id
    # (None, empty string) gets replaced with a fresh uuid.
    msg.traversal_id = traversal_id if traversal_id else str(uuid.uuid1())
    return self.recv_msg(msg, expiration_time=expiration_time)
def setUp(self):
    """Configure the app for testing and seed one Package and one HardwareTemplate."""
    app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:////tmp/test.db'
    app.config['SPOTCLOUD_USER'] = '******'
    app.config['SPOTCLOUD_PASSWD'] = 'password'
    # Rebuild the schema from scratch for every test run.
    db.drop_all()
    db.create_all()
    self.app = app.test_client()

    # A ready-state package with a random ECP uuid.
    self.pkg = Package(ecp_uuid=str(uuid.uuid1()),
                       nova_id='dummy_nova_id',
                       state='ready')
    db.session.add(self.pkg)
    db.session.commit()

    # A small hardware template to pair with it.
    self.hardware = HardwareTemplate(ecp_uuid=str(uuid.uuid1()),
                                     name='test',
                                     nova_id='dummy_nova_id',
                                     cpus=2,
                                     arch='i386',
                                     memory=8000)
    db.session.add(self.hardware)
    db.session.commit()
def request(self, view, request_type, callback, location=None):
    """Send request to daemon process

    :type view: sublime.View
    :type request_type: str
    :type callback: callable
    :type location: type of (int, int) or None
    """
    logger.info('Sending request to daemon for "{0}"'.format(request_type))
    if location is None:
        location = view.sel()[0].begin()
    current_line, current_column = view.rowcol(location)
    source = view.substr(sublime.Region(0, view.size()))
    # uuid1().hex on Python 3; the deprecated get_hex() accessor on Python 2.
    request_uuid = uuid1().hex if PY3 else uuid1().get_hex()
    self.stdin.put_nowait((callback, {
        'source': source,
        'line': current_line + 1,
        'offset': current_column,
        'filename': view.file_name() or '',
        'type': request_type,
        'uuid': request_uuid,
    }))
def setUp(self):
    """Register a mock provider flavor that SSL-certificate tests can reference."""
    super(SSLCertificateControllerTest, self).setUp()
    # Random ids keep tests isolated from each other.
    self.project_id = str(uuid.uuid1())
    self.service_name = str(uuid.uuid1())
    self.flavor_id = str(uuid.uuid1())
    # create a mock flavor to be used by new service creations
    flavor_json = {
        "id": self.flavor_id,
        "providers": [{
            "provider": "mock",
            "links": [{
                "href": "http://mock.cdn",
                "rel": "provider_url",
            }],
        }],
    }
    response = self.app.post(
        '/v1.0/flavors',
        params=json.dumps(flavor_json),
        headers={"Content-Type": "application/json",
                 "X-Project-ID": self.project_id})
    # Flavor creation must succeed before any service test runs.
    self.assertEqual(201, response.status_code)
def file_upload(request):
    """Save an uploaded flv/mp4 video under a uuid name and build its thumbnail.

    :param request: request carrying 'file' in FILES and 'type' in DATA
    :return: Response with the generated file/thumbnail names, or an error
        payload when no file was uploaded
    """
    # Renamed locals: the originals shadowed the builtins `file` and `type`.
    upload = request.FILES.get('file', None)
    content_type = request.DATA.get('type', None)
    if upload:
        # TODO: Streaming Video (FLV, F4V, MP4, 3GP) Streaming Audio (MP3, F4A, M4A, AAC)
        # Supported MIME types mapped to extensions; this replaces two
        # previously duplicated if/elif branches.
        extensions = {u'video/x-flv': '.flv', u'video/mp4': '.mp4'}
        file_name = ''
        thumbnail = ''
        convert = Converter()
        if content_type in extensions:
            uuid_string = str(uuid.uuid1())
            file_name = uuid_string + extensions[content_type]
            thumbnail = uuid_string + '.jpg'
        if file_name != '':
            file_path = FILE_PATH + file_name
            # `with` closes the file; the original's explicit close() inside
            # the with-block was redundant and has been dropped.
            with open(file_path, 'wb+') as destination:
                for chunk in upload.chunks():
                    destination.write(chunk)
            # Grab a thumbnail frame 10 seconds in.
            convert.thumbnail(file_path, 10, FILE_PATH + thumbnail)
            temp_file = TempFile(name=file_name, path=file_path)
            temp_file.save()
        return Response({
            'file_name': file_name,
            'thumbnail': thumbnail
        })
    else:
        return Response({
            'status': 'Current just support .mp4 && .flv.'
        })
def test_stmtref_in_context_stmt(self):
    """A statement whose context references an existing statement via
    StatementRef must store all its context fields correctly."""
    # PUT an initial statement so there is something to reference.
    stmt_guid = str(uuid.uuid1())
    existing_stmt = json.dumps({'actor':{'objectType':'Agent','mbox':'mailto:[email protected]'},
        'verb': {"id":"verb:verb/url/outer"},"object": {'id':'act:activityy16'}})
    path = "%s?%s" % (reverse('lrs:statements'), urllib.urlencode({"statementId":stmt_guid}))
    response = self.client.put(path, existing_stmt, content_type="application/json",
        Authorization=self.auth, X_Experience_API_Version=settings.XAPI_VERSION)
    self.assertEqual(response.status_code, 204)
    # POST a second statement whose context carries a registration, context
    # activities, revision/platform/language, and a StatementRef to the first.
    guid = str(uuid.uuid1())
    stmt = json.dumps({'actor':{'objectType':'Agent','mbox':'mailto:[email protected]'},
        'verb': {"id":"verb:verb/url"},"object": {'id':'act:activity16'},
        'context':{'registration': guid,
            'contextActivities': {'other': {'id': 'act:NewActivityID'}},
            'revision': 'foo', 'platform':'bar','language': 'en-US',
            'statement': {'objectType': 'StatementRef','id': stmt_guid}}})
    response = self.client.post(reverse('lrs:statements'), stmt, content_type="application/json",
        Authorization=self.auth, X_Experience_API_Version=settings.XAPI_VERSION)
    self.assertEqual(response.status_code, 200)
    # Re-load the stored statement and verify every context field round-tripped.
    stmt_id = json.loads(response.content)[0]
    stmt = Statement.objects.get(statement_id=stmt_id)
    activity = Activity.objects.get(id=stmt.object_activity.id)
    st = Statement.objects.get(id=stmt.id)
    self.assertEqual(st.object_activity.id, activity.id)
    self.assertEqual(st.context_registration, guid)
    self.assertEqual(st.context_revision, 'foo')
    self.assertEqual(st.context_platform, 'bar')
    self.assertEqual(st.context_language, 'en-US')
def crawCompanyNews(link):
    """Scrape a company's news page into [keyid, url, pubDate, title, flag] rows.

    The page carries two <ul> lists: TacticNewsList1 holds 'good' news and
    the hidden TacticNewsList2 holds 'bad' news. The two previously
    duplicated scrape loops are consolidated into _parse_list, and the
    original's local variable that shadowed the builtin `len` is gone.

    :param link: page content/handle understood by ThemeNewsSpiderUtils
    :return: list of [keyid, linkUrl, pubDate, title, newsFlag] rows
    """
    def _parse_list(list_context, news_flag, rows):
        # Walk every <li> item in the list and collect its link metadata.
        item_count = ThemeNewsSpiderUtils.findAllTarget(list_context, '<li')
        for _ in range(item_count):
            target = ThemeNewsSpiderUtils.divisionTarget(list_context, '<li>', '</li>')
            list_context = target['nextContext']
            item = target['targetContext']
            keyid = str(uuid.uuid1())
            linkUrl = ThemeNewsSpiderUtils.filterContextByTarget(item, '<a href="', '">')
            pubDate = ThemeNewsSpiderUtils.filterContextByTarget(item, '<span>', '</span>')
            title = ThemeNewsSpiderUtils.filterContextByTarget(item, '">', '</a>')
            # Items without a link are layout noise; skip them.
            if linkUrl != '':
                rows.append([keyid, linkUrl, pubDate, title, news_flag])

    rows = []
    good_context = ThemeNewsSpiderUtils.returnStartContext(link, '<div class="listnews" id="TacticNewsList1" >')
    _parse_list(ThemeNewsSpiderUtils.filterContextByTarget(good_context, '<ul>', '</ul>'), 'good', rows)
    bad_context = ThemeNewsSpiderUtils.returnStartContext(link, '<div class="listnews" id="TacticNewsList2" style="display:none;">')
    _parse_list(ThemeNewsSpiderUtils.filterContextByTarget(bad_context, '<ul>', '</ul>'), 'bad', rows)
    return rows
def issue_aft_token(): """ Issues a new antiforgery token to include in a page request. If it doesn't exist in the request, sets a cookie in the response. @param request @returns {string} the newly defined """ # check if the session is defined inside the request if not request.session: raise Error("missing session inside the request object; use the AntiforgeryValidate after the membership provider") encryption_key = request.session.guid cookie_token = request.cookies.get(cookie_name) new_token_defined = False if not cookie_token: #define a new token cookie_token = uuid.uuid1() new_token_defined = True else: can_decrypt, value = AesEncryptor.try_decrypt(cookie_token, encryption_key) if not can_decrypt: cookie_token = uuid.uuid1() new_token_defined = True else: # use the same value of before cookie_token = value if new_token_defined: cookie_token = str(cookie_token) encrypted_token = AesEncryptor.encrypt(cookie_token, str(encryption_key)) # the cookie will be set in response object inside global_handlers function request.set_aft_cookie = encrypted_token # return the token encrypted with AES; many calls always return a different value return AesEncryptor.encrypt(cookie_token, encryption_key)
def us_classifications(self):
    """
    Returns list of dictionaries representing us classification
    main: class
    subclass

    The main classification and each further classification produce one
    entry; the previously duplicated parse/append logic now lives in the
    _split/_entry helpers.
    """
    def _split(raw):
        # First three characters are the class, the remainder the subclass;
        # embedded spaces are padding and are stripped.
        return {'class': raw[:3].replace(' ', ''),
                'subclass': raw[3:].replace(' ', '')}

    def _entry(data, sequence):
        # One classification entry: metadata, main class id, class/subclass id.
        return [{'uuid': str(uuid.uuid1()), 'sequence': sequence},
                {'id': data['class'].upper()},
                {'id': "{class}/{subclass}".format(**data).upper()}]

    classes = []
    i = 0
    main = self.xml.classification_national.contents_of('main_classification')
    data = _split(main[0])
    if any(data.values()):
        classes.append(_entry(data, i))
        i = i + 1
    if self.xml.classification_national.further_classification:
        further = self.xml.classification_national.contents_of('further_classification')
        for classification in further:
            data = _split(classification)
            if any(data.values()):
                classes.append(_entry(data, i))
                i = i + 1
    return classes
def fetch_subreddit(subreddit, top=True, limit=None):
    """Record a subreddit's NSFW flag and submit its hot (and optionally top) posts.

    :param subreddit: subreddit name to crawl
    :param top: also fetch the top posts for every time window
    :param limit: max posts per listing (None for the API default)
    """
    sub = reddit.get_subreddit(subreddit)
    # The original had two identical create() branches differing only in the
    # nsfw flag; collapse them by passing the flag through directly.
    Subreddit.create(
        id=uuid.uuid1(),
        subreddit=subreddit,
        nsfw=bool(sub.over18)
    )
    for post in sub.get_hot(limit=limit):
        submit_reddit_post(post, subreddit, sub.over18)
    if top:
        # One listing per time window.
        topfuncs = [
            sub.get_top_from_all,
            sub.get_top_from_year,
            sub.get_top_from_month,
            sub.get_top_from_week,
            sub.get_top_from_day,
        ]
        for func in topfuncs:
            for post in func(limit=limit):
                submit_reddit_post(post, subreddit, sub.over18)
def test_time_to_uuid(self):
    """Insert two TimeUUID columns and verify timestamp-bounded slicing.

    Builds a timeline of time.time() stamps around two inserts, then checks
    that column_start/column_finish (given as timestamps) select the
    expected columns.
    """
    key = 'key1'
    timeline = []
    timeline.append(time.time())
    time1 = uuid1()
    col1 = {time1: '0'}
    self.cf_time.insert(key, col1)
    time.sleep(0.1)
    timeline.append(time.time())
    time2 = uuid1()
    col2 = {time2: '1'}
    self.cf_time.insert(key, col2)
    time.sleep(0.1)
    timeline.append(time.time())
    cols = {time1: '0', time2: '1'}
    # Open-ended slices return both columns.
    assert_equal(self.cf_time.get(key, column_start=timeline[0]), cols)
    assert_equal(self.cf_time.get(key, column_finish=timeline[2]), cols)
    # Fully bounded slice returns both columns.
    # (The original asserted this twice; the duplicate was removed.)
    assert_equal(self.cf_time.get(key, column_start=timeline[0], column_finish=timeline[2]), cols)
    # Tight bounds isolate each column.
    assert_equal(self.cf_time.get(key, column_start=timeline[0], column_finish=timeline[1]), col1)
    assert_equal(self.cf_time.get(key, column_start=timeline[1], column_finish=timeline[2]), col2)
def __init__(self, cookiesManager):
    """File-backed session store keyed by a uuid kept in the 'session' cookie.

    Loads (or creates) the session file under the module-relative .session
    directory and reads its stored values into self._values.
    """
    self._values = {}
    self._cookiesManager = cookiesManager
    # Directory (relative to this module) holding one file per session.
    self._sessionPath = ".session"
    # set session id / create session
    if self._cookiesManager.get("session", None) == None:
        # No cookie yet: mint a fresh session id and create its backing file.
        self._session_id = uuid.uuid1()
        self.create_session()
    else:
        # session id validation
        try:
            #int(self._cookiesManager.get("session", None), 16)
            # TODO: file name validation !!
            # NOTE(review): with the validation above commented out, this
            # try block can no longer fail, so the fallback below is dead
            # until real validation is restored.
            self._session_id = self._cookiesManager.get("session", None)
        except:
            # create new session
            self._session_id = uuid.uuid1()
            self.create_session()
    # set file path
    self._path = os.path.join(os.path.dirname(__file__), self._sessionPath)
    self._file_name = str(self._session_id)
    self._full_path = os.path.join(self._path, self._file_name)
    self._values = self._read_file()
def dispatch(self, methodname, vargs, kwargs):
    """Broadcast a synchronous CALL to every actor in dict_actors and gather replies.

    Returns a dict mapping each responder to its partial result. Raises if
    the timeout controller fires before any result arrives; returns the
    partial results collected so far if it fires later.
    """
    final_result = {}
    rpc_id_list = []
    # Common message envelope; per-target fields (TO, RPC_ID) are filled below.
    msg = {}
    msg[METHOD] = methodname
    msg[PARAMS] = vargs
    msg[MODE] = SYNC
    msg[FROM] = self.actor.aref
    msg[TYPE] = CALL
    rpc_id = str(uuid.uuid1())
    msg[SRC] = self.actor.channel
    # Schedule the timeout watchdog up front. rpc_id_list is the same list
    # object mutated in the loop below, so by the time the timer fires it
    # holds every pending rpc id.
    if methodname == 'keep_alive':
        later(20, send_timeout_multi, self.actor.channel, rpc_id_list)
    else:
        # Per-method timeout comes from the (shared) syncList of any actor.
        later(int(self.dict_actors.values()[0].syncList.get(methodname)), send_timeout_multi, self.actor.channel, rpc_id_list)
    # Fan the call out: one fresh rpc id per target actor.
    for a in self.dict_actors.values():
        msg[TO] = a.client.aref
        rpc_id = str(uuid.uuid1())
        msg[RPC_ID] = rpc_id
        a.client.set_pending(msg[RPC_ID])
        rpc_id_list.append(rpc_id)
        # Shallow copy so each send keeps its own TO/RPC_ID values.
        msg2 = copy.copy(msg)
        a.client.out.send(msg2)
    # Collect one reply per actor (or a timeout notification).
    for a in self.dict_actors.values():
        from_result, parcial_result = self.actor.receive_result()
        if from_result == 'timeout_controller':
            if not final_result:
                # Python 2 raise syntax; nothing arrived before the timeout.
                raise Exception, 'The timeout has expired'
            else:
                # Timeout after some replies: return what we have.
                return final_result
        final_result[from_result] = parcial_result#.append(parcial_result)
    return final_result
def sendMessageToInputQueue(q, anim_name, frame_file, type, userid, message=None):
    """Enqueue a render-frame or kill-command message onto the SQS input queue.

    :param q: boto SQS queue to write to
    :param anim_name: animation the frame belongs to (used for 'Frame')
    :param frame_file: frame file reference (used for 'Frame')
    :param type: message type, 'Frame' or 'killCommand'
    :param userid: user/bucket identifier
    :param message: command payload for 'killCommand' messages. New optional
        parameter: the original referenced an undefined name `message` here,
        which raised NameError for every killCommand.
    """
    # Data required by the API
    if type == 'Frame':
        data = {
            'msgtype': str(type),
            'submitdate': time.strftime("%Y-%m-%dT%H:%M:%S", time.gmtime()),
            'key': str(uuid.uuid1()),  # secret key/anwer to s3 instead of boto config file
            'userid': str(userid),  # or bucketname attribute
            'anim_name': str(anim_name),
            'frame_file': str(frame_file)
        }
    elif type == 'killCommand':
        data = {
            'msgtype': str(type),
            'submitdate': time.strftime("%Y-%m-%dT%H:%M:%S", time.gmtime()),
            'key': str(uuid.uuid1()),  # secret key/anwer to s3 instead of boto config file
            'userid': str(userid),  # or bucketname attribute
            'command': str(message)
        }
    # Connect to SQS and open the queue
    # Put the message in the queue
    m = RawMessage()
    m.set_body(json.dumps(data))
    status = q.write(m)
def parse_contents(self, response):
    """Parse a building detail page into a BuildingItem (plus per-space data).

    NOTE(review): a SpaceItem is filled for each house-info block but the
    items are never yielded or collected — only the building is returned.
    Presumably each `space` should be yielded/stored; confirm against the
    spider's item pipeline.
    """
    building = BuildingItem()
    building['id'] = uuid.uuid1()
    building['building_name_cn'] = response.css("div.buildingName > h1::text").extract()[0]
    building['address_cn'] = response.css("div.baseInfo > span.regionName::text").extract()[0]
    building['keyword_tag'] = response.css("div.baseInfo > span.areaRange::text").extract()[0]
    # The feature list is positional: the indices below assume a fixed page layout.
    building['floor_count'] = response.css("ul.feature-list > li.li-item > span.f-con::text").extract()[1]
    building['building_clear_height'] = response.css("ul.feature-list > li.li-item > span.f-con::text").extract()[2]
    building['parking_number'] = response.css("ul.feature-list > li.li-item > span.f-con::text").extract()[3]
    # NOTE(review): index 4 is skipped and index 5 feeds both elevator fields —
    # confirm against the page's feature-list layout.
    building['elevator_count'] = response.css("ul.feature-list > li.li-item > span.f-con::text").extract()[5]
    building['elevator_desc'] = response.css("ul.feature-list > li.li-item > span.f-con::text").extract()[5]
    building['building_ac_desc'] = response.css("ul.feature-list > li.li-item > span.f-con::text").extract()[6]
    building['major_tenants'] = response.css("ul.feature-list > li.li-item > span.f-con::text").extract()[7]
    building['longitude'] = response.css("div.map-data > input#longitude::attr('value')").extract()[0]
    building['latitude'] = response.css("div.map-data > input#latitude::attr('value')").extract()[0]
    # One SpaceItem per rentable space dialog on the page.
    for data in response.css("div.house-dialog > div.house-info"):
        space = SpaceItem()
        space['id'] = uuid.uuid1()
        space['building_id'] = building['id']
        space['budget'] = data.css("div.house-price > i::text").extract()
        space['size'] = data.css("li.fl > i::text").extract()[0]
        space['rate'] = data.css("li.fl > i::text").extract()[1]
        space['available_size'] = data.css("li.fl > i::text").extract()[2]
        space['description'] = data.css("li.fl::text").extract()[3]
        space['floor'] = data.css("li.fl::text").extract()[4]
        space['shortest_lease_term'] = data.css("li.fl::text").extract()[5]
    return building
def start_format(self, **kwargs):
    """Write the OFX 1.02 header, signon response and statement preamble.

    Expects kwargs['account'] with balance, coming, currency, id and type
    attributes; emits everything up to (and including) the transaction-list
    opening tags via self.output.
    """
    account = kwargs['account']
    self.balance = account.balance
    self.coming = account.coming
    # OFX SGML file header block.
    self.output(u'OFXHEADER:100')
    self.output(u'DATA:OFXSGML')
    self.output(u'VERSION:102')
    self.output(u'SECURITY:NONE')
    self.output(u'ENCODING:USASCII')
    self.output(u'CHARSET:1252')
    self.output(u'COMPRESSION:NONE')
    self.output(u'OLDFILEUID:NONE')
    # Random file uid so consumers treat each export as a new file.
    self.output(u'NEWFILEUID:%s\n' % uuid.uuid1())
    self.output(u'<OFX><SIGNONMSGSRSV1><SONRS><STATUS><CODE>0<SEVERITY>INFO</STATUS>')
    self.output(u'<DTSERVER>%s113942<LANGUAGE>ENG</SONRS></SIGNONMSGSRSV1>' % datetime.date.today().strftime('%Y%m%d'))
    self.output(u'<BANKMSGSRSV1><STMTTRNRS><TRNUID>%s' % uuid.uuid1())
    self.output(u'<STATUS><CODE>0<SEVERITY>INFO</STATUS><CLTCOOKIE>null<STMTRS>')
    self.output(u'<CURDEF>%s<BANKACCTFROM>' % (account.currency or 'EUR'))
    self.output(u'<BANKID>null')
    self.output(u'<BRANCHID>null')
    self.output(u'<ACCTID>%s' % account.id)
    try:
        account_type = self.TYPES_ACCTS[account.type]
    except IndexError:
        # NOTE(review): if TYPES_ACCTS is a dict this lookup raises KeyError,
        # not IndexError, and would escape this handler — confirm its type.
        account_type = ''
    self.output(u'<ACCTTYPE>%s' % (account_type or 'CHECKING'))
    self.output(u'<ACCTKEY>null</BANKACCTFROM>')
    self.output(u'<BANKTRANLIST>')
    self.output(u'<DTSTART>%s' % datetime.date.today().strftime('%Y%m%d'))
    self.output(u'<DTEND>%s' % datetime.date.today().strftime('%Y%m%d'))
def testCloneModelWithCheckpoint(self):
    """Cloning must copy a model's definition, attributes and checkpoint,
    and the clone must survive removal of the source."""
    checkpointMgr = ModelCheckpointMgr()
    modelID = uuid.uuid1().hex
    destModelID = uuid.uuid1().hex
    # Create the source model with meta-info only (no checkpoint)
    modelDef = {'a': 1, 'b': 2, 'c':3}
    checkpointMgr.define(modelID, modelDef)
    # Create a model that we can clone
    model1 = ModelFactory.create(self._getModelParams("variant1"))
    checkpointMgr.save(modelID, model1, attributes="attributes1")
    # Clone the source model
    checkpointMgr.clone(modelID, destModelID)
    # Discard the source model checkpoint — the clone must stand on its own.
    checkpointMgr.remove(modelID)
    # Verify that the destination model's definition is the same as the
    # source model's
    destModelDef = checkpointMgr.loadModelDefinition(destModelID)
    self.assertDictEqual(destModelDef, modelDef)
    # Verify that the destination model's attributes match the source's
    attributes = checkpointMgr.loadCheckpointAttributes(destModelID)
    self.assertEqual(attributes, "attributes1")
    # Attempt to load the cloned model from checkpoint
    model = checkpointMgr.load(destModelID)
    self.assertEqual(str(model.getFieldInfo()), str(model1.getFieldInfo()))
def post():
    """Relay a Kegbot 'drink_poured' webhook to the dasher post endpoint.

    Posts the pour image and a human-readable summary message, then echoes
    the parsed webhook JSON back to the caller.
    """
    try:
        # Get the parsed contents of the form data
        jsonr = request.json
        msg_uuid = uuid.uuid1()
        # pprint.pprint(jsonr)
        kind = jsonr['data']['kind']
        if 'drink_poured' in kind:
            # Pull the interesting fields out of the webhook payload.
            user = jsonr['data']['user']['display_name']
            pour = jsonr['data']['drink']
            beverage = jsonr['data']['keg']['beverage']
            abv = jsonr['data']['keg']['type']['abv']
            style = jsonr['data']['keg']['type']['name']
            left = jsonr['data']['keg']['percent_full']
            producer = beverage['producer']['name']
            img_url = pour['images'][0]['original_url']
            drink = beverage['name']
            volume_ml = pour['volume_ml']
            # Convert millilitres to US pints.
            pints = volume_ml * 0.00211338
            message = "%s just poured %s pints of %s, a %s %s (%%%s). %%%s remains" % (user, round(pints, 3), drink, producer, style, abv, round(left, 3))
            print("Sending to dasher:\n channel: %s\nimg: %s\n message: %s" % (kb_channel_id, img_url, message))
            img_payload = {
                'message': img_url,
                # NOTE(review): fresh uuid here while the text payload reuses
                # msg_uuid — confirm this is intentional.
                'uuid': uuid.uuid1()
            }
            payload = {
                'message': message,
                'uuid': msg_uuid
            }
            # NOTE(review): the first response is overwritten and neither is
            # checked for success.
            r = requests.post(kb_post_url, data=img_payload)
            r = requests.post(kb_post_url, data=payload)
        # Render template
        return jsonify(jsonr)
    except Exception:
        # NOTE(review): swallows every error and implicitly returns None.
        traceback.print_exc(file=sys.stdout)
def testCloneModelWithDefinitionOnly(self):
    """A clone of a definition-only model copies the meta-info, has no
    checkpoint to load, and cannot be cloned onto twice."""
    checkpointMgr = ModelCheckpointMgr()
    modelID = uuid.uuid1().hex
    destModelID = uuid.uuid1().hex
    # Create the source model with meta-info only (no checkpoint)
    modelDef = {'a': 1, 'b': 2, 'c':3}
    checkpointMgr.define(modelID, modelDef)
    # Clone the source model
    checkpointMgr.clone(modelID, destModelID)
    # Verify that the destination model's definition is the same as the
    # source model's
    destModelDef = checkpointMgr.loadModelDefinition(destModelID)
    self.assertDictEqual(destModelDef, modelDef)
    # Calling load when the model checkpoint doesn't exist should raise an
    # exception
    with self.assertRaises(ModelNotFound):
        checkpointMgr.load(destModelID)
    # Calling clone when the destination model archive already exists should
    # raise an exception
    with self.assertRaises(ModelAlreadyExists):
        checkpointMgr.clone(modelID, destModelID)
def test_package_serialization(self):
    """Build a two-item Package request object and serialize it to XML.

    Consistency fix: the original mixed the Python 2-only get_hex()
    accessor with the plain `hex` attribute for the same purpose; `hex`
    (available on both Python 2 and 3) is now used everywhere. The
    duplicated item construction is consolidated into a helper.
    """
    def _item(unit_cost, link):
        # Random 10-char ware key; cost is unit price times a fixed 30 rate.
        return self.request_delivery_factory.factory_item(
            ware_key=uuid.uuid1().hex[:10],
            cost=Decimal(unit_cost * 30),
            payment=Decimal(unit_cost),
            weight=500,
            weight_brutto=600,
            amount=4,
            comment=u'Комментарий на русском',
            link=link
        )

    items = [
        _item(450.0, u'http://shop.ru/item/44'),
        _item(250.0, u'http://shop.ru/item/42')
    ]
    package = self.request_delivery_factory.factory_package(
        number=uuid.uuid1().hex[:10],
        weight=3000,
        items=items
    )
    self.assertIsInstance(package, PackageRequestObject)
    # Smoke-test the XML serialization path.
    tostring(package.to_xml_element(u'Package'), encoding='UTF-8').replace("'", "\"")
def create_version_files():
    """Generate version.h / version.c with the build user, uuid and build number.

    The emitted text is byte-identical to the previous string-concatenation
    implementation; only the construction style changed.
    """
    header_lines = [
        "#ifndef __TREX_VER_FILE__ \n",
        "#define __TREX_VER_FILE__ \n",
        "#ifdef __cplusplus \n",
        " extern \"C\" { \n",
        " #endif \n",
        '#define VERSION_USER "%s" \n' % os.environ.get('USER', 'unknown'),
        'extern const char * get_build_date(void); \n',
        'extern const char * get_build_time(void); \n',
        '#define VERSION_UIID "%s" \n' % uuid.uuid1(),
        '#define VERSION_BUILD_NUM "%s" \n' % get_build_num(),
        "#ifdef __cplusplus \n",
        " } \n",
        " #endif \n",
        "#endif \n",
    ]
    write_file(H_VER_FILE, ''.join(header_lines))

    source_lines = [
        '#include "version.h" \n',
        '#define VERSION_UIID1 "%s" \n' % uuid.uuid1(),
        "const char * get_build_date(void){ \n",
        " return (__DATE__); \n",
        "} \n",
        " \n",
        "const char * get_build_time(void){ \n",
        " return (__TIME__ ); \n",
        "} \n",
    ]
    write_file(C_VER_FILE, ''.join(source_lines))
def testRemoveAll(self):
    """removeAll must wipe every model checkpoint from the store."""
    checkpointMgr = ModelCheckpointMgr()
    # The store starts out with no models.
    self.assertSequenceEqual(checkpointMgr.getModelIDs(), [])
    # Register two models via their meta info only.
    expModelIDs = sorted([uuid.uuid1().hex, uuid.uuid1().hex])
    for modelID in expModelIDs:
        checkpointMgr.define(modelID, definition={'a':1})
    self.assertItemsEqual(checkpointMgr.getModelIDs(), expModelIDs)
    # Wiping the checkpoint store leaves it empty again.
    ModelCheckpointMgr.removeAll()
    self.assertSequenceEqual(checkpointMgr.getModelIDs(), [])
def build_plentry_adds(playlist_id, song_ids):
    """Build 'create' mutations that splice songs into a playlist as a chain.

    :param playlist_id: target playlist id
    :param song_ids: song ids in playlist order
    :return: list of {'create': ...} mutation dicts
    """
    # Pre-generate one client id per entry; neighbours reference these.
    client_ids = [str(uuid1()) for _ in song_ids]
    total = len(song_ids)
    mutations = []
    for i, (song_id, client_id) in enumerate(zip(song_ids, client_ids)):
        entry = {
            'clientId': client_id,
            'creationTimestamp': '-1',
            'deleted': False,
            'lastModifiedTimestamp': '0',
            'playlistId': playlist_id,
            'source': 1,
            'trackId': song_id,
        }
        if song_id.startswith('T'):
            entry['source'] = 2  # AA track
        if i > 0:
            entry['precedingEntryId'] = client_ids[i - 1]
        if i + 1 < total:
            entry['followingEntryId'] = client_ids[i + 1]
        mutations.append({'create': entry})
    return mutations
def get_or_create_user(first_name=None, last_name=None, always_new=True): if always_new == False: # if it's false, we will randomly decide whether we want to make a new case or not. if random.random() > CREATE_NEW_PERCENTAGE: # let's do a 25/75 create/existing split print "Returning existing user" return User.objects.all()[random.randrange(0, User.objects.all().count())] else: print "Creating new user" user = User() if first_name == None: user.first_name = uuid.uuid1().hex[0:20] else: user.first_name = first_name if last_name == None: user.last_name = uuid.uuid1().hex[0:20] else: user.last_name = last_name username = "******" % (user.first_name.replace("'", ""), user.last_name.replace("'", "")) user.username = username[0:30].lower() try: exists = User.objects.get(username=user.username) return exists except: user.save() return user
def get_uuid(self):
    """Return a freshly generated time-based (version 1) uuid as a string."""
    new_id = uuid.uuid1()
    return str(new_id)
def __init__(self, parent):
    """Build the Identification panel: instructions, PersonGroup loader and
    face grid on the left, image chooser and preview on the right, log below."""
    super(IdentificationPanel, self).__init__(parent)
    # Random id for the PersonGroup built from the loaded folder tree.
    self.person_group_id = str(uuid.uuid1())
    # Lookup tables populated when a PersonGroup is loaded.
    self.person_id_names = {}
    self.person_name_faces = {}
    self.faces = {}
    self.face_ids = []
    self.vsizer = wx.BoxSizer(wx.VERTICAL)
    self.panel = scrolled.ScrolledPanel(self)
    self.hsizer = wx.BoxSizer()
    self.hsizer.AddStretchSpacer()
    self.hvsizer = wx.BoxSizer(wx.VERTICAL)
    self.hvsizer.SetMinSize((util.INNER_PANEL_WIDTH, -1))
    # Usage instructions shown at the top of the panel.
    label = (
        '1) Place face images of one person in a folder and give '
        'the folder the same name as that person.\n'
        '2) Repeat the step above one or more times, creating '
        'different folders for different people.\n'
        '3) Place all of the person folders in one root folder.\n'
        '4) Click "Load PersonGroup" and select the root folder '
        'you created above.\n'
        '5) Click "Choose Image" to select a different image '
        'representing one of the people for whom you created '
        'folders above. The face in the image will be framed and '
        'tagged with the name of the person.'
    )
    self.static_text = wx.StaticText(self.panel, label=label)
    self.static_text.Wrap(util.INNER_PANEL_WIDTH)
    self.hvsizer.Add(self.static_text, 0, wx.ALL, 0)
    self.vhsizer = wx.BoxSizer()
    # Left column: PersonGroup loader button above the face grid.
    self.lsizer = wx.BoxSizer(wx.VERTICAL)
    self.lsizer.SetMinSize((util.MAX_IMAGE_SIZE, -1))
    flag = wx.EXPAND | wx.ALIGN_CENTER | wx.ALL
    self.btn_folder = wx.Button(self.panel, label='Load PersonGroup')
    self.lsizer.Add(self.btn_folder, 0, flag, 5)
    self.Bind(wx.EVT_BUTTON, self.OnChooseFolder, self.btn_folder)
    flag = wx.ALIGN_CENTER | wx.ALL | wx.EXPAND
    self.grid = base.CaptionWrapFaceList(self.panel)
    self.lsizer.Add(self.grid, 0, flag, 5)
    self.vhsizer.Add(self.lsizer, 1, wx.EXPAND)
    self.vhsizer.AddSpacer(90)
    # Right column: image chooser button above the tagged-image preview.
    self.rsizer = wx.BoxSizer(wx.VERTICAL)
    self.rsizer.SetMinSize((util.MAX_IMAGE_SIZE, -1))
    flag = wx.EXPAND | wx.ALIGN_CENTER | wx.ALL
    self.btn_file = wx.Button(self.panel, label='Choose Image')
    self.rsizer.Add(self.btn_file, 0, flag, 5)
    self.Bind(wx.EVT_BUTTON, self.OnChooseImage, self.btn_file)
    flag = wx.ALIGN_CENTER | wx.ALL
    self.bitmap = base.MyStaticBitmap(self.panel)
    self.rsizer.Add(self.bitmap, 0, flag, 5)
    self.vhsizer.Add(self.rsizer, 1, wx.EXPAND)
    self.hvsizer.Add(self.vhsizer)
    self.hsizer.Add(self.hvsizer)
    self.hsizer.AddStretchSpacer()
    self.hsizer.Layout()
    self.panel.SetSizer(self.hsizer)
    self.panel.Layout()
    self.panel.SetupScrolling(scroll_x=False)
    self.vsizer.Add(self.panel, 3, wx.EXPAND)
    # Bottom log pane.
    self.log = base.MyLog(self)
    self.vsizer.Add(self.log, 1, wx.EXPAND)
    self.SetSizerAndFit(self.vsizer)
    # Image selection stays disabled until a PersonGroup has been loaded.
    self.btn_file.Disable()
def portattack(req): """ 端口爆破 :param req: :return: """ data = json.loads(req.body) result = {"status": True, "msg": "成功", "data": "", "logid": ""} # id id = str(uuid.uuid1()) try: if data["type"] == "create": # 创建端口的爆破任务,存储数据库 # 爆破开始时间 start_time = currenttime() # 爆破状态 status = "running" # 爆破任务类型 type = "ALL" # 扫描进度 progress = "0.00" # 创建主任务数据 PortCrack.objects.create(id=id, start_time=start_time, status=status, type=type, progress=progress) attackObject = AttackObject() # 必须调用setThreads方法,里面有对queue的初始化 attackObject.setThreads(data["threads"]) print attackObject.attack_queue_dict attackObject.pid = id attackObject.usernames = "/Users/margin/PycharmProjects/AnyScan/AnyScanUI/attack/ssh_username.txt" attackObject.passwords = "/Users/margin/PycharmProjects/AnyScan/AnyScanUI/attack/ssh_password.txt" # 实时显示任务的id result["logid"] = id # 要爆破的ip,port attack_dict = data["attack_dict"] attacker = Attacker(attackObject) status = attacker.attack(attack_dict, attack_task_id_dict={}) if status == False: result["status"] == False result["msg"] == "任务添加异常,请查看日志" elif data["type"] == "start": id = data["id"] if id is None or id == "": result = {"status": False, "msg": "任务ID不可为空"} return HttpResponse(json.dumps(result, ensure_ascii=False)) # 判断任务id是否存在 portcrack = PortCrack.objects.get(id=id) if portcrack is None: result = {"status": False, "msg": "您所选的任务ID不存在"} return HttpResponse(json.dumps(result, ensure_ascii=False)) # 如果任务不是暂停状态就在启动任务 if portcrack.status != "pause": result = { "status": False, "msg": "您所选的任务不是【%s】,不能启动" % portcrack.status } return HttpResponse(json.dumps(result, ensure_ascii=False)) # 查询任务信息和子任务信息,组织数据给Attacker.py child_set = portcrack.portcrackchild_set.all() # 组织给Attacker.py的数据 attack_dict: {"ip":[80,3306],"ip2":[22]} attack_dict = {} # 搞一个字典{"ip+port":id},为了能让attacker正确的取出当前任务的id attack_task_id_dict = {} for child in child_set: __ip = attack_dict.get(child.ip) if __ip is None or __ip == "": attack_dict[child.ip] = [child.port] else: 
attack_dict[child.ip].append(child.port) attack_task_id_dict[child.ip + child.port] = child.id # 更新该任务状态 PortCrack.objects.filter(id=id).update(status="running", end_time=currenttime()) attackObject = AttackObject() # 当前攻击启动的类型 attackObject.type = "start" attackObject.pid = id attacker = Attacker(attackObject) status = attacker.attack(attack_dict, attack_task_id_dict) if status == False: result["status"] == False result["msg"] == "任务启动异常,请查看日志" except Exception: result = { "status": False, "msg": "任务添加异常", "data": traceback.format_exc(), "logid": "" } print traceback.format_exc() return HttpResponse(json.dumps(result, ensure_ascii=False))
def exploit(self):
    """
    Drain the target queue, running the configured PoC payload against
    each target and recording the per-target result.

    For every target pulled from ``self.target_queue`` the Python source
    in ``self.payload`` is exec'd with ``target`` bound in its namespace;
    the payload is expected to set ``status`` (bool) and ``keyword``
    (str).  A ``poc_chil`` row is written per target and the parent
    ``poc_main`` row's progress/log is refreshed.  Once the queue is
    empty the main task is marked "success" and the loop exits.

    SECURITY NOTE: ``exec`` of ``self.payload`` runs arbitrary code; the
    payload source must come from a trusted author.
    :return: None
    """
    try:
        while True:
            if self.target_queue.empty() is False:
                try:
                    target = str(self.target_queue.get())
                    status = False
                    keyword = ""
                    try:
                        # Compile and run the PoC payload with the current
                        # target injected into its global namespace.
                        code = compile(self.payload, '<string>', 'exec')
                        runtime = {"target": target}
                        exec code in runtime
                        status = runtime.get("status")
                        keyword = runtime.get("keyword")
                        # A payload that did not set both values counts
                        # as "not vulnerable".
                        if status is None or keyword is None:
                            status = False
                            keyword = ""
                    except:
                        # Payload errors are treated as "not vulnerable".
                        pass
                    # Persist this target's outcome.
                    id = str(uuid.uuid1())
                    poc_chil.objects.create(id=id,
                                            pid=self.parent,
                                            commond=self.commond,
                                            vulnerable=status,
                                            host=target,
                                            keyword=keyword)
                    # Progress = fraction of the original queue consumed,
                    # rendered as a percentage string with 2 decimals.
                    progress = 1 - float(
                        format(
                            float(self.target_queue.qsize()) /
                            float(self.target_queue_old_size), '.4f'))
                    progress = '%.2f' % (progress * 100)
                    # Human-readable log line shown to the UI.
                    log = "【%s】正在测试【%s】" % (str(progress) + "%", target)
                    poc_main.objects.filter(id=self.pid,
                                            locker="false").update(
                        end_time=currenttime(),
                        status="running",
                        log=log,
                        progress=progress)
                except:
                    pass
                    print traceback.format_exc()
            else:
                #print "空了"
                # Queue drained: close out the main task as finished.
                poc_main.objects.filter(id=self.pid, locker="false").update(
                    end_time=currenttime(),
                    status="success",
                    log="所有网站均已验证完成",
                    locker="true",
                    progress="100")
                break
    except:
        print traceback.format_exc()
def get(self, resource_group_name, workspace_name, integration_runtime_name, metadata_path=None, custom_headers=None, raw=False, **operation_config):
    """Get integration runtime object metadata.

    Get object metadata from an integration runtime.

    :param resource_group_name: The name of the resource group. The name
     is case insensitive.
    :type resource_group_name: str
    :param workspace_name: The name of the workspace
    :type workspace_name: str
    :param integration_runtime_name: Integration runtime name
    :type integration_runtime_name: str
    :param metadata_path: Metadata path.
    :type metadata_path: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: SsisObjectMetadataListResponse or ClientRawResponse if
     raw=true
    :rtype: ~azure.mgmt.synapse.models.SsisObjectMetadataListResponse or
     ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    # Request body is optional: only built when a metadata path is given.
    get_metadata_request = None
    if metadata_path is not None:
        get_metadata_request = models.GetSsisObjectMetadataRequest(
            metadata_path=metadata_path)

    # Construct URL
    url = self.get.metadata['url']
    path_format_arguments = {
        'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1),
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
        'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
        'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str')
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct parameters
    query_parameters = {}
    query_parameters['api-version'] = self._serialize.query(
        "self.api_version", self.api_version, 'str', min_length=1)

    # Construct headers
    header_parameters = {}
    header_parameters['Accept'] = 'application/json'
    header_parameters['Content-Type'] = 'application/json; charset=utf-8'
    if self.config.generate_client_request_id:
        # Per-request correlation id for client-side tracing.
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_parameters.update(custom_headers)
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header(
            "self.config.accept_language", self.config.accept_language, 'str')

    # Construct body
    if get_metadata_request is not None:
        body_content = self._serialize.body(
            get_metadata_request, 'GetSsisObjectMetadataRequest')
    else:
        body_content = None

    # Construct and send request
    request = self._client.post(url, query_parameters, header_parameters, body_content)
    response = self._client.send(request, stream=False, **operation_config)

    if response.status_code not in [200]:
        exp = CloudError(response)
        exp.request_id = response.headers.get('x-ms-request-id')
        raise exp

    deserialized = None
    if response.status_code == 200:
        deserialized = self._deserialize('SsisObjectMetadataListResponse', response)

    if raw:
        client_raw_response = ClientRawResponse(deserialized, response)
        return client_raw_response

    return deserialized
        # (Tail of a snapshot-lookup helper whose `def`/`try` lie above this
        # excerpt: scan snapshot_list for a matching name, return its id.)
        for s in snapshot_list:
            if s["name"] == snapshot_name:
                return s["id"]
    except Exception as e:
        print "[Error] the error message is: " + e.message
    return retval


def update_block_snapshot(snapshot, name=None, description=None, host=None):
    """Placeholder: updating a block snapshot is not supported yet.

    :raises NotImplementedError: always.
    """
    raise NotImplementedError("This function is not implemented.")


if __name__ == "__main__":
    # Ad-hoc benchmark: create 1000 snapshots on random volumes, then time
    # repeated listing.
    # create 1000 snapshots
    _, volume_ids = BlockVolume.get_block_volume_ids()
    for i in range(1, 1001):
        # NOTE(review): randint's upper bound is inclusive, so idx can equal
        # len(volume_ids) and volume_ids[idx] would raise IndexError;
        # random.randrange(len(volume_ids)) looks like what was intended.
        idx = random.randint(0, len(volume_ids))
        print create_block_snapshot("snapshot-" + str(uuid.uuid1()), volume_ids[idx])
    # run get_block_snapshot_ids x times
    length = 100
    start = datetime.datetime.now()
    print start
    for i in range(length):
        get_block_snapshot_ids()
    end = datetime.datetime.now()
    print end
    # Average wall-clock seconds per call.
    print (end - start ).seconds / (length * 1.0)
    #host = "10.0.11.233"
    # update_block_snapshot("xxx")
    #print create_block_snapshot("snapshot1", "volume1")
def get(
        self, resource_group_name, server_name, custom_headers=None, raw=False, **operation_config):
    """Gets a server encryption protector.

    :param resource_group_name: The name of the resource group that
     contains the resource. You can obtain this value from the Azure
     Resource Manager API or the portal.
    :type resource_group_name: str
    :param server_name: The name of the server.
    :type server_name: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: EncryptionProtector or ClientRawResponse if raw=true
    :rtype: ~azure.mgmt.sql.models.EncryptionProtector or
     ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    # Construct URL
    url = self.get.metadata['url']
    path_format_arguments = {
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'serverName': self._serialize.url("server_name", server_name, 'str'),
        # Protector name is fixed on the operations class, not a parameter.
        'encryptionProtectorName': self._serialize.url("self.encryption_protector_name", self.encryption_protector_name, 'str'),
        'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct parameters
    query_parameters = {}
    query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')

    # Construct headers
    header_parameters = {}
    header_parameters['Accept'] = 'application/json'
    if self.config.generate_client_request_id:
        # Per-request correlation id for client-side tracing.
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_parameters.update(custom_headers)
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

    # Construct and send request
    request = self._client.get(url, query_parameters, header_parameters)
    response = self._client.send(request, stream=False, **operation_config)

    if response.status_code not in [200]:
        exp = CloudError(response)
        exp.request_id = response.headers.get('x-ms-request-id')
        raise exp

    deserialized = None
    if response.status_code == 200:
        deserialized = self._deserialize('EncryptionProtector', response)

    if raw:
        client_raw_response = ClientRawResponse(deserialized, response)
        return client_raw_response

    return deserialized
def random_uuid():
    """Return a freshly generated, time-based (version 1) UUID object."""
    generated = uuid.uuid1()
    return generated
def generate_upgraded_definition(self, resource_group_name, workflow_name, target_schema_version=None, custom_headers=None, raw=False, **operation_config):
    """Generates the upgraded definition for a workflow.

    :param resource_group_name: The resource group name.
    :type resource_group_name: str
    :param workflow_name: The workflow name.
    :type workflow_name: str
    :param target_schema_version: The target schema version.
    :type target_schema_version: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :rtype: object
    :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
     if raw=true
    """
    parameters = models.GenerateUpgradedDefinitionParameters(
        target_schema_version=target_schema_version)

    # Construct URL
    url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Logic/workflows/{workflowName}/generateUpgradedDefinition'
    path_format_arguments = {
        'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'),
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'workflowName': self._serialize.url("workflow_name", workflow_name, 'str')
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct parameters
    query_parameters = {}
    query_parameters['api-version'] = self._serialize.query(
        "self.api_version", self.api_version, 'str')

    # Construct headers
    header_parameters = {}
    header_parameters['Content-Type'] = 'application/json; charset=utf-8'
    if self.config.generate_client_request_id:
        # Per-request correlation id for client-side tracing.
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_parameters.update(custom_headers)
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header(
            "self.config.accept_language",
            self.config.accept_language, 'str')

    # Construct body
    body_content = self._serialize.body(
        parameters, 'GenerateUpgradedDefinitionParameters')

    # Construct and send request
    # NOTE: older autorest style — headers and body are passed to send(),
    # not to post().
    request = self._client.post(url, query_parameters)
    response = self._client.send(request, header_parameters, body_content, **operation_config)

    if response.status_code not in [200]:
        exp = CloudError(response)
        exp.request_id = response.headers.get('x-ms-request-id')
        raise exp

    deserialized = None
    if response.status_code == 200:
        deserialized = self._deserialize('object', response)

    if raw:
        client_raw_response = ClientRawResponse(deserialized, response)
        return client_raw_response

    return deserialized
resultdata=callrestapi(reqval,reqtype) # loop root reports if 'items' in resultdata: total_items=resultdata['count'] returned_items=len(resultdata['items']) if total_items == 0: print("Note: No items returned.") else: # export each folder and download the package file to the directory for i in range(0,returned_items): id=resultdata['items'][i]["id"] package_name=str(uuid.uuid1()) json_name=resultdata['items'][i]["name"].replace(" ","")+'_'+str(i) json_name=json_name.replace("(","_") json_name=json_name.replace(")","_") command=clidir+'sas-admin transfer export -u /reports/reports/'+id+' --name "'+package_name+'"' print(command) subprocess.call(command, shell=True) reqval='/transfer/packages?filter=eq(name,"'+package_name+'")' package_info=callrestapi(reqval,reqtype) package_id=package_info['items'][0]['id']
def md5_token(salt=None):
    """Return an MD5 token derived from a fresh UUID1 string.

    When *salt* is truthy the salted variant ``md5_salt`` is used;
    otherwise the plain ``md5`` helper is applied.
    """
    seed = str(uuid.uuid1())
    return md5_salt(seed, salt) if salt else md5(seed)
def validate(
        self, resource_group_name, location, workflow_name, workflow, custom_headers=None, raw=False, **operation_config):
    """Validates the workflow definition.

    :param resource_group_name: The resource group name.
    :type resource_group_name: str
    :param location: The workflow location.
    :type location: str
    :param workflow_name: The workflow name.
    :type workflow_name: str
    :param workflow: The workflow definition.
    :type workflow: :class:`Workflow <azure.mgmt.logic.models.Workflow>`
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :rtype: None
    :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
     if raw=true
    """
    # Construct URL
    url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Logic/locations/{location}/workflows/{workflowName}/validate'
    path_format_arguments = {
        'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'),
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'location': self._serialize.url("location", location, 'str'),
        'workflowName': self._serialize.url("workflow_name", workflow_name, 'str')
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct parameters
    query_parameters = {}
    query_parameters['api-version'] = self._serialize.query(
        "self.api_version", self.api_version, 'str')

    # Construct headers
    header_parameters = {}
    header_parameters['Content-Type'] = 'application/json; charset=utf-8'
    if self.config.generate_client_request_id:
        # Per-request correlation id for client-side tracing.
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_parameters.update(custom_headers)
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header(
            "self.config.accept_language",
            self.config.accept_language, 'str')

    # Construct body
    body_content = self._serialize.body(workflow, 'Workflow')

    # Construct and send request
    # NOTE: older autorest style — headers and body are passed to send(),
    # not to post().
    request = self._client.post(url, query_parameters)
    response = self._client.send(request, header_parameters, body_content, **operation_config)

    if response.status_code not in [200]:
        exp = CloudError(response)
        exp.request_id = response.headers.get('x-ms-request-id')
        raise exp

    # Validation returns no body; only a raw wrapper is ever returned.
    if raw:
        client_raw_response = ClientRawResponse(None, response)
        return client_raw_response
def get(
        self, account_name, pipeline_identity, start_date_time=None, end_date_time=None, custom_headers=None, raw=False, **operation_config):
    """Gets the Pipeline information for the specified pipeline ID.

    :param account_name: The Azure Data Lake Analytics account to execute
     job operations on.
    :type account_name: str
    :param pipeline_identity: Pipeline ID.
    :type pipeline_identity: str
    :param start_date_time: The start date for when to get the pipeline
     and aggregate its data. The startDateTime and endDateTime can be no
     more than 30 days apart.
    :type start_date_time: datetime
    :param end_date_time: The end date for when to get the pipeline and
     aggregate its data. The startDateTime and endDateTime can be no more
     than 30 days apart.
    :type end_date_time: datetime
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: JobPipelineInformation or ClientRawResponse if raw=true
    :rtype: ~azure.mgmt.datalake.analytics.job.models.JobPipelineInformation
     or ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    # Construct URL
    # The account name and DNS suffix form the host part of the URL
    # (skip_quote keeps them verbatim).
    url = '/pipelines/{pipelineIdentity}'
    path_format_arguments = {
        'accountName': self._serialize.url("account_name", account_name, 'str', skip_quote=True),
        'adlaJobDnsSuffix': self._serialize.url("self.config.adla_job_dns_suffix", self.config.adla_job_dns_suffix, 'str', skip_quote=True),
        'pipelineIdentity': self._serialize.url("pipeline_identity", pipeline_identity, 'str')
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct parameters
    query_parameters = {}
    if start_date_time is not None:
        query_parameters['startDateTime'] = self._serialize.query("start_date_time", start_date_time, 'iso-8601')
    if end_date_time is not None:
        query_parameters['endDateTime'] = self._serialize.query("end_date_time", end_date_time, 'iso-8601')
    query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')

    # Construct headers
    header_parameters = {}
    header_parameters['Content-Type'] = 'application/json; charset=utf-8'
    if self.config.generate_client_request_id:
        # Per-request correlation id for client-side tracing.
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_parameters.update(custom_headers)
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

    # Construct and send request
    # NOTE: older autorest style — headers are passed to send(), not get().
    request = self._client.get(url, query_parameters)
    response = self._client.send(request, header_parameters, stream=False, **operation_config)

    if response.status_code not in [200]:
        exp = CloudError(response)
        exp.request_id = response.headers.get('x-ms-request-id')
        raise exp

    deserialized = None
    if response.status_code == 200:
        deserialized = self._deserialize('JobPipelineInformation', response)

    if raw:
        client_raw_response = ClientRawResponse(deserialized, response)
        return client_raw_response

    return deserialized
def crc32_token():
    """Return the CRC32 hash of a freshly generated UUID1 string."""
    raw_bytes = str(uuid.uuid1()).encode('utf-8')
    return crc32_hash(raw_bytes)
def order(self, n0, isBuy, mode, vol):
    """Place (or close) an ETF option order and record it.

    :param n0: current bar/row; ``n0['close']`` is the underlying price.
    :param isBuy: >0 opens/buys, <0 sells/closes an existing position.
    :param mode: direction indicator; its sign selects the option when
                 opening (0 is coerced to 1).
    :param vol: number of contracts.
    :return: True on success, False when no matching option quote exists.
    """
    pN = self.preNode
    # cv is captured but not used below.
    cv, uid, unit = self.currentVol, self.uid, self.multiplier
    ETF = None
    # Look up the matching option's current quote.
    if isBuy < 0:
        # Selling: reuse the code of the position being closed.
        ETF = self.Int.getETF(code=pN['code'])
    else:
        # Opening a new position.
        type = 1
        if mode == 0:
            mode = 1
        ETF = self.Int.getETF(sign=np.sign(mode), price=n0['close'])
    if ETF is None or len(ETF) == 0:
        # print(pN['code'] if type == 0 else 'new', mode, now)
        return False
    # Buy at the ask, sell at the bid.
    price = ETF["ask"].values[0] if isBuy > 0 else ETF["bid"].values[0]
    fee = vol * self.ratio
    # Update position state.
    self.currentPositionType = np.sign(mode) if isBuy > 0 else pN['pos']
    self.currentVol += isBuy * vol
    doc = {
        "code": ETF['code'].values[0],
        "name": ETF['name'].values[0],
        "createdate": public.getDatetime(),
        "price": price,
        "vol": vol,
        "mode": mode,
        "isBuy": isBuy,
        "pos": self.currentPositionType,
        "ownerPrice": n0['close'],
        "fee": fee,
        # Cash delta: buys cost money, sells return it; fee always deducted.
        "amount": -isBuy * price * vol * unit - fee,
        "uid": uid,
        # New batch id when opening; a close inherits the opening batch id.
        "batchid": uuid.uuid1() if (isBuy > 0 or self.preNode is None) else self.preNode['batchid'],
        "method": self.method
    }
    self.Record.insert(doc)
    # Remember this order as the previous node while a position is open.
    self.preNode = doc if self.currentVol > 0 else None
    # Trade notification for about 1 minute (6 attempts, 15 s apart).
    s = 0
    while 1:
        self.inform(order=doc)
        if s > 5:
            break
        s += 1
        time.sleep(15)
    return True
def SaveDetected(self, frame, name):
    """Save a detected frame (BGR) as a uniquely named PNG in self.Location.

    The file name is the supplied name plus a fresh UUID1, so repeated
    detections never overwrite each other.
    """
    rgb_frame = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)
    unique_name = name + str(uuid.uuid1()) + ".png"
    target_path = os.path.join(self.Location, unique_name)
    Image.fromarray(rgb_frame).save(target_path, "PNG")
def update(
        self, resource_group_name, hana_instance_name, tags=None, custom_headers=None, raw=False, **operation_config):
    """Patches the Tags field of a SAP HANA instance.

    Patches the Tags field of a SAP HANA instance for the specified
    subscription, resource group, and instance name.

    :param resource_group_name: Name of the resource group.
    :type resource_group_name: str
    :param hana_instance_name: Name of the SAP HANA on Azure instance.
    :type hana_instance_name: str
    :param tags: Tags field of the HANA instance.
    :type tags: dict[str, str]
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: HanaInstance or ClientRawResponse if raw=true
    :rtype: ~azure.mgmt.hanaonazure.models.HanaInstance or
     ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`ErrorResponseException<azure.mgmt.hanaonazure.models.ErrorResponseException>`
    """
    # Wrap the bare dict in the serializable Tags model.
    tags_parameter = models.Tags(tags=tags)

    # Construct URL
    url = self.update.metadata['url']
    path_format_arguments = {
        'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'),
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'hanaInstanceName': self._serialize.url("hana_instance_name", hana_instance_name, 'str')
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct parameters
    query_parameters = {}
    query_parameters['api-version'] = self._serialize.query(
        "self.api_version", self.api_version, 'str')

    # Construct headers
    header_parameters = {}
    header_parameters['Accept'] = 'application/json'
    header_parameters['Content-Type'] = 'application/json; charset=utf-8'
    if self.config.generate_client_request_id:
        # Per-request correlation id for client-side tracing.
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_parameters.update(custom_headers)
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header(
            "self.config.accept_language", self.config.accept_language, 'str')

    # Construct body
    body_content = self._serialize.body(tags_parameter, 'Tags')

    # Construct and send request
    request = self._client.patch(url, query_parameters, header_parameters, body_content)
    response = self._client.send(request, stream=False, **operation_config)

    if response.status_code not in [200]:
        raise models.ErrorResponseException(self._deserialize, response)

    deserialized = None
    if response.status_code == 200:
        deserialized = self._deserialize('HanaInstance', response)

    if raw:
        client_raw_response = ClientRawResponse(deserialized, response)
        return client_raw_response

    return deserialized
def __init__(self):
    """Initialise common model state: a unique id and empty containers."""
    super(BaseModel, self).__init__()
    # Time-based UUID string uniquely identifying this instance.
    self._id = str(uuid.uuid1())
    # Empty containers; populated elsewhere by subclasses/callers.
    self.extras = {}
    self._meta = {}
    self._related = []
def action(self, path, method, get_vars, post_vars, env=None):
    """action that handles all the HTTP requests for Auth

    Routes /plugin/* to registered plugins, /api/* to the JSON API
    (register/login/logout/password/profile endpoints), and the
    remaining paths (logout, verify_email, forms) to HTML handling.

    :param path: request sub-path under the auth prefix.
    :param method: HTTP verb ("GET"/"POST").
    :param get_vars: query-string variables.
    :param post_vars: POST form/JSON variables.
    :param env: optional template environment dict.
    :return: dict for API calls, rendered template otherwise.
    """
    env = env or {}
    # Delegate /plugin/<name>/<rest> to the matching registered plugin.
    if path.startswith("plugin/"):
        parts = path.split("/", 2)
        plugin = self.plugins.get(parts[1])
        if plugin:
            return plugin.handle_request(
                self, parts[2], request.query, request.json
            )
        else:
            abort(404)
    if path.startswith("api/"):
        data = {}
        if method == "GET":
            # Should we use the username?
            if path == "api/use_username":
                return {"use_username": self.use_username}
            if path == "api/config":
                # Expose only simple, readable+writable auth_user fields.
                fields = [dict(name=f.name, type=f.type)
                          for f in self.db.auth_user
                          if f.type in ['string','bool','integer','float'] and f.writable and f.readable]
                return {
                    "allowed_actions": self.allowed_actions,
                    "plugins": ['local'] + [key for key in self.plugins],
                    "fields": fields,
                }
            # Otherwise, we assume the user exists.
            user = self.get_user(safe=True)
            if not user:
                data = self._error("not authorized", 401)
            if path == "api/profile":
                return {"user": user}
        elif method == "POST" and self.db:
            vars = dict(post_vars)
            user = self.get_user(safe=False)
            if path == "api/register":
                data = self.register(vars, send=True).as_dict()
            elif path == "api/login":
                # Prioritize PAM or LDAP logins if enabled
                if "pam" in self.plugins or "ldap" in self.plugins:
                    plugin_name = "pam" if "pam" in self.plugins else "ldap"
                    username, password = vars.get("email"), vars.get("password")
                    check = self.plugins[plugin_name].check_credentials(
                        username, password
                    )
                    if check:
                        data = {
                            "username": username,
                            # "email": username + "@localhost",
                            "sso_id": plugin_name + ":" + username,
                        }
                        # and register the user if we have one, just in case
                        if self.db:
                            data = self.get_or_register_user(data)
                        # Establish the session: user id, activity timestamp,
                        # and a fresh session uuid marking this login.
                        self.session["user"] = {"id": data["id"]}
                        self.session["recent_activity"] = calendar.timegm(time.gmtime())
                        self.session["uuid"] = str(uuid.uuid1())
                    else:
                        data = self._error("Invalid Credentials")
                # Else use normal login
                else:
                    user, error = self.login(**vars)
                    if user:
                        self.session["user"] = {"id": user.id}
                        self.session["recent_activity"] = calendar.timegm(time.gmtime())
                        self.session["uuid"] = str(uuid.uuid1())
                        # Only expose readable fields to the client.
                        user = {
                            f.name: user[f.name]
                            for f in self.db.auth_user
                            if f.readable
                        }
                        data = {"user": user}
                    else:
                        data = self._error(error)
            elif path == "api/request_reset_password":
                if not self.request_reset_password(**vars):
                    data = self._error("invalid user")
            elif path == "api/reset_password":
                if not self.reset_password(
                    vars.get("token"), vars.get("new_password")
                ):
                    data = self._error("invalid token, request expired")
            elif user and path == "api/logout":
                self.session["user"] = None
            elif user and path == "api/unsubscribe":
                self.session["user"] = None
                self.gdpr_unsubscribe(user, send=True)
            elif user and path == "api/change_password":
                data = self.change_password(
                    user, vars.get("new_password"), vars.get("old_password")
                )
            elif user and path == "api/change_email":
                data = self.change_email(
                    user, vars.get("new_email"), vars.get("password")
                )
            elif user and path == "api/profile":
                data = self.update_profile(user, **vars)
            else:
                data = {"status": "error", "message": "undefined"}
        # Normalise the response envelope: status/message/code defaults.
        if not "status" in data and data.get("errors"):
            data.update(status="error", message="validation errors", code=401)
        elif "errors" in data and not data["errors"]:
            del data["errors"]
        data["status"] = data.get("status", "success")
        data["code"] = data.get("code", 200)
        return data
    elif path == "logout":
        self.session.clear()
        # Somehow call revoke for active plugin
    elif path == "verify_email" and self.db:
        token = get_vars.get("token")
        if self.verify_email(token):
            # The token's second segment is the b16-encoded redirect target.
            next = b16d(token.split("/")[1])
            redirect(next or URL("auth", "email_verified"))
        else:
            redirect(URL("auth", "token_expired"))
    env["path"] = path
    return Template("auth.html").transform(env)
def encode_syllables(self, algorithm='maxonset', call_back=None, stop_check=None):
    """
    Encodes syllables to a corpus.

    Parameters
    ----------
    algorithm : str, defaults to 'maxonset'
        Determines which algorithm will be used to encode syllables:
        'maxonset' (maximal onset) or 'probabilistic'.
    call_back : callable, optional
        Progress callback.
    stop_check : callable, optional
        Returns True when processing should be aborted.
    """
    self.reset_syllables(call_back, stop_check)
    onsets = self.find_onsets()
    if algorithm == 'probabilistic':
        # Probabilistic splitting needs normalised onset/coda counts.
        onsets = norm_count_dict(onsets, onset=True)
        codas = self.find_codas()
        codas = norm_count_dict(codas, onset=False)
    elif algorithm == 'maxonset':
        onsets = set(onsets.keys())
    else:
        raise (NotImplementedError)
    # All syllabic (nucleus-capable) phone labels in the corpus.
    statement = '''MATCH (n:{}:syllabic) return n.label as label'''.format(self.cypher_safe_name)
    res = self.execute_cypher(statement)
    syllabics = set(x['label'] for x in res)
    word_type = getattr(self, self.word_name)
    phone_type = getattr(word_type, self.phone_name)
    create_syllabic_csvs(self)
    create_nonsyllabic_csvs(self)
    # Process the corpus one speaker at a time.
    splits = self.speakers
    process_string = 'Processing speaker {} of {} ({})...'
    if call_back is not None:
        call_back(0, len(splits))
    for i, s in enumerate(splits):
        if stop_check is not None and stop_check():
            break
        if call_back is not None:
            call_back(i)
            call_back(process_string.format(i, len(splits), s))
        # Pull every word (with its phones) for this speaker, ordered by
        # discourse then time.
        q = self.query_graph(word_type)
        q = q.filter(word_type.speaker.name == s)
        q = q.order_by(word_type.discourse.name.column_name('discourse'))
        q = q.order_by(word_type.begin)
        q = q.columns(word_type.id.column_name('id'),
                      phone_type.id.column_name('phone_id'),
                      phone_type.label.column_name('phones'),
                      phone_type.begin.column_name('begins'),
                      phone_type.end.column_name('ends'),
                      word_type.discourse.name.column_name('discourse'))
        results = q.all()
        speaker_boundaries = {s: []}
        speaker_non_syls = {s: []}
        prev_id = None
        cur_discourse = None
        for w in results:
            phones = w['phones']
            phone_ids = w['phone_id']
            phone_begins = w['begins']
            phone_ends = w['ends']
            discourse = w['discourse']
            # Reset the syllable chain at discourse boundaries.
            if discourse != cur_discourse:
                prev_id = None
                cur_discourse = discourse
            # Indices of the syllabic phones (nuclei) in this word.
            vow_inds = [i for i, x in enumerate(phones) if x in syllabics]
            if len(vow_inds) == 0:
                # Word without a nucleus: encode the whole word as a single
                # non-syllabic unit with an onset/coda break point.
                cur_id = uuid1()
                if algorithm == 'probabilistic':
                    split = split_nonsyllabic_prob(phones, onsets, codas)
                elif algorithm == 'maxonset':
                    split = split_nonsyllabic_maxonset(phones, onsets)
                label = '.'.join(phones)
                row = {'id': cur_id, 'prev_id': prev_id,
                       'onset_id': phone_ids[0],
                       'break': split,
                       'coda_id': phone_ids[-1],
                       'begin': phone_begins[0],
                       'label': label,
                       'type_id': make_type_id([label], self.corpus_name),
                       'end': phone_ends[-1]}
                speaker_non_syls[s].append(row)
                prev_id = cur_id
                continue
            # One syllable per nucleus; intervocalic consonants are divided
            # between the previous coda and the following onset.
            for j, i in enumerate(vow_inds):
                cur_id = uuid1()
                cur_vow_id = phone_ids[i]
                begin = phone_begins[i]
                end = phone_ends[i]
                if j == 0:
                    # First syllable: onset reaches back to the word start.
                    begin_ind = 0
                    if i != 0:
                        cur_ons_id = phone_ids[begin_ind]
                        begin = phone_begins[begin_ind]
                    else:
                        cur_ons_id = None
                else:
                    prev_vowel_ind = vow_inds[j - 1]
                    cons_string = phones[prev_vowel_ind + 1:i]
                    if algorithm == 'probabilistic':
                        split = split_ons_coda_prob(cons_string, onsets, codas)
                    elif algorithm == 'maxonset':
                        split = split_ons_coda_maxonset(cons_string, onsets)
                    if split is None:
                        cur_ons_id = None
                        begin_ind = i
                    else:
                        begin_ind = prev_vowel_ind + 1 + split
                        cur_ons_id = phone_ids[begin_ind]
                if j == len(vow_inds) - 1:
                    # Last syllable: coda runs to the word end.
                    end_ind = len(phones) - 1
                    if i != len(phones) - 1:
                        cur_coda_id = phone_ids[end_ind]
                        end = phone_ends[end_ind]
                    else:
                        cur_coda_id = None
                else:
                    foll_vowel_ind = vow_inds[j + 1]
                    cons_string = phones[i + 1:foll_vowel_ind]
                    if algorithm == 'probabilistic':
                        split = split_ons_coda_prob(cons_string, onsets, codas)
                    elif algorithm == 'maxonset':
                        split = split_ons_coda_maxonset(cons_string, onsets)
                    if split is None:
                        cur_coda_id = None
                        end_ind = i
                    else:
                        end_ind = i + split
                        cur_coda_id = phone_ids[end_ind]
                begin = phone_begins[begin_ind]
                end = phone_ends[end_ind]
                label = '.'.join(phones[begin_ind:end_ind + 1])
                row = {'id': cur_id, 'prev_id': prev_id,
                       'vowel_id': cur_vow_id,
                       'onset_id': cur_ons_id,
                       'label': label,
                       'type_id': make_type_id([label], self.corpus_name),
                       'coda_id': cur_coda_id,
                       'begin': begin, 'end': end}
                speaker_boundaries[s].append(row)
                prev_id = cur_id
        # Write this speaker's rows to CSV and bulk-import into the graph.
        syllables_data_to_csvs(self, speaker_boundaries)
        nonsyls_data_to_csvs(self, speaker_non_syls)
        import_syllable_csv(self, call_back, stop_check)
        import_nonsyl_csv(self, call_back, stop_check)
    if stop_check is not None and stop_check():
        return
    if call_back is not None:
        call_back('Cleaning up...')
    # prev_id was only needed to link syllables during import; drop it.
    self.execute_cypher(
        'MATCH (n:{}:syllable) where n.prev_id is not Null REMOVE n.prev_id'.format(self.cypher_safe_name))
    # Register the new annotation level and phone properties in the
    # hierarchy metadata.
    self.hierarchy.add_annotation_type('syllable', above=self.phone_name, below=self.word_name)
    self.hierarchy.add_token_labels(self, self.phone_name, ['onset', 'coda', 'nucleus'])
    self.hierarchy.add_token_properties(self, self.phone_name, [('syllable_position', str)])
    self.encode_hierarchy()
    if call_back is not None:
        call_back('Finished!')
        call_back(1, 1)
def get_keys(
        self, resource_group_name, account_name, custom_headers=None, raw=False, **operation_config):
    """Gets the account keys for the specified Batch account.

    This operation applies only to Batch accounts created with a
    poolAllocationMode of 'BatchService'. If the Batch account was created
    with a poolAllocationMode of 'UserSubscription', clients cannot use
    access to keys to authenticate, and must use Azure Active Directory
    instead. In this case, getting the keys will fail.

    :param resource_group_name: The name of the resource group that
     contains the Batch account.
    :type resource_group_name: str
    :param account_name: The name of the Batch account.
    :type account_name: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: BatchAccountKeys or ClientRawResponse if raw=true
    :rtype: ~azure.mgmt.batch.models.BatchAccountKeys or
     ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    # Construct URL
    # NOTE: autogenerated SDK code — the URL template lives on the method's
    # own metadata attribute, attached by the code generator.
    url = self.get_keys.metadata['url']
    path_format_arguments = {
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        # Batch account names are validated client-side: 3-24 chars matching the pattern.
        'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3, pattern=r'^[-\w\._]+$'),
        'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct parameters
    query_parameters = {}
    query_parameters['api-version'] = self._serialize.query(
        "self.api_version", self.api_version, 'str')

    # Construct headers
    header_parameters = {}
    header_parameters['Accept'] = 'application/json'
    if self.config.generate_client_request_id:
        # Time-based UUID used as a correlation id for server-side tracing.
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_parameters.update(custom_headers)
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header(
            "self.config.accept_language", self.config.accept_language, 'str')

    # Construct and send request
    request = self._client.post(url, query_parameters, header_parameters)
    response = self._client.send(request, stream=False, **operation_config)

    # Anything but 200 is surfaced as a CloudError carrying the service request id.
    if response.status_code not in [200]:
        exp = CloudError(response)
        exp.request_id = response.headers.get('x-ms-request-id')
        raise exp

    deserialized = None
    if response.status_code == 200:
        deserialized = self._deserialize('BatchAccountKeys', response)

    # raw=True hands back the wire response alongside the model.
    if raw:
        client_raw_response = ClientRawResponse(deserialized, response)
        return client_raw_response

    return deserialized
def _create_initial(self, resource_group_name, hana_instance_name, hana_instance_parameter, custom_headers=None, raw=False, **operation_config): # Construct URL url = self.create.metadata['url'] path_format_arguments = { 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'hanaInstanceName': self._serialize.url("hana_instance_name", hana_instance_name, 'str') } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} query_parameters['api-version'] = self._serialize.query( "self.api_version", self.api_version, 'str') # Construct headers header_parameters = {} header_parameters['Accept'] = 'application/json' header_parameters['Content-Type'] = 'application/json; charset=utf-8' if self.config.generate_client_request_id: header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) if custom_headers: header_parameters.update(custom_headers) if self.config.accept_language is not None: header_parameters['accept-language'] = self._serialize.header( "self.config.accept_language", self.config.accept_language, 'str') # Construct body body_content = self._serialize.body(hana_instance_parameter, 'HanaInstance') # Construct and send request request = self._client.put(url, query_parameters, header_parameters, body_content) response = self._client.send(request, stream=False, **operation_config) if response.status_code not in [200, 201]: raise models.ErrorResponseException(self._deserialize, response) deserialized = None if response.status_code == 200: deserialized = self._deserialize('HanaInstance', response) if response.status_code == 201: deserialized = self._deserialize('HanaInstance', response) if raw: client_raw_response = ClientRawResponse(deserialized, response) return client_raw_response return deserialized
def synchronize_auto_storage_keys(
        self, resource_group_name, account_name, custom_headers=None, raw=False, **operation_config):
    """Synchronizes access keys for the auto-storage account configured for
    the specified Batch account.

    :param resource_group_name: The name of the resource group that
     contains the Batch account.
    :type resource_group_name: str
    :param account_name: The name of the Batch account.
    :type account_name: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: None or ClientRawResponse if raw=true
    :rtype: None or ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    # Expand the URL template (attached to this method by the generator)
    # with the serialized, validated path pieces.
    template = self.synchronize_auto_storage_keys.metadata['url']
    path_args = {
        'resourceGroupName': self._serialize.url(
            "resource_group_name", resource_group_name, 'str'),
        'accountName': self._serialize.url(
            "account_name", account_name, 'str',
            max_length=24, min_length=3, pattern=r'^[-\w\._]+$'),
        'subscriptionId': self._serialize.url(
            "self.config.subscription_id", self.config.subscription_id, 'str'),
    }
    url = self._client.format_url(template, **path_args)

    # Query string carries only the API version.
    query_parameters = {
        'api-version': self._serialize.query(
            "self.api_version", self.api_version, 'str'),
    }

    # Assemble headers: optional correlation id, caller overrides, language.
    header_parameters = {}
    if self.config.generate_client_request_id:
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_parameters.update(custom_headers)
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header(
            "self.config.accept_language", self.config.accept_language, 'str')

    # POST and verify the expected 204 No Content.
    request = self._client.post(url, query_parameters, header_parameters)
    response = self._client.send(request, stream=False, **operation_config)

    if response.status_code not in [204]:
        error = CloudError(response)
        error.request_id = response.headers.get('x-ms-request-id')
        raise error

    if raw:
        return ClientRawResponse(None, response)
def internal_paging(next_link=None, raw=False): if not next_link: # Construct URL url = self.list_by_apis.metadata['url'] path_format_arguments = { 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'serviceName': self._serialize.url( "service_name", service_name, 'str', max_length=50, min_length=1, pattern=r'^[a-zA-Z](?:[a-zA-Z0-9-]*[a-zA-Z0-9])?$'), 'apiId': self._serialize.url("api_id", api_id, 'str', max_length=80, min_length=1, pattern=r'^[^*#&+:<>?]+$'), 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str') } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} if filter is not None: query_parameters['$filter'] = self._serialize.query( "filter", filter, 'str') if top is not None: query_parameters['$top'] = self._serialize.query("top", top, 'int', minimum=1) if skip is not None: query_parameters['$skip'] = self._serialize.query( "skip", skip, 'int', minimum=0) query_parameters['api-version'] = self._serialize.query( "self.api_version", self.api_version, 'str') else: url = next_link query_parameters = {} # Construct headers header_parameters = {} header_parameters['Accept'] = 'application/json' if self.config.generate_client_request_id: header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) if custom_headers: header_parameters.update(custom_headers) if self.config.accept_language is not None: header_parameters['accept-language'] = self._serialize.header( "self.config.accept_language", self.config.accept_language, 'str') # Construct and send request request = self._client.get(url, query_parameters, header_parameters) response = self._client.send(request, stream=False, **operation_config) if response.status_code not in [200]: raise models.ErrorResponseException(self._deserialize, response) return response
def update(
        self, resource_group_name, account_name, tags=None, auto_storage=None, custom_headers=None, raw=False, **operation_config):
    """Updates the properties of an existing Batch account.

    :param resource_group_name: The name of the resource group that
     contains the Batch account.
    :type resource_group_name: str
    :param account_name: The name of the Batch account.
    :type account_name: str
    :param tags: The user-specified tags associated with the account.
    :type tags: dict[str, str]
    :param auto_storage: The properties related to the auto-storage
     account.
    :type auto_storage: ~azure.mgmt.batch.models.AutoStorageBaseProperties
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: BatchAccount or ClientRawResponse if raw=true
    :rtype: ~azure.mgmt.batch.models.BatchAccount or
     ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    # Flat keyword arguments are folded into the model the service expects.
    parameters = models.BatchAccountUpdateParameters(
        tags=tags, auto_storage=auto_storage)

    # Construct URL
    url = self.update.metadata['url']
    path_format_arguments = {
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        # Batch account names are validated client-side: 3-24 chars matching the pattern.
        'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3, pattern=r'^[-\w\._]+$'),
        'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct parameters
    query_parameters = {}
    query_parameters['api-version'] = self._serialize.query(
        "self.api_version", self.api_version, 'str')

    # Construct headers
    header_parameters = {}
    header_parameters['Accept'] = 'application/json'
    header_parameters['Content-Type'] = 'application/json; charset=utf-8'
    if self.config.generate_client_request_id:
        # Time-based UUID correlation id for server-side tracing.
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_parameters.update(custom_headers)
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header(
            "self.config.accept_language", self.config.accept_language, 'str')

    # Construct body
    body_content = self._serialize.body(parameters, 'BatchAccountUpdateParameters')

    # Construct and send request
    request = self._client.patch(url, query_parameters, header_parameters, body_content)
    response = self._client.send(request, stream=False, **operation_config)

    # Anything but 200 is surfaced as a CloudError carrying the service request id.
    if response.status_code not in [200]:
        exp = CloudError(response)
        exp.request_id = response.headers.get('x-ms-request-id')
        raise exp

    deserialized = None
    if response.status_code == 200:
        deserialized = self._deserialize('BatchAccount', response)

    # raw=True hands back the wire response alongside the model.
    if raw:
        client_raw_response = ClientRawResponse(deserialized, response)
        return client_raw_response

    return deserialized
def regenerate_key(
        self, resource_group_name, account_name, key_name, custom_headers=None, raw=False, **operation_config):
    """Regenerates the specified account key for the Batch account.

    :param resource_group_name: The name of the resource group that
     contains the Batch account.
    :type resource_group_name: str
    :param account_name: The name of the Batch account.
    :type account_name: str
    :param key_name: The type of account key to regenerate. Possible
     values include: 'Primary', 'Secondary'
    :type key_name: str or ~azure.mgmt.batch.models.AccountKeyType
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: BatchAccountKeys or ClientRawResponse if raw=true
    :rtype: ~azure.mgmt.batch.models.BatchAccountKeys or
     ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    # The key selector is wrapped in the model the service expects.
    parameters = models.BatchAccountRegenerateKeyParameters(
        key_name=key_name)

    # Construct URL
    url = self.regenerate_key.metadata['url']
    path_format_arguments = {
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        # Batch account names are validated client-side: 3-24 chars matching the pattern.
        'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3, pattern=r'^[-\w\._]+$'),
        'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct parameters
    query_parameters = {}
    query_parameters['api-version'] = self._serialize.query(
        "self.api_version", self.api_version, 'str')

    # Construct headers
    header_parameters = {}
    header_parameters['Accept'] = 'application/json'
    header_parameters['Content-Type'] = 'application/json; charset=utf-8'
    if self.config.generate_client_request_id:
        # Time-based UUID correlation id for server-side tracing.
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_parameters.update(custom_headers)
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header(
            "self.config.accept_language", self.config.accept_language, 'str')

    # Construct body
    body_content = self._serialize.body(
        parameters, 'BatchAccountRegenerateKeyParameters')

    # Construct and send request
    request = self._client.post(url, query_parameters, header_parameters, body_content)
    response = self._client.send(request, stream=False, **operation_config)

    # Anything but 200 is surfaced as a CloudError carrying the service request id.
    if response.status_code not in [200]:
        exp = CloudError(response)
        exp.request_id = response.headers.get('x-ms-request-id')
        raise exp

    deserialized = None
    if response.status_code == 200:
        deserialized = self._deserialize('BatchAccountKeys', response)

    # raw=True hands back the wire response alongside the model.
    if raw:
        client_raw_response = ClientRawResponse(deserialized, response)
        return client_raw_response

    return deserialized
"""setup.py file.""" import uuid from setuptools import setup, find_packages from pip.req import parse_requirements __author__ = 'David Barroso <*****@*****.**>' install_reqs = parse_requirements('requirements.txt', session=uuid.uuid1()) reqs = [str(ir.req) for ir in install_reqs] setup( name="napalm-junos", version="0.12.0", packages=find_packages(), author="David Barroso, Mircea Ulinic", author_email="[email protected], [email protected]", description= "Network Automation and Programmability Abstraction Layer with Multivendor support", classifiers=[ 'Topic :: Utilities', 'Programming Language :: Python', 'Operating System :: POSIX :: Linux', 'Operating System :: MacOS', ], url="https://github.com/napalm-automation/napalm-junos", include_package_data=True, install_requires=reqs, )
def _create_initial(
        self, resource_group_name, account_name, parameters, custom_headers=None, raw=False, **operation_config):
    """Issue the initial PUT of the Batch account create operation.

    Private helper: it reads the URL template from the public ``create``
    method's metadata, so it is presumably invoked by a long-running
    ``create`` wrapper that polls to completion — confirm against the
    enclosing class. A 202 response leaves the body as ``None``; a 200
    response carries the 'BatchAccount' model plus the polling headers
    ('Location', 'Retry-After') declared in ``header_dict``.

    :param resource_group_name: Name of the resource group.
    :param account_name: Name of the Batch account (3-24 chars, validated).
    :param parameters: Request body, serialized as
     'BatchAccountCreateParameters'.
    :param dict custom_headers: Extra headers merged into the request.
    :param bool raw: If true, return the raw ``ClientRawResponse``.
    :raises: CloudError on any status other than 200/202.
    """
    # Construct URL
    url = self.create.metadata['url']
    path_format_arguments = {
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        # Batch account names are validated client-side: 3-24 chars matching the pattern.
        'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3, pattern=r'^[-\w\._]+$'),
        'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct parameters
    query_parameters = {}
    query_parameters['api-version'] = self._serialize.query(
        "self.api_version", self.api_version, 'str')

    # Construct headers
    header_parameters = {}
    header_parameters['Accept'] = 'application/json'
    header_parameters['Content-Type'] = 'application/json; charset=utf-8'
    if self.config.generate_client_request_id:
        # Time-based UUID correlation id for server-side tracing.
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_parameters.update(custom_headers)
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header(
            "self.config.accept_language", self.config.accept_language, 'str')

    # Construct body
    body_content = self._serialize.body(parameters, 'BatchAccountCreateParameters')

    # Construct and send request
    request = self._client.put(url, query_parameters, header_parameters, body_content)
    response = self._client.send(request, stream=False, **operation_config)

    # 200 = completed synchronously, 202 = accepted for async processing.
    if response.status_code not in [200, 202]:
        exp = CloudError(response)
        exp.request_id = response.headers.get('x-ms-request-id')
        raise exp

    deserialized = None
    header_dict = {}

    if response.status_code == 200:
        deserialized = self._deserialize('BatchAccount', response)
        # Headers the LRO poller uses to track the operation.
        header_dict = {
            'Location': 'str',
            'Retry-After': 'int',
        }

    if raw:
        client_raw_response = ClientRawResponse(deserialized, response)
        client_raw_response.add_headers(header_dict)
        return client_raw_response

    return deserialized
def unique(): temp = uuid.uuid1() return temp.hex
def simple_plot(resource, variable='air', lat='lat', lon='lon', timestep=0, output=None): """ Generates a nice and simple plot. """ print("Plotting {}, timestep {} ...".format(resource, timestep)) pl_data = Dataset(resource) pl_val = pl_data.variables[variable][timestep, :, :] pl_lat = pl_data.variables[lat][:] pl_lon = pl_data.variables[lon][:] fig = plt.figure() fig.set_size_inches(18.5, 10.5, forward=True) ax = plt.axes(projection=ccrs.PlateCarree()) ax.coastlines(linewidth=0.8) ax.gridlines() vmin = np.min(pl_val) vmax = np.max(pl_val) levels = np.linspace(vmin, vmax, 30) cmap = get_cmap("RdBu_r") data_map = ax.contourf(pl_lon, pl_lat, pl_val, levels=levels, extend='both', cmap=cmap, projection=ccrs.PlateCarree()) data_cbar = plt.colorbar(data_map, extend='both', shrink=0.6) data_cont = ax.contour(pl_lon, pl_lat, pl_val, levels=levels, linewidths=0.5, colors="white", linestyles='dashed', projection=ccrs.PlateCarree()) plt.clabel(data_cont, inline=1, fmt='%1.0f') title = 'Simple plot for %s' % (variable) plt.title(title) plt.tight_layout() if not output: output = 'myplot_%s.png' % (uuid.uuid1()) plt.savefig(output) fig.clf() plt.close(fig) print("Plot written to {}".format(output)) return output