def generate_valid_registration_data() -> list:
    """Return random registration fields: six letter-only strings, one digit
    string, one email, and the same password string repeated twice (so the
    password and its confirmation match)."""
    name_fields = [generate_random_string(6, 'letters') for _ in range(6)]
    phone = generate_random_string(6, 'digits')
    email = generate_random_email(6, 8, 'mix', 'mix')
    # One random string, used for both password and confirmation.
    password = generate_random_string(8, 'mix')
    return name_fields + [phone, email, password, password]
def context_api(mock_crud, context_tags):
    """Add a random API via CRUD and stash its id in the shared context."""
    ctx = context_tags
    api_label = generate_random_string()
    api_url = generate_random_string()
    api_description = ' '.join(generate_random_string() for _ in range(5))
    ctx['api_id'] = CRUD.addApi(
        api_label,
        api_url,
        description=api_description,
        tags=ctx.get('tags'),
        commit=False,
    )
    return ctx
def __init__(self, log, seed):
    """Initialize the load-test state: random pool/wallet names, counters
    and timing markers."""
    # Random names avoid collisions between concurrent test runs.
    self.pool_name = utils.generate_random_string(prefix="pool")
    self.wallet_name = utils.generate_random_string(prefix='wallet')
    # Handles are replaced when the pool/wallet are actually opened.
    self.pool_handle = self.wallet_handle = 0
    self.log = log
    self.seed = seed
    self.config = utils.parse_config()
    self.threads = list()
    # Request counters and wall-clock markers for the final report.
    self.passed_req = self.failed_req = 0
    self.start_time = self.finish_time = 0
    # -1 means "no transaction has been timed yet".
    self.fastest_txn = self.lowest_txn = -1
def create_initial_workteam(kfp_client, experiment_id, region, sagemaker_client,
                            test_file_dir, download_dir):
    """Create a workteam via a helper pipeline and return (name, arn)."""
    test_params = utils.load_params(
        utils.replace_placeholders(
            os.path.join(test_file_dir, "config.yaml"),
            os.path.join(download_dir, "config.yaml"),
        ))
    # Random prefix avoids clashes with existing workteams of the same name.
    test_params["Arguments"]["team_name"] = workteam_name = (
        utils.generate_random_string(5) + "-" +
        test_params["Arguments"]["team_name"])
    # First create a workteam using a separate pipeline and get the name,
    # arn of the workteam created.
    # NOTE(review): this passes test_params positionally where the
    # create_workteamjob defined in this file takes (experiment_id, region,
    # ...) — confirm the intended create_workteamjob variant is in scope.
    create_workteamjob(
        kfp_client,
        test_params,
        experiment_id,
        region,
        sagemaker_client,
        download_dir,
    )
    workteam_arn = sagemaker_utils.get_workteam_arn(sagemaker_client,
                                                    workteam_name)
    return workteam_name, workteam_arn
def create_workteamjob( kfp_client, experiment_id, region, sagemaker_client, test_file_dir, download_dir ): test_params = utils.load_params( utils.replace_placeholders( os.path.join(test_file_dir, "config.yaml"), os.path.join(download_dir, "config.yaml"), ) ) # Get the account, region specific user_pool and client_id for the SageMaker Workforce. ( test_params["Arguments"]["user_pool"], test_params["Arguments"]["client_id"], test_params["Arguments"]["user_groups"], ) = sagemaker_utils.get_cognito_member_definitions(sagemaker_client) # Generate random prefix for workteam_name to avoid errors if resources with same name exists test_params["Arguments"]["team_name"] = workteam_name = ( utils.generate_random_string(5) + "-" + test_params["Arguments"]["team_name"] ) _, _, workflow_json = kfp_client_utils.compile_run_monitor_pipeline( kfp_client, experiment_id, test_params["PipelineDefinition"], test_params["Arguments"], download_dir, test_params["TestName"], test_params["Timeout"], ) return workteam_name, workflow_json
def test_terminate_trainingjob(kfp_client, experiment_id, region, sagemaker_client):
    """Start a training-job pipeline, terminate the run mid-flight, and
    verify the SageMaker job moves to Stopping/Stopped."""
    test_file_dir = "resources/config/simple-mnist-training"
    download_dir = utils.mkdir(
        os.path.join(test_file_dir + "/generated_test_terminate"))
    test_params = utils.load_params(
        utils.replace_placeholders(
            os.path.join(test_file_dir, "config.yaml"),
            os.path.join(download_dir, "config.yaml"),
        ))
    # Random prefix avoids clashing with an existing job of the same name.
    input_job_name = test_params["Arguments"]["job_name"] = (
        utils.generate_random_string(4) + "-terminate-job")
    # Only wait (60s) for the run to reach "running" before terminating it.
    run_id, _, workflow_json = kfp_client_utils.compile_run_monitor_pipeline(
        kfp_client,
        experiment_id,
        test_params["PipelineDefinition"],
        test_params["Arguments"],
        download_dir,
        test_params["TestName"],
        60,
        "running",
    )
    print(
        f"Terminating run: {run_id} where Training job_name: {input_job_name}")
    kfp_client_utils.terminate_run(kfp_client, run_id)
    response = sagemaker_utils.describe_training_job(sagemaker_client,
                                                     input_job_name)
    assert response["TrainingJobStatus"] in ["Stopping", "Stopped"]
    utils.remove_dir(download_dir)
def get(self, exploration_id):
    """Populates the data on the individual exploration page."""
    version_str = self.request.get('v')
    version = int(version_str) if version_str else None

    try:
        exploration = exp_services.get_exploration_by_id(
            exploration_id, version=version)
    except Exception as e:
        raise self.PageNotFoundException(e)

    # Pick the category color, or the default when the category is unmapped.
    if exploration.category in feconf.CATEGORIES_TO_COLORS:
        info_card_color = feconf.CATEGORIES_TO_COLORS[exploration.category]
    else:
        info_card_color = feconf.DEFAULT_COLOR

    can_edit = (
        self.user_id and
        rights_manager.Actor(self.user_id).can_edit(exploration_id))

    payload = {
        'can_edit': can_edit,
        'exploration': exploration.to_player_dict(),
        'info_card_image_url': (
            '/images/gallery/exploration_background_%s_large.png'
            % info_card_color),
        'is_logged_in': bool(self.user_id),
        'session_id': utils.generate_random_string(24),
        'version': exploration.version,
    }
    self.values.update(payload)
    self.render_json(self.values)
def create(cls, name, phone, email, company_name, company_url, token=None, key=None):
    """Create and persist a user, generating a unique token and a random
    24-char key when they are not supplied; return the re-read instance."""
    global session
    if not token:
        # Token must be unique: it is used to re-read the row below.
        token = utils.get_unique_string()
        pass
    if not key:
        key = utils.generate_random_string(24)
        pass
    user = cls()
    user.name = name
    user.phone = phone
    user.email = email
    user.company_name = company_name
    user.company_url = company_url
    user.token = token
    user.key = key
    user.status = constants.StatusEnum.valid.value
    # session.add(user)
    # NOTE(review): add() above is commented out, yet flush() and the
    # query-by-token below assume the row reaches the database — confirm the
    # instance is attached to the session elsewhere (e.g. by model events).
    session.flush()
    # Re-read by the unique token so the returned object carries a DB id.
    new_user = session.query(cls).filter_by(token=token).one()
    logger.warn('create user. user.phone: %s, user.id: %s, user.key: %s, user.token: %s', new_user.phone, new_user.id, user.key, user.token)
    return new_user
def get(self, exploration_id):
    """Populates the data on the individual exploration page."""
    requested_version = self.request.get('v')
    version = int(requested_version) if requested_version else None

    try:
        exploration = exp_services.get_exploration_by_id(
            exploration_id, version=version)
    except Exception as e:
        raise self.PageNotFoundException(e)

    # Fall back to the default color for categories without a mapping.
    info_card_color = feconf.CATEGORIES_TO_COLORS.get(
        exploration.category, feconf.DEFAULT_COLOR)

    can_edit = (
        self.user_id and
        rights_manager.Actor(self.user_id).can_edit(
            rights_manager.ACTIVITY_TYPE_EXPLORATION, exploration_id))

    self.values.update({
        'can_edit': can_edit,
        'exploration': exploration.to_player_dict(),
        'info_card_image_url': (
            '/images/gallery/exploration_background_%s_large.png'
            % info_card_color),
        'is_logged_in': bool(self.user_id),
        'session_id': utils.generate_random_string(24),
        'version': exploration.version,
    })
    self.render_json(self.values)
def test_defaults(self):
    """Test the create_default_story and create_default_story_node method
    of class Story.
    """
    topic_id = utils.generate_random_string(12)
    story = story_domain.Story.create_default_story(
        self.STORY_ID, 'Title', topic_id)
    expected_contents = {
        'nodes': [],
        'initial_node_id': None,
        'next_node_id': self.NODE_ID_1,
    }
    expected_story_dict = {
        'id': self.STORY_ID,
        'title': 'Title',
        'description': feconf.DEFAULT_STORY_DESCRIPTION,
        'notes': feconf.DEFAULT_STORY_NOTES,
        'story_contents': expected_contents,
        'story_contents_schema_version': (
            feconf.CURRENT_STORY_CONTENTS_SCHEMA_VERSION),
        'language_code': constants.DEFAULT_LANGUAGE_CODE,
        'corresponding_topic_id': topic_id,
        'version': 0,
    }
    self.assertEqual(story.to_dict(), expected_story_dict)
async def build_schema_req(args: dict):
    """
    Build ADD schema request.

    :param args: arguments for building ADD schema request.
    :return: schema request, request info.
    """
    submitter_did = args['submitter_did']
    try:
        # Minimal schema payload: one attribute, fixed version, random name.
        data = {
            'name': utils.generate_random_string(prefix='test'),
            'version': '1.0',
            'attr_names': ['test']
        }
        utils.print_header("\n======= Build schema request =======")
        schema_req = await ledger.build_schema_request(submitter_did,
                                                       json.dumps(data))
        # The request info mirrors what a later GET would need: drop
        # attr_names and record the submitter as the schema's 'dest'.
        del data['attr_names']
        data['dest'] = submitter_did
        req_info = json.dumps({'kind': 'schema', 'data': data})
        req = json.dumps({'request': schema_req})
        return req, req_info
    except Exception as e:
        utils.force_print_error_to_console(
            "Cannot build schema request. Skip building...")
        utils.force_print_error_to_console(str(e))
        # NOTE(review): failure returns a single "" while success returns a
        # (req, req_info) tuple — confirm callers handle both shapes.
        return ""
def setUp(self):
    """Create a fresh story (with a random topic id) and a signed-up user."""
    super(StoryRightsChangeTests, self).setUp()
    self.STORY_ID = story_services.get_new_story_id()
    self.TOPIC_ID = utils.generate_random_string(12)
    story_args = (self.STORY_ID, 'user_id', 'Title', 'Description',
                  'Notes', self.TOPIC_ID)
    self.story = self.save_new_story(*story_args)
    self.signup('*****@*****.**', 'user')
def test_transform_job(
    kfp_client,
    experiment_id,
    s3_client,
    sagemaker_client,
    s3_data_bucket,
    test_file_dir,
):
    """Run a batch-transform pipeline and verify the job completed, the
    output location matches, and the transformed file exists in S3."""
    download_dir = utils.mkdir(os.path.join(test_file_dir + "/generated"))
    test_params = utils.load_params(
        utils.replace_placeholders(
            os.path.join(test_file_dir, "config.yaml"),
            os.path.join(download_dir, "config.yaml"),
        ))
    # Generate random prefix for model, job name to avoid errors if
    # resources with same name exists
    test_params["Arguments"]["model_name"] = test_params["Arguments"][
        "job_name"] = input_job_name = (utils.generate_random_string(5) + "-" +
                                       test_params["Arguments"]["model_name"])
    print(f"running test with model/job name: {input_job_name}")
    # Generate unique location for output since output filename is generated
    # according to the content_type
    test_params["Arguments"]["output_location"] = os.path.join(
        test_params["Arguments"]["output_location"], input_job_name)
    _, _, workflow_json = kfp_client_utils.compile_run_monitor_pipeline(
        kfp_client,
        experiment_id,
        test_params["PipelineDefinition"],
        test_params["Arguments"],
        download_dir,
        test_params["TestName"],
        test_params["Timeout"],
    )
    outputs = {"sagemaker-batch-transformation": ["output_location"]}
    output_files = minio_utils.artifact_download_iterator(
        workflow_json, outputs, download_dir)
    # Verify Job was successful on SageMaker
    response = sagemaker_utils.describe_transform_job(sagemaker_client,
                                                      input_job_name)
    assert response["TransformJobStatus"] == "Completed"
    assert response["TransformJobName"] == input_job_name
    # Verify output location from pipeline matches job output and that the
    # transformed file exists
    output_location = utils.read_from_file_in_tar(
        output_files["sagemaker-batch-transformation"]["output_location"])
    print(f"output location: {output_location}")
    assert output_location == response["TransformOutput"]["S3OutputPath"]
    # Get relative path of file in S3 bucket
    # URI is following format s3://<bucket_name>/relative/path/to/file
    # split below is to extract the part after bucket name
    file_key = os.path.join("/".join(output_location.split("/")[3:]),
                            test_params["ExpectedOutputFile"])
    assert s3_utils.check_object_exists(s3_client, s3_data_bucket, file_key)
    utils.remove_dir(download_dir)
def _get(self):
    """Create n instances of the ndb model named by ?class=, filling each
    supported property with a random value; ?prop_<name>=<value> query
    params override individual properties."""
    importStr = self.request.get("class")
    times = self.get_n()
    prop_values = {}
    data = self.request.GET.dict_of_lists()
    # Collect explicit overrides: "prop_foo=bar" forces property foo to bar.
    for k, l in data.iteritems():
        if k.startswith("prop_"):
            v = l[0]
            prop = k[5:]  # strip the "prop_" prefix
            prop_values[prop] = v
    cls = class_import(importStr)
    # Property class -> random-value generator, tried in declaration order.
    types = [
        (ndb.IntegerProperty, lambda: choice([i for i in range(0, 9)])),
        (ndb.StringProperty, lambda: utils.generate_random_string(8)),
        (ndb.BooleanProperty, lambda: choice([True, False])),
        (ndb.DateTimeProperty, lambda: datetime.datetime.now()),
    ]
    for unused_i in range(times):
        model = cls()
        for prop, val in model._properties.items():
            for t in types:
                if isinstance(val, t[0]):
                    setattr(model, prop, t[1]())
                    break
            # Query-param overrides win over the random value.
            if prop in prop_values:
                setattr(model, prop, prop_values[prop])
        model.put(force_validation=False)
    self.response.out.write("OK")
def send_request(BASE_URL):
    """Issue one GET to a random path; most requests carry a random
    user-id cookie (user_id 0 sends no cookie)."""
    opener = urllib2.build_opener()
    user_id = random.randint(0, 12)
    if user_id:
        opener.addheaders.append(('Cookie', 'id=%s' % user_id))
    random_path = generate_random_string(random.randint(0, 20))
    opener.open(urljoin(BASE_URL, random_path))
def create_login_token(cls, login, password):
    """Return a fresh 8-hour session token for valid credentials, else None."""
    person = cls.get_by_login_and_password(login, password)
    if not person:
        return None
    token = utils.generate_random_string(48)
    eight_hours = 60 * 60 * 8
    memcache.set(cls._get_memcache_key_for_token(token),
                 str(person.key()), time=eight_hours)
    return token
def context_endpoint(mock_crud, context_api):
    """Add a random endpoint under the context's API; store its id."""
    ctx = context_api
    parent_api_id = ctx.get('api_id')
    endpoint_label = generate_random_string()
    endpoint_url = generate_random_string()
    endpoint_description = ' '.join(
        generate_random_string() for _ in range(5))
    ctx['endpoint_id'] = CRUD.addEndpoint(
        parent_api_id,
        endpoint_label,
        endpoint_url,
        endpoint_description,
        tags=ctx.get('tags'),
        commit=False,
    )
    return ctx
def test_create_endpoint(kfp_client, experiment_id, boto3_session,
                         sagemaker_client, test_file_dir):
    """Deploy a model endpoint through the pipeline, verify it is InService
    and serves a correct prediction; always delete the endpoint afterwards."""
    download_dir = utils.mkdir(os.path.join(test_file_dir + "/generated"))
    test_params = utils.load_params(
        utils.replace_placeholders(
            os.path.join(test_file_dir, "config.yaml"),
            os.path.join(download_dir, "config.yaml"),
        ))
    # Generate random prefix for model, endpoint config and endpoint name
    # to avoid errors if resources with same name exists
    test_params["Arguments"]["model_name"] = test_params["Arguments"][
        "endpoint_config_name"] = test_params["Arguments"][
            "endpoint_name"] = input_endpoint_name = (
                utils.generate_random_string(5) + "-" +
                test_params["Arguments"]["model_name"])
    print(f"running test with model/endpoint name: {input_endpoint_name}")
    _, _, workflow_json = kfp_client_utils.compile_run_monitor_pipeline(
        kfp_client,
        experiment_id,
        test_params["PipelineDefinition"],
        test_params["Arguments"],
        download_dir,
        test_params["TestName"],
        test_params["Timeout"],
    )
    try:
        outputs = {"sagemaker-deploy-model": ["endpoint_name"]}
        output_files = minio_utils.artifact_download_iterator(
            workflow_json, outputs, download_dir)
        output_endpoint_name = utils.read_from_file_in_tar(
            output_files["sagemaker-deploy-model"]["endpoint_name"],
            "endpoint_name.txt")
        print(f"endpoint name: {output_endpoint_name}")
        # Verify output from pipeline is endpoint name
        assert output_endpoint_name == input_endpoint_name
        # Verify endpoint is running
        assert (sagemaker_utils.describe_endpoint(
            sagemaker_client, input_endpoint_name)["EndpointStatus"] ==
            "InService")
        # Validate the model for use by running a prediction
        result = run_predict_mnist(boto3_session, input_endpoint_name,
                                   download_dir)
        print(f"prediction result: {result}")
        assert json.dumps(result, sort_keys=True) == json.dumps(
            test_params["ExpectedPrediction"], sort_keys=True)
        # Downloaded artifacts are only removed on success, so failures
        # leave them behind for debugging.
        utils.remove_dir(download_dir)
    finally:
        # delete endpoint
        sagemaker_utils.delete_endpoint(sagemaker_client, input_endpoint_name)
def _get(self):
    """Bulk-create N random replies under the forum thread given by
    ?parent=<thread id>."""
    parent = int(self.request.get("parent"))
    master = forum.ThreadModel.get_by_id(parent)
    for i in range(0, self.get_n()):
        # Each reply gets 50 random characters; the author is whichever
        # user the datastore query happens to return first.
        forum.ReplyModel(parent=master.key,
                         content=utils.generate_random_string(50),
                         author=user.UserModel.query().get().key) \
            .put(master=master, pagination=master.default_pagination())
    self.response.out.write("OK")
def qr_code_page(request, r=None):
    """Render the QR auth page, caching a salted hash of the one-time code
    (keyed "qrauth_<hash>") against the current user id."""
    auth_code = generate_random_string(50)
    cache_key = "qrauth_" + salted_hash(auth_code)
    r.setex(cache_key, AUTH_QR_CODE_EXPIRATION_TIME, request.user.id)
    return render_to_response(
        "qrauth/page.html",
        {"auth_code": auth_code},
        context_instance=RequestContext(request))
def default_configs():
    """Return the initial site configuration values (name, value,
    user-editable flag)."""
    # The session key is random per install and not user-editable.
    session_secret = utils.generate_random_string(30)
    return [
        config.StringValue("site_name", "Name", True),
        config.StringValue("session_secret_key", session_secret, False),
        config.StringValue("admin_email", "*****@*****.**", True),
        config.BooleanValue("user_email_confirm", False, True),
        config.IntegerValue("blog_comments_per_page", 20, True),
        config.IntegerValue("forum_replies_per_page", 20, True),
    ]
def setUp(self):
    """Create a story with one node and register two test users."""
    super(StoryDomainUnitTests, self).setUp()
    self.STORY_ID = story_services.get_new_story_id()
    self.TOPIC_ID = utils.generate_random_string(12)
    self.story = self.save_new_story(
        self.STORY_ID, self.USER_ID, 'Title', 'Description', 'Notes',
        self.TOPIC_ID)
    self.story.add_node(self.NODE_ID_1, 'Node title')
    for email, username in (('*****@*****.**', 'user'),
                            ('*****@*****.**', 'user1')):
        self.signup(email, username)
def context_field(mock_crud, context_endpoint):
    """Add a random field to the endpoint in context; store its id."""
    ctx = context_endpoint
    target_endpoint_id = ctx.get('endpoint_id')
    field_label = generate_random_string()
    field_type = generate_random_string()
    field_description = ' '.join(generate_random_string() for _ in range(5))
    field_default = generate_random_string()
    is_required = random.choice([True, False])
    ctx['field_id'] = CRUD.addEndpointField(
        target_endpoint_id,
        field_label,
        field_type,
        description=field_description,
        default=field_default,
        required=is_required,
        commit=False,
    )
    return ctx
def test_workteamjob(
    kfp_client, experiment_id, region, sagemaker_client, test_file_dir
):
    """Create a workteam through the pipeline, verify it exists in SageMaker
    and that the ARN artifact matches; clean the workteam up afterwards."""
    download_dir = utils.mkdir(os.path.join(test_file_dir + "/generated"))
    test_params = utils.load_params(
        utils.replace_placeholders(
            os.path.join(test_file_dir, "config.yaml"),
            os.path.join(download_dir, "config.yaml"),
        )
    )
    # Generate random prefix for workteam_name to avoid errors if resources
    # with same name exists
    test_params["Arguments"]["team_name"] = workteam_name = (
        utils.generate_random_string(5) + "-" + test_params["Arguments"]["team_name"]
    )
    try:
        # NOTE(review): called with test_params and a single return value;
        # the create_workteamjob defined in this file takes
        # (experiment_id, region, ...) and returns a 2-tuple — confirm
        # which variant is actually in scope here.
        workflow_json = create_workteamjob(
            kfp_client,
            test_params,
            experiment_id,
            region,
            sagemaker_client,
            download_dir,
        )
        outputs = {"sagemaker-private-workforce": ["workteam_arn"]}
        output_files = minio_utils.artifact_download_iterator(
            workflow_json, outputs, download_dir
        )
        response = sagemaker_utils.describe_workteam(sagemaker_client, workteam_name)
        # Verify WorkTeam was created in SageMaker
        assert response["Workteam"]["CreateDate"] is not None
        assert response["Workteam"]["WorkteamName"] == workteam_name
        # Verify WorkTeam arn artifact was created in Minio and matches the
        # one in SageMaker
        workteam_arn = utils.read_from_file_in_tar(
            output_files["sagemaker-private-workforce"]["workteam_arn"]
        )
        assert response["Workteam"]["WorkteamArn"] == workteam_arn
    finally:
        workteams = sagemaker_utils.list_workteams(sagemaker_client)["Workteams"]
        workteam_names = list(map((lambda x: x["WorkteamName"]), workteams))
        # Check workteam was successfully created
        if workteam_name in workteam_names:
            sagemaker_utils.delete_workteam(sagemaker_client, workteam_name)
        # Delete generated files only if the test is successful
        utils.remove_dir(download_dir)
def test_edit_endpoint_by_id(mock_crud, context_endpoint):
    """Edited endpoint attributes must be persisted and readable back."""
    print('\n=> Testing editing endpoint by id')
    endpoint_id = context_endpoint.get('endpoint_id')
    new_label = generate_random_string()
    new_url = generate_random_string()
    new_description = ' '.join(generate_random_string() for _ in range(5))
    new_tags = ' '.join(generate_random_string() for _ in range(5))

    result = CRUD.editEndpoint(
        endpoint_id,
        label=new_label,
        url=new_url,
        description=new_description,
        tags=new_tags
    )
    assert result > 0

    endpoint = CRUD.getEndpoint(endpoint_id)
    assert endpoint.label == new_label
    assert endpoint.url == new_url
    assert endpoint.description == new_description
    assert {t.text for t in endpoint.tags} == set(new_tags.split())
def after_model_change(self, form, model, is_created):
    """On creation: grant the basic role, set a random password, and leave
    the account inactive until it is verified."""
    if not is_created:
        return
    basic_role = Role.query.filter_by(name='basic').first()
    model.roles.append(basic_role)
    random_password = generate_random_string()
    model.password = hash_password(random_password)
    model.active = False
    model.generate_verification_code()
    # todo send creation email
    # todo send verification email
    db.session.add(model)
    db.session.commit()
def test_update_user(self):
    """Updating a user's username (without contact details) returns 200."""
    # get existing User
    user = user_api.read_all()[0]
    # update username
    user['username'] = utils.generate_random_string(10)
    # contact details are deliberately excluded from the update payload
    user.pop('contact_details')
    data, status = user_api.update(user.get('user_id'), user)
    self.assertEqual(status, 200)
def test_edit_api_by_id(mock_crud, context_api):
    """Edited API attributes must be persisted and readable back."""
    print('\n=> Testing editing api by id')
    api_id = context_api.get('api_id')
    new_label = generate_random_string()
    new_url = generate_random_string()
    new_description = ' '.join(generate_random_string() for _ in range(5))
    new_tags = ' '.join(generate_random_string() for _ in range(5))

    result = CRUD.editApi(
        api_id,
        label=new_label,
        url=new_url,
        description=new_description,
        tags=new_tags
    )
    assert result > 0

    api = CRUD.getApi(api_id)
    assert api.label == new_label
    assert api.url == new_url
    assert api.description == new_description
    assert {tag.text for tag in api.tags} == set(new_tags.split())
async def build_several_getting_req_to_files(self, args, req_kind,
                                             number_of_file, data_files: list):
    """
    Build several GET requests and write them round-robin to temp files.

    :param args: contain all necessary arguments to build a request
                 (pool_handle, wallet_handle, submitter_did)
    :param req_kind: kind of GET request (schema, nym, attribute, claim).
    :param number_of_file: number of temporary files to spread the requests
                           across (round-robin).
    :param data_files: list of files that store request information.
    :return: list of temporary file names.
    """
    utils.print_header("\n\tBuilding several get {} requests..."
                       .format(req_kind))
    if not self.log:
        utils.start_capture_console()

    req_builder = RequestBuilder.get_getting_req_builder(req_kind)

    files = list()
    lst_opened_files = list()
    file_iter = 0
    for data_file_path in data_files:
        with open(data_file_path, 'r') as data_file:
            for line in data_file:
                # Skip blank separator lines in the data file.
                if str(line) == '\n':
                    continue
                req = await req_builder(args, json.dumps(line))
                # Wrap back to the first file once the configured number of
                # files has been reached (round-robin distribution).
                if file_iter >= number_of_file:
                    file_iter = 0
                # Output files are opened lazily on first use.
                if file_iter >= len(lst_opened_files):
                    file_name = utils.generate_random_string(
                        suffix='_{}.txt'.format(str(time.time())))
                    temp_file = open(file_name, 'w')
                    lst_opened_files.append(temp_file)
                    files.append(file_name)
                print(req, file=lst_opened_files[file_iter])
                file_iter += 1

    for file in lst_opened_files:
        file.close()

    if not self.log:
        utils.stop_capture_console()
    utils.print_header("\n\tBuilding request complete")
    return files
def test_detokenize(self):
    """Round-trip a secret through tokenize/detokenize and check the
    returned metadata; a corrupted token must be rejected."""
    secret = generate_random_string(101)
    resp = self._create_new_token(secret)
    token = resp.findtext('token')

    # A corrupted token is rejected with a specific error code.
    resp = self._unwrap_token(token + 'xxx')
    self.assertEqual('error', resp.findtext('status'))
    self.assertEqual('token_not_found', resp.findtext('status_code'))

    # The genuine token unwraps back to the original secret.
    resp = self._unwrap_token(token)
    self.assertEqual('success', resp.findtext('status'))
    self.assertEqual(secret, resp.findtext('plain_text'))
    # Bug fix: assertTrue(x, y) treats y as a failure *message* and always
    # passes for truthy x — these three were intended as equality checks.
    self.assertEqual('mydomain', resp.findtext('domain'))
    self.assertEqual(19, len(resp.findtext('expire_ts')))
    self.assertEqual('someone@somedomain', resp.findtext('notify_email'))
def qr_code_page(request, r=None):
    """Show the QR login page; store the hashed one-time code for this
    user with a limited TTL."""
    auth_code = generate_random_string(50)
    auth_code_hash = salted_hash(auth_code)
    redis_key = "".join(["qrauth_", auth_code_hash])
    r.setex(redis_key, AUTH_QR_CODE_EXPIRATION_TIME, request.user.id)
    template_context = {"auth_code": auth_code}
    return render_to_response("qrauth/page.html", template_context,
                              context_instance=RequestContext(request))
def test_corresponding_topic_id_validation(self):
    """corresponding_topic_id must validate only when it is a string."""
    # A randomly generated 12-char id is a valid (string) topic id.
    good_topic_id = utils.generate_random_string(12)
    self.assertTrue(isinstance(good_topic_id, python_utils.BASESTRING))
    self.story.corresponding_topic_id = good_topic_id
    self.story.validate()

    # A non-string id must be rejected with a descriptive error.
    bad_topic_id = 123
    self.story.corresponding_topic_id = bad_topic_id
    self._assert_validation_error(
        'Expected corresponding_topic_id should be a string, received: %s'
        % (bad_topic_id))
def test_no_auth(self):
    """Every bad/missing-credential payload is rejected with auth_error."""
    secret = generate_random_string(333)
    bad_credentials = (
        {'user': '******'},
        {'user': '******'},
        {'passwd': 'asdfg'},
        {'domain': 'oebs'},
    )
    for creds in bad_credentials:
        resp = self._create_new_token(secret, creds)
        self.assertEqual('error', resp.findtext('status'))
        self.assertEqual('auth_error', resp.findtext('status_code'))
def get(self, exploration_id): """Populates the data on the individual exploration page.""" # TODO(sll): Maybe this should send a complete state machine to the # frontend, and all interaction would happen client-side? version = self.request.get('v') version = int(version) if version else None try: exploration = exp_services.get_exploration_by_id(exploration_id, version=version) except Exception as e: raise self.PageNotFoundException(e) init_params = exploration.get_init_params() reader_params = exploration.update_with_state_params( exploration.init_state_name, init_params) init_state = exploration.init_state interactive_widget = widget_registry.Registry.get_widget_by_id( feconf.INTERACTIVE_PREFIX, init_state.widget.widget_id) interactive_html = interactive_widget.get_interactive_widget_tag( init_state.widget.customization_args, reader_params) session_id = utils.generate_random_string(24) self.values.update({ 'is_logged_in': bool(self.user_id), 'init_html': init_state.content[0].to_html(reader_params), 'interactive_html': interactive_html, 'params': reader_params, 'state_history': [exploration.init_state_name], 'state_name': exploration.init_state_name, 'title': exploration.title, 'session_id': session_id, }) self.render_json(self.values) event_services.StateHitEventHandler.record(exploration_id, exploration.init_state_name, True) event_services.StartExplorationEventHandler.record( exploration_id, version, exploration.init_state_name, session_id, reader_params, feconf.PLAY_TYPE_NORMAL)
async def build_claim_req(args: dict):
    """
    Build ADD claim request.

    :param args: arguments to build ADD claim request
                 (pool_handle, wallet_handle, submitter_did).
    :return: claim request, request info.
    """
    import string
    import random
    pool_handle = args['pool_handle']
    wallet_handle = args['wallet_handle']
    submitter_did = args['submitter_did']
    try:
        # A claim definition needs its own issuer DID on the ledger, so
        # create one and publish it with a NYM request first.
        utils.print_header("\n======= Create did =======")
        did, verkey = await signus.create_and_store_my_did(wallet_handle, '{}')

        utils.print_header("\n======= Build nym request =======")
        nym_req = await ledger.build_nym_request(submitter_did, did, verkey,
                                                 None, None)

        utils.print_header("\n======= Send nym request =======")
        await ledger.sign_and_submit_request(pool_handle, wallet_handle,
                                             submitter_did, nym_req)

        seq_no = random.randint(1, 1000000)
        signature_type = 'CL'
        # Random digit strings stand in for the CL primary-key fields.
        data = {"primary": {
            "n": utils.generate_random_string(characters=string.digits),
            "s": utils.generate_random_string(characters=string.digits),
            "rms": utils.generate_random_string(characters=string.digits),
            "r": {"name": utils.generate_random_string(
                characters=string.digits)},
            "rctxt": utils.generate_random_string(
                characters=string.digits),
            "z": utils.generate_random_string(characters=string.digits)}}

        utils.print_header("\n======= Build claim request =======")
        claim_req = await ledger.build_claim_def_txn(did, seq_no,
                                                     signature_type,
                                                     json.dumps(data))
        req_info = json.dumps({'kind': 'claim',
                               'data': {'issuer_did': did,
                                        'seq_no': seq_no,
                                        'signature_type': signature_type}})
        req = json.dumps({'request': claim_req, 'submitter_did': did})
        return req, req_info
    except Exception as e:
        utils.force_print_error_to_console(
            "Cannot build claim request. Skip building...")
        utils.force_print_error_to_console(str(e))
        # NOTE(review): single "" on failure vs tuple on success — confirm
        # callers handle both shapes.
        return ""
def test_edit_field_by_id(mock_crud, context_field):
    """Edited field attributes must be persisted and readable back."""
    print('\n=> Testing editing field by id')
    field_id = context_field.get('field_id')
    new_label = generate_random_string()
    new_type = generate_random_string()
    new_description = ' '.join(generate_random_string() for _ in range(5))
    new_default = generate_random_string()
    new_required = random.choice([True, False])

    result = CRUD.editField(
        field_id,
        label=new_label,
        type_field=new_type,
        description=new_description,
        default=new_default,
        required=new_required,
        commit=False
    )
    assert result > 0

    field = CRUD.getField(field_id)
    assert field.label == new_label
    assert field.type == new_type
    assert field.description == new_description
    assert field.default == new_default
    assert field.required == new_required
def test_cinder_volume_created(host):
    """Verify cinder volume can be created"""
    # Create a uniquely named test volume.
    volume_name = "test_volume_{}".format(utils.generate_random_string(4))
    # NOTE(review): the trailing "'" in this command string looks
    # unbalanced — confirm against how utility_container is quoted.
    create_cmd = "{} openstack volume create --size 1 --availability-zone nova {}'".format(
        utility_container, volume_name)
    host.run_expect([0], create_cmd)
    # The new volume must show up in the volume listing.
    assert volume_name in utils.openstack_name_list('volume', host)
    # Tear down
    utils.delete_volume(volume_name, host)
async def build_several_adding_req_to_files(self, args: dict, req_kind,
                                            number_of_file, number_of_req):
    """
    Build several ADD request and write them to list of temporary files.

    :param args: contain all necessary arguments to build a request
                 (pool_handle, wallet_handle, submitter_did)
    :param req_kind: kind of ADD request (schema, nym, attribute, claim).
    :param number_of_file: number of temporary file you want to store
                           requests. Number of request will be divided
                           equally among temp files.
    :param number_of_req: total of requests you want to build.
    :return: list of temporary file name.
    """
    utils.print_header("\n\tBuilding several {} requests..."
                       .format(req_kind))
    if not self.log:
        utils.start_capture_console()

    # Split the total request count evenly into per-file workloads.
    works = RequestBuilder.divide(number_of_file, number_of_req)
    req_builder = RequestBuilder.get_adding_req_builder(req_kind)

    files = list()
    print(self.req_info_file_path)
    req_info_file = open(self.req_info_file_path, "w")
    for work in works:
        # Timestamp suffix keeps the temp file names unique per batch.
        file_name = utils.generate_random_string(
            suffix='_{}.txt'.format(str(time.time())))
        file_name = os.path.join(self.path, file_name)
        temp_file = open(file_name, "w")
        utils.print_ok_green(str(work))
        for i in range(work):
            # req is a (request, request_info) pair: the info goes to the
            # shared info file, the request to this batch's temp file.
            req = await req_builder(args)
            print(req[1], file=req_info_file)
            print(req[0], file=temp_file)
        temp_file.close()
        files.append(file_name)

    req_info_file.close()

    if not self.log:
        utils.stop_capture_console()

    utils.print_header("\n\tBuilding request complete")
    return files
def perform_installation(*args, **kwds):
    """One-time site install: set up RBAC, a default admin account, and the
    base configuration entries."""
    #Set up Role-based Access Control
    install_rbac();
    q = UserModel.query(UserModel.username=="admin").get()
    if not q:
        # No admin yet: create one (placeholder credentials) and grant the
        # super_admin role.
        model = UserModel(username="******", password="******",
                          email="*****@*****.**", verified=True)
        model.put(force_validation=False)
        rbac.add_role(model.key, rbac.default_role("super_admin"))
    #Configurations
    # Each tuple is (key, value, user-editable flag).
    conf = [
        ("site_name", "Name", True),
        ("session_secret_key", utils.generate_random_string(30), False),
        ("admin_email", "*****@*****.**", True),
        ("user_email_confirm", "no", True),
    ]
    for item in conf:
        config.update_config(item[0], item[1], item[2])
def get(self, exploration_id): """Populates the data on the individual exploration page.""" # TODO(sll): Maybe this should send a complete state machine to the # frontend, and all interaction would happen client-side? version = self.request.get('v') version = int(version) if version else None try: exploration = exp_services.get_exploration_by_id( exploration_id, version=version) except Exception as e: raise self.PageNotFoundException(e) init_params = exploration.get_init_params() reader_params = exploration.update_with_state_params( exploration.init_state_name, init_params) init_state = exploration.init_state interactive_widget = widget_registry.Registry.get_widget_by_id( feconf.INTERACTIVE_PREFIX, init_state.widget.widget_id) interactive_html = interactive_widget.get_interactive_widget_tag( init_state.widget.customization_args, reader_params) session_id = utils.generate_random_string(24) self.values.update({ 'is_logged_in': bool(self.user_id), 'init_html': init_state.content[0].to_html(reader_params), 'interactive_html': interactive_html, 'params': reader_params, 'state_history': [exploration.init_state_name], 'state_name': exploration.init_state_name, 'title': exploration.title, 'session_id': session_id, }) self.render_json(self.values) event_services.StateHitEventHandler.record( exploration_id, exploration.init_state_name, True) event_services.StartExplorationEventHandler.record( exploration_id, version, exploration.init_state_name, session_id, reader_params, feconf.PLAY_TYPE_NORMAL)
def __init__(self, files):
    """Build the resource map: load every resource file, index resources by
    disk path, uri and provided tag, then synthesize module resources and
    package uris."""
    super(FlurryMapWritable, self).__init__()
    map_version = utils.generate_random_string(2)

    # Load every resource (one file may expand to several inner resources).
    id = 1
    for resource_file in files:
        sys.stdout.write(".")
        resource = FlurryResourceWritable(resource_file, map_version)
        for inner_resource in resource.build():
            id = id + 1
            self.resources[id] = inner_resource

    # Index resources by disk path, uri and tag; group tags per module.
    for id, resource in self.resources.iteritems():
        sys.stdout.write(".")
        disk = resource['disk']
        self.resources_disk[disk] = id
        uri = resource['uri']
        # Bug fix: this previously keyed the uri index by the disk path
        # (resources_uri[disk] = id), leaving the computed `uri` unused.
        self.resources_uri[uri] = id
        if len(resource['provides']) > 0:
            tag = resource['provides']
            self.resources_tag[tag] = id
        if len(resource['module']) > 0:
            # NOTE(review): `tag` is only assigned when 'provides' is
            # non-empty; a resource with modules but no provides would reuse
            # the previous iteration's tag (or NameError on the first) —
            # confirm whether module entries always provide a tag.
            for module in resource['module']:
                if not module in self.modules:
                    self.modules[module] = []
                self.modules[module].append(tag)

    # Build module resources
    for module, resources in self.modules.iteritems():
        id = id + 1
        mod = FlurryResource()
        mod.requires = resources
        mod.type = "module"
        self.resources[id] = mod.toData()
        self.resources_tag[module] = id

    # Map versioned package uris to their configured resource lists.
    for module, resources in settings.packages.iteritems():
        uri = "/rsrc/v1/" + map_version + "/r/" + utils.md5_for_string(module)[2:10] + "." + utils.get_file_type(module)
        self.packages[uri] = resources
def _get(self):
    """Create n (?n=) instances of the ndb model named by ?class=, filling
    each supported property with a random value."""
    class_path = self.request.get("class")
    try:
        count = int(self.request.get("n"))
    except ValueError:
        count = 1
    cls = class_import(class_path)
    # Property class -> random-value generator, tried in order.
    random_value_by_type = [
        (ndb.StringProperty, lambda: utils.generate_random_string(8)),
        (ndb.BooleanProperty, lambda: choice([True, False])),
        (ndb.DateTimeProperty, lambda: datetime.datetime.now()),
    ]
    for unused_i in range(count):
        model = cls()
        for prop, val in model._properties.items():
            for prop_type, make_value in random_value_by_type:
                if isinstance(val, prop_type):
                    setattr(model, prop, make_value())
                    break
        model.put(force_validation=False)
    self.response.out.write("OK")
def create_short_url(): url = request.form.get('url') custom_url = request.form.get('custom_url') print url, custom_url # Fetch the page title of the url # This function call can be made asynchronous to improve response time page_title = get_page_title(url) short_url = custom_url if custom_url!="" else generate_random_string() short_url_ = ShortURL.query.filter_by(short_url=short_url).first() if short_url_: return jsonify({ 'error': 'Link already exists, try a different custom keyword!' }) print "here" # Create the short url instance shorturl = ShortURL(url=url, short_url=short_url, url_title=page_title) db.session.add(shorturl) db.session.commit() return jsonify({ 'url_withad': url_for('short_url_ad', shorturl=shorturl.short_url, _external=True), 'url_withre': url_for('short_url_re', shorturl=shorturl.short_url, _external=True) })
def test_tokenize(self):
    """Tokenizing a large secret succeeds and yields a non-trivial token."""
    secret = generate_random_string(1000)
    resp = self._create_new_token(secret)
    self.assertEqual('success', resp.findtext('status'))
    token = resp.findtext('token')
    self.assertTrue(len(token) > 10)
def generate_confirm_link(username):
    """Build the account-confirmation link, persisting its one-time token."""
    token = utils.generate_random_string(30)
    confirm_base = webapp2.uri_for("account-confirm", _full=True)
    link = confirm_base + "?user=%s&token=%s" % (username, token)
    UserConfirmationModel(username=username, token=token).put()
    return link
def __init__(self, command_name=None):
    # Fall back to a random name when no command name is supplied.
    self.command_name = command_name or generate_random_string()
def create_login_token(cls, login, password):
    """Issue an 8-hour cached login token for valid credentials; None
    otherwise."""
    person = cls.get_by_login_and_password(login, password)
    if not person:
        return None
    token = utils.generate_random_string(48)
    cache_key = cls._get_memcache_key_for_token(token)
    memcache.set(cache_key, str(person.key()), time=8 * 60 * 60)
    return token
def __init__(self, config, task_engine, builder_name=None):
    """Store config and task engine; invent a 'builder…' name when none
    is given."""
    if builder_name:
        self.builder_name = builder_name
    else:
        self.builder_name = 'builder' + generate_random_string()
    self._config = config
    self._task_engine = task_engine
def generate_cookie_token():
    """Return a random 50-character token suitable for a cookie value."""
    token_length = 50
    return generate_random_string(token_length)
def hello_world(request):
    """Respond with a random string of random length (0..9000 chars)."""
    body_length = randint(0, 9000)
    random_body = generate_random_string(body_length)
    return Response(random_body)