def experiment():
    """Flask view: render the experiment form (GET) and create an
    Experiment for the logged-in user (POST).

    On success, stores the new experiment id in the session under
    'current_experiment' and redirects to the 'node' view.
    """
    form = ExperimentForm()
    if request.method == "POST":
        # Re-render the form with validation errors instead of proceeding.
        if not form.validate():
            return render_template("experiment.html", form=form)
        username = session['username']
        # NOTE(review): .first() returns None if the session's user no longer
        # exists, which would raise AttributeError on user.id below — confirm
        # the session is guaranteed to hold a valid username.
        user = User.query.filter_by(username=username).first()
        exp = Experiment(uid=user.id,
                         title=form.title.data,
                         duration=form.duration.data,
                         protocol=form.protocol.data,
                         description=form.description.data)
        db.session.add(exp)
        # Flush so the generated primary key (eid) is available pre-commit.
        db.session.flush()
        session['current_experiment'] = exp.eid
        db.session.commit()
        return redirect(url_for('node'))
    elif request.method == "GET":
        return render_template("experiment.html", form=form)
def RunWithPars(pars, uid):
    """Run the BCC test suite with parameter dict *pars* for user *uid*.

    Creates an Experiment row marked "--RUNNING--", runs the tests, pickles
    the resulting models to disk, then either deletes the row (run failed)
    or records end time, results, and one ExperimentPrediction per
    (method, dataset) pair.

    :param pars: parameter dict; must contain 'type'; 'pid' is added here
    :param uid: id of the owning user
    """
    startTime = Common.getCurrentTimeMil()
    modelsFol = 'SavePath/models/'
    if not os.path.exists(config.basedir + '/' + modelsFol):
        os.makedirs(config.basedir + '/' + modelsFol)
    fname = modelsFol + Common.gen_rnd_filename() + '.pkl'
    # Record the worker process id alongside the experiment parameters.
    pars['pid'] = os.getpid()

    e = Experiment()
    e.endDateTime = "--RUNNING--"  # sentinel while the run is in progress
    e.startDateTime = str(startTime)
    e.pars = json.dumps(pars)
    e.results = ""
    e.type = pars['type']
    e.expModelsFileName = ''
    e.userId = uid
    db.session.add(e)
    db.session.commit()

    ret, expModels = BCC.BCC.RunTests(uid, pars['type'], pars, config.basedir)

    # BUG FIX: open() handle was never closed; a context manager guarantees
    # the pickle file is flushed and closed even if dump() raises.
    with open(config.basedir + '/' + fname, 'wb') as model_file:
        pickle.dump(expModels, model_file)

    if ret is None:
        # Run failed: remove the placeholder experiment row.
        # NOTE(review): e.delete() assumes a delete() helper on the model —
        # confirm this is not meant to be db.session.delete(e).
        e.delete()
        db.session.commit()
    else:
        endTime = Common.getCurrentTimeMil()
        e.endDateTime = str(endTime)
        e.startDateTime = str(startTime)
        e.pars = json.dumps(pars)
        e.expModelsFileName = fname
        e.results = json.dumps(ret)
        db.session.commit()
        # Persist one prediction row per (method, dataset) result.
        for method in ret:
            for ds in ret[method]:
                perfs = ret[method][ds]
                ep = ExperimentPrediction()
                ep.data = None
                ep.datasetName = ds
                ep.predResult = json.dumps(perfs)
                ep.expId = e.id
                ep.predDateTime = e.endDateTime
                ep.predType = 0  # 0 == test with known labels
                db.session.add(ep)
                db.session.commit()
def experiment_new():
    """Create a new Experiment from the submitted form, or show the editor."""
    form = ExperimentF(request.form)
    # The cancel button short-circuits straight back to the dashboard.
    if form.cancel.data:
        return redirect(url_for('dashboard'))
    # Invalid (or initial GET) submission: show the creation form again.
    if not form.validate_on_submit():
        return render_template('experiment_edit.html', form=form, type='create', recent=recent.get())
    new_exp = Experiment()
    form.populate_obj(new_exp)
    db_session.add(new_exp)
    db_session.commit()
    return redirect(url_for('.experiment_info', expid=new_exp.id))
def create_experiment(username, experiment):
    """Persist a new Experiment for *username* and return its id.

    *experiment* is a JSON string; its settings.a_description field is
    stored as the resume. The timestamp uses Argentina's timezone.
    """
    settings = json.loads(experiment)["settings"]
    record = Experiment(
        author=username,
        experiment=experiment,
        date_time=datetime.now(pytz.timezone(country_timezones["AR"][1])),
        resume=settings["a_description"],
        status="created",
    )
    record.save()
    return record.id
def post(self):
    """Create an Experiment (with Treatments and Properties) from POSTed form data.

    Expected form fields:
      exp_name / exp_description / exp_instructions / exp_device — main
      Experiment attributes;
      treat##_name — name of treatment ##;
      treat##_prop##_name / treat##_prop##_value — property name/value
      pairs for treatment ##.
    Any missing value (e.g. a property name without a value) makes the
    parse fail and redirects to the error page.
    """
    parsed = self.parse()
    if not parsed:
        self.redirect('error')
        return
    exp = Experiment()
    # Copy the scalar experiment attributes straight off the parsed dict.
    for attr in ('name', 'description', 'instructions', 'device', 'owner', 'experiment_type'):
        setattr(exp, attr, parsed.get(attr))
    exp.put()
    # Each treatment becomes a child entity of the experiment, and each
    # property a child of its treatment.
    for treatment_data in parsed.get('treatments'):
        treatment = Treatment(parent=exp.key())
        treatment.name = treatment_data.get('name')
        treatment.put()
        for prop_data in treatment_data.get('properties'):
            prop = Property(parent=treatment.key())
            prop.name = prop_data.get('name')
            prop.value = prop_data.get('value')
            prop.put()
    self.redirect('view?exp_key=%s' % exp.key())
def experiment_clone(expid=None):
    """Clone an existing Experiment and redirect to the clone's info page.

    When no expid is passed, it is taken from the 'project' cookie; error
    pages are rendered when neither is available or the id is unknown.
    """
    if not expid:
        expid = request.cookies.get('project')
    if not expid:
        return render_template('error.html', message="Proyecto no seleccionado", recent=recent.get())
    source = db_session.query(Experiment).get(int(expid))
    if not source:
        return render_template('error.html', message="Experimento no definido", recent=recent.get())
    clone = Experiment()
    clone.name = "CLONE:" + source.name
    # Copy every cloneable field from the source experiment.
    for attr in ('definition', 'description', 'instructions', 'invitation', 'reinvitation'):
        setattr(clone, attr, getattr(source, attr))
    db_session.add(clone)
    db_session.commit()
    return redirect(url_for('.experiment_info', expid=clone.id))
def start_experiment(request):
    """Create and persist an Experiment from a StartExperimentForm POST.

    Returns a JSON response with the experiment key on success, or
    success=False when the form does not validate.
    """
    form = StartExperimentForm(request.POST)
    if not form.is_valid():
        return json_response(success=False)
    cleaned = form.cleaned_data
    exp = Experiment()
    exp.experiment_type = cleaned.get("type")
    exp.user = request.user
    exp.init()
    exp.self_efficacy = cleaned["self_efficacy"]
    exp.app_efficacy = cleaned["app_efficacy"]
    exp.experiment_efficacy = cleaned["experiment_efficacy"]
    exp.save()
    return json_response(success=True, key=exp.key)
def copyProjectExperiments(project, new_project):
    """Deep-copy every Experiment of *project* (with its Conditions,
    Observations and Treatments) onto *new_project*.

    Each copied object is saved exactly once; the original code re-saved
    new_condition and new_experiment at the end with no intervening
    changes, which were redundant duplicate DB writes and are dropped.
    """
    for experiment in Experiment.objects.filter(project=project):
        new_experiment = Experiment(project=new_project,
                                    name=str(experiment.name),
                                    notes=str(experiment.notes))
        new_experiment.save()
        for condition in Condition.objects.filter(experiment=experiment):
            new_condition = Condition(experiment=new_experiment,
                                      name=str(condition.name),
                                      notes=str(condition.notes))
            new_condition.save()
            # Copy all observations attached to this condition.
            for t_observation in Observation.objects.filter(condition=condition):
                Observation(
                    condition=new_condition,
                    species=t_observation.species,
                    time=t_observation.time,
                    value=t_observation.value,
                    stddev=t_observation.stddev,
                    steady_state=t_observation.steady_state,
                    min_steady_state=t_observation.min_steady_state,
                    max_steady_state=t_observation.max_steady_state).save()
            # Copy all treatments attached to this condition.
            for t_treatment in Treatment.objects.filter(condition=condition):
                Treatment(condition=new_condition,
                          species=t_treatment.species,
                          time=t_treatment.time,
                          value=t_treatment.value).save()
def log_file_analyse():
    """Create an Experiment from the submitted analysis form, store the
    uploaded dataset file, and enqueue the experiment for processing."""
    from models import Experiment, Queue

    name = request.form['name-input']
    tags = request.form['tags-input'].split(',')
    # BUG FIX: notes previously re-read 'name-input' (copy-paste error).
    # TODO(review): confirm the template names this field 'notes-input'.
    notes = request.form['notes-input']
    authors = request.form['author-input'].split(',')
    alias = request.form['alias-input']
    upload = request.files['datasetInputFile']

    # Reject an empty selection or a disallowed file type. The original fell
    # through when allowed_file() failed and created an Experiment with
    # data_file_path=None; both cases now redirect home instead.
    if upload.filename == '' or not allowed_file(upload.filename):
        return redirect(url_for("home"))

    filename = secure_filename(upload.filename)
    upload.save(os.path.join(app.config['UPLOAD_FOLDER'], filename))

    e = Experiment(name=name, data_file_path=filename, authors=authors,
                   tags=tags, notes=notes, alias=alias)
    q = Queue(e)
    db.session.add(e)
    db.session.add(q)
    db.session.commit()
    queue.create_new_task(q)
    return redirect(url_for('queue'))
k_3=2.0, k_4=1.0, K=0.5, Q_K=7.5, k_5=1.0)
# NOTE(review): the call closed above begins before this chunk — its opening
# line is not visible here.
ans = get_all_properties(parameters_2)
print(ans)
# exit() stops the script here, so everything below is currently unreachable
# demo/scaffolding code.
exit()

# 1. User defines the classes.
# 2. User creates objects from the classes.
# 3. Through inspecting the objects, the databases are created!

# User creates these classes to make a model
lubricant_1 = Lubricant(1, 'ZEPF', True, False, density_liquid=1.125)  # Extra table which may not necessarily be there
variable_condition_1 = VariableExperimentalConditions(450, 8, 0.34, 50, 25, 0)  # Mandatory columns - variable conditions (conditions)
static_condition_1 = StaticExperimentalConditions(lubricant_1, 'P20', 0.3,  # Mandatory columns - static conditions (optimisation_groups)
                                                  'AA7075', 0.7, 2, 'AlCrN', 0.2, 0.8)
experiment_1 = Experiment(static_condition_1, variable_condition_1)  # NOT REQUIRED (experiments)
parameters_1 = FrictionModelParametersLiquid(mu0_lubricated = 1.69073,  # Mandatory columns - parameters (table name taken from class name)
                                             Q_lubricated = 9141.50683,
                                             mu0_dry = 10.94225,
                                             Q_dry = 9368.85126,
                                             eta_0 = 0.12, Q_eta = 11930,
                                             lambda_1 = 40.70, lambda_2 = 1.55,
                                             c = 0.00847,
                                             k_1 = 1.52, k_2 = 2.67, k_3 = 4.58)

##########################################
####### Dynamic creation of tables #######
##########################################

# Try to delete existing table (for test purposes)
# NOTE(review): the bare except below also hides errors other than a missing
# file (e.g. permissions) — consider except OSError.
try:
    os.remove("friction_model.db")
except:
    print("Removal of database failed")
def main():
    """Interactive CLI: pick CSV dataset files, validate their sensor
    columns, collect per-experiment metadata from the user, and upload each
    dataset chunk-by-chunk through the API.

    Aborts (returns) when any file has unknown sensor columns; exits the
    process if the user declines the upload.
    """
    user = User()
    datasets = []
    experiments = []
    invalid_sensors_error = False
    file_paths = user.choose_files()

    # --- Dataset creation & sensor validation ---
    for i, path in enumerate(file_paths):
        dataset = Dataset(path)
        if len(dataset.invalid_sensors()) == 0:
            datasets.append(dataset)
            logger.info("All sensors OK in file {}".format(i))
        else:
            dataset.report_invalid_sensors()
            invalid_sensors_error = True
    if invalid_sensors_error:
        print(
            "Please check if the column names of the CSV files are spelled correctly! If they are correct, please add the new sensors in the database!"
        )
        return

    logger.print_chosen_files(file_paths)
    logger.print_available_rocks(api_handler.rocks)

    # --- Collect per-experiment metadata from the user ---
    for dataset in datasets:
        experiment = Experiment(dataset)
        experiment.rock_id = user.choose_rock(dataset.filepath)
        experiment.description = user.write_description()
        experiment.start_time = user.set_date()
        dataset.calculate_checksum()
        experiments.append(experiment)

    chunk_size = 500000
    while True:
        continue_with_upload = user.continue_with_upload()
        if continue_with_upload:
            try:
                for experiment in experiments:
                    logger.success("\n\nUploading file {}.".format(
                        experiment.dataset.filepath))
                    file_length = experiment.dataset.file_length
                    # Round the chunk count up unless it divides evenly.
                    nr_of_chunks = (file_length // chunk_size
                                    if file_length % chunk_size == 0
                                    else file_length // chunk_size + 1)
                    with open(experiment.dataset.filepath) as f:
                        for _ in tqdm(range(nr_of_chunks)):
                            chunk_response = api_handler.send_file_chunk(
                                experiment, f, chunk_size=chunk_size)
                            if chunk_response.text == 'DATASET_ALREADY_IN_DB':
                                logger.error(
                                    "\n\nThis dataset is already stored in database! Stopping upload!"
                                )
                                break
                    # Skip metadata/registration for duplicate datasets.
                    if chunk_response.text == 'DATASET_ALREADY_IN_DB':
                        continue
                    metadata_response = api_handler.send_metadata(experiment)
                    if metadata_response.text == "METADATA_RECEIVED":
                        logger.success("File {} uploaded!".format(
                            experiment.dataset.filepath))
                        add_experiment_response = api_handler.add_experiment(
                            experiment)
                        if add_experiment_response.text == 'EXPERIMENT_BEING_ADDED_TO_THE_DB':
                            logger.success(
                                "The uploaded dataset is being written in the database. It may take some time. You can check its progress in the web application!"
                            )
                        else:
                            # BUG FIX: message read "occred".
                            print(
                                "An error occurred while inserting the dataset in the database!"
                            )
            except Exception as exc:
                logger.error("ERROR: " + str(exc))
            logger.input("Press Enter to continue!")
            break
        # == False is kept deliberately: a non-bool answer re-prompts.
        elif continue_with_upload == False:
            exit()
def get_qubit_mapping(version: str) -> Dict[str, int]:
    """Return the logical-to-physical qubit mapping for a device version.

    Known versions are "V1.0" and "V1.1"; any other version yields an
    empty mapping (same as the original behavior).
    """
    version_maps = {
        "V1.0": {"a1": 2, "a2": 4, "b1": 0, "b2": 3},
        "V1.1": {"a1": 2, "a2": 1, "b1": 3, "b2": 0},
    }
    # Copy so callers can mutate the result without corrupting the table.
    return dict(version_maps.get(version, {}))


if __name__ == "__main__":
    credentials = interfaces.ApiCredentials()
    Q_program: QuantumProgram = QuantumProgram()
    Q_program.set_api(credentials.GetToken(), credentials.GetApiUri())
    experiment = Experiment("draper", "local_qasm_simulator", "V1.1",
                            get_qubit_mapping("V1.1"), algorithm_prime)
    qasm, qobj, output = create_experiment(Q_program, "0b11", "0b01", experiment)
    print(qasm)
from models import Experiment

# Seed/demo script: register a single test Experiment entity.
# .put() persists it to the datastore immediately on module execution.
Experiment(hit_id="test",
           experiment_name="test",
           num_subjects_total=5,
           num_rounds_per_subject=5,
           active=True,
           base_price_cents=5).put()