def test_successful_create(self):
    """
    GIVEN a Flask application
    WHEN the '/data' endpoint is posted with an array of data
    THEN check that the per-name averaged results are correct
    """
    # Two values for "nasredine", one for "sid".
    entries = [
        DataModel(name="nasredine", value=2.347736),
        DataModel(name="nasredine", value=13.763882),
        DataModel(name="sid", value=10),
    ]
    payload = json.dumps({
        "data": [{"name": e.name, "value": e.value} for e in entries],
    })
    response = self.execute_post(payload)
    # Expected averages: (2.347736 + 13.763882) / 2 ≈ 8.05 and 10 / 1 = 10.0,
    # compared to 1 decimal place.
    self.assertAlmostEqual(8.05, response.json['nasredine'], 1)
    self.assertAlmostEqual(10.0, response.json['sid'], 1)
def add_data():
    """Persist a DataModel row built from the submitted form, then redirect home."""
    new_entry = DataModel(request.form['name'], request.form['content'])
    db.session.add(new_entry)
    db.session.commit()
    return redirect('/')
def create(data) -> dict:
    """
    Compute per-key average values from *data* and persist each one.

    Each (key, average) pair is saved as its own DataModel row.
    Raises ResourceExists when a row collides with an existing entry.
    Returns the mapping of key -> average value.
    """
    try:
        avg_values = DataModel.compute_average_values(data)
        # One row per averaged entry.
        for name, value in avg_values.items():
            DataModel(name, value).save()
    except IntegrityError:
        DataModel.rollback()
        raise ResourceExists('hat already exists')
    return avg_values
def add(list1):
    """Replace the DataModel table contents with the students in *list1*."""
    # Wipe existing rows first so the table mirrors the incoming list exactly.
    DataModel.objects.all().delete()
    fields = (
        'student_ID', 'name', 'department', 'major', 'grade',
        'graduate_time', 'student_status', 'failed_number',
        'center_credits', 'courses_must_to_take', 'general_courses', 'others',
    )
    for person in list1:
        DataModel(**{f: getattr(person, f) for f in fields}).save()
    return
def post(self):
    """
    Register a new user.

    Rejects duplicate usernames with 400, maps the submitted security
    question to its index (-1 when unrecognized), creates the UserModel
    plus an empty DataModel todo row, and returns a 200 response with
    the access cookie set. Any persistence failure yields a 500.
    """
    data = parser5.parse_args()

    if UserModel.find_by_username(data['username']):
        return {
            'message': 'User {} already exists'.format(data['username'])
        }, 400

    # Map the chosen security question to its index; -1 if not recognized.
    questions = [
        "What is your hometown's name?",
        "What is/was your first pet's name?",
        "Who is your favorite author?",
        "Who is your favorite character in your favorite show?"
    ]
    try:
        sec_q = questions.index(data['security-question'])
    except ValueError:
        sec_q = -1

    new_user = UserModel(username=data['username'],
                         password=UserModel.generate_hash(data['password']),
                         question=sec_q,
                         answer=data['security-answer'])
    try:
        new_user.save_to_db()
        # find_by_username is resolved via the class, not the fresh instance.
        user_id = UserModel.find_by_username(data['username']).id
        new_user_data = DataModel(user_id=user_id,
                                  username=data['username'],
                                  todo="")
        new_user_data.save_to_db()
        access_token = create_access_token(identity=data['username'])
        #refresh_token = create_refresh_token(identity = data['username'])
        resp = jsonify({'user': data['username']})
        set_access_cookies(resp, access_token)
        #set_refresh_cookies(resp, refresh_token)
        resp.status_code = 200
        return resp
    except Exception:
        # Was a bare `except:` — that also swallowed SystemExit and
        # KeyboardInterrupt. Still broad by design: any DB/token failure
        # maps to a generic 500.
        res = {'message': "Something went wrong"}
        return res, 500
def __init__(self, *args, **kwargs):
    """Bind the data model and the parent/method identifiers for this instance."""
    # Required kwargs: db_name, parent_id, method_id.
    self.data_model = DataModel(db_name=kwargs['db_name'])
    self.parent_id = kwargs['parent_id']
    self.method_id = kwargs['method_id']
    # Cache the method metadata once at construction time.
    self.methods_meta_info = self.data_model.get_method_meta()
import time
from models import CharacterLSTM, DataModel

# Command-line configuration for the character-level LSTM training run.
parser = argparse.ArgumentParser(
    formatter_class=argparse.ArgumentDefaultsHelpFormatter)
for flag, kind, default, desc in [
    ('--save_dir', str, './save', 'Directory to save model checkpoints'),
    ('--rnn_layers', int, 2, 'No:of layers in the RNN'),
    ('--rnn_size', int, 128, 'Size of RNN hidden states'),
    ('--lr', float, 0.002, 'Learning Rate'),
    ('--decay', float, 0.97, 'Decay Rate'),
    ('--batch_size', int, 100, 'Mini-batch size'),
    ('--seq_length', int, 50, 'Sequence Length'),
    ('--epochs', int, 30, 'No:of Epochs'),
    ('--save_every', int, 1000, 'save frequency'),
]:
    parser.add_argument(flag, type=kind, default=default, help=desc)
args = parser.parse_args()

# The character vocabulary is precomputed and pickled alongside the script.
with open('character_set.pkl', 'rb') as f:
    character_set = pickle.load(f)

data_model = DataModel(args.batch_size, args.seq_length, character_set,
                       args.save_dir)
args.vocab_size = data_model.get_total_characters()

# Persist the resolved args so a checkpoint can be reloaded with its config.
with open(args.save_dir + '/args.pkl', 'wb') as f:
    pickle.dump(args, f, pickle.HIGHEST_PROTOCOL)

model = CharacterLSTM(args)
start_time = time.time()
model.train(data_model)
end_time = time.time()
elapsed = end_time - start_time
print('Finished in %d minutes %d seconds' % (elapsed / 60, elapsed % 60))
# NOTE(review): this chunk begins mid-statement — the opening
# `parser.add_argument('--rnn_layers', ...` for the line below lies
# above the visible range, so the code is left byte-identical here.
help='No:of layers in the RNN')
# Hyper-parameters for the token-level LSTM training run.
parser.add_argument('--rnn_size', type=int, default=128,
                    help='Size of RNN hidden states')
parser.add_argument('--lr', type=float, default=0.002, help='Learning Rate')
parser.add_argument('--decay', type=float, default=0.97, help='Decay Rate')
parser.add_argument('--batch_size', type=int, default=30, help='Mini-batch size')
parser.add_argument('--seq_length', type=int, default=30, help='Sequence Length')
parser.add_argument('--epochs', type=int, default=50, help='No:of Epochs')
parser.add_argument('--save_every', type=int, default=1000, help='save frequency')
args = parser.parse_args()
# Build the data pipeline; its vocabulary size feeds the model config.
data_model = DataModel(args.batch_size, args.seq_length, args.save_dir)
args.vocab_size = data_model.get_vocab_size()
# Persist the resolved args so a checkpoint can be reloaded with its config.
with open(args.save_dir + '/args.pkl', 'wb') as f:
    pickle.dump(args, f, pickle.HIGHEST_PROTOCOL)
model = TokenLSTM(args)
model.train(data_model)
# initialize array to store battery cell data data = np.zeros(len(mags), dtype=object) # load battery cell data for each temperature as objects then store in data array # note that data files are in the dyn_data folder print('Load files') for idx, temp in enumerate(temps): mag = mags[idx] if temp < 0: tempfmt = f'{abs(temp):02}' files = [ Path(f'./dyn_data/{cellID}_DYN_{mag}_N{tempfmt}_s1.csv'), Path(f'./dyn_data/{cellID}_DYN_{mag}_N{tempfmt}_s2.csv'), Path(f'./dyn_data/{cellID}_DYN_{mag}_N{tempfmt}_s3.csv') ] data[idx] = DataModel(temp, files) print(*files, sep='\n') else: tempfmt = f'{abs(temp):02}' files = [ Path(f'./dyn_data/{cellID}_DYN_{mag}_P{tempfmt}_s1.csv'), Path(f'./dyn_data/{cellID}_DYN_{mag}_P{tempfmt}_s2.csv'), Path(f'./dyn_data/{cellID}_DYN_{mag}_P{tempfmt}_s3.csv') ] data[idx] = DataModel(temp, files) print(*files, sep='\n') modeldyn = processDynamic(data, modelocv, numpoles, doHyst) # convert ocv and dyn results model object to dict, then save in JSON to disk modeldyn = {
def mutate(root, info, input=None):
    """Create and commit a DataModel row from the mutation input."""
    record = DataModel(data=input.data)
    db_session.add(record)
    db_session.commit()
    return CreateData(data=record)