def __init__(self, frontFileName, sideFileName, height, ellipticTable, bau):
    """Build the front and side Length figures, both scaled to *height*.

    frontFileName / sideFileName -- image/profile files for each view
    height        -- target height the figures are scaled to
    ellipticTable -- lookup table stored as-is for later use
    bau           -- unit/scale parameter forwarded to Length.Length
    """
    self.ellipticTable = ellipticTable
    front = Length.Length(frontFileName, bau)
    front.input_height(height)
    self.frontFigure = front
    side = Length.Length(sideFileName, bau)
    side.input_height(height)
    self.sideFigure = side
def generate_model(input_shape_cdr3, num_outputs, filter_size):
    """Assemble the alignment-based multi-instance model.

    Inputs: CDR3 sequence features, per-instance quantities, age, a
    bag-level weight and a normalization level. Returns an uncompiled
    Model whose output is the flattened aggregated logits.
    """
    features_cdr3 = Input(shape=input_shape_cdr3)
    features_quantity = Input(shape=[])
    feature_age = Input(batch_shape=[1])
    weight = Input(batch_shape=[1])
    level = Input(batch_shape=[1])

    features_mask = Masking(mask_value=0.0)(features_cdr3)
    features_length = Length()(features_mask)
    features_abundance = Abundance()(features_quantity)
    features_age = BatchExpand()([feature_age, features_abundance])
    weights_instance = Multiply()([weight, features_quantity])

    # Two instances are required for each output.
    num_filters = 2 * num_outputs

    def _norm(axis, tensor):
        # Shared per-instance normalization pattern.
        return NormalizeInitializationByAggregation(
            axis, epsilon=1.0E-5)([tensor, weights_instance, level])

    logits_cdr3 = Alignment(
        num_filters, filter_size, penalties_feature=0.0,
        penalties_filter=-1.0E16, length_normalize=False)(features_mask)
    logits_cdr3_norm = _norm(1, logits_cdr3)

    # Scalar branches: normalize the feature, project, normalize the logits.
    logits_length = Dense(num_filters)(_norm(0, features_length))
    logits_length_norm = _norm(1, logits_length)

    logits_abundance = Dense(num_filters)(_norm(0, features_abundance))
    logits_abundance_norm = _norm(1, logits_abundance)

    logits_age = Dense(num_filters)(_norm(0, features_age))
    logits_age_norm = _norm(1, logits_age)

    logits = Add()([
        logits_cdr3_norm, logits_length_norm,
        logits_abundance_norm, logits_age_norm
    ])

    logits_aggregate = Aggregate2Instances()(logits)  # Uses two instances
    logits_aggregate_norm = NormalizeInitializationByAggregation(
        2, epsilon=1.0E-5)([logits_aggregate, weight, level])
    logits_flat = FullFlatten()(logits_aggregate_norm)

    return Model(
        inputs=[features_cdr3, features_quantity, feature_age, weight, level],
        outputs=logits_flat)
class RegistrationForm(FlaskForm):
    """Sign-up form: username, email, password plus confirmation."""
    # BUG FIX: label typo 'Usrname' -> 'Username'.
    username = StringField('Username',
                           validators=[DataRequired(), Length(min=2, max=20)])
    email = StringField('Email', validators=[DataRequired(), Email()])
    password = PasswordField('Password', validators=[DataRequired()])
    # BUG FIX: 'Equalto' is a NameError; the wtforms validator is EqualTo.
    confirm_password = PasswordField(
        'Confirm Password', validators=[DataRequired(), EqualTo('password')])
    submit = SubmitField('Sign Up')


class LoginForm(FlaskForm):
    """Log-in form: email and password with a remember-me flag."""
    email = StringField('Email', validators=[DataRequired(), Email()])
    password = PasswordField('Password', validators=[DataRequired()])
    # BUG FIX: 'BoolenField' is a NameError; the wtforms field is BooleanField.
    remember = BooleanField('Remember Me')
    submit = SubmitField('Login')
class RegistrationForm(FlaskForm):
    """Sign-up form: username, email, password plus confirmation."""
    username = StringField('Username',
                           validators=[DataRequired(), Length(min=2, max=20)])
    email = StringField('Email', validators=[DataRequired(), Email()])
    # FIX: password inputs were plain StringFields, which render unmasked;
    # PasswordField keeps the same .data interface for callers.
    password = PasswordField('Password', validators=[DataRequired()])
    confirm_password = PasswordField(
        'Confirm Password', validators=[DataRequired(), EqualTo('password')])
    submit = SubmitField('Sign Up')
def generate_model(input_shape_cdr3, num_outputs, filter_size):
    """Convolutional multi-instance model over CDR3 sequences.

    A Conv1D sequence branch is combined with dense branches over
    length, abundance and age; per-instance logits are aggregated and
    returned flattened from an uncompiled Model.
    """
    features_cdr3 = Input(shape=input_shape_cdr3)
    features_quantity = Input(shape=[])
    feature_age = Input(batch_shape=[1])
    weight = Input(batch_shape=[1])
    level = Input(batch_shape=[1])

    features_mask = Masking(mask_value=0.0)(features_cdr3)
    features_length = Length()(features_mask)
    features_abundance = Abundance()(features_quantity)
    features_age = BatchExpand()([feature_age, features_abundance])
    weights_instance = Multiply()([weight, features_quantity])

    def _norm(axis, tensor):
        # Repeated per-instance normalization pattern.
        return NormalizeInitializationByAggregation(
            axis, epsilon=1.0E-5)([tensor, weights_instance, level])

    # Sequence branch: convolve, re-apply the sequence mask (the conv
    # shortens the sequence by filter_size - 1), then pool with the mask.
    logits_cdr3 = Conv1D(num_outputs, filter_size)(features_cdr3)
    logits_cdr3_mask = MaskCopy(
        trim_front=filter_size - 1)([logits_cdr3, features_mask])
    logits_cdr3_pool = GlobalPoolWithMask()(logits_cdr3_mask)
    logits_cdr3_norm = _norm(1, logits_cdr3_pool)

    # Scalar branches: normalize feature, project, normalize logits.
    logits_length_norm = _norm(
        1, Dense(num_outputs)(_norm(0, features_length)))
    logits_abundance_norm = _norm(
        1, Dense(num_outputs)(_norm(0, features_abundance)))
    logits_age_norm = _norm(
        1, Dense(num_outputs)(_norm(0, features_age)))

    logits = Add()([
        logits_cdr3_norm, logits_length_norm,
        logits_abundance_norm, logits_age_norm
    ])

    logits_aggregate = Aggregate()(logits)
    logits_aggregate_norm = NormalizeInitializationByAggregation(
        2, epsilon=1.0E-5)([logits_aggregate, weight, level])
    logits_flat = FullFlatten()(logits_aggregate_norm)

    return Model(
        inputs=[features_cdr3, features_quantity, feature_age, weight, level],
        outputs=logits_flat)
def generate_model(input_shape_tra_cdr3, input_shape_tra_vgene,
                   input_shape_tra_jgene, input_shape_trb_cdr3,
                   input_shape_trb_vgene, input_shape_trb_jgene, num_outputs):
    """LSTM-based paired-chain (TRA/TRB) model.

    Each chain contributes an LSTM logit over the CDR3 sequence plus
    dense logits over length, V-gene and J-gene features; all eight
    normalized branches are summed and normalized once more. Returns an
    uncompiled Model.
    """
    features_tra_cdr3 = Input(shape=input_shape_tra_cdr3)
    features_tra_vgene = Input(shape=input_shape_tra_vgene)
    features_tra_jgene = Input(shape=input_shape_tra_jgene)
    features_trb_cdr3 = Input(shape=input_shape_trb_cdr3)
    features_trb_vgene = Input(shape=input_shape_trb_vgene)
    features_trb_jgene = Input(shape=input_shape_trb_jgene)
    weights = Input(shape=[])

    def _norm(tensor):
        # Weighted initialization-time normalization shared by all branches.
        return NormalizeInitialization(epsilon=0.0)([tensor, weights])

    def _dense_branch(features):
        # Dense projection followed by the shared normalization.
        return _norm(Dense(num_outputs)(features))

    # Alpha chain.
    features_tra_mask = Masking(mask_value=0.0)(features_tra_cdr3)
    features_tra_length = Length()(features_tra_mask)
    logits_tra_cdr3_norm = _norm(LSTM(num_outputs)(features_tra_mask))
    logits_tra_length_norm = _dense_branch(features_tra_length)
    logits_tra_vgene_norm = _dense_branch(features_tra_vgene)
    logits_tra_jgene_norm = _dense_branch(features_tra_jgene)

    # Beta chain.
    features_trb_mask = Masking(mask_value=0.0)(features_trb_cdr3)
    features_trb_length = Length()(features_trb_mask)
    logits_trb_cdr3_norm = _norm(LSTM(num_outputs)(features_trb_mask))
    logits_trb_length_norm = _dense_branch(features_trb_length)
    logits_trb_vgene_norm = _dense_branch(features_trb_vgene)
    logits_trb_jgene_norm = _dense_branch(features_trb_jgene)

    logits = Add()([
        logits_tra_cdr3_norm, logits_tra_length_norm,
        logits_tra_vgene_norm, logits_tra_jgene_norm,
        logits_trb_cdr3_norm, logits_trb_length_norm,
        logits_trb_vgene_norm, logits_trb_jgene_norm
    ])
    logits_norm = _norm(logits)

    return Model(
        inputs=[
            features_tra_cdr3, features_tra_vgene, features_tra_jgene,
            features_trb_cdr3, features_trb_vgene, features_trb_jgene,
            weights
        ],
        outputs=logits_norm)
def main():
    """Random-search driver for the 2-D shape-packing problem.

    Command line: config_file problem_file mode seed_mode
      - mode: "Random" runs the randomized search
      - seed_mode: "newSeed" takes the seed from the config file,
        "lastSeed" recovers it from the previous result log.
    """
    container = Container()
    # obtain configs in a list format
    config = open(sys.argv[1]).read().splitlines()
    # obtain the problem file and throw it into a list object
    container.shapes = open(sys.argv[2]).read().splitlines()
    # Header line holds "<width> <numShapes>"; length comes from the shapes.
    container.maxWidth = container.shapes[0].split(" ")[0]
    container.maxLength = Length.getLength(container.shapes)
    container.numShapes = container.shapes[0].split(" ")[1]
    # delete the width and number of shapes from the shape list
    del container.shapes[0]

    # User decided on the Randomized run
    if sys.argv[3] == "Random":
        if sys.argv[4] == "newSeed":
            for rules in config:
                info = rules.split(" ")
                if info[0] == "fitness_evaluations":
                    container.evaluations = info[1]
                elif info[0] == "runs":
                    container.numRuns = info[1]
                elif info[0] == "prob_log":
                    container.prob_log_file = info[1]
                elif info[0] == "prob_solution":
                    container.prob_solution_file = info[1]
                elif info[0] == "seed":
                    # NOTE(review): eval() on config text -- acceptable only
                    # for trusted files; int()/ast.literal_eval would be safer.
                    container.seed = eval(info[1])
        elif sys.argv[4] == "lastSeed":
            for rules in config:
                info = rules.split(" ")
                if info[0] == "fitness_evaluations":
                    container.evaluations = info[1]
                elif info[0] == "runs":
                    container.numRuns = info[1]
                elif info[0] == "prob_log":
                    container.prob_log_file = info[1]
                elif info[0] == "prob_solution":
                    container.prob_solution_file = info[1]
                elif info[0] == "seed":
                    # Recover the seed recorded in the previous result log.
                    obtain_seed = open(
                        container.prob_log_file).read().splitlines(3)
                    for lines in obtain_seed:
                        line = lines.split(" ")
                        if line[0] == "Random":
                            container.seed = line[3]
                            break

        # Seeds the random function using the saved seed value
        random.seed(container.seed)

        # opening and heading the result log
        result_log = open(container.prob_log_file, 'w')
        result_log.write("Result Log \n")
        result_log.write("Problem Instance Path = ../%s \n" % sys.argv[2])
        result_log.write("Random Seed = %s \n" % container.seed)
        result_log.write(
            "Parameters used = {'fitness evaluations': %s, 'number of runs': %s, 'problem solution location': '%s'}\n\n"
            % (container.evaluations, container.numRuns,
               container.prob_solution_file))

        # runs through the program as many times as the config file says to
        for run in range(1, int(container.numRuns) + 1):
            # highest fitness calculation thus far this run
            highest_fitness = 0
            # grab the stored solution's fitness value for comparison
            solution_file = open(
                container.prob_solution_file).read().splitlines()
            solution_fitness = solution_file[1].split(" ")[3]
            result_log.write("Run " + str(run) + "\n")

            for fitness in range(1, int(container.evaluations) + 1):
                # list of solution locations in case it is the best
                solution_locations = []
                # holders for length of material used
                LargestX = 0
                SmallestX = 156
                # the material sheet being used to cut out shapes
                container.materialSheet = [[
                    0 for x in range(0, int(container.maxWidth))
                ] for y in range(0, int(container.maxLength))]

                # for every shape line in the file, choose a position
                for shape in container.shapes:
                    if not shape[0].isdigit():
                        valid = False
                        # keep drawing positions until the shape fits
                        while not valid:
                            x_cord = random.randrange(
                                0, int(container.maxLength))
                            y_cord = random.randrange(
                                0, int(container.maxWidth))
                            rotation = random.randrange(0, 4)
                            if rotation != 0:
                                shape = rotate.rotate_shape(rotation, shape)
                            valid = shapeManipulation.validPlacement(
                                container.materialSheet, container.maxLength,
                                container.maxWidth, x_cord, y_cord, shape)
                        if valid:
                            shapeManipulation.placeShape(
                                container.materialSheet, x_cord, y_cord, shape)
                            # store the location tuple for a possible best
                            solution_locations.append(
                                (x_cord, y_cord, rotation))

                # smallest and largest occupied rows of the material array
                for i in range(len(container.materialSheet)):
                    if 1 in container.materialSheet[i]:
                        if i < SmallestX:
                            SmallestX = i
                        elif i > LargestX:
                            LargestX = i
                # length of material consumed by this iteration
                usedLength = ((LargestX - SmallestX) + 1)
                current_fitness = fitnessCalc(container.maxLength, usedLength)
                if highest_fitness < current_fitness:
                    highest_fitness = current_fitness
                    result_log.write(
                        str(fitness + 1) + " " + str(current_fitness) + "\n")

            # If this run beat the stored solution, rewrite the solution file.
            # NOTE(review): solution_locations holds the LAST evaluation's
            # placements, which may not be the best one -- confirm intent.
            if int(solution_fitness) < highest_fitness:
                # BUG FIX: was open(prob_solution_file, 'w') -- a NameError;
                # the path is stored on the container.
                solution_file = open(container.prob_solution_file, 'w')
                solution_file.write("Solution File\n")
                solution_file.write("Fitness Calculation = " +
                                    str(highest_fitness) + "\n\n")
                for i in range(len(solution_locations)):
                    solution_file.write(
                        str(solution_locations[i])[1:-1] + "\n")
                # close so the next run's read sees the new contents
                solution_file.close()

            # space after each run block
            result_log.write("\n")
        result_log.close()
def generate_model(input_shape_tra_cdr3, input_shape_tra_vgene,
                   input_shape_tra_jgene, input_shape_trb_cdr3,
                   input_shape_trb_vgene, input_shape_trb_jgene, num_outputs):
    """Stacked-Conv1D paired-chain (TRA/TRB) model.

    Each chain runs its CDR3 features through two 4-mer convolutions,
    re-masks, pools, and is combined with dense length / V-gene / J-gene
    branches; the eight normalized logits are summed and normalized once
    more. Returns an uncompiled Model.
    """
    kmer_size = 4
    features_tra_cdr3 = Input(shape=input_shape_tra_cdr3)
    features_tra_vgene = Input(shape=input_shape_tra_vgene)
    features_tra_jgene = Input(shape=input_shape_tra_jgene)
    features_trb_cdr3 = Input(shape=input_shape_trb_cdr3)
    features_trb_vgene = Input(shape=input_shape_trb_vgene)
    features_trb_jgene = Input(shape=input_shape_trb_jgene)
    weights = Input(shape=[])

    # Alpha chain.
    features_tra_mask = Masking(mask_value=0.0)(features_tra_cdr3)
    features_tra_length = Length()(features_tra_mask)
    logits_tra_cdr3 = Conv1D(8, kmer_size)(features_tra_cdr3)
    logits_tra_cdr3 = Conv1D(num_outputs, kmer_size)(logits_tra_cdr3)
    # Two stacked convs shorten the sequence by 2 * (kmer_size - 1).
    logits_tra_cdr3_mask = MaskCopy(trim_front=2 * kmer_size -
                                    2)([logits_tra_cdr3, features_tra_mask])
    logits_tra_cdr3_pool = GlobalPoolWithMask()(logits_tra_cdr3_mask)
    logits_tra_cdr3_norm = NormalizeInitialization(epsilon=0.0)(
        [logits_tra_cdr3_pool, weights])
    logits_tra_length = Dense(num_outputs)(features_tra_length)
    logits_tra_length_norm = NormalizeInitialization(epsilon=0.0)(
        [logits_tra_length, weights])
    logits_tra_vgene = Dense(num_outputs)(features_tra_vgene)
    logits_tra_vgene_norm = NormalizeInitialization(epsilon=0.0)(
        [logits_tra_vgene, weights])
    logits_tra_jgene = Dense(num_outputs)(features_tra_jgene)
    logits_tra_jgene_norm = NormalizeInitialization(epsilon=0.0)(
        [logits_tra_jgene, weights])

    # Beta chain.
    features_trb_mask = Masking(mask_value=0.0)(features_trb_cdr3)
    features_trb_length = Length()(features_trb_mask)
    logits_trb_cdr3 = Conv1D(8, kmer_size)(features_trb_cdr3)
    logits_trb_cdr3 = Conv1D(num_outputs, kmer_size)(logits_trb_cdr3)
    # BUG FIX: this MaskCopy previously re-used features_tra_mask, masking
    # the beta-chain logits with the alpha-chain's sequence mask.
    logits_trb_cdr3_mask = MaskCopy(trim_front=2 * kmer_size -
                                    2)([logits_trb_cdr3, features_trb_mask])
    logits_trb_cdr3_pool = GlobalPoolWithMask()(logits_trb_cdr3_mask)
    logits_trb_cdr3_norm = NormalizeInitialization(epsilon=0.0)(
        [logits_trb_cdr3_pool, weights])
    logits_trb_length = Dense(num_outputs)(features_trb_length)
    logits_trb_length_norm = NormalizeInitialization(epsilon=0.0)(
        [logits_trb_length, weights])
    logits_trb_vgene = Dense(num_outputs)(features_trb_vgene)
    logits_trb_vgene_norm = NormalizeInitialization(epsilon=0.0)(
        [logits_trb_vgene, weights])
    logits_trb_jgene = Dense(num_outputs)(features_trb_jgene)
    logits_trb_jgene_norm = NormalizeInitialization(epsilon=0.0)(
        [logits_trb_jgene, weights])

    logits = Add()([
        logits_tra_cdr3_norm, logits_tra_length_norm, logits_tra_vgene_norm,
        logits_tra_jgene_norm, logits_trb_cdr3_norm, logits_trb_length_norm,
        logits_trb_vgene_norm, logits_trb_jgene_norm
    ])
    logits_norm = NormalizeInitialization(epsilon=0.0)([logits, weights])

    return Model(inputs=[
        features_tra_cdr3, features_tra_vgene, features_tra_jgene,
        features_trb_cdr3, features_trb_vgene, features_trb_jgene, weights
    ],
                 outputs=logits_norm)
def generate_model(input_shape_tra_cdr3, input_shape_tra_vgene,
                   input_shape_tra_jgene, input_shape_trb_cdr3,
                   input_shape_trb_vgene, input_shape_trb_jgene, num_outputs):
    """Alignment-based paired-chain (TRA/TRB) classifier.

    Each chain contributes an Alignment logit over its CDR3 sequence
    plus dense logits over length, V-gene and J-gene features; every
    branch is batch-normalized, summed, normalized again and pushed
    through a softmax. Returns an uncompiled Model.
    """
    features_tra_cdr3 = Input(shape=input_shape_tra_cdr3)
    features_tra_vgene = Input(shape=input_shape_tra_vgene)
    features_tra_jgene = Input(shape=input_shape_tra_jgene)
    features_trb_cdr3 = Input(shape=input_shape_trb_cdr3)
    features_trb_vgene = Input(shape=input_shape_trb_vgene)
    features_trb_jgene = Input(shape=input_shape_trb_jgene)

    def _bn_dense(features):
        # Dense projection followed by batch normalization.
        return BatchNormalization(momentum=0.5)(Dense(num_outputs)(features))

    # Alpha chain.
    features_tra_mask = Masking(mask_value=0.0)(features_tra_cdr3)
    features_tra_length = Length()(features_tra_mask)
    logits_tra_cdr3 = Alignment(num_outputs,
                                input_shape_tra_cdr3[0],
                                penalties_feature=-1.0E16,
                                penalties_filter=0.0,
                                length_normalize=True)(features_tra_mask)
    logits_tra_cdr3_norm = BatchNormalization(momentum=0.5)(logits_tra_cdr3)
    logits_tra_length_norm = _bn_dense(features_tra_length)
    logits_tra_vgene_norm = _bn_dense(features_tra_vgene)
    logits_tra_jgene_norm = _bn_dense(features_tra_jgene)

    # Beta chain.
    features_trb_mask = Masking(mask_value=0.0)(features_trb_cdr3)
    features_trb_length = Length()(features_trb_mask)
    logits_trb_cdr3 = Alignment(num_outputs,
                                input_shape_trb_cdr3[0],
                                penalties_feature=-1.0E16,
                                penalties_filter=0.0,
                                length_normalize=True)(features_trb_mask)
    logits_trb_cdr3_norm = BatchNormalization(momentum=0.5)(logits_trb_cdr3)
    logits_trb_length_norm = _bn_dense(features_trb_length)
    logits_trb_vgene_norm = _bn_dense(features_trb_vgene)
    logits_trb_jgene_norm = _bn_dense(features_trb_jgene)

    logits = Add()([
        logits_tra_cdr3_norm, logits_tra_length_norm,
        logits_tra_vgene_norm, logits_tra_jgene_norm,
        logits_trb_cdr3_norm, logits_trb_length_norm,
        logits_trb_vgene_norm, logits_trb_jgene_norm
    ])
    logits_norm = BatchNormalization(momentum=0.5)(logits)
    probabilities = Activation('softmax')(logits_norm)

    return Model(
        inputs=[
            features_tra_cdr3, features_tra_vgene, features_tra_jgene,
            features_trb_cdr3, features_trb_vgene, features_trb_jgene
        ],
        outputs=probabilities)
# Persist one CSV row per result: the key, every value, then a trailing 1.
for key, value in results_dict.items():
    row = [key, *value, 1]
    writer.writerow(row)
print("Results saved in results.csv file!")

print("Calculating A and C1")
# Training set: sentence counts (A) and subject-verb agreement errors (C1).
essays_list = kd_reader.read_essays(data_path_train, train_files)
essay_words, essay_pos = kd_reader.gen_word_pos(essays_list)
essay_tokenized = kd_reader.gen_sent(essays_list)
essay_parsed = kd_reader.gen_parse(essay_tokenized, stanford_path)
length_train = Length.count_sent(essay_pos, essay_tokenized)  # no. of sentences a
c_1_count_train = c_1.count_c1(
    essay_parsed)  # number of subj-verb agreement mistakes in each essay

# Test set: same two features.
essays_list = kd_reader.read_essays(data_path_test, test_files)
essay_words, essay_pos = kd_reader.gen_word_pos(essays_list)
essay_tokenized = kd_reader.gen_sent(essays_list)
essay_parsed = kd_reader.gen_parse(essay_tokenized, stanford_path)
length_test = Length.count_sent(essay_pos, essay_tokenized)  # no. of sentences a
c_1_count_test = c_1.count_c1(essay_parsed)
#print(c_1_count_test,c_1_count_train,length_test,length_train)

df_train = pd.DataFrame()
df_train['length_train'] = length_train
df_train['c1_train'] = c_1_count_train
def generate_model(input_shape_tra_cdr3, input_shape_tra_vgene,
                   input_shape_tra_jgene, input_shape_trb_cdr3,
                   input_shape_trb_vgene, input_shape_trb_jgene, num_outputs):
    """Alignment-based paired-chain (TRA/TRB) model with weighted
    initialization normalization on every branch.

    Returns an uncompiled Model whose output is the normalized sum of
    the eight branch logits.
    """
    features_tra_cdr3 = Input(shape=input_shape_tra_cdr3)
    features_tra_vgene = Input(shape=input_shape_tra_vgene)
    features_tra_jgene = Input(shape=input_shape_tra_jgene)
    features_trb_cdr3 = Input(shape=input_shape_trb_cdr3)
    features_trb_vgene = Input(shape=input_shape_trb_vgene)
    features_trb_jgene = Input(shape=input_shape_trb_jgene)
    weights = Input(shape=[])

    def _norm(tensor):
        # Weighted initialization normalization shared by all branches.
        return WeightedInitNormalization(epsilon=0.0)([tensor, weights])

    def _dense_branch(features):
        # Dense projection followed by the shared normalization.
        return _norm(Dense(num_outputs)(features))

    # Alpha chain.
    features_tra_mask = Masking(mask_value=0.0)(features_tra_cdr3)
    features_tra_length = Length()(features_tra_mask)
    logits_tra_cdr3 = Alignment(num_outputs,
                                input_shape_tra_cdr3[0],
                                penalties_feature=-1.0E16,
                                penalties_filter=0.0,
                                length_normalize=True)(features_tra_mask)
    logits_tra_cdr3_norm = _norm(logits_tra_cdr3)
    logits_tra_length_norm = _dense_branch(features_tra_length)
    logits_tra_vgene_norm = _dense_branch(features_tra_vgene)
    logits_tra_jgene_norm = _dense_branch(features_tra_jgene)

    # Beta chain.
    features_trb_mask = Masking(mask_value=0.0)(features_trb_cdr3)
    features_trb_length = Length()(features_trb_mask)
    logits_trb_cdr3 = Alignment(num_outputs,
                                input_shape_trb_cdr3[0],
                                penalties_feature=-1.0E16,
                                penalties_filter=0.0,
                                length_normalize=True)(features_trb_mask)
    logits_trb_cdr3_norm = _norm(logits_trb_cdr3)
    logits_trb_length_norm = _dense_branch(features_trb_length)
    logits_trb_vgene_norm = _dense_branch(features_trb_vgene)
    logits_trb_jgene_norm = _dense_branch(features_trb_jgene)

    logits = Add()([
        logits_tra_cdr3_norm, logits_tra_length_norm,
        logits_tra_vgene_norm, logits_tra_jgene_norm,
        logits_trb_cdr3_norm, logits_trb_length_norm,
        logits_trb_vgene_norm, logits_trb_jgene_norm
    ])
    logits_norm = _norm(logits)

    return Model(
        inputs=[
            features_tra_cdr3, features_tra_vgene, features_tra_jgene,
            features_trb_cdr3, features_trb_vgene, features_trb_jgene,
            weights
        ],
        outputs=logits_norm)
class InputForm(FlaskForm):
    """Query form for fetching price history of a ticker symbol."""
    # Ticker symbols are constrained to 2-4 characters.
    symbol = StringField(
        'Symbol', validators=[DataRequired(), Length(min=2, max=4)])
    startdate = StringField('StartDate')
    enddate = StringField('EndDate')
    submit = SubmitField('Get History')