def execute_chrosomomes(self, strain_config):
    """Import every chromosome of a strain's FASTA file into the DB.

    Each chromosome is written as one tab-separated row
    (strain name, chromosome id, sequence) to a temp file, which is then
    bulk-loaded into the `chromosome` table.
    """
    print("Adding chromosomes...")
    filepath = settings.data_folder + "/" + strain_config["sequence_filename"]
    for record in SeqIO.parse(filepath, "fasta"):  # loop through chromosomes
        chr_id = record.id
        if chr_id in settings.ignored_chromosomes:
            continue
        seq_str = str(record.seq)
        temp_filepath = settings.temp_folder + "/tmp.fa"
        # Save a row of chromosome data to a text file; `with` guarantees
        # the handle is closed even if the write fails.
        with open(temp_filepath, "w") as temp_file:
            temp_file.write(strain_config["name"] + "\t" + chr_id + "\t" + seq_str)
        # Import the file into the DB.
        # BUG FIX: the original hard-coded '/tmp/tmp.fa' in the SQL, which
        # only worked when settings.temp_folder happened to be '/tmp'.
        # LOAD DATA cannot take a bound parameter for the filename, so the
        # path is interpolated; it comes from trusted settings, not user
        # input.
        sql = """
            LOAD DATA LOCAL INFILE '{}'
            REPLACE INTO TABLE chromosome
        """.format(temp_filepath)
        db_session.execute(sql)
        db_session.commit()
        # Delete the temp file
        os.remove(temp_filepath)
        print("Added [" + chr_id + "]")
    print("Finished adding chromosomes to [" + strain_config["name"] + "]")
def issue_search():
    """Search issues by name (POST) and always list the user's issues."""
    obj = ''
    if request.method == 'POST':
        # Wrap the submitted term for a substring LIKE match.
        pattern = '%' + request.form['search'] + '%'
        search_sql = text("""
            SELECT ISSUE_ID, ISSUE_NAME, ISSUE_DAY, ISSUE_MONTH, ISSUE_YEAR
            FROM DETAIL_ISSUE
            WHERE ISSUE_NAME LIKE :q
            ORDER BY `ISSUE_YEAR` DESC , `ISSUE_MONTH` DESC , `ISSUE_DAY` DESC
        """)
        obj = db_session.execute(search_sql, {'q': pattern}).fetchall()
    # Issues already in the user's collection.
    user_sql = text("""
        SELECT I.ISSUE_ID, ISSUE_NAME, ISSUE_DAY, ISSUE_MONTH, ISSUE_YEAR
        FROM USER_ISSUE U, DETAIL_ISSUE I
        WHERE U.ISSUE_ID = I.ISSUE_ID
        LIMIT 30
    """)
    obj2 = db_session.execute(user_sql).fetchall()
    return render_template("issue_search.html", obj=obj, obj2=obj2)
def err_execute_and_complete_payment(paypal_payment_id, paypal_payer_id): """"Returns ERROR or None""" # lock table tickets db_session.execute('LOCK TABLES tickets WRITE, payments WRITE;') payment = get_og_payment(paypal_payment_id) if not payment: return jsonify({'message': 'aucun paiement'}), 404 ticket = get_ticket_from_payment(payment) err = get_err_from_ticket(ticket) if err: return err prepare_payment_execution(payment, paypal_payer_id, ticket) # Unlock tables (we do not want to lock while we query the paypal api) db_session.execute('UNLOCK TABLES;') # Validate payment is created paypal_payment = PaypalPayment.find(paypal_payment_id) if paypal_payment.state.lower() != 'created': # TODO log status print(paypal_payment) return jsonify({'message': ERR_CREATE_PAYPAL}), 402 # Execute the payment if (not paypal_payment.execute({"payer_id": paypal_payer_id}) or paypal_payment.state.lower() != 'approved'): # Could not execute or execute did not approve transaction return jsonify({'message': ERR_INVALID_PAYPAL}), 402 return complete_purchase(ticket)
def execute_chrosomomes(self, strain_config):
    """Add all chromosomes of a strain to the database.

    For each chromosome in the strain's FASTA file, writes a single
    tab-separated row (name, chromosome id, sequence) to a temp file and
    bulk-loads it into the `chromosome` table.
    """
    print("Adding chromosomes...")
    filepath = settings.data_folder+"/"+strain_config["sequence_filename"]
    for record in SeqIO.parse(filepath, "fasta"):  # loop through chromosomes
        chr_id = record.id
        if (chr_id in settings.ignored_chromosomes):
            continue
        seq_str = str(record.seq)
        temp_filepath = settings.temp_folder+"/tmp.fa"
        # Save a row of chromosome data to a text file (context manager
        # guarantees the handle is closed).
        with open(temp_filepath, "w") as temp_file:
            temp_file.write(strain_config["name"]+"\t"+chr_id+"\t"+seq_str)
        # BUG FIX: the SQL previously hard-coded '/tmp/tmp.fa' even though
        # the file is written under settings.temp_folder; load the file we
        # actually wrote. LOAD DATA takes no bound parameter for the
        # filename; the path comes from trusted settings.
        sql = """
            LOAD DATA LOCAL INFILE '{}'
            REPLACE INTO TABLE chromosome
        """.format(temp_filepath)
        db_session.execute(sql)
        db_session.commit()
        # Delete the file
        os.remove(temp_filepath)
        print("Added ["+chr_id+"]")
    print("Finished adding chromosomes to ["+strain_config["name"]+"]")
def char_detail(id):
    """Character detail page: base info, recent issues, and powers."""
    s = text("""
        SELECT * FROM `DETAIL_CHARACTER` WHERE CHARACTER_ID = :id
    """)
    obj = db_session.execute(s, {'id': id}).fetchone()
    # Up to 30 most recent issues featuring this character.
    s = text("""
        SELECT * FROM ISSUE_CHARACTER C, DETAIL_ISSUE I
        WHERE C.CHARACTER_ID = :id AND I.ISSUE_ID = C.ISSUE_ID
        ORDER BY I.ISSUE_YEAR DESC LIMIT 30
    """)
    obj2 = db_session.execute(s, {'id': id}).fetchall()
    # BUG FIX: the original swapped the bridge-table join columns
    # (CP.POWER_ID was compared to C.CHARACTER_ID and CP.CHARACTER_ID to
    # DP.POWER_ID), returning wrong powers. Join character -> bridge ->
    # power on the matching columns.
    s = text("""
        SELECT DP.POWER_ID, DP.POWER_NAME
        FROM DETAIL_CHARACTER C, CHARACTER_POWER CP, DETAIL_POWER DP
        WHERE CP.CHARACTER_ID = C.CHARACTER_ID
        AND CP.POWER_ID = DP.POWER_ID
        AND C.CHARACTER_ID = :id
    """)
    obj3 = db_session.execute(s, {'id': id}).fetchall()
    return render_template("char_detail.html", obj=obj, obj2=obj2, obj3=obj3)
def get_auth_update():
    """Webhook endpoint: receive an auth approval result and act on the
    matching pending policy. Suspicious behaviours are flagged inline
    rather than changed."""
    ##Assuming the POST becomes the request.json. JSON key names are correct in any event.
    print(request.json)
    receive_uuid = request.json['uuid']
    receive_auth_id = request.json['approval_request']['transaction']['hidden_details']['auth_id']
    status = request.json['status'].strip()
    # Match pending auths by auth_id, then narrow to the one whose
    # comms_info equals the uuid. Raises IndexError if nothing matches.
    pending_auths = Pending_Auth.query.filter(Pending_Auth.auth_id == receive_auth_id).all()
    pending_auth = [pending_auth for pending_auth in pending_auths if pending_auth.comms_info == str(receive_uuid)][0]
    print(pending_auth)
    if status == 'approved':
        db_session.delete(pending_auth)
        needed_auths = Pending_Auth.query.filter(Pending_Auth.group_id == pending_auth.group_id).all()
        print(needed_auths)
        # NOTE(review): the command runs while auths are still outstanding
        # (`if needed_auths:`); intuitively it should run only when none
        # remain — confirm intended semantics before changing.
        if needed_auths:
            print("RUN THE COMMAND")
            pending_policy = Pending_Policy.query.filter(Pending_Policy.auth_group_id == pending_auth.group_id).first()
            print(pending_policy.command)
            # SECURITY NOTE: executes a command string stored in the DB —
            # ensure only trusted code can write Pending_Policy.command.
            db_session.execute(pending_policy.command)
    elif status == 'denied':
        # find pending policy and drop
        Pending_Policy.query.filter(Pending_Policy.auth_group_id == pending_auth.group_id).delete()
        Pending_Auth.query.filter(Pending_Auth.group_id == pending_auth.group_id).delete()
    #else if status == 'expired':
    else:
        # find pending policy and drop for now (treated the same as denied)
        Pending_Policy.query.filter(Pending_Policy.auth_group_id == pending_auth.group_id).delete()
        Pending_Auth.query.filter(Pending_Auth.group_id == pending_auth.group_id).delete()
    ##Use uuid to determine which pending policy the result applies to and make changes (or don't) accordingly.
    ##Similar to send_auth_req, probably aren't going to be returning the uuid/auth_result, just placeholding for now.
    db_session.commit()
    return make_response(jsonify({'uuid': receive_uuid, 'status': 'success'}))
def execute(self, experiment_config):
    """Load per-transcript coverage values from a TSV file into the
    matching NucleotideMeasurementSet rows for this measurement run."""
    from sqlalchemy import update

    known_ids = get_inserted_transcript_ids()
    coverage_filepath = experiment_config["coverage_filepath"]
    print("coverage_filepath: [" + coverage_filepath + "]")
    # Missing files are tolerated with a warning rather than an error.
    if not os.path.isfile(coverage_filepath):
        print("WARNING: skipped import of missing [" + coverage_filepath + "]")
        return
    with open(coverage_filepath) as coverage_file:
        for line in coverage_file:
            transcript_id, coverage = line.strip().split("\t")
            if transcript_id not in known_ids:
                # skip transcripts not already in DB
                continue
            stmt = update(NucleotideMeasurementSet).where(and_(
                NucleotideMeasurementSet.nucleotide_measurement_run_id ==
                    experiment_config["nucleotide_measurement_run_id"],
                NucleotideMeasurementSet.transcript_id == transcript_id,
            )).values(coverage=coverage)
            db_session.execute(stmt)
    db_session.commit()
def __init__(self):
    """Reset persisted schedule state and load all scheduling inputs from the DB."""
    # Clear previous schedule rows and the schedule<->assignment link table.
    sess.query(Schedule).delete()
    sess.execute(text("DELETE FROM sched2assn"))  #FIXME
    sess.commit()
    print_msg('Started Scheduling')
    #declare data
    self.all_assns = {}  # (course_id, mentor_id) -> Assignment cache
    #Get data out of DB
    self.courses = sess.query(Course).all()
    #courses.append(None) # add NO_ASSIGNMENT
    self.mentors = sess.query(Mentor).all()
    # Assignment buckets, filled during scheduling.
    self.pre_assns = []
    self.req_assns = []
    self.add_assns = []
    self.schedule = Schedule()
    #mentor2course = {} #TODO for speeding up assn_valid
    self.req_mentor_slots = []  #required mentor slots
    self.add_mentor_slots = []  #additional mentor slots
    # Best costs found so far for the required/additional passes.
    self.req_best_cost = 0
    self.add_best_cost = 0
    self.unassned_courses = set(self.courses)  #discard items from this set as courses are assigned
def execute(self, experiment_config):
    """Read a TSV of (transcript_id, coverage) pairs and store each value
    on the matching NucleotideMeasurementSet row."""
    from sqlalchemy import update

    inserted = get_inserted_transcript_ids()
    path = experiment_config["coverage_filepath"]
    run_id = experiment_config["nucleotide_measurement_run_id"]
    print("coverage_filepath: ["+path+"]")
    if not os.path.isfile(path):
        # Tolerate a missing file with a warning.
        print("WARNING: skipped import of missing ["+path+"]")
        return
    with open(path) as handle:
        for row in handle:
            transcript_id, coverage = row.strip().split("\t")
            if transcript_id not in inserted:
                continue  # transcript was never loaded into the DB
            criteria = and_(
                NucleotideMeasurementSet.nucleotide_measurement_run_id == run_id,
                NucleotideMeasurementSet.transcript_id == transcript_id,
            )
            db_session.execute(
                update(NucleotideMeasurementSet)
                .where(criteria)
                .values(coverage=coverage))
    db_session.commit()
def test_create_article():
    """Smoke test: create a random Article with content and three catalog links."""
    _author_id = random.randint(1, 20)
    _new_article = Article()
    _new_article.title = 'Test_Article_%s' % random.randint(100001, 999999)
    _new_article.author_id = _author_id
    _new_article.published_datetime = _new_article.last_modified_datetime = datetime.datetime.now()
    _random_seed = str(random.random())
    # NOTE(review): md5()/sha224() are fed a str here, which only works on
    # Python 2 (Python 3 requires bytes) — confirm target interpreter.
    # ''.join(...) over the repeated hexdigest string is a no-op, kept as-is.
    _new_article.digest = 'digest - %s' % (''.join(random.randint(2, 5)*md5(_random_seed).hexdigest()))
    _content = ArticleContent(content='content - %s' % (''.join(random.randint(10, 50)*sha224(_random_seed).hexdigest())))
    _new_article.content = _content
    db_session.add(_new_article)  # @UndefinedVariable
    # Flush so _new_article.id is populated for the association rows below.
    db_session.flush()  # @UndefinedVariable
    # Three (possibly duplicate) catalog ids to link the article to.
    _catalogs = [
        random.randint(1, 20),
        random.randint(1, 20),
        random.randint(1, 20),
    ]
    for _cid in _catalogs:
        db_session.execute(association_table_catalog_article.insert().values({  # @UndefinedVariable
            'catalog_id': _cid,
            'article_id': _new_article.id,
        }))
    db_session.commit()  # @UndefinedVariable
def calc_assn_weights(self):
    """ For each Course/Mentor pair (referred to as an assignment) calculate the weight value """
    print_msg('Calculating Scores')
    # Start from a clean assignments table.
    sess.query(Assignment).delete()
    sess.commit()
    #For each pref_type, mentor, course triple, get all choices that match
    # NOTE(review): `FROM mentors M, courses C JOIN ...` cross-joins every
    # mentor with every course, then LEFT JOINs each mentor's choices so a
    # missing choice falls back to the pref type's default weight via
    # COALESCE. The stray NULL rows deleted below likely stem from this
    # implicit-join form — confirm against the schema.
    sess.execute(text("""
        INSERT INTO assignments (mentor_id, course_id, cost)
        SELECT M.mentor_id, C.course_id, SUM(COALESCE(PW.weight_value, PT.def_weight_val))
        FROM mentors M, courses C
        JOIN course2pref C2P ON C2P.course_id = C.course_id
        JOIN prefs P ON P.pref_id = C2P.pref_id
        JOIN pref_types PT ON PT.pref_type_id = P.pref_type_id
        LEFT JOIN choices Ch ON Ch.mentor_id = M.mentor_id AND Ch.pref_id = P.pref_id
        LEFT JOIN pref_weights PW ON Ch.weight_id = PW.pref_weight_id
        GROUP BY M.mentor_id, C.course_id
    """))
    sess.commit()
    #I don't know why these null assignments are getting added to the table
    sess.query(Assignment).filter(or_(Assignment.mentor_id == None, Assignment.course_id == None)).delete()
    sess.commit()
    assignments = sess.query(Assignment).all()
    # Cache each assignment by (course_id, mentor_id) for fast lookup later.
    for assignment in assignments:
        self.all_assns[(assignment.course_id,assignment.mentor_id)] = assignment
    print_msg('Succesfully Calculated Scores')
def put(self, user):
    """Update profile fields of *user*; only that user may edit themselves.

    Returns the dumped data on success, an authorization message if the
    JWT identity does not match, or a 500 tuple on failure.
    """
    #get the name of the user that we're trying to update
    username = user
    #get the name of the user trying to do the updating
    current_user_name = get_jwt_identity()
    #return a helpful message if the user is trying to edit an account which is not their own
    if not current_user_name == username:
        return {'message': 'You are not authorized to perform that action'}
    #get JSON from request body
    data = request.get_json()
    #load a user object from the data in the request body
    edited_user = user_schema_without_pass.load(data)
    #replace found user's fields with those from the edited_user
    new_user = update(UserModel.__table__).where(
        UserModel.__table__.c.username == username).values(
            firstName=edited_user.data['firstName'],
            lastName=edited_user.data['lastName'],
            email=edited_user.data['email'],
            phoneNumber=edited_user.data['phoneNumber'],
            proPicUrl=edited_user.data['proPicUrl'],
            organization=edited_user.data['organization'])
    try:
        ret = user_schema_without_pass.dump(data)
        db_session.execute(new_user)
        db_session.commit()
        return ret
    except Exception:
        # BUG FIX: was a bare `except:` (also swallowed SystemExit /
        # KeyboardInterrupt); roll back so the session stays usable.
        db_session.rollback()
        return {'message': 'Something went wrong'}, 500
def change_seat():
    """Move the current user's active reservation to another seat.

    The operation is serialized with a MySQL table lock; the lock is
    always released in the `finally` block, whatever happens.
    """
    if 'user_id' not in session:
        return login_in_please()
    user_id = session['user_id']
    req = request.get_json()
    if 'seat_num' not in req:
        return bad_request()
    seat_num = req['seat_num']
    res = None
    try:
        db_session.execute('LOCK TABLES tickets WRITE, users READ;')
        err = change_seat_for_user(user_id, seat_num)
        if err:
            res = err
        else:
            # Reload the (still valid) reservation to return it to the client.
            ticket = Ticket.query.filter(Ticket.owner_id == user_id) \
                .filter(Ticket.reserved_until >= datetime.now()) \
                .one()
            db_session.commit()
            res = jsonify({'ticket': ticket.as_pub_dict()}), 200
    except Exception as e:
        # TODO log
        print(e)
        res = jsonify({'error': 'Erreur inconnue.'}), 500
    finally:
        # Always release the table lock.
        db_session.execute('UNLOCK TABLES;')
    return res
def save(self):
    """Insert this user, or update the existing row on duplicate key."""
    payload = self.to_json()
    # Auto-managed columns must not be written explicitly.
    for field in ('id', 'created_at', 'updated_at'):
        del payload[field]
    # Always store a freshly-hashed password, never the raw value.
    payload['password'] = generate_password_hash(str(self.password))
    stmt = insert(Users).values(payload).on_duplicate_key_update(**payload)
    db_session.execute(stmt)
    return db_session.commit()
def complete_purchase(ticket):
    """Mark *ticket* as paid. Returns None on success, or a
    (response, status) error tuple."""
    try:
        db_session.execute('LOCK TABLES tickets WRITE, payments WRITE;')
        # update ticket
        ticket.paid = True
        db_session.add(ticket)
        db_session.commit()
        # TODO send email with payment confirmation
    except Exception as e:
        print(str(e))
        db_session.rollback()
        return jsonify({'message': ERR_COMPLETION}), 409
    finally:
        # BUG FIX: the original only unlocked on the success path, leaving
        # the tables locked after a failure. Unlock in `finally`,
        # best-effort so a broken session cannot mask the real error.
        try:
            db_session.execute('UNLOCK TABLES;')
        except Exception:
            pass
    return None
def loc_detail(id):
    """Location detail page plus the issues set at that location."""
    detail_sql = text("SELECT * FROM `DETAIL_LOCATION` WHERE LOCATION_ID= :id")
    obj = db_session.execute(detail_sql, {'id': id}).fetchone()
    issues_sql = text("""
        SELECT DI.ISSUE_ID, DI.ISSUE_NAME, DL.LOCATION_ID, DL.LOCATION_NAME
        FROM DETAIL_LOCATION DL , ISSUE_LOCATION IL , DETAIL_ISSUE DI
        WHERE DL.LOCATION_ID = IL.LOCATION_ID
        AND IL.ISSUE_ID = DI.ISSUE_ID
        AND DL.LOCATION_ID = :id
    """)
    obj2 = db_session.execute(issues_sql, {'id': id}).fetchall()
    return render_template("loc_detail.html", obj=obj, obj2=obj2)
def cre_detail(id):
    """Creator detail page plus the issues they worked on."""
    person_sql = text("SELECT * FROM `DETAIL_PERSON` WHERE PERSON_ID= :id")
    obj = db_session.execute(person_sql, {'id': id}).fetchone()
    issues_sql = text("""
        SELECT DI.ISSUE_ID, DI.ISSUE_NAME, DP.PERSON_ID, DP.PERSON_NAME
        FROM DETAIL_ISSUE DI , ISSUE_PERSON IP, DETAIL_PERSON DP
        WHERE DI.ISSUE_ID = IP.ISSUE_ID
        AND IP.CREATOR_ID = DP.PERSON_ID
        AND DP.PERSON_ID = :id
    """)
    obj2 = db_session.execute(issues_sql, {'id': id}).fetchall()
    return render_template("cre_detail.html", obj=obj, obj2=obj2)
def get(self, supplier_id):
    """Return downloadable outbound files for *supplier_id* (admin only),
    after deactivating all but the most recent entries."""
    if check_member_role(["admin"], current_user.email) == False:
        return {
            "message": 'Missing authorization to retrieve content',
        }, 401
    if check_member_supplier(supplier_id, current_user.email) == False:
        return {
            "message": 'Missing authorization to view info for this supplier',
        }, 401
    all_download = OutboundDownload.query.filter_by(
        supplier=supplier_id).all()
    data = [download.as_dict() for download in all_download]
    # Finds up to the 10 most recent entries for every user with inner query. Then deletes everything not returned by the inner query.
    # NOTE(review): contrary to the comment above, the queries keep only
    # the 2 most recent; `result` and `data` are computed but never used;
    # and the UPDATE compares OutboundDownload.supplier to the subquery's
    # scalar (its first selected column), which does not look like a
    # supplier value — confirm this deactivation logic against the schema.
    result = OutboundDownload.query.filter_by(supplier=supplier_id). \
        order_by(OutboundDownload.created_at.desc()). \
        slice(2,(len(all_download)))
    sq = OutboundDownload.query.filter_by(supplier=supplier_id). \
        order_by(OutboundDownload.created_at.desc()). \
        limit(2). \
        with_for_update()
    q = update(OutboundDownload).where(
        OutboundDownload.supplier == sq.as_scalar()).values(
            {"active": False})
    db_session.execute(q)
    db_session.commit()
    print(q)
    all_download2 = OutboundDownload.query.filter_by(
        supplier=supplier_id).all()
    result2 = OutboundDownload.query.filter_by(
        supplier=supplier_id).count()
    resultdata = [download.as_dict() for download in all_download2]
    # print(result2)
    # print(len(all_download))
    return {
        "version": api_version,
        "message": "downloadable outbound file for supplier: {}".format(supplier_id),
        "data": resultdata
    }, 200
def success():
    """Render every Items row as a string (debug-style output)."""
    qry = db_session.query(Items)
    # CLEANUP: the original also ran db_session.execute(qry) and threw the
    # result away, and rebound `results` three times; one query is enough.
    # Note: __dict__ still includes SQLAlchemy's internal
    # _sa_instance_state key, exactly as before.
    results = [u.__dict__ for u in qry.all()]
    return str(results)
def upgrade():
    # we need drop if exists so that script fails don't break the import
    db_session.execute("DROP TABLE IF EXISTS raw_replicate_counts")
    # One row per (run, transcript, minusplus, bio replicate, tech
    # replicate), with the raw count values serialized as text.
    op.create_table('raw_replicate_counts',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('nucleotide_measurement_run_id', sa.Integer(), nullable=True),
        sa.Column('transcript_id', sa.String(length=256), nullable=True),
        sa.Column('minusplus_id', sa.String(length=256), nullable=False),
        sa.Column('bio_replicate_id', sa.Integer(), nullable=False),
        sa.Column('tech_replicate_id', sa.Integer(), nullable=False),
        sa.Column('values', sa.Text(), nullable=False),
        sa.ForeignKeyConstraint(['nucleotide_measurement_run_id'], ['nucleotide_measurement_run.id'], ),
        sa.ForeignKeyConstraint(['transcript_id'], ['transcript.id'], ),
        sa.PrimaryKeyConstraint('id')
    )
    # Run the data import inside the migration so schema and data stay in sync.
    from importers import import_raw_replicate_counts
    import_raw_replicate_counts()
def distribution_of_cosponsorships(age):
    """Return a list of per-person cosponsorship counts for bills of *age*."""
    bill_t = Bill.__table__
    stmt = select([func.count(cosponsorship.c.id)])\
        .select_from(cosponsorship.join(bill_t))\
        .where(bill_t.c.age == age)\
        .group_by(cosponsorship.c.person_id)
    distribution = db_session.execute(stmt).fetchall()
    # BUG FIX: map() returns a lazy iterator on Python 3, so callers could
    # not index it or iterate twice; materialize a list (matches the
    # Python 2 behaviour this code was written for).
    return [row[0] for row in distribution]
def getall2brands(self):
    """Per-country aggregation of non-running phishing IPs (brand view)."""
    query = """
        select I.*,c.id, c.symbol, c.name, c.shortname, count(c.shortname) AS cnt
        FROM Ip AS I
        LEFT JOIN country AS C ON (C.id=I.country)
        LEFT JOIN Phishing AS P ON (P.ip=I.id)
        WHERE P.phishingstatus !='running'
        GROUP BY(c.shortname);
    """
    rows = db_session.execute(query)
    return rows
def upgrade():
    # we need drop if exists so that script fails don't break the import
    db_session.execute("DROP TABLE IF EXISTS raw_replicate_counts")
    # One row per (run, transcript, minusplus, bio replicate, tech
    # replicate), with the raw count values serialized as text.
    op.create_table(
        'raw_replicate_counts',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('nucleotide_measurement_run_id', sa.Integer(), nullable=True),
        sa.Column('transcript_id', sa.String(length=256), nullable=True),
        sa.Column('minusplus_id', sa.String(length=256), nullable=False),
        sa.Column('bio_replicate_id', sa.Integer(), nullable=False),
        sa.Column('tech_replicate_id', sa.Integer(), nullable=False),
        sa.Column('values', sa.Text(), nullable=False),
        sa.ForeignKeyConstraint(
            ['nucleotide_measurement_run_id'],
            ['nucleotide_measurement_run.id'],
        ),
        sa.ForeignKeyConstraint(
            ['transcript_id'],
            ['transcript.id'],
        ),
        sa.PrimaryKeyConstraint('id'))
    # Run the data import inside the migration so schema and data stay in sync.
    from importers import import_raw_replicate_counts
    import_raw_replicate_counts()
def issue_list(page):
    """Paginated issue list (30 per page) plus the user's own issues."""
    page_sql = text("""
        SELECT ISSUE_ID, ISSUE_NAME, ISSUE_DAY, ISSUE_MONTH, ISSUE_YEAR
        FROM DETAIL_ISSUE
        ORDER BY `ISSUE_YEAR` DESC , `ISSUE_MONTH` DESC , `ISSUE_DAY` DESC
        LIMIT 30 OFFSET :p
    """)
    obj = db_session.execute(page_sql, {'p': page * 30}).fetchall()
    mine_sql = text("""
        SELECT I.ISSUE_ID, ISSUE_NAME, ISSUE_DAY, ISSUE_MONTH, ISSUE_YEAR
        FROM USER_ISSUE U, DETAIL_ISSUE I
        WHERE U.ISSUE_ID = I.ISSUE_ID
        LIMIT 30
    """)
    obj2 = db_session.execute(mine_sql).fetchall()
    return render_template("issue_list.html", obj=obj, obj2=obj2, page=page)
def getall2map(self):
    """Per-country aggregation of non-running phishing IPs added since
    yesterday (map view)."""
    # self.yesterday() is an internally generated date, not user input.
    sql = """
        select I.*,c.id, c.symbol, c.name, c.shortname, count(c.shortname) AS cnt
        FROM Ip AS I
        LEFT JOIN country AS C ON (C.id=I.country)
        LEFT JOIN Phishing AS P ON (P.ip=I.id)
        WHERE P.phishingstatus !='running'
        AND P.dateadded >= '{}'
        GROUP BY(c.shortname);
    """.format(self.yesterday())
    rows = db_session.execute(sql)
    return rows
def import_raw_replicate_counts():
    """Import every replicate distribution file configured in
    settings.raw_replicate_counts_keys (lane -> bio rep -> tech rep)."""
    print("Importing raw replicate counts...")
    db_session.execute("USE "+settings.db_name)
    entries = settings.raw_replicate_counts_keys
    for lane_type in entries:
        for bio_ind, bio_rep in enumerate(entries[lane_type]):
            for tech_ind, tech_key in enumerate(bio_rep):
                # load the counts from the tech key
                input_filepath = settings.data_folder+"/reps/"+tech_key+"/results.dist.txt"
                print("Importing "+input_filepath)
                import_raw_replicate_counts_file(
                    db_session, lane_type, bio_ind + 1, tech_ind + 1,
                    input_filepath)
    print("Committing...")
    db_session.commit()
    print("Done.")
def import_scratch():
    """Rebuild the whole database from scratch and run every importer."""
    # Dropping and recreating the schema is much more reliable than
    # deleting per-table through the ORM.
    for statement in ("DROP DATABASE " + settings.db_name,
                      "CREATE DATABASE " + settings.db_name,
                      "USE " + settings.db_name):
        db_session.execute(statement)
    db_session.commit()
    # Create all the tables.
    Base.metadata.create_all(bind=engine)
    # Add the annotations.
    SequenceImporter().execute()
    # DMS reactivities: raw plus/minus first; includes coverage and
    # normalisation.
    ReactivitiesImporter().execute(settings.dms_reactivities_experiment)
    # Import all available RNA structures.
    StructureImporter().execute(settings.structures_in_silico)
    StructureImporter().execute(settings.structures_in_vivo)
    # PCA analysis on the structures.
    PcaImporter().execute(settings.structures_in_silico)
    PcaImporter().execute(settings.structures_in_vivo)
def import_scratch():
    """Drop, recreate and fully repopulate the database."""
    # Recreating the whole database is more reliable than ORM deletes.
    db_session.execute("DROP DATABASE "+settings.db_name)
    db_session.execute("CREATE DATABASE "+settings.db_name)
    db_session.execute("USE "+settings.db_name)
    db_session.commit()

    # Tables.
    Base.metadata.create_all(bind=engine)

    # Annotations.
    SequenceImporter().execute()

    # Raw DMS reactivities (plus/minus), with coverage and normalisation.
    ReactivitiesImporter().execute(settings.dms_reactivities_experiment)

    # RNA structures, then PCA on each structure set.
    for structure_set in (settings.structures_in_silico, settings.structures_in_vivo):
        StructureImporter().execute(structure_set)
    for structure_set in (settings.structures_in_silico, settings.structures_in_vivo):
        PcaImporter().execute(structure_set)
def import_raw_replicate_counts():
    """Walk settings.raw_replicate_counts_keys and import every replicate
    results.dist.txt file into the current database."""
    print("Importing raw replicate counts...")
    db_session.execute("USE " + settings.db_name)
    counts_keys = settings.raw_replicate_counts_keys
    for lane_type in counts_keys:
        bio_reps = counts_keys[lane_type]
        # Replicate numbers in the DB are 1-based.
        for bio_num in range(1, len(bio_reps) + 1):
            tech_keys = bio_reps[bio_num - 1]
            for tech_num in range(1, len(tech_keys) + 1):
                key = tech_keys[tech_num - 1]
                path = settings.data_folder + "/reps/" + key + "/results.dist.txt"
                print("Importing " + path)
                import_raw_replicate_counts_file(
                    db_session, lane_type, bio_num, tech_num, path)
    print("Committing...")
    db_session.commit()
    print("Done.")
def char_list(page):
    """Characters by name with issue counts (paginated), plus characters
    appearing in the user's own issues."""
    chars_sql = text("""
        SELECT C.CHARACTER_ID, CHARACTER_NAME, CHARACTER_GENDER, COUNT( * ) AS ISSUE_COUNT
        FROM DETAIL_CHARACTER C, ISSUE_CHARACTER I
        WHERE I.CHARACTER_ID = C.CHARACTER_ID
        GROUP BY C.CHARACTER_ID
        ORDER BY CHARACTER_NAME ASC
        LIMIT 30 OFFSET :p
    """)
    obj = db_session.execute(chars_sql, {'p': page * 30}).fetchall()
    owned_sql = text("""
        SELECT DISTINCT C.CHARACTER_ID, CHARACTER_NAME, CHARACTER_GENDER
        FROM DETAIL_CHARACTER C, USER_ISSUE U, ISSUE_CHARACTER I
        WHERE U.ISSUE_ID = I.ISSUE_ID
        AND I.CHARACTER_ID = C.CHARACTER_ID
    """)
    obj2 = db_session.execute(owned_sql).fetchall()
    return render_template("char_list.html", obj=obj, obj2=obj2, page=page)
def init_db():
    """Drop and recreate the schema, then seed demo questions, comrades and rounds."""
    Base.metadata.drop_all(bind=engine)
    Base.metadata.create_all(bind=engine)
    # Two questions, one per theme.
    wall_question = Question(text='What was on your wall as a kid?')
    db_session.add(wall_question)
    pet_question = Question(text='What was your favorite pet?')
    db_session.add(pet_question)
    # Two comrades.
    matt = Comrade(name='Matt', email='*****@*****.**')
    db_session.add(matt)
    julian = Comrade(name='Julian', email='*****@*****.**')
    db_session.add(julian)
    # Four weekly rounds, two per question.
    wall_1 = Round(name='wall_round_one', date=date.today(), question=wall_question)
    wall_2 = Round(name='wall_round_two', date=date.today() + one_week, question=wall_question)
    pet_1 = Round(name='pet_round_one', date=date.today() + 2 * one_week, question=pet_question)
    pet_2 = Round(name='pet_round_two', date=date.today() + 3 * one_week, question=pet_question)
    db_session.add_all([wall_1, wall_2, pet_1, pet_2])
    # Commit first so the uuid primary keys exist for the association rows.
    db_session.commit()
    # Link comrades to rounds through the association table.
    db_session.execute(comrade_to_round.insert().values([
        (matt.uuid, wall_1.uuid),
        (julian.uuid, wall_2.uuid),
        (julian.uuid, pet_1.uuid),
        (matt.uuid, pet_2.uuid)
    ]))
    db_session.commit()
def savedevents(): eventids = UserSavedEvents.query.all() allids = "(" for ids in eventids: allids += str(ids.event) + "," allids = allids[:-1] allids += ")" print allids #events = Event.query.filter(Event.id.in_((allids))).all() events = db_session.execute("SELECT * FROM events WHERE id IN " + str(allids)) return render_template("savedevents.html", events=events)
def execute_payment():
    """Execute a previously-created PayPal payment.

    Expects JSON with 'payment_id' and 'payer_id'. The helper may return
    with DB tables still locked, so both the error path and the exception
    path roll back and issue UNLOCK TABLES.
    """
    req = request.get_json()
    paypal_payment_id = req['payment_id']
    payer_id = req['payer_id']
    try:
        err = err_execute_and_complete_payment(paypal_payment_id, payer_id)
        if err:
            # Helper returned with tables possibly locked: release before
            # surfacing its error response.
            db_session.rollback()
            db_session.execute('UNLOCK TABLES;')
            return err
        # Success
        return jsonify({'message': MSG_SUCCESS_PAY}), 200
    except Exception as e:
        try:
            db_session.rollback()
            # try to unlock table (best-effort; the session may be broken)
            db_session.execute('UNLOCK TABLES;')
        except:
            pass
        # TODO logging and error redirect
        print(e)
        return jsonify({'error': 'Une erreur inconnue est survenue.'}), 500
def query_locations(lat_user, long_user, communities, radius):
    """Find locations within *radius* of the user for each community.

    Returns a dict keyed by location id; at most 1000 matches are kept
    per community.
    """
    goodlocations = []
    for community in communities:
        # SECURITY FIX: the original %-interpolated `community` (and the
        # numbers) straight into the SQL string — an injection risk for
        # the string value. Bind everything as parameters instead.
        sql_query = (
            "SELECT * FROM locations "
            "HAVING locations.community = :community "
            "AND (SQRT(POWER(:lat - locations.lat, 2) + "
            "POWER(:lng - locations.long, 2))) <= :radius")
        locations = db_session.execute(sql_query, {
            'community': community,
            'lat': lat_user,
            'lng': long_user,
            'radius': radius,
        })
        num_locations_found = 0
        for location in locations:
            location = dict(zip(location.keys(), location))
            goodlocations.append(location)
            num_locations_found += 1
            if num_locations_found == 1000:
                break
    # Key the results by location id, as before.
    goodlocations_dict = {}
    for location in goodlocations:
        goodlocations_dict[location['id']] = location
    return goodlocations_dict
def db_change_IPv4Address(ip_domain_id, ip, data): print ip_domain_id, ip, data print "bla" for (entry) in data: if entry == 'ip_domain_id': stmt = update(IPv4Address).where((IPv4Address.ip==ip) & (IPv4Address.ip_domain_id == ip_domain_id) ).values(ip_domain_id=data[entry]) if entry == 'fqdn': stmt = update(IPv4Address).where((IPv4Address.ip==ip) & (IPv4Address.ip_domain_id == ip_domain_id) ).values(fqdn=data[entry]) if entry == 'description': stmt = update(IPv4Address).where((IPv4Address.ip==ip) & (IPv4Address.ip_domain_id == ip_domain_id) ).values(description=data[entry]) if entry == 'reserved': stmt = update(IPv4Address).where((IPv4Address.ip==ip) & (IPv4Address.ip_domain_id == ip_domain_id) ).values(reserved=data[entry]) print stmt result = db_session.execute(stmt) print vars(result) return stmt
def login():
    """Log a user in: username, password hash and Google Auth code must all match."""
    error = None
    form = LoginForm(request.form)
    if request.method == 'POST' and form.validate():
        users_db = db_session.execute('SELECT id, name, gauth, password FROM users')
        # BUG FIX: the original used substring membership
        # (`username in user[1]`) and kept looping over every row, so a
        # later row could overwrite the verdict for the matching user.
        # Find the row whose name equals the submitted username, then
        # check its credentials.
        user = None
        for row in users_db:
            if request.form['username'] == row[1]:
                user = row
                break
        if user is None:
            error = "User not found"
        elif not check_password_hash(user[3], request.form['password']):
            error = "Password incorrect"
        # NOTE(review): auth() receives the password hash (user[3]) while
        # the gauth column is user[2] — confirm which is intended.
        elif not auth(user[3], request.form['authcode']):
            error = "Google Auth incorrect"
            #return redirect(url_for('home'))
        else:
            error = form.validate()
    return render_template('login.html', form=form, error=error)
def login():
    """Authenticate a user by username, password hash and Google Auth code."""
    error = None
    form = LoginForm(request.form)
    if request.method == 'POST' and form.validate():
        users_db = db_session.execute(
            'SELECT id, name, gauth, password FROM users')
        # BUG FIX: substring membership (`username in user[1]`) plus a loop
        # over all rows meant later rows overwrote the result for the
        # matching user. Locate the exact username first.
        user = None
        for row in users_db:
            if request.form['username'] == row[1]:
                user = row
                break
        if user is None:
            error = "User not found"
        elif not check_password_hash(user[3], request.form['password']):
            error = "Password incorrect"
        # NOTE(review): auth() is given the password hash (user[3]); the
        # gauth column is user[2] — confirm which is intended.
        elif not auth(user[3], request.form['authcode']):
            error = "Google Auth incorrect"
            #return redirect(url_for('home'))
        else:
            error = form.validate()
    return render_template('login.html', form=form, error=error)
def book_temp(user_id, ticket_type, price, tickets_max, seat_num=None): try: db_session.execute('LOCK TABLES tickets WRITE;') # Get reservation and paid ticket total count for user user_ticket_count = Ticket.query \ .filter(Ticket.owner_id == user_id) \ .filter(or_( Ticket.paid, Ticket.reserved_until >= datetime.now())) \ .count() # Check if user can order a ticket if user_ticket_count > 0: db_session.rollback() db_session.execute('UNLOCK TABLES;') return False, \ 'Vous avez déjà un billet ou une réservation en cours !' # Get reservation and paid ticket total count for ticket type ticket_type_count = Ticket.query \ .filter(Ticket.type_id == ticket_type) \ .filter(or_( Ticket.paid, Ticket.reserved_until >= datetime.now())) \ .count() # Check if more tickets is allowed for this type if ticket_type_count >= tickets_max[ticket_type]: db_session.rollback() db_session.execute('UNLOCK TABLES;') return False, \ 'Le maximum de billet a été réservé pour le moment !' # Check if seat is taken if seat_num: wanted_seat_count = Ticket.query \ .filter(Ticket.seat_num == seat_num) \ .filter(or_( Ticket.paid, Ticket.reserved_until >= datetime.now())) \ .count() if wanted_seat_count > 0: db_session.rollback() db_session.execute('UNLOCK TABLES;') return False, \ 'Ce siège est déjà occupé ou réservé !' # Book ticket for 10 minutes reserved_until = datetime.now() + timedelta(minutes=10) # Insert ticket ticket = Ticket(ticket_type, user_id, price, reserved_until=reserved_until, seat_num=seat_num) db_session.add(ticket) db_session.commit() db_session.execute('UNLOCK TABLES;') return True, ticket except Exception as e: db_session.rollback() db_session.execute('UNLOCK TABLES;') print(str(e)) return False, '''\
def index():
    """Log all stored numbers and render the entries page."""
    # CLEANUP: the original ran a raw SELECT and immediately overwrote the
    # cursor with the ORM query, so the raw query was dead code.
    cur = Numbers.query.all()
    app.logger.debug('All numbers: %s' % (cur))
    # NOTE(review): the template is rendered without the rows — presumably
    # it pulls its entries elsewhere; confirm before passing them in.
    return render_template('show_entries.html')
def getall(self):
    """Return every row of the Country table."""
    rows = db_session.execute("""
        select * from Country
    """)
    return rows
def issue_add(id, next_page):
    """Add issue *id* to the user's list (idempotent via REPLACE), then redirect."""
    stmt = text("REPLACE INTO USER_ISSUE VALUES(:id)")
    db_session.execute(stmt, {'id': id})
    return redirect(next_page)
# Re-import the VoteSmart candidates table from the CSV at file_location.
line_num = 0
header_row = ['votesmart_candidate_id', 'crp_id', 'fec_id', 'nimsp_candidate_id', 'nimsp_entity_id', 'firstname', \
    'nickname', 'middlename', 'lastname', 'suffix', 'office_state_id', 'election_state_id']
db_session.begin()
# Wipe the candidates table before re-importing (the original comment said
# "Delete donations", which was wrong).
sql_query = "TRUNCATE votesmart_candidates"
results = db_session.execute(sql_query)
# BUG FIX: the CSV handle was opened and never closed; use a context manager.
with open(file_location, 'r') as csvfile:
    csvreader = csv.reader(csvfile, quotechar='"')
    for row in csvreader:
        line_num += 1
        # Skip malformed rows but report them with their line number.
        if len(row) != len(header_row):
            print("ERROR: ", line_num, len(row), row)
            continue
        row_dict = dict(zip(header_row, row))
        candidate = VotesmartCandidate(**row_dict)
        db_session.add(candidate)
def pub_detail(id):
    """Publisher detail page."""
    query = text("SELECT * FROM `DETAIL_PUBLISHER` WHERE PUB_ID= :id")
    row = db_session.execute(query, {'id': id}).fetchone()
    return render_template("pub_detail.html", obj=row)
def cre_list(page):
    """Creators ordered by name, 30 per page."""
    page_sql = text("SELECT * FROM `DETAIL_PERSON` ORDER BY `PERSON_NAME` LIMIT 30 OFFSET :p")
    rows = db_session.execute(page_sql, {'p': page * 30}).fetchall()
    return render_template("cre_list.html", obj=rows, page=page)
from sqlalchemy import func import StringIO import random import datetime import collections from matplotlib.backends.backend_agg import FigureCanvasAgg as FigureCanvas from matplotlib.figure import Figure import matplotlib.pyplot as plt import numpy as np import seaborn as sns; sns.set(style="ticks", color_codes=True) from flask import request, jsonify, make_response wake_up_db = db_session.execute("select 1") #Git comment def register_user(user): db_session.add(user) db_session.flush() return user def get_user_by_id(input_id): user = User.query.filter_by(id=input_id).first() number_of_drinks = db_session.query(func.max(Result.number_of_drinks)).filter(Result.user_id == user.id).scalar() if user is not None: return_user = user.get_object_with_results() return_user.update({'number_of_drinks': number_of_drinks}) return jsonify(user=return_user) else:
def getall(self):
    """Return every row of the Brand table."""
    result = db_session.execute("""
        select * from Brand
    """)
    return result
from models import Book, Author, author_book
from database import init_db, db_session

# Seed script: create tables and populate demo books/authors.
init_db()
# Clear the association table first, then the entity tables.
db_session.execute(author_book.delete())
Book.query.delete()
Author.query.delete()

# Books.
cookbook_book = Book(title="Python Cookbook")
learning_book = Book(title="Learning Python")
pocket_book = Book(title="Python Pocket Reference")
enlightment_book = Book(title="Code Complete")

# Authors.
bkj_author = Author(name="Brian K. Jones")
db_author = Author(name="David Beazley")
ml_author = Author(name="Mark Lutz")
sm_author = Author(name="Steve McConnell")

# Link books to their authors via the many-to-many relationship.
cookbook_book.authors.append(bkj_author)
cookbook_book.authors.append(db_author)
pocket_book.authors.append(ml_author)
learning_book.authors.append(ml_author)
enlightment_book.authors.append(sm_author)

db_session.add(cookbook_book)
db_session.add(learning_book)
db_session.add(pocket_book)
db_session.add(enlightment_book)
# NOTE(review): this visible chunk adds only one author explicitly and
# never commits — presumably the script continues past this excerpt.
db_session.add(bkj_author)
def issue_del(id, next_page):
    """Remove issue *id* from the user's list and redirect."""
    stmt = text("DELETE FROM USER_ISSUE WHERE ISSUE_ID=:id")
    db_session.execute(stmt, {'id': id})
    return redirect(next_page)
def seed_db():
    """Create the demo `data` table, seed 11 fixed rows, and persist p1..p6.

    NOTE(review): p1..p6 are not defined in this function — presumably
    module-level fixtures; confirm before reuse.
    """
    db_session.execute('''CREATE TABLE data (a int, b int, c int, x int , y int , z int) ''')
    # Fixed sample rows for the demo dataset.
    db_session.execute('''INSERT INTO data VALUES (1,2,3,4,5,6) ''')
    db_session.execute('''INSERT INTO data VALUES (2,2,3,6,5,10) ''')
    db_session.execute('''INSERT INTO data VALUES (3,3,3,9,5,12) ''')
    db_session.execute('''INSERT INTO data VALUES (4,3,2,3,5,14) ''')
    db_session.execute('''INSERT INTO data VALUES (5,4,2,5,5,16) ''')
    db_session.execute('''INSERT INTO data VALUES (6,4,2,1,5,18) ''')
    db_session.execute('''INSERT INTO data VALUES (7,5,1,2,5,16) ''')
    db_session.execute('''INSERT INTO data VALUES (8,5,1,8,5,16) ''')
    db_session.execute('''INSERT INTO data VALUES (9,6,1,3,5,12) ''')
    db_session.execute('''INSERT INTO data VALUES (10,7,1,5,5,10) ''')
    db_session.execute('''INSERT INTO data VALUES (11,8,0,8,5,6) ''')
    db_session.add_all([p1, p2, p3, p4, p5, p6])
    db_session.commit()
def seed_db():
    """Create the demo `data` table, seed 11 fixed rows, and persist p1..p6.

    NOTE(review): p1..p6 are not defined in this function — presumably
    module-level fixtures; confirm before reuse.
    """
    db_session.execute('''CREATE TABLE data (a int, b int, c int, x int , y int , z int) ''')
    # Fixed sample rows for the demo dataset.
    db_session.execute('''INSERT INTO data VALUES (1,2,3,4,5,6) ''')
    db_session.execute('''INSERT INTO data VALUES (2,2,3,6,5,10) ''')
    db_session.execute('''INSERT INTO data VALUES (3,3,3,9,5,12) ''')
    db_session.execute('''INSERT INTO data VALUES (4,3,2,3,5,14) ''')
    db_session.execute('''INSERT INTO data VALUES (5,4,2,5,5,16) ''')
    db_session.execute('''INSERT INTO data VALUES (6,4,2,1,5,18) ''')
    db_session.execute('''INSERT INTO data VALUES (7,5,1,2,5,16) ''')
    db_session.execute('''INSERT INTO data VALUES (8,5,1,8,5,16) ''')
    db_session.execute('''INSERT INTO data VALUES (9,6,1,3,5,12) ''')
    db_session.execute('''INSERT INTO data VALUES (10,7,1,5,5,10) ''')
    db_session.execute('''INSERT INTO data VALUES (11,8,0,8,5,6) ''')
    db_session.add_all([p1,p2,p3,p4,p5,p6])
    db_session.commit()