def save(job_external_id, path, cut):
    """Persist a CutJob row; return True on success, False on any failure."""
    try:
        session.add(CutJob(job_external_id, path, cut))
        session.commit()
        return True
    except Exception:
        # Narrowed from a bare except; roll back so the session stays
        # usable after a failed commit.
        session.rollback()
        return False
def register_post():
    """Handle the registration POST: build a User from the form and save it."""
    # Populate a fresh user object from the submitted fields.
    user = User()
    user.first_name = request.form.get('first_name')
    user.last_name = request.form.get('last_name')
    user.email = request.form.get('email')
    user.phone = request.form.get('phone')
    user.password = request.form.get('password')
    user.role = request.form.get('role')
    # Reject duplicate email addresses up front.
    existing = db_session.query(User).filter_by(email=user.email).first()
    if existing:
        flash("Email already in use", "error")
        return redirect(url_for("user_login_get"))
    # Persist the new account.
    db_session.add(user)
    db_session.commit()
    # Send the user on to the landing page.
    return redirect(url_for("traveler_view_trip"))
def add_price(user, price):
    """Set the price on the user's most recent trade."""
    most_recent = get_recent_trade(user)
    most_recent.price = float(price)
    # NOTE(review): unlike add_wallet(), no session.commit() here —
    # presumably the caller commits; confirm.
    session.add(most_recent)
def register_user():
    """Handle the sign-up form: validate, create the User, start a session."""
    if request.method != 'POST':
        return redirect(url_for("login"))
    email = request.form['email']
    password = request.form['password']
    confirm_password = request.form['confirm_password']
    age = request.form['age']
    gender = request.form['gender']
    job = request.form['job']
    zipcode = request.form['zipcode']
    # The two password fields must agree.
    if password != confirm_password:
        flash("Your passwords do not match. Please re-type all your information.")
        return redirect("/sign_up")
    # Each email may only be registered once.
    if db_session.query(User).filter_by(email=email).first():
        flash("Email is already in use.", "error")
        return redirect(url_for("display_search"))
    # NOTE(review): password is stored as received — presumably hashed in
    # the User model; confirm.
    user = User(email=email, password=password, age=age, gender=gender,
                job=job, zipcode=zipcode)
    db_session.add(user)
    db_session.commit()
    db_session.refresh(user)
    session['user_id'] = user.id  # save a cookie to the browser
    return redirect(url_for("display_search"))
def load_users(session):
    """Seed the users table from the pipe-delimited seed_data/u.user file."""
    with open("seed_data/u.user", "rb") as user_file:
        # Only columns 0 (id), 1 (age) and 4 (zipcode) are stored.
        for row in csv.reader(user_file, delimiter="|"):
            session.add(User(id=row[0], age=row[1], zipcode=row[4]))
    # Single commit for the whole seed run.
    session.commit()
def login(provider_name):
    """OAuth login via authomatic; creates a local User on first login."""
    response = make_response()
    result = authomatic.login(WerkzeugAdapter(request, response), provider_name)
    if not result:
        # Authentication flow still in progress: return the provider page.
        return response
    if result.user:
        # Refresh profile data, then look the user up by Facebook id.
        result.user.update()
        facebook_id = result.user.id
        user = dbsession.query(User).filter_by(facebook_id=facebook_id).first()
        if not user:
            # First visit: create a local account for this identity.
            user = User(facebook_id=facebook_id, email=result.user.email,
                        name=result.user.name)
            dbsession.add(user)
            dbsession.commit()
        # Log the user in and redirect to the homepage.
        login_user(user)
        return redirect('/')
    return response
def load_ratings(session):
    """Seed the ratings table from the tab-delimited seed_data/u.data file."""
    with open("seed_data/u.data", "rb") as ratings_file:
        # Columns used: 0 user id, 1 movie id, 2 rating value.
        for row in csv.reader(ratings_file, delimiter="\t"):
            session.add(Rating(user_id=row[0], movie_id=row[1], rating=row[2]))
    # Single commit for the whole seed run.
    session.commit()
def sync_photo(id, flickr, check_dirty=False):
    # Mirror one Flickr photo's metadata into the local Photo table and
    # return the (possibly pre-existing) row. With check_dirty=True an
    # existing row is re-checked against Flickr and updated if it changed.
    # NOTE(review): `id` shadows the builtin and is rebound below.
    print id
    db_photo = session.query(Photo).filter(Photo.flickr_id == id).first()
    if db_photo and not check_dirty:
        print 'Photo is already local.'
        return db_photo
    photo = simplejson.loads(flickr.photos_getInfo(photo_id=id, nojsoncallback=1))
    p = photo['photo']
    (id, title) = (int(p['id']), p['title']['_content'])
    url = url_for_photo(p)
    page_url = p['urls']['url'][0]['_content']
    description = """%s\n %s Taken: %s in %s Flickr: %s""" % (p['title']['_content'], p['description']['_content'], p['dates']['taken'], loc_to_string(p), page_url)
    if db_photo:
        print "Photo %s already exists" % id
        if db_photo.title == title and db_photo.description == description:
            # Nothing changed remotely; keep the local row as-is.
            return db_photo
        # Remote metadata differs: update the row and flag it dirty.
        db_photo.dirty = True
        db_photo.title = title
        db_photo.description = description
    else:
        url = url_for_photo(p)
        db_photo = Photo(title= title, description=description, flickr_id=id, dirty=False, url=url)
    # Respect Flickr's visibility flag.
    if not p['visibility']['ispublic']:
        db_photo.private = True
    session.add(db_photo)
    sync_tags(db_photo, p)
    session.commit()
    return db_photo
def save_assets():
    """
    Pulls assets from user input (as a post request), save to database,
    and routes to next question (/results will perform the calculations).
    """
    form = AssetsForm(request.form)
    if not form.validate_on_submit():
        flash("Please enter an integer. No commas or symbols.")
        return redirect("/input/assets")
    assets = float(request.form["assets"])
    # Update the existing banking row if there is one so repeated
    # submissions overwrite rather than insert duplicates.
    existing = m_session.query(model.UserBanking).filter_by(
        user_id=g.user.id).first()
    if existing is not None:
        m_session.query(model.UserBanking).filter_by(
            user_id=g.user.id).update(
            {model.UserBanking.inputted_assets: assets})
    else:
        m_session.add(model.UserBanking(
            user_id=g.user.id, inputted_assets=assets, checking_amt=0,
            savings_amt=0, IRA_amt=0, comp401k_amt=0, investment_amt=0))
    m_session.commit()
    return redirect("/input/income")
def get(ctx,request:YuHeLg.Request):
    # Return one post (with tags and dig/bury info) as JSON, bumping its
    # hit counter; raises 404 when the post is missing or anything fails.
    post_id = request.vars.id
    try:
        post = session.query(Post).filter(Post.id==post_id).one()
        post.hits +=1
        session.add(post)
        try:
            session.commit()
        except:
            # Best-effort hit counting: a failed commit is rolled back and
            # the request still proceeds with the fetched post.
            session.rollback()
        # Collect up to 10 tags for this post.
        pts = session.query(Post_tag).filter(Post_tag.post_id == post_id).limit(10).all()
        tags = " ".join([pt.tag.tag for pt in pts])
        buryinfo, diginfo = get_digs_or_burys(post_id)
        return jsonify(post={
            'post_id':post.id,
            'title':post.title,
            'author':post.author.name,
            'postdate':post.postdate.timestamp(),
            'content':post.content.content,
            'hits':post.hits
        },diginfo=diginfo,buryinfo=buryinfo,tags=tags)
    except Exception as e:
        print(e)
        raise exc.HTTPNotFound()
def __get_calculator(self, matrix):
    """Create, persist and return a Calculator configured from *matrix*."""
    calc = Calculator()
    calc.algorithm_name = matrix.algorithm
    calc.filter_name = matrix.matrix_filter
    session.add(calc)
    session.commit()
    return calc
def _persist_contact(self, contact):
    """Insert *contact* as a Contact row unless its email already exists."""
    already_stored = session.query(Contact).filter(
        Contact.email == contact['email']).count() > 0
    if already_stored:
        return
    record = Contact()
    record.email = contact['email']
    record.name = contact['name']
    session.add(record)
    session.commit()
def load_globalcounts(list_of_wordcounts): """ Adds wordcounts for all unique words. There should only be one row per unique word. """ # i = 0 for localcount_dict in list_of_wordcounts: # if i < 5: for word, count in localcount_dict.iteritems(): item = session.query(GlobalCount).filter(GlobalCount.term == word).first() if item: print "%r is already in globalcounts. Updating count..." % word # update the global count for this word, because we have added new songs with more occurrences of this word q = session.query(LocalCount.term, func.sum(LocalCount.count)) q = q.group_by(LocalCount.term) q = q.filter(LocalCount.term == word) results = q.all() # print "Current count for %r is %d" % (item.term, item.count) item.count = results[0][1] print "Updating %r's count to %d" % (item.term, item.count) session.commit() else: print "%r not in globalcounts table, creating new row" % word qq = session.query(LocalCount.term, func.sum(LocalCount.count)) qq = qq.group_by(LocalCount.term) qq = qq.filter(LocalCount.term == word) resultsresults = qq.all() countcount = resultsresults[0][1] new_row = GlobalCount(term = word, count = countcount) session.add(new_row) # you must commit before you query the same word/item again! session.commit()
def post(self):
    """Accept a base64 voice payload, run speech recognition, store the text."""
    voice = request.json['data']
    speaked_at = request.json['speaked_at']
    print('*** request.json ***', file=sys.stderr)
    print('*** ************ ***', file=sys.stderr)
    # Decode the payload and normalise byte order before recognition.
    voice = conv_endian(base64.b64decode(voice))
    speaked_at = dateutil.parser.parse(speaked_at)
    print('**** speaked_at ****', file=sys.stderr)
    print(speaked_at, file=sys.stderr)
    print('*** ************ ***', file=sys.stderr)
    # Transcribe the audio to text.
    voice = recognizer.recognize(voice)
    print('****** voice *******', file=sys.stderr)
    print(voice, file=sys.stderr)
    print('*** ************ ***', file=sys.stderr)
    session.add(Conversation(content=voice, speaked_at=speaked_at))
    session.commit()
    return "ok"
def sign_up_form():
    """Create a new account from the sign-up form and log the user in."""
    email = request.form.get("email")
    password = request.form.get("password")
    username = request.form.get("username")
    first_name = request.form.get("first_name")
    last_name = request.form.get("last_name")
    gender = int(request.form.get("gender"))
    age = int(request.form.get("age"))
    zipcode = request.form.get("zipcode")
    hashed_password = hash_password(password, email)
    # Abort early if the email is taken.
    if dbsession.query(User).filter_by(email = email).first():
        flash("This email address is already in use. Please try again.")
        return redirect("/sign_up")
    new_user = User(email=email, password=hashed_password, username=username,
                    first_name=first_name, last_name=last_name, gender=gender,
                    age=age, zipcode=zipcode)
    dbsession.add(new_user)
    dbsession.commit()
    # Re-query to pick up the generated id, then start the session.
    created_user = dbsession.query(User).filter_by(email = email).first()
    session["login"] = created_user.id
    session["user"] = created_user
    return redirect("/pick_genres")
def load_artwork(session):
    """Seed the artwork table from artwork_data.csv.

    Numeric fields (year, width, height, units) are only stored when the
    CSV cell is a digit string.
    """
    # Artist ids that must not be stored as foreign keys (assumption
    # carried over from the original's four nested if-statements —
    # presumably rows missing from the artists table; confirm).
    bad_artist_ids = (19232, 5265, 3462, 12951)
    f2 = unicode_csv_reader(open("artwork_data.csv"), delimiter = ",")
    f2.next()  # skip the header row
    for row in f2:
        artwork = model.Artwork()
        artwork.artworkId = int(row[0])
        artwork.artistRole = row[3]
        # Flattened from four nested ifs into a single membership test.
        if int(row[4]) not in bad_artist_ids:
            artwork.artistId = int(row[4])
        artwork.title = row[5]
        artwork.dateText = row[6]
        artwork.medium = row[7]
        if row[9].isdigit():
            artwork.year = row[9]
        artwork.dimensions = row[11]
        if row[12].isdigit():
            artwork.width = row[12]
        if row[13].isdigit():
            artwork.height = row[13]
        if row[15].isdigit():
            artwork.units = row[15]
        artwork.inscription = row[16]
        artwork.thumbnailCopyright = row[17]
        artwork.thumbnailURL = row[18]
        artwork.url = row[19]
        session.add(artwork)
    session.commit()
def update_watched_episodes():
    # Toggle an episode's watched flag for a user and return the series'
    # updated completion percentage as JSON.
    user_id = int(request.form.get("user_id"))
    episode_id = int(request.form.get("episode_id"))
    status = request.form.get("status")
    print "status", status
    if status == "true":
        # Mark the episode as watched.
        watched_episode = model.WatchedEpisode(user_id=user_id, episode_id=episode_id)
        DB.add(watched_episode)
    else:
        # Un-mark: remove the existing watched row.
        watched_episode = DB.query(model.WatchedEpisode).filter_by(user_id=user_id, episode_id=episode_id).one()
        DB.delete(watched_episode)
    DB.commit()
    # Find the series this episode belongs to and all of its episodes.
    series = DB.query(Series).filter(Series.episodes.any(Episode.id == episode_id)).one()
    eps_list = DB.query(Episode).filter_by(series_id=series.id).order_by(Episode.season_num).all()
    # NOTE(review): season_dict and watched_ep_ids are never used below.
    season_dict = {}
    watched_ep_ids =[]
    # Count how many episodes of this series the user has watched.
    watched_count = DB.query(model.WatchedEpisode).\
        join(model.WatchedEpisode.episode).\
        filter(model.Episode.series_id == series.id).\
        filter(model.WatchedEpisode.user_id == user_id).count()
    pct = round(100 * float(watched_count)/float(len(eps_list)), 1)
    response = {
        'success': True,
        'completion_percentage': pct,
    }
    return jsonify(response)
def recreate_index():
    '''
    Rebuild the SearchTerm index over the book_info table.

    For tf-idf support each token stores the number of documents that
    contain it plus a JSON map of document id -> term frequency.
    '''
    freq_by_id_by_token = defaultdict(Counter)
    for info in BookInfo.query.all():
        for token in get_tokens_from_book_info(info):
            freq_by_id_by_token[token][info.id] += 1
    # Drop the old index before writing the new one.
    SearchTerm.query.delete()
    for token, frequency_by_id in freq_by_id_by_token.items():
        session.add(SearchTerm(
            token=token,
            num_results=len(frequency_by_id),
            # JSON-encode the per-document frequency map.
            document_ids=json.dumps(frequency_by_id),
        ))
    session.commit()
def index_new_book_info(book_info):
    '''
    Index one book: build a token -> document-id-list map and save each
    token as a SearchTerm row. Returns the map.
    '''
    book_info_ids_by_token = {}
    for token in get_tokens_from_book_info(book_info):
        # `not token in` rewritten to the idiomatic `token not in`.
        if token not in book_info_ids_by_token:
            book_info_ids_by_token[token] = []
        book_info_ids_by_token[token].append(book_info.id)
    for token, book_ids in book_info_ids_by_token.items():
        # TODO: check the DB first before creating new search term
        search_term = SearchTerm(
            token=token,
            num_results=len(book_ids),
            # creates a json string from the book_ids array
            document_ids=json.dumps(book_ids),
        )
        session.add(search_term)
    # Perf fix: commit once after the loop instead of once per token.
    session.commit()
    return book_info_ids_by_token
def parse_location(activity_dict):
    """Receives a shipment activity dictionary. If the activity contains a city
    and a state, saves the location to the database."""
    for shipment_id in activity_dict:
        activity_list = activity_dict[shipment_id]
        for activity in activity_list:
            if activity['ActivityLocation'] != 'Unknown':
                address_info = activity['ActivityLocation']['Address']
                # Only activities with both a city and a state are stored.
                if address_info.has_key('City') and address_info.has_key('StateProvinceCode'):
                    city = address_info['City']
                    # state = address_info['StateProvinceCode']
                    shipment_id = shipment_id
                    # date = datetime.strptime(activity['Date'], "%Y%m%d")
                    timestamp = datetime.strptime(activity['Date'] + activity['Time'], "%Y%m%d%H%M%S")
                    status = activity['Status']['StatusType']['Description']
                    # Query db to see if this activity has already been saved
                    try:
                        previous_location = (db_session.query(Location)
                                             .filter_by(shipment_id=shipment_id)
                                             .filter_by(placename=city)
                                             .filter_by(timestamp=timestamp.strftime("%Y-%m-%d %H:%M:%S.000000"))
                                             .filter_by(status_description=status)
                                             .one())
                    # If location not in db, create Location object, save to db
                    except sqlalchemy.orm.exc.NoResultFound, e:
                        location = Location(shipment_id=shipment_id,
                                            placename=city,
                                            latitude="None",
                                            longitude="None",
                                            timestamp=timestamp,
                                            status_description=status,
                                            tracking_url='Need to get this.')
                        db_session.add(location)
    # NOTE(review): no db_session.commit() in this function — presumably
    # the caller commits the added Location rows; confirm.
def editItem(catalog_id, item_id):
    """Edit an item: GET renders the edit form, POST saves the changes.

    Only the item's owner may edit; everyone else is redirected with a
    flash message.
    """
    catalog = session.query(Catalog).filter_by(id=catalog_id).one_or_none()
    editItem = session.query(Item).filter_by(id=item_id).one_or_none()
    # Bug fix: the original tested the undefined name `item` here, which
    # raised NameError instead of handling a missing item.
    if catalog is None or editItem is None:
        flash("The catalog and/or item you are looking for does not exist.")
        return redirect(url_for('catalog.showAllCatalogs'))
    if editItem.user_id != login_session['user_id']:
        flash("You are not authorized to edit.")
        return redirect(url_for('item.showAllItems', catalog_id=catalog.id))
    if editItem != [] and request.method == 'POST':
        editItem.name = request.form['editItemName']
        editItem.description = request.form['editItemDescription']
        session.add(editItem)
        session.commit()
        flash(editItem.name + " is edited!")
        return redirect(url_for('item.showAllItems', catalog_id=catalog_id))
    elif editItem != [] and request.method == 'GET':
        return render_template(
            'items_edit.html', catalog=catalog, item=editItem)
def execute(self): t = Task(self.name) if self.should_be_active: t.activate() session.add(t) session.commit() print "Added task %d." % t.id
def load_songs(lyrics_data): """ Add songs to the songs table. """ # i = 0 # go through each song dictionary and extract data for song_dictionary in lyrics_data: # if i < 5: # check whether the song already exists in the database if session.query(Song).filter(Song.url == song_dictionary['url']).first(): print "%r is already in the database!" % song_dictionary['songname'] else: # let's turn this song... into a Song! # make a new row in the songs table url = song_dictionary['url'] artist = song_dictionary['artist'] songname = song_dictionary['songname'] new_song = Song(url = url, artist = artist, songname = songname) session.add(new_song) print "SUCCESS! %r is such a jam." % new_song.songname # i += 1 session.commit()
def create_tables():
    """Create all tables and seed two demo users, two books and one
    borrow-history record."""
    Base.metadata.create_all(engine)
    u = User(email='*****@*****.**', username='******')
    u.set_password('unicorn')
    session.add(u)
    u2 = User(email='*****@*****.**', username='******')
    u2.set_password('unicorn')
    session.add(u2)
    b = Book(
        title='The Book of Steph',
        amazon_url='www.smstroud.com',
        owner_id=1
    )
    session.add(b)
    b2 = Book(
        title='Stroud\'s Story',
        amazon_url='www.smstroud.com',
        owner_id=1,
        current_borrower=2
    )
    session.add(b2)
    # Bug fix: datetime.now was passed uncalled, which stored the function
    # object rather than the current time.
    b_h = BorrowHistory(book_id=2, borrower_id=2, date_borrowed=datetime.now())
    # p = Post(title='test post', body='body of a test post.')
    # u.posts.append(p)
    # (duplicate session.add(b) removed — b is already pending above)
    b2.borrow_history.append(b_h)
    session.commit()
def update_page(page, chapter):
    # Crawl forward from `page`: fetch each page, extract its image and the
    # link to the next page, and persist the linked chain as it goes.
    print "Calling %s" % page.page_link
    response = urllib2.urlopen(page.page_link)
    if not (response.code >= 200 and response.code < 300):
        raise Exception("Could not retrieve the page for link . %s" % page.page_link)
    print "Response %s" % response.code
    content = response.read()
    (next_link, image) = get_image_and_next_link(content, page.page_link)
    while next_link is not None:
        if image is None:
            raise Exception("Something went wrong with the lack of image for given page")
        page.image_link = image
        # Create the next page first so the current one can point at it.
        next_page = Page(next_link, chapter)
        session.add(next_page)
        session.commit()
        print "Added Page[%d] %s" % (next_page.id, next_page.page_link)
        page.next_page_id = next_page.id
        session.add(page)
        session.commit()
        print "Update page %d with image %s" % (page.id, page.image_link)
        # Advance the crawl to the newly created page and fetch it.
        page = next_page
        response = urllib2.urlopen(page.page_link)
        if not (response.code >= 200 and response.code < 300):
            raise Exception("Could not retrieve the page for link . %s" % page.page_link)
        content = response.read()
        (next_link, image) = get_image_and_next_link(content, page.page_link)
def load_rss():
    """Fetch stories from the NPR and BBC RSS feeds, store them, then purge
    the stories that were in the table before this run."""
    exstories = db_session.query(Stories).all()
    # Bug fix: exstories[-1] raised IndexError on an empty table.
    last_id = exstories[-1].id if exstories else 0
    sources = {"NPR News": 'http://www.npr.org/rss/rss.php?id=1001',
               "BBC": 'http://feeds.bbci.co.uk/news/rss.xml'}
    for source in sources:
        parsed = feedparser.parse(sources[source])
        for entry in parsed.entries:
            title = entry.title
            url = entry.link
            # Strip trailing markup that sometimes follows the abstract.
            abstract = entry.description.split('<')[0]
            # Bug fix: the original called db_session.Stories(...), but the
            # model class is Stories — it is not an attribute of the session.
            story = Stories(title=title, url=url, abstract=abstract,
                            source=source)
            db_session.add(story)
        db_session.commit()
    # Delete the stories that predate this refresh.
    for l in range(1, last_id + 1):
        db_session.query(Stories).filter_by(id=l).delete()
    db_session.commit()
def cadastrar(self): #sempre chama o dicionario em funcao da funcao self.nome = raw_input("Digite o hostname do server: ") self.descricao = raw_input("Digite descricao para o server: ") #self.ip = raw_input("Digite IP para o server: ") try: ssh = SSH() docker = Docker() ssh.executa_comando(docker.criar(self.nome)) container = ssh.executa_comando(docker.pegar_ip(self.nome)) container = json.loads(container) self.ip = container[0].get("NetworkSettings").get("IPAddress") s = ServidorModel(self) #s.nome = servidor.get("nome") #s.descricao = servidor.get("descricao") #s.ip = servidor.get("ip") session.add(s) session.commit() print "Servidor cadastrado com sucesso!" except Exception as e: session.rollback() print "Falhou ao cadastrar servidor: ",e
def create_store_review(store_id, content, score, reviewer):
    """Create a review for a store and recompute its average score.

    Returns (message dict, 201) on success; aborts with 400 for an unknown
    store and 500 on database errors.
    """
    try:
        store = session.query(Store).filter(Store.id == store_id).first()
        if not store:
            abort(400, "bad request")
        review = StoreReview(store_id=store_id,
                             content=content,
                             score=score,
                             reviewer=reviewer,
                             datetime=datetime.datetime.now())
        session.add(review)
        session.commit()
        # Refresh the store's aggregate score with the new review included.
        average_score_calculation(store_id)
        return {"message": "success for create store review"}, 201
    except SQLAlchemyError:
        session.rollback()
        return abort(500, "database error")
def create_dispute(user, trade):
    """Create, persist and return a Dispute attached to *trade* for *user*."""
    dispute = Dispute(
        id=generate_id(),
        user=user.id,
        created_on=str(datetime.now()),
        trade=trade,
    )
    trade.dispute.append(dispute)
    # The original four-branch if/elif reduces to two direct comparisons:
    # each flag simply records which side of the trade the user is on.
    dispute.is_buyer = user.id == trade.buyer
    dispute.is_seller = user.id == trade.seller
    session.add(dispute)
    session.add(trade)
    session.commit()
    return dispute
def cadastrar(self): #sempre chama o dicionario em funcao da funcao self.nome = raw_input("Digite o hostname do server: ") self.descricao = raw_input("Digite descricao para o server: ") #self.ip = raw_input("Digite IP para o server: ") try: ssh = SSH() docker = Docker() ssh.executa_comando(docker.criar(self.nome)) container = ssh.executa_comando(docker.pegar_ip(self.nome)) container = json.loads(container) self.ip = container[0].get("NetworkSettings").get("IPAddress") s = ServidorModel(self) #s.nome = servidor.get("nome") #s.descricao = servidor.get("descricao") #s.ip = servidor.get("ip") session.add(s) session.commit() print "Servidor cadastrado com sucesso!" except Exception as e: session.rollback() print "Falhou ao cadastrar servidor: ", e
def join_carpool(trip, user):
    # Creates new TripPassenger object; returns appropriate message.
    carpool = db_session.query(model.Trip).filter_by(id=trip).first()
    driver = get_user_by_id(carpool.trip_driver)
    # Drivers cannot join their own carpool.
    if user.id == carpool.trip_driver:
        return "You cannot join a carpool where you are the driver."
    if carpool.seats_available > 0:
        carpool_contact = driver.first_name + ", email: " + driver.email
        new_passenger = model.TripPassenger(trip_id=trip, passenger_id=user.id)
        # Decrement the seat count in the same commit as the new passenger.
        seats_available = carpool.seats_available - 1
        db_session.query(model.Trip).filter_by(id=carpool.id).update({"seats_available": seats_available})
        db_session.add(new_passenger)
        db_session.commit()
        # Sends email to driver, confirming new passenger.
        message = (
            user.first_name + " " + user.last_name + " (email: " + user.email
            + ") has joined your carpool to " + carpool.event_name + " on "
            + carpool.departure_date + "."
        )
        send_email("inStep carpool addition", driver.email, "*****@*****.**", message)
        response = "You have been added to the carpool. The driver is " + carpool_contact
        return response
    else:
        return "That carpool is full."
def load_localcounts(lyrics_data, list_of_wordcounts): """ Adds local wordcounts for each song. """ # i = 0 for song_dictionary in lyrics_data: # if i < 5: url = song_dictionary['url'] # put on your counting shoes for k, v in song_dictionary.iteritems(): lyrics = song_dictionary['lyrics'] unique_words = {} for line in lyrics: line = line.lower() words = re.findall('\w+', line) # unique words for each song for word in words: if unique_words.get(word): unique_words[word] += 1 else: unique_words[word] = 1 # make all the localcount rows for that song for word, localcount in unique_words.iteritems(): new_row = LocalCount(song_id = url, term = word, count = localcount) print "Adding %r with count of %r" % (new_row.term, new_row.count) session.add(new_row) # i += 1 session.commit() list_of_wordcounts.append(unique_words) return list_of_wordcounts
def register(ctx,request:YuHeLg.Request):
    """Register a user from a JSON payload (email, name, password).

    Raises 409 on duplicate email, 400 on a bad payload, 500 on DB failure.
    """
    payload = request.json  # expected keys: email, pwd/password, name
    email = payload.get("email")
    # The email must be unique.
    if session.query(User).filter(User.email == email).first() is not None:
        raise exc.HTTPConflict("{} already exists".format(email))
    user = User()
    try:
        user.name = payload.get("name")
        user.email = payload.get("email")
        user.password = bcrypt.hashpw(payload.get("password").encode(), bcrypt.gensalt())
    except Exception as e:
        print(e)
        # Bug fix: the exception was instantiated but never raised, so a
        # malformed payload fell through and was inserted anyway.
        raise exc.HTTPBadRequest()
    session.add(user)
    try:
        session.commit()
        res = jsonify(user={
            'id': user.id,
            'name': user.name
        }, token=gen_token(user.id))
        print(res)
        return res
    except Exception:
        session.rollback()
        raise exc.HTTPInternalServerError()
def close_trade(id):
    """Stamp trade *id* as closed and return to the trade-history page."""
    completed = Trade.query.filter_by(id=id).one()
    completed.close_date = datetime.datetime.utcnow()
    db_session.add(completed)
    db_session.commit()
    flash("Your trade has been marked as complete.", "success")
    return redirect("/trade_history")
def access_bank():
    """ Allows login to banking institutions using Intuit API and Python library aggcat. Calls functions in accounts.py. Assumes that all account assets will be checking accounts. """
    form = BankLoginForm(request.form)
    if form.validate_on_submit():
        institution = str(request.form["institution"])
        username = request.form["user_name"]
        password = request.form["user_password"]
        # Institution-specific credential field names come from the API.
        user_fields = accounts.get_credential_fields(
            accounts.create_client(), institution)
        credentials = {}
        credentials[user_fields["username"]] = username
        credentials[user_fields["password"]] = password
        try:
            account = accounts.discover_add_account(
                accounts.create_client(), institution, credentials)
            account_data = account.content
            # Checks the HTTP error code if account needs further
            # authentication
            if account.status_code in [200, 201]:
                checking_balance = account_data.balance_amount
                # Checks that user's assets are getting updated each time
                # they change their input, and not getting added to the
                # database.
                user_assets = m_session.query(model.UserBanking).filter_by(
                    user_id=g.user.id).first()
                if user_assets is not None:
                    update_assets = m_session.query(
                        model.UserBanking).filter_by(user_id=g.user.id).update(
                        {model.UserBanking.checking_amt: checking_balance})
                else:
                    new_account = model.UserBanking(
                        user_id=g.user.id, inputted_assets=0,
                        checking_amt=checking_balance, savings_amt=0,
                        IRA_amt=0, comp401k_amt=0, investment_amt=0)
                    m_session.add(new_account)
                m_session.commit()
                flash("%s account XXXX%s with $%s has been added to your \
assets." % (account_data.account_nickname,
                        account_data.account_number[-4:],
                        account_data.balance_amount))
                return redirect("/input/assets")
            else:
                # Non-2xx: extra authentication (challenge) is required.
                return redirect("/banklogin/challenge")
        except:
            # NOTE(review): bare except swallows every error from the
            # aggregation call; consider narrowing.
            flash("There was an error accessing your account. Please try \
again.")
            return redirect("/banklogin")
    else:
        flash("Please enter a valid email and password.")
        return redirect("/banklogin")
def populate_movie_from_OMDB(movie_info):
    # Enrich a movie_info row with ratings/credits fetched from the OMDB
    # API and persist the result; returns early if the response is not JSON.
    # query API for move title using OMDB API parameters
    title = movie_info.title #urllib.quote_plus(movie_info.title)
    res = omdb.request(t=title, y=movie_info.year, r='JSON', apikey="e5b6d27b", tomatoes="true")
    print "fetching [%s]" % title
    # Exception Handler: do this where you expect a failure
    try:
        json_content = json.loads(res.content)
    # do this if failure
    except:
        print res.content
        return
    # updates a column with datetime stamp
    movie_info.omdbLoad = datetime.datetime.now()
    # fetch attributes of json content to pass to movie_info object;
    # each field is only stored when the API returned a usable value.
    poster = check_api_result(json_content, 'Poster')
    if poster:
        movie_info.poster = poster
    imdbRating = check_api_result(json_content, 'imdbRating')
    if imdbRating:
        movie_info.imdbRating = float(imdbRating)
    imdbID = check_api_result(json_content, 'imdbID')
    if imdbID:
        movie_info.imdbID = imdbID
        movie_info.imdbURL = "http://www.imdb.com/title/%s" % imdbID
    runtime = check_api_result(json_content, 'Runtime')
    if runtime:
        # Store minutes only, e.g. "120 min" -> "120".
        movie_info.runtime = runtime.replace(' min', '')
    director = check_api_result(json_content, 'Director')
    if director:
        movie_info.director = director
    actors = check_api_result(json_content, 'Actors')
    if actors:
        movie_info.actors = actors
    tomatoMeter = check_api_result(json_content, 'tomatoMeter')
    if tomatoMeter:
        movie_info.tomatoMeter = int(tomatoMeter)
    tomatoUserRating = check_api_result(json_content, 'tomatoUserRating')
    if tomatoUserRating:
        movie_info.tomatoUserRating = float(tomatoUserRating)
    tomatoUserMeter = check_api_result(json_content, 'tomatoUserMeter')
    if tomatoUserMeter:
        movie_info.tomatoUserMeter = int(tomatoUserMeter)
    mpaa_rating = check_api_result(json_content, 'Rated')
    if mpaa_rating:
        movie_info.mpaa_rating = mpaa_rating
    metascore = check_api_result(json_content, 'Metascore')
    if metascore:
        movie_info.metascore = int(metascore)
    shortPlot = check_api_result(json_content, 'Plot')
    if shortPlot:
        movie_info.shortPlot = shortPlot
    dbsession.add(movie_info)
    dbsession.commit()
def vote_no(item_id):
    """Record a 'no' decision for *item_id* in the viewed-items table."""
    viewed = ItemViewed(decision="no",
                        item_id=item_id,
                        viewer_id=current_user.id,
                        date_viewed=datetime.datetime.now())
    dbsession.add(viewed)
    dbsession.commit()
    return ""
def put(self, name):
    """PUT /trips/<name>: merge the posted points into the named trip."""
    trip = session.query(Trip).filter_by(name=name).first()
    trip.updatePoints(request.form['trip'])
    session.add(trip)
    session.commit()
    return trip.toJson(), 201, CORPSE
def log_file_processing(filename, last_line, filename_date, filename_sequence):
    """Record a processed cut file; return True on success, False on failure."""
    try:
        session.add(
            CutFile(filename, last_line, filename_date, filename_sequence))
        session.commit()
        return True
    except Exception:
        # Narrowed from a bare except; roll back so the session stays
        # usable after a failed commit.
        session.rollback()
        return False
def add_user_create():
    """Create a User from query-string parameters and persist it.

    NOTE(review): reads credentials from request.args (GET) and stores the
    password as received — presumably a prototype; confirm.
    """
    age = request.args.get("age")
    email = request.args.get("email")
    password = request.args.get("password")
    zipcode = request.args.get("zipcode")
    u = User(age=age, email=email, password=password, zipcode=zipcode)
    session.add(u)
    session.commit()
    # Typo fix: "Succesfully" -> "Successfully".
    return "Successfully added user!"
def seed_geohashed_station_database(session):
    """Compute and store a geohash row for every station."""
    for station in session.query(model.Station).all():
        code = geohash.encode(station.latitude, station.longitude)
        session.add(model.Station_Geohash(station_id=station.id,
                                          geohash_loc=code))
    session.commit()
def process_acct():
    """Create an account from the posted email/password and log the user in.

    NOTE(review): the password is stored as received — presumably hashed in
    the model; confirm.
    """
    email = request.form["email"]
    password = request.form["password"]
    new_user_acct = model.User(email=email, password=password)
    model_session.add(new_user_acct)
    model_session.commit()
    # Typo fix: "succesfully" -> "successfully".
    flash("Your account has been successfully added.")
    flask_session["email"] = email
    return redirect("/")
def add_wallet(user, address):
    """Attach the seller's wallet address to the user's most recent trade."""
    most_recent = get_recent_trade(user)
    most_recent.wallet = str(address)
    most_recent.updated_at = str(datetime.now())
    session.add(most_recent)
    session.commit()
def run(self): while True: job = self.update_queue.get() page = session.query(Page).filter(Page.id == job.page_id).first() page.downloaded = True session.add(page) session.commit() print "Page [%d] saved" % page.id self.update_queue.task_done()
def add_review():
    """Store a movie rating for the logged-in user and re-render main.html."""
    movie = request.form.get("movie")
    # TODO: Add a check if movie doesn't exist
    movie_id = dbsession.query(Movie).filter_by(name=movie).first().id
    score = request.form.get("rating")
    # Renamed local so the form value no longer shadows the Rating row.
    review = Rating(movie_id=movie_id, user_id=session["login"], rating=score)
    dbsession.add(review)
    dbsession.commit()
    return render_template("main.html")
def vote():
    # Show the voting page; on POST record (or update) the current user's
    # up/down vote for a photo and bump the photo's counters.
    allphotos = db_session.query(Photo).all()
    # NOTE(review): these queries build SQL via %-interpolation of
    # g.user_id — prefer bound parameters to rule out SQL injection.
    sql = """select distinct v.photo_id from votes v where v.give_vote_user_id = %s and v.value > 0;""" % (
        g.user_id)
    upvotes = [vote[0] for vote in db_session.execute(sql)]
    print upvotes
    sql = """select distinct v.photo_id from votes v where v.give_vote_user_id = %s and v.value < 0;""" % (
        g.user_id)
    downvotes = [vote[0] for vote in db_session.execute(sql)]
    if request.form:
        vote = request.form['vote']
        photoid = request.form['photoid']
        photoowner = request.form['photoowner']
        # Reuse the user's existing vote row if there is one.
        v = db_session.query(Vote).filter_by(give_vote_user_id=g.user_id, photo_id=photoid).first()
        if not v:
            v = Vote(give_vote_user_id=g.user_id, photo_id=photoid, receive_vote_user_id=photoowner)
            db_session.add(v)
        p = db_session.query(Photo).filter_by(id=photoid).one()
        if vote == "upvote":
            v.value = 1
            p.up_vote = Photo.up_vote + 1
        elif vote == "downvote":
            v.value = -1
            p.down_vote = Photo.down_vote + 1
        db_session.commit()
        # Re-query the vote lists so the page reflects the new vote.
        sql = """select distinct v.photo_id from votes v where v.give_vote_user_id = %s and v.value > 0;""" % (
            g.user_id)
        upvotes = [vote[0] for vote in db_session.execute(sql)]
        sql = """select distinct v.photo_id from votes v where v.give_vote_user_id = %s and v.value < 0;""" % (
            g.user_id)
        downvotes = [vote[0] for vote in db_session.execute(sql)]
        return render_template("_vote.html", u=g.user, photos=allphotos, upvotes=upvotes, downvotes=downvotes)
    return render_template("vote.html", u=g.user, photos=allphotos, upvotes=upvotes, downvotes=downvotes)
def uploadfile():
    """Handle a photo upload: save the file, build a thumbnail, extract EXIF
    location/time, persist a Photo row, and redirect.

    Photos without GPS EXIF data are routed to the addlocation page so the
    user can supply coordinates manually.
    """
    if request.method == 'POST':
        upload = request.files['file']  # renamed: `file` shadows the builtin
        if upload and allowed_file(upload.filename):
            filename = secure_filename(upload.filename)
            photo_location = "uploads/" + filename
            photo_file_path = os.path.join(app.config['UPLOAD_PHOTO_FOLDER'],
                                           filename)
            upload.save(photo_file_path)

            thumbnail_file_path = (os.path.splitext(photo_file_path)[0]
                                   + ".thumbnail")
            create_thumbnail(filename, photo_file_path, thumbnail_file_path)
            thumbnail_location = ("uploads/"
                                  + os.path.splitext(filename)[0]
                                  + ".thumbnail")

            # Pull GPS coordinates and capture time out of the EXIF block.
            image = Image.open(photo_file_path)
            exif_data = get_exif_data(image)
            latlon = get_lat_lon(exif_data)
            l = str(latlon)
            latitude = lat(l)
            longitude = lon(l)
            timestamp = get_time(exif_data)
            if timestamp is not None:
                timestamp = datetime.strptime(str(timestamp),
                                              "%Y:%m:%d %H:%M:%S")

            caption = request.form['caption']
            p = Photo(file_location=photo_location, caption=caption,
                      latitude=latitude, longitude=longitude,
                      timestamp=timestamp, user_id=g.user_id,
                      thumbnail=thumbnail_location)
            db_session.add(p)
            db_session.commit()
            db_session.refresh(p)

            if latitude is None:
                # No GPS EXIF: remember the photo id in the cookie session
                # and ask the user for a location.
                session['photo_id'] = p.id
                return redirect(url_for('addlocation', photo_id=p.id))
            # TODO: a template that shows the uploaded photo plus the
            # user's other photos.
            return redirect(url_for('userpage'))
    return render_template("upload.html")
def cadastrar(self): self.login = raw_input("Digite o login do usuario: ") self.senha = raw_input("Digite a senha:") try: u = UsuarioModel(self) session.add(u) session.commit() print "Usuario cadastrado com sucesso!" except Exception as e: session.rollback() print "Falhou ao cadastrar usuario: ", e
def get_search_params(self, gender: int, age_from: int, age_to: int):
    """Write the VK user-search parameters for this user to the
    search_params table, stamped with today's date."""
    record = SearchParams(
        id_User=self.id_User,
        gender=gender,
        age_from=age_from,
        age_to=age_to,
        date=datetime.datetime.today(),
    )
    session.add(record)
    session.commit()
def post(self):
    """Register a new user: hash the password off the IOLoop, store the
    user row, set the login cookie, and redirect.

    This is a coroutine-style handler: the ``yield`` hands the bcrypt work
    to a thread-pool executor so the event loop is not blocked by hashing.
    """
    username = self.get_argument("username")
    email = self.get_argument("email")
    # bcrypt.hashpw is deliberately CPU-heavy; run it on the executor and
    # resume here when the future resolves.
    hashed_password = yield executor.submit(
        bcrypt.hashpw, tornado.escape.utf8(self.get_argument("password")),
        bcrypt.gensalt())
    user = User(username=username, email=email, password=hashed_password)
    session.add(user)
    session.commit()
    # Signed cookie marks this browser as logged in.
    self.set_secure_cookie("blog_user", str(username))
    # Honor the ?next= target, defaulting to the site root.
    self.redirect(self.get_argument("next", "/"))
def create_audit_request():
    """Create a solution audit request for the current OEM user.

    Returns a JSON status payload; raises DCCAException when the DB insert
    fails.
    """
    uid, err = get_oemid(request=request)
    if err is not None:
        return jsonify(UNAUTH_RESULT)
    ctx.current_user = DccaUser.get_by_id(uid)

    check_json(request)
    solution_id = get_json(request).get('solution_id')
    comments = get_json(request).get('description')
    permission = get_json(request).get('permission')

    # Only the solution owner may request an audit.
    if not DccaAssSolutionImage.is_solution_owner(solution_id):
        return jsonify({
            'status': "fail",
            'message': "Solution not exist or you are not permit to access."
        })
    solution = DccaAssSolutionImage.get(solution_id)

    # NOTE(review): the two gates below combine model permission with
    # solution visibility — presumably "inaccessible model + private
    # solution" is rejected outright, and a public solution of an
    # accessible model cannot be audited while it is the model's bound
    # default. Confirm intended semantics with the API owner.
    if not DccaModel.check_model_permission(
            solution.model_id) and not solution.is_public:
        return jsonify({
            'status': "fail",
            'message': "Model is private, Fail ro create request."
        })
    if DccaModel.check_model_permission(
            solution.model_id) and solution.is_public:
        if DccaModel.is_bind_solution(solution_id):
            return jsonify({
                'status': "fail",
                'message': "Solution is default solution of public model, Fail to create request."
            })

    # Translate the requested permission into the public/private flag.
    to_public = check_permission(permission)
    req = DccaSolutionAudit(uid, comments, solution_id, to_public)
    try:
        session.add(req)
        session.commit()
    except Exception as e:
        err_msg = "Fail to create request. {}".format(str(e))
        raise DCCAException(err_msg)

    return jsonify({
        "status": "success",
        "message": "Request has been created, please wait for approval",
        "id": req.id
    })
def process_challenge():
    """
    Authenticates access to banking institutions if there is a challenge
    response with HTTP code 401.
    """
    try:
        institution = str(request.form["institution"])
        username = request.form["user_name"]
        password = request.form["user_password"]
        # Responses must be in a list for XML to parse.
        # Fixed: the original indexed the form with a list object
        # (request.form[[challenge]]), which raises TypeError.
        responses = [request.form["challenge"]]

        user_fields = accounts.get_credential_fields(
            accounts.create_client(), institution)
        credentials = {}
        credentials[user_fields["username"]] = username
        credentials[user_fields["password"]] = password

        account = accounts.discover_and_add_accounts(
            accounts.create_client(), institution, credentials)
        # Access "account" dictionary to pull the session and node id
        challenge_session_id = account.headers["challengesessionid"]
        challenge_node_id = account.headers["challengenodeid"]

        # Fixed: bare create_client() was inconsistent with every other
        # call here (all go through the accounts module).
        confirmed_account = accounts.confirm_challenge(
            accounts.create_client(), institution, challenge_session_id,
            challenge_node_id, responses)
        checking_balance = confirmed_account.balance_amount

        # Upsert the user's checking balance.
        user_assets = m_session.query(model.UserBanking).filter_by(
            user_id=g.user.id).first()
        if user_assets is not None:
            m_session.query(model.UserBanking).filter_by(
                user_id=g.user.id).update(
                {model.UserBanking.checking_amt: checking_balance})
        else:
            new_account = model.UserBanking(
                user_id=g.user.id, checking_amt=checking_balance)
            m_session.add(new_account)
        # Commit on both branches so the update path is persisted too.
        m_session.commit()

        flash("%s account XXXX%s with $%s has been added to your assets."
              % (confirmed_account.content.account_nickname,
                 confirmed_account.content.account_number[-4:],
                 confirmed_account.content.balance_amount))
        return redirect("/input/assets")
    except Exception:
        # Best-effort boundary: any failure sends the user back to retry.
        flash("There was an error authenticating your account. Please "
              "try again.")
        return redirect("/banklogin/challenge")
def _persist_recipients(self):
    """Persist one MsgRecipient row per 'to' and 'cc' address of this
    message, then commit once for the whole batch.

    Each entry in self.tolist / self.cclist is a dict carrying an 'email'
    key; rows are linked to the message via self.messageid.
    """
    # Single data-driven loop replaces the duplicated to/cc loops.
    for recipients, recipient_type in ((self.tolist, 'to'),
                                       (self.cclist, 'cc')):
        for item in recipients:
            r = MsgRecipient()
            r.contact_email = item['email']
            r.recipient_type = recipient_type
            r.message_id = self.messageid
            session.add(r)
    session.commit()
def cadastrar_servidor(): servidor = {} servidor["hostname"] = raw_input("Digite o nome do servidor:") servidor["descricao"] = raw_input("Digite uma descricao: ") try: s = ServidorModel() s.nome = servidor.get("hostname") s.descricao = servidor.get("descricao") session.add(s) session.commit() except Exception as e: session.rollback() print "Falhou ao cadastrar servidor: ", e
def save(self):
    """Persist this cut instruction.

    Returns the newly created CutInstruction record, or False when the
    instruction is incomplete or the commit fails.
    """
    try:
        record = CutInstruction(self.instruction['start_time'],
                                self.instruction['end_time'],
                                self.instruction['title'],
                                self.instruction['duration'],
                                self.instruction['reconcile_key'])
        session.add(record)
        session.commit()
        return record
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # still propagate; missing keys and DB errors are reported here.
        print(f'Erro ao tentar salvar {self.instruction}')
        return False
def create_shipments(tracking_numbers):
    """Receives a list of tracking numbers. Creates a shipment object for
    each non-None tracking number, saves it to the database, and returns
    the list of shipment objects.
    """
    shipments = []
    for tracking_number in tracking_numbers:
        if tracking_number is not None:
            shipment = Shipment(tracking_no=tracking_number,
                                user_id=session['user_id'])
            shipments.append(shipment)
            db_session.add(shipment)
    # Commit once for the whole batch instead of once per row: a single
    # all-or-nothing transaction and far fewer round trips.
    if shipments:
        db_session.commit()
    return shipments
def set_user_status(id_search_result: int, status: int):
    """Record *status* for a found user in the viewed_users table and mark
    the matching search_result row (id == id_search_result) as viewed.
    """
    session.add(
        ViewedUsers(id_SearchResult=id_search_result, status=status))
    session.commit()

    result_row = session.query(SearchResult).filter(
        SearchResult.id == id_search_result).first()
    # .first() returns None when no such row exists; the original crashed
    # with AttributeError in that case.
    if result_row is not None:
        result_row.viewed = True
        session.commit()
def get_user(msg):
    """Return the User row for the sender of *msg*, creating (and
    committing) it with the originating chat id if it does not exist yet.
    """
    chat_id = msg.message.chat.id
    user_id = msg.from_user.id  # renamed: `id` shadowed the builtin
    user = session.query(User).filter_by(id=user_id).first()
    if user:
        return user
    user = User(id=user_id, chat=chat_id)
    session.add(user)
    session.commit()
    return user