def test_delete_item():
    """DELETE /todo/<id>/ removes the row from the database."""
    doomed = TodoItem(id=1, completed=False, text="Delete this item")
    session.add(doomed)
    session.flush()

    client.delete("/todo/1/")

    remaining = session.query(TodoItem).filter_by(id=1).first()
    assert remaining is None
def song_post():
    """Create a song (and its backing file row) from the JSON request body.

    Expects JSON with keys "song_name", "song_id", "file_name", "file_id".
    Returns 200 with the posted data on success, or 500 when a song with
    the same id already exists.
    """
    headers = {
        "Location": url_for("song_post"),
        "Content-Type": "application/json"
    }
    # Get the data from the request body.
    data = request.json
    post_song = models.Song(song_name=data["song_name"], id=data["song_id"])
    # TODO(review): ask sam if we really need to post separately.
    post_file = models.File(song_id=data["song_id"],
                            file_name=data["file_name"],
                            id=data["file_id"])
    if not session.query(models.Song).get(post_song.id):
        # TODO: consider adding a check here for a duplicate file id too.
        session.add_all([post_song, post_file])
        session.commit()
    else:
        # Duplicate id: discard pending objects and report failure.
        # FIX: print() call form works on both Python 2 and 3
        # (was a Python-2-only print statement).
        print("************* ELSE ***************")
        session.rollback()
        session.flush()
        return Response(json.dumps(
            {"status": "failed - that song already exists"}),
            500,
            mimetype="application/json")
    return Response(stripUnicode(data),
                    200,
                    headers=headers,
                    mimetype="application/json")
def test_get_all():
    """GET /todo/ returns every stored item under the "data" key."""
    specs = [
        (False, "Test item text"),
        (False, "Test item two text"),
        (True, "Test item three text"),
    ]
    items = []
    for done, text in specs:
        item = TodoItem(completed=done, text=text)
        session.add(item)
        items.append(item)
    session.flush()

    response = client.get("/todo/")
    actual = response.json()

    expected = {
        "status": "success",
        "data": [
            {"id": item.id, "completed": done, "text": text}
            for item, (done, text) in zip(items, specs)
        ],
    }
    assert actual == expected
def auto_rollback():
    """Fixture: start each test with an empty TodoItem table and undo its writes.

    commit() on the engine is replaced with a no-op so nothing a test does
    can actually persist; the final rollback discards all of its changes.
    """
    stale_items = session.query(TodoItem).all()
    for stale in stale_items:
        session.delete(stale)
    session.flush()
    # Neuter commits so the rollback below can undo everything.
    engine.commit = lambda: None
    yield
    session.rollback()
def song_delete():
    """Delete a song and its file row, identified by the JSON request body.

    Returns 200 on success, or 500 when no song with the given id exists.
    """
    headers = {"Location": url_for("song_delete")}
    # Get the data from the request body.
    data = request.json
    post_song = models.Song(song_name=data["song_name"], id=data["song_id"])
    # TODO(review): ask sam if we really need to post separately.
    post_file = models.File(song_id=data["song_id"],
                            file_name=data["file_name"],
                            id=data["file_id"])
    if session.query(models.Song).get(post_song.id):
        # TODO: consider adding a check here for a duplicate file id too.
        del_song = session.query(models.Song).get(post_song.id)
        session.delete(del_song)
        del_file = session.query(models.File).get(post_file.id)
        session.delete(del_file)
        session.commit()
    else:
        # Unknown id: nothing to delete.
        # FIX: print() call form works on both Python 2 and 3
        # (was a Python-2-only print statement).
        print("************* ELSE ***************")
        session.rollback()
        session.flush()
        return Response(json.dumps({"status": "failed - that song doesnt exists"}),
                        500,
                        mimetype="application/json")
    return Response(json.dumps({"status": "deleted"}),
                    200,
                    headers=headers,
                    mimetype="application/json")
def createUser(login_session):
    """Add a User to the database using the current information stored
    in the login session.

    Args:
        login_session (dict): Dictionary containing information about the
            current login session, including the user's profile information
            from Google+ (keys: 'username', 'email', 'picture').

    Returns:
        int: The user_id from the newly created record in the User table.
    """
    # FIX: print() call form works on both Python 2 and 3
    # (was a Python-2-only print statement).
    print("Creating User")
    newUser = User(
        name=login_session['username'],
        email=login_session['email'],
        picture=login_session['picture']
    )
    session.add(newUser)
    session.flush()  # populate newUser.id before returning it
    session.commit()
    return newUser.id
async def handle_subscribe(bot, ctx, sub_arg):
    """Subscribe the calling user to one vtb (by numeric id) or to all of them.

    sub_arg: argument list; sub_arg[0] is either a vtb id or the literal 'all'.
    Replies with a usage message on bad input, otherwise confirms the
    subscription (best-effort: DB errors are rolled back silently).
    """
    session.flush()
    if len(sub_arg) == 0:
        return await bot.send(ctx, '缺少参数')
    # FIX: idiomatic negation instead of "== False".
    if not is_number(sub_arg[0]) and sub_arg[0] != 'all':
        return await bot.send(ctx, message='参数只能是编号或者all', at_sender=True)
    vid_list = [row[0] for row in session.query(Vtb.vid).all()]
    user_id = ctx['user_id']
    try:
        if sub_arg[0] == 'all':
            session.bulk_save_objects(
                [UserSubscribe(user_id=user_id, vid=vid) for vid in vid_list])
        else:
            # FIX: store the vid as an int, the same type held in vid_list —
            # the original passed the raw string to UserSubscribe.
            vid = int(sub_arg[0])
            if vid not in vid_list:
                return await bot.send(ctx, message='你订阅了不存在的vtb', at_sender=True)
            model = UserSubscribe(user_id=user_id, vid=vid)
            session.merge(model)
    except Exception:
        session.rollback()
    return await bot.send(ctx, message='成功订阅(如已订阅请忽略)', at_sender=True)
def test_update_item_text():
    """PUT /todo/<id>/ with a "text" payload rewrites the stored text."""
    session.add(TodoItem(id=1, completed=False, text="Test item three text"))
    session.flush()

    client.put("/todo/1/", json={"text": "My new text"})

    updated = session.query(TodoItem).filter_by(id=1).one()
    assert updated.text == "My new text"
def delete_all_users():
    """Delete every User row.

    Returns:
        dict: {"message": ...} describing whether anything was deleted.
    """
    response = dict(message='No user found to delete')
    users = session.query(User).all()
    if users:
        # BUG FIX: the original called users.delete() on the plain Python
        # list returned by .all(), which would raise AttributeError.
        # Issue the bulk delete on the query itself instead.
        session.query(User).delete()
        session.flush()
        response.update(message='deleted all users successfully')
    return response
def delete_item(todo_id: int):
    """Delete the todo item with the given id; raise 404 if it doesn't exist."""
    target = session.query(TodoItem).filter(TodoItem.id == todo_id).first()
    if not target:
        raise HTTPException(status_code=404, detail="Item not found")
    session.delete(target)
    session.flush()
    session.commit()
    return {"status": "success"}
def update_item(todo_id: int, item: ItemModel):
    """Partially update a todo item; raise 404 if it does not exist.

    Only fields that are present (non-None) in the payload are applied.
    """
    todo_item = session.query(TodoItem).filter(TodoItem.id == todo_id).first()
    if not todo_item:
        raise HTTPException(status_code=404, detail="Item not found")
    # BUG FIX: compare against None so an explicit empty string still
    # updates the text (mirrors the existing "completed is not None" check;
    # the original truthiness test silently dropped "" updates).
    if item.text is not None:
        todo_item.text = item.text
    if item.completed is not None:
        todo_item.completed = item.completed
    session.add(todo_item)
    session.flush()
    session.commit()
    return {"status": "success"}
def post(self):
    """Create an Order with its per-menu OrderProduct rows and their
    OrderOption rows, all in one transaction.

    Returns 201 on success, 400 (after rollback) on any failure.
    """
    args = Order.parser.parse_args()
    # Replace single quotes with double quotes so the payload parses as
    # valid JSON, then load it into a dict.
    data = json.loads(args['data'][0].replace(
        "\'", "\""))
    # Orders table: total amount of the order.
    total_price = data['totalPrice']
    try:
        order_time = datetime.now()  # time the order was placed
        # Create the order row.
        order = models.Order(order_time=order_time, completed=False,
                             total_price=total_price)
        session.add(order)
        # Flush so the order PK exists before linking menu rows to it.
        session.flush()
        # List of ordered menus passed in the payload.
        menu_list = data['menus']
        for each in menu_list:
            # Products table: one OrderProduct record per ordered menu.
            order_menu_pk = each['menuId']  # menu PK
            quantity = each['quantity']  # quantity ordered
            product = models.OrderProduct(order_pk=order.order_pk,
                                          order_menu_pk=order_menu_pk,
                                          quantity=quantity)
            session.add(product)
            # Flush so the product PK exists before linking options to it.
            session.flush()
            product_pk = product.product_pk
            # Order_options table: list of options chosen for this menu.
            for each_option in each['options']:
                option_id = each_option  # option PK
                product_option = models.OrderOption(
                    product_pk=product_pk,
                    option_pk=option_id)  # link option to the ordered menu
                session.add(product_option)
        session.commit()  # persist everything if nothing went wrong
    except Exception as err:
        session.rollback()  # roll back on any error
        # NOTE(review): the session is not closed on this path — confirm
        # whether that is intentional.
        return Response(status=400)  # send error code
    session.close()
    # No problems: send 201 CREATED.
    return Response(status=201)
def add_book_headers():
    """Scrape sportsbook header elements and persist one BooksHeader per book.

    The element's class string encodes a vegas flag:
        'op-book-header '          -> 0
        'op-book-header vegas'     -> 1
        'op-book-header no-vegas'  -> 2
    """
    # BUG FIX: the original if/elif chain left vegas_type unbound (NameError
    # on the first iteration) or stale from the previous iteration whenever
    # the class string matched none of the branches; use an explicit mapping
    # and skip headers with an unrecognized class instead.
    vegas_types = {
        'op-book-header ': 0,
        'op-book-header vegas': 1,
        'op-book-header no-vegas': 2,
    }
    book_header_items = browser.find_elements_by_class_name("op-book-header")
    for book_header in book_header_items:
        book_header_attr = book_header.get_attribute('class')
        book_name = book_header.find_element_by_tag_name('img').get_attribute(
            'alt')
        vegas_type = vegas_types.get(book_header_attr)
        if vegas_type is None:
            continue  # unknown header style — don't store garbage
        new_book_header = BooksHeader(book_name, vegas_type)
        session.add(new_book_header)
        session.flush()
def log_individuals():
    """Snapshot current user presence: create any unseen users, then record
    one Datapoint per non-offline user tied to a fresh Timestamp row.

    No-op (early return) when there is no user data to log.
    """
    timestamp = Timestamp(timestamp=time.time())
    user_data = get_user_data()
    if not user_data:
        return
    # Pair each non-offline user with the canonical representation of
    # their status (custom_rep maps raw status -> stored value).
    formatted_data = [(User(name=name), custom_rep[status]) for name, status
                      in user_data if status != 'offline']
    # Users we have never recorded before (user_store caches name -> ROWID).
    new_users = [user for user, status in formatted_data
                 if user.name not in user_store]
    session.add_all(new_users)
    session.add(timestamp)
    # Commit first so the new rows receive their ROWIDs, which the
    # datapoints below reference.
    session.commit()
    session.flush()
    user_store.update({user.name: user.ROWID for user in new_users})
    datapoints = [Datapoint(user_id=user_store[user.name],
                            timestamp_id=timestamp.ROWID,
                            status=status)
                  for user, status in formatted_data]
    session.add_all(datapoints)
    session.commit()
def song_post():
    """Create a song (and its backing file row) from the JSON request body.

    Expects JSON with keys "song_name", "song_id", "file_name", "file_id".
    Returns 200 with the posted data on success, or 500 when a song with
    the same id already exists.
    """
    headers = {"Location": url_for("song_post"),
               "Content-Type": "application/json"}
    # Get the data from the request body.
    data = request.json
    post_song = models.Song(song_name=data["song_name"], id=data["song_id"])
    # TODO(review): ask sam if we really need to post separately.
    post_file = models.File(song_id=data["song_id"],
                            file_name=data["file_name"],
                            id=data["file_id"])
    if not session.query(models.Song).get(post_song.id):
        # TODO: consider adding a check here for a duplicate file id too.
        session.add_all([post_song, post_file])
        session.commit()
    else:
        # Duplicate id: discard pending objects and report failure.
        # FIX: print() call form works on both Python 2 and 3
        # (was a Python-2-only print statement).
        print("************* ELSE ***************")
        session.rollback()
        session.flush()
        return Response(json.dumps({"status": "failed - that song already exists"}),
                        500, mimetype="application/json")
    return Response(stripUnicode(data), 200, headers=headers,
                    mimetype="application/json")
def post(self): data = Menu.parser.parse_args() #경로를 위한 변수 설정 server_path = '' data = request.form if 'category_pk' not in data.keys(): return Response(status = 404) if 'image' in request.files: #local_path 저장할 경로, server_path 이미지 접근 가능 url 저장 image = request.files['image'] local_path = os.path.join(saveImgDir, 'main/', secure_filename(image.filename)) image.save(local_path) server_path = os.path.join(serverImgDir, 'main/', secure_filename(image.filename)) main_menu = models.Menu() #request에 이미지 파일이 존재하면 객체 menu_image필드에 server_path 저장 if server_path != '': main_menu.menu_image = server_path for i in data.keys(): if i == 'category_pk': main_menu.category_pk = data['category_pk'] elif i == 'menu_name': main_menu.menu_name = data['menu_name'] elif i == 'menu_price': main_menu.menu_price = data['menu_price'] elif i == 'menu_soldout': main_menu.menu_soldout = data['menu_soldout'] elif i == 'menu_description': main_menu.menu_description = data['menu_description'] session.add(main_menu) session.flush() session.commit() session.close() return Response(status = 201)
def post(self):
    """Create a Menu row from multipart form fields, optionally saving an
    uploaded image to disk and recording its server-visible URL.

    Returns 404 when 'category_pk' is missing from the form, 201 on success.
    """
    data = request.form
    server_path = ''
    # FIX: removed leftover debug prints (print(3), image.filename,
    # local_path) that polluted stdout in production.
    if 'category_pk' not in data.keys():
        return Response(status=404)
    if 'image' in request.files:
        # Save the upload locally and remember the URL path clients will use.
        image = request.files['image']
        local_path = os.path.join(saveImgDir, 'main/',
                                  secure_filename(image.filename))
        image.save(local_path)
        server_path = os.path.join(serverImgDir, 'main/',
                                   secure_filename(image.filename))
    main_menu = models.Menu()
    if server_path != '':
        main_menu.menu_image = server_path
    # Copy only recognized form fields onto the model.
    for i in data.keys():
        if i == 'category_pk':
            main_menu.category_pk = data['category_pk']
        elif i == 'menu_name':
            main_menu.menu_name = data['menu_name']
        elif i == 'menu_price':
            main_menu.menu_price = data['menu_price']
        elif i == 'menu_soldout':
            main_menu.menu_soldout = data['menu_soldout']
        elif i == 'menu_description':
            main_menu.menu_description = data['menu_description']
    session.add(main_menu)
    session.flush()
    session.commit()
    return Response(status=201)
def song_delete():
    """Delete a song and its file row, identified by the JSON request body.

    Returns 200 on success, or 500 when no song with the given id exists.
    """
    headers = {"Location": url_for("song_delete")}
    # Get the data from the request body.
    data = request.json
    post_song = models.Song(song_name=data["song_name"], id=data["song_id"])
    # TODO(review): ask sam if we really need to post separately.
    post_file = models.File(song_id=data["song_id"],
                            file_name=data["file_name"],
                            id=data["file_id"])
    if session.query(models.Song).get(post_song.id):
        # TODO: consider adding a check here for a duplicate file id too.
        del_song = session.query(models.Song).get(post_song.id)
        session.delete(del_song)
        del_file = session.query(models.File).get(post_file.id)
        session.delete(del_file)
        session.commit()
    else:
        # Unknown id: nothing to delete.
        # FIX: print() call form works on both Python 2 and 3
        # (was a Python-2-only print statement).
        print("************* ELSE ***************")
        session.rollback()
        session.flush()
        return Response(json.dumps(
            {"status": "failed - that song doesnt exists"}),
            500,
            mimetype="application/json")
    return Response(json.dumps({"status": "deleted"}),
                    200,
                    headers=headers,
                    mimetype="application/json")
def getTask(requestToken, block=True):
    """
    - get a task out from the queue
    - update the task_log entry with dequeue time
    - return the task to the caller (crawlnode)

    On any failure the error is emailed/logged and the session rolled back;
    the function then returns None implicitly.
    """
    try:
        task = tm.get(block, requestToken)
        log.debug('trying to update the corresponding task_log with dequeue time')
        bindModelToDb(workspace_id=task[1].workspace_id)
        session.begin()
        task_log = session.query(model.TaskLog).filter_by(id=task[1].id).one()
        task_log.dequeue_time = datetime.utcnow()
        session.save_or_update(task_log)
        session.flush()
        log.debug("db entry updated for, task_log_id: %s, with a dequeue time." % task[1].id)
        log.debug('returning the task after getting from priority queue and updating dequeue time')
        session.commit()
        return task
    except Exception:
        # FIX: was a bare "except:", which also swallowed SystemExit and
        # KeyboardInterrupt; narrow it to Exception.
        email_exception(str(traceback.format_exc()), interval=600)
        log.exception('failed to get a task from the priority queue or failed to update the dequeue time')
        log.critical('failed to get a task from the priority queue or failed to update the dequeue time')
        if session:
            session.rollback()
def __putTask(task, task_identifier):
    """
    takes a task object
    tries to get a connector class based on the instance_data of a connector
    instance, restores the old session_info for a specific url, then
    calls 'put' on the priority queue

    NOTE: this is Python 2 code (print statements). On failure everything
    is logged and the session rolled back.
    """
    try:
        # Resolve the connector class lazily if the task doesn't carry one.
        if not task.connClass:
            task.connClass = __guessConnectorName(task.instance_data, task.workspace_id)
        # Optional per-connector token list from config ("<name>_tokens").
        try:
            tokens = config.get(section='Connector',
                                option='%s_tokens' % str(task.connClass.lower()))
            if tokens:
                task.token = tuple([t.strip() for t in tokens.split(',') if t.strip()])
        except NoOptionError:
            print "Ignoring tokens for %s" % (str(task.connClass.lower()), )
        # connector = session.query(model.Connector).filter_by(name=task.connClass).first()
        connector = session.query(model.Connector).filter_by(name=task.connClass).first()
        if connector:
            # Merge stored connector configuration into the task's data.
            task.instance_data.update(json.loads(connector.connector_data))
        ## log.debug('got connector class: %s' % task.connClass)
        # TODO - HAS A FLAW - ONLY CHECKING THE WORKSPACE ID RATHER THAN THE
        # CONNECTOR INSTANCE ID; ALSO CHECK FOR CHANGED KEYWORDS.
        # I have task.connector_instance_id >> last connector_instance_log_id
        # >> match uri >> session_info
        last_connector_instance_log = session.query(model.ConnectorInstanceLog).\
            filter(model.ConnectorInstanceLog.id != task.connector_instance_log_id).\
            filter(model.ConnectorInstanceLog.connector_instance_id == task.connector_instance_id).\
            order_by(model.ConnectorInstanceLog._tid.desc()).first()
        if last_connector_instance_log:
            # Most recent completed task_log for the same uri under the
            # previous instance log — its pickled session_info is reused.
            res = session.query(model.TaskLog).\
                filter_by(uri=task.instance_data['uri'],
                          connector_instance_log_id=last_connector_instance_log.id,
                          delete_status=False).\
                order_by(model.TaskLog.completion_time.desc()).first()
            if res:
                log.debug("putting last session info")
                task.session_info = pickle.loads(res.session_info)
        log.debug("trying to create a task log entry in the DB")
        # Creating a task_log and stamping the enqueue time.
        session.begin()
        task_log = model.TaskLog()
        task_log.enqueue_time = datetime.utcnow()
        task_log.uri = task.instance_data['uri']
        task_log.workspace_id = task.workspace_id
        task_log.connector_instance_log_id = task.connector_instance_log_id
        task_log.level = task.level
        task_log.session_info = pickle.dumps(task.session_info)
        session.save_or_update(task_log)
        session.flush()
        session.commit()
        # task_log_id = task_log.id
        log.info("db entry created for, task_log_id: %s, with enqueue time" % task_log.id)
        log.debug('trying to put a task in the priority queue')
        task.id = task_log.id
        task.instance_data['parent_extracted_entities'] = __putParentSessionInfo(task.connector_instance_id)
        log.debug('task put in the priority queue')
        # session.commit()
        task.task_identifier = task_identifier
        # have included this attribute which is different from id
        # as a part of task which will be used to put/remove a task from bdb
        tm.put((task.priority, task, task_identifier))
    except:
        print traceback.format_exc()
        # email_exception(str(traceback.format_exc()), interval=600)
        log.exception('failed to get a connector/get session info or failed to create taskLog/ put task in the priority queue')
        log.critical('failed to get a connector/get session info or failed to create taskLog/ put task in the priority queue')
        if session: session.rollback()
def add_oddshark_ncaa():
    """Scrape OddsShark's NCAAB computer-picks page and persist each matchup.

    Destinations depend on the module-level action_type:
      <= 1        -> local DB (insert or merge by team pair + date + time)
      == 0        -> also queue rows for Google Sheets (written at the end)
      == 0 or 3   -> also upsert into Airtable
    """
    browser.get('https://www.oddsshark.com/ncaab/computer-picks')
    block_system_main = browser.find_element_by_id("block-system-main")
    computer_tables = block_system_main.find_elements_by_css_selector(
        '.table.table--striped.table--fixed-column')
    total_sheet_data = []
    for computer_table in computer_tables:
        if computer_table.get_attribute(
                'class') == 'table table--striped table--fixed-column':
            # Short + long names for both sides of the matchup.
            name_wraps = computer_table.find_elements_by_class_name(
                'name-wrap')
            first_team_name_short = name_wraps[0].find_element_by_class_name(
                'table__name-short').get_attribute('innerHTML')
            first_team_name_long = name_wraps[0].find_element_by_class_name(
                'table__name-long').text
            second_team_name_short = name_wraps[1].find_element_by_class_name(
                'table__name-short').get_attribute('innerHTML')
            second_team_name_long = name_wraps[1].find_element_by_class_name(
                'table__name-long').text
            # The table header carries "<date> @ <time>"; the year is not
            # on the page, so the current year is assumed.
            thead = computer_table.find_element_by_tag_name('thead')
            date_time_ary = thead.find_element_by_tag_name('th').text.split(
                " @ ")
            date_time = date_time_ary[0] + " " + date_time_ary[1]
            insert_game_date = date_time_ary[0] + ' ' + str(
                datetime.datetime.now().year)
            insert_game_date = datetime.datetime.strptime(
                insert_game_date, '%b %d %Y').date()
            insert_game_date = str(insert_game_date)
            # Body rows, in order: predicted score, computer pick,
            # public consensus, consensus bet.
            tbody = computer_table.find_element_by_tag_name('tbody')
            attr_tr_list = tbody.find_elements_by_tag_name('tr')
            predicted_score_tds = attr_tr_list[0].find_elements_by_tag_name(
                'td')
            computer_pick_tds = attr_tr_list[1].find_elements_by_tag_name('td')
            public_consensus_tds = attr_tr_list[2].find_elements_by_tag_name(
                'td')
            consensus_bet_tds = attr_tr_list[3].find_elements_by_tag_name('td')
            if action_type <= 1:
                # store into db — find (or create) both teams first.
                team1 = session.query(NCAATeam).filter(
                    or_(NCAATeam.ShortTeamName == first_team_name_short,
                        NCAATeam.FullTeamName == first_team_name_long)).first()
                team2 = session.query(NCAATeam).filter(
                    or_(NCAATeam.ShortTeamName == second_team_name_short,
                        NCAATeam.FullTeamName == second_team_name_long)).first()
                if not team1:
                    team1 = NCAATeam(first_team_name_short,
                                     first_team_name_long)
                    session.add(team1)
                    session.flush()  # need team1.ID below
                if not team2:
                    team2 = NCAATeam(second_team_name_short,
                                     second_team_name_long)
                    session.add(team2)
                    session.flush()  # need team2.ID below
                # Existing record for this matchup (team pair + date + time)?
                db_record = session.query(OddSharkNCAA).filter(
                    OddSharkNCAA.Team1ID == team1.ID,
                    OddSharkNCAA.Team2ID == team2.ID,
                    OddSharkNCAA.Date == insert_game_date,
                    OddSharkNCAA.Time == date_time_ary[1]).first()
                new_oddshark = OddSharkNCAA(
                    team1.ID, team2.ID, insert_game_date, date_time_ary[1],
                    predicted_score_tds[1].text, predicted_score_tds[2].text,
                    computer_pick_tds[1].text, computer_pick_tds[2].text,
                    public_consensus_tds[1].text, public_consensus_tds[2].text,
                    consensus_bet_tds[1].text, consensus_bet_tds[2].text)
                if not db_record:
                    session.add(new_oddshark)
                    if action_type == 0:
                        # Queue this row for the Google Sheet dump below.
                        total_sheet_data.append(
                            (team1.ShortTeamName, team1.FullTeamName,
                             team2.ShortTeamName, team2.FullTeamName,
                             insert_game_date, date_time_ary[1],
                             predicted_score_tds[1].text,
                             predicted_score_tds[2].text,
                             computer_pick_tds[1].text,
                             computer_pick_tds[2].text,
                             public_consensus_tds[1].text,
                             public_consensus_tds[2].text,
                             consensus_bet_tds[1].text,
                             consensus_bet_tds[2].text))
                else:
                    # Update in place, keeping the existing primary key.
                    new_oddshark.ID = db_record.ID
                    session.merge(new_oddshark)
            if action_type == 0 or action_type == 3:
                # store into airtable — upsert both teams by short name.
                ncaa_first_team = airtable_ncaa_team.match(
                    ncaa_team_info[0], first_team_name_short)
                ncaa_second_team = airtable_ncaa_team.match(
                    ncaa_team_info[0], second_team_name_short)
                if not ncaa_first_team:
                    first_team = airtable_ncaa_team.insert({
                        ncaa_team_info[0]: first_team_name_short,
                        ncaa_team_info[1]: first_team_name_long
                    })
                else:
                    first_team = airtable_ncaa_team.update(
                        ncaa_first_team['id'], {
                            ncaa_team_info[0]: first_team_name_short,
                            ncaa_team_info[1]: first_team_name_long
                        })
                if not ncaa_second_team:
                    second_team = airtable_ncaa_team.insert({
                        ncaa_team_info[0]: second_team_name_short,
                        ncaa_team_info[1]: second_team_name_long
                    })
                else:
                    second_team = airtable_ncaa_team.update(
                        ncaa_second_team['id'], {
                            ncaa_team_info[0]: second_team_name_short,
                            ncaa_team_info[1]: second_team_name_long
                        })
                # Upsert the date / time lookup records.
                game_date = airtable_game_date.match(game_date_info[0],
                                                     insert_game_date)
                if not game_date:
                    game_date = airtable_game_date.insert(
                        {game_date_info[0]: insert_game_date})
                game_time = airtable_game_time.match(game_time_info[0],
                                                     date_time_ary[1])
                if not game_time:
                    game_time = airtable_game_time.insert(
                        {game_time_info[0]: date_time_ary[1]})
                # Airtable formula locating this exact matchup record.
                formula_str = "AND(Team1='" + first_team_name_short + "', Team2='" + second_team_name_short + \
                    "', Date='" + insert_game_date + "', Time='" + date_time_ary[1] + "')"
                fields = {
                    oddshark_ncaa_info[0]: [first_team['id']],
                    oddshark_ncaa_info[1]: [second_team['id']],
                    oddshark_ncaa_info[2]: [game_date['id']],
                    oddshark_ncaa_info[3]: [game_time['id']],
                    oddshark_ncaa_info[4]: predicted_score_tds[1].text,
                    oddshark_ncaa_info[5]: predicted_score_tds[2].text,
                    oddshark_ncaa_info[6]: computer_pick_tds[1].text,
                    oddshark_ncaa_info[7]: computer_pick_tds[2].text,
                    oddshark_ncaa_info[8]: public_consensus_tds[1].text,
                    oddshark_ncaa_info[9]: public_consensus_tds[2].text,
                    oddshark_ncaa_info[10]: consensus_bet_tds[1].text,
                    oddshark_ncaa_info[11]: consensus_bet_tds[2].text
                }
                record = airtable_ncaab.get_all(formula=formula_str)
                if record:
                    airtable_ncaab.replace(record[0]['id'], fields)
                else:
                    airtable_ncaab.insert(fields)
    if action_type == 0 and len(total_sheet_data) > 0:
        # store into gsheet — append the new rows to the existing sheet.
        df = pd.DataFrame(total_sheet_data,
                          columns=OddSharkNCAA.table_columns)
        original_df = wks_ncaab.get_as_df()
        original_df = original_df.append(df)
        wks_ncaab.set_dataframe(original_df, (1, 1))
def current():
    """Flask view: show/switch the user's current work spell.

    Builds the project drop-down (excluding the project being worked on),
    handles the "stop working" button, and handles switching to a new or
    existing project. Renders current.html.
    """
    form = SwitchProjectForm()
    user_timezone = guess_user_timezone(request.remote_addr)
    # The open (end is NULL) spell for this user, if any.
    current_spell = Spell.query.\
        filter(Spell.end == None).\
        join(Project).join(User).filter(User.id == current_user.id).\
        first()

    # Generate a list of existing projects from which the user can choose
    DEFAULT_CHOICE_NO_PROJECT = (0, "")
    form_choices = [DEFAULT_CHOICE_NO_PROJECT]
    for project in current_user.projects:
        form_choices.append((project.id, project.name))

    # Remove current project, if any, from the set of choices.
    # BUG FIX: the original compared against the leftover loop variable
    # ("if current_spell.project == project"), which only worked when the
    # current project happened to be the last one iterated — and raised
    # NameError when the user had no projects at all. Compare against the
    # spell's own project instead.
    if current_spell:
        current_choice = (current_spell.project.id, current_spell.project.name)
        if current_choice in form_choices:
            form_choices.remove(current_choice)
    form.existing_project.choices = form_choices

    # If the user is currently working, they have an option to stop working
    if request.form.get("button") == "... or stop working":
        current_spell.end = datetime.now(user_timezone).replace(tzinfo=None)
        # Add this project back to the form selection drop-down
        form.existing_project.choices.append(
            (current_spell.project_id, current_spell.project.name))
        current_spell = None
        session.commit()
    # Otherwise, the user can choose a new or existing project to work on
    elif form.validate_on_submit():
        # Close the current spell, if one exists
        if current_spell:
            current_spell.end = datetime.now(user_timezone).\
                replace(tzinfo=None)
            form.existing_project.choices.append(
                (current_spell.project_id, current_spell.project.name))
        # If the user wants to start on a new project, create it
        if form.new_project.data:
            current_project = Project(user_id=current_user.id,
                                      name=form.new_project.data)
            # Add this to the projects table
            session.add(current_project)
            session.flush()
        # Otherwise, identify the existing project the user selected
        else:
            current_project = Project.query.\
                filter(Project.id == form.existing_project.data).\
                first()
            # Remove this project from the form selection drop-down
            form.existing_project.choices.remove(
                (current_project.id, current_project.name))
        # Create a new database record for that project name
        current_spell = Spell(
            project_id=current_project.id,
            start=datetime.now(user_timezone).replace(tzinfo=None))
        session.add(current_spell)
        session.commit()

    # Sort choices alphabetically by project name
    form.existing_project.choices.sort(key=itemgetter(1))
    return render_template("current.html",
                           form=form,
                           current_spell=current_spell)
def add_teams_and_odds():
    """Scrape the odds page's left column: for every matchup create both
    teams, an OddsList row, and one OddDetailsList row per sportsbook.

    Walks the divs in document order; 'op-separator-bar' divs update the
    current date context, 'op-matchup-wrapper' divs are games. The game at
    position N in the left column corresponds to the Nth entry in the
    #op-results detail list (tracked via odd_detail_index).
    """
    left_column_wrapper = browser.find_element_by_class_name(
        'op-left-column-wrapper')
    left_odd_not_futures = left_column_wrapper.find_element_by_class_name(
        'not-futures')
    not_future_odd_results = browser.find_element_by_id('op-results')
    not_future_odd_details = not_future_odd_results.find_elements_by_class_name(
        'op-item-row-wrapper')
    odd_detail_index = 0
    left_div_list_not_futures = left_odd_not_futures.find_elements_by_tag_name(
        'div')
    for div_item in left_div_list_not_futures:
        class_name = div_item.get_attribute('class')
        if 'op-separator-bar' in class_name:
            # Date separator: remember the date for the matchups that follow.
            date_json = json.loads(div_item.get_attribute('data-op-date'))
            full_date = date_json['full_date']
            short_date = date_json['short_date']
            date_group_name = date_json['group_name']
        elif 'op-matchup-wrapper' in class_name:
            # One game: time, rotation numbers, and both teams' names.
            time = div_item.find_element_by_class_name("op-matchup-time").text
            top_rotation_number = div_item.find_element_by_class_name(
                "op-rotation-top").text
            bottom_rotation_number = div_item.find_element_by_class_name(
                "op-rotation-bottom").text
            top_team_attr = div_item.find_element_by_class_name(
                "op-team-top").get_attribute('data-op-name')
            bottom_team_attr = div_item.find_element_by_class_name(
                "op-team-bottom").get_attribute('data-op-name')
            top_team_json = json.loads(top_team_attr)
            bottom_team_json = json.loads(bottom_team_attr)
            new_top_team = TeamsList(top_team_json['full_name'],
                                     top_team_json['short_name'])
            new_bottom_team = TeamsList(bottom_team_json['full_name'],
                                        bottom_team_json['short_name'])
            # add team (flush so the new ids are available for the odd row)
            session.add(new_top_team)
            session.add(new_bottom_team)
            session.flush()
            new_odd = OddsList(new_top_team.id, new_bottom_team.id,
                               top_rotation_number, bottom_rotation_number,
                               full_date, short_date, date_group_name, time)
            # add odd (flush so new_odd.id is available for the details)
            session.add(new_odd)
            session.flush()
            # odd detail part: one wrapper per sportsbook for this game.
            book_odd_details = not_future_odd_details[
                odd_detail_index].find_elements_by_class_name(
                    'op-item-wrapper')
            for book_odd_detail in book_odd_details:
                first_row = book_odd_detail.find_element_by_class_name(
                    'op-first-row')
                op_spread = first_row.find_element_by_class_name('op-spread')
                # The trailing class token names the book; look up its id.
                class_name = op_spread.get_attribute('class')
                class_name = class_name.replace('op-item op-spread op-', '')
                book_header_id = session.query(BooksHeader).filter(
                    func.lower(BooksHeader.bookName) == class_name).first().id
                # First (top-team) row: spread / total / moneyline / price.
                first_spread_info = json.loads(
                    op_spread.get_attribute('data-op-info'))
                first_spread_total = json.loads(
                    op_spread.get_attribute('data-op-total'))
                first_spread_money_line = json.loads(
                    op_spread.get_attribute('data-op-moneyline'))
                op_spread_price = first_row.find_element_by_class_name(
                    'spread-price')
                first_spread_price_info = json.loads(
                    op_spread_price.get_attribute('data-op-info'))
                first_spread_price_total = json.loads(
                    op_spread_price.get_attribute('data-op-overprice'))
                # Second (bottom-team) row: same fields, under-price variant.
                second_row = book_odd_detail.find_element_by_class_name(
                    'op-second-row')
                second_op_spread = second_row.find_element_by_class_name(
                    'op-spread')
                second_spread_info = json.loads(
                    second_op_spread.get_attribute('data-op-info'))
                second_spread_total = json.loads(
                    second_op_spread.get_attribute('data-op-total'))
                second_spread_money_line = json.loads(
                    second_op_spread.get_attribute('data-op-moneyline'))
                second_op_spread_price = second_row.find_element_by_class_name(
                    'spread-price')
                second_spread_price_info = json.loads(
                    second_op_spread_price.get_attribute('data-op-info'))
                second_spread_price_total = json.loads(
                    second_op_spread_price.get_attribute('data-op-underprice'))
                # Each metric is stored for fullgame / firsthalf / secondhalf.
                new_odd_detail = OddDetailsList(
                    book_header_id, new_odd.id,
                    first_spread_info['fullgame'],
                    first_spread_info['firsthalf'],
                    first_spread_info['secondhalf'],
                    first_spread_money_line['fullgame'],
                    first_spread_money_line['firsthalf'],
                    first_spread_money_line['secondhalf'],
                    first_spread_total['fullgame'],
                    first_spread_total['firsthalf'],
                    first_spread_total['secondhalf'],
                    first_spread_price_info['fullgame'],
                    first_spread_price_info['firsthalf'],
                    first_spread_price_info['secondhalf'],
                    first_spread_price_total['fullgame'],
                    first_spread_price_total['firsthalf'],
                    first_spread_price_total['secondhalf'],
                    second_spread_info['fullgame'],
                    second_spread_info['firsthalf'],
                    second_spread_info['secondhalf'],
                    second_spread_money_line['fullgame'],
                    second_spread_money_line['firsthalf'],
                    second_spread_money_line['secondhalf'],
                    second_spread_total['fullgame'],
                    second_spread_total['firsthalf'],
                    second_spread_total['secondhalf'],
                    second_spread_price_info['fullgame'],
                    second_spread_price_info['firsthalf'],
                    second_spread_price_info['secondhalf'],
                    second_spread_price_total['fullgame'],
                    second_spread_price_total['firsthalf'],
                    second_spread_price_total['secondhalf'])
                session.add(new_odd_detail)
            odd_detail_index += 1
            print("added one row")
        else:
            continue
import settings
from datetime import datetime
from database import session
from database.models import Message
from channel import Channel

# Deliver the oldest pending message (sent_at and is_success both unset).
# Each attempt marks the row's outcome; the loop stops after the first
# successful send, skipping rows that already failed on later iterations.
while True:
    message = session.query(Message).filter(
        Message.sent_at.is_(None), Message.is_success.is_(None)).first()
    if message is None:
        raise ValueError("Message not found")
    try:
        channel = Channel(settings.BOT_ACCESS_TOKEN, settings.CHAT_ID)
        if message.image is not None:
            channel.send_photo(photo=message.image, caption=message.text)
        else:
            channel.send_message(text=message.text)
    except Exception:
        # FIX: was a bare "except:", which also swallowed KeyboardInterrupt
        # and SystemExit; narrow to Exception (best-effort send preserved).
        message.is_success = False
    else:
        message.sent_at = datetime.now()
        message.is_success = True
    session.flush()
    if message.is_success:
        break
def add_curated_picks_wise_ncaab():
    """Scrape Pickswise's college-basketball page and persist each game's
    curated picks.

    Destinations depend on the module-level action_type:
      <= 1  -> local DB (insert or merge by team pair + date + time)
      == 0  -> also queue rows for Google Sheets (written at the end)
      == 2  -> upsert into Airtable
    Panels missing expected elements are skipped (NoSuchElementException).
    """
    total_sheet_data = []
    browser.get('https://www.pickswise.com/sports/college-basketball/')
    div_list = browser.find_elements_by_css_selector(
        '.ContentPanel.ContentPanel--block.ContentPanel--no-padding')
    for div in div_list:
        # Header looks like "<weekday>, <Mon D>xx - <time> ..." split by <br>;
        # panels without a <br> are not game panels.
        datetime_info = div.find_element_by_tag_name('h2').get_attribute(
            'innerHTML').strip()
        if '<br>' not in datetime_info:
            continue
        datetime_info = datetime_info.split('<br>')[0]
        datetime_info = datetime_info.strip()
        datetime_info = datetime_info.split(', ')[1]
        # [:-2] drops the ordinal suffix (e.g. "st"/"th") from the day.
        date = datetime_info.split(' - ')[0][:-2]
        time = datetime_info.split(' - ')[1]
        time = time.split(' ')[0]
        # The page omits the year, so the current year is assumed.
        date = date + "," + str(datetime.datetime.now().year)
        date = datetime.datetime.strptime(date, '%b %d,%Y').date()
        date = str(date)
        try:
            # Both teams' display names.
            team_list = div.find_element_by_class_name(
                'PreviewCard__teams-container').find_elements_by_class_name(
                    'Pick')
            first_team_name = team_list[0].find_element_by_class_name(
                'Pick__team-name').text
            second_team_name = team_list[1].find_element_by_class_name(
                'Pick__team-name').text
            first_team_name = first_team_name.strip()
            second_team_name = second_team_name.strip()
            # The active tab holds each side's pick outcome/market/prediction.
            pick_list = div.find_element_by_css_selector('.TabbedContent__tab.TabbedContent__tab--active') \
                .find_elements_by_class_name('Pick')
            first_team_pick_outcome = pick_list[0].find_element_by_class_name(
                'Pick__outcome').get_attribute('innerHTML')
            first_team_pick_market = pick_list[0].find_element_by_class_name(
                'Pick__market').get_attribute('innerHTML')
            first_team_prediction = pick_list[0].find_element_by_css_selector(
                '.Button.Button--border-red').get_attribute('innerHTML')
            second_team_pick_outcome = pick_list[1].find_element_by_class_name(
                'Pick__outcome').get_attribute('innerHTML')
            second_team_pick_market = pick_list[1].find_element_by_class_name(
                'Pick__market').get_attribute('innerHTML')
            second_team_prediction = pick_list[1].find_element_by_css_selector(
                '.Button.Button--border-red').get_attribute('innerHTML')
            if action_type <= 1:
                # Find (or create) both teams in the local DB.
                team1 = session.query(NCAATeam).filter(
                    or_(NCAATeam.ShortTeamName == first_team_name,
                        NCAATeam.FullTeamName == first_team_name)).first()
                team2 = session.query(NCAATeam).filter(
                    or_(NCAATeam.ShortTeamName == second_team_name,
                        NCAATeam.FullTeamName == second_team_name)).first()
                if not team1:
                    team1 = NCAATeam('', first_team_name)
                    session.add(team1)
                    session.flush()  # need team1.ID below
                if not team2:
                    team2 = NCAATeam('', second_team_name)
                    session.add(team2)
                    session.flush()  # need team2.ID below
                # Existing record for this matchup (team pair + date + time)?
                db_record = session.query(PicksWiseNCAA).filter(
                    PicksWiseNCAA.Team1ID == team1.ID,
                    PicksWiseNCAA.Team2ID == team2.ID,
                    PicksWiseNCAA.Date == date,
                    PicksWiseNCAA.Time == time).first()
                new_pickwise = PicksWiseNCAA(
                    team1.ID, team2.ID, first_team_prediction,
                    first_team_pick_outcome, first_team_pick_market,
                    second_team_prediction, second_team_pick_outcome,
                    second_team_pick_market, date, time)
                if not db_record:
                    session.add(new_pickwise)
                    if action_type == 0:
                        # Queue this row for the Google Sheet dump below.
                        total_sheet_data.append(
                            (team1.ShortTeamName, team1.FullTeamName,
                             team2.ShortTeamName, team2.FullTeamName, date,
                             time, first_team_prediction,
                             first_team_pick_outcome, first_team_pick_market,
                             second_team_prediction, second_team_pick_outcome,
                             second_team_pick_market))
                else:
                    # Update in place, keeping the existing primary key.
                    new_pickwise.ID = db_record.ID
                    session.merge(new_pickwise)
            if action_type == 2:
                # Airtable formulas: apostrophes break the formula syntax,
                # so both sides of each comparison substitute ' -> space.
                team1_formula_str = 'OR(SUBSTITUTE({' + ncaa_team_info[0] + '}, "\'", " ")="' + \
                    first_team_name.replace("'", " ") + '", SUBSTITUTE({' + \
                    ncaa_team_info[1] + '}, "\'", " ")="' + \
                    first_team_name.replace("'", " ") + '")'
                team2_formula_str = 'OR(SUBSTITUTE({' + ncaa_team_info[0] + '}, "\'", " ")="' + \
                    second_team_name.replace("'", " ") + '", SUBSTITUTE({' + \
                    ncaa_team_info[1] + '}, "\'", " ")="' + \
                    second_team_name.replace("'", " ") + '")'
                ncaa_first_team = airtable_ncaa_team.get_all(
                    formula=team1_formula_str)
                ncaa_second_team = airtable_ncaa_team.get_all(
                    formula=team2_formula_str)
                if not ncaa_first_team:
                    ncaa_first_team = airtable_ncaa_team.insert(
                        {ncaa_team_info[0]: first_team_name})
                else:
                    ncaa_first_team = ncaa_first_team[0]
                if not ncaa_second_team:
                    ncaa_second_team = airtable_ncaa_team.insert(
                        {ncaa_team_info[0]: second_team_name})
                else:
                    ncaa_second_team = ncaa_second_team[0]
                # Upsert the date / time lookup records.
                game_date = airtable_game_date.match(game_date_info[0], date)
                if not game_date:
                    game_date = airtable_game_date.insert(
                        {game_date_info[0]: date})
                game_time = airtable_game_time.match(game_time_info[0], time)
                if not game_time:
                    game_time = airtable_game_time.insert(
                        {game_time_info[0]: time})
                formula_str = 'AND(SUBSTITUTE({Team1}, "\'", " ")="' + \
                    ncaa_first_team['fields']['NCAA Team Name'].replace("'", " ") + \
                    '", SUBSTITUTE({Team2}, "\'", " ")="' + \
                    ncaa_second_team['fields']['NCAA Team Name'].replace("'", " ") + '", {Date}="' + date + '")'
                fields = {
                    pickwise_ncaa_info[0]: [ncaa_first_team['id']],
                    pickwise_ncaa_info[1]: [ncaa_second_team['id']],
                    pickwise_ncaa_info[2]: [game_date['id']],
                    pickwise_ncaa_info[3]: [game_time['id']],
                    pickwise_ncaa_info[4]: first_team_prediction,
                    pickwise_ncaa_info[5]: second_team_prediction,
                    pickwise_ncaa_info[6]: first_team_pick_outcome,
                    pickwise_ncaa_info[7]: second_team_pick_outcome,
                    pickwise_ncaa_info[8]: first_team_pick_market,
                    pickwise_ncaa_info[9]: second_team_pick_market
                }
                record = airtable_pickwise_ncaa.get_all(formula=formula_str)
                if record:
                    airtable_pickwise_ncaa.replace(record[0]['id'], fields)
                else:
                    airtable_pickwise_ncaa.insert(fields)
        except NoSuchElementException:
            print("No Pickwise NCAAB data")
            continue
    if action_type == 0 and len(total_sheet_data) > 0:
        # store into gsheet — append the new rows to the existing sheet.
        df = pd.DataFrame(total_sheet_data,
                          columns=PicksWiseNCAA.gsheet_table_columns)
        original_df = wks_pickwise_ncaa.get_as_df()
        original_df = original_df.append(df)
        wks_pickwise_ncaa.set_dataframe(original_df, (1, 1))
def add_team_score():
    """Scrape upcoming-game ranks/scores from the Haslametrics page and
    persist them to the DB (action_type <= 1), Google Sheets (action_type
    == 0) and/or Airtable (the elif branch below).

    NOTE(review): relies on module-level globals (browser, session,
    action_type, time, datetime, pd, airtable_* clients, *_info field-name
    lists, wks) defined elsewhere in this file.
    """
    # Give the page time to finish rendering before scraping.
    time.sleep(5)
    total_sheet_data = []
    # The first <option> of the dates combo holds the upcoming date, e.g.
    # "Mon, January 1, 2021" -> keep "January 1, 2021".
    date_selections = browser.find_element_by_id('cboUpcomingDates')
    date_info = date_selections.find_elements_by_tag_name(
        'option')[0].get_attribute('innerHTML').split(', ')
    date = date_info[1] + ", " + date_info[2]
    date = datetime.datetime.strptime(date, '%B %d, %Y').date()
    date = str(date)  # normalized to ISO "YYYY-MM-DD"
    table = browser.find_element_by_id('myTable4')
    # All non-filler cells of the upcoming-games table.
    td_list = table.find_elements_by_xpath(
        '//*[starts-with(@id, "tdUpcoming_") and not(contains(@class, "filler"))]'
    )
    # The table lays games out four at a time; accumulate into 4 records.
    new_team_scores = [
        HaslaMetrics(),
        HaslaMetrics(),
        HaslaMetrics(),
        HaslaMetrics()
    ]
    new_team_scores[0].Date = date
    new_team_scores[1].Date = date
    new_team_scores[2].Date = date
    new_team_scores[3].Date = date
    add_index = 0
    for td in td_list:
        td_text = td.get_attribute('innerHTML')
        td_id = td.get_attribute('id')
        # if len(td_text) == 0 and '_sc' not in td_id:
        if len(td_text) == 0:
            continue
        # Cell ids look like "tdUpcoming_<row>_<col>[_sc]"; map the row
        # number onto one of the four in-flight records.
        td_infos = td_id.split('_')
        td_infos[1] = int(td_infos[1])
        td_infos[1] -= 1
        td_infos[1] %= 4
        if td_infos[2] == '1':
            # Column '1' holds team-1 data.
            if '_sc' not in td_id:
                # Name/rank cell (score cells carry the "_sc" suffix).
                first_team_name = td.find_element_by_tag_name(
                    'a').get_attribute('innerHTML')
                new_team_scores[td_infos[1]].Team1Name = first_team_name
                team1 = session.query(NCAATeam).filter(
                    or_(NCAATeam.ShortTeamName == first_team_name,
                        NCAATeam.FullTeamName == first_team_name)).first()
                if action_type <= 1:
                    # Auto-create unknown teams so the FK below resolves.
                    if not team1:
                        team1 = NCAATeam(first_team_name)
                        session.add(team1)
                        session.flush()
                    new_team_scores[td_infos[1]].Team1ID = team1.ID
                new_team_scores[td_infos[1]].Team1Rank = int(
                    td.find_element_by_tag_name('sub').get_attribute(
                        'innerHTML'))
            else:
                new_team_scores[td_infos[1]].Team1Score = float(
                    td.get_attribute('innerHTML'))
        else:
            # Otherwise it is team-2 data for the same game.
            if '_sc' not in td_id:
                second_team_name = td.find_element_by_tag_name(
                    'a').get_attribute('innerHTML')
                new_team_scores[td_infos[1]].Team2Name = second_team_name
                team2 = session.query(NCAATeam).filter(
                    or_(NCAATeam.ShortTeamName == second_team_name,
                        NCAATeam.FullTeamName == second_team_name)).first()
                if action_type <= 1:
                    if not team2:
                        team2 = NCAATeam(second_team_name)
                        session.add(team2)
                        session.flush()
                    new_team_scores[td_infos[1]].Team2ID = team2.ID
                new_team_scores[td_infos[1]].Team2Rank = int(
                    td.find_element_by_tag_name('sub').get_attribute(
                        'innerHTML'))
            else:
                new_team_scores[td_infos[1]].Team2Score = float(
                    td.get_attribute('innerHTML'))
        add_index += 1
        # Every 16 processed cells completes a batch of 4 games -> persist.
        if add_index % 16 == 0:
            for idx in range(4):
                if action_type <= 1:
                    # Insert new rows; update existing ones (same teams+date)
                    # via merge.
                    db_record = session.query(HaslaMetrics).filter(
                        HaslaMetrics.Team1ID == new_team_scores[idx].Team1ID,
                        HaslaMetrics.Team2ID == new_team_scores[idx].Team2ID,
                        HaslaMetrics.Date == date).first()
                    if not db_record:
                        session.add(new_team_scores[idx])
                        if action_type == 0:
                            first_team = session.query(NCAATeam).filter(
                                NCAATeam.ID ==
                                new_team_scores[idx].Team1ID).first()
                            second_team = session.query(NCAATeam).filter(
                                NCAATeam.ID ==
                                new_team_scores[idx].Team2ID).first()
                            total_sheet_data.append(
                                (first_team.ShortTeamName,
                                 first_team.FullTeamName,
                                 second_team.ShortTeamName,
                                 second_team.FullTeamName,
                                 new_team_scores[idx].Date,
                                 new_team_scores[idx].Team1Rank,
                                 new_team_scores[idx].Team2Rank,
                                 new_team_scores[idx].Team1Score,
                                 new_team_scores[idx].Team2Score))
                    else:
                        new_team_scores[idx].ID = db_record.ID
                        session.merge(new_team_scores[idx])
                elif action_type == 0 or action_type == 2:
                    # NOTE(review): action_type == 0 can never reach this
                    # elif (it is caught by "action_type <= 1" above), so in
                    # practice this is the Airtable-only path (== 2).
                    # Match teams by short or full name, apostrophes
                    # normalized to spaces for the formula syntax.
                    team1_formula_str = 'OR(SUBSTITUTE({' + ncaa_team_info[0] + '}, "\'", " ")="' + \
                        new_team_scores[idx].Team1Name.replace("'", " ") + '", SUBSTITUTE({' + \
                        ncaa_team_info[1] + '}, "\'", " ")="' + \
                        new_team_scores[idx].Team1Name.replace("'", " ") + '")'
                    team2_formula_str = 'OR(SUBSTITUTE({' + ncaa_team_info[0] + '}, "\'", " ")="' + \
                        new_team_scores[idx].Team2Name.replace("'", " ") + '", SUBSTITUTE({' + \
                        ncaa_team_info[1] + '}, "\'", " ")="' + \
                        new_team_scores[idx].Team2Name.replace("'", " ") + '")'
                    ncaa_first_team = airtable_ncaa_team.get_all(
                        formula=team1_formula_str)
                    ncaa_second_team = airtable_ncaa_team.get_all(
                        formula=team2_formula_str)
                    # Create missing Airtable records on the fly.
                    if not ncaa_first_team:
                        ncaa_first_team = airtable_ncaa_team.insert({
                            ncaa_team_info[0]: new_team_scores[idx].Team1Name
                        })
                    else:
                        ncaa_first_team = ncaa_first_team[0]
                    if not ncaa_second_team:
                        ncaa_second_team = airtable_ncaa_team.insert({
                            ncaa_team_info[0]: new_team_scores[idx].Team2Name
                        })
                    else:
                        ncaa_second_team = ncaa_second_team[0]
                    game_date = airtable_game_date.match(
                        game_date_info[0], date)
                    if not game_date:
                        game_date = airtable_game_date.insert(
                            {game_date_info[0]: date})
                    formula_str = 'AND(SUBSTITUTE({Team1}, "\'", " ")="' + \
                        ncaa_first_team['fields']['NCAA Team Name'].replace("'", " ") + \
                        '", SUBSTITUTE({Team2}, "\'", " ")="' + \
                        ncaa_second_team['fields']['NCAA Team Name'].replace("'", " ") + '", {Date}="' + date + '")'
                    fields = {
                        hasla_metrics_info[0]: [ncaa_first_team['id']],
                        hasla_metrics_info[1]: [ncaa_second_team['id']],
                        hasla_metrics_info[2]: [game_date['id']],
                        hasla_metrics_info[3]: new_team_scores[idx].Team1Rank,
                        hasla_metrics_info[4]: new_team_scores[idx].Team2Rank,
                        hasla_metrics_info[5]:
                            float(new_team_scores[idx].Team1Score),
                        hasla_metrics_info[6]:
                            float(new_team_scores[idx].Team2Score),
                    }
                    # Upsert the metrics row keyed by (team1, team2, date).
                    record = airtable_haslametrics.get_all(formula=formula_str)
                    if record:
                        airtable_haslametrics.replace(record[0]['id'], fields)
                    else:
                        airtable_haslametrics.insert(fields)
            # Reset the batch for the next 4 games.
            new_team_scores = [
                HaslaMetrics(),
                HaslaMetrics(),
                HaslaMetrics(),
                HaslaMetrics()
            ]
            new_team_scores[0].Date = date
            new_team_scores[1].Date = date
            new_team_scores[2].Date = date
            new_team_scores[3].Date = date
    if action_type == 0 and len(total_sheet_data) > 0:
        # store into gsheet
        df = pd.DataFrame(total_sheet_data,
                          columns=HaslaMetrics.gsheet_table_columns)
        original_df = wks.get_as_df()
        original_df = original_df.append(df)
        wks.set_dataframe(original_df, (1, 1))
def add_vegas_insider():
    """Scrape the VegasInsider "Iskoe spreadsheet" stats table and persist
    one row per team to the DB (action_type <= 1), Google Sheets
    (action_type == 0) and/or Airtable (action_type == 2).

    NOTE(review): relies on module-level globals (browser, session,
    action_type, pd, airtable_* clients, *_info field-name lists, wks)
    defined elsewhere in this file.
    """
    total_sheet_data = []
    browser.get('https://www.vegasinsider.com/college-basketball/stats/iskoe-spreadsheet/')
    # The page header text starts with the season name.
    season_name = browser.find_element_by_class_name('viHeaderNorm').text
    season_name = season_name.split(' ')[0]
    tr_list = browser.find_element_by_class_name(
        'viBodyBorderNorm').find_elements_by_tag_name('tr')
    for index, tr in enumerate(tr_list):
        # Skip the four header rows.
        if index <= 3:
            continue
        td_list = tr.find_elements_by_tag_name('td')
        # Team name cell sometimes wraps the name in a .tableText element.
        try:
            team_name = td_list[1].find_element_by_class_name(
                'tableText').get_attribute('innerHTML').strip()
        except NoSuchElementException:
            team_name = td_list[1].get_attribute('innerHTML').strip()
        # print(team_name)
        if action_type <= 1:
            team1 = session.query(NCAATeam).filter(
                or_(NCAATeam.ShortTeamName == team_name,
                    NCAATeam.FullTeamName == team_name)).first()
            # Auto-create unknown teams so the FK below resolves.
            if not team1:
                team1 = NCAATeam(team_name)
                session.add(team1)
                session.flush()
            db_record = session.query(VegasInsider).filter(
                VegasInsider.Season == season_name,
                VegasInsider.TeamID == team1.ID).first()
            # Column 1 is the team name; every other column is a stat value
            # passed positionally to the VegasInsider constructor.
            new_vegasinsider = VegasInsider(season_name, team1.ID,
                                            td_list[0].text, td_list[2].text,
                                            td_list[3].text, td_list[4].text,
                                            td_list[5].text, td_list[6].text,
                                            td_list[7].text, td_list[8].text,
                                            td_list[9].text, td_list[10].text,
                                            td_list[11].text, td_list[12].text,
                                            td_list[13].text, td_list[14].text,
                                            td_list[15].text, td_list[16].text,
                                            td_list[17].text, td_list[18].text,
                                            td_list[19].text, td_list[20].text,
                                            td_list[21].text, td_list[22].text,
                                            td_list[23].text)
            # Insert new rows; update existing ones (same season+team) via
            # merge.
            if not db_record:
                session.add(new_vegasinsider)
                if action_type == 0:
                    total_sheet_data.append((season_name,
                                             team1.ShortTeamName,
                                             team1.FullTeamName,
                                             td_list[0].text, td_list[2].text,
                                             td_list[3].text, td_list[4].text,
                                             td_list[5].text, td_list[6].text,
                                             td_list[7].text, td_list[8].text,
                                             td_list[9].text, td_list[10].text,
                                             td_list[11].text, td_list[12].text,
                                             td_list[13].text, td_list[14].text,
                                             td_list[15].text, td_list[16].text,
                                             td_list[17].text, td_list[18].text,
                                             td_list[19].text, td_list[20].text,
                                             td_list[21].text, td_list[22].text,
                                             td_list[23].text))
            else:
                new_vegasinsider.ID = db_record.ID
                session.merge(new_vegasinsider)
        if action_type == 2:
            # Match the Airtable team by short or full name (apostrophes
            # normalized to spaces), creating missing records on the fly.
            team_formula_str = 'OR(SUBSTITUTE({' + ncaa_team_info[0] + '}, "\'", " ")="' + \
                team_name.replace("'", " ") + '", SUBSTITUTE({' + \
                ncaa_team_info[1] + '}, "\'", " ")="' + \
                team_name.replace("'", " ") + '")'
            ncaa_team = airtable_ncaa_team.get_all(formula=team_formula_str)
            if not ncaa_team:
                ncaa_team = airtable_ncaa_team.insert(
                    {ncaa_team_info[0]: team_name})
            else:
                ncaa_team = ncaa_team[0]
            season = airtable_season.match(season_info[0], season_name)
            if not season:
                season = airtable_season.insert({season_info[0]: season_name})
            formula_str = 'AND(SUBSTITUTE({Team}, "\'", " ")="' + \
                ncaa_team['fields']['NCAA Team Name'].replace("'", " ") + \
                '", {Season}="' + season_name + '")'
            fields = {
                vegasinsider_ncaa_info[0]: [season['id']],
                vegasinsider_ncaa_info[1]: [ncaa_team['id']],
                vegasinsider_ncaa_info[2]: int(td_list[0].text),
                vegasinsider_ncaa_info[3]: float(td_list[2].text),
                vegasinsider_ncaa_info[4]: float(td_list[3].text),
                vegasinsider_ncaa_info[5]: float(td_list[4].text),
                vegasinsider_ncaa_info[6]: float(td_list[5].text),
                vegasinsider_ncaa_info[7]: float(td_list[6].text),
                vegasinsider_ncaa_info[8]: int(td_list[7].text),
                vegasinsider_ncaa_info[9]: int(td_list[8].text),
                vegasinsider_ncaa_info[10]: float(td_list[9].text),
                vegasinsider_ncaa_info[11]: float(td_list[10].text),
                vegasinsider_ncaa_info[12]: float(td_list[11].text),
                vegasinsider_ncaa_info[13]: int(td_list[12].text),
                vegasinsider_ncaa_info[14]: int(td_list[13].text),
                vegasinsider_ncaa_info[15]: int(td_list[14].text),
                vegasinsider_ncaa_info[16]: float(td_list[15].text),
                vegasinsider_ncaa_info[17]: float(td_list[16].text),
                vegasinsider_ncaa_info[18]: float(td_list[17].text),
                vegasinsider_ncaa_info[19]: float(td_list[18].text),
                vegasinsider_ncaa_info[20]: float(td_list[19].text),
                vegasinsider_ncaa_info[21]: float(td_list[20].text),
                vegasinsider_ncaa_info[22]: float(td_list[21].text),
                vegasinsider_ncaa_info[23]: int(td_list[22].text),
                vegasinsider_ncaa_info[24]: int(td_list[23].text)
            }
            # Upsert the stats row keyed by (team, season).
            record = airtable_vegasinsider_ncaa.get_all(formula=formula_str)
            if record:
                airtable_vegasinsider_ncaa.replace(record[0]['id'], fields)
            else:
                airtable_vegasinsider_ncaa.insert(fields)
    if action_type == 0 and len(total_sheet_data) > 0:
        # store into gsheet
        df = pd.DataFrame(total_sheet_data,
                          columns=VegasInsider.gsheet_table_columns)
        original_df = wks.get_as_df()
        original_df = original_df.append(df)
        wks.set_dataframe(original_df, (1, 1))
def create_new_user(**kwargs):
    """Build a User from keyword fields, stage it, and return it.

    Flushes the session so database-generated fields (e.g. the primary
    key) are populated on the returned object; committing is left to the
    caller.
    """
    new_user = User(**kwargs)
    session.add(new_user)
    session.flush()
    return new_user
def current():
    """Render the "current work" page and handle project switching.

    Shows the user's open spell (if any), lets them stop working, or start
    a spell on a new or existing project. Returns the rendered template.
    """
    form = SwitchProjectForm()
    user_timezone = guess_user_timezone(request.remote_addr)
    # The user's open spell, if any (a spell with no end time).
    current_spell = Spell.query.\
        filter(Spell.end == None).\
        join(Project).join(User).filter(User.id == current_user.id).\
        first()

    # Generate a list of existing projects from which user can choose
    DEFAULT_CHOICE_NO_PROJECT = (0, "")
    form_choices = [DEFAULT_CHOICE_NO_PROJECT]
    for project in current_user.projects:
        form_choices.append((project.id, project.name))

    # Remove current project, if any, from the set of choices.
    # BUG FIX: the original compared current_spell.project against the loop
    # variable left over from the iteration above, so the current project
    # was only removed when it happened to be the *last* project listed,
    # and raised NameError when the user had no projects at all.
    if current_spell:
        current_choice = (current_spell.project.id,
                          current_spell.project.name)
        if current_choice in form_choices:
            form_choices.remove(current_choice)
    form.existing_project.choices = form_choices

    # If the user is currently working, they have an option to stop working
    if request.form.get("button") == "... or stop working":
        current_spell.end = datetime.now(user_timezone).replace(tzinfo=None)
        # Add this project back to the form selection drop-down
        form.existing_project.choices.append(
            (current_spell.project_id, current_spell.project.name))
        current_spell = None
        session.commit()
    # Otherwise, the user can choose a new or existing project to work on
    elif form.validate_on_submit():
        # Close the current spell, if one exists
        if current_spell:
            current_spell.end = datetime.now(user_timezone).\
                replace(tzinfo=None)
            form.existing_project.choices.append(
                (current_spell.project_id, current_spell.project.name))
        # If the user wants to start on a new project, create it
        if form.new_project.data:
            current_project = Project(
                user_id=current_user.id,
                name=form.new_project.data
            )
            # Add this to the projects table; flush so its id is assigned.
            session.add(current_project)
            session.flush()
        # Otherwise, identify the existing project the user selected
        else:
            current_project = Project.query.\
                filter(Project.id == form.existing_project.data).\
                first()
            # Remove this project from the form selection drop-down
            form.existing_project.choices.remove(
                (current_project.id, current_project.name))
        # Create a new database record for work on that project
        current_spell = Spell(
            project_id=current_project.id,
            start=datetime.now(user_timezone).replace(tzinfo=None)
        )
        session.add(current_spell)
        session.commit()

    # Sort choices alphabetically by project name
    form.existing_project.choices.sort(key=itemgetter(1))
    return render_template(
        "current.html", form=form, current_spell=current_spell)
def add_curated_picks_ncaab():
    """Scrape curated NCAAB betting picks from teamrankings.com and persist
    them to the DB (action_type <= 1), Google Sheets (action_type == 0)
    and/or Airtable (action_type == 0 or 2).

    NOTE(review): relies on module-level globals (browser, session,
    action_type, time, datetime, Select, pd, airtable_* clients, *_info
    field-name lists, wks_ncaa) defined elsewhere in this file.
    """
    total_sheet_data = []
    browser.get('https://www.teamrankings.com/ncaa-basketball-betting-picks/')
    # Give the DataTables widget time to render.
    time.sleep(3)
    body = browser.find_element_by_id(
        'DataTables_Table_0').find_element_by_tag_name('tbody')
    tr_list = body.find_elements_by_tag_name('tr')
    first_td_list = tr_list[0].find_elements_by_tag_name('td')
    # A single-cell first row is the "no data" placeholder.
    if len(first_td_list) == 1:
        print("no TeamRanking-NCAA data")
        return
    # Selected option reads like "<weekday> <Mon> <day>"; append the current
    # year and normalize to ISO "YYYY-MM-DD".
    date = Select(browser.find_element_by_class_name(
        'redirectOnChange')).first_selected_option.text
    date_info = date.split(' ')[1] + "," + date.split(' ')[2] + "," + str(
        datetime.datetime.now().year)
    date = datetime.datetime.strptime(date_info, '%b,%d,%Y').date()
    date = str(date)
    for tr in tr_list:
        td_list = tr.find_elements_by_tag_name('td')
        # Rotation numbers and team names come two-per-cell, <br>-separated.
        rot_list = td_list[0].get_attribute('innerHTML').strip().split('<br>')
        team_list = td_list[1].get_attribute('innerHTML').strip().split('<br>')
        team_list[0] = team_list[0].strip()
        team_list[1] = team_list[1].strip()
        game_winner = td_list[2].find_element_by_class_name(
            'picks-block-in').text.strip()
        ATS = td_list[3].find_element_by_class_name(
            'picks-block-in').text.strip()
        total = td_list[4].find_element_by_class_name(
            'picks-block-in').text.strip()
        money_line_value = td_list[5].find_element_by_class_name(
            'picks-block-in').text.strip()
        if action_type <= 1:
            team1 = session.query(NCAATeam).filter(
                or_(NCAATeam.ShortTeamName == team_list[0],
                    NCAATeam.FullTeamName == team_list[0])).first()
            team2 = session.query(NCAATeam).filter(
                or_(NCAATeam.ShortTeamName == team_list[1],
                    NCAATeam.FullTeamName == team_list[1])).first()
            # Auto-create unknown teams so the FKs below resolve.
            if not team1:
                team1 = NCAATeam(team_list[0])
                session.add(team1)
                session.flush()
            if not team2:
                team2 = NCAATeam(team_list[1])
                session.add(team2)
                session.flush()
            # Insert new rows; update existing ones (same teams+date) via
            # merge.
            db_record = session.query(TeamRankingNCAA).filter(
                TeamRankingNCAA.Team1ID == team1.ID,
                TeamRankingNCAA.Team2ID == team2.ID,
                TeamRankingNCAA.Date == date).first()
            new_teamranking = TeamRankingNCAA(team1.ID, int(rot_list[0]),
                                              team2.ID, int(rot_list[1]),
                                              game_winner, ATS, total,
                                              money_line_value, date)
            if not db_record:
                session.add(new_teamranking)
                if action_type == 0:
                    total_sheet_data.append(
                        (team1.ShortTeamName, team1.FullTeamName,
                         team2.ShortTeamName, team2.FullTeamName,
                         rot_list[0], rot_list[1], game_winner, ATS, total,
                         money_line_value, date))
            else:
                new_teamranking.ID = db_record.ID
                session.merge(new_teamranking)
        if action_type == 0 or action_type == 2:
            # Upsert the same pick into Airtable, matching teams by either
            # short or full name with apostrophes normalized to spaces.
            team1_formula_str = 'OR(SUBSTITUTE({' + ncaa_team_info[0] + '}, "\'", " ")="' + \
                team_list[0].replace("'", " ") + '", SUBSTITUTE({' + \
                ncaa_team_info[1] + '}, "\'", " ")="' + \
                team_list[0].replace("'", " ") + '")'
            team2_formula_str = 'OR(SUBSTITUTE({' + ncaa_team_info[0] + '}, "\'", " ")="' + \
                team_list[1].replace("'", " ") + '", SUBSTITUTE({' + \
                ncaa_team_info[1] + '}, "\'", " ")="' + \
                team_list[1].replace("'", " ") + '")'
            ncaa_first_team = airtable_ncaa_team.get_all(
                formula=team1_formula_str)
            ncaa_second_team = airtable_ncaa_team.get_all(
                formula=team2_formula_str)
            # Create missing Airtable records on the fly.
            if not ncaa_first_team:
                ncaa_first_team = airtable_ncaa_team.insert(
                    {ncaa_team_info[0]: team_list[0]})
            else:
                ncaa_first_team = ncaa_first_team[0]
            if not ncaa_second_team:
                ncaa_second_team = airtable_ncaa_team.insert(
                    {ncaa_team_info[0]: team_list[1]})
            else:
                ncaa_second_team = ncaa_second_team[0]
            game_date = airtable_game_date.match(game_date_info[0], date)
            if not game_date:
                game_date = airtable_game_date.insert(
                    {game_date_info[0]: date})
            formula_str = 'AND(SUBSTITUTE({Team1}, "\'", " ")="' + \
                ncaa_first_team['fields']['NCAA Team Name'].replace("'", " ") + \
                '", SUBSTITUTE({Team2}, "\'", " ")="' + \
                ncaa_second_team['fields']['NCAA Team Name'].replace("'", " ") + '", {Date}="' + date + '")'
            fields = {
                teamranking_ncaa_info[0]: [ncaa_first_team['id']],
                teamranking_ncaa_info[1]: [ncaa_second_team['id']],
                teamranking_ncaa_info[2]: [game_date['id']],
                teamranking_ncaa_info[3]: int(rot_list[0]),
                teamranking_ncaa_info[4]: int(rot_list[1]),
                teamranking_ncaa_info[5]: game_winner,
                teamranking_ncaa_info[6]: ATS,
                teamranking_ncaa_info[7]: total,
                teamranking_ncaa_info[8]: money_line_value
            }
            # Upsert the pick row keyed by (team1, team2, date).
            record = airtable_teamranking_ncaa.get_all(formula=formula_str)
            if record:
                airtable_teamranking_ncaa.replace(record[0]['id'], fields)
            else:
                airtable_teamranking_ncaa.insert(fields)
    if action_type == 0 and len(total_sheet_data) > 0:
        # store into gsheet
        df = pd.DataFrame(total_sheet_data,
                          columns=TeamRankingNCAA.gsheet_table_columns)
        original_df = wks_ncaa.get_as_df()
        original_df = original_df.append(df)
        wks_ncaa.set_dataframe(original_df, (1, 1))
def save_to_db(**kwargs):
    """Create a User from keyword fields and stage it in the session.

    Flushes so database-generated fields (e.g. the primary key) are
    populated. Returns the new User so callers can use the generated id —
    the original created the object and then discarded it (returned None),
    unlike the sibling `create_new_user`; returning it is backward
    compatible with callers that ignore the result.
    """
    user_obj = User(**kwargs)
    session.add(user_obj)
    session.flush()
    return user_obj
def newItem(): """This route is used behind the scenes to view an item. It forwards the request on to viewCatItem. Requires a user to be authenticated. Notes: This route could be changed to reflect which category it will be in. Returns: A GET request presents the user with a form for creating a new Item. A POST request processes the user's input from the form and adds the new item. """ # A user session must exist to add an item. if isActiveSession() is False: return redirect(url_for("listItem")) if request.method == "POST": # Process the new Item from the submitted form. # Make sure that an item associated with this category doesn't already have # the name of the one submitted in the form. category = Category.query.filter_by(name=request.form["category"]).one() newItemName = request.form["name"] try: # We should find either zero or one item in a category with a given # name. items = Item.query.filter_by(cat_id=category.id, name=newItemName).one_or_none() print "newItem: items = {0}".format(items) except MultipleResultsFound as e: # We more than one item with the newItemName in it's category. print "Multiple " + e flash("{0} items named {1} in {2} already.".format(len(items), newItemName, category.name)) return redirect(url_for("viewCategory", key=category.id)) if items is None: # This is a new Item for this category and it's name is unique # in the category. 
# Handle uploaded image picture = request.files["picture"] pictureUrl = processImageUpload(picture) # Create the New Item and add it to the Database newItem = Item( name=request.form["name"], dateCreated=datetime.strptime(request.form["created"], "%Y-%m-%d"), cat_id=category.id, description=request.form["description"], user_id=getSessionUserInfo()["id"], picture=pictureUrl, ) session.add(newItem) session.flush() session.commit() flash("New item created!") # Present the user with a view of the new item return redirect(url_for("viewItem", key=newItem.id)) else: # Alert the user to an already exisiting item with the specified name. flash("An item with the name {0} already exists in {1}.".format(newItemName, category.name)) # Send the user back to the newItem Form. return redirect(url_for("newItem")) else: # Present the User with the New Item Form return render_template( "generic.html", modelType="item", viewType=os.path.join("partials", "new.html"), traits=Item.defaultTraits() )
def __createTasks(connector_instances, priority):
    """Create and enqueue a task for each scheduled connector instance.

    - gets a calculated frequency from the taskfeeder
    - calculates a priority based on the frequency: priority = 96/frequency + 1
        frequency | priority
        -         | 1    (such tasks do not come from schedule -
                          online, one time, weekly, monthly)
        96        | 2    (15 mins job)
        1         | 97   (once in a day)
    - reads scheduled connector instances from db according to the frequency
    - creates connector instance log and connector instance data for each
      connector instance
    - creates task objects and putTask
    - if applyKeywords == False, put keywords=None
    """
    try:
        log.debug("iterating over connector instances, to create tasks")
        # to give the online tasks priority = 1
        for connector_instance in connector_instances:
            try:
                log.debug('trying to create a connector instance log')
                # Skip instances that cannot / should not be enqueued now.
                if not __enqueueConnector(connector_instance):
                    continue
                task_identifier = __getTaskIdentifier(
                    connector_instance.workspace_id,
                    json.loads(connector_instance.instance_data)['uri'],
                    priority,
                    json.loads(connector_instance.instance_data).get(
                        'instance_filter_words'))
                print task_identifier
                if task_identifier:
                    session.begin()
                    print connector_instance.id
                    connector_instance_log = model.ConnectorInstanceLog()
                    connector_instance_log.connector_instance_id = connector_instance.id
                    print connector_instance_log
                    session.save_or_update(connector_instance_log)
                    # Flush so the log row gets its generated id before it
                    # is passed to __createTask below.
                    session.flush()
                    print "got a connector instance log"
                    log.debug("got a connector instance log")
                    log.debug("trying to recreate connector instance data from connector and instance data")
                    # re-construct the instance_data (merge the connector
                    # data in)
                    task = __createTask(connector_instance,
                                        connector_instance_log.id, priority)
                    log.debug("task created")
                    print "task created"
                    # NOTE(review): commit happens before __putTask, so the
                    # log row persists even if putTask fails - original
                    # author was unsure about this placement ("NOT SURE
                    # ABOUT THE LOCATION OF COMMIT").
                    session.commit()
                    log.debug('calling tm putTask')
                    print "calling tm puttask"
                    __putTask(task, task_identifier)
                    print "return from tm puttask"
                    log.debug('return from tm putTask')
                else:
                    log.info('task already enqueued , so not enqueuing again')
            except:
                # Best-effort sweep: one bad instance must not abort the
                # whole iteration; roll back its partial transaction.
                print traceback.format_exc()
                log.exception('one of the scheduled task failed to be read')
                log.critical('one of the scheduled task failed to be read')
                session.rollback()
        log.debug('all scheduled tasks created, iteration done')
    except:
        log.exception('failed to read schedule')
        log.critical('failed to read schedule')
def captura(url):
    """Scrape a PUC course-listing page: store each campus (Unidade) and,
    for every course link, the course details, objectives (Objetivo) and
    syllabus items (Programa).

    NOTE(review): relies on module-level globals (init_browser, session,
    Unidade, Curso, Objetivo, Programa, requests, BeautifulSoup, re)
    defined elsewhere in this file.
    """
    b = init_browser()
    r = requests.get(url)
    soup = BeautifulSoup(r.text, 'html.parser')
    links = soup.select('a.puc-wp-encontre-seu-curso-link-curso')
    unidades = soup.select('h3.puc-wp-encontre-seu-curso-unidade')
    # Persist every campus name found on the index page.
    for i in unidades:
        u = Unidade()
        u.unidade = i.text
        session.add(u)
        session.flush()
    # Visit each course page with the browser and scrape its details.
    for l in links:
        link = l['href']
        b.get(l['href'])
        b.implicitly_wait(2)
        sp_curso = BeautifulSoup(b.page_source, 'html.parser')
        curso = sp_curso.select_one('h2.puc-pl-titulo-pg').text.strip()
        ac = sp_curso.select_one('.puc-pl-pos-graduacao-conteudo-area-conhecimento ul')
        # NOTE(review): 'conhecimentos' is collected but never stored.
        conhecimentos = []
        for litag in ac.find_all('li'):
            conhecimentos.append(litag.text)
        div = sp_curso.find(id="ctl00_PlaceHolderMain_ctl08__ControlWrapper_RichHtmlField")
        # Concatenate every sizeable (> 20 chars) text fragment of the
        # rich-text body, \r-separated.
        texto = ''
        ptags = div.find_all("p")
        for tag in ptags:
            if len(tag.text) > 20:
                texto += '\r' + tag.text
        spantags = div.find_all("span")
        for tag in spantags:
            if len(tag.text) > 20:
                texto += '\r' + tag.text
        divs = div.find_all("div")
        for tag in divs:
            if len(tag.text) > 20:
                texto += "\r" + tag.text
        # Description = everything up to the "Área ..." heading, if present.
        m = re.search('(.*)\\s*(?=Área)', texto)
        try:
            descricao = m.group(0)
        except AttributeError:
            # No match: re.search returned None.
            descricao = None
            pass
        c = Curso()
        c.nome_curso = curso
        c.descricao = descricao
        c.texto = texto
        c.link = link
        c.unidade_id = 1  # NOTE(review): hard-coded campus id - confirm.
        session.add(c)
        # Flush so c.id is available for the child rows below.
        session.flush()
        ultags = div.find_all("ul")
        # First <ul> holds the course objectives, second the syllabus;
        # either may be missing (IndexError is swallowed deliberately).
        obj = []
        try:
            for litag in ultags[0].find_all('li'):
                obj.append(litag.text)
        except IndexError:
            pass
        for i in obj:
            o = Objetivo()
            o.objetivo = i
            o.curso_id = c.id
            session.add(o)
        programa = []
        try:
            for litag in ultags[1].find_all('li'):
                programa.append(litag.text)
        except IndexError:
            pass
        for i in programa:
            p = Programa()
            p.programa = i
            p.curso_id = c.id
            session.add(p)
        # NOTE(review): indentation reconstructed from a whitespace-mangled
        # source; the per-course commit placement is assumed - confirm.
        session.commit()
    b.quit()
def create_item(item: ItemModel):
    """Persist a new TodoItem built from the submitted payload.

    Flushes and commits immediately, then reports success to the client.
    """
    record = TodoItem(**item.dict())
    session.add(record)
    session.flush()
    session.commit()
    return {"status": "success"}