def count(id):
    """Run the scraper for the task with the given id and persist a Results row.

    Loads the task, scrapes its address, counts keyword matches, marks the
    task FINISHED on success, and always commits a Results row (zeroed on
    failure so a record of the attempt exists).

    :param id: primary key of the Tasks row to process
               (name kept for caller compatibility, though it shadows builtin id)
    """
    logger.info(f'Adding task for id: {id}')
    session = Session()
    task = session.query(Tasks).filter_by(id=id).first()
    # Default result: zero counts, recorded even if scraping fails.
    res = Results(address=task.address, words_count=0, http_status_code=0)
    try:
        scrpr = Scrapper(task.address)
    except Exception:
        # FIX: was a bare `except:` that silently swallowed everything,
        # including KeyboardInterrupt/SystemExit. Log so failures are visible.
        logger.exception(f'Failed to create Scrapper for {task.address}')
        scrpr = None
    if scrpr:
        err = scrpr.get_page()
        if not err:
            task.http_status_code, matches = scrpr.count_matches()
            task.task_status = 'FINISHED'
            res = Results(address=task.address,
                          words_count=matches,
                          http_status_code=task.http_status_code)
        else:
            # FIX: was print(err); use the module logger like the rest of
            # this function.
            logger.error(err)
    session.add(res)
    session.commit()
    logger.info(task)
    logger.info(res)
def move(index: int):
    """Attempt to slide the tile at *index* into the empty slot of a 4x4
    fifteen-puzzle stored in Redis under 'game_state'.

    Returns the (possibly updated) game state as a JSON string. The state is
    written back to Redis only when a tile actually moved.

    # assumes game_state JSON has keys: 'values' (16-element list with one
    # '' entry), 'move_count' (int) and 'win' (bool) — TODO confirm against
    # the code that initialises it.
    """
    game_state = json.loads(redis_client.get('game_state').decode())
    moved = False
    if not game_state['win']:
        values = game_state['values']
        empty_index = values.index('')
        # Tiles directly above and below the empty slot are always candidates.
        movable_indexes = {empty_index - 4, empty_index + 4}
        # Right neighbour only when the empty slot is not in the last column.
        if (empty_index + 1) % 4:
            movable_indexes.add(empty_index + 1)
        # Left neighbour only when the empty slot is not in the first column.
        if (empty_index - 1) % 4 != 3:
            movable_indexes.add(empty_index - 1)
        if index in movable_indexes:
            # Swap the chosen tile into the empty slot.
            values[empty_index] = values[index]
            values[index] = ''
            game_state['values'] = values
            moved = True
            game_state['move_count'] += 1
            # Win check: empty slot back in the last cell and the remaining
            # tiles in non-decreasing order.
            if values[-1] == '':
                last_value = values[0]
                win = True
                for value in values[1:-1]:
                    if value < last_value:
                        win = False
                        break
                    last_value = value
                game_state['win'] = win
                if win and DB_ACTIVE:
                    # Persist the winning move count.
                    result = Results(move_count=game_state['move_count'])
                    db.session.add(result)
                    db.session.commit()
    game_state = json.dumps(game_state)
    if moved:
        redis_client.set('game_state', game_state)
    return game_state
def upload(): """Upload Zipped Source""" if 'file' in request.files: filen = request.files['file'] _, extension = os.path.splitext(filen.filename.lower()) # Check for Valid ZIP if (filen and filen.filename and extension in settings.UPLD_ALLOWED_EXTENSIONS and filen.mimetype in settings.UPLD_MIME): filename = secure_filename(filen.filename) # Make upload dir if not os.path.exists(settings.UPLOAD_FOLDER): os.makedirs(settings.UPLOAD_FOLDER) # Save file zip_file = os.path.join(app.config['UPLOAD_FOLDER'], filename) filen.save(zip_file) # Get zip hash get_zip_hash = utils.gen_sha256_file(zip_file) # check if already scanned res = Results.query.filter(Results.scan_hash == get_zip_hash) if not res.count(): # App analysis dir app_dir = os.path.join(app.config['UPLOAD_FOLDER'], get_zip_hash + "/") # Make app analysis dir if not os.path.exists(app_dir): os.makedirs(app_dir) # Unzip utils.unzip(zip_file, app_dir) # Do scan scan_results = general_code_analysis([app_dir]) print "[INFO] Static Analysis Completed!" _, sha2_hashes, hash_of_sha2 = utils.gen_hashes([app_dir]) tms = datetime.datetime.fromtimestamp( time.time()).strftime('%Y-%m-%d %H:%M:%S') # Save Result print "[INFO] Saving Scan Results!" res_db = Results( get_zip_hash, [app_dir], sha2_hashes, hash_of_sha2, scan_results['sec_issues'], scan_results['good_finding'], scan_results['missing_sec_header'], scan_results['files'], scan_results['total_count'], scan_results['vuln_count'], [], [], tms, ) db_session.add(res_db) db_session.commit() return jsonify({ "status": "success", "url": "result/" + get_zip_hash }) return jsonify({"status": "error", "desc": "Upload Failed!"})
def create_question():
    """Admin endpoint: create/replace the single trivia question (id=1).

    On a valid form submit, upserts the Questions row with id 1 and records a
    Results row with the same fields, then redirects back to the question
    admin page. Non-admins get a 400.
    """
    if not session.get('admin'):
        abort(400)
    form = Question_CreateForm()
    if form.validate_on_submit():
        question = form.question.data
        select1 = form.select1.data
        select2 = form.select2.data
        select3 = form.select3.data
        select4 = form.select4.data
        # A Results row is recorded on every submit, in both branches.
        result = Results(question=question, select1=select1, select2=select2,
                         select3=select3, select4=select4)
        que = Questions.query.filter_by(id=1).first()
        if que:
            # Update the existing singleton question in place.
            que.question = question
            que.select1 = select1
            que.select2 = select2
            que.select3 = select3
            que.select4 = select4
        else:
            que = Questions(id=1, question=question, select1=select1,
                            select2=select2, select3=select3, select4=select4)
        db.session.add(que)
        db.session.add(result)
        # FIX: neither branch committed, so (absent autocommit) nothing was
        # ever persisted.
        db.session.commit()
        flash('增加成功')
        return redirect(url_for('admin_coupon.question'))
    return render_template('create_question.html', form=form)
def save_to_db(self,const): # store in db, uses self.data Extract objects, iterate through and generate the appropriate injections for the db if const is "search_term": s_db = Search(date=timezone.now(),term=self.data[0].search_term) print "Adding %s data into db."% s_db s_db.save() for q in self.data: print q # save data around Search term for each Extract object in self.data # each Extract object has multiple links, get them all and associate to the created search term try: for url in q.job_urls: l_db = Links(search=s_db, link=url) l_db.save() # each Extract object has a single location, get it and associate it to search term if q.loc != "": loc_db = Location(city=q.city,state=q.state) loc_db.save() # each Extract object has a summary attribute that has all the data, modify the data pool to fit the parameters specified by user # and store the data in a Results table associated to its Search table summary = q.pool_summary(pos=self.pos, with_filter=self.with_filter, lower=self.lower, with_bigrams=self.with_bigrams) data = summary[('Word', 'Word_Count', 'POS_Tag')] for tup in data: w = str(tup[0]) c = tup[1] try: p = str(tup[2]) except IndexError: p = "" r_db = Results(search=s_db,location=loc_db,word=w,count=c,pos=p,is_bigram=self.with_bigrams) r_db.save() except: if q.loc != "": loc_db = Location(city=q.city,state=q.state) loc_db.save() r_db = Results(search=s_db,location=loc_db,word="N/A",count=0,pos="",is_bigram=False) r_db.save()
def api_polls():
    """POST: save the submitted survey answers for the session user, then
    log the user out and confirm. GET: return a hard-coded list of polls as
    JSON.
    """
    if request.method == 'POST':
        # get the poll and save it in the database
        result = json.loads(request.data)
        print(result)
        print('session user id is: ' + str(session['user']))
        for key, value in result.items():
            # FIX: the empty-value check previously ran AFTER add+commit, so
            # invalid submissions still wrote rows to the database. Validate
            # first.
            if not value:
                return jsonify({'error': 'value for {} is empty'.format(key)})
            r = Results(user_id=session['user'], skill=key, option=value)
            print(r.skill)
            db.session.add(r)
        # FIX: commit once after all rows instead of once per row.
        db.session.commit()
        if 'user' in session:
            session.pop('user')
        logout_user()
        flash('Thanks for completing the survey!')
        return jsonify({'message': 'Survey was completed succesfully'})
    else:
        # query the db and return all the polls as json
        # polls = Topics.query.join(Polls).all()
        all_polls = [{
            'title': 'Which side is going to win the EPL this season',
            'options': [{
                'name': 'Arsenal',
                'vote_count': None
            }, {
                'name': 'Spurs',
                'vote_count': None
            }]
        }, {
            'title': 'Whos better liverpool or city',
            'options': [{
                'name': 'Liverpool FC',
                'vote_count': None
            }, {
                'name': 'Manchester city',
                'vote_count': None
            }]
        }]
        resp = jsonify(all_polls)
        print(resp)
        return resp
def handler(message):
    """Consume a task-result message from the queue and persist it.

    Unpickles the message body, loads the matching Tasks row, creates a
    Results row, copies the HTTP status onto both when present, and commits.

    :param message: queue message whose .body is a pickled dict with
                    'task_id' and 'result' keys
    :returns: True (acknowledges the message)
    """
    # NOTE(review): pickle.loads on a message bus is only safe with fully
    # trusted producers — it executes arbitrary code on malicious input.
    msg = pickle.loads(message.body)
    task_id = int(msg.get('task_id'))
    result = msg.get('result')
    task_item = Tasks.query.filter_by(id=task_id).first()
    result_item = Results(address=task_item.address,
                          words_count=result.get('words_count', None))
    try:
        status_code = int(result.get('status_code'))
        task_item.http_status = status_code
        result_item.http_status_code = status_code
    except (TypeError, ValueError):
        # FIX: was a bare `except:`. Only int() conversion failures (missing
        # or non-numeric status_code) are expected here; anything else should
        # propagate.
        pass
    db.session.add(task_item)
    db.session.add(result_item)
    db.session.commit()
    print(task_item.address, result)
    return True
def getvidDetails(ids):
    """ Retrieve title and image from YoutubeAPI

    Initially retrieves the title and image of the requested videos from the
    YoutubeAPI, then dumps the data onto the database for later use to
    display to the user. Generator: yields (video_id, title, thumbnail_url)
    per item, committing each to the DB as it goes.

    Keyword Arguments:
    ids -- List of ids retrieved from the search in getvidId()
    """
    print('Inside getviddetails')
    key = DEVELOPER_KEY
    # NOTE(review): `region` is assigned but never used — the URL below has
    # no regionCode parameter; confirm whether it was meant to be included.
    region = "IN"
    url = "https://www.googleapis.com/youtube/v3/videos?part=snippet&id={ids}&key={api_key}"
    r = requests.get(url.format(ids=",".join(ids), api_key=key))
    js = r.json()
    items = js["items"]
    for item in items:
        try:
            result = Results(vid_name=item["snippet"]["title"],
                             vid_img=item["snippet"]["thumbnails"]["high"]["url"],
                             vid_id=item["id"])
            print(type(result))
            # presumably a module-level accumulator list — verify at caller
            dbresults.append(result)
            db.session.add(result)
            print('Added result to session')
            db.session.commit()
            print('DB Addition Success')
        except Exception as e:
            # Roll back the failed row but keep processing remaining items.
            db.session.rollback()
            print("Unable to add item to database.")
            print(e)
        finally:
            # NOTE(review): closes the session after every item and reuses it
            # next iteration — works with Flask-SQLAlchemy's scoped session,
            # confirm for this project's session factory.
            db.session.close()
        yield item["id"], item["snippet"]["title"], item["snippet"]["thumbnails"]["high"]["url"]
def load_tournament_file():
    """Load tournaments.json and upsert Tournaments/Flights rows.

    Skips empty names, satellites, and rows without a results link. New
    tournaments pull casino details from a local casinos.json cache; existing
    tournaments just get an extra Flights row. Commits once at the end.
    """
    path = os.environ['APP_PATH']
    with open(path + '/src/jsons/tournaments.json') as f:
        data = json.load(f)
    # casino cache so not to request for same casinos
    path_cache = os.environ['APP_PATH'] + '/src/jsons/casinos.json'
    if os.path.exists(path_cache):
        with open(path_cache) as f:
            cache = json.load(f)
    else:
        cache = {}
    for r in data:
        # Do not add these to Swap Profit
        if r['Tournament'].strip() == '' or \
           'satelite' in r['Tournament'].lower() or \
           r['Results Link'] == False:
            continue
        trmnt = Tournaments.query.get(r['Tournament ID'])
        trmnt_name, flight_day = utils.resolve_name_day(r['Tournament'])
        # Date carries the day, Time the clock — concatenated for parsing.
        start_at = datetime.strptime(r['Date'][:10] + r['Time'],
                                     '%Y-%m-%d%H:%M:%S')
        trmntjson = {
            'id': r['Tournament ID'],
            'name': trmnt_name,
            'start_at': start_at,
            'results_link': str(r['Results Link']).strip()
        }
        flightjson = {
            'start_at': start_at,
            'day': flight_day
        }
        if trmnt is None:
            # New tournament: enrich with casino details from the cache.
            # NOTE(review): cache.get() may return None for an unknown casino
            # ID, which would raise below — confirm cache completeness.
            casino = cache.get(r['Casino ID'])
            print("THIS CASINO", f'{casino}')
            trmntjson = {
                **trmntjson,
                'casino': casino['name'],
                'address': casino['address'].strip(),
                'city': casino['city'].strip(),
                'state': casino['state'].strip(),
                'zip_code': str(casino['zip_code']).strip(),
                'longitude': float(casino['longitude']),
                'latitude': float(casino['latitude'])
            }
            # Create tournament
            trmnt = Tournaments(**trmntjson)
            db.session.add(trmnt)
            db.session.flush()  # get trmnt.id before the commit
            # Create flight
            db.session.add(Flights(tournament_id=trmnt.id, **flightjson))
        else:
            # Create flight
            db.session.add(Flights(tournament_id=trmnt.id, **flightjson))
    db.session.commit()
    return True


# NOTE(review): everything below appears to be the body of a DIFFERENT
# function (a results-spreadsheet processor iterating a pandas DataFrame
# `df`) whose `def` line is missing from this view — it uses `return` and
# returns (data, message) tuples. Preserved verbatim; re-attach to its
# enclosing function before running.
'''
{
"api_token": 1
"tournament_id": 45,
"tournament_buyin": 150,
"users": {
"*****@*****.**": {
"place": 11,
"winnings": 200
}
}
}
'''
trmnt_data = {}
print('hello')
for index, r in df.iterrows():
    # print('r', r)
    # Get the trmnt data that's in the first row
    if index == 0:
        # Check trmnt existance
        trmnt = Tournaments.query.get(r['Tournament ID'])
        print('trmnt.buy_in', trmnt.buy_in)
        if trmnt is None:
            return None, {
                'error': 'This tournament ID was not found: ' + str(r['Tournament ID'])
            }
        print('tournament', trmnt)
        trmnt.results_link = (os.environ['API_HOST'] +
                              '/results/tournament/' + str(r['Tournament ID']))
        # Check to see if file was uploaded already
        entry = Results.query.filter_by(
            tournament_id=r['Tournament ID']).first()
        if entry is not None:
            return None, {
                'error': 'This tournament ID has already been uploaded: ' + str(trmnt.id)
            }
        # Swap Profit JSON
        trmnt_data = {
            'api_token': utils.sha256(os.environ['API_TOKEN']),
            'tournament_id': trmnt.id,
            'tournament_buyin': trmnt.buy_in,
            'users': {}
        }
    user_id = r['User ID'] or None
    # Add user to the Swap Profit JSON
    if user_id:
        user = Users.query.get(user_id)
        if user is None:
            db.session.rollback()
            return None, {
                'error': 'Couldn\'t find user with ID: ' + str(user_id)
            }
        # Swap Profit JSON
        trmnt_data['users'][user.email] = {
            'place': r['Place'],
            'winnings': r['Winnings']
            # 'user_id': user.id
        }
    # Add to PokerSociety database
    db.session.add(Results(
        tournament_id=trmnt_data['tournament_id'],
        user_id=user_id,
        full_name=r['Full Name'],
        place=r['Place'],
        nationality=r['Nationality'],
        winnings=r['Winnings']
    ))
# If no errors, commit all data
db.session.commit()
# swapprofit = Subscribers.query.filter_by(company_name='Swap Profit').first()
# if swapprofit is None:
#     return 'Swap Profit not a subscriber'
# resp = requests.post(
#     os.environ['SWAPPROFIT_API_HOST'] + '/results/update',
#     json=trmnt_data )
# print('resp', resp)
return trmnt_data, {
    'message': 'Results excel processed successfully'
}
def results(request, query_id):
    """Django view: show sentiment-tagged search results for a stored Query.

    If a cached Results row exists for the query it is reused; otherwise an
    Elasticsearch query string is assembled from the query's keywords,
    phrases, Twitter usernames and Facebook pages, executed via
    parse_query_for_sentiments, and the response cached in Results. Then
    per-category keyword occurrence and sentiment counters are built for the
    template. Anonymous users are redirected to "/".
    (Python 2 source: print statements and is_authenticated() call preserved.)

    :param request: Django HttpRequest
    :param query_id: primary key of the Query to display
    :raise: Http404 on ValueError while building/loading results
    """
    if request.user.is_authenticated():
        reponseToPresent = []
        categories_counter = []
        positive_counter = 0
        negative_counter = 0
        neutral_counter = 0
        try:
            ## Must store the response, if there is no response, otherwise
            ## return the stored one.
            ## IF NOT STORED
            query = Query.objects.get(id=query_id)
            query_params = Query_properties.objects.filter(query=query)
            results = Results.objects.filter(query=query)
            # run for all categories
            list_properties = get_query_properties(query)
            # all the available properties, e.g. keywords, twitter, facebook
            properties = list_properties["Properties"]
            phrases = list_properties["Phrases"]
            keywords = list_properties["Keywords"]
            twitter_usernames = list_properties["Twitter"]
            facebook_pages = list_properties["Facebook"]
            # These strings accumulate fragments of the Elasticsearch query:
            query_properties = ''   # query_string clause (keywords)
            phrase_properties = ''  # match_phrase clauses
            twitter_properties = ''
            facebook_properties = ''
            ## Run the query or bring the results from the Database
            if results:
                # bring it from the database (cached JSON response)
                response = results.__getitem__(0).results
                response = json.loads(response)
            else:
                # make a new query
                lang = Query_languages.objects.get(query=query_id)
                #####
                # Get all the properties, keywords, phrases, twitter usernames
                #####
                for kwrd in keywords.keys():
                    temp = ''
                    for keyword_prop in keywords[kwrd]:
                        temp += "%s," % keyword_prop
                    # 'OR' joins keyword groups loosely; otherwise each group
                    # is a required (+) clause.
                    if query.venn == 'OR':
                        query_properties += '%s,' % remove_comma_at_the_end(temp)
                    else:
                        query_properties += '+(%s)' % remove_comma_at_the_end(temp)
                # Remove any empty keyword
                query_properties = query_properties.replace('+()', '')
                query_properties = remove_comma_at_the_end(query_properties)
                # if empty list, no properties, no query string, go to phrases
                if query_properties != '':
                    # Pick the language-specific text field(s).
                    if lang:
                        if lang.language == "es":
                            query_properties = '{"query_string":{"query":"%s","fields":["%s"]}}' % (
                                query_properties, "text_no_url_es")
                        elif lang.language == "en":
                            query_properties = '{"query_string":{"query":"%s","fields":["%s"]}}' % (
                                query_properties, "text_no_url")
                        else:
                            query_properties = '{"query_string":{"query":"%s","fields":["%s","%s"]}}' % (
                                query_properties, "text_no_url", "text_no_url_es")
                    else:
                        query_properties = '{"query_string":{"query":"%s","fields":["%s"]}}' % (
                            query_properties, "text_no_url")
                # Create the phrase query
                for phrase_list in phrases.keys():
                    for phrase in phrases[phrase_list]:
                        if lang:
                            if lang.language == "es":
                                phrase_properties += '{"match_phrase":{"doc.text_no_url_es":"%s"}},' % phrase
                            elif lang.language == "en":
                                phrase_properties += '{"match_phrase":{"doc.text_no_url":"%s"}},' % phrase
                            else:
                                phrase_properties += '{"match_phrase":{"doc.text_no_url":"%s"}},{"match_phrase":{"doc.text_no_url_es":"%s"}},' % (
                                    phrase, phrase)
                        else:
                            phrase_properties += '{"match_phrase":{"doc.text_no_url":"%s"}},' % phrase
                phrase_properties = remove_comma_at_the_end(phrase_properties)
                for twitter_username in twitter_usernames:
                    twitter_properties += '{"match_phrase_prefix" : { "doc.user_screen_name":"twitter:%s" }},' % twitter_username.replace(
                        " ", "").replace("@", "")
                twitter_properties = remove_comma_at_the_end(twitter_properties)
                for facebook_page in facebook_pages:
                    facebook_properties += '{"match_phrase_prefix" : { "doc.user_screen_name":"facebook:%s" }},' % facebook_page.replace(
                        " ", "")
                facebook_properties = remove_comma_at_the_end(facebook_properties)
                ###
                # query constructor: join all non-empty fragments into one
                # bool/should clause
                ###
                query_all = ''
                if (query_properties != ''):
                    query_all += '%s,' % query_properties
                if (phrase_properties != ''):
                    query_all += '%s,' % phrase_properties
                if (twitter_properties != ''):
                    query_all += '%s,' % twitter_properties
                if (facebook_properties != ''):
                    query_all += '%s,' % facebook_properties
                query_all = remove_comma_at_the_end(query_all)
                # Date range filter uses epoch milliseconds.
                query_all = '{"query":{"filtered":{"query":{"bool":{"should":[%s],"minimum_should_match" : 1}},"filter":{"bool":{"must":[{"range":{"doc.created_at":{"from":"%s","to":"%s"}}}],"_cache":true}}}},"from":0,"size":10000, "sort":["_score"]}' % (
                    query_all,
                    int(time.mktime(query.from_date.timetuple()) * 1000),
                    int(time.mktime(query.to_date.timetuple()) * 1000))
                print query_all
                response = parse_query_for_sentiments(query_all)
                # Cache the raw response for next time.
                newResponse = Results(query=query,
                                      results=json.dumps(response),
                                      updated=datetime.now())
                newResponse.save()
            ## count the occurrences of keywords in response
            for property in properties.keys():
                word_counter = []
                r = re.compile("|".join(r"\b%s\b" % w.lower()
                                        for w in properties[property].split(",")),
                               re.I)
                # temporary solution to double counting...
                number = Counter(re.findall(r, ""))
                for message in response:
                    # dict_you_want = { "text": message["_source"]["doc"]["text"] }
                    # print dict_you_want
                    number = number + Counter(re.findall(
                        r,
                        (message["_source"]["doc"]["text"]).lower().replace("@", " ").replace("#", " ")))
                # for lala in properties[property].split(","):
                #     print number[lala]
                #     print lala
                for phrase in properties[property].split(","):
                    # number = json.dumps(response).count(phrase)
                    # Build the per-word counter record as JSON then parse it.
                    text = '{"name":"%s","times":%i, "sentiment":%i, "positive":%i, "negative":%i, "neutral":%i}' % (
                        phrase.lower(), number[phrase.lower()], 0, 0, 0, 0)
                    # print text
                    word_counter.append(json.loads(text))
                text = {}
                text["category"] = property
                text["properties"] = word_counter
                categories_counter.append(text)
            for message in response:
                doc_text = message["_source"]["doc"]["text"]
                if message["_source"]["doc"]["senti_tag"] == "positive":
                    # for pie diagram metrics
                    positive_counter += 1
                elif message["_source"]["doc"]["senti_tag"] == "negative":
                    # for pie diagram metrics
                    negative_counter += 1
                elif message["_source"]["doc"]["senti_tag"] == "neutral":
                    neutral_counter += 1
                # if message["_score"] > 0.05:
                if True:
                    reponseToPresent.append(message["_source"])
                    ## print "Just Added: %s" % message["_source"]["doc"]
                # Per-category sentiment tallies for each matched keyword.
                try:
                    for category in categories_counter:
                        r2 = re.compile("|".join(r"\b%s\b" % w["name"].lower()
                                                 for w in category["properties"]),
                                        re.I)
                        number2 = Counter(re.findall(
                            r2,
                            (json.dumps(message["_source"]["doc"]["text"])).lower().replace("@", " ").replace("#", " ")))
                        if True:
                            for property in category["properties"]:
                                if message["_source"]["doc"]["senti_tag"] == "positive":
                                    if (number2[property["name"].lower()]) > 0:
                                        property["sentiment"] = property["sentiment"] + 1
                                        property["positive"] = property["positive"] + 1
                                elif message["_source"]["doc"]["senti_tag"] == "negative":
                                    if (number2[property["name"].lower()]) > 0:
                                        property["sentiment"] = int(property["sentiment"]) - 1
                                        property["negative"] = property["negative"] + 1
                                elif message["_source"]["doc"]["senti_tag"] == "neutral":
                                    if (number2[property["name"].lower()]) > 0:
                                        property["neutral"] = property["neutral"] + 1
                except:
                    # NOTE(review): bare except silently skips a message on any
                    # error in the tally loop — flagged, left unchanged here.
                    continue
        except ValueError:
            # print ValueError.message
            raise Http404()
        return render(request, "results.html",
                      {"query_id": query.id,
                       "query_name": query.name,
                       "query": query_params,
                       "response": reponseToPresent,
                       "positive": positive_counter,
                       "negative": negative_counter,
                       "neutral": neutral_counter,
                       "categories": categories_counter})
    else:
        return HttpResponseRedirect("/")
# Read expressions from stdin until one parses, evaluate it, persist the
# result, and ask whether to continue.
first_number = False
while not first_number:
    my_string = input()
    # FIX: parse_expression was called twice (once to test, once to unpack);
    # parse once and unpack the same result.
    parsed = parse_expression(my_string)
    if parsed:
        first_number, second_number, action = parsed
        break
our_example = Calculator(first_number, second_number, action)
# FIX: calculate() was invoked three times (test, print, persist); compute once.
result = our_example.calculate()
# FIX: `if our_example.calculate():` treated a legitimate result of 0 as a
# division-by-zero failure. Test for None instead.
# NOTE(review): assumes calculate() returns None on division by zero — TODO
# confirm against the Calculator class.
if result is not None:
    print("Your result is: ", round(result, 4))
    to_alchemy_results = Results(first_number, action, second_number,
                                 result, our_user_id)
    alchemy_actions.add_res(to_alchemy_results)
else:
    print("It's not possible to divide by zero!")
print("Do you want to continue? (y/n)")
while True:
    again = (input())
    if again == "y":
        break
    elif again == "n":
        break
    else:
        print("Try again !")
def update_events(self, date_list):
    """
    For all promotions in the database, search for new results and add to DB

    Parameters
    ----------
    date_list : list
        List of dates to retrieve results for

    Returns
    -------
    updated_shows : str
        Newline-joined list of newly saved shows ("Promotion - Title"),
        for use in notifications
    """
    logging.info("Updating events")
    # Build updated_shows list used later for notifications
    updated_shows = []
    logging.debug(Promotions.objects())
    # Get list of promotions from database
    for promotion in Promotions.objects():
        logging.info(f"Finding Events for {promotion.name}")
        logging.debug(f"Creating Promotion object for {promotion['name']}")
        # For each promotion, build a list of events
        events = self.get_events(promotion, date_list)
        if events:
            # Continue only if there are any events found for the promotion
            # in the time frame
            logging.info(f"Found events for {promotion.name}")
            for event in events:
                # For each event, add the promotion name to its attributes
                event['promotion'] = promotion.name
                # Check whether the show already exists, based on the event
                # name and date
                if not Results.objects(title=event['title'], date=event['date']):
                    # If it doesn't already exist, save it to the db and add
                    # to the list of updated shows
                    db_show = Results(**event).save()
                    logging.info(
                        f"Saved document ID {db_show.id} for {event['promotion']}, {event['title']}, {event['date']}"
                    )
                    updated_shows.append(event['promotion'] + " - " + event['title'])
                else:
                    # If show is already in the db, update the details.
                    # NOTE(review): full_result=True makes update() return the
                    # raw result object (hence .modified_count) — confirm this
                    # matches the installed mongoengine version.
                    update = Results.objects(
                        title=event['title'],
                        date=event['date']).update(**event, full_result=True)
                    if update.modified_count > 0:
                        logging.info(
                            f"Updated DB entry for {event['promotion']}, {event['title']}, {event['date']}"
                        )
                    else:
                        logging.info(
                            f"DB entry exists for {event['promotion']}, {event['title']}, {event['date']}"
                        )
        else:
            logging.info(f"No events found for {promotion.name}")
    # Create string of updated shows for notifications
    updated_shows = '\n'.join(updated_shows)
    return updated_shows
def write_to_db(name, value):
    """Stage a single (name, value) Results row on the module-level db session.

    Note: the row is only added, not committed — the caller is responsible
    for committing the session.
    """
    db.add(Results(name=name, value=value))
def question():
    """Let the user answer the trivia question.

    GET: fetch a fresh trivia item from the online API, upsert it into the
    singleton Results row (id=1), and render it. POST: grade the submitted
    answer; logged-in users gain 10 points for a correct answer (updating
    their high score) or have their current score reset on a wrong one.
    """
    # "GET" method
    if request.method == "GET":
        # get trivia file from online API
        triviafile = getTrivia(Choice.query.get(1).choice)
        # create variables
        question, correct_answer, incorrect_answer1, incorrect_answer2, incorrect_answer3 = triviaItems(triviafile)
        # create shuffled variables for db
        answer1, answer2, answer3, answer4 = shuffle(correct_answer, incorrect_answer1, incorrect_answer2, incorrect_answer3)
        # Fetch the singleton row once instead of re-querying per field.
        trivia = Results.query.get(1)
        if trivia is None:
            # if table is empty, insert values
            trivia = Results(question, answer1, answer2, answer3, answer4, correct_answer)
            db.session.add(trivia)
        else:
            # update the table otherwise
            trivia.question = question
            trivia.answer1 = answer1
            trivia.answer2 = answer2
            trivia.answer3 = answer3
            trivia.answer4 = answer4
            trivia.correct_answer = correct_answer
        db.session.commit()
        # query for question and results
        trivia = Results.query.get(1)
        return render_template('question.html', trivia=trivia)
    # "POST" method
    else:
        # create dictionary marking which stored answer is the correct one
        trivia = Results.query.get(1)
        answerdict = {"answer1": "incorrect", "answer2": "incorrect",
                      "answer3": "incorrect", "answer4": "incorrect"}
        if trivia.correct_answer == trivia.answer1:
            answerdict["answer1"] = "correct"
        elif trivia.correct_answer == trivia.answer2:
            answerdict["answer2"] = "correct"
        elif trivia.correct_answer == trivia.answer3:
            answerdict["answer3"] = "correct"
        elif trivia.correct_answer == trivia.answer4:
            answerdict["answer4"] = "correct"
        # if user is not logged in
        if session.get("user_id") is None:
            if answerdict[request.form.get("answer")] == "correct":
                flash("Answer is correct!", "success")
                return redirect(url_for("proceed"))
            else:
                flash("Answer is wrong!", "danger")
                return redirect(url_for("proceed"))
        # if user is logged in
        else:
            user = User.query.get(current_user.id)
            # correct answer
            if answerdict[request.form.get("answer")] == "correct":
                flash("Answer is correct! You have earned 10 points!", "success")
                # add 10 points to user's current score
                user.currentscore += 10
                # FIX: was `highscore += 10`, which overshoots the true high
                # score whenever the previous current score was below it.
                # Set the high score to the new current score instead.
                if user.currentscore > user.highscore:
                    user.highscore = user.currentscore
                db.session.commit()
                return redirect(url_for("proceed_online"))
            # incorrect answer
            else:
                flash("Answer is wrong! Your score has been reset to 0.", "danger")
                # reset current score to 0
                user.currentscore = 0
                db.session.commit()
                return redirect(url_for("proceed_online"))