def relevanced():
    """Log the visit and serve the relevance-extraction landing page."""
    remote_addr = request.environ['REMOTE_ADDR']
    query_args = request.args
    logged_path = '/relevance-extraction?{}'.format(query_args)
    visitor_logger.post_visit_data('web', remote_addr, int(time.time()),
                                   logged_path, 'get')
    return render_template("relevance.html")
def activity_time_trend():
    """Log the visit and serve the activity-over-time trend page."""
    remote_addr = request.environ['REMOTE_ADDR']
    query_args = request.args
    logged_path = '/activity_time_trend?{}'.format(query_args)
    visitor_logger.post_visit_data('web', remote_addr, int(time.time()),
                                   logged_path, 'get')
    return render_template("activity_time_trend.html")
def post(self):
    """API endpoint: extract placeness features from a caption/timestamp.

    Missing timestamps default to "now".  Returns the classifier's
    feature dict augmented with the echoed 'caption' and the timestamp
    coerced to int.
    """
    # Parse request parameters
    parser = reqparse.RequestParser()
    parser.add_argument('caption', type=str)
    parser.add_argument('timestamp', type=str)  # epoch seconds, e.g. 1433435470
    args = parser.parse_args(strict=True)
    # Internal function for the API request
    # fix: compare to None with `is`, not `==`.
    if args['timestamp'] is None:
        args['timestamp'] = int(time.time())
    # Logger
    client_ip = request.environ['REMOTE_ADDR']
    # fix: the separator had been mangled to '×tamp' (HTML-entity
    # corruption of '&times'); restore the intended '&timestamp'.
    input_params = '/feature_extraction.json?caption={}&timestamp={}'.format(
        args['caption'], args['timestamp'])
    visitor_logger.post_visit_data('api', client_ip, int(time.time()),
                                   input_params, 'get')
    value = category_classifier.feature_extraction(args['caption'],
                                                   args['timestamp'])
    value['caption'] = args['caption']
    value['timestamp'] = int(args['timestamp'])
    return value
def mood():
    """Serve the mood page for a single post, logging the request."""
    remote_addr = request.environ['REMOTE_ADDR']
    post_text = request.args.get('post')
    logged_path = '/mood?post={}'.format(post_text)
    visitor_logger.post_visit_data('web', remote_addr, int(time.time()),
                                   logged_path, 'get')
    return render_template("mood.html", post=post_text)
def sampleHotspotPlaceness():
    """Crawl an Instagram post URL and render its placeness breakdown."""
    target_url = request.args.get('sns_url')
    remote_addr = request.environ['REMOTE_ADDR']
    visitor_logger.post_visit_data(
        'web', remote_addr, int(time.time()),
        '/hotspot-placeness?sns_url={}'.format(target_url), 'get')
    # Display template; only 'location' gets filled from the crawl, the
    # remaining fields are placeholder values.
    proc_data = {
        'location': {},
        'season': '1',
        'mood': '2',
        'activity': '3',
        'isWeekend': '4',
        'maen': '5'
    }
    data_dict = instagram_post_crawler.extract_instagram_data(target_url)
    proc_data['location']['name'] = data_dict['location']['name']
    proc_data['location']['id'] = data_dict['location']['id']
    feature_ext_result = category_classifier.feature_extraction(
        data_dict['caption'], data_dict['timestamp'])
    return render_template("hotspot_placeness.html",
                           requested_url=target_url,
                           feature_ext_result=feature_ext_result,
                           post_data=data_dict,
                           proc_data=proc_data)
def getHotspotsOfDistrict():
    """List the hotspots belonging to the requested district."""
    district_name = request.args.get('district')
    remote_addr = request.environ['REMOTE_ADDR']
    visitor_logger.post_visit_data(
        'web', remote_addr, int(time.time()),
        '/district-hotspots?district={}'.format(district_name), 'get')
    hotspots = district_classifier.getHotspots(district_name)
    return render_template("sample.html", district=district_name,
                           hotspots=hotspots)
def sampleDistrict():
    """Serve the sample page for a district/activity/age query."""
    district_name = request.args.get('district')
    activity_name = request.args.get('activity')
    age_group = request.args.get('age')
    remote_addr = request.environ['REMOTE_ADDR']
    logged_path = '/placeness?district={}&activity={}&age={}'.format(
        district_name, activity_name, age_group)
    visitor_logger.post_visit_data('web', remote_addr, int(time.time()),
                                   logged_path, 'get')
    return render_template("sample.html", district=district_name,
                           activity=activity_name, age=age_group)
def get(self):
    """API endpoint: placeness of a district located by GPS or by name.

    Query params: lat/lon (GPS, takes precedence) or name; optional
    count (default 3).  Returns the top-`count` placeness entries, a 401
    payload when no location is given, or 404 when nothing is found.
    """
    # Parse GET parameters
    parser = reqparse.RequestParser()
    parser.add_argument('lat', type=str)
    parser.add_argument('lon', type=str)
    parser.add_argument('name', type=str)
    parser.add_argument('count', type=str)
    args = parser.parse_args(strict=True)
    districtName = ''
    count = 3  # default count value
    # Logger
    client_ip = request.environ['REMOTE_ADDR']
    input_params = '/district_placeness_extraction.json?lat={}&lon={}&name={}&count={}'.format(
        args['lat'], args['lon'], args['name'], args['count'])
    visitor_logger.post_visit_data('api', client_ip, int(time.time()),
                                   input_params, 'get')
    # Resolve the district: GPS coordinates win over an explicit name.
    if args['lat'] is not None and args['lon'] is not None:
        lat = float(args['lat'])
        lon = float(args['lon'])
        districtName = district_classifier.getDistrict(lat, lon)
    elif args['name'] is not None:
        districtName = args['name']
    else:  # error: place not specified
        return {
            'error': 401,
            'msg': 'bad request (GPS or placename not specified)'
        }
    # parse count
    if args['count'] is not None:
        count = int(args['count'])
    # fix: `districtName is not ''` compared object identity against a
    # literal (undefined behavior, SyntaxWarning on modern Python);
    # a plain truth test expresses the intent.
    if districtName:
        value = extract_placeness(districtName.lower(), count)
        if value is not None:
            return {
                'name': value['name'],
                'count': value['count']
            }, status.HTTP_200_OK
    # fix: previously fell through returning None (a 500 in Flask-RESTful)
    # when the district could not be resolved; report 404 uniformly.
    return {'msg': 'Placeness not found'}, status.HTTP_404_NOT_FOUND
def hotspot_mood():
    """Render the mood-distribution page for a single hotspot."""
    remote_addr = request.environ['REMOTE_ADDR']
    hotspot_id = request.args.get('hotspot_name')
    mode = request.args.get('mode')
    threshold = float(request.args.get('threshold'))
    visitor_logger.post_visit_data(
        'web', remote_addr, int(time.time()),
        '/hotspot_mood?hotspot_id={}'.format(hotspot_id), 'get')
    result = getHotspotMoodDistribution(threshold, mode, hotspot_id)
    return render_template("hotspot_mood.html",
                           mood=result['Mood'],
                           mood_credibility=result['MoodCredibility'],
                           hotspot=result['Hotspot'],
                           hotspot_id=hotspot_id)
def get(self):
    """API endpoint: placeness extraction for one hotspot id."""
    # Parse GET parameters
    parser = reqparse.RequestParser()
    parser.add_argument('hotspot_id', type=str)
    args = parser.parse_args(strict=True)
    # Record the call before doing the work.
    remote_addr = request.environ['REMOTE_ADDR']
    logged_path = '/hotspot_placeness_extraction.json?hotspot_id={}'.format(
        args['hotspot_id'])
    visitor_logger.post_visit_data('api', remote_addr, int(time.time()),
                                   logged_path, 'get')
    # Internal function for the API request
    return category_classifier.hotspot_placeness_extraction(
        args['hotspot_id'])
def gmap():
    """Render a Google Map of POSTed locations, centred on a district."""
    # 'locations' arrives as a Python-literal string in the form body.
    parsed_locations = ast.literal_eval(request.form['locations'])
    district_name = request.form['district']
    remote_addr = request.environ['REMOTE_ADDR']
    visitor_logger.post_visit_data(
        'web', remote_addr, int(time.time()),
        '/gmap?district={}'.format(district_name), 'post')
    center = district_classifier.getCenter(district_name)
    return render_template("gmap2.html",
                           locations=json.dumps(parsed_locations),
                           center_lat=center[0],
                           center_lon=center[1])
def get(self):
    """API endpoint: resolve a GPS coordinate to its district name."""
    # Parse GET parameters
    parser = reqparse.RequestParser()
    parser.add_argument('lat', type=str)
    parser.add_argument('lon', type=str)
    args = parser.parse_args(strict=True)
    latitude = float(args['lat'])
    longitude = float(args['lon'])
    # Logger
    remote_addr = request.environ['REMOTE_ADDR']
    visitor_logger.post_visit_data(
        'api', remote_addr, int(time.time()),
        '/district.json?lat={}&lon={}'.format(latitude, longitude), 'get')
    # Internal function for the API request
    resolved = district_classifier.getDistrict(latitude, longitude)
    return {'name': resolved}, status.HTTP_200_OK
def get(self):
    """API endpoint: record a user's feedback score for a placeness."""
    # Parse GET parameters
    parser = reqparse.RequestParser()
    for field in ('hotspot_id', 'placeness', 'feedback_score'):
        parser.add_argument(field, type=str)
    args = parser.parse_args(strict=True)
    # Logger
    remote_addr = request.environ['REMOTE_ADDR']
    logged_path = '/user_feedback.json?hotspot_id={}&placeness={}&feedback_score={}'.format(
        args['hotspot_id'], args['placeness'], args['feedback_score'])
    visitor_logger.post_visit_data('api', remote_addr, int(time.time()),
                                   logged_path, 'get')
    return category_classifier.user_feedback(args)
def get(self):
    """API endpoint: mood distribution of a hotspot; 404 if unknown id."""
    # Parse GET parameters
    parser = reqparse.RequestParser()
    parser.add_argument('hotspot_name', type=str)
    parser.add_argument('mode', type=str)
    parser.add_argument('threshold', type=float)
    args = parser.parse_args(strict=True)
    # Logger
    remote_addr = request.environ['REMOTE_ADDR']
    visitor_logger.post_visit_data(
        'api', remote_addr, int(time.time()),
        '/mood?hotspot_name={}&mode={}&threshold={}'.format(
            args['hotspot_name'], args['mode'], args['threshold']), 'get')
    result = getHotspotMoodDistribution(args['threshold'], args['mode'],
                                        args['hotspot_name'])
    if result is None:
        return {'msg': 'No matched place id'}, status.HTTP_404_NOT_FOUND
    return result
def sampleHotspot():
    """Render a hotspot page with its placeness entries sorted by count."""
    hotspot_id = request.args.get('hotspot_id')
    remote_addr = request.environ['REMOTE_ADDR']
    visitor_logger.post_visit_data(
        'web', remote_addr, int(time.time()),
        '/hotspot?hotspot_id={}'.format(hotspot_id), 'get')
    hotspot_name = location_metadata.getLocationName(hotspot_id)
    extraction = category_classifier.hotspot_placeness_extraction(hotspot_id)
    # Most frequent placeness first.
    ranked = sorted(extraction['placeness'],
                    key=lambda entry: entry['count'],
                    reverse=True)
    return render_template("hotspot.html",
                           hotspot_id=hotspot_id,
                           hotspot_name=hotspot_name,
                           value={'hotspot_id': hotspot_id,
                                  'placeness': ranked})
def post(self):
    """API endpoint: register a new district from a name and bounding box.

    Expects 'name' plus the NW and SE corners (nw_lat/nw_lon,
    se_lat/se_lon).  Returns 200 on success, 400 when the name is
    missing.
    """
    parser = reqparse.RequestParser()
    parser.add_argument('name', type=str)
    parser.add_argument('nw_lat', type=float)
    parser.add_argument('nw_lon', type=float)
    parser.add_argument('se_lat', type=float)
    parser.add_argument('se_lon', type=float)
    args = parser.parse_args(strict=True)
    if args['name'] is not None:
        # addDistrictToDict(name, gps_nw, gps_se):
        # fix: gps_nw/gps_se were initialized as empty tuples (), which
        # do not support item assignment and raised TypeError on every
        # call; they must be dicts.
        gps_nw = {'lat': args['nw_lat'], 'lon': args['nw_lon']}
        gps_se = {'lat': args['se_lat'], 'lon': args['se_lon']}
        district_classifier.addDistrictToDict(args['name'], gps_nw, gps_se)
        # Logger
        client_ip = request.environ['REMOTE_ADDR']
        input_params = '/district.json/new?name={}&nw_lat={}&nw_lon={}&se_lat={}&se_lon={}'.format(
            args['name'],
            args['nw_lat'],
            args['nw_lon'],
            args['se_lat'],
            args['se_lon'],
        )
        visitor_logger.post_visit_data('api', client_ip, int(time.time()),
                                       input_params, 'post')
        return {'msg': 'ok'}, status.HTTP_200_OK
    else:
        return {
            'msg': 'place name not specified.'
        }, status.HTTP_400_BAD_REQUEST
def get(self):
    """API endpoint: rank venues by relevance to keyword/weight queries.

    Runs a random-walk relevance query either on one global graph
    (default method 'pagerank') or, for method=community, across every
    community subgraph, merging per-subgraph scores scaled by graph size
    and query-node coverage.  Returns {'venues': OrderedDict} mapping
    venue name -> relevance score, best first.
    """
    method = 'pagerank'
    graph = 'gexf/tf-idf_placeness_venue_with_cutoff.gexf'
    parser = reqparse.RequestParser()
    parser.add_argument('method', type=str)
    parser.add_argument('time_kwd', type=str)
    parser.add_argument('with_kwd', type=str)
    parser.add_argument('occasion_kwd', type=str)
    parser.add_argument('mood_kwd', type=str)
    parser.add_argument('weather_kwd', type=str)
    parser.add_argument('weight_date', type=str)
    parser.add_argument('weight_with', type=str)
    parser.add_argument('weight_occasion', type=str)
    parser.add_argument('weight_mood', type=str)
    parser.add_argument('weight_weather', type=str)
    parser.add_argument('weight_query', type=str)
    parser.add_argument('topk', type=int)
    args = parser.parse_args(strict=True)
    # Logger
    # fix: the format string lacked a weight_mood placeholder, so every
    # value from weight_mood onward was logged under the wrong key and
    # topk was dropped entirely.
    input_params = ('/relevance_extraction.json?method={}&time_kwd={}'
                    '&with_kwd={}&occasion_kwd={}&mood_kwd={}&weather_kwd={}'
                    '&weight_date={}&weight_with={}&weight_occasion={}'
                    '&weight_mood={}&weight_weather={}&weight_query={}'
                    '&topk={}').format(
                        args['method'], args['time_kwd'], args['with_kwd'],
                        args['occasion_kwd'], args['mood_kwd'],
                        args['weather_kwd'], args['weight_date'],
                        args['weight_with'], args['weight_occasion'],
                        args['weight_mood'], args['weight_weather'],
                        args['weight_query'], args['topk'])
    client_ip = request.environ['REMOTE_ADDR']
    visitor_logger.post_visit_data('api', client_ip, int(time.time()),
                                   input_params, 'get')
    if args['method'] is not None:
        method = args['method']
    # fix: copy-paste bug — the mood/with/time/weather keywords were all
    # being written into occasion_kwds, so those facets were never
    # actually queried.  Each argument now fills its own list.
    occasion_kwds = []
    if args['occasion_kwd'] is not None:
        occasion_kwds = [args['occasion_kwd']]
    mood_kwds = []
    if args['mood_kwd'] is not None:
        mood_kwds = [args['mood_kwd']]
    with_kwds = []
    if args['with_kwd'] is not None:
        with_kwds = [args['with_kwd']]
    time_kwds = []
    if args['time_kwd'] is not None:
        time_kwds = [args['time_kwd']]
    weather_kwds = []
    if args['weather_kwd'] is not None:
        weather_kwds = [args['weather_kwd']]
    # Per-facet weights; defaults apply when a weight is not supplied.
    weight_mood = 0.01
    if args['weight_mood'] is not None:
        weight_mood = float(args['weight_mood'])
    weight_time = 0.1
    if args['weight_date'] is not None:
        weight_time = float(args['weight_date'])
    weight_occasion = 1.0
    if args['weight_occasion'] is not None:
        weight_occasion = float(args['weight_occasion'])
    weight_with = 1.0
    if args['weight_with'] is not None:
        weight_with = float(args['weight_with'])
    weight_weather = 1.0
    if args['weight_weather'] is not None:
        weight_weather = float(args['weight_weather'])
    alpha = 0.1  # query (restart) weight for the random walk
    if args['weight_query'] is not None:
        alpha = float(args['weight_query'])
    user_kwds = []
    q = time_kwds + with_kwds + occasion_kwds + mood_kwds + weather_kwds + user_kwds
    topk = 10
    if args['topk'] is not None:
        topk = int(args['topk'])
    if method == "community":
        # Query every community subgraph and merge the scores.
        graphs = glob.glob('gexf/subgraphs/*.gexf')
        recs = []
        for g in graphs:
            recs.append(relevance())
            recs[-1].load_network(g)
            recs[-1].load_placeness_keywords()
        print("Assigning random walkers onto the network..")
        print("Done")
        print(
            "Estimating the relevance of venues given the query using the subgraphs…"
        )
        merged_result = dict()
        dummy_times = []
        running_times = []
        for rec in recs:
            result = rec.query(time_kwd=time_kwds,
                               with_kwd=with_kwds,
                               occasion_kwd=occasion_kwds,
                               mood_kwd=mood_kwds,
                               weather_kwd=weather_kwds,
                               user_kwd=user_kwds,
                               topk=topk,
                               weight_query=alpha,
                               weight_time=weight_time,
                               weight_mood=weight_mood,
                               weight_occasion=weight_occasion,
                               weight_with=weight_with,
                               weight_weather=weight_weather)
            dummy_times.append(result[1])
            # Scale per-subgraph scores by graph size and by how many
            # query keywords actually hit nodes in this subgraph.
            num_nodes = rec.graph.number_of_nodes()
            num_query_nodes = 0
            if type(q) is list:
                for q_i in q:
                    num_query_nodes += len(rec.get_placeness(q_i))
            else:
                num_query_nodes = len(rec.get_placeness(q))
            for res in result[0]:
                merged_result[venue_metadata[
                    res[0]]['name']] = res[1] * num_nodes * num_query_nodes
            running_times.append(np.sum(dummy_times))
        response = {
            'venues':
            OrderedDict(
                sorted(merged_result.items(),
                       key=operator.itemgetter(1),
                       reverse=True)[:topk])
        }
        print("Results:")
        print(response)
        return response
    else:
        # Single global graph (default 'pagerank' path).
        rec = relevance()
        rec.load_network(graph)
        rec.load_placeness_keywords()
        print("Assigning random walkers onto the network..")
        print("Done")
        print("Estimating the relevance of venues given the query…")
        result = rec.query(time_kwd=time_kwds,
                           with_kwd=with_kwds,
                           occasion_kwd=occasion_kwds,
                           mood_kwd=mood_kwds,
                           weather_kwd=weather_kwds,
                           user_kwd=user_kwds,
                           topk=topk,
                           weight_query=alpha,
                           weight_time=weight_time,
                           weight_mood=weight_mood,
                           weight_occasion=weight_occasion,
                           weight_with=weight_with,
                           weight_weather=weight_weather)
        result_obj = {}
        for res in result[0]:
            result_obj[venue_metadata[res[0]]['name']] = res[1]
        response = {
            'venues':
            OrderedDict(
                sorted(result_obj.items(),
                       key=operator.itemgetter(1),
                       reverse=True))
        }
        print("Results:")
        print(response)
        return response
def get(self):
    """API endpoint: infer a 7-axis mood distribution from a post's text.

    For each mood axis (friendly/cramp/traditional/modern/romantic/
    relaxing/loud) it counts seed-word occurrences in the post, adds a
    Word2Vec similarity bonus per seed word, maps the axis total through
    an axis-specific gain curve, then normalizes across axes.  With
    mode='binary' each normalized score is thresholded to 0/1.  Returns
    the mood dict plus a credibility score, or 404 for an empty post.
    """
    # Parse GET parameters
    parser = reqparse.RequestParser()
    parser.add_argument('post', type=str)
    parser.add_argument('mode', type=str)
    parser.add_argument('threshold', type=float)
    args = parser.parse_args(strict=True)
    # Logger
    client_ip = request.environ['REMOTE_ADDR']
    input_params = '/mood?post={}&mode={}&threshold={}'.format(
        args['post'], args['mode'], args['threshold'])
    visitor_logger.post_visit_data('api', client_ip, int(time.time()),
                                   input_params, 'get')
    textTotal = args['post']
    # Binary-mode cutoff, clamped to [0, 1].  NOTE(review): a threshold
    # of exactly 0 is falsy and silently keeps the 0.2 default.
    threshold = 0.2
    if args['threshold']:
        threshold = removeOutBound(args['threshold'], 0, 1.0)
    # choose 'binary' or None
    thresholdMode = args['mode']
    model = Word2Vec.load("./load/model/insta_160929.model")
    # Seed keyword lists per mood axis; the file holds one JSON object
    # per line (the last line read wins if there are several).
    with open('./load/' + 'moodWords.json', 'r') as infile:
        for line in infile:
            # json_string = line.read().decode()
            moodWordSeed = json.loads(line)
    list_friendlyWords = moodWordSeed['friendly']
    list_crampWords = moodWordSeed['cramp']
    list_traditionalWords = moodWordSeed['traditional']
    list_modernWords = moodWordSeed['modern']
    list_romanticWords = moodWordSeed['romantic']
    list_relaxingWords = moodWordSeed['relaxing']
    list_loudWords = moodWordSeed['loud']
    # Raw per-axis accumulators (m1..m7) and credibility (mc).
    m1 = 0
    m2 = 0
    m3 = 0
    m4 = 0
    m5 = 0
    m6 = 0
    m7 = 0
    mc = 0
    linecount = 10
    # Piecewise-linear maps: credMap/modelMap feed the credibility
    # score; the scoreMap* variants give each axis a different gain.
    credMap = interp1d([0.1, 5], [0.1, 0.8])
    modelMap = interp1d([0, 1], [0, 0.2])
    scoreMap = interp1d([0, linecount], [0, 1.0])
    scoreMapXLarge = interp1d([0, linecount], [0, 0.7])
    scoreMapLarge = interp1d([0, linecount], [0, 0.8])
    scoreMapSmall = interp1d([0, linecount], [0, 1.2])
    scoreMapXSmall = interp1d([0, linecount], [0, 1.5])
    totalCount = 0
    count = 0
    scoreByModel = 0
    # Axis 1: friendly ('친절' ≈ "kind/friendly"); the bare except
    # swallows words absent from the Word2Vec vocabulary.
    for w in list_friendlyWords:
        count += textTotal.count(w)
        try:
            scoreByModel += modelMap(model.similarity(w, u'친절'))
        except:
            pass
    count += scoreByModel
    m1 += scoreMapLarge(removeOutBound(count, 0, linecount))
    totalCount += count
    count = 0
    scoreByModel = 0
    print('=== Mood-inferencing Keywords ===')
    # Axis 2: cramped ('답답한' ≈ "stuffy/cramped").
    for w in list_crampWords:
        count += textTotal.count(w)
        if textTotal.count(w) > 0:
            print(w)
        try:
            scoreByModel += modelMap(model.similarity(w, u'답답한'))
        except:
            pass
    count += scoreByModel
    m2 += scoreMapSmall(removeOutBound(count, 0, linecount))
    totalCount += count
    count = 0
    scoreByModel = 0
    # Axis 3: traditional ('전통적').
    for w in list_traditionalWords:
        count += textTotal.count(w)
        if textTotal.count(w) > 0:
            print(w)
        try:
            scoreByModel += modelMap(model.similarity(w, u'전통적'))
        except:
            pass
    count += scoreByModel
    m3 += scoreMapLarge(removeOutBound(count, 0, linecount))
    totalCount += count
    count = 0
    scoreByModel = 0
    # Axis 4: modern ('세련된' ≈ "refined/stylish").
    for w in list_modernWords:
        count += textTotal.count(w)
        if textTotal.count(w) > 0:
            print(w)
        try:
            scoreByModel += modelMap(model.similarity(w, u'세련된'))
        except:
            pass
    count += scoreByModel
    m4 += scoreMapSmall(removeOutBound(count, 0, linecount))
    totalCount += count
    count = 0
    scoreByModel = 0
    # Axis 5: romantic ('로맨틱한').
    for w in list_romanticWords:
        count += textTotal.count(w)
        if textTotal.count(w) > 0:
            print(w)
        try:
            scoreByModel += modelMap(model.similarity(w, u'로맨틱한'))
        except:
            pass
    count += scoreByModel
    m5 += scoreMapXLarge(removeOutBound(count, 0, linecount))
    totalCount += count
    count = 0
    scoreByModel = 0
    # Axis 6: relaxing ('편안한' ≈ "comfortable").
    for w in list_relaxingWords:
        count += textTotal.count(w)
        if textTotal.count(w) > 0:
            print(w)
        try:
            scoreByModel += modelMap(model.similarity(w, u'편안한'))
        except:
            pass
    count += scoreByModel
    m6 += scoreMap(removeOutBound(count, 0, linecount))
    totalCount += count
    count = 0
    scoreByModel = 0
    # Axis 7: loud ('북적이는' ≈ "bustling").
    for w in list_loudWords:
        count += textTotal.count(w)
        if textTotal.count(w) > 0:
            print(w)
        try:
            scoreByModel += modelMap(model.similarity(w, u'북적이는'))
        except:
            pass
    count += scoreByModel
    m7 += scoreMapXSmall(removeOutBound(count, 0, linecount))
    totalCount += count
    # normalizing scores
    raw = [m1, m2, m3, m4, m5, m6, m7]
    if totalCount >= 1:
        norm = [float(i) / sum(raw) for i in raw]
    else:
        norm = raw
    norm = np.round(norm, 3)
    # Optional binary thresholding of the normalized scores.
    if thresholdMode:
        if thresholdMode.lower() == 'binary':
            # threshold = 0.2
            tempResult = []
            for n in norm:
                if n >= threshold:
                    tempResult.append(1)
                else:
                    tempResult.append(0)
            norm = tempResult
    # Model-based credibility; falls back to a small random value when
    # the model call fails.
    try:
        wordModelCred = modelMap(model.accuracy(textTotal))
    except:
        wordModelCred = random.uniform(0, 0.2)
    print('=== Credibility weight based on word appearance freq ===')
    print(totalCount)
    mc += (credMap(removeOutBound(totalCount, 0.1, 5)) +
           removeOutBound(wordModelCred, 0, 0.2))
    # Return a json object.
    if textTotal != '':
        return {
            'Mood': {
                'Friendly': norm[0],
                'Cramp': norm[1],
                'Romantic': norm[4],
                'Relaxing': norm[5],
                'Loud': norm[6],
                'Traditional': norm[2],
                'Modern': norm[3]
            },
            'MoodCredibility': round(mc, 3)
        }
    else:
        return {'msg': 'Post not found'}, status.HTTP_404_NOT_FOUND
def sample():
    """Log the visit and serve the sample landing page."""
    remote_addr = request.environ['REMOTE_ADDR']
    visitor_logger.post_visit_data('web', remote_addr, int(time.time()),
                                   '/sample', 'get')
    return render_template("sample.html")
def sampleDistrictPlaceness():
    """Render the district-placeness page.

    When mood == 'none', hotspots are ranked by activity (top 15) and
    annotated with each hotspot's own dominant mood; otherwise they are
    filtered by the requested mood (top 10).  Also builds the flat rows
    consumed by the Google Map widget.
    """
    district = request.args.get('district')
    activity = request.args.get('activity')
    maen = request.args.get('maen')
    mood = request.args.get('mood')
    remote_addr = request.environ['REMOTE_ADDR']
    visitor_logger.post_visit_data(
        'web', remote_addr, int(time.time()),
        '/district-placeness?district={}&activity={}&maen={}&mood={}'.format(
            district, activity, maen, mood), 'get')
    # Hand-picked map-icon indices for specific demo hotspots.
    demo_icon_overrides = {
        '149949': 4,
        '319602204': 3,
        '100554430385266': 3,
        '1345605272126786': 1,
        '918860378233238': 0,
        '968345939922770': 0,
        '447938232': 6,
        '1023159399': 2,
    }
    search_result = []
    if mood == 'none':
        # Mood is not specified: filter by activity and look up each
        # hotspot's own mood.
        candidates = hotspot_analyzer.get_hotspot_array_by_activity(
            district, activity)
        for rank, spot in enumerate(candidates, start=1):
            spot['rank'] = rank
            spot['hotspot_name'] = location_metadata.getLocationName(
                spot['hotspot_id'])
            gps = location_metadata.getGPS(spot['hotspot_id'])
            spot['gps_lat'] = gps[0]
            spot['gps_lon'] = gps[1]
            hotspot_moods = mood_extractor.get_hotspot_moods(
                district, spot['hotspot_id'])
            spot['mood'] = hotspot_moods[0] if hotspot_moods else 'none'
            # Icon index for the Google Map marker, with demo overrides.
            spot['mood_index'] = mood_extractor.get_gmap_mood_index(
                spot['mood'])
            if spot['hotspot_id'] in demo_icon_overrides:
                spot['mood_index'] = demo_icon_overrides[spot['hotspot_id']]
            search_result.append(spot)
            if rank == 15:
                break
    else:
        # Mood is specified: filter by mood first; the marker icon is
        # fixed to the requested mood.
        mood_eng = mood_extractor.convert_mood_eng(mood)
        if mood_eng is not None:
            candidates = hotspot_analyzer.get_hotspot_array_by_mood(
                district, mood, activity)
            for rank, spot in enumerate(candidates, start=1):
                spot['rank'] = rank
                spot['hotspot_name'] = location_metadata.getLocationName(
                    spot['hotspot_id'])
                gps = location_metadata.getGPS(spot['hotspot_id'])
                spot['gps_lat'] = gps[0]
                spot['gps_lon'] = gps[1]
                spot['mood_index'] = mood_extractor.get_gmap_mood_index(
                    mood_eng)
                search_result.append(spot)
                if rank == 10:
                    break
    # Flat rows for the map: [name, lat, lon, icon index, count].
    result_hotspots = [[
        item['hotspot_name'],
        float(item['gps_lat']),
        float(item['gps_lon']), item['mood_index'], item['count']
    ] for item in search_result]
    search_result = sorted(search_result, key=lambda k: k['rank'])
    return render_template("district_placeness.html",
                           district=district,
                           activity=activity,
                           maen=maen,
                           mood=mood,
                           search_result=search_result,
                           result_hotspots=result_hotspots)