def write_post(district, res):
    """Patch one post record under /<district>/<res['id']> in the Firebase
    database whose path suffix is given by the first CLI argument.

    district -- district node name in the database
    res      -- dict containing at least an 'id' key; stored as a JSON string

    Errors are reported but not raised (best-effort write, as in the original).
    """
    try:
        dat_db = dbhandler.firebase('https://placenessdb2.firebaseio.com/' + sys.argv[1])
        dat_db.patch("/" + district + "/" + res['id'], json.dumps(res))
    except Exception as e:
        # was a bare `except:` — narrowed so KeyboardInterrupt/SystemExit
        # still propagate, and the actual failure is surfaced
        print("DB write error:", e)
    else:
        print("DB write successful")
def write_to_db(json_obj):
    """Upload every element of `json_obj` (an iterable of dicts, each with an
    'id' key — schema inferred from usage) to /experiment2/<id> as JSON.

    Failed uploads are reported and skipped; the loop continues (best-effort,
    as in the original).
    """
    dat_db = dbhandler.firebase('https://placenessdb2.firebaseio.com/')
    for elem in json_obj:
        try:
            dat_db.put("experiment2/" + elem['id'], json.dumps(elem))
        except Exception as e:
            # was a bare `except:` — narrowed, and the error is surfaced
            print("upload error:", e)
# Demo script: builds one hand-written ontology record for a single post
# and (presumably, in the truncated part) writes it to the yap DB nodes.
from src import dbhandler
from src import placeontology
from src.jsonencoder import *
import json

############# 0. initialization ############
# Separate handles for the raw-data node and the ontology node.
db_data = dbhandler.firebase('https://placenessdb.firebaseio.com/data/yap')
db_ont = dbhandler.firebase('https://placenessdb.firebaseio.com/ontology/yap')
place = placeontology.ontology()

############# 1. assigning place name ############
placeid = "9999999999"
# Looks like an Instagram post id ("<media>_<user>") — TODO confirm.
postid = "1165825149222355077_1414241887"

############# 2. assigning values according to ontology ############
values_dweller = ['user1', 'girlfriend']
# Mixed-type time facet: unix ts, Y, M, D, h, m, s, weekday name, then two
# string flags whose meaning is not visible here — verify against ontology.
values_time = [ 1461737135, 2016, 4, 27, 15, 5, 35, "Wednesday", "false", "not sure" ]
values_space = ['high traffic', 'sunny', 28]
values_activity = ['action movies', 'watch movies']
values_opinion = ['excited to watch movies']

############# 3. encoding values as json ############
# encodeJson comes from src.jsonencoder (star import); it pairs ontology
# field descriptors with the value lists above.
json_dweller = encodeJson(place.dweller, values_dweller)
json_time = encodeJson(place.time, values_time)
json_space = encodeJson(place.space, values_space)
json_activity = encodeJson(place.activity, values_activity)
json_opinion = encodeJson(place.opinion, values_opinion)

############# 4. encoding json within json ############
# NOTE(review): source is truncated here — the argument list to encodeJson
# is cut off mid-expression.
json_key_values = encodeJson(place.root, [
# NOTE(review): this chunk begins mid-file — the leading `except` belongs to
# a try-block outside the visible source, and `dat_db` / `realtime_instagram`
# used below are defined elsewhere in the original file.
except: print "upload error"
#return next_url +'<br>\n' + str(json_data) + '\n' + table_txt;

def iter_locationid():
    """Iterate every location id stored under /experiment2/ and re-run
    realtime_instagram() on each, resuming at `last_id` (inclusive) —
    apparently a manual restart mechanism after an interrupted crawl.
    """
    locationids = dat_db.get('/experiment2/')
    found_last_id = False;
    last_id = '1033025300'  # hard-coded resume point from the previous run
    for locationid in locationids:
        print locationid
        # Skip ids until the resume point is reached.
        if found_last_id == False:
            if locationid == last_id:
                found_last_id = True
            else:
                continue
        realtime_instagram(locationid, None)

dat_db = dbhandler.firebase('https://placenessdb2.firebaseio.com/')
iter_locationid()
import os
import json
from datetime import datetime

from src import dbhandler
from src import placeontology
from src import cognitiveAPI

# Wipe the yap data node by overwriting it with an empty JSON object.
yap_node = dbhandler.firebase('https://placenessdb.firebaseio.com/data/yap')
empty_payload = {}
yap_node.put("", json.dumps(empty_payload))
# NOTE(review): chunk of a larger file — datetime/os/json/dbhandler/
# placeontology used below are imported outside the visible source.
from src.jsonencoder import *

def unixtimestampToYMDHMS(timestamp):
    """Convert a unix timestamp (int or numeric string) to a
    'YYYY-MM-DD-HH-MM-SS' local-time string."""
    return datetime.fromtimestamp(int(timestamp)).strftime('%Y-%m-%d-%H-%M-%S')

def getWeekday (ymdhms):
    """Return the weekday name ('Monday'...) for a string produced by
    unixtimestampToYMDHMS."""
    return datetime.strptime(ymdhms,'%Y-%m-%d-%H-%M-%S').strftime('%A')

# Walk every result file under the Seoul mall dataset; filenames look like
# "<district>_<place>.<ext>" with one JSON record per line.
datapath = "./data/Seoul mall/res/"
filenames = os.listdir(datapath)
ont = placeontology.ontology()
ont_db = dbhandler.firebase('https://placenessdb.firebaseio.com/ontology/')
dat_db = dbhandler.firebase('https://placenessdb.firebaseio.com/data/')
for filename in filenames:
    filepath = datapath+filename
    f = open(filepath, "r")
    # district name is the part before the first underscore
    district_name = filename.split("_")[0]
    for line in f:
        data = json.loads(line)
        #place name
        place_name = filename.split("_")[1].split(".")[0]
        #post id
        # NOTE(review): source is truncated here — the loop body continues
        # in the original file.
from src import dbhandler
from src import placeontology
from src.jsonencoder import *
import json

db_data = dbhandler.firebase('https://placenessdb.firebaseio.com/data/starbucks/')
db_ont = dbhandler.firebase('https://placenessdb.firebaseio.com/ontology/starbucks/')
place = placeontology.ontology()


def insertRawInput(filename):
    """Load one raw dump file (one single-quoted JSON-ish record per line)
    and store each record under <placeid>/instagram/<id> in both the data
    and ontology DBs, the latter with an empty ontology skeleton.

    filename -- path whose 4th '/'-component is '<placeid>:<rest>'; the
                placeid is taken from before the ':'.
    """
    placeid = filename.split("/")[3].split(":")[0]
    # context manager: the original leaked the file handle
    with open("./" + filename, "r") as f:
        for line in f:
            _raw_string = line
            # dumps use single quotes; swap them so json can parse the line
            _raw_string_json_acceptable = _raw_string.replace("'", "\"")
            try:
                _raw_json = json.loads(_raw_string_json_acceptable)
            except ValueError:
                # not valid JSON even after quote fixing — report and skip
                # (narrowed from a bare `except:`)
                print(_raw_string_json_acceptable)
                continue
            try:
                db_data.put(placeid + "/instagram/" + _raw_json['id'],
                            json.dumps(_raw_json))
                # empty skeleton for the five ontology facets
                vals = ["", "", "", "", ""]
                data = encodeJson(place.root, vals)
                db_ont.put(placeid + "/instagram/" + _raw_json['id'], data)
            except Exception:
                # narrowed from a bare `except:`; best-effort, keep going
                print ("error:", json.dumps(_raw_json))
from src import dbhandler
from src import cognitiveAPI
from src.jsonencoder import *
import os
import json  # fix: json.load/json.dumps are used below but json was not
             # visibly imported (only worked if the star import provided it)

# Upload per-place activity term-frequency dicts to the starbucks data node.
filenames = os.listdir("./data/hs_starbucks_activity/")
dat_db = dbhandler.firebase('https://placenessdb.firebaseio.com/data/starbucks/')
for filename in filenames:
    # Each file: {"campus": [{"key": <post id>, "activity": [{k: v}, ...]},
    # ...]} — schema inferred from the access pattern below; confirm.
    with open("./data/hs_starbucks_activity/" + filename, "r") as f:
        data = json.load(f)
    campus = data['campus']
    place = filename.split(".")[0]  # place name = filename without extension
    for elem in campus:
        place_instance = elem['key']
        activity_dict = elem['activity']
        if any(activity_dict):
            # flatten the list of single-entry dicts into one flat dict
            res = {}
            for key in activity_dict:
                for k in key:
                    res[k] = key[k]
            print("/" + place + "/instagram/" + place_instance + "/activity_tf")
            dat_db.put("/" + place + "/instagram/" + place_instance + "/activity_tf",
                       json.dumps(res))
        else:
            print("\t no keywords found")
# -*- coding: utf-8 -*-
from src import dbhandler
from src import placeontology
from src.jsonencoder import *
import json

# Populate the ontology facets for one hand-picked Instagram post under
# ontology/<district>/<place>/<post>.
ont_db = dbhandler.firebase('https://placenessdb.firebaseio.com/ontology/')

district_name = "IFC_Mall"
place_name = "Starbucks"
post_id = "1004038919362882936"

metadata = {
    "source": "instagram",
    "url": "https://www.instagram.com/p/3vyl_hqkMn/",
    "profile_img_url": "https://scontent.cdninstagram.com/t51.2885-19/s150x150/12317957_143603502668427_1089213051_a.jpg",
}

# Keyword facets (Korean keywords kept verbatim).
ont_db.put_dweller(district_name, place_name, post_id, ["맛집", "IFC Mall", "스타벅"])
ont_db.put_with(district_name, place_name, post_id, ["여자친구", "장모님"])
ont_db.put_when(district_name, place_name, post_id, ["오늘", "휴일", "아침"])
ont_db.put_what(district_name, place_name, post_id, ["커피", "케이크", "홍차"])
ont_db.put_activity(district_name, place_name, post_id, ["상견례"])
ont_db.put_opinion(district_name, place_name, post_id, ["정갈하다", "고급스럽다"])

# Structured (JSON-encoded) attachments — placeholder payloads.
ont_db.put_when_timestamp(district_name, place_name, post_id,
                          json.dumps({"timestampjson": "timejson"}))
ont_db.put_imageAnalysis(district_name, place_name, post_id,
                         json.dumps({"imagekeyword": "imageeimad"}))
ont_db.put_profileAnalysis(district_name, place_name, post_id,
                           json.dumps({"profilejson": "profilejson"}))
ont_db.put_metadata(district_name, place_name, post_id, json.dumps(metadata))
# -*- coding: utf-8 -*- from src import dbhandler db = dbhandler.firebase('https://placenessdb.firebaseio.com/ontology/times') placenames = db.get_shallow("/") _dweller = [] _with = [] _what = [] _activity = [] f = open("placedata_times.txt", "w") for placename in placenames: posts = db.get("/" + placename) for post in posts: print post dat = posts[post] __dweller = "?" __with = "?" __what = "?" __activity = "?" try: __dweller = dat['dweller']['modified_keywords'] for keyword in __dweller:
# Revision variant of the activity-upload script: writes activity_tf_rev
# instead of activity_tf. NOTE(review): `json` is used but not visibly
# imported — presumably provided by the jsonencoder star import; verify.
from src import dbhandler
from src import cognitiveAPI
from src.jsonencoder import *
import os

filenames = os.listdir("./data/hs_starbucks_revision/")
dat_db = dbhandler.firebase(
    'https://placenessdb.firebaseio.com/data/starbucks/')
for filename in filenames:
    f = open("./data/hs_starbucks_revision/" + filename, "r")
    data = json.load(f)
    # Expected shape: {"campus": [{"key": ..., "activity": [{k: v}, ...]}]}
    # — inferred from the access pattern; confirm against the data files.
    campus = data['campus']
    place = filename.split(".")[0]
    for elem in campus:
        place_instance = elem['key']
        activity_dict = elem['activity']
        if any(activity_dict):
            # flatten list of single-entry dicts into one dict
            res = {}
            for key in activity_dict:
                for k in key:
                    res[k] = key[k]
            #print type (res)
            print "/" + place + "/instagram/" + place_instance + "/activity_tf_rev"
            #print type(activity_dict)
            try:
                # NOTE(review): source is truncated mid-call here — the
                # remainder of the put() and its except clause are cut off.
                dat_db.put(
                    "/" + place + "/instagram/" + place_instance +
# Reformatted duplicate of the insertRawInput loader (see the other copy in
# this collection). NOTE(review): truncated — the outer try has no visible
# except clause in this chunk.
from src import dbhandler
from src import placeontology
from src.jsonencoder import *
import json

db_data = dbhandler.firebase(
    'https://placenessdb.firebaseio.com/data/starbucks/')
db_ont = dbhandler.firebase(
    'https://placenessdb.firebaseio.com/ontology/starbucks/')
place = placeontology.ontology()


def insertRawInput(filename):
    """Load one raw dump file (one single-quoted JSON-ish record per line)
    and store each record under <placeid>/instagram/<id> in the data DB,
    plus an empty ontology skeleton in the ontology DB."""
    # placeid is the 4th '/'-component of the path, before any ':'.
    placeid = filename.split("/")[3].split(":")[0]
    f = open("./" + filename, "r")
    for line in f:
        _raw_string = line
        # dumps use single quotes; swap them so json can parse the line
        _raw_string_json_acceptable = _raw_string.replace("'", "\"")
        try:
            _raw_json = json.loads(_raw_string_json_acceptable)
            try:
                db_data.put(placeid + "/instagram/" + _raw_json['id'],
                            json.dumps(_raw_json))
                # empty skeleton for the five ontology facets
                vals = ["", "", "", "", ""]
                data = encodeJson(place.root, vals)
                db_ont.put(placeid + "/instagram/" + _raw_json['id'], data)
            except:
                print("error:", json.dumps(_raw_json))
            # NOTE(review): source is truncated here — the outer except is
            # cut off.
# NOTE(review): chunk of a larger file — read_csv, sys, json and dbhandler
# used below are imported outside the visible source; the trailing loop is
# truncated.
import image_analysis
import analyze_time


def write_post(district,res):
    """Best-effort patch of one post under /<district>/<res['id']> in the
    Firebase DB selected by the first CLI argument."""
    try:
        dat_db = dbhandler.firebase('https://placenessdb2.firebaseio.com/'+ sys.argv[1])
        dat_db.patch("/" + district + "/"+ res['id'], json.dumps(res))
        print("DB write successful")
    except:
        print("DB write error")


# Word bags used to tag posts by matching text and image keywords.
text_word_dict = read_csv.get_text_dict();
img_word_dict = read_csv.get_img_dict();
if len(sys.argv) ==2:
    dat_db = dbhandler.firebase('https://placenessdb.firebaseio.com/'+ sys.argv[1])
    districts = dat_db.get_shallow('/')
    for district in districts:
        hotspots = dat_db.get('/'+district+'/')
        print '\t' + district
        for hotspot in hotspots:
            print '\t' + hotspot
            # result record accumulated per hotspot
            res = {'text_keywords':[], 'img_keywords':[], 'time_keywords':[], 'id':hotspot}
            data = hotspots[hotspot]
            for wordbag in text_word_dict:
                for word in text_word_dict[wordbag]:
                    # NOTE(review): source is truncated here — matching logic
                    # continues in the original file.
# Download every stored image under data/coex to a local folder, naming
# files "<hotspot>_<instance>.jpg".
from src import dbhandler
import urllib  # Python 2 urllib (urlretrieve)

db = dbhandler.firebase('https://placenessdb.firebaseio.com/data/coex')
hotspots = db.get_shallow("/")
for hotspot in hotspots:
    print hotspot
    hotspots_instances = db.get("/" + hotspot)
    for hotspots_instance in hotspots_instances:
        print "\t" + hotspots_instance
        hotspots_instance_data = hotspots_instances[hotspots_instance]
        # only records that actually carry an image URL are fetched
        if "images" in hotspots_instance_data:
            print hotspots_instance_data["images"]
            urllib.urlretrieve(
                hotspots_instance_data["images"],
                "/home/cdsn/workspace/placenessdb/data/image161010/" +
                hotspot + "_" + hotspots_instance + ".jpg")
# The triple-quoted block below is an older, disabled version of the walk.
# NOTE(review): the string is not closed within the visible source — it is
# terminated further down in the original file.
'''
############# 0. initialization ############
db = dbhandler.firebase('https://placenessdb.firebaseio.com/data/')
############# 1. fetching place ids ############
macroplaces = db.get_shallow("/")
############# 2. fetching place instance ids ############
for macroplace in macroplaces:
    #print macroplace
    hotspots = db.get_shallow("/"+macroplace)
# -*- coding: utf-8 -*-
from src import dbhandler
from src import cognitiveAPI
from src.jsonencoder import *
import numpy as np
import subprocess
import cv2
import os
import urllib
import json
from bs4 import BeautifulSoup
from flask import Flask
from flask import request

# Count the stored post instances per hotspot under /experiment/ and print a
# per-hotspot tally followed by the grand total. (Many of the imports above
# are unused here; kept because this may be a chunk of a larger file.)
dat_db = dbhandler.firebase('https://placenessdb.firebaseio.com/experiment/')
shallows = dat_db.get_shallow('/')
total = 0  # fix: was named `sum`, shadowing the builtin
for shallow in shallows:
    hotspot_shallow = dat_db.get_shallow('/' + shallow + '/')
    count = len(hotspot_shallow)
    total += count
    print(shallow + ": " + str(count))
print(total)
# NOTE(review): chunk of a larger scraper — `html`, `url`, `json`, `dat_db`
# and BeautifulSoup are defined/imported outside the visible source.
soup = BeautifulSoup(html, "lxml")
data = soup.findAll("script", {"type": "text/javascript"})
# The 5th script tag carries the page's embedded JSON; slice out the
# "PostPage" payload between the two literal markers.
json_text = (str(data[4]).split('"PostPage": [')[1]).split(']}, "qe":')[0]
json_data = json.loads(json_text)['media']
print "media parsed..."
# Remap the scraped page fields onto the old Instagram API field names:
#caption
#comments
json_data['created_time'] = json_data['date']
#filter # NA
#id
json_data['images'] = json_data['display_src']
#likes
json_data['link'] = url
locationid = json_data['location']['id']
#fixme: tags #embeded in 'caption'
#fixme: type #either 'video' or 'image'. cannot be determined from source
json_data['user'] = json_data['owner']
#fixme: user_has_liked #not available either
# Store the remapped record under a hard-coded demo path.
dat_db = dbhandler.firebase('https://placenessdb.firebaseio.com/data/demo/')
dat_db.put("timesquare" + "/" + "ontheborder" + "/" + json_data['id'] + "/", json.dumps(json_data))
print "done."
from src import dbhandler ############# 0. initialization ############ db = dbhandler.firebase('https://placenessdb.firebaseio.com/corpus/') ############# 1. fetching place ids ############ corpus = db.get("/") edges = corpus['edges'] nodes = corpus['nodes'] ############# 2. fetching place instance ids ############ for node in nodes: print node, nodes[node]['type'][0]