Пример #1
0
def exact_match(request, g_id, usr):
    """Render the exact-match page for one group.

    Collects the (image, bbox) pair for every finished box in the group
    (optionally restricted to one user), fetches the group's example
    images from backup_geocars, and renders streetview/show_exact.html.
    """
    done_boxes = Bboxes.objects.filter(group_id=g_id).filter(
        cur_state__box_done=1)
    if usr is None or usr == '' or usr == '/':
        bbox_ids = done_boxes.values('bbox_id')
    else:
        # Restrict to boxes finished by this user.
        bbox_ids = done_boxes.filter(cur_state__user=usr).values('bbox_id')

    ims = []
    for entry in bbox_ids:
        state = Cur_state.objects.filter(bbox_id=entry['bbox_id']).get()
        img, bbox = get_im_bbox(state)
        ims.append({'img': img, 'bbox': bbox})

    db = connect_to_db('backup_geocars')
    cursor = db.cursor()
    examples = get_images_for_group(int(g_id.encode('ascii', 'ignore')),
                                    cursor)
    db.close()
    group_name = GroupNames.objects.filter(
        group_id=g_id).values('group_name').get()

    return render_to_response(
        'streetview/show_exact.html', {
            'images': ims,
            'examples': examples,
            'group_name': group_name,
            'group_id': g_id,
            'username': usr
        })
Пример #2
0
def get_im_bbox(b):
    """Fetch image metadata and its bounding box for one state row.

    b: object exposing b.bbox with fields synsetid, imageid, assignid and
       bbox_id (presumably a Cur_state model instance — TODO confirm).

    Returns (img, bbox): img has url/oriwidth/oriheight; bbox has bbox_id
    plus pleft/pright/ptop/pbottom/width/height.
    """
    db = connect_to_db('bbox_collection_gsv')
    cursor = db.cursor()
    # Original image URL and dimensions for this (synsetid, imageid).
    img_query = 'select myori_url,oriwidth,oriheight from imagenet_bbox.view_allimage where synsetid=%d and imageid=%d' % (
        b.bbox.synsetid, b.bbox.imageid)
    cursor.execute(img_query)
    row = cursor.fetchone()

    # Good bbox answer for the same assignment; pleft!=-1 excludes
    # "no box" answers. NOTE(review): fetchone() returns None when no
    # row matches, which would crash the indexing below — confirm
    # callers only pass states that have a valid box.
    bbox_query = 'select pleft, pright, ptop, pbottom, width, height from bbox_answer where bbox_isgood and pleft!=-1 and synsetid=%s and targetsynsetid=%s and imageid=%d and assignid=%d' % (
        b.bbox.synsetid, b.bbox.synsetid, b.bbox.imageid, b.bbox.assignid)

    cursor.execute(bbox_query)
    bbrow = cursor.fetchone()
    bbox = {}
    bbox['bbox_id'] = b.bbox.bbox_id
    bbox['pleft'] = bbrow[0]
    bbox['pright'] = bbrow[1]
    bbox['ptop'] = bbrow[2]
    bbox['pbottom'] = bbrow[3]
    bbox['width'] = bbrow[4]
    bbox['height'] = bbrow[5]
    img = {}
    img['url'] = row[0]
    img['oriwidth'] = row[1]
    img['oriheight'] = row[2]

    db.close()
    return img, bbox
Пример #3
0
def get_im_bbox(b):
  """Fetch image metadata and its bounding box for one state row.

  Unformatted duplicate of the 4-space-indented get_im_bbox elsewhere in
  this file; behavior is identical. Returns (img, bbox) dicts.
  """
  db = connect_to_db('bbox_collection_gsv')
  cursor = db.cursor()
  # Image URL and original dimensions.
  img_query='select myori_url,oriwidth,oriheight from imagenet_bbox.view_allimage where synsetid=%d and imageid=%d'%(b.bbox.synsetid, b.bbox.imageid)
  cursor.execute(img_query)
  row=cursor.fetchone()

  # Good bbox answer for the same assignment (pleft!=-1 == has a box).
  # NOTE(review): fetchone() may be None if nothing matches — confirm.
  bbox_query='select pleft, pright, ptop, pbottom, width, height from bbox_answer where bbox_isgood and pleft!=-1 and synsetid=%s and targetsynsetid=%s and imageid=%d and assignid=%d'%(b.bbox.synsetid,b.bbox.synsetid,b.bbox.imageid,b.bbox.assignid)

  cursor.execute(bbox_query)
  bbrow=cursor.fetchone()
  bbox={}
  bbox['bbox_id']=b.bbox.bbox_id
  bbox['pleft']=bbrow[0]
  bbox['pright']=bbrow[1]
  bbox['ptop']=bbrow[2]
  bbox['pbottom']=bbrow[3]
  bbox['width']=bbrow[4]
  bbox['height']=bbrow[5]
  img={}
  img['url']=row[0]
  img['oriwidth']=row[1]
  img['oriheight']=row[2]

  db.close()
  return img,bbox
 def __init__(self, queue):
   # Worker thread setup: task queue, a 'demo' DB cursor, and a
   # ThumbBrowser network client.
   threading.Thread.__init__(self)
   self.queue = queue
   # NOTE(review): only the cursor is kept — the connection object is
   # dropped, so it can never be closed explicitly.
   db=connect_to_db('demo')
   cursor=db.cursor()
   self.cursor = cursor
   #self.b=network.QueryBrowser(BROWSER_ID)
   self.b=network.ThumbBrowser(BROWSER_ID)
 def __init__(self, queue):
     # Formatted duplicate of the worker-thread constructor above:
     # task queue, 'demo' DB cursor, ThumbBrowser client.
     threading.Thread.__init__(self)
     self.queue = queue
     # NOTE(review): connection object not retained; cannot be closed.
     db = connect_to_db('demo')
     cursor = db.cursor()
     self.cursor = cursor
     #self.b=network.QueryBrowser(BROWSER_ID)
     self.b = network.ThumbBrowser(BROWSER_ID)
 def __init__(self, queue,out_queue):
   # Worker variant with both an input and an output queue.
   threading.Thread.__init__(self)
   self.queue = queue
   self.out_queue = out_queue
   # NOTE(review): connection object not retained; cannot be closed.
   db=connect_to_db('demo')
   cursor=db.cursor()
   self.cursor = cursor
   #self.b=network.QueryBrowser(browser_id)
   # NOTE(review): uses lowercase `browser_id` where the sibling
   # constructors use BROWSER_ID — confirm which name is defined at
   # module level.
   self.b=network.ThumbBrowser(browser_id)
Пример #7
0
def get_submodels(make):
  """Return the sorted list of distinct submodels for *make*.

  make: make name with underscores standing in for spaces, or the
        literal 'unknown' to get submodels across all makes.

  Returns a list of submodel name strings, sorted by the database.
  """
  db = connect_to_db('geocars_crawled')
  cursor = db.cursor()
  try:
    if make=='unknown': #Get all submodels from Jon
      cursor.execute(
          'select distinct(submodel) from control_classes order by submodel')
    else:
      make_r=make.replace('_',' ')
      #Get submodel's from Jon's improved table
      # Parameterized query: the original interpolated `make` straight
      # into the SQL string, which breaks on quotes and is injectable.
      cursor.execute(
          'select distinct(submodel) from control_classes '
          'where make=%s order by submodel', (make_r,))
    submodels=cursor.fetchall()
  finally:
    db.close()  # the original leaked the connection
  return [s[0] for s in submodels]
Пример #8
0
def get_submodels(make):
    """Fetch distinct submodel names from control_classes, sorted.

    'unknown' returns submodels across every make; any other value is
    matched after replacing underscores with spaces.
    """
    db = connect_to_db('geocars_crawled')
    cursor = db.cursor()
    if make == 'unknown':  #Get all submodels from Jon
        submodel_query = 'select distinct(submodel) from control_classes order by submodel'
    else:
        #Get submodel's from Jon's improved table
        submodel_query = 'select distinct(submodel) from control_classes where make="%s" order by submodel' % (
            make.replace('_', ' '))

    cursor.execute(submodel_query)
    return [row[0] for row in cursor.fetchall()]
Пример #9
0
def show_ims(request, lat_lng):
    """Render timelapse images (6 rotations x available dates) for one GPS point.

    lat_lng: 'lat_lng' string taken from the URL.
    Reads session key 'lld_dict' (lat_lng -> list of dates) only when
    exp is False.
    """
    #exp=True means experimenting to see if images are
    #being downloaded properly
    exp = True
    #exp=False
    dates_list = []
    lat = lat_lng.split('_')[0].encode('ascii', 'ignore')
    lng = lat_lng.split('_')[1].encode('ascii', 'ignore')
    db = connect_to_db('geo')
    cursor = db.cursor()
    if exp == True:  #False:
        # All dates with a clean, fully-downloaded image at this point.
        # NOTE(review): lat/lng come from the URL and are interpolated
        # into SQL unescaped — injection risk if this view is exposed.
        sqls = 'select distinct(im_date) from fixed_timelapse_times where lat=%s and lng=%s and small=0 and corrupt=0 and downloaded=1' % (
            lat, lng)
        cursor.execute(sqls)
        im_dates = cursor.fetchall()
        for d in im_dates:
            dates_list.append(d[0])
    else:
        lld_dict = request.session['lld_dict']
        for l in lld_dict[lat_lng]:
            dates_list.append(l.encode('ascii', 'ignore'))
        dates_list = list(set(dates_list))

    dates_list.sort()
    dates_list_str = [datetime.strftime(d, "%Y-%b-%d") for d in dates_list]
    rots = [0, 60, 120, 180, 240, 300]
    im_list = []
    # One inner list of image dicts per rotation angle.
    for r in rots:
        im_rot_list = []
        for d, d_str in zip(dates_list, dates_list_str):
            year = d_str.split('-')[0]
            month = d_str.split('-')[1]
            date_str = '%s-%s-01' % (year, month)
            # Re-check this exact (lat, lng, rot, date) frame is usable.
            sqls = 'select corrupt,small,downloaded from geo.fixed_timelapse_times where lat=%s and lng=%s and rot=%s and im_date="%s"' % (
                lat, lng, r, datetime.strftime(d, "%Y-%m-%d"))
            cursor.execute(sqls)
            res = cursor.fetchone()
            if int(res[0]) == 0 and int(res[1]) == 0 and int(res[2]) == 1:
                im_dict = {}
                im_name = lat_lng_to_path(lat, lng, r, '%s-%s' % (month, year))
                print im_name
                im_dict['im'] = im_name
                im_dict['date'] = d
                im_dict['rot'] = r
                # Detected-car boxes drawn over this frame.
                sql_s = 'select x1,y1,x2,y2,desc_val,pscore,group_id from  all_cars.city_175_timelapse_detected_cars where lat=%s and lng=%s and rot=%s and im_date="%s"' % (
                    lat, lng, r, datetime.strftime(d, "%Y-%m-%d"))
                cursor.execute(sql_s)
                bboxes_list = cursor.fetchall()
                bboxes = make_bbox_pred_dict(bboxes_list)
                im_dict['bboxes'] = bboxes
                im_rot_list.append(im_dict)
        im_list.append(im_rot_list)

    return render_to_response('timelapse/show_ims.html', {'im_list': im_list})
Пример #10
0
def models_trims(request, make, submodel):
    """Render the model/trim chooser for a make+submodel.

    If either is 'unknown', record the answer for the current bbox and
    redirect back to the streetview start page; otherwise group the
    submodel's trims by model and render the chooser template.
    """
    request.session['submodel'] = submodel
    username = request.user.username
    if make == 'unknown' or submodel == 'unknown':
        bbox_id = int(request.session.get('bbox', None)['bbox_id'])
        save_make_submodel(username, make, submodel, bbox_id)
        return HttpResponseRedirect(
            "http://imagenet.stanford.edu/streetview/streetview/")
    else:
        # NOTE(review): num_images is never used in this function.
        num_images = 4
        make = make.replace('_', ' ')
        submodel = submodel.replace('_', ' ')
        db = connect_to_db('geocars_crawled')
        cursor = db.cursor()

        #get group names & ids of all groups in submodel
        # NOTE(review): make/submodel come from the URL and are
        # interpolated into SQL unescaped — injection risk.
        group_query = 'select group_name,group_id from control_classes where make="%s" and submodel="%s"' % (
            make, submodel)
        cursor.execute(group_query)
        group_info = cursor.fetchall()
        model_dict = {}

        for g in group_info:
            #Get positive examples for selected images
            sql_s = 'select distinct(path),viewpoint from backup_geocars.edmund_examples,backup_geocars.positive_examples where backup_geocars.edmund_examples.group_id=%d and backup_geocars.edmund_examples.group_id=backup_geocars.positive_examples.group_id and path not like "%%flipped%%" order by rand()' % (
                g[1])
            cursor.execute(sql_s)
            paths = cursor.fetchall()
            real_paths = get_images(paths)
            group_dict = {}
            model_name, trim_name = get_names(g[0], make, submodel)
            group_dict['images'] = real_paths
            group_dict['group_id'] = g[1]
            group_dict['trim_name'] = trim_name

            # Accumulate trims under their model name.
            if model_name in model_dict:
                model_dict[model_name].append(group_dict)
            else:
                model_dict[model_name] = [group_dict]

        models = make_template_dict(model_dict, 'model_name', 'trims')

        img = request.session.get('image', None)
        bbox = request.session.get('bbox', None)
        db.close()
        return render_to_response(
            'streetview/models_trims.html', {
                'models': models,
                'make': make,
                'submodel': submodel,
                'image': img,
                'bbox': bbox
            })
Пример #11
0
def get_image(imageid, synsetid=145622):
  """Look up the URL and original dimensions of one image.

  imageid: image id within the synset.
  synsetid: synset to search; defaults to 145622, the value the
            original hard-coded, so existing callers are unaffected.

  Returns a dict with keys 'url', 'oriwidth', 'oriheight'.
  """
  db = connect_to_db('bbox_collection_gsv')
  try:
    cursor = db.cursor()
    img_query='select myori_url, oriwidth,oriheight from imagenet_bbox.view_allimage where synsetid=%d and imageid=%d'%(synsetid, imageid)
    cursor.execute(img_query)
    im=cursor.fetchone()
  finally:
    # Close even if the query raises (original leaked on error).
    db.close()
  return {'url': im[0], 'oriwidth': im[1], 'oriheight': im[2]}
Пример #12
0
def show_ims(request,lat_lng):
   """Render timelapse images (6 rotations x dates) for one GPS point.

   Unformatted duplicate of the 4-space show_ims elsewhere in this file;
   behavior is identical.
   """
   #exp=True means experimenting to see if images are
   #being downloaded properly
   exp=True
   #exp=False
   dates_list=[]
   lat=lat_lng.split('_')[0].encode('ascii','ignore')
   lng=lat_lng.split('_')[1].encode('ascii','ignore')
   db=connect_to_db('geo')
   cursor=db.cursor()
   if exp== True:#False:
      # NOTE(review): lat/lng from the URL are interpolated into SQL
      # unescaped — injection risk if this view is exposed.
      sqls='select distinct(im_date) from fixed_timelapse_times where lat=%s and lng=%s and small=0 and corrupt=0 and downloaded=1'%(lat,lng)
      cursor.execute(sqls)
      im_dates=cursor.fetchall()
      for d in im_dates:
        dates_list.append(d[0])
   else:
      lld_dict=request.session['lld_dict']
      for l in lld_dict[lat_lng]:
        dates_list.append(l.encode('ascii','ignore'))
      dates_list=list(set(dates_list))

   dates_list.sort()
   dates_list_str=[datetime.strftime(d,"%Y-%b-%d") for d in dates_list]
   rots=[0,60,120,180,240,300]
   im_list=[]
   # One inner list of image dicts per rotation angle.
   for r in rots:
     im_rot_list=[]
     for d,d_str in zip(dates_list,dates_list_str):
        year=d_str.split('-')[0]
        month=d_str.split('-')[1]
        date_str='%s-%s-01'%(year,month)
        # Re-check this exact (lat, lng, rot, date) frame is usable.
        sqls='select corrupt,small,downloaded from geo.fixed_timelapse_times where lat=%s and lng=%s and rot=%s and im_date="%s"'%(lat,lng,r,datetime.strftime(d,"%Y-%m-%d"))
        cursor.execute(sqls)
        res=cursor.fetchone()
        if int(res[0])==0 and int(res[1])==0 and int(res[2])==1:
          im_dict={}
          im_name=lat_lng_to_path(lat,lng,r,'%s-%s'%(month,year))
          print im_name
          im_dict['im'] =im_name
          im_dict['date'] =d
          im_dict['rot'] =r
          # Detected-car boxes drawn over this frame.
          sql_s='select x1,y1,x2,y2,desc_val,pscore,group_id from  all_cars.city_175_timelapse_detected_cars where lat=%s and lng=%s and rot=%s and im_date="%s"'%(lat,lng,r,datetime.strftime(d,"%Y-%m-%d"))
          cursor.execute(sql_s)
          bboxes_list=cursor.fetchall()
          bboxes=make_bbox_pred_dict(bboxes_list)
          im_dict['bboxes']=bboxes
          im_rot_list.append(im_dict)
     im_list.append(im_rot_list)

   return render_to_response('timelapse/show_ims.html',{'im_list':im_list})
Пример #13
0
def models_trims(request,make,submodel):
  """Render the model/trim chooser for a make+submodel.

  Unformatted duplicate of the 4-space models_trims elsewhere in this
  file; behavior is identical.
  """
  request.session['submodel']=submodel
  username=request.user.username
  if make=='unknown' or submodel=='unknown':
    bbox_id=int(request.session.get('bbox',None)['bbox_id'])
    save_make_submodel(username,make,submodel,bbox_id)
    return HttpResponseRedirect("http://imagenet.stanford.edu/streetview/streetview/")
  else:
    # NOTE(review): num_images is never used in this function.
    num_images=4
    make  = make.replace('_',' ')
    submodel = submodel.replace('_',' ')
    db = connect_to_db('geocars_crawled')
    cursor = db.cursor()

    #get group names & ids of all groups in submodel
    # NOTE(review): make/submodel come from the URL and are interpolated
    # into SQL unescaped — injection risk.
    group_query='select group_name,group_id from control_classes where make="%s" and submodel="%s"'%(make,submodel)
    cursor.execute(group_query)
    group_info=cursor.fetchall()
    model_dict={}

    for g in group_info:
      #Get positive examples for selected images
      sql_s='select distinct(path),viewpoint from backup_geocars.edmund_examples,backup_geocars.positive_examples where backup_geocars.edmund_examples.group_id=%d and backup_geocars.edmund_examples.group_id=backup_geocars.positive_examples.group_id and path not like "%%flipped%%" order by rand()'%(g[1])
      cursor.execute(sql_s)
      paths=cursor.fetchall()
      real_paths=get_images(paths)
      group_dict={}
      model_name,trim_name=get_names(g[0],make,submodel)
      group_dict['images']=real_paths
      group_dict['group_id']=g[1]
      group_dict['trim_name']=trim_name

      # Accumulate trims under their model name.
      if model_name in model_dict:
        model_dict[model_name].append(group_dict)
      else:
        model_dict[model_name]=[group_dict]

    models=make_template_dict(model_dict,'model_name','trims')

    img=request.session.get('image',None)
    bbox=request.session.get('bbox',None)
    db.close()
    return render_to_response('streetview/models_trims.html', {'models':models,'make':make,'submodel':submodel,'image':img,'bbox':bbox})
Пример #14
0
def exact_match(request,g_id,usr):
  """Render the exact-match page for one group.

  Unformatted duplicate of the 4-space exact_match elsewhere in this
  file; behavior is identical.
  """
  if usr is None or usr=='' or usr=='/':
    bbox_ids=Bboxes.objects.filter(group_id=g_id).filter(cur_state__box_done=1).values('bbox_id')
  else:
    # Restrict to boxes finished by this user.
    bbox_ids=Bboxes.objects.filter(group_id=g_id).filter(cur_state__box_done=1).filter(cur_state__user=usr).values('bbox_id')
  ims=[]
  for box in bbox_ids:
    b=Cur_state.objects.filter(bbox_id=box['bbox_id']).get()
    img,bbox=get_im_bbox(b)
    im ={}
    im['img']=img
    im['bbox']=bbox
    ims.append(im)

  db = connect_to_db('backup_geocars')
  cursor = db.cursor()
  examples=get_images_for_group(int(g_id.encode('ascii','ignore')),cursor)
  db.close()
  group_name=GroupNames.objects.filter(group_id=g_id).values('group_name').get()

  return render_to_response('streetview/show_exact.html',{'images':ims,'examples':examples,'group_name':group_name,'group_id':g_id,'username':usr})
Пример #15
0
        thread_name = self.name
        while True:
            task = self.queue.get()
            insert_queries(task[0], task[1], self.cursor)
            queue.task_done()


def insert_queries(tuples, chunk, cursor):
    """Bulk-update warped_im_name for one chunk of rows.

    tuples: sequence of (warped_im_name, im_name) parameter pairs fed to
            executemany.
    chunk:  chunk number, used only for progress logging.
    cursor: open DB-API cursor.
    """
    print('executing chunk %d' % chunk)  # print() runs on Py2 and Py3
    # Bug fix: the original wrapped the first placeholder in double
    # quotes ("%s"). The DB driver already quotes string parameters, so
    # that stored doubly-quoted values. Use bare placeholders.
    cursor.executemany(
        'update detected_cars set warped_im_name=%s where im_name=%s',
        tuples)


if __name__ == "__main__":
    db = connect_to_db('all_cars')
    cursor = db.cursor()
    sql_s = 'select distinct im_name from detected_cars'
    print sql_s
    cursor.execute(sql_s)
    ims = cursor.fetchall()

    CHUNKSIZE = 10000
    NUMTHREADS = 10

    queue = Queue.Queue()
    for i in range(NUMTHREADS):
        thread = sqlThread(queue)
        thread.daemon = True
        thread.start()
Пример #16
0
    if data is None:
        return 1
    store_in_db(fips, data, vars_list, cursor)
    return 0


def get_fips(cursor, table, col):
    print 'Getting FIPS'
    sql_s = 'select distinct(%s) from demo.%s where %s<>0' % (col, table, col)
    cursor.execute(sql_s)
    fips = cursor.fetchall()
    return fips


if __name__ == "__main__":
    db = connect_to_db('backup_geocars')
    cursor = db.cursor()
    fips = get_fips(cursor, 'latlong_fpis', 'fpis')
    stored_fips = get_fips(cursor, 'census', 'fips')
    unstored_fips = set(fips) - set(stored_fips)
    print len(stored_fips), len(unstored_fips), len(fips)
    queue = Queue.Queue()
    NUM_THREADS = 100
    i = 0
    for f in unstored_fips:
        i += 1
        f = str(f[0])
        queue.put(f)

    start = time.time()
    for i in range(NUM_THREADS):
   num_to_try=10
   while not connected:
      try:
        html=br.DownloadURL(link)
        connected = True # if line above fails, this is never executed
      except Exception as e: #catch all exceptions
        print 'Error in follow_link: %s trying again' %e
        tried += 1        
        if tried > num_to_try:
          print 'cannot download from link: %s' %link
          return 
   return html 

if __name__=="__main__":
  #Load latitude and longitudes
  db=connect_to_db('')
  cursor=db.cursor()
  lat_longs=get_lat_longs(cursor)
  url_base = "http://maps.googleapis.com/maps/api/geocode/json?latlng="
  queue = Queue.Queue()
  num_threads=100

  #Pull zip codes and city names for all lat/longs
  num_tuples=len(lat_longs)
  count = 0
  for tup in lat_longs:
    count += 1
    print 'adding %s out of %s...'%(count,num_tuples)
    queue.put(tup)

  start= time.time()
Пример #18
0
import sys

sys.path.append('/imagenetdb/tgebru/')
from mysql_utils import connect_to_db

# Back-fill price_bracket / fine_price_bracket on car_metadata from the
# quantile files. Each input line is "<price>,<bracket>".
db = connect_to_db('geocars')
cursor = db.cursor()

# Context managers so the file handles are closed (the original used
# open(...).readlines() and leaked both handles).
with open('price_quantile.txt', 'rb') as f:
    price_quantile = f.readlines()
with open('fine_price_quantile.txt', 'rb') as f:
    fine_price_quantile = f.readlines()

for p in price_quantile:
    # Values come from a local, trusted file; interpolated as before.
    sql_s = 'update car_metadata set price_bracket=%s where price=%s' % (
        p.split(',')[1], p.split(',')[0])
    cursor.execute(sql_s)
    print(sql_s)  # print() runs on Py2 and Py3

for p in fine_price_quantile:
    sql_s = 'update car_metadata set fine_price_bracket=%s where price=%s' % (
        p.split(',')[1], p.split(',')[0])
    print(sql_s)
    cursor.execute(sql_s)

db.close()
import sys
import os
sys.path.append('/imagenetdb/tgebru/')
from mysql_utils import connect_to_db


# Widen the group_id column to int(4) on every table in all_cars.
if __name__=="__main__":
  db_name='all_cars'
  db = connect_to_db(db_name)
  cursor = db.cursor()

  cursor.execute('''show tables from all_cars''')
  tables=cursor.fetchall()
  for t in tables:
    # Table names come from the DB itself, so interpolation is safe here.
    sql_s='alter table %s  modify group_id int(4)'%(t[0])
    print sql_s
    cursor.execute(sql_s)
Пример #20
0
    self.db=connect_to_db('all_cars')
    self.cursor=self.db.cursor()

  def run(self):
    """Worker loop: consume (tuples, chunk_no) tasks forever.

    Each task is handed to insert_queries with this thread's cursor,
    then marked done on the worker's own queue.
    """
    while True:
      task = self.queue.get()
      insert_queries(task[0],task[1],self.cursor)
      # Bug fix: the original called task_done() on the module-global
      # `queue`, which only happened to work when run as a script and
      # breaks if the class is reused with a different queue.
      self.queue.task_done()

def insert_queries(tuples,chunk,cursor):
    """Bulk-update warped_im_name for one chunk of (warped, im_name) rows.

    chunk is used only for progress logging; cursor is an open DB-API
    cursor.
    """
    print('executing chunk %d' % chunk)  # print() runs on Py2 and Py3
    # Bug fix: the original wrapped the first placeholder in double
    # quotes ("%s"); the driver already quotes string parameters, so
    # values were stored doubly-quoted. Use bare placeholders.
    cursor.executemany(
        'update detected_cars set warped_im_name=%s where im_name=%s',
        tuples)

if __name__=="__main__":
  db=connect_to_db('all_cars')
  cursor=db.cursor()
  sql_s='select distinct im_name from detected_cars'
  print sql_s
  cursor.execute(sql_s)
  ims=cursor.fetchall()

  CHUNKSIZE=10000
  NUMTHREADS=10

  queue=Queue.Queue()
  for i in range(NUMTHREADS):
    thread = sqlThread(queue)
    thread.daemon = True
    thread.start()
Пример #21
0
    while not connected:
        try:
            html = br.DownloadURL(link)
            connected = True  # if line above fails, this is never executed
        except Exception as e:  #catch all exceptions
            print 'Error in follow_link: %s trying again' % e
            tried += 1
            if tried > num_to_try:
                print 'cannot download from link: %s' % link
                return
    return html


if __name__ == "__main__":
    #Load latitude and longitudes
    db = connect_to_db('')
    cursor = db.cursor()
    lat_longs = get_lat_longs(cursor)
    url_base = "http://maps.googleapis.com/maps/api/geocode/json?latlng="
    queue = Queue.Queue()
    num_threads = 100

    #Pull zip codes and city names for all lat/longs
    num_tuples = len(lat_longs)
    count = 0
    for tup in lat_longs:
        count += 1
        print 'adding %s out of %s...' % (count, num_tuples)
        queue.put(tup)

    start = time.time()
Пример #22
0
import sys
sys.path.append('/imagenetdb/tgebru/')
from mysql_utils import connect_to_db

# Drop every table in `demo` whose name matches %SF1%.
db = connect_to_db('demo')
cursor = db.cursor()

sql_s = 'show tables from demo like "%SF1%"'
cursor.execute(sql_s)
tables = cursor.fetchall()
for t in tables:
    # Table names come from the DB itself, so interpolation is safe here.
    sql_s = 'drop table %s' % t[0]
    print(sql_s)  # print() runs on Py2 and Py3
    cursor.execute(sql_s)
db.close()  # the original never closed the connection
   num_to_try=10
   while not connected:
      try:
        html=br.DownloadURL(link)
        connected = True # if line above fails, this is never executed
      except Exception as e: #catch all exceptions
        print 'Error in follow_link: %s trying again' %e
        tried += 1        
        if tried > num_to_try:
          print 'cannot download from link: %s' %link
          return 
   return html 

if __name__=="__main__":
  #Load latitude and longitudes
  db=connect_to_db('backup_geocars')
  cursor=db.cursor()
  lat_longs=get_lat_longs(cursor)
  url_base = "http://data.fcc.gov/api/block/2010/find?"
  queue = Queue.Queue()
  out_queue = Queue.Queue()
  num_threads=1000

  #Pull fips codes for all lat/longs
  num_tuples=len(lat_longs)
  count = 0
  for tup in lat_longs:
    count += 1
    print 'adding %s out of %s...'%(count,num_tuples)
    queue.put(tup)
Пример #24
0
def get_lat_lng_list(cityid, exp, request):
    """Build the list of GPS points (and date counts) shown in the view.

    exp=True: sample 1000 points from the fixed_timelapse_times table.
    exp=False: sample 1000 points from a pickled lat/lng -> dates dict.
    In both cases the lat_lng -> dates mapping is stashed in
    request.session['lld_dict'] for later views.

    Returns a list of {'gps': 'lat_lng', 'numdates': int} dicts.
    """
    ll_list = []

    if exp == True:  #False:
        db = connect_to_db('geo')
        # NOTE(review): zip is assigned but unused (and shadows the
        # builtin); cityid is immediately overwritten with 175.
        zip = 10011
        cursor = db.cursor()
        cityid = 175
        sqls = 'select lat,lng,count(distinct(im_date)) from fixed_timelapse_times where cityid=%d and downloaded=1 and small=0 and corrupt=0 group by lat,lng' % (
            cityid)
        #sqls='select distinct t.lat,t.lng, count(im_date) from geo.timelapse_times t, demo.latlong_fpis l where zipcode=%d and l.lat=t.lat and l.lng=t.lng group by lat,lng'%zip
        cursor.execute(sqls)
        lat_lng_date = cursor.fetchall()
        NUM_SAMPLES = 1000
        lat_lng_date = random.sample(lat_lng_date, NUM_SAMPLES)
        lld_dict = {}
        for l in lat_lng_date:
            ll_dict = {}  #need dict to traverse in view
            ll_dict['gps'] = '%s_%s' % (str(l[0]), str(l[1]))
            ll_dict['numdates'] = int(l[2])
            ll_list.append(ll_dict)
            # NOTE(review): fetchone() returns a single row here, so
            # lld_dict stores that row's fields, not all dates — confirm
            # whether fetchall() was intended.
            sqls = 'select distinct(im_date) from fixed_timelapse_times where lat=%s and lng=%s' % (
                str(l[0]), str(l[1]))
            cursor.execute(sqls)
            dates = cursor.fetchone()
            lld_dict['%s_%s' %
                     (str(l[0]), str(l[1]))] = [str(d) for d in dates]
        db.close()
        request.session['lld_dict'] = lld_dict
    else:  #Different file for training/test vs all timelapse
        #f=open('/imagenetdb/tgebru/scrape/lat_lng_rot_url.txt','rb')

        #Validation set for 2013 housing data
        #f=open('/imagenetdb3/tgebru/cvpr2016/housing_data/train_test_split_2013/housing_2013_class_val.txt','rb')

        #Only loaded images
        #f=open('/afs/cs.stanford.edu/u/tgebru/cvpr2016/loaded_lat_lng_rot_url.txt', 'rb')
        NUM_SAMPLES = 1000
        #lines=f.readlines()
        #lines=random.sample(lines,UM_SAMPLES)
        #f.close()
        lld_dict = {}

        #Load lat,lng,dates dict
        with open(
                '/afs/cs.stanford.edu/u/tgebru/cvpr2016/ipython_code/kings_lat_lng_date_dict.pickle',
                'rb') as f:
            lat_lng_date_dict = pickle.load(f)
        '''
     for l in lines: 
       #uncomment for all timelapse
       parts=l.split('\t') 
       lat=parts[0].split('_')[0].strip()
       lng=parts[0].split('_')[1].strip()
       date=parts[-1].split('_')[-1][0:-5].strip()

       #Uncomment for 2013 housing data
       #lat=l.split('/')[-1].split('_')[0].strip()
       #lng=l.split('/')[-1].split('_')[1].strip()
       #date=l.split('/')[-1].split('_')[-1].split(' ')[0][0:-4].strip()
       
       #Different file for training/test vs all timelapse
       lat_lng='%s_%s'%(lat,lng) 
       if lat_lng in lld_dict.keys():
         lld_dict[lat_lng].append(date)
       else:
         lld_dict[lat_lng]=[date]
     '''
        #Now create a list to send to the view
        #for k in lld_dict.keys():
        keys = random.sample(lat_lng_date_dict.keys(), NUM_SAMPLES)
        for k in keys:
            ll_dict = {}
            ll_dict['gps'] = k
            dates = list(set(lat_lng_date_dict[k].keys()))
            numdates = len(dates)
            ll_dict['numdates'] = numdates
            lld_dict[k] = dates
            #Only want to see images with multiple years
            if numdates > 1:
                ll_list.append(ll_dict)
        request.session['lld_dict'] = lld_dict

    return ll_list
 def __init__(self, queue):
   # Worker thread setup: keeps its own 'all_cars' connection and cursor.
   threading.Thread.__init__(self)
   self.queue = queue
   self.db=connect_to_db('all_cars')
   self.cursor=self.db.cursor()
Пример #26
0
def get_lat_lng_list(cityid,exp,request):
   """Build the list of GPS points (and date counts) shown in the view.

   Unformatted duplicate of the 4-space get_lat_lng_list elsewhere in
   this file; behavior is identical. Stashes the lat_lng -> dates map
   in request.session['lld_dict'] and returns a list of
   {'gps': 'lat_lng', 'numdates': int} dicts.
   """
   ll_list=[]

   if exp== True: #False:
     db = connect_to_db('geo')
     # NOTE(review): zip is unused (and shadows the builtin); cityid is
     # immediately overwritten with 175.
     zip=10011
     cursor=db.cursor()
     cityid=175
     sqls='select lat,lng,count(distinct(im_date)) from fixed_timelapse_times where cityid=%d and downloaded=1 and small=0 and corrupt=0 group by lat,lng'%(cityid);
     #sqls='select distinct t.lat,t.lng, count(im_date) from geo.timelapse_times t, demo.latlong_fpis l where zipcode=%d and l.lat=t.lat and l.lng=t.lng group by lat,lng'%zip
     cursor.execute(sqls)
     lat_lng_date=cursor.fetchall()
     NUM_SAMPLES=1000
     lat_lng_date=random.sample(lat_lng_date,NUM_SAMPLES)
     lld_dict={}
     for l in lat_lng_date:
       ll_dict={} #need dict to traverse in view
       ll_dict['gps']='%s_%s'%(str(l[0]),str(l[1]))
       ll_dict['numdates']=int(l[2])
       ll_list.append(ll_dict)
       # NOTE(review): fetchone() returns one row; confirm whether
       # fetchall() was intended for the full date list.
       sqls='select distinct(im_date) from fixed_timelapse_times where lat=%s and lng=%s'%(str(l[0]),str(l[1]))
       cursor.execute(sqls)
       dates=cursor.fetchone()
       lld_dict['%s_%s'%(str(l[0]),str(l[1]))]=[str(d) for d in dates]
     db.close()
     request.session['lld_dict']=lld_dict
   else: #Different file for training/test vs all timelapse
     #f=open('/imagenetdb/tgebru/scrape/lat_lng_rot_url.txt','rb')

     #Validation set for 2013 housing data
     #f=open('/imagenetdb3/tgebru/cvpr2016/housing_data/train_test_split_2013/housing_2013_class_val.txt','rb')

     #Only loaded images
     #f=open('/afs/cs.stanford.edu/u/tgebru/cvpr2016/loaded_lat_lng_rot_url.txt', 'rb')
     NUM_SAMPLES=1000
     #lines=f.readlines()
     #lines=random.sample(lines,UM_SAMPLES)
     #f.close()
     lld_dict={}

     #Load lat,lng,dates dict
     with open('/afs/cs.stanford.edu/u/tgebru/cvpr2016/ipython_code/kings_lat_lng_date_dict.pickle','rb') as f:
        lat_lng_date_dict=pickle.load(f)

     '''
     for l in lines: 
       #uncomment for all timelapse
       parts=l.split('\t') 
       lat=parts[0].split('_')[0].strip()
       lng=parts[0].split('_')[1].strip()
       date=parts[-1].split('_')[-1][0:-5].strip()

       #Uncomment for 2013 housing data
       #lat=l.split('/')[-1].split('_')[0].strip()
       #lng=l.split('/')[-1].split('_')[1].strip()
       #date=l.split('/')[-1].split('_')[-1].split(' ')[0][0:-4].strip()
       
       #Different file for training/test vs all timelapse
       lat_lng='%s_%s'%(lat,lng) 
       if lat_lng in lld_dict.keys():
         lld_dict[lat_lng].append(date)
       else:
         lld_dict[lat_lng]=[date]
     '''
     #Now create a list to send to the view
     #for k in lld_dict.keys():
     keys=random.sample(lat_lng_date_dict.keys(),NUM_SAMPLES)
     for k in keys:
       ll_dict={}
       ll_dict['gps']=k
       dates=list(set(lat_lng_date_dict[k].keys()))
       numdates=len(dates)
       ll_dict['numdates']=numdates
       lld_dict[k]=dates
       #Only want to see images with multiple years
       if numdates > 1:
         ll_list.append(ll_dict)
     request.session['lld_dict']=lld_dict

   return ll_list
import sys
sys.path.append('/imagenetdb/tgebru/')
from mysql_utils import connect_to_db

# Drop every table in `demo` whose name matches %SF1%.
# NOTE(review): the connection is never closed in this variant.
db=connect_to_db('demo')
cursor=db.cursor()

sql_s='show tables from demo like "%SF1%"'
cursor.execute(sql_s)
tables=cursor.fetchall()
for t in tables:
  # Table names come from the DB itself, so interpolation is safe here.
  sql_s='drop table %s'%t[0]
  print sql_s
  cursor.execute(sql_s)

import sys
sys.path.append('/imagenetdb/tgebru/')
from mysql_utils import connect_to_db

# Back-fill country / is_foreign on car_metadata from a makes CSV.
# Each line of /tmp/makes.csv is "<make>,<country>,<is_foreign>".
countries=open('/tmp/makes.csv','rb').readlines()
db=connect_to_db('geocars')
cursor=db.cursor()
for c in countries:
  parts=c.split(',')
  # Group ids for all synsets of this make.
  sql='select group_id from synsets where make="%s" and group_id is not null'%parts[0].strip()
  print sql
  cursor.execute(sql)
  group_ids=cursor.fetchall()
  for g in group_ids:
    print parts[0],g
    # Values come from a local, trusted CSV; interpolated into SQL.
    sql='update car_metadata set country="%s",is_foreign=%s where group_id=%s'%(parts[1].strip(),parts[2].strip(),g[0])
    print sql
    cursor.execute(sql)
db.close()
Пример #29
0
 def __init__(self, queue):
     # Worker thread setup: keeps its own 'all_cars' connection/cursor.
     threading.Thread.__init__(self)
     self.queue = queue
     self.db = connect_to_db('all_cars')
     self.cursor = self.db.cursor()
import sys
sys.path.append('/imagenetdb/tgebru/')
from mysql_utils import connect_to_db
db = connect_to_db('boston_cars')
cursor = db.cursor()
import pickle

# Print summary statistics for the MA detected-cars data.
#Number of All images
sql_s = 'select count(distinct(im_name)) from ma_detected_cars'
print sql_s
cursor.execute(sql_s)
num_ims = cursor.fetchall()
print 'number of sampled images in MA....'
print num_ims

#Number of images per zipcode
zipcodes_sql = 'select distinct(zipcode),count(distinct(im_name))c from ma_detected_cars m,demo.latlong_fpis l where m.lat=l.lat and m.lng=l.lng group by zipcode order by c'
print zipcodes_sql
cursor.execute(zipcodes_sql)
zipcodes_num_ims = cursor.fetchall()
zip_dict = {}
for z in zipcodes_num_ims:
    zip_dict[z[0]] = z[1]

print(zipcodes_num_ims)

#Number of all ground truth images in the 3 cities
gt_sql = 'select distinct(zip_code),sum(veh_tot)s from grid250m_attributes a,grid_quarters_public g where a.g250m_id=g.g250m_id and quarter="2010_q2" and (muni_id=35 or muni_id=281 or muni_id=348) group by zip_code'

print gt_sql
cursor.execute(gt_sql)
# NOTE(review): results of gt_sql are never fetched in the visible code —
# presumably the script continues; confirm against the full source.
import sys
sys.path.append('/imagenetdb/tgebru/')
from mysql_utils import connect_to_db
import scipy.io
import numpy
import os

if __name__ == "__main__":
    # Which data source, aggregation level, and split to export.
    census_or_acs = sys.argv[1]  #'acs'
    LEVEL = sys.argv[2]  #'zipcode'
    train_or_val = sys.argv[3]

    db_name = 'all_cars'
    db = connect_to_db(db_name)
    cursor = db.cursor()
    root_dir = '/imagenetdb3/mysql_tmp_dir/car_census'

    census_var_name_f = '%s_var_names.txt' % census_or_acs
    census_variables_f = '%s_%s_variables.txt' % (train_or_val, census_or_acs)

    car_meta_name_f = 'car_meta_names.txt'
    car_attributes_f = '%s_car_attributes.txt' % (train_or_val)
    image_names_f = '%s_image_names.txt' % (train_or_val)

    # First comma-separated field of each line is the variable name.
    census_var_file_name = '%s_variables.txt' % census_or_acs
    with open(census_var_file_name) as f:  # original leaked the handle
        census_vars = f.readlines()
    # insert(i, ...) with a manually bumped counter was just an append;
    # a comprehension says the same thing directly.
    census_variables = [c.split(',')[0].strip() for c in census_vars]
import sys
sys.path.append('/imagenetdb/tgebru/')
from mysql_utils import connect_to_db
db=connect_to_db('boston_cars')
cursor=db.cursor()
import pickle

# Unformatted duplicate of the MA detected-cars statistics script.
#Number of All images
sql_s='select count(distinct(im_name)) from ma_detected_cars'
print sql_s
cursor.execute(sql_s)
num_ims=cursor.fetchall()
print 'number of sampled images in MA....'
print num_ims

#Number of images per zipcode
zipcodes_sql='select distinct(zipcode),count(distinct(im_name))c from ma_detected_cars m,demo.latlong_fpis l where m.lat=l.lat and m.lng=l.lng group by zipcode order by c'
print zipcodes_sql
cursor.execute(zipcodes_sql)
zipcodes_num_ims=cursor.fetchall()
zip_dict={}
for z in zipcodes_num_ims:
  zip_dict[z[0]]=z[1]

print(zipcodes_num_ims)

#Number of all ground truth images in the 3 cities
gt_sql='select distinct(zip_code),sum(veh_tot)s from grid250m_attributes a,grid_quarters_public g where a.g250m_id=g.g250m_id and quarter="2010_q2" and (muni_id=35 or muni_id=281 or muni_id=348) group by zip_code'

print gt_sql
cursor.execute(gt_sql)
# NOTE(review): results of gt_sql are never fetched here — presumably
# the script continues; confirm against the full source.