def searchByMake(make):
    """Render the geo visualization for every listing of a given make."""
    e = turo.search()
    # Cache the visualization data under dataset/<make>Viz.json.
    f = e.searchByMake(make, "dataset/{}Viz.json".format(make))
    print(f)
    return render_template(
        "geoViz.html", DATABASE=f, titleVal="Make: {}".format(make))
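# No @app.route decorators appear in this section, though these are clearly
# Flask view functions. A hedged sketch of how they might be registered,
# assuming an existing ``app = Flask(__name__)`` object and hypothetical URL
# patterns (neither is confirmed by the source):
#
#   app.add_url_rule("/make/<make>", view_func=searchByMake)
#   app.add_url_rule("/vehicle/<int:id_val>", view_func=getSingleVehicle)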
def getSingleVehicle(id_val):
    """Render the detail page for a single vehicle listing."""
    info = {}
    e = turo.search()
    vals = e.searchSingleID(id_val)
    if len(vals) == 0:
        # Unknown ID: fall back to the "new search" page.
        return redirect(url_for("getNew"))
    vals = vals[0]
    # Zip the raw record up with its column names.
    for i, val in enumerate(vals):
        info[ALL_KEYS[i]] = val
    info['avg_price'] = e.getAveragePriceModel(info['vehicle_model'])
    info['total_count'] = e.getTotalModel(info['vehicle_model'])
    info['price_diff'] = SocioEconomics.Status(
        extract_zip(info['location_address']))
    # Placeholder sidebar items until real related-listing data is wired in.
    tempInfo = [{"name": 'test{}'.format(i), "count": random.randint(1, 100)}
                for i in range(10)]
    info = clean_data(info)
    return render_template(
        "resultPage.html",
        result=info,
        newItems=tempInfo,
        modelInfo=MODEL_INFO,
        # Position of this vehicle's make within MODEL_INFO (keyed by 'model').
        myIndex=[x['model'] for x in MODEL_INFO].index(info['vehicle_make']))
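# ``extract_zip`` and ``clean_data`` (and the ALL_KEYS / MODEL_INFO constants)
# are defined elsewhere in the project. For orientation, a minimal sketch of
# what an extract_zip-style helper could look like -- an assumed regex-based
# implementation, not the project's actual code:

import re

def _extract_zip_sketch(address):
    """Hypothetical helper: return the first 5-digit US ZIP in an address."""
    match = re.search(r"\b(\d{5})(?:-\d{4})?\b", address)
    return match.group(1) if match else None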
def searchByOwnerID(id_val):
    """Render the geo visualization for every listing owned by a user."""
    e = turo.search()
    f = e.searchUserID(id_val)
    return render_template(
        "geoViz.html", DATABASE=f, titleVal="Owner ID: {}".format(id_val))
def searchByVehicleID(id_val):
    """Render the geo visualization for a single vehicle ID."""
    e = turo.search()
    f = e.searchVehicleID(id_val)
    print(f)
    return render_template(
        "geoViz.html", DATABASE=f, titleVal="Vehicle ID: {}".format(id_val))
def searchByModel(model):
    """Render the geo visualization for every listing of a given model."""
    e = turo.search()
    # The model arrives URL-encoded; undo "%20" and normalize case.
    model_name = model.replace("%20", " ").lower()
    print(model_name)
    f = e.searchByModel(
        model_name, "dataset/{}Viz.json".format(model.replace("%20", "")))
    print(f)
    return render_template(
        "geoViz.html", DATABASE=f, titleVal="Model: {}".format(model))
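# The hand-rolled ``.replace("%20", " ")`` above only undoes encoded spaces.
# If other percent-escapes ever show up in the path, urllib's standard
# decoder handles them all -- sketched here as an optional alternative, not
# what the project currently uses:

from urllib.parse import unquote

def _decode_model_sketch(model):
    """Hypothetical helper: fully URL-decode and normalize a model segment."""
    return unquote(model).lower()  # handles %20, %2F, %C3%A9, ...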
def getMoreInfo(urls):
    """Worker thread: scrape extra details for each listing URL into the
    shared infoList buffer. (Only the tail of this function appears in the
    original section; the def/loop/try skeleton is a minimal reconstruction.)
    """
    global infoList
    for url in urls:
        try:
            # ... per-URL scraping of `url` happens here (elided) ...
            print(len(infoList))
            # Flush the shared buffer to disk once it passes 1000 records.
            # ``with lock:`` releases the lock even if json.dump raises,
            # which the original acquire()/release() pair did not guarantee.
            with lock:
                if len(infoList) > 1000:
                    e = 'moreInfo{}.json'.format(random.randint(1, 50))
                    with open(e, 'w') as outfile:
                        json.dump(infoList, outfile)
                    print("Saved to {}".format(e))
                    infoList = []
        except Exception as exp:
            print(exp)


if __name__ == '__main__':
    infoList = []
    a = turo.search()
    # Collect every listing URL from the scraped database.
    listOfURLs = [listing['vehicle']['url'] for listing in a.database]
    # Split the URL list into THREADS roughly equal chunks, one per worker.
    listOfURLs = chunks(listOfURLs, int(len(listOfURLs) / THREADS))
    threads = [
        threading.Thread(target=getMoreInfo, args=(ar, ))
        for ar in listOfURLs
    ]
    for thread in threads:
        thread.start()
    for thread in threads:
        thread.join()
    # Final flush of whatever remains in the buffer. Note that the random
    # filename can collide with one written above and silently overwrite it.
    e = 'moreInfo{}.json'.format(random.randint(1, 50))
    with open(e, 'w') as outfile:
        json.dump(infoList, outfile)
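# ``chunks`` and ``THREADS`` are defined elsewhere in the project. A minimal
# sketch of a helper matching the (list, chunk_size) call above -- an
# assumption about its behavior, not the project's actual implementation:

def _chunks_sketch(lst, n):
    """Yield successive n-item slices of lst; the last slice may be shorter."""
    for i in range(0, len(lst), n):
        yield lst[i:i + n]

# e.g. list(_chunks_sketch([1, 2, 3, 4, 5], 2)) -> [[1, 2], [3, 4], [5]]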