def add_location(trip_id):
    """Gather location information about a trip.

    GET renders the location-creation form; POST builds a Location from the
    submitted form fields, associates it with the trip via the association
    table (Location.trips), and redirects to the user's journal.
    """
    trip = Trip.query.get(trip_id)
    if request.method == "POST":
        user_id = session["user_id"]
        name = request.form["name"]
        address = request.form["address"]
        city = request.form["city"]
        state = request.form["state"]
        country = request.form["country"]
        location = Location(user_id=user_id,
                            address=address,
                            city=city,
                            state=state,
                            country=country,
                            name=name)
        # Populate the association table linking this location to its trip.
        location.trips = [trip]
        db.session.add(location)  # if refactored with a ModelMixin, could do location.save
        db.session.commit()
        # Removed: unused local `location_id` (was assigned and never read).
        return redirect(f"/user_journal/{user_id}")
    else:
        return render_template("create_location.html", trip_id=trip_id)
def test_apply_action_collide_with_ship(self):
    """Applying a move that drives one ship into another must raise."""
    moving_ship = Ship('1', Location(150, 100), 0, 8, 6)
    blocking_ship = Ship('2', Location(155, 100), 0, 8, 6)
    env = Environment(300, 200, [moving_ship, blocking_ship])
    move = Action(moving_ship.uid, 0, True)
    with self.assertRaises(Exception):
        env.apply_action(move)
def test_init(self):
    """The constructor stores uid, location, orientation, size and speed."""
    subject = Ship('uid', Location(1, 2), 90, 8, 12)
    self.assertEqual('uid', subject.uid)
    self.assertEqual(Location(1, 2), subject.location)
    self.assertEqual(90, subject.orientation)
    self.assertEqual(8, subject.size)
    self.assertEqual(12, subject.speed)
def test_apply_action(self):
    """A thrusting action moves the ship by its speed along its heading."""
    vessel = Ship('uid', Location(150, 100), 90, 8, 6)
    env = Environment(300, 200, [vessel])
    env.apply_action(Action(vessel.uid, 0, True))
    # Moved up 6 (the ship's speed).
    self.assertEqual(Location(150, 94), vessel.location)
def load_locations():
    """Load data from locations.csv into locations table in db."""
    # Delete all rows in table, so if we need to run this a second time,
    # we won't be trying to add duplicate rows.
    Location.query.delete()
    with open('seed_data/location.csv', 'r') as f:
        reader = csv.reader(f)
        location_list = list(reader)
    del location_list[0]  # drop the CSV header row
    # Read location list and insert data. (Removed: a dict `d` that was
    # populated row-by-row but never read anywhere.)
    for row in location_list:
        location_id, district_id, state_name = row
        # An empty district field in the CSV means "no district": store NULL.
        # `district_id or None` collapses the old duplicated if/else branches.
        loc = Location(location_id=location_id,
                       district_id=district_id or None,
                       state_name=state_name)
        # We need to add to the session or it won't ever be stored.
        db.session.add(loc)
    # Once we're done, we should commit our work.
    db.session.commit()
def put_random_scan(): insert_time = utils.random_time() #Create the location map_id = maps.random_map_id() location = Location(map_id=map_id, x=utils.random_int(0, 300), y=utils.random_int(0, 600), timestamp=insert_time) location.put() #Base station base_mac = BASE_APS_MAC_ADDRESSES[utils.random_int( 0, len(BASE_APS_MAC_ADDRESSES) - 1)] base_station = base_stations.get_base_station_with_mac( utils.mac_string_to_int(base_mac)) #Create or update the client mac_int = utils.mac_string_to_int(CLIENT_MAC_ADDRESSES[utils.random_int( 0, len(CLIENT_MAC_ADDRESSES) - 1)]) client = clients.create_or_update_client(mac_int, base_station, insert_time) #Create the scan scan = Scan(map_key=map_id, client=client.key(), base_ap=base_station.key(), ss=utils.random_int(-90, -25), timestamp=insert_time, location=location) scan.put() logging.info(scan.key())
def check_windows_conditions():
    """Decide whether to advise opening or closing the windows, and notify.

    Compares the indoor temperature against the outdoor trend (now vs. 30
    minutes ago). When the advice flips and the last recorded decision is
    more than an hour old, publishes a notification on the ``mijia-notify``
    RabbitMQ queue and saves the new WindowsDecision.
    """
    indoors = Location.select().where(Location.outdoor == False).first()
    local_outdoors = Location.select().where(Location.outdoor == True,
                                             Location.hidden == False,
                                             Location.remote == True).first()
    record_indoors = get_last_record_for_location(indoors)
    record_outdoors = get_last_record_for_location(local_outdoors)
    record_30_min_ago_outdoors = get_last_record_for_location(
        local_outdoors, 30)
    last_decision = WindowsDecision.select().order_by(
        WindowsDecision.date.desc()).first()
    delta_degrees = 0.5
    close_windows = None
    if (not last_decision or not last_decision.close) and \
            record_30_min_ago_outdoors.temperature - record_outdoors.temperature < -delta_degrees and \
            record_indoors.temperature - record_outdoors.temperature < -delta_degrees \
            and record_indoors.temperature > 23:
        # Temperature outdoors is growing.
        # Now temperature is lower indoors than outdoors by more than
        # delta_degrees degrees -> send notification to close windows.
        close_windows = True
    if (not last_decision or last_decision.close) and \
            record_30_min_ago_outdoors.temperature - record_outdoors.temperature > delta_degrees and \
            record_outdoors.temperature - record_indoors.temperature < -delta_degrees and \
            record_indoors.temperature > 25:
        # Temperature is lowering.
        # Now temperature is lower outdoors than indoors by more than
        # delta_degrees degrees -> send notification to open windows.
        close_windows = False
    now = datetime.datetime.now()
    # Only notify when the advice changed and we haven't notified recently.
    if close_windows is not None and (
            not last_decision or
            (now - last_decision.date) > datetime.timedelta(hours=1)):
        connection = pika.BlockingConnection(
            pika.ConnectionParameters(host=CONFIG['rabbitmq-host'],
                                      credentials=pika.PlainCredentials(
                                          CONFIG['rabbitmq-user'],
                                          CONFIG['rabbitmq-password'])))
        channel = connection.channel()
        channel.basic_qos(prefetch_count=1)
        channel.basic_publish(
            exchange='',
            routing_key='mijia-notify',
            properties=pika.BasicProperties(content_type='application/json'),
            # Fixed: the f-string literal was broken across a physical line
            # (a syntax error); the line break belongs inside it as '\n'.
            body=json.dumps({
                'text': f'<pre>Outdoor temp: {record_outdoors.temperature}ºC\n'
                        f'Indoor temp: {record_indoors.temperature}ºC</pre>\n'
                        f'===> <b>{"Close" if close_windows else "Open"} the windows</b>'
            }))
        WindowsDecision(date=now, close=close_windows).save()
def test_init(self):
    """Environment stores width, height and the ships in order."""
    first = Ship('1', Location(1, 2), 90, 8, 6)
    second = Ship('2', Location(1, 2), 90, 12, 4)
    env = Environment(300, 200, [first, second])
    self.assertEqual(300, env.width)
    self.assertEqual(200, env.height)
    self.assertEqual(2, len(env.ships))
    self.assertEqual(first, env.ships[0])
    self.assertEqual(second, env.ships[1])
def test_to_dict(self):
    """to_dict serializes dimensions plus each ship's own dict form."""
    alpha = Ship('1', Location(1, 2), 90, 8, 6)
    beta = Ship('2', Location(1, 2), 90, 12, 4)
    env = Environment(300, 200, [alpha, beta])
    self.assertEqual(
        {
            'width': 300,
            'height': 200,
            'ships': [alpha.to_dict(), beta.to_dict()],
        },
        env.to_dict())
def create_test_world():
    """Seed a small test world: four locations, the paths between them,
    and one active session positioned in the hotel."""
    world = add(World(name='Test World'))
    portal = add(Location(name='Portal', world=world))
    plaza = add(Location(name='Plaza', world=world))
    hotel = add(Location(name='Old Grand Hotel', world=world))
    basement = add(Location(name='Hotel Basement', world=world))
    # Paths: the description is what the player sees on arriving at the
    # destination via this path.
    add(
        Path(
            start=portal,
            destination=hotel,
            description=
            "YOU ARE IN THE HOTEL. THERE'S A DOOR TO THE BASEMENT IN FRONT OF YOU."
        ))
    add(
        Path(
            start=plaza,
            destination=hotel,
            description=
            "YOU ARE IN THE HOTEL. THERE'S A DOOR TO THE BASEMENT IN FRONT OF YOU."
        ))
    add(
        Path(start=hotel,
             destination=plaza,
             description="YOU ARE IN THE PLAZA, FACING THE HOTEL."))
    add(
        Path(
            start=hotel,
            destination=basement,
            description=
            "YOU ARE IN THE BASEMENT. THERE ARE STAIRS UP TO THE HOTEL LOBBY BEHIND YOU."
        ))
    add(
        Path(
            start=basement,
            destination=hotel,
            description=
            "YOU ARE IN THE HOTEL LOBBY. THERE'S AN EXIT TO THE PLAZA IN FRONT OF YOU."
        ))
    # One active session so the test world is immediately playable.
    add(
        Session(code='TestSession1',
                active=True,
                current_location=hotel,
                previous_location=portal))
    db.session.commit()
def load_locations():
    """Look up city coordinates using Google Geocode API and write to database."""
    # Hoisted out of the loop: the client was previously re-created for every
    # CSV row, which wastes work and connections.
    # NOTE(review): the API key is hard-coded in source; it should be loaded
    # from an environment variable or config file and the key rotated.
    gmaps = googlemaps.Client(key='AIzaSyDhCeDrRa_Fs_gLbPUHp-UsHPKb53LIFlw')
    with open('data/500cities.csv') as f:
        reader = csv.reader(f)
        for row in reader:
            city, state, companies_qty = row
            citystate = city + ', ' + state
            companies_qty = int(companies_qty)
            geocode_result = gmaps.geocode(citystate)
            lat = geocode_result[0]['geometry']['location']['lat']
            lng = geocode_result[0]['geometry']['location']['lng']
            location = Location(city=city,
                                state=state,
                                lat=lat,
                                lng=lng,
                                companies_qty=companies_qty)
            db.session.add(location)
            db.session.commit()
def create_location(plant_location):
    """Persist a new plant-location row and return it."""
    new_loc = Location(plant_location=plant_location)
    db.session.add(new_loc)
    db.session.commit()
    return new_loc
def populate(self):
    """Rebuild this cache with up to 30 recently-modified public profiles.

    Which users qualify depends on self.location: a region pulls from its
    hub places, a single place filters by homeplace, and 'global' applies
    no place filter at all. All branches require public, active profiles
    with a non-empty description modified within the last year.
    """
    # clear the existing cache
    for x in range(0, len(self)):
        self.pop()
    if self.location != 'global':
        location = Location.get(self.location)
        if location.is_region:
            # Region: match users whose home place is any of the region's hubs.
            hubs = location.has_hubs
            profile_select = User.select(AND(IN(User.q.homeplaceID, hubs), User.q.public_field==1, User.q.active==1, User.q.description != u"", User.q.modified > datetime.datetime.now() - datetime.timedelta(days=365))).orderBy('modified').reversed()[:30]
        else:
            # Single place: match users whose home place is exactly this location.
            profile_select = User.select(AND(User.q.homeplaceID==location, User.q.public_field==1, User.q.active==1, User.q.description != u"", User.q.modified > datetime.datetime.now() - datetime.timedelta(days=365))).orderBy('modified').reversed()[:30]
    else:
        # Global: no place filter.
        profile_select = User.select(AND(User.q.public_field==1, User.q.active==1, User.q.description != u"", User.q.modified > datetime.datetime.now() - datetime.timedelta(days=365))).orderBy('modified').reversed()[:30]
    for profile in profile_select:
        cache_obj = self.objectcache_factory(profile)
        self.append(cache_obj)
def get_empty_location(self):
    """Return a list of Location objects for every empty (value 0) cell.

    Scans the private grid row by row, in row-major order; column count is
    taken from the first row, as in the original loop.
    """
    # Comprehension replaces the manual append loop (same iteration order).
    return [Location(i, j)
            for i in range(len(self.__map))
            for j in range(len(self.__map[0]))
            if self.__map[i][j] == 0]
def load_locations():
    """Populates locations with loactions from ex.locations"""
    print("Locations")
    # Delete all rows in table, so if we need to run this a second time,
    # we won't be trying to add duplicate users
    # need to delete rating db also because it has forigen keys from users
    Found.query.delete()
    Lost.query.delete()
    Location.query.delete()
    # Read ex.Location file and insert data (pipe-delimited rows)
    for row in open("example_data/ex.location"):
        row = row.rstrip()
        title, address1, address2, city, state, zipcode, lat, lng = row.split(
            "|")
        # NOTE(review): lat and lng are parsed but never stored on Location —
        # confirm whether the model has those columns or they can be dropped.
        location = Location(title=title,
                            address1=address1,
                            address2=address2,
                            city=city,
                            state=state,
                            zipcode=zipcode)
        # We need to add to the session or it won't ever be stored
        db.session.add(location)
    # Once we're done, we should commit our work
    db.session.commit()
def get(self, slug):
    """Render a location page with its tap-ins grouped by calendar day.

    Redirects to the new-location form when the slug doesn't exist.
    """
    # NOTE(review): `people` is never used below — candidate for removal.
    people = []
    q = Location.gql("WHERE slug = :slug", slug = slug)
    location = q.get()
    if location:
        tapins = Tapin.gql("WHERE location = :location ORDER BY date", location = location)
        # Group tap-ins by date for display.
        from collections import defaultdict
        grouped = defaultdict(list)
        for tapin in tapins:
            grouped[tapin.date.date()].append(tapin)
        # Optional ?tapin= param highlights a just-created tap-in.
        tapin = self.request.get('tapin')
        template = jinja_environment.get_template("location.html")
        self.response.out.write(template.render({
            "user": users.get_current_user(),
            "location": location,
            "tapins": grouped,
            "tapin": tapin
        }))
    else:
        self.redirect("/new-location?slug=%s&message=not-found" % slug)
def poll_leganes_wu():
    """Poll Weather Underground for the Leganés station and store a Record.

    Best-effort: retries up to 5 times with a 60 s pause on any failure
    (network error, missing data, etc.); gives up silently after that.
    """
    attempts = 0
    while attempts < 5:
        try:
            leganes_location = Location.get(Location.name == 'leganes')
            response = requests.get(
                'https://api.weather.com/v2/pws/observations/current',
                params={
                    'apiKey': CONFIG['wu_api_key'],
                    'stationId': 'ILEGAN9',
                    'numericPrecision': 'decimal',
                    'format': 'json',
                    'units': 'm'
                })
            data = response.json().get('observations')[0]
            # Key the record by local (naive) Madrid time; get_or_create
            # makes repeated polling idempotent per timestamp.
            Record.get_or_create(date=arrow.get(
                data['epoch']).to('Europe/Madrid').datetime.replace(
                    tzinfo=None),
                                 location=leganes_location,
                                 defaults={
                                     'temperature': data['metric']['temp'],
                                     'humidity': data['humidity']
                                 })
            break
        except Exception:
            # Deliberate broad catch: wait a minute and retry.
            time.sleep(60)
            attempts += 1
def __get_empty_location(self):
    """Recompute the cached list of empty (value 0) cells on the board."""
    # Clear previous results each time so stale positions don't affect this
    # pass. (Original comment was in Chinese.)
    self.__list_empty_location.clear()
    for r in range(len(self.__map)):
        for c in range(len(self.__map[r])):
            if self.__map[r][c] == 0:
                self.__list_empty_location.append(Location(r, c))
def load_locations(): """Load locations from locations.csv into database.""" print "Locations" for row in open("seed_data/locations.csv"): row = row.rstrip().split(",") for i, element in enumerate(row): if element == "": row[i] = None id, name, city, state, country, latitude, longitude = row # TODO Make locations singular locations = Location( id=id, name=name, city=city, state=state, country=country, latitude=latitude, longitude=longitude, ) db.session.add(locations) db.session.commit()
def select_all_locations(self):
    """Return Location stubs for rows that still need geodata.

    Selects id/address/city/state for every row whose address fields are
    present but whose ``location`` or ``zip_code`` is still NULL, and wraps
    each row in a Location padded with three None placeholders.

    Raises: re-raises any cursor error after closing the connection.
    """
    expected_columns = [
        'id', 'address', 'city', 'state', 'zip_code', 'location'
    ]
    # self.validate_columns(*expected_columns)
    sql = '''
        SELECT id, address, city, state
        FROM {}
        WHERE address IS NOT NULL
        AND city IS NOT NULL
        AND state IS NOT NULL
        AND ( location IS NULL OR zip_code IS NULL )
    '''.format(self.locations_table)
    with self.connection.cursor() as cursor:
        try:
            cursor.execute(sql)
            return [Location(*(*row, None, None, None)) for row in cursor]
        except Exception:
            self.connection.close()
            # Bare raise preserves the original traceback; `raise e` would
            # rewrite it at this line.
            raise
def __calculate_empty_location(self):
    """Refresh the cached list of empty (zero) cells of the 4x4 grid."""
    self.__list_empty_location.clear()
    for row in range(4):
        for col in range(4):
            if self.__map[row][col] == 0:
                self.__list_empty_location.append(Location(row, col))
def get_empty_location(self):
    """Rebuild self.list_empty_location with every zero cell of the 4x4 map."""
    self.list_empty_location.clear()
    for row_idx in range(4):
        for col_idx in range(4):
            if self.map[row_idx][col_idx] == 0:
                self.list_empty_location.append(Location(row_idx, col_idx))
def location_request(name):
    """Function that handles appending or fetching a location.

    PUT: creates a Location from form latitude/longitude (plus a fetched
    elevation) and returns 201; malformed input returns 400.
    GET: returns the named location as JSON, or 204 when absent.
    """
    print(request.form)
    if request.method == 'PUT':
        try:
            lat = float(request.form["latitude"])
            lon = float(request.form["longitude"])
        except (KeyError, ValueError, TypeError):
            # Missing or non-numeric coordinates -> Bad request. Narrowed
            # from a bare `except:` which would also have swallowed
            # SystemExit/KeyboardInterrupt.
            return "", 400
        db.session.add(
            Location(
                name=name,
                latitude=lat,
                longitude=lon,
                # NOTE(review): helper name is misspelled ("elevaion") at its
                # definition site; rename there first, then here.
                elevation=fetch_elevaion(((lat, lon), ))[0],
            ))
        db.session.commit()
        return jsonify({"message": "success"}), 201
    if request.method == 'GET':
        d = db.session.query(Location).filter_by(name=name).first()
        if not d:
            return '{}', 204
        return jsonify({
            "name": d.name,
            "latitude": d.latitude,
            "longitude": d.longitude,
            "elevation": d.elevation,
        }), 200
def __find_zero_loc(self):
    """Collect the coordinates of every zero cell into self.list_location."""
    # Reset before scanning so results from a previous call don't linger.
    self.list_location.clear()
    for r in range(len(self.__map)):
        for c in range(len(self.__map[r])):
            if self.__map[r][c] == 0:
                loc = Location(r, c)
                self.list_location.append(loc)
def load_location(filename):
    """Geocode each place name in *filename* and store it as a Location.

    Each line is split on ' (' and the leading text used as the place name;
    a forward + reverse geocode yields coordinates and a structured address.
    """
    # Hoisted out of the loop: the geocoder was previously re-instantiated
    # for every input line.
    # NOTE(review): recent geopy releases require Nominatim(user_agent=...);
    # confirm the installed version before deploying.
    geolocator = Nominatim()
    for line in open(filename):
        line = line.rstrip().split(' (')
        loc_name = line[0]
        input_loc = geolocator.geocode(loc_name)
        latlng = str(input_loc.latitude) + ',' + str(input_loc.longitude)
        # Reverse-geocode to get a structured address in English.
        loc2 = geolocator.reverse(latlng, language="en")
        lat = loc2.raw['lat']
        lng = loc2.raw['lon']
        country = loc2.raw['address']['country']
        state = loc2.raw['address']['state']
        location = Location(lat=lat,
                            lng=lng,
                            loc_name=loc_name,
                            country=country,
                            state=state)
        db.session.add(location)
    print("Out of loop")
    db.session.commit()
    print("location db session commit")
def report_lost():
    """adds new item to losts"""
    title = request.form.get('title')
    description = request.form.get('description')
    location = request.form.get('location')
    reward = request.form.get('reward')
    print(reward)
    # Treat an absent or JS-literal 'undefined' reward as no reward.
    if not reward or reward == 'undefined':
        reward = None
    # Geocode the free-text location; reuse an existing Location row when
    # one already matches these coordinates.
    geocoding_info = get_geocoding(location)
    location_id = get_location_id(geocoding_info['lat'], geocoding_info['lng'])
    if not location_id:
        new_location = Location(address1=geocoding_info['street'],
                                city=geocoding_info['city'],
                                zipcode=geocoding_info['zipcode'],
                                state=geocoding_info['state'],
                                lat=geocoding_info['lat'],
                                lng=geocoding_info['lng'])
        db.session.add(new_location)
        # location_id = get_location_id(lat, lng)
        # Flush so the new row gets its primary key without committing yet.
        db.session.flush()
        location_id = new_location.location_id
    #add item to found database
    new_lost = Lost(title=title,
                    description=description,
                    location_id=location_id,
                    user_id=session['user_id'],
                    time=datetime.now(),
                    reward=reward)
    db.session.add(new_lost)
    # Flush to obtain the new lost_id for naming the uploaded image.
    db.session.flush()
    new_lost_id = new_lost.lost_id
    #if an image was uploaded, store it in the database
    if 'file' in request.files:
        file = request.files['file']
        img_name = 'l' + str(new_lost_id) + file.filename
        new_image = Image(img_name=img_name,
                          img_data=file.read(),
                          lost_id=new_lost_id)
        db.session.add(new_image)
    db.session.commit()
    return redirect('/lost')
def put_random_scan(): insert_time = utils.random_time() #Create the location map_id = maps.random_map_id() location = Location(map_id=map_id, x=utils.random_int(0, 300), y=utils.random_int(0, 600), timestamp=insert_time) location.put() #Base station base_mac = BASE_APS_MAC_ADDRESSES[utils.random_int(0, len(BASE_APS_MAC_ADDRESSES) - 1)] base_station = base_stations.get_base_station_with_mac(utils.mac_string_to_int(base_mac)) #Create or update the client mac_int = utils.mac_string_to_int(CLIENT_MAC_ADDRESSES[utils.random_int(0, len(CLIENT_MAC_ADDRESSES) - 1)]) client = clients.create_or_update_client(mac_int, base_station, insert_time) #Create the scan scan = Scan(map_key=map_id, client=client.key(), base_ap=base_station.key(), ss=utils.random_int(-90, -25), timestamp=insert_time, location=location) scan.put() logging.info(scan.key())
def __get_empty_location(self):
    """Recompute the cached list of empty (value 0) cells of the board."""
    # Each time when counting the empty positions, the previous data must be
    # cleared first to avoid affecting this pass's result.
    self.__list_empty_location.clear()
    for r in range(len(self.__map)):
        for c in range(len(self.__map[r])):
            if self.__map[r][c] == 0:
                self.__list_empty_location.append(Location(r, c))
def get(self):
    """Admin handler: wipe all fake entities from the datastore and index."""
    #clear maps
    utils.delete_all_in_index(maps.MAP_INDEX)
    # Delete every entity of each fake-data kind.
    db.delete(BaseStation.all())
    db.delete(Location.all())
    db.delete(Client.all())
    db.delete(Scan.all())
    self.response.out.write("Fake database cleared")
def __get_empty_location(self):
    """Scan the board and cache every empty (value 0) cell's coordinates."""
    # Clear previous results first so stale entries don't affect this pass.
    # (Original comments were in Chinese.)
    self.__list_empty_location.clear()
    # Collect all blank positions.
    for row in range(len(self.__map)):
        for col in range(len(self.__map[row])):
            if self.__map[row][col] == 0:
                self.__list_empty_location.append(Location(row, col))
def __calculate_empty_location(self):
    """Recompute the cached list of empty (value 0) cells in the grid."""
    # Clear stale results so repeated calls don't accumulate duplicates.
    self.__list_empty_location.clear()
    # (Removed commented-out dead code that appended bare (r, c) tuples.)
    for r in range(len(self.__map)):
        for c in range(len(self.__map[r])):
            if self.__map[r][c] == 0:
                self.__list_empty_location.append(Location(r, c))
def load_data():
    """pull data from API and load into db

    Pages through the Refuge Restrooms API 100 results at a time until a
    non-200 response, creating Bathroom, Location, Comment and Rating rows.
    """
    page_num = 1
    while True:
        url = "https://www.refugerestrooms.org:443/api/v1/restrooms.json?per_page=100&page=" + str(page_num)
        results = []
        response = requests.get(url)
        if response.status_code == 200:
            results = response.json()
            page_num += 1
            # loop thru json data
            for v in results:
                # add bathroom
                b = Bathroom(name=v['name'],
                             unisex=v['unisex'],
                             accessible=v['accessible'],
                             changing_table=v['changing_table'])
                db.session.add(b)
                db.session.commit()
                # add location; missing coordinates are stored as 0.00.
                # (Fixed: compare against None with `is`, not `==`.)
                if v['latitude'] is None or v['longitude'] is None:
                    v['latitude'] = 0.00
                    v['longitude'] = 0.00
                l = Location(bathroom_id=b.bathroom_id, street=v['street'],
                             city=v['city'], state=v['state'],
                             country=v['country'], latitude=v['latitude'],
                             longitude=v['longitude'],
                             directions=v['directions'])
                db.session.add(l)
                db.session.commit()
                # add comment
                if len(v['comment']) > 1:
                    c = Comment(comment=v['comment'],
                                bathroom_id=b.bathroom_id,
                                user_id=0)
                    db.session.add(c)
                    db.session.commit()
                # add ratings: a downvote maps to score 2, an upvote to 5
                if v['downvote'] == 1:
                    r = Rating(bathroom_id=b.bathroom_id, user_id=0, score=2)
                    db.session.add(r)
                    db.session.commit()
                elif v['upvote'] == 1:
                    r = Rating(bathroom_id=b.bathroom_id, user_id=0, score=5)
                    db.session.add(r)
                    db.session.commit()
            # Be polite to the API between pages.
            time.sleep(1)
        else:
            break
    return "finished loading data"
def create_location(location):
    """Create, persist, and return a new Location entry."""
    record = Location(location=location)
    db.session.add(record)
    db.session.commit()
    return record
def check_locations(): locations = Location.select().order_by(Location.zip_code.asc()) locations = locations.execute() locations = [l for l in locations] for l in locations: print "Location: " + str(l.zip_code_string) + ' ' + l.city + ', ' + l.state return
def get(self, slug):
    """Record a tap-in at the location identified by *slug*.

    Requires a signed-in user; otherwise redirects to the login URL.
    """
    if users.get_current_user():
        q = Location.gql("WHERE slug = :slug", slug=slug)
        location = q.get()
        if location:
            tapin = Tapin()
            tapin.user = users.get_current_user()
            tapin.location = location.key()
            tapin.put()
            # Bounce back to the location page, highlighting the new tap-in.
            self.redirect("/location/" + slug + "?tapin=" + str(tapin.key()))
        # NOTE(review): no response is written when the slug doesn't match —
        # confirm whether a redirect/404 is expected here.
    else:
        self.redirect(users.create_login_url("/tapin/%s" % slug))
def get_kmeans_graph(): """GETs user location query, runs location through a Location class function called 'get books associated with location' from my Model.py and returns a list of book objects associated with a location with the user's input radius. Books are filtered out based on whether they have a description longer than 20 words.""" user_location_query = flaskrequest.args.get('search-input') print "user_location_query for get kmeans", user_location_query radius = flaskrequest.args.get('radius') print "radius for get kmeans", radius book_obj_list = Location.get_books_associated_with_location(radius, user_location_query) book_obj_list = [book_obj for book_obj in book_obj_list if book_obj.description] book_obj_list = [book_obj for book_obj in book_obj_list if len(book_obj.description.split(" ")) > 20] # from the book_cosine_similarity file kmeans_cluster_html = returns_kmeans_cluster_graph(book_obj_list) # kmeans result return jsonify(kmeans = kmeans_cluster_html)
def post(self):
    """Create a new Location from form fields (slug, name, description).

    Redirects to the location page on success; rejects duplicate slugs.
    """
    slug = self.request.get('slug')
    name = self.request.get('name')
    description = self.request.get('description')
    # A slug must be unique; look for an existing location first.
    location = Location.gql("WHERE slug = :slug", slug=slug).get()
    if location:
        # NOTE(review): 500 is a server-error status; a duplicate slug is a
        # client error (409 Conflict fits better) — confirm with callers
        # before changing.
        self.error(500)
        self.response.out.write('Slug already used')
    else:
        location = Location()
        location.slug = slug
        location.name = name
        location.description = description
        location.put()
        self.redirect("/location/%s" % slug)
def search_for_books(): """Search for books through the homepage by location.""" user_location_query = flaskrequest.args.get('search-input') #both print "user query, ", user_location_query radius = flaskrequest.args.get('radius') print "radius1, ", radius radius = int(radius) print "radius, ", radius print user_location_query #query for books associated with place within 100mi #returns a list of book objects jsonify_search_result_list = [] # json result ##hard code with LA cordinates due to quota limit:## book_obj_list = Location.get_books_associated_with_location(radius, user_location_query) #both book_obj_list = [book_obj for book_obj in book_obj_list if book_obj.description] book_obj_list = [book_obj for book_obj in book_obj_list if len(book_obj.description.split(" ")) > 20] print book_obj_list for book_object in book_obj_list: # json result book_dict = {} keyword_list = [word.keyword for word in book_object.keywords] keywords = filter(None, keyword_list) author_list = [author.author_name for author in book_object.authors if book_object.authors] book_dict["title"] = book_object.title.replace('"', "'") print book_dict["title"] book_dict["subtitle"] = book_object.subtitle book_dict["authors"] = ", ".join(author_list) book_dict["description"] = book_object.description.replace('"', "'") book_dict["thumbnailUrl"] = book_object.thumbnail_url book_dict["previewLink"] = book_object.preview_link book_dict["keywords"] = keywords print book_dict["keywords"] jsonify_search_result_list.append(book_dict) # print book_dict # print jsonify_search_result_list print "search complete" # user_location_query, jsonify_search_result_list = jsonList_query return jsonify(name = jsonify_search_result_list)
def postlisting():
    """Handle a submitted food listing and redirect home."""
    # Handles a new listing being submitted
    title = request.form.get('title')
    title = titlecase(title)
    texture = request.form.get('texture')
    datemade = request.form.get('datemade')
    quantity = request.form.get('quantity')
    freshfrozen = request.form.get('freshfrozen')
    description = request.form.get('description')
    allergens = request.form.getlist('allergens')
    user_id = session['user_id']
    phone_number = request.form.get('phone_number')
    geoCheckbox = request.form.get('geoCheckbox')
    # Strip formatting down to the bare digits; anything that isn't the
    # expected 17-character formatted number is discarded.
    # NOTE(review): slice offsets assume one exact input format — confirm
    # against the client-side phone mask.
    if len(phone_number) == 17:
        phone_number = phone_number[4:7]+phone_number[9:12]+phone_number[13:]
    else:
        phone_number = None
    allergen = Allergen.add_allergen(allergens)
    allergen_id = allergen.allergen_id
    # Only store coordinates when the user opted in via the geo checkbox.
    if geoCheckbox:
        lat = request.form.get('lat')
        lng = request.form.get('lng')
        location = Location.add_location(lat, lng)
        location_id = location.location_id
    else:
        location_id = None
    Food.add_food(title, texture, datemade, quantity, freshfrozen,
                  description, allergen_id, user_id, location_id,
                  phone_number)
    flash('Your mush has been successfully posted!')
    return redirect('/home')
"PR": "PUERTO RICO", "SC": "SOUTH CAROLINA", "KY": "KENTUCKY", "OR": "OREGON", "SD": "SOUTH DAKOTA", } # Org types to scrape ORG_TYPES = ["police department", "fire department", "city hall", "newspaper", "tv station", "hospital"] def setup_db(): database.connect() try: Muni.create_table() # only needed once except Exception, e: pass try: Location.create_table() # only needed once except Exception, e: pass def get_all_items(): for m in Muni.select().where(Muni.success == True): print m.title + " " + str(m.tel) setup_db() get_all_items()
def load_studies(): """Load data from database.txt into Location, Activation, Study tables.""" # Delete all rows in existing tables, so if we need to run this a second time, # we won't add duplicates Location.query.delete() Study.query.delete() Activation.query.delete() skip = True count_studies = 0 # Parse txt file and convert to appropriate data types for seeding for row in open("seed_data/database.txt"): # Skip the header of the txt file if skip: skip = False continue # Stop after the first 5000 rows for now # if count_studies > 5000: # break row = row.rstrip().split("\t") # Information to go into Study, if applicable: pmid = int(row[0]) doi = row[1] title = row[9] authors = row[10] year = int(row[11]) journal = row[12].rstrip() # Information to go into Location, if applicable x = float(row[2]) y = float(row[3]) z = float(row[4]) space = row[5] # Check whether PMID is already in Study; if not, add it to db. study_obj = Study.get_study_by_pmid(pmid) if study_obj is None: study_to_add = Study(pmid=pmid, doi=doi, title=title, authors=authors, year=year, journal=journal) db.session.add(study_to_add) db.session.commit() # Check whether xyz is already in Location; if not, add it to db and # retrieve its location ID (an autoincrementing primary key). # If xyz already in Location, retrieve its location_id. location_obj = Location.check_by_xyz_space(x, y, z, space) if location_obj is None: location_to_add = Location(x_coord=x, y_coord=y, z_coord=z, space=space) db.session.add(location_to_add) db.session.commit() loc_id = Location.check_by_xyz_space(x, y, z, space).location_id else: loc_id = location_obj.location_id # Add activation to db, using location_id identified/generated above activation_to_add = Activation(pmid=pmid, location_id=loc_id) db.session.add(activation_to_add) db.session.commit() # Print where we are and increment counter print "Database.txt seeding row ", count_studies count_studies += 1
def load_studies(): """Loads data from database.txt into Location, Activation, Study tables. File format: PMID \t doi \t x \t y \t z \t space \t peak_id \t table_id \t table_num \t title \t authors \t year \t journal \t Source: Neurosynth database.txt file""" skip = True count_studies = 0 database = open("seed_data/database.txt") # Parse txt file and convert to appropriate data types for seeding for row in database: # Skip the header of the txt file if skip: skip = False continue # Stop after the first 5000 rows for now # if count_studies > 5000: # break row = row.rstrip().split('\t') # Information to go into Study, if applicable: pmid = int(row[0]) doi = row[1] title = row[9] authors = row[10] year = int(row[11]) journal = row[12].rstrip() # Information to go into Location, if applicable x = float(row[2]) y = float(row[3]) z = float(row[4]) # Check whether PMID is already in Study; if not, add it to db. study_obj = Study.get_study_by_pmid(pmid) if study_obj is None: study_to_add = Study(pmid=pmid, doi=doi, title=title, authors=authors, year=year, journal=journal) db.session.add(study_to_add) db.session.commit() # Check whether xyz is already in Location; if not, add it to db and # retrieve its location ID (an autoincrementing primary key). # If xyz already in Location, get its location_id. location_obj = Location.check_by_xyz(x, y, z) if location_obj is None: location_to_add = Location(x_coord=x, y_coord=y, z_coord=z) db.session.add(location_to_add) db.session.commit() loc_id = Location.check_by_xyz(x, y, z).location_id else: loc_id = location_obj.location_id # Add activation to db, using location_id identified/generated above activation_to_add = Activation(pmid=pmid, location_id=loc_id) db.session.add(activation_to_add) db.session.commit() # Print where we are and increment counter print "Database.txt seeding row ", count_studies count_studies += 1 database.close()
# Request a fresh Tor circuit (NEWNYM), then pause to let it establish.
conn = TorCtl.connect(passphrase=TORCTL_PASS)
conn.sendAndRecv('signal newnym\r\n')
conn.close()
time.sleep(10)


def p_print(data):
    """Pretty-print *data* (thin wrapper around pprint)."""
    pprint.pprint(data)


def setup_db():
    """Connect to the database and create tables if they don't exist yet.

    Creation is best-effort: an exception (table already exists) is ignored.
    """
    database.connect()
    try:
        Muni.create_table()  #only needed once
    except Exception, e:
        pass
    try:
        Location.create_table()  #only needed once
    except Exception, e:
        pass


def zipit():
    """Load rows from zipcode.csv (continues beyond this excerpt)."""
    ifile = open('zipcode.csv', "rb")
    reader = csv.reader(ifile)
    rownum = 0
    for row in reader:
        #ex: "zip","city","state","latitude","longitude","timezone","dst"
        # Skip the header row.
        if rownum == 0:
            rownum+=1
def zipit(): ifile = open('zipcode.csv', "rb") reader = csv.reader(ifile) rownum = 0 for row in reader: #ex: "zip","city","state","latitude","longitude","timezone","dst" # F**k the header row. if rownum == 0: rownum+=1 continue if len(row) < 7: print "Skipping " + str(row) continue has_loc = Location.select().where(Location.zip_code == row[0]) has_loc = has_loc.execute() has_loc = [u for u in has_loc] if len(has_loc) > 0: print "Already have Location for zip: " + str(row[0]) continue l = Location() l.zip_code = row[0] l.zip_code_string = str(row[0]) l.city = slugify(row[1]) l.state_code = row[2] l.state = slugify(code_to_state[row[2]]) l.lat = row[3] l.lon = row[4] if l.lat is not "": l.save() print "Saved location: " + str(l) + ' ' + str(l.zip_code) rownum += 1 ifile.close()