def save(request):
    """Fetch places near the requested location/keywords and persist each one.

    Requires 'location' and 'keywords' GET parameters; raises BadRequest
    when either is absent. Returns the list produced by nearby_places.
    """
    location = request.GET.get('location')
    keywords = request.GET.get('keywords')
    if location is None or keywords is None:
        raise BadRequest('Missing location or keywords parameter')
    places = nearby_places(location, keywords)
    for place_name in places:
        # timestamp each record at save time, matching per-row save semantics
        Place(name=place_name, date_saved=timezone.now()).save()
    return places
def save(request):
    """Look up nearby places for the given query and store them in the DB.

    Expects 'location' and 'keywords' in request.GET; raises BadRequest
    if either is missing. Returns the places returned by nearby_places.
    """
    params = request.GET
    if params.get('location') is None or params.get('keywords') is None:
        raise BadRequest('Missing location or keywords parameter')
    places = nearby_places(params.get('location'), params.get('keywords'))
    for name in places:
        record = Place(name=name, date_saved=timezone.now())
        record.save()
    return places
def load_csv():
    """Reload the Place table from sejrssedler_steder.csv (wipes existing rows)."""
    Place.objects.all().delete()
    csv_path = os.path.dirname(os.path.abspath(__file__)) + '/sejrssedler_steder.csv'
    with open(csv_path, 'rt') as csvfile:
        notereader = csv.reader(csvfile, delimiter=',')
        rows = iter(notereader)
        next(rows, None)  # skip the header row; no-op on an empty file
        for row in rows:
            Place(placeid=int(row[0]), name=row[1], rank=int(row[2])).save()
def app(user, plan_record):
    """Build a test app seeded with roles, an admin user, a non-admin user,
    a place, and a plan owned by the admin user.
    """
    app = create_app(test_config)
    with app.app_context():
        db.create_all()
        admin_role = Role(name="admin")
        user_role = Role(name="user")
        db.session.add(admin_role)
        db.session.add(user_role)
        # The fixture user is the admin: it carries both roles.
        user.roles = [admin_role, user_role]
        # A second user with only the basic role.
        non_admin = User(first="Example", last="Person", email="*****@*****.**")
        non_admin.roles = [user_role]
        town = Place(name="Lowell, MA", description="A town")
        for record in (user, non_admin, town):
            db.session.add(record)
        plan = PlanSchema().load(plan_record)
        plan.user = user
        db.session.add(plan)
        db.session.commit()
    return app
def app(user, plan_record):
    """Create an app over an in-memory SQLite DB, seeded with both roles,
    the fixture user (given both roles), a place, and a plan for that user.
    """
    config = {
        "TESTING": True,
        "SQLALCHEMY_DATABASE_URI": "sqlite:///:memory:",
        "SQLALCHEMY_TRACK_MODIFICATIONS": False,
        "SECRET_KEY": b"my_secret_key",
    }
    app = create_app(config)
    with app.app_context():
        db.create_all()
        admin_role = Role(name="admin")
        user_role = Role(name="user")
        for role in (admin_role, user_role):
            db.session.add(role)
        user.roles = [admin_role, user_role]
        db.session.add(user)
        town = Place(name="Lowell, MA", description="A town")
        db.session.add(town)
        plan = PlanSchema().load(plan_record)
        plan.user = user
        db.session.add(plan)
        db.session.commit()
    return app
def app(user, plan_record):
    """Seed a test app: both roles, an admin and a non-admin user, a fully
    configured place (districting problem + unit set), and a plan.
    """
    app = create_app(test_config)
    with app.app_context():
        db.create_all()
        admin_role = Role(name="admin")
        user_role = Role(name="user")
        db.session.add(admin_role)
        db.session.add(user_role)
        # Admin user: the fixture user carries both roles.
        user.roles = [admin_role, user_role]
        db.session.add(user)
        # Non-admin user: basic role only.
        non_admin = User(first="Example", last="Person", email="*****@*****.**")
        non_admin.roles = [user_role]
        db.session.add(non_admin)
        # Build the nested pieces first, then assemble the place.
        council_problem = DistrictingProblem(
            number_of_parts=9,
            name="Town Council",
            plural_noun="Town Council Districts",
        )
        block_units = UnitSet(
            name="Blocks",
            unit_type="block",
            slug="blocks",
            bounds="[[0, 100], [0, 100]]",
        )
        place = Place(
            slug="lowell",
            name="Lowell, MA",
            state="Massachusetts",
            description="A town",
            districting_problems=[council_problem],
            units=[block_units],
        )
        db.session.add(place)
        plan = Plan(**PlanSchema().load(plan_record))
        plan.user = user
        db.session.add(plan)
        db.session.commit()
    return app
def create(self, validate_data):
    """Create, persist, and return a new `Place` built from the validated data.

    Opens a short-lived SQLAlchemy session against db.sqlite3.
    `expire_on_commit=False` keeps the returned instance's attributes
    loadable after the session is closed.
    """
    # NOTE(review): creating an engine per call is wasteful; consider
    # hoisting it to module level if this path is hot.
    engine = sqlalchemy.create_engine('sqlite:///db.sqlite3')
    Session = sqlalchemy.orm.sessionmaker(
        bind=engine, expire_on_commit=False)
    session = Session()
    try:
        instance = Place(**validate_data)
        session.add(instance)
        session.commit()
        return instance
    finally:
        # The original leaked the session; always return its connection
        # to the pool, even if commit raises.
        session.close()
def get_place_tweets_at_user_id(request):
    """Return, as JSON, recent tweets of the requested type for a place.

    Expects 'place', 'user_screen_name' and 'type' GET parameters.
    When Place.get_place returns None, responds with a rate-limit error
    payload instead.
    """
    api = get_api_with_auth(request)
    place = Place.get_place(api, request.GET['place'],
                            request.GET['user_screen_name'])
    if place is None:
        # get_place returning None is treated as a Twitter rate-limit hit.
        return JsonResponse({'error': 'Rate Limit Exceeded.'})
    recent_tweets = place.get_tweets(api, request.GET['type'])
    return JsonResponse({
        'tweets': recent_tweets,
        'place': place.name,
    })
def create_place():
    """Create a Place from the JSON request body.

    'name_place' is required (400 if missing/empty); img, description and
    location are optional. Responds 200 with a success message.
    """
    payload = request.json
    name_place = payload.get('name_place')
    if not name_place:
        return jsonify({"msg": "El nombre no puede estar vacio"}), 400
    new_place = Place()
    new_place.img = payload.get('img')
    new_place.name_place = name_place
    new_place.description = payload.get('description')
    new_place.location = payload.get('location')
    db.session.add(new_place)
    db.session.commit()
    return jsonify({"msg": "Place creado exitosamente"}), 200
def scrape(self, regex):
    """Scrape venue rows matching *regex* from slickbox and save each as a Place.

    Posts a regex search to the venues endpoint, parses the HTML result
    table with BeautifulSoup into flat 16-field rows, normalises the date
    and update-request fields, and persists one Place per row.
    """
    # Setup search query
    venue = regex
    use_regex = 1
    ignore_case = 1
    max_lock = 6
    country = 235  # site-specific country code filter
    payload = {
        "vname": venue,
        "regex": use_regex,
        "ignorecase": ignore_case,
        "lock": max_lock,
        "country": country,
        "submit": "Search",
    }
    scrape = requests.post("https://db.slickbox.net/venues.php", data=payload)

    # ---- Parse the HTML output ----
    # Keep only the table body between </thead> and </table>.
    data = scrape.text[scrape.text.find('</thead>'):scrape.text.rfind('</table>')]
    soup = BeautifulSoup(data, "html.parser")

    # Split each permalink anchor into three separate <td> cells
    # (venue id, permalink URL, venue name) so every row has a uniform,
    # flat set of fields.
    for a in soup.find_all("a"):
        permalink = soup.new_tag("td")
        permalink.string = a.get("href")
        name = soup.new_tag("td")
        name.string = a.string
        ven_id = soup.new_tag("td")
        # NOTE(review): this pattern appears intended to grab the id that
        # follows 'venues=' in the permalink — verify against live output.
        ven_id.string = re.findall("(?!.*[venues=]).*", permalink.string)[0]
        a.parent.append(ven_id)
        a.parent.append(permalink)
        a.parent.append(name)
        # Remove the original anchor now that its data lives in plain cells.
        a.decompose()
        ven_id.parent.unwrap()

    # Collect the cells into a 2D list; a new venue entry begins every
    # 16 fields.
    place = []
    count = 0
    row = -1
    for child in soup.find_all("td"):
        if count % 16 == 0:
            place.append([])
            row += 1
            count = 0
        if child.string is not None:
            place[row].append(child.string.strip())
        else:
            place[row].append(child.string)  # cannot strip a NoneType
        count += 1

    # ---- Export to the database ----
    for row in place:
        # Parse the created-on / updated-on ISO-8601 date fields.
        if row[11] is not None:
            row[11] = iso8601.parse_date(row[11])
        if row[13] is not None:
            row[13] = iso8601.parse_date(row[13])
        # We care whether an update was requested, not when: collapse the
        # field to a plain boolean (was a verbose if/else with != None).
        row[14] = row[14] is not None
        new = Place(venueId=row[0], permalink=row[1], name=row[2],
                    lockLevel=row[3], categories=row[4], number=row[5],
                    street=row[6], city=row[7], state=row[8],
                    country=row[9], createdBy=row[10], createdOn=row[11],
                    updatedBy=row[12], updatedOn=row[13],
                    updateRequest=row[14], isResidential=row[15])
        new.save()