Example #1
    def setUp(self):
        """Reset the schema and seed a test user with one saved search."""

        db.drop_all()
        db.create_all()

        self.client = app.test_client()
        self.testuser = User.signup(username='******',
                                    email='*****@*****.**',
                                    password='******')
        self.testuser_id = 1000
        self.testuser.id = self.testuser_id

        self.testsearch = Search.create({
            'location': 'Test location',
            'date': '2020-07-07',
            'dates': '6-10-20,6-11-20,6-12-20,6-13-20,6-14-20',
            'deaths': '402,406,410,416,422',
            'cases': '6916,6985,7051,7107,7151',
            'change_deaths': '1,2,3,4,5',
            'change_cases': '10,20,30,40,50',
            'created_at': datetime.now(),
            'description': 'Test description'
        })

        self.testsearch_id = 2000
        self.testsearch.id = self.testsearch_id

        db.session.commit()

        # Link the search to the user through the relationship.
        self.testuser.searches.append(self.testsearch)

        db.session.commit()
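All three fixtures on this page call Search.create() with a plain dict, and Example #2 below expects created_at to default to the current time when it is omitted. The model itself never appears here, so the following is only a minimal sketch of what it might look like, assuming Flask-SQLAlchemy; the column types and the classmethod body are guesses from the values the tests pass in:

    from datetime import datetime
    from flask_sqlalchemy import SQLAlchemy

    db = SQLAlchemy()

    class Search(db.Model):
        __tablename__ = 'searches'

        id = db.Column(db.Integer, primary_key=True)
        location = db.Column(db.Text, nullable=False)
        date = db.Column(db.Text)
        dates = db.Column(db.Text)            # comma-separated date strings
        deaths = db.Column(db.Text)           # comma-separated counts
        cases = db.Column(db.Text)
        change_deaths = db.Column(db.Text)
        change_cases = db.Column(db.Text)
        created_at = db.Column(db.DateTime, default=datetime.now)
        description = db.Column(db.Text)

        @classmethod
        def create(cls, data):
            # Build an instance from a dict of column values and stage it
            # on the session; callers commit, as the examples here do.
            search = cls(**data)
            db.session.add(search)
            return search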
Example #2
  def test_create(self):
    """Search.create should persist every supplied field and default created_at."""
    testsearch = Search.create({
      'location': 'Test place',
      'date': '2020-07-07',
      'dates': '6-10-20',
      'deaths': '402',
      'cases': '6916',
      'change_deaths': '1',
      'change_cases': '10',
      'description': 'Testing description'
    })

    testsearch.id = 1234
    db.session.add(testsearch)
    db.session.commit()

    testsearch = Search.query.get(1234)

    self.assertIsNotNone(testsearch)
    self.assertEqual(testsearch.location, 'Test place')
    self.assertEqual(testsearch.date, '2020-07-07')
    self.assertEqual(testsearch.dates, '6-10-20')
    self.assertEqual(testsearch.deaths, '402')
    self.assertEqual(testsearch.cases, '6916')
    self.assertEqual(testsearch.description, 'Testing description')
    self.assertIsInstance(testsearch.created_at, datetime)
Example #3
  def setUp(self):
    """Reset the schema and attach one search to a test user."""
    db.drop_all()
    db.create_all()
    
    u = User.signup('testuser', '*****@*****.**', 'testpwd')
    uid = 1111
    u.id = uid

    db.session.commit()

    s = Search.create({
      'location': 'Test location',
      'date': '2020-07-07',
      'dates': '6-10-20,6-11-20,6-12-20,6-13-20,6-14-20',
      'deaths': '402,406,410,416,422',
      'cases': '6916,6985,7051,7107,7151',
      'change_deaths': '1,2,3,4,5',
      'change_cases': '10,20,30,40,50',
      'created_at': datetime.now(),
      'description': 'Test description'
    })
    
    sid = 2222
    s.id = sid

    u.searches.append(s)
    db.session.commit()

    self.u = u
    self.s = s
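Because setUp() drops and recreates every table, suites like this one usually pair it with a tearDown() that rolls back whatever the test left open; otherwise a failed commit can stall the next drop_all(). None of the examples here show one, so this is only a sketch:

  def tearDown(self):
    # Discard any uncommitted state so the next test's drop_all()
    # doesn't hang on an open transaction.
    db.session.rollback()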
Example #4
def save_search():
	"""If the user is logged in, save the search to their account;
	otherwise stash it in the session and redirect them to log in.
	"""

	if current_user.is_authenticated:
		s = Search.create(request.json)
		current_user.searches.append(s)
		db.session.commit()
		return 'saved'
	else:
		session['search'] = serialize(request.json)
		return 'login'
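serialize() here is a project helper rather than a library call, and its body never appears on this page. Flask sessions can only hold JSON-serializable values, so one plausible reading, offered purely as an assumption, is that it whitelists the JSON-safe fields of the payload:

    def serialize(payload):
        # Hypothetical stand-in for the project's serialize() helper:
        # keep only values a session cookie can carry.
        return {key: value for key, value in payload.items()
                if isinstance(value, (str, int, float, bool, type(None)))}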
Example #5
def login():
    """ Show login page with login form """

    # If user has been redirected to save a search, show flash message.
    if request.args.get('saveSearch') and request.method == 'GET':
        flash(
            Markup(
                'Please log in to save your search. Don\'t have an account? Register <a href="/signup">here</a>.'
            ), 'danger')

    if current_user.is_authenticated:
        return redirect(url_for('index'))

    form = LoginForm()

    if form.validate_on_submit():
        username = form.username.data
        password = form.password.data
        user = User.authenticate(username, password)

        if user:
            login_user(user)

            # If user has saved search in session, save it to database and delete session.
            if 'search' in session:
                s = Search.create(session['search'])
                user.searches.append(s)
                db.session.commit()
                del session['search']
                flash("Search saved", 'success')
                return redirect(f'/user/{user.username}/searches')

            return redirect(url_for('index'))

        flash("Invalid credentials.", 'danger')

    return render_template('/login.html',
                           form=form,
                           btnText="Log in",
                           cancel='index',
                           color="#99d3FF")
Example #6
def get_results(query, package, include_stack_overflow, fetch_index, search_id, api_key):
    """Fetch one page of Google Custom Search results for `query` and store them."""
    # Make request for search results
    params = DEFAULT_PARAMS.copy()
    params['key'] = api_key
    params['cx'] = search_id
    params['q'] = query
    if not include_stack_overflow:
        params['siteSearch'] = 'stackoverflow.com'
        params['siteSearchFilter'] = 'e'  # 'e' for 'exclude'
    response = make_request(default_requests_session.get, SEARCH_URL, params=params)

    # Pause so that we don't bombard the server with requests
    time.sleep(REQUEST_DELAY)

    # If the request failed, make_request returned None; skip this query.
    if response is None:
        return

    # Parse search results
    soup = BeautifulSoup(response.content, 'html.parser')
    entry_count = len(soup.find_all('entry'))

    # The Atom spec for the search API
    # (https://developers.google.com/custom-search/json-api/v1/reference/cse/list#response)
    # mentions that the estimated results count may be a long integer.
    # To my knowledge, peewee (our ORM) doesn't support long integer fields.
    # So, I cast this to an integer instead and cross my fingers there is no overflow.
    search = Search.create(
        fetch_index=fetch_index,
        query=query,
        page_index=0,
        requested_count=REQUESTED_RESULT_COUNT,
        result_count_on_page=entry_count,
        estimated_results_count=int(
            soup.find('cse:searchinformation').find('cse:totalresults').text),
        package=package,
    )

    # Fetch the first "entry" or search result
    entry = soup.entry

    # Save all of the search results from first to last.
    # Maintaining consistency with our query scraping, ranking starts at 1.
    for rank in range(1, entry_count + 1):

        # Extract fields from the entry
        updated_datetime_without_milliseconds = re.sub(r'\.\d\d\dZ', 'Z', entry.updated.text)
        updated_datetime = datetime.datetime.strptime(
            updated_datetime_without_milliseconds,
            "%Y-%m-%dT%H:%M:%SZ"
        )
        link = entry.link['href']
        snippet = entry.summary.string
        title = entry.title.text
        url = entry.id.text

        # Create a record for this search result
        SearchResult.create(
            search=search,
            title=title,
            snippet=snippet,
            link=link,
            url=url,
            updated_date=updated_datetime,
            rank=rank,
        )

        # To my knowledge, this is the only method for which it is strongly implied in
        # the BeautifulSoup documentation that you are fetching the next result
        # in the sequence.  I also assume that the search API is returning results
        # in the order of decreasing relevance, such that rank increases (gets bigger)
        # with each successive entry visited.
        entry = entry.find_next('entry')
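Search and SearchResult in this example are peewee models, not the Flask-SQLAlchemy model from Examples #1-#3. The schema is not shown, but the fields passed to the two create() calls imply something like the sketch below; every field type is an assumption. (Note that peewee does ship a BigIntegerField, which would sidestep the overflow worry in the comment above.)

    from peewee import (Model, SqliteDatabase, CharField, TextField,
                        IntegerField, DateTimeField, ForeignKeyField)

    db = SqliteDatabase('search.db')  # placeholder database

    class BaseModel(Model):
        class Meta:
            database = db

    class Search(BaseModel):
        fetch_index = IntegerField()
        query = CharField()
        page_index = IntegerField()
        requested_count = IntegerField()
        result_count_on_page = IntegerField()
        estimated_results_count = IntegerField()  # BigIntegerField would avoid the cast concern
        package = CharField()

    class SearchResult(BaseModel):
        search = ForeignKeyField(Search, backref='results')
        title = TextField()
        snippet = TextField(null=True)
        link = TextField()
        url = TextField()
        updated_date = DateTimeField()
        rank = IntegerField()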