Example No. 1
async def many_requests(request):
    return UJSONResponse({'ok': True})
Example No. 2
async def sign_out(
    current_user: models.User = Depends(security.get_current_active_user),
):
    response = UJSONResponse({"status": "signed out"})
    response.set_cookie(oauth2_scheme.token_name, "", httponly=True)
    return response
Example No. 3
async def homepage(request):
    return UJSONResponse(dict(hello='world'))
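
For context, handlers like the ones above are ordinary Starlette endpoints. A minimal sketch of wiring such a handler into an application and exercising it in-process, assuming a Starlette release that still ships UJSONResponse (recent releases removed it) and that the optional ujson package is installed:

from starlette.applications import Starlette
from starlette.responses import UJSONResponse
from starlette.routing import Route
from starlette.testclient import TestClient

async def homepage(request):
    # Same shape as Example No. 3: a dict serialized with ujson.
    return UJSONResponse(dict(hello='world'))

app = Starlette(routes=[Route('/', homepage)])

# TestClient drives the ASGI app directly, without starting a server.
print(TestClient(app).get('/').json())  # {'hello': 'world'}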
Example No. 4
 async def patch(self, request: Request, reqargs: dict) -> UJSONResponse:
     command = EditMemberCmd(**reqargs)
     return UJSONResponse({
         "data": reqargs,
         "member_id": request.path_params["member_id"]
     })
Example No. 5
async def index(req):
    """Index route
    """
    return UJSONResponse({'msg': 'Hello, world!'})
Example No. 6
async def unknown_error(request, exc: Exception):
    return UJSONResponse(content={"message": str(exc)}, status_code=HTTP_500_INTERNAL_SERVER_ERROR)
Example No. 7
 async def post(self, request: Request, reqargs: dict) -> UJSONResponse:
     # meteor_logman.info(reqargs)
     command = CreateMemberCmd(**reqargs)
     result: MemberProfileRst = member_profile_service.create(command)
     resp = MemberProfileResp().dump(result)
     return UJSONResponse({"success": True, "data": resp})
Example No. 8
async def homepage(request):
    # def call():
    art_num = art_num_ref.get()
    print(art_num)
    today = date.today()
    print((today - origin_date).days)
    days_elapsed = (today - origin_date).days

    firebase_data = []
    # pprint.pprint( newsapi.get_sources())
    newsTexts = []
    urls = []
    sources = []

    # Pull top headlines from each source and collect "title. description",
    # the article URL and a vowel-stripped source name.
    for source_id in ("fortune", "bbc-news", "the-verge", "business-insider"):
        top_headlines = newsapi.get_top_headlines(language='en',
                                                  sources=source_id)
        for article in top_headlines['articles']:
            newsTexts.append(article['title'] + ". " + article['description'])
            urls.append(article['url'])
            sources.append(removeVowels(article['source']['name']))
    words = []

    pprint.pprint(urls)

    # Send for our named entity recognition.
    query = {'sections': newsTexts, 'sense2vec': False}
    jsonData = json.dumps(query)
    r = requests.post(spacy_url + '/ner', json=query)

    data = r.json()['data']
    namedEntities = []
    for obj in data:
        for ent in obj['entities']:

            if ent['label'] not in utils.NON_ENTITIES:

                if ent['text'] not in namedEntities and not any(
                        map(str.isdigit, ent['text'])):
                    if len(ent['text']) < 30 and len(ent['text']) > 4:
                        if ent['text'].isalnum():
                            namedEntities.append(ent['text'])

    pprint.pprint(namedEntities)
    output_qs = ''

    # get from our gpt2
    answers = []
    count = 0
    sample_size = 10 if len(namedEntities) > 10 else len(namedEntities)
    headline_nouns = random.sample(namedEntities, sample_size)
    for entity in headline_nouns:
        output = {}

        connecting_word = connecting[random.randint(0, len(connecting) - 1)]
        for index, text in enumerate(newsTexts):
            if entity in text:
                output['url'] = urls[index]
                output['imageURL'] = getImageURL(urls[index], entity)
                output['source'] = sources[index]

        print("Progress: " + str(count) + "/" + str(sample_size) + " -- " +
              entity)
        ending = random.choice(utils.ENDINGS)
        output['question'] = ("What if the imagination of " + entity + " " +
                              connecting_word + " " + ending + "?")
        output['question-0'] = "What if the"
        output['question-1'] = "imagination of "
        output['question-2'] = entity
        output['question-3'] = " " + connecting_word + " " + ending + "?"

        question = "What if the imagination of " + entity + " was " + ending + "? Its " + ending + " that could"

        try:
            r = requests.post('https://fabalt-gpt-idvgffrwca-ez.a.run.app/',
                              json={
                                  "prefix": question,
                                  "temperature": "0.7",
                                  "top_k": "40",
                                  "top_p": "0.6"
                              })
            atext = r.json()["text"]
            answer = ''
            for gpt2response in atext:
                if len(gpt2response) < 300:
                    answer = gpt2response

            query = {'text': answer}
            sentences_split = requests.post(spacy_url + '/sentencizer',
                                            json=query)
            i = 0
            for sentence in sentences_split.json()['sentences']:
                if (i == 1):
                    output['answer'] = sentence
                    answers.append(output)
                i += 1

            count += 1

        except Exception as e:
            print(str(e))

    for index, answer in enumerate(answers):
        print(type(answer))
        answer['stamp'] = art_num + index
        json_object = json.dumps(answer, indent=4)
        pprint.pprint(json_object)
        firebase_data.append(answer)

    # # Update firebase with it.
    ref.set({"data": json.dumps(firebase_data, indent=4)})
    art_num += len(answers)
    art_num_ref.set(art_num)
    gc.collect()
    return UJSONResponse({'status': "Success"}, headers=response_header)
Example No. 9
async def api_get_build(request: Request):
    tag = request.path_params["jenkins_tag"]
    return UJSONResponse(get_build_dict(tag))
Example No. 10
async def http_exception_handler(request, exc):
    return UJSONResponse({"error": exc.detail}, status_code=exc.status_code)
Example No. 11
async def home(request):
    return UJSONResponse({"msg": "api is running"})
Example No. 12
async def run_search(request):
    text = await request.json()
    results = get_ranked_results(text['text'])
    return UJSONResponse({'Results': results})
Example No. 13
 async def get(self, request):
     user_id = request.path_params['id']
     log.info('get user with id: %s' % user_id)
     u = await User.get(user_id, raw=True)
     log.info('got user: %s' % u)
     return UJSONResponse(u)
Example No. 14
async def brute_force(request):
    return UJSONResponse({'ok': True})
Example No. 15
async def http_exception(request, exc):
    return UJSONResponse({"msg": exc.detail}, status_code=exc.status_code)
Example No. 16
File: app.py Project: pyrates/roll
async def minimal(request):
    return UJSONResponse({'message': 'Hello, World!'})
Example No. 17
async def unprocessable_entity_error(request, exc: RequestValidationError):
    return UJSONResponse(content={"message": exc.errors()}, status_code=HTTP_422_UNPROCESSABLE_ENTITY)
Example No. 18
 async def asgi(receive, send):
     response = UJSONResponse({"hello": "world"})
     await response(receive, send)
Example No. 19
async def http_error(request, exc: HTTPException):
    return UJSONResponse(content={"message": exc.detail}, status_code=exc.status_code)
Example No. 20

async def database_updates(request):
    num_queries = get_num_queries(request)
    updates = [(randint(1, 10000), randint(1, 10000))
               for _ in range(num_queries)]
    worlds = [{
        'id': row_id,
        'randomNumber': number
    } for row_id, number in updates]

    async with connection_pool.acquire() as connection:
        statement = await connection.prepare(READ_ROW_SQL)
        for row_id, number in updates:
            await statement.fetchval(row_id)
        await connection.executemany(WRITE_ROW_SQL, updates)

    return UJSONResponse(worlds)


routes = [
    Route('/json', UJSONResponse({'message': 'Hello, world!'})),
    Route('/db', single_database_query),
    Route('/queries', multiple_database_queries),
    Route('/fortunes', fortunes),
    Route('/updates', database_updates),
    Route('/plaintext', PlainTextResponse(b'Hello, world!')),
]

app = Starlette(routes=routes)
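
A module like this is typically served by an ASGI server; a minimal sketch assuming uvicorn is installed and the file above is saved as app.py:

import uvicorn

if __name__ == "__main__":
    # Serve the Starlette app defined above on localhost.
    uvicorn.run(app, host="127.0.0.1", port=8000)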
Example No. 21
 async def get(self, request: Request) -> UJSONResponse:
     command = GetMemberByIdCmd(**request.path_params)
     result: MemberProfileRst = member_profile_service.get(command)
     resp = MemberProfileResp().dump(result)
     return UJSONResponse(resp)
Example No. 22
async def homepage(request):
    global generate_count
    global sess
    global script_path

    if request.method == 'GET':
        params = request.query_params
    elif request.method == 'POST':
        params = await request.json()
    elif request.method == 'HEAD':
        return UJSONResponse({'text': ''},
                             headers=response_header)

    #prompt=params.get('prompt', '')[:100]
    prompt = params.get('prompt', '')
    if prompt is None:
        prompt = "<|startoftext|>" # GPT2's standard start token
    logging.info('Generating text for [%s], model: [%s]', prompt, model)

    # Run the inferencing and get the raw unprocessed tweets
    unproc_tweets_list = gpt2.generate(sess, checkpoint_dir=checkpoint_path,
                             length=int(params.get('length', 80)),
                             nsamples=int(params.get('num_samples', 20)),
                             temperature=float(params.get('temperature', 0.7)),
                             top_k=int(params.get('top_k', 40)),
                             top_p=float(params.get('top_p', 0.9)),
                             prefix=prompt,
                             truncate=params.get('truncate', "<|endoftext|>"),
                             include_prefix=str(params.get(
                                 'include_prefix', True)).lower() == 'true',
                             batch_size=int(params.get('batch_size', 20)),
                             return_as_list=True
                         )
    proc_tweets_list = []
    deleted_list = []
    
    # Not all tweets are gold, so do some cleanup
    # Remove duplicate sentences/words and then get rid of extremely short tweets
    for raw_tweet in unproc_tweets_list:
        # Remove \n and " characters
        t = raw_tweet.replace('\n', ' ')
        t = t.replace('\"', '')

        # Break the text into sentences and remove the last sentence which is most likely to be incomplete
        t_list = sent_tokenize(t)
        del t_list[-1]
        t = ' '.join(unique(t_list))

        # Some tweets have repeated words. Remove the ones below a threshold. 
        if len(set(t.split())) > 20:
            proc_tweets_list.append(t)
        else:
            deleted_list.append(t)
    
    logging.info("Generated %d tweets", len(proc_tweets_list))

    if len(proc_tweets_list) > 0:
        # Find the sentiment of the tweets using AWS comprehend 
        tweets_list, sentiment_list, score_list = get_sentiment(proc_tweets_list)

        # Store in DynamoDB
        dynamodb = boto3.resource('dynamodb', region_name='ap-south-1')
        table = dynamodb.Table('gpt2-tweets-' + model)
        json_text = json.dumps(tweets_list)
        json_sentiment = json.dumps(sentiment_list)
        json_score = json.dumps(score_list)
        timestamp = datetime.now().isoformat()
        logging.info('Adding to DynamoDB DB, prompt: [%s], model: %s', prompt.lower(), model)
        table.put_item(
            Item={
                'prompt' : prompt.lower(),
                'text' : json_text,
                'orig_prompt' : prompt,
                'sentiment' : json_sentiment,
                'score' : json_score,
                'timestamp': timestamp,
                'visits': decimal.Decimal(1)
            }
        )

    logging.info('Finished executing script')

    return UJSONResponse({'text': proc_tweets_list}, headers=response_header)
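
A hypothetical client call against the handler above; the URL and prompt are assumptions, while the JSON keys mirror the params.get(...) lookups inside the endpoint:

import requests

resp = requests.post(
    'http://localhost:8000/',  # assumed host/port for the deployed app
    json={'prompt': 'The cloud', 'length': 80, 'num_samples': 20,
          'temperature': 0.7, 'top_k': 40, 'top_p': 0.9},
)
# The endpoint returns {'text': [...]} with the cleaned-up generated tweets.
print(resp.json()['text'])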
Example No. 23
async def req_param(req):
    """Reqeuest param test
    """
    return UJSONResponse({'msg': f"Param = {req.path_params.get('id', -1)}"})
Example No. 24
async def tree_artifacts(request):
    run_uuid = request.path_params["run_uuid"]
    filepath = request.query_params.get("path", "")
    ls = await list_files(subpath=run_uuid, filepath=filepath)
    return UJSONResponse(ls)
Example No. 25
async def verify_magic(data: models.Magic = Body(...)):
    user = await security.authenticate_user_magic(data.email, data.secret)
    if not user:
        raise HTTPException(status_code=HTTP_400_BAD_REQUEST, detail="Invalid Link")
    response = UJSONResponse({"status": "authenticated"})
    return add_login_cookie(response, user)
Example No. 26
 async def app(scope, receive, send):
     response = UJSONResponse({"hello": "world"})
     await response(scope, receive, send)
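
Examples No. 18 and 26 lean on the fact that a Starlette Response is itself a callable ASGI application. A minimal sketch of driving such an app by hand with stub receive/send callables, assuming the three-argument ASGI interface used in Example No. 26:

import asyncio
from starlette.responses import UJSONResponse

async def app(scope, receive, send):
    response = UJSONResponse({"hello": "world"})
    await response(scope, receive, send)

async def main():
    messages = []

    async def receive():
        return {"type": "http.request", "body": b"", "more_body": False}

    async def send(message):
        messages.append(message)

    await app({"type": "http", "method": "GET", "headers": []}, receive, send)
    # The first message carries the status, the second the ujson-encoded body.
    print(messages[0]["status"], messages[1]["body"])

asyncio.run(main())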
Example No. 27
async def json_serialization():
    return UJSONResponse({'message': 'Hello, world!'})
Example No. 28
 async def get(self, request):
     data = await get_patient_by_id(request)
     if not data:
         # JSONResponse subclasses take an explicit content argument in current
         # Starlette releases; send a null body along with the 404 status.
         return UJSONResponse(None, status_code=404)
     return UJSONResponse(data)
Example No. 29
 async def get(self, request: Request) -> UJSONResponse:
     return UJSONResponse({"path": "Members"})
Example No. 30
async def users(request: Request) -> UJSONResponse:
    users = await User.all().values()
    users = [UserResponse(**user).as_dict() for user in users]
    return UJSONResponse({"users": users})