Esempio n. 1
0
 async def unicorn_exception_handler(request: Request,
                                     exc: EntityNotFoundError):
     """Translate an EntityNotFoundError into an HTTP 404 JSON response."""
     body = {"message": exc.message}
     return JSONResponse(status_code=status.HTTP_404_NOT_FOUND, content=body)
Esempio n. 2
0
def multiply(n: Numbers):
    """Dispatch a Celery multiplication task and block until its result.

    NOTE(review): task.get() blocks the request worker — confirm intended.
    """
    outcome = multiply_numbers.delay(n.x, n.y).get()
    return JSONResponse(content={"status": "ok", "data": outcome})
Esempio n. 3
0
def show_stats() -> JSONResponse:
    """Expose the module-level emitted statistics as a JSON payload."""
    emitted = stats.emitted
    return JSONResponse(content=emitted)
Esempio n. 4
0
async def handle_news_not_found_error(request: Request, exc: NewsNotFoundError):
    """Map a NewsNotFoundError onto an HTTP 404 with its message as detail."""
    detail = {"detail": str(exc)}
    return JSONResponse(content=detail,
                        status_code=status.HTTP_404_NOT_FOUND)
Esempio n. 5
0
def validation_exception_handler(request, exc: RequestValidationError):
    """Return a 400 carrying only the human-readable part of a
    query-parameter validation error.

    The stringified error looks like '... query -> <field> <msg> (type=...)';
    this extracts the '<field> <msg>' span between the two markers.
    """
    text = str(exc)
    start = text.find("query ->")
    end = text.find("(type")
    if start == -1 or end == -1:
        # Robustness fix: the original sliced with -1 offsets when a marker
        # was missing, producing a corrupted message.
        message = text.strip()
    else:
        message = text[start + len("query ->"):end].strip()
    # Bug fix: the response key was previously misspelled "detial".
    return JSONResponse(status_code=400, content={"detail": message})
Esempio n. 6
0
async def http_error_handler(_: Request, exc: HTTPException) -> JSONResponse:
    """Wrap an HTTPException's detail in the project's errors-list envelope."""
    payload = {"errors": [exc.detail]}
    return JSONResponse(payload, status_code=exc.status_code)
async def query(request: PDResponse):
    """Add support to message.

    Add support edges to knowledge_graph and bindings to results.

    Returns a JSONResponse with the (re-validated) message; status 500
    with a log entry appended on any failure during support lookup.
    """
    in_message = request.dict()

    # save the logs for the response (if any)
    if 'logs' not in in_message or in_message['logs'] is None:
        in_message['logs'] = []

    # init the status code
    status_code: int = 200

    message = in_message['message']

    qgraph = message['query_graph']
    kgraph = message['knowledge_graph']
    answers = message['results']

    # get cache if possible
    try:
        cache = Cache(
            redis_host=CACHE_HOST,
            redis_port=CACHE_PORT,
            redis_db=CACHE_DB,
            redis_password=CACHE_PASSWORD,
        )
    except Exception as e:
        logger.exception(e)
        cache = None

    redis_batch_size = 100

    def _batched_mget(keys):
        """Fetch cached values for keys in batches; all-None if no cache.

        BUG FIX: the original called cache.mget() unconditionally, so a
        failed cache init (cache is None) raised AttributeError and turned
        a cache outage into an HTTP 500.
        """
        if cache is None:
            return [None] * len(keys)
        values = []
        for batch in batches(keys, redis_batch_size):
            values.extend(cache.mget(*batch))
        return values

    try:
        async with OmnicorpSupport() as supporter:
            # get all node supports
            keys = [
                f"{supporter.__class__.__name__}({node})"
                for node in kgraph['nodes']
            ]
            values = _batched_mget(keys)

            jobs = [
                count_node_pmids(supporter, node, key, value, cache,
                                 kgraph['nodes'])
                for node, value, key in zip(kgraph['nodes'], values, keys)
            ]

            # which qgraph nodes are sets?
            qgraph_setnodes = {
                n for n in qgraph['nodes']
                if (('is_set' in qgraph['nodes'][n])
                    and qgraph['nodes'][n]['is_set'])
            }

            # Generate a set of pairs of node curies
            pair_to_answer = defaultdict(set)  # a map of node pairs to answers
            for ans_idx, answer_map in enumerate(answers):

                # Nodes not bound to set-qnodes get densely connected;
                # set-qnode bindings are collected per qgraph node id.
                nonset_nodes = []
                setnodes = {}

                # node bindings: dict of qnode id -> list of {'id': curie} dicts
                for nb in answer_map['node_bindings']:
                    bindings = answer_map['node_bindings'][nb]
                    if nb in qgraph_setnodes:
                        setnodes[nb] = [node['id'] for node in bindings]
                    elif len(bindings) != 0:
                        nonset_nodes.append(bindings[0]['id'])

                nonset_nodes = sorted(nonset_nodes)
                for node_pair in combinations(nonset_nodes, 2):
                    pair_to_answer[node_pair].add(ans_idx)

                # For all nodes within sets, connect them to every
                # node that is not in a set.
                for snodes in setnodes.values():
                    for snode in snodes:
                        for node in nonset_nodes:
                            node_pair = tuple(sorted((node, snode)))
                            pair_to_answer[node_pair].add(ans_idx)

                # now all nodes in set a to all nodes in set b
                for qga, qgb in combinations(setnodes.keys(), 2):
                    for anode in setnodes[qga]:
                        for bnode in setnodes[qgb]:
                            node_pair = tuple(sorted((anode, bnode)))
                            pair_to_answer[node_pair].add(ans_idx)

            # get all pair supports
            cached_prefixes = cache.get('OmnicorpPrefixes') if cache else None

            keys = [
                f"{supporter.__class__.__name__}_count({pair[0]},{pair[1]})"
                for pair in pair_to_answer
            ]
            values = _batched_mget(keys)

            jobs.extend([
                count_shared_pmids(
                    supporter,
                    support_idx,
                    pair,
                    key,
                    value,
                    cache,
                    cached_prefixes,
                    kgraph,
                    pair_to_answer,
                    answers,
                ) for support_idx, (pair, value, key)
                in enumerate(zip(pair_to_answer, values, keys))
            ])
            await asyncio.gather(*jobs)

        # load the new results into the response
        message['knowledge_graph'] = kgraph
        message['results'] = answers

    except Exception as e:
        # put the error in the response
        status_code = 500

        # save any log entries
        in_message['logs'].append(
            create_log_entry(f'Exception: {str(e)}', 'ERROR'))

    # validate the response again after normalization
    in_message = jsonable_encoder(PDResponse(**in_message))

    # return the result to the caller
    return JSONResponse(content=in_message, status_code=status_code)
Esempio n. 8
0
async def main():
    """Identity endpoint: report the service name with HTTP 200."""
    payload = {"message": "pizza-order-api"}
    return JSONResponse(content=payload, status_code=status.HTTP_200_OK)
Esempio n. 9
0
def options_item(item_id: str):
    """Answer an OPTIONS request, echoing the item id in a custom header."""
    extra_headers = {"x-fastapi-item-id": item_id}
    return JSONResponse(None, headers=extra_headers)
Esempio n. 10
0
async def http_exception_handler(request, exc):
    """Custom StarletteHTTPException handler: serialize the exception's
    detail as the entire JSON body, preserving its status code."""
    return JSONResponse(content=exc.detail, status_code=exc.status_code)
Esempio n. 11
0
def handle_auth_error(request: Request, ex: AuthError):
    """Render an AuthError as JSON using its own status code and error body."""
    body = ex.error
    return JSONResponse(content=body, status_code=ex.status_code)
Esempio n. 12
0
async def value_error_handler(request: Request, exc: ValueError):
    """Convert a ValueError into a 404 response with its message as JSON.

    NOTE(review): 404 for a ValueError is unusual — confirm 400 wasn't meant.
    """
    body = {"message": str(exc)}
    return JSONResponse(content=body, status_code=404)
Esempio n. 13
0
def delete_temparary_file(request: Request, file_id: str):
    """Delete the temporary dataset file identified by file_id.

    Raises HTTP 404 when the file does not exist; otherwise confirms
    cancellation. (Public name keeps its historical spelling for callers.)
    """
    if not delete_tmpfile(file_id):
        raise HTTPException(status_code=404, detail="File not found")
    return JSONResponse(content={"message": "submission cancelled"})
Esempio n. 14
0
async def monitoring_data(start: datetime,
                          match: List[str] = Query([], min_length=1),
                          end: datetime = None,
                          step: str = None):
    """Return mocked Prometheus-style matrix data for the requested metrics.

    Example:
      /monitoringData?match[]=up&match[]=up&
                 start=2015-07-01T20:10:30.781Z&
                 end=2015-07-01T20:11:00.781Z&
                 step=1m

    Validates the match/start/end/step combination (404 JSON error payloads
    on failure), may randomly simulate an upstream 500, then fabricates one
    time series per requested metric.
    """
    if len(match) < 1:
        return JSONResponse(status_code=404,
                            content={
                                "status": "Error",
                                "message": "Match required."
                            })
    # Idiom fix: `is not None` instead of `!= None` comparisons throughout.
    if start is not None and end is not None and start >= end:
        return JSONResponse(status_code=404,
                            content={
                                "status":
                                "Error",
                                "message":
                                "Start is greater or equals than the end."
                            })
    if end is not None and step is None:
        return JSONResponse(status_code=404,
                            content={
                                "status": "Error",
                                "message": "Interval datetime needs step."
                            })
    if step is not None:
        # step is "<integer><unit>", e.g. "1m"
        if step[:-1].isdigit() and step[-1] in ['s', 'm', 'h', 'M', 'y']:
            step_time = int(step[:-1])
            step_unit = step[-1]
        else:
            return JSONResponse(
                status_code=404,
                content={
                    "status":
                    "Error",
                    "message":
                    "Step is a integer followed by format ['s', 'm', 'h', 'M', 'y']."
                })
    else:
        step_time = None
        step_unit = None

    # Random response (simulated upstream failure)
    if generate_response() == '500':
        # Typo fix: message previously read "Faild".
        return JSONResponse(status_code=500,
                            content={
                                "status": "Error",
                                "message": "Failed to connect to OSM."
                            })

    dates = get_interval_datetimes(start, end, step_time, step_unit)

    response = {
        "status": "success",
        "data": {
            "resultType": "matrix",
            "result": []
        }
    }
    for metric in match:
        # One fabricated [timestamp, value] sample per interval datetime.
        samples = [[datetime.timestamp(date),
                    round(random.uniform(0, 1), 2)] for date in dates]
        response['data']['result'].append({
            "metric": {
                "__name__": metric,
                "job": "prometheus",
                "instance": "http://5gzorro_osm.com"
            },
            "values": samples
        })

    return response
Esempio n. 15
0
async def search_json(request: Request):
    """Run the shared search routine and return its result dict as JSON."""
    return JSONResponse(await _search_json(request))
Esempio n. 16
0
def trace_item(item_id: str):
    """Answer a TRACE request with an empty body of media type message/http."""
    return JSONResponse(None, media_type="message/http")
Esempio n. 17
0
def predict(payload: Payload):
    """Run the loaded model on the incoming message and return its result."""
    prediction = model_obj.predict(str(payload.msg))
    return JSONResponse(content={'result': prediction})
Esempio n. 18
0
def authjwt_exception_handler(request: Request, exc: AuthJWTException):
    """Render an AuthJWT failure as JSON with the exception's own status."""
    detail = {"detail": exc.message}
    return JSONResponse(content=detail, status_code=exc.status_code)
Esempio n. 19
0
async def validation_exception_handler(request: Request, exc: RequestValidationError):
    """Log request-validation failures and return a 422 with full details.

    Returns the structured errors plus the offending body, run through
    jsonable_encoder so non-serializable values are converted.
    """
    errors = exc.errors()  # hoisted: was computed twice (log + response)
    # Lazy %-style logging args: message is only formatted if ERROR is enabled.
    logger.error("%s -- %s", errors, exc.body)
    return JSONResponse(
        status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
        content=jsonable_encoder({"detail": errors, "body": exc.body}),
    )
Esempio n. 20
0
def logout(token: str = Depends(get_current_user_token)):
    """Blacklist the caller's token; raise the credentials error on failure."""
    if not add_blacklist_token(token):
        raise CREDENTIALS_EXCEPTION
    return JSONResponse({'result': True})
Esempio n. 21
0
 def error(reason, status_code):
     """Build a JSON error response carrying the reason and status code."""
     payload = {'reason': reason}
     return JSONResponse(status_code=status_code, content=payload)
Esempio n. 22
0
async def create_movie(movie: Movie):
    """Record the movie's duration (if any) and echo it back with a fake id,
    returning HTTP 201 with the shared response headers."""
    if movie.duration is not None:
        post_duration(movie.duration)
    body = dict(movie)
    body['id'] = random.randint(1, 100)
    return JSONResponse(status_code=201, headers=HEADERS, content=body)
Esempio n. 23
0
async def handle_validation_error(request: Request, exc: ValidationError):
    """Turn a pydantic ValidationError into a 422 listing all field errors."""
    detail = {"detail": exc.errors()}
    return JSONResponse(content=detail,
                        status_code=status.HTTP_422_UNPROCESSABLE_ENTITY)
Esempio n. 24
0
async def create_user(user: UserModel = Body(...)):
    """Insert a new student document and return the stored record with 201."""
    doc = jsonable_encoder(user)
    insert_result = await db["students"].insert_one(doc)
    stored = await db["students"].find_one({"_id": insert_result.inserted_id})
    return JSONResponse(content=stored, status_code=status.HTTP_201_CREATED)
Esempio n. 25
0
async def inference(inference: Inference, status_code=HTTP_200_OK):
    """Run an inference request against a registered service and model.

    Looks up the service and model records in the database, runs image
    classification with the loaded learner, stores the output document,
    and returns the encoded output.

    Returns a JSON error (404/400) when lookups fail or the service type
    is not "inference".
    """
    # --- look up the inference service -------------------------------------
    try:
        service = db.services.find_one(
            {"service_id": f"{inference.service_id}"})
        service_type = service["service_type"]
    except Exception as e:
        # BUG FIX: the original returned the placeholder string "lol" here.
        return JSONResponse(
            status_code=404,
            content={"detail": f"service lookup failed: {e}"})

    # --- look up the model backing the service -----------------------------
    try:
        model = db.models.find_one({"model_id": f"{inference.model_id}"})
        model_path = model["model_path"]
    except Exception as e:
        # BUG FIX: the original only printed here and continued, leaving
        # model_path unbound (NameError) on the inference path.
        return JSONResponse(
            status_code=404,
            content={"detail": f"model lookup failed: {e}"})

    if service_type != "inference":
        # BUG FIX: the original fell through to `JSONResponse(content=json_test)`
        # with json_test unbound (NameError) for any other service type.
        return JSONResponse(
            status_code=400,
            content={"detail": f"unsupported service type: {service_type}"})

    # --- run inference ------------------------------------------------------
    learn = load_learner(f"{model_path}")

    img_path = inference.data[0]
    img = cv2.imread(img_path)

    # learn.predict returns (label, label_idx, probabilities) per fastai.
    out = learn.predict(img)
    classified_as = out[0]
    inference_probability = max(out[2].cpu().detach().numpy())

    # --- store the inference output ----------------------------------------
    # NOTE(review): node_ip / service_id are hard-coded placeholders — confirm.
    output_response = OutputResponse(
        node_ip="192.1.1.1",
        service_id="1234",
        image_path=f"{img_path}",
        classified_as=f"{classified_as}",
        probability=f"{inference_probability}")

    db.output.insert_one(output_response.dict())

    return JSONResponse(content=jsonable_encoder(output_response))
Esempio n. 26
0
def root():
    """Root endpoint: return a static greeting with HTTP 200."""
    return JSONResponse(status_code=200,
                        content={'message': 'Hello friends!'})
Esempio n. 27
0
def show_configuration() -> JSONResponse:
    """Return the current configuration, encoded to JSON-safe types."""
    return JSONResponse(content=jsonable_encoder(config.show()))
Esempio n. 28
0
async def index():
    """Index endpoint for the inventory data file.

    NOTE(review): this serializes the literal path string as the JSON body —
    it does NOT read the file. Confirm whether FileResponse or json.load of
    the file's contents was intended.
    """
    inventory_path = "./data/inventories_init_JSON.json"
    return JSONResponse(inventory_path)
Esempio n. 29
0
async def apply_descriptor(request: Request):
    """
    Applies Mordred Descriptor on Dataset and creates a new Dataset.

    Validates that every data entry uses the first feature's key, computes
    Mordred descriptors for each smiles string, and returns the response
    dataset with one DataEntryOut per input entry plus feature metadata.
    """
    # Error check between the key at features and values
    expected_key = request.dataset.features[0].key
    for entry in request.dataset.dataEntry:
        for key in entry.values:
            if key != expected_key:
                return JSONResponse(status_code=422,
                                    content={422: "Mismatching Keys"})

    # Get all smiles from request
    smiles = [
        entry.values[expected_key] for entry in request.dataset.dataEntry
    ]

    # Calculate descriptors for every smiles string, stripping the
    # mordred-generated 'Result({...})' wrapper from each.
    calculations = [
        str(calculate_single(s)).replace('Result({', '').replace('})', '')
        for s in smiles
    ]

    # BUG FIX: the original paired entries via smiles.index(s), which is
    # O(n^2) and returns the FIRST match — duplicate smiles strings all
    # received the first occurrence's calculations. zip() keeps each smiles
    # aligned with its own result.
    data_entry_out = [
        DataEntryOut(entryId={
            "name": s,
            "ownerUUID": None,
            "type": None,
            "URI": None
        },
                     values=format_calculations(calc))
        for s, calc in zip(smiles, calculations)
    ]

    descriptor_names_list = get_descriptors()

    conditions_mordred = {
        "Implementation Vendor":
        "Molecular Descriptor Calculator",
        "Implementation Identifier":
        "1.2.1a1",
        "Implementation Title":
        "https://jcheminf.biomedcentral.com/articles/10.1186/s13321-018-0258-y",
        "Specification Reference":
        "http://mordred-descriptor.github.io/documentation/master/"
    }
    features_out = [
        FeaturesOut(name=d +
                    " descriptor of feature with name smiles and URI " +
                    request.dataset.features[0].uri,
                    conditions=conditions_mordred,
                    category="CDK",
                    uri=d) for d in descriptor_names_list
    ]

    return {
        "responseDataset": {
            "dataEntry": data_entry_out,
            "features": features_out,
            "descriptors": ["CDK"]
        }
    }
Esempio n. 30
0
 def error_handler_400(request: Request, err: HTTPException):
     """Render any HTTPException as a 400 bad-request JSON body."""
     body = {
         'message': f'Bad request - {err}',
         'status': 400
     }
     return JSONResponse(status_code=400, content=body)