Example #1
def create_table():
    try:
        if not db.dialect.has_table(db, "predictions"):
            prediction_table.create()
            logger.info("Prediction table created")
        else:
            logger.info("Prediction table already exists")
    except Exception as e:
        logger.error(f"Error when trying to create/connect to the db {e}")
Example #2
def create_if_not_exist(session, name: str = None):
    test_slug = slugify(name)
    result = session.query(Category).filter(
        Category.slug == test_slug).one_or_none()
    if result:
        logger.info("Category exists, associating recipe")
        return result
    else:
        logger.info("Category doesn't exist, creating tag")
        return Category(name=name)
Example #3
    def bind(self):
        try:
            e = CURRENT_ENGINE.get()
            bind = e.result()
            logger.info(f"Set bind to {bind.repr(color=True)}")
            return bind
        except LookupError:
            # not in a request
            logger.info("Not in a request, using default bind")
            return self._bind
Example #4
async def initial_deal():
    try:
        query = sa.select([deals]).order_by(deals.c.created_at.desc()).limit(1)
        rlt = await database.execute(query)
        if not rlt:
            query = deals.insert().values(uuid=uuid.uuid4(),
                                          balance=0.0).returning(deals.c.uuid)
            rlt = await database.execute(query)
            logger.info(f"Initial deal created {rlt}")
    except Exception as e:
        logger.warning(f"initial_deal: Exception: {e}")
Example #5
async def websocket_endpoint(websocket: WebSocket):
    await websocket.accept()
    while True:
        data = await websocket.receive_text()
        chat_message = base_models.ChatMessage(uid_src=1, uid_dest=2, message=data)
        is_profanity, msg_dict = await api.chat_api.send_chat(chat_message)
        if not is_profanity:
            await websocket.send_text(f"Message text was: {data}")
        else:
            logger.info('### Profanity detected')
            print('### Profanity detected')
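A quick way to exercise the endpoint is Starlette's test client. The FastAPI instance and the route path below are assumptions, since the snippet does not show how the endpoint is mounted:

from fastapi import FastAPI
from fastapi.testclient import TestClient

app = FastAPI()
app.websocket("/ws")(websocket_endpoint)   # register the endpoint above (assumed path)

client = TestClient(app)
with client.websocket_connect("/ws") as ws:
    ws.send_text("hello there")
    print(ws.receive_text())               # "Message text was: hello there" when no profanity is found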
Example #6
def print_response(response):
    if not CONFIG.print_log:
        return
    if response.history:
        logger.info(' Request was redirected')
        for resp in response.history:
            logger.info(f'  status_code = {resp.status_code}, url= {resp.url}')
        logger.info(' Final destination:')
    else:
        logger.info(' Request was not redirected')
    logger.info(f'  status_code = {response.status_code}, url= {response.url}')
Example #7
def default_user_init(session: Session):
    default_user = {
        "full_name": "Change Me",
        "email": "*****@*****.**",
        "password": get_password_hash(settings.DEFAULT_PASSWORD),
        "group": settings.DEFAULT_GROUP,
        "admin": True,
    }

    logger.info("Generating Default User")
    db.users.create(session, default_user)
Example #8
def produce_upload_message(json_payload):
    if not KAFKA_PRODUCER:
        raise Exception("Kafka not available")
    logger.debug("to producer.send()")
    future = KAFKA_PRODUCER.send(KAFKA_TOPIC, json_payload)
    try:
        record_metadata = future.get(timeout=10)
        logger.info("send future completed")
        return record_metadata
    except KafkaError:
        logger.exception('Failed to send to kafka')
        raise
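KAFKA_PRODUCER and KAFKA_TOPIC are module-level globals that are not shown here. A minimal setup sketch using kafka-python, with the broker address and topic name as assumptions:

import json
from kafka import KafkaProducer

KAFKA_TOPIC = "uploads"                          # assumed topic name
KAFKA_PRODUCER = KafkaProducer(
    bootstrap_servers="localhost:9092",          # assumed broker address
    value_serializer=lambda v: json.dumps(v).encode("utf-8"),
)

# record_metadata = produce_upload_message({"request_id": "abc123"})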
Example #9
def load_model():
    model_path = PATH_TO_MODEL
    desc_path = PATH_TO_DESCRIPTION
    global model
    model = None
    logger.info('Start to load model')
    time.sleep(20)
    with open(model_path, 'rb') as file:
        model = pickle.load(file)
    global desc
    logger.info('Model has been loaded!')
    desc = classes.read_attempt_params(desc_path)
Example #10
async def get_items(q: str = Query(...,
                                   min_length=5,
                                   max_length=50,
                                   regex="^fixedquery$")):
    try:
        results = [{"item_id": "Foo"}, {"item_id": "Bar"}]
        if q:
            logger.info(type(q))
            results.append({"q": q})
        return results
    except AttributeError as e:
        logger.warning(msg=e)
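A possible wiring for the validated query parameter above; the path is an assumption, not shown in the original:

# Hypothetical route registration and expected behaviour.
from fastapi import FastAPI

app = FastAPI()
app.get("/items/")(get_items)

# GET /items/?q=fixedquery -> [{"item_id": "Foo"}, {"item_id": "Bar"}, {"q": "fixedquery"}]
# GET /items/?q=short      -> 422 validation error (does not match the regex)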
Example #11
async def translate(text: str = Form(...),
                    source_language: str = Form(...),
                    target_language: str = Form(...)):
    data = {"source": source_language, "target": target_language, "q": text}
    req = requests.get("http://taln.upf.edu/mmt-es_en/translate", params=data)

    json_response = req.json()
    text = json_response['data']['translation']

    response = {"text": text}
    logger.info(response)
    return response
Example #12
File: main.py Project: josix/IOU
async def callback(request: Request):
    # get X-Line-Signature header value
    signature: str = request.headers["X-Line-Signature"]
    # get request body as text
    body: bytes = await request.body()
    decode_body: str = body.decode()
    logger.info("Request body: {}".format(decode_body))
    # handle webhook body
    try:
        handler.handle(decode_body, signature)
    except InvalidSignatureError:
        raise HTTPException(status_code=400, detail="Invalid Signature")
    return "OK"
Example #13
    async def _bulk_start_all(self):
        await self.db.ready.wait()
        await self.app_ready.wait()

        webhooks = await self.db.pool.fetch(
            f"SELECT {self.target}, webhook_url FROM {self.table}")

        for webhook in webhooks:
            self._loop.create_task(
                self._start_webhook(target=webhook[self.target],
                                    webhook_url=webhook["webhook_url"]))

        logger.info(f"started {len(webhooks)} {self.provider} hooks")
Example #14
def create_bundle(config: BundleConfig, process: bool = True):
    """Create a bundle and return an ID for later reference."""
    config.bundle_uuid = str(uuid.uuid4()).replace('-', '')
    TestDataGenerator().generate_bundle(config)
    if process:
        notify_upload(
            HOST_URL,
            config.account_id,
            config.tenant_id,
            config.bundle_uuid)
    else:
        logger.info("Process=False, not sending message")
    return config
Example #15
def convert_float_to_int(
    stats: Optional[Dict[str, Any]],
    source_asset_co: RasterTileSetSourceCreationOptions,
) -> Tuple[RasterTileSetSourceCreationOptions, str]:

    stats = generate_stats(stats)

    logger.info("In convert_float_to_int()")

    assert len(stats.bands) == 1
    stats_min = stats.bands[0].min
    stats_max = stats.bands[0].max
    value_range = math.fabs(stats_max - stats_min)

    logger.info(
        f"stats_min: {stats_min} stats_max: {stats_max} value_range: {value_range}"
    )

    # Shift by 1 (and add 1 later) so any values of zero don't get counted as no_data
    uint16_max = np.iinfo(np.uint16).max - 1
    # Expand or squeeze to fit into a uint16
    mult_factor = (uint16_max / value_range) if value_range else 1

    logger.info(f"Multiplicative factor: {mult_factor}")

    if isinstance(source_asset_co.no_data, list):
        raise RuntimeError("Cannot apply colormap on multi band image")
    elif source_asset_co.no_data is None:
        old_no_data: str = "None"
    elif source_asset_co.no_data == str(np.nan):
        old_no_data = "np.nan"
    else:
        old_no_data = str(source_asset_co.no_data)

    calc_str = (f"(A != {old_no_data}).astype(bool) * "
                f"(1 + (A - {stats_min}) * {mult_factor}).astype(np.uint16)")

    logger.info(f"Resulting calc string: {calc_str}")

    source_asset_co.data_type = DataType.uint16
    source_asset_co.no_data = 0

    if source_asset_co.symbology and source_asset_co.symbology.colormap is not None:
        source_asset_co.symbology.colormap = {
            (1 + (float(k) - stats_min) * mult_factor): v
            for k, v in source_asset_co.symbology.colormap.items()
        }
        logger.info(
            f"Resulting colormap: {source_asset_co.symbology.colormap}")

    return source_asset_co, calc_str
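To make the rescaling concrete, a small worked example with illustrative numbers (not taken from the original):

import math
import numpy as np

# Suppose the single band has stats_min = -2.0, stats_max = 6.0 and the
# source no_data value is -9999 (illustrative values only).
stats_min, stats_max, old_no_data = -2.0, 6.0, -9999
value_range = math.fabs(stats_max - stats_min)   # 8.0
uint16_max = np.iinfo(np.uint16).max - 1         # 65534
mult_factor = uint16_max / value_range           # 8191.75

calc_str = (f"(A != {old_no_data}).astype(bool) * "
            f"(1 + (A - {stats_min}) * {mult_factor}).astype(np.uint16)")
# -> "(A != -9999).astype(bool) * (1 + (A - -2.0) * 8191.75).astype(np.uint16)"
# stats_min maps to 1, stats_max maps to 65535, and old no_data pixels become 0,
# which matches the new no_data value set by the function.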
Example #16
def health(background_task: BackgroundTasks, response: Response) -> bool:
    global counter
    if model is None:
        response.status_code = status.HTTP_503_SERVICE_UNAVAILABLE
        return False
    elif counter < MAX_RESPONSE:
        counter += 1
        msg = f'You have used {counter} of {MAX_RESPONSE} requests'
        logger.info(msg)
        return True
    else:
        logger.error('Service unavailable')
        response.status_code = status.HTTP_404_NOT_FOUND
        return False
Example #17
    async def process_input(self, input):
        loop = asyncio.get_running_loop()
        our_task = {"done_event": asyncio.Event(loop=loop),
                    "input": input,
                    "time": loop.time()}
        async with self.queue_lock:
            if len(self.queue) >= self.max_queue_size:
                raise HandlingError("I'm too busy", code=503)
            self.queue.append(our_task)
            logger.info("enqueued task. new queue size {}".format(len(self.queue)))
            self.schedule_processing_if_needed(loop)

        await our_task["done_event"].wait()
        return our_task["output"]
Example #18
def upload_blob(img, ext: str, mime: str):
    """Given an img array and extension, uploads it to GStorage."""
    if "." in ext:
        ext = ext[1:]
    filename = str(uuid.uuid4()) + "." + ext
    fastapi_logger.info(f"Uploading to Storage: {filename}")

    blob = bucket.blob(filename)
    with tempfile.NamedTemporaryFile(suffix=ext) as temp:
        temp_filename = temp.name + "." + ext
        cv2.imwrite(temp_filename, img)
        blob.upload_from_filename(temp_filename, content_type=mime)
    blob.make_public()
    return blob.public_url
Example #19
async def send_mail(message: Union[Dict, EmailSchema, EmailAttachFileSchema]):
    message = message if isinstance(message, Dict) else message.dict()
    email = generate_email(**message)
    succeed = False
    try:
        sg = get_sg_client()
        response = sg.send(email)
        logger.info('send mail status_code: ' + str(response.status_code))
        succeed = 200 <= response.status_code < 300
        # logger.info('response body: ' + str(response.body))
        # logger.info('response header' + str(response.headers))
    except Exception as e:
        logger.warning(f'Error: {str(e)}')
    return succeed
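`get_sg_client` and `generate_email` are project helpers that are not shown. A hypothetical sketch using the sendgrid library, with the API-key variable, field names and sender address as assumptions:

import os
from sendgrid import SendGridAPIClient
from sendgrid.helpers.mail import Mail

def get_sg_client() -> SendGridAPIClient:
    return SendGridAPIClient(os.environ["SENDGRID_API_KEY"])   # assumed env var

def generate_email(*, to: str, subject: str, body: str, **_) -> Mail:
    # Field names mirror what EmailSchema might contain; purely illustrative.
    return Mail(
        from_email="noreply@example.com",
        to_emails=to,
        subject=subject,
        html_content=body,
    )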
Example #20
    async def model_runner(self):
        loop = asyncio.get_running_loop()
        self.queue_lock = asyncio.Lock(loop=loop)
        self.needs_processing = asyncio.Event(loop=loop)
        logger.info("started model runner")
        while True:
            logger.info('Waiting for needs_processing')
            await self.needs_processing.wait()
            self.needs_processing.clear()
            if self.needs_processing_timer is not None:
                self.needs_processing_timer.cancel()
                self.needs_processing_timer = None
            logger.info('Locking queue_lock')
            async with self.queue_lock:
                if self.queue:
                    longest_wait = loop.time() - self.queue[0]["time"]
                else:  # oops
                    longest_wait = None
                logger.info(
                    "launching processing. queue size: {}. longest wait: {}".format(len(self.queue), longest_wait))
                to_process = self.queue[:self.max_batch_size]
                del self.queue[:len(to_process)]
                self.schedule_processing_if_needed(loop)
            # so here we copy, it would be neater to avoid this
            batch = [t["input"] for t in to_process]
            result = await loop.run_in_executor(
                None, functools.partial(self.run_model, batch)
            )
            for t, r in zip(to_process, result):
                t["output"] = r
                t["done_event"].set()
            del to_process
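The `schedule_processing_if_needed` helper used by Example #17 and by the runner above is not shown on this page. A minimal sketch of what it might look like, assuming the class also defines `max_batch_size`, `max_wait` and `needs_processing_timer`:

    # Hypothetical helper (not from the original): wakes the runner when a full
    # batch is queued, or once the oldest queued item has waited max_wait seconds.
    def schedule_processing_if_needed(self, loop):
        if len(self.queue) >= self.max_batch_size:
            self.needs_processing.set()
        elif self.queue and self.needs_processing_timer is None:
            delay = self.queue[0]["time"] + self.max_wait - loop.time()
            self.needs_processing_timer = loop.call_later(
                delay, self.needs_processing.set)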
Example #21
async def is_service_account(token: str = Depends(oauth2_scheme)) -> bool:
    """Calls GFW API to authorize user.

    User must be service account with email [email protected]
    """

    response = who_am_i(token)

    if response.status_code == 401 or not (
            response.json()["email"] == "*****@*****.**"
            and "gfw" in response.json()["extraUserData"]["apps"]):
        logger.info("Unauthorized user")
        raise HTTPException(status_code=401, detail="Unauthorized")
    else:
        return True
Example #22
async def is_admin(token: str = Depends(oauth2_scheme)) -> bool:
    """Calls GFW API to authorize user.

    User must be ADMIN for gfw app
    """

    response = who_am_i(token)

    if response.status_code == 401 or not (
            response.json()["role"] == "ADMIN"
            and "gfw" in response.json()["extraUserData"]["apps"]):
        logger.info("Unauthorized user")
        raise HTTPException(status_code=401, detail="Unauthorized")
    else:
        return True
Example #23
async def get_user(token: str = Depends(oauth2_scheme)) -> Tuple[str, str]:
    """Calls GFW API to authorize user.

    This function checks whether a user of any level is associated with the
    GFW app and returns the user ID and role.
    """

    response: Response = await who_am_i(token)

    if response.status_code == 401 or not (
        "gfw" in response.json()["extraUserData"]["apps"]
    ):
        logger.info("Unauthorized user")
        raise HTTPException(status_code=401, detail="Unauthorized")
    else:
        return response.json()["id"], response.json()["role"]
Example #24
async def _query_raster_lambda(
    geometry: Geometry,
    sql: str,
    grid: Grid = Grid.ten_by_forty_thousand,
    format: QueryFormat = QueryFormat.json,
    delimiter: Delimiters = Delimiters.comma,
) -> Dict[str, Any]:
    data_environment = await _get_data_environment(grid)
    payload = {
        "geometry": jsonable_encoder(geometry),
        "query": sql,
        "environment": data_environment.dict()["layers"],
        "format": format,
    }

    logger.info(
        f"Submitting raster analysis lambda request with payload: {payload}")

    try:
        response = await invoke_lambda(RASTER_ANALYSIS_LAMBDA_NAME, payload)
    except httpx.TimeoutException:
        raise HTTPException(500, "Query took too long to process.")

    # invalid response codes are reserved for Lambda-specific issues (e.g. too many requests)
    if response.status_code >= 300:
        raise HTTPException(
            500,
            f"Raster analysis geoprocessor returned invalid response code {response.status_code}",
        )

    # response must be in JSEND format or something unexpected happened
    response_body = response.json()
    if "status" not in response_body or ("data" not in response_body
                                         and "message" not in response_body):
        raise HTTPException(
            500,
            f"Raster analysis lambda received an unexpected response: {response.text}",
        )

    if response_body["status"] == "failed":
        # validation error
        raise HTTPException(422, response_body["message"])
    elif response_body["status"] == "error":
        # geoprocessing error
        raise HTTPException(500, response_body["message"])

    return response_body
Example #25
    async def update(self):
        """ Fetch and cache latest data from Galaxy

        Returns:
            str of json data from Galaxy
        """
        fastapi_logger.info('Fetching %s "%s" metadata',
                            self.__class__.__name__, self.name)
        # Ensure no two lookups occur at the same time
        async with asyncio.Lock():
            text = await fetch_from_url(self.url(), self.__class__.__name__,
                                        self.name)
            if text is None:
                return None
            jdata = json.loads(text)
        self.last_update = datetime.now()
        return jdata
Example #26
async def api_logger(request: Request, response=None, error=None):
    time_format = "%Y/%m/%d %H:%M:%S"
    t = time() - request.state.start
    status_code = error.status_code if error else response.status_code
    error_log = None
    user = request.state.user

    if error:
        if request.state.inspect:
            frame = request.state.inspect
            error_file = frame.f_code.co_filename
            error_func = frame.f_code.co_name
            error_line = frame.f_lineno
        else:
            error_func = error_file = error_line = "UNKNOWN"

        error_log = dict(
            errorFunc=error_func,
            location="{} line in {}".format(str(error_line), error_file),
            raised=str(error.__class__.__name__),
            msg=str(error.ex),
        )

    email = user.email.split("@") if user and user.email else None
    user_log = dict(
        client=request.state.ip,
        user=user.id if user and user.id else None,
        email="**" + email[0][2:-1] + "*@" +
        email[1] if user and user.email else None,
    )

    log_dict = dict(
        url=request.url.hostname + request.url.path,
        method=str(request.method),
        statusCode=status_code,
        errorDetail=error_log,
        client=user_log,
        processedTime=str(round(t * 1000, 5)) + "ms",
        datetimeUTC=datetime.utcnow().strftime(time_format),
        datetimeKST=(datetime.utcnow() +
                     timedelta(hours=9)).strftime(time_format),
    )
    if error and error.status_code >= 500:
        logger.error(json.dumps(log_dict))
    else:
        logger.info(json.dumps(log_dict))
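`api_logger` expects `request.state.start`, `request.state.ip`, `request.state.user` and `request.state.inspect` to be populated earlier in the request cycle. One possible wiring through a FastAPI middleware; the app instance and the way `user`/`inspect` are filled are assumptions:

from time import time
from fastapi import FastAPI, Request

app = FastAPI()

@app.middleware("http")
async def logging_middleware(request: Request, call_next):
    request.state.start = time()
    request.state.ip = request.client.host
    request.state.user = None      # would be set by the auth layer in a real app
    request.state.inspect = None   # would hold a frame object when an error is captured
    response = await call_next(request)
    await api_logger(request=request, response=response)
    return response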
Example #27
def log_stats(request, data=None, user=None):
    """Log detailed data in JSON for incoming/outgoing API request."""
    client_host = request.client.host
    msg = {}
    # TODO: Log user once authentication is connected. msg["user"] = str(user)
    # request.state.time_started = time.time()
    msg["time_started"] = str(time.time())
    msg["method"] = str(request.method)
    msg["url"] = str(request.url)
    msg["host"] = str(client_host)
    msg["query_params"] = str(request.query_params)
    msg["path_params"] = str(request.path_params)
    msg["headers"] = dict(request.headers)
    msg["data"] = str(data)
    fastapi_logger.info(json.dumps(msg))
    es.index(index=idx_log, body=json.dumps(msg))
    return
Example #28
    async def InitLoop(self, loop: asyncio.AbstractEventLoop):

        while True:
            fastapi_logger.debug("dsadas")
            fastapi_logger.info("dsadas")
            try:
                self.Connection = await aio_pika.robust_connection.connect_robust(
                    "amqp://*****:*****@" + RABBITMQ_HOST,
                    # loop=loop
                )
                logging.info("Connected to rmq")

                break
            except Exception as err:
                logging.info("Failed to establish connection with RMQ")
                logging.info("Retring in 2 sec")
                self.Connection = None
                await asyncio.sleep(2)
Example #29
def create_blog_site(*, new_site: CreateBlogSite, current_user: User = Depends(get_current_user)):
    title = new_site.title
    site_name = new_site.site_name
    theme = new_site.theme
    blog_site = BlogSite.select().where(BlogSite.user_id == current_user.uuid)
    if blog_site:
        return fail_response('Your blog site already exists and cannot be created again')
    try:
        BlogSite.create(title=title,
                        site_name=site_name,
                        theme=theme,
                        user_id=current_user.uuid)
        logger.info(f'Blog site name={site_name} created successfully')
        return success_response('Created successfully')
    except Exception as e:
        db.rollback()
        logger.info(f'Failed to create blog site name={site_name}, reason: {e}')
        return fail_response('Failed to create blog site')
Example #30
def init_db(db: Session) -> None:
    from app import crud

    try:
        Base.metadata.create_all(bind=db.bind)
        if crud.user.get_by_nickname(db, nickname=settings.ADMIN_NAME) is None:
            logger.info("Creating new admin user, "
                        f"as no existing with {settings.ADMIN_NAME} was found")
            crud.user.create(
                db,
                obj_in=schemas.UserCreate(
                    nickname=settings.ADMIN_NAME,
                    password=settings.ADMIN_PASSWORD,
                ),
            )
    except SQLAlchemyError:
        db.rollback()
        raise
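A minimal way to run `init_db` from a standalone script; the `SessionLocal` factory and its import path are assumptions:

# Hypothetical bootstrap script; adjust the import to the project layout.
from app.db.session import SessionLocal

def main() -> None:
    db = SessionLocal()
    try:
        init_db(db)
        logger.info("Initial data created")
    finally:
        db.close()

if __name__ == "__main__":
    main()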