Example #1
def scrape_image(image_url: Union[str, list, dict], slug: str) -> Optional[str]:
    # Normalize the incoming value: the scraped source may provide the image
    # URL as a plain string, a list of URLs, or a dict with a "url" key.
    if isinstance(image_url, list):
        image_url = image_url[0]

    if isinstance(image_url, dict):
        image_url = image_url.get("url")

    filename = slug + "." + image_url.split(".")[-1]
    filename = app_dirs.IMG_DIR.joinpath(filename)

    try:
        r = requests.get(image_url, stream=True)
    except requests.exceptions.RequestException:
        logger.exception("Fatal Image Request Exception")
        return None

    if r.status_code == 200:
        # Let requests decode gzip/deflate so the raw stream can be written directly
        r.raw.decode_content = True

        write_image(slug, r.raw, filename.suffix)

        filename.unlink()

        return slug

    return None
Example #2
async def fetch_from_url(url: str, job: str, instance: str, retries: int = 5) -> Optional[str]:
    """ Fetch content from specified URL
    URL will be retried up to 'retries' times

    Args:
        url: str URL to fetch
        job: Class name or other description of download type, used when
        logging
        instance: Specific software instance being downloaded, used when
        logging
        retries: int specifying the number of times to retry URL

    Returns:
        The supplied 'Collection' class instance
    """
    count = 0
    try:
        async for attempt in AsyncRetrying(stop=stop_after_attempt(retries)):
            with attempt:
                count += 1
                if count > 1:
                    fastapi_logger.info('Fetching %s "%s" metadata (try %s)',
                                        job, instance, count)
                # Create HTTP session
                async with aiohttp.ClientSession() as session:
                    # Fetch latest JSON from Ansible Galaxy API
                    async with session.get(url) as response:
                        # Return the response body as text
                        return await response.text()
    except RetryError:
        fastapi_logger.exception('Error fetching %s "%s" URL %s', job,
                                 instance, url)
    return None
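
A minimal usage sketch for the coroutine above; the URL and the job/instance labels are illustrative placeholders:

import asyncio

async def main() -> None:
    # Hypothetical call: download collection metadata, retrying up to 3 times
    text = await fetch_from_url(
        "https://galaxy.ansible.com/api/", "Collection", "community.general",
        retries=3)
    print("fetched" if text is not None else "all retries failed")

asyncio.run(main())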
Example #3
async def redirect_latest(request: Request, call_next):
    """Redirect all GET requests using latest version to actual version number.

    Redirect only POST requests to for query and download endpoints, as
    other POST endpoints will require to list version number explicitly.
    """

    if (request.method == "GET" and "latest" in request.url.path) or (
        request.method == "POST"
        and "latest" in request.url.path
        and ("query" in request.url.path or "download" in request.url.path)
    ):
        try:
            path_items = request.url.path.split("/")

            # Locate the "latest" path segment
            i = 0
            for i, item in enumerate(path_items):
                if item == "latest":
                    break
            if i == 0:
                raise BadRequestError("Invalid URI")
            # Replace it with the latest version of the preceding path segment
            path_items[i] = await get_latest_version(path_items[i - 1])
            url = "/".join(path_items)
            if request.query_params:
                url = f"{url}?{request.query_params}"
            return RedirectResponse(url=url)

        except BadRequestError as e:
            return ORJSONResponse(
                status_code=400, content={"status": "failed", "message": str(e)}
            )

        except RecordNotFoundError as e:
            return ORJSONResponse(
                status_code=404, content={"status": "failed", "message": str(e)}
            )

        except HTTPException as e:
            return http_error_handler(e)

        except Exception as e:
            logger.exception(str(e))
            return ORJSONResponse(
                status_code=500,
                content={
                    "status": "error",
                    "message": "Internal Server Error. Could not process request.",
                },
            )
    else:
        response = await call_next(request)
        return response
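
For reference, a function with this (request, call_next) signature is attached as HTTP middleware; a minimal registration sketch, assuming a FastAPI app object:

from fastapi import FastAPI

app = FastAPI()

# Run redirect_latest around every request handled by the app
app.middleware("http")(redirect_latest)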
Example #4
def produce_upload_message(json_payload):
    if not KAFKA_PRODUCER:
        raise Exception("Kafka not available")
    logger.debug("to producer.send()")
    future = KAFKA_PRODUCER.send(KAFKA_TOPIC, json_payload)
    try:
        record_metadata = future.get(timeout=10)
        logger.info("send future completed")
        return record_metadata
    except KafkaError:
        logger.exception('Failed to send to kafka')
        raise
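
A hedged usage sketch for the helper above; the payload is hypothetical and assumes kafka-python with a reachable broker:

from kafka.errors import KafkaError

payload = {"url": "s3://bucket/archive.tar.gz", "account": "12345"}  # hypothetical payload
try:
    metadata = produce_upload_message(payload)
    print(metadata.topic, metadata.partition, metadata.offset)
except KafkaError:
    pass  # already logged inside produce_upload_message; retry or surface as needed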
Example #5
def push(body=Depends(get_body)):
    try:
        request = PushFeaturesRequest(**json.loads(body))
        df = pd.DataFrame(request.df)
        store.push(
            push_source_name=request.push_source_name,
            df=df,
            allow_registry_cache=request.allow_registry_cache,
        )
    except Exception as e:
        # Log the original exception (with traceback) on the server side
        logger.exception(e)
        # Raise an HTTPException to return the error message to the client
        raise HTTPException(status_code=500, detail=str(e))
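
The get_body dependency is not shown in this snippet; a minimal sketch of what such a dependency presumably looks like, handing the raw request body to the endpoint for manual JSON parsing:

from fastapi import Request

async def get_body(request: Request):
    # Return the raw request bytes so the endpoint can parse them itself
    return await request.body()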
Example #6
async def redirect_latest(request: Request, call_next):
    """Redirect all GET requests using latest version to actual version
    number."""

    try:
        if request.method == "GET" and "latest" in request.url.path:
            path_items = request.url.path.split("/")

            # Locate the "latest" path segment
            i = 0
            for i, item in enumerate(path_items):
                if item == "latest":
                    break
            if i == 0:
                raise BadRequestError("Invalid URI")
            path_items[i] = await get_latest_version(path_items[i - 1])
            url = "/".join(path_items)
            if request.query_params:
                url = f"{url}?{request.query_params}"
            return RedirectResponse(url=url)
        else:
            response = await call_next(request)
            return response

    except BadRequestError as e:
        return ORJSONResponse(status_code=400,
                              content={
                                  "status": "failed",
                                  "data": str(e)
                              })

    except RecordNotFoundError as e:
        return ORJSONResponse(status_code=404,
                              content={
                                  "status": "failed",
                                  "data": str(e)
                              })

    except Exception as e:
        logger.exception(str(e))
        return ORJSONResponse(
            status_code=500,
            content={
                "status": "error",
                "data": "Internal Server Error. Could not process request.",
            },
        )
Example #7
async def predict_bitcoin_next_min(request: PredictionDataIn):

    try:
        event = sort_prediction_request(request.bitcoin_last_minute)
    except (IndexError, ValueError) as e:
        logger.exception(e)
        # Raise (not return) so FastAPI converts this into a 400 response
        raise HTTPException(status_code=400, detail='Invalid input payload')

    series_data = build_series_df(event)
    # Wrap in an outer array to add the batch dimension the model expects
    transformed_series_data = np.array([scaler.transform(series_data)])

    prediction = model.predict(transformed_series_data)
    # Zero-pad to the scaler's full column count before inverting the scaling
    for_transform = [[0, prediction[0][0], 0, 0, 0, 0]]
    res = scaler.inverse_transform(for_transform)
    return {
        'bitcoin_prediction': round(float(res[0][1]), 2)
    }
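
The zero-padding before inverse_transform works because the scaler was fitted on the full feature matrix, so it expects the same number of columns back; a self-contained sketch of the trick (column count and values are illustrative):

import numpy as np
from sklearn.preprocessing import MinMaxScaler

train = np.random.rand(100, 6)          # pretend the scaler was fitted on 6 feature columns
scaler = MinMaxScaler().fit(train)

predicted_scaled = 0.42                 # model output for the one column of interest (index 1)
padded = [[0, predicted_scaled, 0, 0, 0, 0]]   # zeros fill the unused columns
print(scaler.inverse_transform(padded)[0][1])  # recover the unscaled prediction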
Example #8
def write_to_online_store(body=Depends(get_body)):
    warnings.warn(
        "write_to_online_store is deprecated. Please consider using /push instead",
        RuntimeWarning,
    )
    try:
        request = WriteToFeatureStoreRequest(**json.loads(body))
        df = pd.DataFrame(request.df)
        store.write_to_online_store(
            feature_view_name=request.feature_view_name,
            df=df,
            allow_registry_cache=request.allow_registry_cache,
        )
    except Exception as e:
        # Log the original exception (with traceback) on the server side
        logger.exception(e)
        # Raise an HTTPException to return the error message to the client
        raise HTTPException(status_code=500, detail=str(e))
Example #9
async def redeploy_tile_cache_service(asset_id: UUID) -> None:
    """Redeploy Tile cache service to make sure dynamic tile cache is
    recognized."""
    try:
        update_ecs_service(TILE_CACHE_CLUSTER, TILE_CACHE_SERVICE)
        ecs_change_log = ChangeLog(
            date_time=datetime.now(),
            status=ChangeLogStatus.success,
            message="Redeployed Tile Cache Service",
        )
    except ClientError as e:
        # Don't make this a blocker, but log it in case something goes wrong
        logger.exception(str(e))
        ecs_change_log = ChangeLog(
            date_time=datetime.now(),
            status=ChangeLogStatus.failed,
            message="Failed to redeploy Tile Cache Service",
            detail=str(e),
        )
    await assets.update_asset(asset_id, change_log=[ecs_change_log.dict(by_alias=True)])
Example #10
async def _get_gfw_geostore(geostore_id: UUID) -> GeostoreCommon:
    """Get GFW Geostore geometry."""

    try:
        geostore: Geostore = await get_geostore_from_anywhere(geostore_id)
        geostore_common: GeostoreCommon = GeostoreCommon(
            geostore_id=geostore.gfw_geostore_id,
            geojson=geostore.gfw_geojson,
            area__ha=geostore.gfw_area__ha,
            bbox=geostore.gfw_bbox,
        )
    except (KeyError, RecordNotFoundError) as ex:
        logger.exception(ex)
        raise BadResponseError("Cannot fetch geostore geometry")

    if geostore.gfw_geojson is None:
        logger.error(f"Geometry for geostore_id {geostore_id} is None")
        raise BadResponseError("Cannot fetch geostore geometry")

    return geostore_common
Example #11
async def get_online_features(request: Request):
    try:
        # Validate and parse the request data into a GetOnlineFeaturesRequest Protobuf object
        body = await request.body()
        request_proto = GetOnlineFeaturesRequest()
        Parse(body, request_proto)

        # Initialize parameters for the FeatureStore.get_online_features(...) call
        if request_proto.HasField("feature_service"):
            features = store.get_feature_service(
                request_proto.feature_service)
        else:
            features = list(request_proto.features.val)

        full_feature_names = request_proto.full_feature_names

        # Every entity column must supply the same number of values
        batch_sizes = [len(v.val) for v in request_proto.entities.values()]
        num_entities = batch_sizes[0]
        if any(batch_size != num_entities for batch_size in batch_sizes):
            raise HTTPException(status_code=500,
                                detail="Uneven number of columns")

        entity_rows = [{
            k: feast_value_type_to_python_type(v.val[idx])
            for k, v in request_proto.entities.items()
        } for idx in range(num_entities)]

        response_proto = store.get_online_features(
            features, entity_rows,
            full_feature_names=full_feature_names).proto

        # Convert the Protobuf object to JSON and return it
        return MessageToDict(  # type: ignore
            response_proto,
            preserving_proto_field_name=True,
            float_precision=18)
    except HTTPException:
        # Let deliberately raised HTTP errors propagate unchanged
        raise
    except Exception as e:
        # Log the original exception on the server side
        logger.exception(e)
        # Raise an HTTPException to return the error message to the client
        raise HTTPException(status_code=500, detail=str(e))
Example #12
def get_online_features(body=Depends(get_body)):
    try:
        # Validate and parse the request data into a GetOnlineFeaturesRequest Protobuf object
        request_proto = GetOnlineFeaturesRequest()
        Parse(body, request_proto)

        # Initialize parameters for the FeatureStore.get_online_features(...) call
        if request_proto.HasField("feature_service"):
            features = store.get_feature_service(
                request_proto.feature_service, allow_cache=True)
        else:
            features = list(request_proto.features.val)

        full_feature_names = request_proto.full_feature_names

        # Every entity column must supply the same number of values
        batch_sizes = [len(v.val) for v in request_proto.entities.values()]
        num_entities = batch_sizes[0]
        if any(batch_size != num_entities for batch_size in batch_sizes):
            raise HTTPException(status_code=500,
                                detail="Uneven number of columns")

        response_proto = store._get_online_features(
            features=features,
            entity_values=request_proto.entities,
            full_feature_names=full_feature_names,
            native_entity_values=False,
        ).proto

        # Convert the Protobuf object to JSON and return it
        return MessageToDict(  # type: ignore
            response_proto,
            preserving_proto_field_name=True,
            float_precision=18)
    except HTTPException:
        # Let deliberately raised HTTP errors propagate unchanged
        raise
    except Exception as e:
        # Log the original exception (with traceback) on the server side
        logger.exception(e)
        # Raise an HTTPException to return the error message to the client
        raise HTTPException(status_code=500, detail=str(e))
Example #13
JOB_LINE = (
    '{job_id},37,job,1,organization_{org_id},{created},'
    'template_name_{template_id},471,'
    'scheduled,19,localhost,"",f,{status},f,{started},{finished},5.873,"",1\n')
EVENT_LINE = ('{id},{created},'
              '374c9e9c-561c-4222-acd4-91189dd95b1d,"",verbose_{module_id},'
              'verbose_module_{module_id},{failed},'
              '{changed},"","","super_task_{module_id}",'
              '"",{job_id},{host_id},"host_name_{host_id}"\n')

try:
    KAFKA_PRODUCER = KafkaProducer(
        bootstrap_servers=['{0}:{1}'.format(KAFKA_HOST, KAFKA_PORT)],
        value_serializer=lambda m: json.dumps(m).encode('ascii'))
except Exception:
    logger.exception('Failed to connect to: %s:%s', KAFKA_HOST, KAFKA_PORT)


class TestDataGenerator:
    def _default_date_time(self, days_ago=0, seconds=0):
        date = datetime.datetime.now() - datetime.timedelta(days=days_ago)
        date = date.replace(hour=1,
                            minute=21,
                            second=seconds,
                            microsecond=840210)
        return date.astimezone().isoformat()

    def read_sample_data(self):
        return {
            filename: pkgutil.get_data('api.core.sample_data', filename)
            for filename in FILES
        }
Example #14
async def issue(createToken: createToken, request: Request,
                background_tasks: BackgroundTasks):
    config = get_config()
    certificate_batch_handler, transaction_handler, connector = \
        ethereum_sc.instantiate_blockchain_handlers(config)

    # file that stores the ipfs hashes of the certificates in the batch
    if createToken.enableIPFS is True:
        try:
            ipfsHash = add_file_ipfs("./data/meta_certificates/.placeholder")
            generateKey = True
            ipnsHash, generatedKey = add_file_ipns(ipfsHash, generateKey)
            tokenURI = 'http://ipfs.io/ipns/' + ipnsHash['Name']
        except Exception as e:
            logger.info(e)
            raise HTTPException(status_code=400,
                                detail="Couldn't add file to IPFS")
    else:
        tokenURI = 'https://bloxberg.org'

    try:
        #pr = cProfile.Profile()
        #pr.enable()

        tx_id, token_id = await issue_batch_to_blockchain(
            config, certificate_batch_handler, transaction_handler,
            createToken.recipientPublickey, tokenURI,
            createToken.unSignedCerts)
        #pr.disable()
        #pr.print_stats(sort="tottime")
        #pr.dump_stats('profileAPI.pstat')
    except Exception as e:
        logger.exception(e)
        try:
            background_tasks.add_task(removeTempFiles,
                                      config.blockchain_certificates_dir,
                                      createToken.unSignedCerts,
                                      config.work_dir)
        except Exception:
            # Best-effort cleanup; failures here are not fatal
            pass
        raise HTTPException(
            status_code=400,
            detail="Failed to issue certificate batch to the blockchain")

    # Retrieve file path of certified transaction
    blockchain_file_path = config.blockchain_certificates_dir
    json_data = []

    for fileID in certificate_batch_handler.certificates_to_issue:
        full_path_with_file = str(blockchain_file_path + '/' + fileID +
                                  '.json')
        if createToken.enableIPFS is True:
            ipfsHash = add_file_ipfs(full_path_with_file)

        with open(full_path_with_file) as f:
            d = json.load(f)
        # Save JSON Certificate to IPFS
        if createToken.enableIPFS is True:
            temp = ipfs_object["file_certifications"]
            y = {
                "id": fileID,
                "ipfsHash": 'http://ipfs.io/ipfs/' + ipfsHash,
                "crid": d["crid"]
            }
            temp.append(y)

        json_data.append(d)

    # write ipfs object into the ipfs batch file
    try:
        if createToken.enableIPFS is True:
            with open(ipfs_batch_file, 'w') as file:
                json.dump(ipfs_object, file)
            ipfs_batch_hash = add_file_ipfs(ipfs_batch_file)
            generateKey = False
            ipnsHash = add_file_ipns(ipfs_batch_hash,
                                     generateKey,
                                     newKey=generatedKey)
            print("Updated IPNS Hash")
            print(ipnsHash)
            # update_ipfs_link(token_id, 'http://ipfs.io/ipfs/' + ipfs_batch_hash)
    except Exception:
        return "Updating IPNS link failed"

    python_environment = os.getenv("app")
    # if python_environment == "production":
    background_tasks.add_task(removeTempFiles,
                              config.blockchain_certificates_dir,
                              createToken.unSignedCerts, config.work_dir)

    #     full_path_with_file = str(config.blockchain_certificates_dir + '/')
    #     for file_name in os.listdir(full_path_with_file):
    #         if file_name.endswith('.json'):
    #             print(full_path_with_file + file_name)
    #             os.remove(full_path_with_file + file_name)

    return json_data