Code Example #1
File: util.py Project: jgmel/hyperglass
from pathlib import Path
from queue import Queue
from typing import Iterable

# FileCopy (a threading.Thread subclass) and log are provided elsewhere
# in the hyperglass codebase.


def copyfiles(src_files: Iterable[Path], dst_files: Iterable[Path]):
    """Copy an iterable of files from source to destination with threading."""
    queue = Queue()
    threads = ()

    # Materialize the iterables so len() and indexing work for any
    # iterable, not just sequences.
    src_files = tuple(src_files)
    dst_files = tuple(dst_files)

    src_files_len = len(src_files)
    dst_files_len = len(dst_files)

    if src_files_len != dst_files_len:
        raise ValueError(
            "The number of source files ({}) must match the number of "
            "destination files ({}).".format(src_files_len, dst_files_len)
        )

    for i, file in enumerate(src_files):
        file_thread = FileCopy(src=file, dst=dst_files[i], queue=queue)
        threads += (file_thread, )

    for thread in threads:
        thread.start()

    for _file in src_files:
        copied = queue.get()
        log.success("Copied {}", str(copied))

    for thread in threads:
        thread.join()

    for i, file in enumerate(dst_files):
        if not file.exists():
            raise RuntimeError("{} was not copied to {}".format(
                str(src_files[i]), str(file)))

    return True
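
A minimal usage sketch of the helper above (hypothetical paths; assumes copyfiles and its FileCopy/log dependencies are importable):

from pathlib import Path

src = [Path("/tmp/one.txt"), Path("/tmp/two.txt")]
dst = [Path("/opt/app/one.txt"), Path("/opt/app/two.txt")]

# Copies run in parallel threads. Raises ValueError if the lists differ
# in length, and RuntimeError if a destination file is missing afterward.
copyfiles(src, dst)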
Code Example #2
def on_starting(server: Arbiter):
    """Gunicorn pre-start tasks."""

    setup_lib_logging(params.debug)

    python_version = platform.python_version()
    required = ".".join((str(v) for v in MIN_PYTHON_VERSION))
    log.info("Python {} detected ({} required)", python_version, required)

    async def runner():
        # Standard Library
        from asyncio import gather

        # Build the UI and warm the config cache concurrently.
        await gather(build_ui(), cache_config())

    check_redis_instance()
    # Run both startup coroutines to completion; calling cache_config()
    # directly would create a coroutine that is never awaited.
    aiorun(runner())

    log.success(
        "Started hyperglass {v} on http://{h}:{p} with {w} workers",
        v=__version__,
        h=format_listen_address(params.listen_address),
        p=str(params.listen_port),
        w=server.app.cfg.settings["workers"].value,
    )
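
For context, on_starting is one of Gunicorn's server hooks: it is defined in the Gunicorn config module and runs once in the master process, just before the Arbiter initializes and forks workers. A minimal sketch of the wiring (module and app names are hypothetical):

# gunicorn_config.py
def on_starting(server):
    # `server` is the gunicorn.arbiter.Arbiter instance.
    print("master process starting")

# Launch with: gunicorn -c gunicorn_config.py myapp:app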
Code Example #3
File: util.py Project: jgmel/hyperglass
async def build_frontend(  # noqa: C901
    dev_mode: bool,
    dev_url: str,
    prod_url: str,
    params: dict,
    app_path: Path,
    force: bool = False,
):
    """Perform full frontend UI build process.

    Securely creates temporary file, writes frontend configuration
    parameters to file as JSON. Then writes the name of the temporary
    file to /tmp/hyperglass.env.json as {"configFile": <file_name> }.

    Webpack reads /tmp/hyperglass.env.json, loads the temporary file,
    and sets its contents to Node environment variables during the build
    process.

    After the build is successful, the temporary file is automatically
    closed during garbage collection.

    Arguments:
        dev_mode {bool} -- Development Mode
        dev_url {str} -- Development Mode URL
        prod_url {str} -- Production Mode URL
        params {dict} -- Frontend config parameters
        app_path {Path} -- hyperglass application path
        force {bool} -- Force a new build even if the build ID is unchanged

    Raises:
        RuntimeError: Raised if errors occur during build process.

    Returns:
        {bool} -- True if successful
    """
    import hashlib
    import json
    import tempfile

    from aiofile import AIOFile

    from hyperglass.constants import __version__

    env_file = Path("/tmp/hyperglass.env.json")  # noqa: S108

    package_json = await read_package_json()

    env_vars = {
        "_HYPERGLASS_CONFIG_": params,
        "_HYPERGLASS_VERSION_": __version__,
        "_HYPERGLASS_PACKAGE_JSON_": package_json,
        "_HYPERGLASS_APP_PATH_": str(app_path),
    }

    # Set NextJS production/development mode and base URL based on
    # developer_mode setting.
    if dev_mode:
        env_vars.update({
            "NODE_ENV": "development",
            "_HYPERGLASS_URL_": dev_url
        })
    else:
        env_vars.update({
            "NODE_ENV": "production",
            "_HYPERGLASS_URL_": prod_url
        })

    # Check if hyperglass/ui/node_modules has been initialized. If not,
    # initialize it.
    initialized = await check_node_modules()
    if initialized:
        log.debug("node_modules is already initialized")
    else:
        log.debug(
            "node_modules has not been initialized. Starting initialization..."
        )
        node_setup = await node_initial(dev_mode)
        if node_setup == "":
            log.debug("Re-initialized node_modules")

    try:
        env_json = json.dumps(env_vars, default=str)

        # Create SHA256 hash from all parameters passed to UI, use as
        # build identifier.
        build_id = hashlib.sha256(env_json.encode()).hexdigest()

        # Read hard-coded environment file from last build. If build ID
        # matches this build's ID, don't run a new build.
        if env_file.exists() and not force:
            async with AIOFile(env_file, "r") as ef:
                ef_json = await ef.read()
                ef_id = json.loads(ef_json).get("buildId", "empty")

                log.debug("Previous Build ID: {id}", id=ef_id)

                if ef_id == build_id:
                    log.debug(
                        "UI parameters unchanged since last build, skipping UI build..."
                    )
                    return True

        # Create temporary file. json file extension is added for easy
        # webpack JSON parsing.
        temp_file = tempfile.NamedTemporaryFile(mode="w+",
                                                prefix="hyperglass_",
                                                suffix=".json",
                                                delete=not dev_mode)
        log.info("Starting UI build...")
        log.debug(
            f"Created temporary UI config file: '{temp_file.name}' for build {build_id}"
        )

        async with AIOFile(temp_file.name, "w+") as temp:
            await temp.write(env_json)
            await temp.fsync()

            # Write "permanent" file (hard-coded named) for Node to read.
            async with AIOFile(env_file, "w+") as ef:
                await ef.write(
                    json.dumps({
                        "configFile": temp_file.name,
                        "buildId": build_id
                    }))
                await ef.fsync()

                # While temporary file is still open, initiate UI build process.
                if not dev_mode or force:
                    initialize_result = await node_initial(dev_mode)
                    build_result = await build_ui(app_path=app_path)

                    if initialize_result:
                        log.debug(initialize_result)
                    elif initialize_result == "":
                        log.debug("Re-initialized node_modules")

                    if build_result:
                        log.success("Completed UI build")
                elif dev_mode and not force:
                    log.debug(
                        "Running in developer mode, did not build new UI files"
                    )

        await migrate_images(app_path, params)

        generate_opengraph(
            Path(params["web"]["opengraph"]["image"]),
            1200,
            630,
            app_path / "static" / "images",
            params["web"]["theme"]["colors"]["black"],
        )

    except Exception as e:
        raise RuntimeError(str(e)) from None

    return True
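
The skip-rebuild logic above reduces to hashing the serialized environment and comparing it with the build ID recorded in /tmp/hyperglass.env.json by the previous build. A standalone sketch of that idea (illustrative names, not the hyperglass API):

import hashlib
import json

def needs_rebuild(env_vars: dict, previous_build_id: str) -> bool:
    """Return True when the environment differs from the last recorded build."""
    env_json = json.dumps(env_vars, default=str)
    build_id = hashlib.sha256(env_json.encode()).hexdigest()
    return build_id != previous_build_id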
Code Example #4
async def query(query_data: Query, request: Request,
                background_tasks: BackgroundTasks):
    """Ingest request data pass it to the backend application to perform the query."""

    timestamp = datetime.utcnow()
    background_tasks.add_task(send_webhook, query_data, request, timestamp)

    # Initialize cache
    cache = AsyncCache(db=params.cache.database, **REDIS_CONFIG)
    log.debug("Initialized cache {}", repr(cache))

    # Use the hashed query_data string as the key for the k/v cache store
    # so each command output value is unique.
    cache_key = query_data.digest()

    # Define cache entry expiry time
    cache_timeout = params.cache.timeout

    log.debug("Cache Timeout: {}", cache_timeout)
    log.info("Starting query execution for query {}", query_data.summary)

    cache_response = await cache.get_dict(cache_key, "output")

    json_output = False

    if query_data.device.structured_output and query_data.query_type in (
            "bgp_route",
            "bgp_community",
            "bgp_aspath",
    ):
        json_output = True

    cached = False
    runtime = 65535  # Sentinel value; overwritten on both cache hit and miss.
    if cache_response:
        log.debug("Query {} exists in cache", cache_key)

        # If a cached response exists, reset the expiration time.
        await cache.expire(cache_key, seconds=cache_timeout)

        cached = True
        runtime = 0
        timestamp = await cache.get_dict(cache_key, "timestamp")

    else:
        log.debug("No existing cache entry for query {}", cache_key)
        log.debug("Created new cache key {} entry for query {}", cache_key,
                  query_data.summary)

        timestamp = query_data.timestamp

        starttime = time.time()

        if params.fake_output:
            # Return fake, static data for development purposes, if enabled.
            cache_output = await fake_output(json_output)
        else:
            # Pass request to execution module
            cache_output = await execute(query_data)

        endtime = time.time()
        elapsedtime = round(endtime - starttime, 4)
        log.debug("Query {} took {} seconds to run.", cache_key, elapsedtime)

        if cache_output is None:
            raise HyperglassError(message=params.messages.general,
                                  alert="danger")

        # Create a cache entry
        if json_output:
            raw_output = json.dumps(cache_output)
        else:
            raw_output = str(cache_output)
        await cache.set_dict(cache_key, "output", raw_output)
        await cache.set_dict(cache_key, "timestamp", timestamp)
        await cache.expire(cache_key, seconds=cache_timeout)

        log.debug("Added cache entry for query: {}", cache_key)

        runtime = int(round(elapsedtime, 0))

    # Fetch the cache entry (pre-existing or just created) for the response.
    cache_response = await cache.get_dict(cache_key, "output")
    response_format = "text/plain"

    if json_output:
        response_format = "application/json"

    log.debug("Cache match for {}:\n{}", cache_key, cache_response)
    log.success("Completed query execution for query {}", query_data.summary)

    return {
        "output": cache_response,
        "id": cache_key,
        "cached": cached,
        "runtime": runtime,
        "timestamp": timestamp,
        "format": response_format,
        "random": query_data.random(),
        "level": "success",
        "keywords": [],
    }
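
The caching logic above is a read-through pattern: hash the query into a key, return the cached output on a hit (refreshing its expiry), otherwise execute the query, store the result, and set the expiry. Reduced to its core (hypothetical helper; `cache` is any object exposing the same get_dict/set_dict/expire methods used above):

async def read_through(cache, key: str, timeout: int, compute):
    """Return (output, cached) for `key`, computing and storing on a miss."""
    output = await cache.get_dict(key, "output")
    if output:
        await cache.expire(key, seconds=timeout)  # sliding expiration
        return output, True
    output = await compute()
    await cache.set_dict(key, "output", output)
    await cache.expire(key, seconds=timeout)
    return output, False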
Code Example #5
async def query(query_data: Query, request: Request,
                background_tasks: BackgroundTasks):
    """Ingest request data pass it to the backend application to perform the query."""

    timestamp = datetime.utcnow()
    background_tasks.add_task(send_webhook, query_data, request, timestamp)

    # Initialize cache
    cache = Cache(db=params.cache.database, **REDIS_CONFIG)
    log.debug("Initialized cache {}", repr(cache))

    # Use the hashed query_data string as the key for the k/v cache store
    # so each command output value is unique.
    cache_key = query_data.digest()

    # Define cache entry expiry time
    cache_timeout = params.cache.timeout

    log.debug(f"Cache Timeout: {cache_timeout}")
    log.info(f"Starting query execution for query {query_data.summary}")

    cache_response = await cache.get_dict(cache_key, "output")

    cached = False
    if cache_response:
        log.debug("Query {q} exists in cache", q=cache_key)

        # If a cached response exists, reset the expiration time.
        await cache.expire(cache_key, seconds=cache_timeout)

        cached = True
        runtime = 0
        timestamp = await cache.get_dict(cache_key, "timestamp")

    else:
        log.debug(f"No existing cache entry for query {cache_key}")
        log.debug(
            f"Created new cache key {cache_key} entry for query {query_data.summary}"
        )

        timestamp = query_data.timestamp
        # Pass request to execution module
        starttime = time.time()
        cache_output = await Execute(query_data).response()
        endtime = time.time()
        elapsedtime = round(endtime - starttime, 4)
        log.debug(f"Query {cache_key} took {elapsedtime} seconds to run.")

        if cache_output is None:
            raise HyperglassError(message=params.messages.general,
                                  alert="danger")

        # Create a cache entry
        if query_data.device.structured_output:
            raw_output = json.dumps(cache_output)
        else:
            raw_output = str(cache_output)
        await cache.set_dict(cache_key, "output", raw_output)
        await cache.set_dict(cache_key, "timestamp", timestamp)
        await cache.expire(cache_key, seconds=cache_timeout)

        log.debug(f"Added cache entry for query: {cache_key}")

        runtime = int(round(elapsedtime, 0))

    # Fetch the cache entry (pre-existing or just created) for the response.
    cache_response = await cache.get_dict(cache_key, "output")

    if query_data.device.structured_output:
        response_format = "application/json"
        cache_response = json.loads(cache_response)
    else:
        response_format = "text/plain"

    log.debug(f"Cache match for {cache_key}:\n {cache_response}")
    log.success(f"Completed query execution for {query_data.summary}")

    return {
        "output": cache_response,
        "id": cache_key,
        "cached": cached,
        "runtime": runtime,
        "timestamp": timestamp,
        "format": response_format,
        "random": query_data.random(),
        "level": "success",
        "keywords": [],
    }
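
The handler's signature (a Pydantic Query model as the request body, plus injected Request and BackgroundTasks) matches a FastAPI endpoint. A hypothetical sketch of how such a handler could be registered (the route path and app name are assumptions, not taken from hyperglass):

from fastapi import FastAPI

app = FastAPI()

# FastAPI injects Request and BackgroundTasks, and parses the POST body
# into the Query model automatically.
app.add_api_route("/api/query/", query, methods=["POST"])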