# Example #1
# 0
    def _endpoints_from_watchman(
            self, endpoint: str) -> typing.List[EndpointMetadata]:
        """
        Get a list of endpoints by querying Watchman.

        Parameters
        ----------
        endpoint: str
            URL of the Watchman service to query.

        Returns
        -------
        typing.List[EndpointMetadata]
            One record per endpoint reported by Watchman. Endpoints
            reported as unhealthy get a placeholder record with only
            ``healthy`` and ``endpoint`` populated.

        Raises
        ------
        IOError
            If the HTTP request to Watchman does not succeed.
        """
        resp = requests.get(endpoint)
        if not resp.ok:
            raise IOError(f"Failed to get endpoints: {resp.content}")

        endpoints = []
        for data in resp.json()["endpoints"]:
            # Both branches build the same fully-qualified endpoint URL.
            url = f'{self.base_url}{data["endpoint"].rstrip("/")}'
            if data["healthy"]:
                # Hoist the deeply nested metadata dicts once instead of
                # re-indexing them for every field.
                metadata = data["endpoint-metadata"]["metadata"]
                dataset = metadata["dataset"]
                endpoints.append(
                    EndpointMetadata(
                        target_name=metadata["name"],
                        healthy=data["healthy"],
                        endpoint=url,
                        tag_list=normalize_sensor_tags(dataset["tag_list"]),
                        target_tag_list=normalize_sensor_tags(
                            dataset["target_tag_list"]),
                        resolution=dataset["resolution"],
                        model_offset=metadata["model"].get("model-offset", 0),
                    ))
            else:
                # Unhealthy endpoints carry no usable metadata.
                endpoints.append(
                    EndpointMetadata(
                        target_name=None,
                        healthy=data["healthy"],
                        endpoint=url,
                        tag_list=None,
                        target_tag_list=None,
                        resolution=None,
                        model_offset=None,
                    ))
        return endpoints
def _endpoint_metadata(name: str, healthy: bool) -> EndpointMetadata:
    """
    Build a minimal EndpointMetadata record.

    Only ``target_name`` and ``healthy`` carry caller-supplied values;
    every other field is explicitly set to ``None``.
    """
    fields = {
        "target_name": name,
        "healthy": healthy,
        "endpoint": None,
        "tag_list": None,
        "resolution": None,
    }
    return EndpointMetadata(**fields)
async def test_influx_forwarder(influxdb):
    """
    Test that the forwarder creates correct points from a
    multi-indexed series.

    Builds a DataFrame whose columns are a two-level MultiIndex with two
    top-level names: ``name1`` (one column per sensor tag) and ``name2``
    (twice as many columns as tags). After forwarding, ``name1`` columns
    should be renamed to the sensor tag names, while ``name2`` columns
    should keep their numeric 0..N names.
    """
    # Fully populated, healthy endpoint metadata for the forwarder.
    endpoint = EndpointMetadata(
        "some-target-name",
        healthy=True,
        endpoint="/some-endpoint",
        tag_list=tu.SENSORTAG_LIST,
        target_tag_list=tu.SENSORTAG_LIST,
        resolution="10T",
        model_offset=0,
    )

    # Feature outs which match length of tags
    # These should then be re-mapped to the sensor tag names
    keys = [("name1", i) for i, _ in enumerate(tu.SENSORTAG_LIST)]

    # Feature outs which don't match the length of the tags
    # These will be kept at 0..N as field names
    keys.extend([("name2", i) for i in range(len(tu.SENSORTAG_LIST) * 2)])

    # Assign all keys unique numbers
    columns = pd.MultiIndex.from_tuples(keys)
    index = pd.date_range("2019-01-01", "2019-01-02", periods=4)
    df = pd.DataFrame(columns=columns, index=index)

    # Generate some unique values for each key, and insert it into that column
    for i, key in enumerate(keys):
        df[key] = range(i, i + 4)

    # Create the forwarder and forward the 'predictions' to influx.
    forwarder = ForwardPredictionsIntoInflux(
        destination_influx_uri=tu.INFLUXDB_URI)
    await forwarder.forward_predictions(predictions=df, endpoint=endpoint)

    # Client to manually verify the points written
    client = influx_client_from_uri(tu.INFLUXDB_URI, dataframe_client=True)

    name1_results = client.query("SELECT * FROM name1")["name1"]

    # Should have the tag names as column names since the shape matched
    assert all(c in name1_results.columns
               for c in ["machine"] + tu.SENSORS_STR_LIST)
    # Values written under each tag must match the source column i of "name1".
    for i, tag in enumerate(tu.SENSORS_STR_LIST):
        assert np.allclose(df[("name1", i)].values, name1_results[tag].values)

    # Now check the other top level name "name2" is a measurement with the correct points written
    name2_results = client.query("SELECT * FROM name2")["name2"]

    # Should not have the same names as tags, since shape was 2x as long, should just be numeric columns
    assert all([
        str(c) in name2_results.columns
        for c in ["machine"] + list(range(len(tu.SENSORTAG_LIST) * 2))
    ])
    # Each ("name2", i) source column maps to a stringified numeric field.
    for key in filter(lambda k: k[0] == "name2", keys):
        assert np.allclose(df[key].values, name2_results[str(key[1])].values)
# Example #4
# 0
 def _endpoints_from_watchman(self, endpoint: str) -> typing.List[EndpointMetadata]:
     """
     Get a list of endpoints by querying Watchman.

     Parameters
     ----------
     endpoint: str
         URL of the Watchman service to query.

     Returns
     -------
     typing.List[EndpointMetadata]
         One record per endpoint in the response's ``endpoints`` list.

     Raises
     ------
     requests.HTTPError
         Via ``raise_for_status`` when the response status indicates failure.
     """
     resp = self.session.get(endpoint)
     if not resp.ok:
         # Lazy %-style args: the message (identical to the old f-string
         # output) is only rendered when the log record is emitted.
         logger.error("Failed to get endpoints: %r", resp.content)
         resp.raise_for_status()
     return [EndpointMetadata(data) for data in resp.json()["endpoints"]]