Example #1
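    # Brackets the flow run with CloudEvents sent over short-lived Client connections:
    # ENSEMBLE_STARTED before run_flow, ENSEMBLE_STOPPED with the pickled outputs on
    # success, and ENSEMBLE_FAILED if anything raises.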
    def _evaluate(self, dispatch_url, ee_id):
        asyncio.set_event_loop(asyncio.get_event_loop())
        try:
            with Client(dispatch_url) as c:
                event = CloudEvent(
                    {
                        "type": ids.EVTYPE_ENSEMBLE_STARTED,
                        "source": f"/ert/ee/{self._ee_id}",
                    }, )
                c.send(to_json(event).decode())
            self.run_flow(ee_id)

            with Client(dispatch_url) as c:
                event = CloudEvent(
                    {
                        "type": ids.EVTYPE_ENSEMBLE_STOPPED,
                        "source": f"/ert/ee/{self._ee_id}",
                        "datacontenttype": "application/octet-stream",
                    },
                    cloudpickle.dumps(self.config["outputs"]),
                )
                c.send(to_json(event).decode())
        except Exception:
            logger.exception(
                "An exception occurred while starting the ensemble evaluation",
                exc_info=True,
            )
            with Client(dispatch_url) as c:
                event = CloudEvent(
                    {
                        "type": ids.EVTYPE_ENSEMBLE_FAILED,
                        "source": f"/ert/ee/{self._ee_id}",
                    }, )
                c.send(to_json(event).decode())
Example #2
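    # Runs one realization: fetches the expected resources, announces FM_STEP_START,
    # runs the jobs, stores each declared output file (failing if one is missing), and
    # reports FM_STEP_SUCCESS.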
    def run(self, expected_res=None):
        run_path = self._run_path / str(self._iens)
        storage = self.storage_driver(run_path)
        self.retrieve_resources(expected_res, storage)

        with Client(self._url) as ee_client:
            event = CloudEvent(
                {
                    "type": ids.EVTYPE_FM_STEP_START,
                    "source":
                    f"/ert/ee/{self._ee_id}/real/{self._iens}/stage/{self._stage_id}/step/{self._step_id}",
                    "datacontenttype": "application/json",
                }, )
            ee_client.send(to_json(event).decode())

            outputs = []
            self.run_jobs(ee_client, run_path)

            for output in self._outputs:
                if not (run_path / output).exists():
                    raise FileNotFoundError(
                        f"Output file {output} was not generated!")

                outputs.append(storage.store(output, self._iens))

            event = CloudEvent(
                {
                    "type": ids.EVTYPE_FM_STEP_SUCCESS,
                    "source":
                    f"/ert/ee/{self._ee_id}/real/{self._iens}/stage/{self._stage_id}/step/{self._step_id}",
                    "datacontenttype": "application/json",
                }, )
            ee_client.send(to_json(event).decode())
        return {"iens": self._iens, "outputs": outputs}
Example #3
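    # Same started/stopped/failed pattern as above, except that input files are fetched
    # up front and handed to run_flow, and the stop event carries no payload.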
    def _evaluate(self, dispatch_url, ee_id):
        try:
            input_files = self._fetch_input_files()

            with Client(dispatch_url) as c:
                event = CloudEvent(
                    {
                        "type": ids.EVTYPE_ENSEMBLE_STARTED,
                        "source": f"/ert/ee/{self._ee_id}",
                    }, )
                c.send(to_json(event).decode())
            self.run_flow(ee_id, dispatch_url, input_files)

            with Client(dispatch_url) as c:
                event = CloudEvent(
                    {
                        "type": ids.EVTYPE_ENSEMBLE_STOPPED,
                        "source": f"/ert/ee/{self._ee_id}",
                    }, )
                c.send(to_json(event).decode())
        except Exception:
            logger.exception(
                "An exception occurred while starting the ensemble evaluation",
                exc_info=True,
            )
            with Client(dispatch_url) as c:
                event = CloudEvent(
                    {
                        "type": ids.EVTYPE_ENSEMBLE_FAILED,
                        "source": f"/ert/ee/{self._ee_id}",
                    }, )
                c.send(to_json(event).decode())
Example #4
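# A CloudEvent needs both "source" and "type": constructing one without either raises
# MissingRequiredFields naming the missing key.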
def test_cloudevent_missing_minimal_required_fields():
    attributes = {"type": "t"}
    with pytest.raises(cloud_exceptions.MissingRequiredFields) as e:
        _ = CloudEvent(attributes, None)
    assert f"Missing required keys: {set(['source'])}" in str(e.value)

    attributes = {"source": "s"}
    with pytest.raises(cloud_exceptions.MissingRequiredFields) as e:
        _ = CloudEvent(attributes, None)
    assert f"Missing required keys: {set(['type'])}" in str(e.value)
Example #5
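    # Runs each job as a subprocess, emitting FM_JOB_START first; a non-zero exit sends
    # FM_JOB_FAILURE with the captured output and raises, otherwise FM_JOB_SUCCESS
    # follows with the job's stdout.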
    def run_jobs(self, client, run_path):
        for index, job in enumerate(self._job_list):
            self.logger.info(f"Running command {self._cmd}  {job['name']}")
            event = CloudEvent(
                {
                    "type": ids.EVTYPE_FM_JOB_START,
                    "source":
                    f"/ert/ee/{self._ee_id}/real/{self._iens}/stage/{self._stage_id}/step/{self._step_id}/job/{job['id']}",
                    "datacontenttype": "application/json",
                }, )
            client.send(to_json(event).decode())

            shell_cmd = [self._cmd, job["executable"], *job["args"]]
            cmd_exec = subprocess.run(
                shell_cmd,
                universal_newlines=True,
                stdout=subprocess.PIPE,
                stderr=subprocess.PIPE,
                cwd=run_path,
            )
            self.logger.info(cmd_exec.stdout)

            if cmd_exec.returncode != 0:
                self.logger.error(cmd_exec.stderr)
                event = CloudEvent(
                    {
                        "type": ids.EVTYPE_FM_JOB_FAILURE,
                        "source":
                        f"/ert/ee/{self._ee_id}/real/{self._iens}/stage/{self._stage_id}/step/{self._step_id}/job/{job['id']}",
                        "datacontenttype": "application/json",
                    },
                    {
                        "stderr": cmd_exec.stderr,
                        "stdout": cmd_exec.stdout
                    },
                )
                client.send(to_json(event).decode())
                raise RuntimeError(
                    f"Script {job['name']} failed with exception {cmd_exec.stderr}"
                )

            event = CloudEvent(
                {
                    "type": ids.EVTYPE_FM_JOB_SUCCESS,
                    "source":
                    f"/ert/ee/{self._ee_id}/real/{self._iens}/stage/{self._stage_id}/step/{self._step_id}/job/{job['id']}",
                    "datacontenttype": "application/json",
                },
                {"stdout": cmd_exec.stdout},
            )

            client.send(to_json(event).decode())
Example #6
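    # Same event sequence again, but the Client is opened with a token and certificate
    # and run_flow executes inside a prefect.context carrying the same connection info.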
    def _evaluate(self):
        get_event_loop()
        try:
            with Client(
                    self._ee_config.dispatch_uri,
                    self._ee_config.token,
                    self._ee_config.cert,
            ) as c:
                event = CloudEvent(
                    {
                        "type": ids.EVTYPE_ENSEMBLE_STARTED,
                        "source": f"/ert/ee/{self._ee_id}",
                    }, )
                c.send(to_json(event).decode())
            with prefect.context(
                    url=self._ee_config.dispatch_uri,
                    token=self._ee_config.token,
                    cert=self._ee_config.cert,
            ):
                self.run_flow(self._ee_id)

            with Client(
                    self._ee_config.dispatch_uri,
                    self._ee_config.token,
                    self._ee_config.cert,
            ) as c:
                event = CloudEvent(
                    {
                        "type": ids.EVTYPE_ENSEMBLE_STOPPED,
                        "source": f"/ert/ee/{self._ee_id}",
                        "datacontenttype": "application/octet-stream",
                    },
                    cloudpickle.dumps(self._outputs),
                )
                c.send(to_json(event).decode())
        except Exception:
            logger.exception(
                "An exception occurred while starting the ensemble evaluation",
                exc_info=True,
            )
            with Client(
                    self._ee_config.dispatch_uri,
                    self._ee_config.token,
                    self._ee_config.cert,
            ) as c:
                event = CloudEvent(
                    {
                        "type": ids.EVTYPE_ENSEMBLE_FAILED,
                        "source": f"/ert/ee/{self._ee_id}",
                    }, )
                c.send(to_json(event).decode())
Example #7
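# Reads a BME680 sensor once a second and publishes each reading to NATS as the body
# of a structured CloudEvent.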
async def run(loop):
    nc = NATS()

    await nc.connect("nats.wellorder.net:4222", loop=loop)
    while True:
        # read items from BMP680
        if sensor.get_sensor_data():
            # convert to kelvin
            temp = sensor.data.temperature + 273.15
            press = sensor.data.pressure
            humid = sensor.data.humidity
            # make cloudevent
            attributes = {
                "type": "com.wellorder.iot.indoorenv",
                "source": "https://pentax.wellorder.net/iot/bme680",
                "datacontenttype": "application/json"
            }
            data = {
                "loc": "office.rpi-cluster",
                "dt": time.time(),
                "temp": temp,
                "pressure": press,
                "humidity": humid,
                "sensorModel": "BME680"
            }
            event = CloudEvent(attributes, data)
            header, body = to_structured(event)
            await nc.publish("iot.indoorenv", body)
        # Wait before taking the next reading
        await asyncio.sleep(1)
    # Terminate connection to NATS (unreachable while the loop above runs forever).
    await nc.close()
Example #8
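# Builds the inference-response CloudEvent; with CE_MERGE enabled the request event's
# attributes are reused, dropping id and time so new values are generated, before
# serializing in binary or structured HTTP mode.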
def create_response_cloudevent(model_name: str,
                               body: Union[Dict, CloudEvent],
                               response: Dict,
                               binary_event=False) -> tuple:
    ce_attributes = {}

    if os.getenv("CE_MERGE", "false").lower() == "true":
        if binary_event:
            ce_attributes = body._attributes
            if "datacontenttype" in ce_attributes:  # Optional field so must check
                del ce_attributes["datacontenttype"]
        else:
            ce_attributes = body
            del ce_attributes["data"]

        # Remove these fields so we generate new ones
        del ce_attributes["id"]
        del ce_attributes["time"]

    ce_attributes["type"] = os.getenv("CE_TYPE",
                                      "io.kserve.inference.response")
    ce_attributes["source"] = os.getenv("CE_SOURCE",
                                        f"io.kserve.kfserver.{model_name}")

    event = CloudEvent(ce_attributes, response)

    if binary_event:
        event_headers, event_body = to_binary(event)
    else:
        event_headers, event_body = to_structured(event)

    return event_headers, event_body
Example #9
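# Drives the function with a hand-built Cloud Storage "object finalized" event and
# asserts on the details it prints.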
def test_functions_eventsource_storage(capsys):
    attributes = {
        "id": "5e9f24a",
        "type": "google.cloud.storage.object.v1.finalized",
        "source": "sourceUrlHere",
    }

    data = {
        "bucket": "test_bucket_for_storage",
        "name": "new_blob_uploaded",
        "generation": 1,
        "metageneration": 1,
        "timeCreated": "2021-10-10 00:00:00.000000Z",
        "updated": "2021-11-11 00:00:00.000000Z",
    }

    event = CloudEvent(attributes, data)

    main.hello_gcs(event)

    out, _ = capsys.readouterr()
    assert "Event ID: 5e9f24a" in out
    assert "Event type: google.cloud.storage.object.v1.finalized" in out
    assert "Bucket: test_bucket_for_storage" in out
    assert "File: new_blob_uploaded" in out
    assert "Metageneration: 1" in out
    assert "Created: 2021-10-10 00:00:00.000000Z" in out
    assert "Updated: 2021-11-11 00:00:00.000000Z" in out
Example #10
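# Round-trips image bytes through binary HTTP mode: the body stays raw bytes and the
# image can be rebuilt from the parsed event's data.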
def test_create_binary_image():
    # Create image and turn image into bytes
    attributes = {
        "type": "com.example.string",
        "source": "https://example.com/event-producer",
    }

    # Create CloudEvent
    event = CloudEvent(attributes, image_bytes)

    # Create http headers/body content
    headers, body = to_binary(event)

    # Unmarshall CloudEvent and re-create image
    reconstruct_event = from_http(headers,
                                  body,
                                  data_unmarshaller=lambda x: io.BytesIO(x))

    # reconstruct_event.data is an io.BytesIO object due to data_unmarshaller
    restore_image = Image.open(reconstruct_event.data)
    assert restore_image.size == image_expected_shape

    # In binary content mode the HTTP body is the raw image bytes
    assert isinstance(body, bytes)
    assert body == image_bytes
Example #11
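# Round-trips image bytes through structured HTTP mode, where the payload ends up
# base64-encoded under "data_base64" in the JSON body.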
def test_create_structured_image():
    # Create image and turn image into bytes
    attributes = {
        "type": "com.example.string",
        "source": "https://example.com/event-producer",
    }

    # Create CloudEvent
    event = CloudEvent(attributes, image_bytes)

    # Create http headers/body content
    headers, body = to_structured(event)

    # Structured has cloudevent attributes marshalled inside the body. For this
    # reason we must load the byte object to create the python dict containing
    # the cloudevent attributes
    data = json.loads(body)

    # Test cloudevent extension from http fields and data
    assert isinstance(data, dict)
    assert base64.b64decode(data["data_base64"]) == image_bytes

    # Unmarshall CloudEvent and re-create image
    reconstruct_event = from_http(headers,
                                  body,
                                  data_unmarshaller=lambda x: io.BytesIO(x))

    # reconstruct_event.data is an io.BytesIO object due to data_unmarshaller
    restore_image = Image.open(reconstruct_event.data)
    assert restore_image.size == image_expected_shape
Example #12
def test_functions_log_cloudevent_should_print_message(capsys):

    attributes = {
        "source": "test",
        "type": "google.cloud.audit.log.v1.written",
        "subject": "storage.googleapis.com/projects/_/buckets/my-bucket/objects/test.txt",
    }

    data = {
        "protoPayload": {
            "methodName": "storage.objects.create",
            "authenticationInfo": {"principalEmail": "*****@*****.**"},
            "resourceName": "some-resource",
        }
    }

    event = CloudEvent(attributes, data)

    main.hello_auditlog(event)

    out, _ = capsys.readouterr()
    assert "Event type: google.cloud.audit.log.v1.written" in out
    assert (
        "Subject: storage.googleapis.com/projects/_/buckets/my-bucket/objects/test.txt"
        in out
    )
    assert "API method: storage.objects.create" in out
    assert "Resource name: some-resource" in out
    assert "Principal: [email protected]" in out
Example #13
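# Serializes the message as a structured CloudEvent and POSTs it to the web server;
# failures are logged instead of raised.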
def send_cloud_event(msg):

    ce_action_type = "manuela.cam-sim.image"
    ce_action_source = "manuela/eventing/cam-sim"

    # Create a CloudEvent
    # - The CloudEvent "id" is generated if omitted. "specversion" defaults to "1.0".
    try:
        attributes = {
            'type': ce_action_type,
            'source': ce_action_source,
        }

        event = CloudEvent(attributes, msg)

        # Creates the HTTP request representation of the CloudEvent in structured content mode
        headers, body = to_structured(event)

        # POST
        requests.post(web_server, data=body, headers=headers)

    except Exception:
        logger.error(f'Failed to send CloudEvent to: {web_server}')

    return
Example #14
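    # Cancels a running evaluation: SIGINT first, SIGKILL after a three-second grace
    # period, then announces ENSEMBLE_CANCELLED from a fresh event loop.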
    def _cancel(self):
        logger.debug("cancelling, waiting for wakeup...")
        self._allow_cancel.wait()
        logger.debug("got wakeup, killing evaluation process...")

        if self._eval_proc is not None:
            os.kill(self._eval_proc.pid, signal.SIGINT)
            start = time.time()
            while self._eval_proc.is_alive() and time.time() - start < 3:
                pass
            if self._eval_proc.is_alive():
                logger.debug(
                    "Evaluation process is not responding to SIGINT, escalating to SIGKILL"
                )
                os.kill(self._eval_proc.pid, signal.SIGKILL)

        self._eval_proc = None
        event = CloudEvent(
            {
                "type": ids.EVTYPE_ENSEMBLE_CANCELLED,
                "source": f"/ert/ee/{self._ee_id}",
                "datacontenttype": "application/json",
            },
        )

        loop = asyncio.new_event_loop()
        loop.run_until_complete(
            self.send_cloudevent(
                self._ee_config.dispatch_uri,
                event,
                token=self._ee_config.token,
                cert=self._ee_config.cert,
            )
        )
        loop.close()
Example #15
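    # Captures the job layout (with stdout/stderr paths resolved) and queues an
    # FM_STEP_START event describing it.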
    def _init_handler(self, msg):
        self._ee_id = msg.ee_id
        self._real_id = msg.real_id
        self._stage_id = msg.stage_id

        jobs = {}
        for job in msg.jobs:
            jobs[job.index] = job.job_data.copy()
            if job.job_data.get("stderr"):
                jobs[job.index]["stderr"] = str(
                    Path(job.job_data["stderr"]).resolve())
            if job.job_data.get("stdout"):
                jobs[job.index]["stdout"] = str(
                    Path(job.job_data["stdout"]).resolve())

        self._dump_event(
            CloudEvent(
                {
                    "type": _FM_STEP_START,
                    "source": self._step_path(),
                    "datacontenttype": "application/json",
                },
                {
                    "jobs": jobs,
                },
            ))
Example #16
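    # POSTs the notification as a structured CloudEvent; transport errors and non-200
    # responses are both surfaced as TriggerException.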
    def execute(self):

        # prepare the CloudEvents message

        attributes = {
            "type": self.type,
            "source": self.source,
            "time": datetime.datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%SZ")
        }
        data = self.notification
        event = CloudEvent(attributes, data)

        # Creates the HTTP request representation of the CloudEvents in structured content mode
        headers, body = to_structured(event)
        self.headers.update(headers)

        logger.debug(f"Sending CloudEvents notification {data}")

        # send the message
        try:
            resp = requests.post(self.url,
                                 data=body,
                                 headers=self.headers,
                                 verify=False,
                                 timeout=self.timeout)
        except Exception as e:
            logger.error("Not able to POST CloudEvents notification")
            raise TriggerException(e)
        if resp.status_code != 200:
            raise TriggerException(
                f"Not able to POST CloudEvents notification to {self.url}, "
                f"status {resp.status_code}, {resp.reason}, {resp.content.decode()}"
            )

        logger.debug("CloudEvents notification sent successfully")
Example #17
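    # Drops the content-type attribute when there is no payload, then queues the event
    # for later delivery.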
    def _dump_event(self, attributes: Dict[str, str], data: Any = None):
        if data is None and _CONTENT_TYPE in attributes:
            attributes.pop(_CONTENT_TYPE)

        event = CloudEvent(attributes=attributes, data=data)
        logger.debug(f'Schedule {type(event)} "{event["type"]}" for delivery')
        self._event_queue.put(event)
Example #18
def test_to_structured_extensions(specversion):
    event = CloudEvent(test_attributes, test_data)
    headers, body = to_structured(event)

    body = json.loads(body)

    assert "ext1" in body
    assert "extensions" not in body
Example #19
def test_none_data_cloudevent(specversion):
    event = CloudEvent({
        "source": "<my-url>",
        "type": "issue.example",
        "specversion": specversion,
    })
    to_binary(event)
    to_structured(event)
Example #20
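# Equality covers attributes and data: two events mutated the same way stay equal to
# each other but no longer equal the untouched original.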
def test_http_cloudevent_mutates_equality(specversion):
    attributes = {
        "source": "<source>",
        "specversion": specversion,
        "id": "my-id",
        "time": "tomorrow",
        "type": "tests.cloudevents.override",
        "datacontenttype": "application/json",
        "subject": "my-subject",
    }
    data = '{"name":"john"}'
    event1 = CloudEvent(attributes, data)
    event2 = CloudEvent(attributes, data)
    event3 = CloudEvent(attributes, data)

    assert event1 == event2
    # Test different attributes
    for key in attributes:
        if key == "specversion":
            continue
        else:
            event2[key] = f"noise-{key}"
            event3[key] = f"noise-{key}"
        assert event2 == event3
        assert event1 != event2 and event3 != event1

    # Test different data
    event2.data = '{"name":"paul"}'
    event3.data = '{"name":"paul"}'
    assert event2 == event3
    assert event1 != event2 and event3 != event1
Example #21
def test_json_can_talk_to_itself(specversion):
    event = CloudEvent(test_attributes, test_data)
    event_json = to_json(event)

    event = from_json(event_json)

    for key, val in test_attributes.items():
        assert event[key] == val
    assert event.data == test_data
Example #22
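# Empty-string and empty-bytes payloads come back as None after an HTTP round trip,
# while an empty tuple round-trips as an empty list (it is serialized as JSON '[]').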
def test_known_empty_edge_cases(binary_headers, test_data):
    expect_data = test_data
    if test_data in ["", b""]:
        expect_data = None
    elif test_data == ():
        # json.dumps(()) outputs '[]' hence list not tuple check
        expect_data = []

    # Remove ce- prefix
    headers = {key[3:]: value for key, value in binary_headers.items()}

    # binary
    event = from_http(*to_binary(CloudEvent(headers, test_data)))
    assert event.data == expect_data

    # structured
    event = from_http(*to_structured(CloudEvent(headers, test_data)))
    assert event.data == expect_data
Example #23
def test_to_json(specversion):
    event = CloudEvent(test_attributes, test_data)
    event_json = to_json(event)
    event_dict = json.loads(event_json)

    for key, val in test_attributes.items():
        assert event_dict[key] == val

    assert event_dict["data"] == test_data
Example #24
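# Small helper that assembles a CloudEvent from the given attributes and sends its
# JSON form over the supplied client.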
def send_dispatch_event(client, event_type, source, event_id, data,
                        **extra_attrs):
    event1 = CloudEvent(
        {
            "type": event_type,
            "source": source,
            "id": event_id,
            **extra_attrs
        }, data)
    client.send(to_json(event1))
Example #25
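    # After the base-class evaluation finishes, tells the client connection that the
    # user-requested run is done.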
    def _evaluate(self, client_url, dispatch_url, ee_id):
        super()._evaluate(dispatch_url, ee_id)
        with Client(client_url) as client:
            client.send(
                to_json(
                    CloudEvent({
                        "type": identifiers.EVTYPE_EE_USER_DONE,
                        "source": f"/ert/ee/{ee_id}",
                        "id": "event-user-done",
                    })))
Example #26
def cloudevent_0_3():
    attributes = {
        "id": "my-id",
        "source": "from-galaxy-far-far-away",
        "type": "cloudevent.greet.you",
        "specversion": "0.3",
        "time": "2020-08-16T13:58:54.471765",
    }
    data = {"name": "john"}
    return CloudEvent(attributes, data)
Example #27
    def _translate_change_to_cloudevent(ee_id, real_id, status):
        return CloudEvent(
            {
                "type": _queue_state_event_type(status),
                "source": f"/ert/ee/{ee_id}/real/{real_id}/step/{0}",
                "datacontenttype": "application/json",
            },
            {
                "queue_event_type": status,
            },
        )
Example #28
    def test_send_cloud_event_data_as_list(
            self, variables, eventgrid_cloud_event_topic_endpoint):
        client = self.create_eg_publisher_client(
            eventgrid_cloud_event_topic_endpoint)
        attributes = {
            "type": "com.example.sampletype1",
            "source": "https://example.com/event-producer",
        }
        data = "hello world"
        cloud_event = CloudEvent(attributes, data)
        client.send([cloud_event])
Example #29
    def make_cloud_event(event_type: str, data: Dict[str, Any]) -> Any:
        tz = get_tz()
        attributes = {
            "id": str(uuid.uuid4()),
            "time": tz.localize(datetime.now()).isoformat(),
            "type": event_type,
            "source": "chaosiq-cloud",
            "datacontenttype": "application/json"
        }
        event = CloudEvent(attributes, data)
        return to_structured(event)
Example #30
    async def test_send_cloud_event_data_none(
            self, variables, eventgrid_cloud_event_topic_endpoint):
        client = self.create_eg_publisher_client(
            eventgrid_cloud_event_topic_endpoint)
        attributes = {
            "type": "com.example.sampletype1",
            "source": "https://example.com/event-producer",
        }
        data = None
        cloud_event = CloudEvent(attributes, data)
        await client.send(cloud_event)