def fetch_and_lock(self, topic_names, process_variables=None):
    """
    Fetch and lock external tasks for the given topics.

    :param topic_names: topic name or list of topic names to subscribe to
    :param process_variables: optional process variables to filter tasks on
    :return: response json (the list of locked external tasks)
    """
    url = self.get_fetch_and_lock_url()
    body = {
        "workerId": str(self.worker_id),  # convert to string to make it JSON serializable
        "maxTasks": self.config["maxTasks"],
        "topics": self._get_topics(topic_names, process_variables),
        "asyncResponseTimeout": self.config["asyncResponseTimeout"]
    }

    if self.is_debug:
        self._log_with_context(
            f"trying to fetch and lock with request payload: {body}")
    http_timeout_seconds = self.__get_fetch_and_lock_http_timeout_seconds()

    response = requests.post(url, headers=self._get_headers(), json=body,
                             timeout=http_timeout_seconds)
    raise_exception_if_not_ok(response)

    resp_json = response.json()
    if self.is_debug:
        self._log_with_context(
            f"fetch and lock response json: {resp_json} for request: {body}"
        )
    # return the already-parsed payload instead of parsing the response body twice
    return resp_json
def get_jobs(self,
             offset: int,
             limit: int,
             tenant_ids=None,
             with_failure=None,
             process_instance_id=None,
             task_name=None,
             sort_by="jobDueDate",
             sort_order="desc"):
    """
    Query the engine's /job endpoint.

    :param offset: zero-based index of the first result
    :param limit: maximum number of results to return
    :param tenant_ids: optional iterable of tenant ids to filter by
    :param with_failure: when truthy, return only jobs that failed with an exception
    :param process_instance_id: optional process instance filter
    :param task_name: optional filter on the failed activity id
    :param sort_by: field to sort by (default "jobDueDate")
    :param sort_order: "asc" or "desc"
    :return: response json
    """
    url = f"{self.engine_base_url}/job"
    params = {
        "firstResult": offset,
        "maxResults": limit,
        "sortBy": sort_by,
        "sortOrder": sort_order,
    }
    # optional filters are only sent when truthy
    optional_filters = {
        "processInstanceId": process_instance_id,
        "failedActivityId": task_name,
    }
    params.update({key: value for key, value in optional_filters.items() if value})
    if with_failure:
        params["withException"] = "true"
    if tenant_ids:
        params["tenantIdIn"] = ",".join(tenant_ids)

    response = requests.get(url, params=params, headers=self._get_headers())
    raise_exception_if_not_ok(response)
    return response.json()
def test_does_not_raise_exception_if_response_is_ok(self):
    """raise_exception_if_not_ok() must be a no-op for an OK response."""
    ok_response = self.__mock_response(HTTPStatus.OK, {})
    try:
        raise_exception_if_not_ok(ok_response)
    except Exception:
        self.fail(
            "raise_exception_if_not_ok() should not raise Exception when response is ok"
        )
def correlate_message(self,
                      message_name,
                      process_instance_id=None,
                      tenant_id=None,
                      business_key=None,
                      process_variables=None):
    """
    Correlates a message to the process engine to either trigger a message start event or
    an intermediate message catching event.

    :param message_name:
    :param process_instance_id:
    :param tenant_id:
    :param business_key:
    :param process_variables:
    :return: response json
    """
    url = f"{self.engine_base_url}/message"

    # build the payload incrementally, including optional fields only when set
    payload = {"messageName": message_name, "resultEnabled": True}
    if process_variables:
        payload["processVariables"] = Variables.format(process_variables)
    if process_instance_id is not None:
        payload["processInstanceId"] = process_instance_id
    if tenant_id is not None:
        payload["tenantId"] = tenant_id
    # always tell the engine explicitly whether the correlation is tenant-less
    payload["withoutTenantId"] = not tenant_id
    if business_key is not None:
        payload["businessKey"] = business_key

    response = requests.post(url, headers=self._get_headers(), json=payload)
    raise_exception_if_not_ok(response)
    return response.json()
def set_job_retry(self, job_id, retries=1):
    """
    Set the number of retries for a job.

    :param job_id: id of the job to update
    :param retries: retry count to set (default 1)
    :return: True when the engine answered 204 No Content
    """
    response = requests.put(
        f"{self.engine_base_url}/job/{job_id}/retries",
        headers=self._get_headers(),
        json={"retries": retries},
    )
    raise_exception_if_not_ok(response)
    return response.status_code == HTTPStatus.NO_CONTENT
def test_raise_exception_if_response_is_not_ok(self):
    """A non-OK response must raise with the engine's error type and message."""
    error_payload = {'type': "SomeExceptionClass", "message": "a detailed message"}
    bad_response = self.__mock_response(HTTPStatus.BAD_REQUEST, error_payload)

    with self.assertRaises(Exception) as context:
        raise_exception_if_not_ok(bad_response)

    self.assertEqual(
        "received 400 : SomeExceptionClass : a detailed message",
        str(context.exception))
def start_process_by_version(self,
                             process_key,
                             version_tag,
                             variables,
                             tenant_id=None,
                             business_key=None):
    """
    Start a process instance with the process_key and specified version tag and variables passed.
    If multiple versions with same version tag found, it triggers the latest one

    :param process_key: Mandatory
    :param version_tag:
    :param variables: Mandatory - can be empty dict
    :param tenant_id: Optional
    :param business_key: Optional
    :return: response json
    """
    tenant_ids = [tenant_id] if tenant_id else []
    process_definitions = self.get_process_definitions(
        process_key, version_tag, tenant_ids,
        sort_by="version", sort_order="desc", offset=0, limit=1)

    # guard clause: nothing to start when no matching definition exists
    if not process_definitions:
        raise Exception(
            f"cannot start process because no process definitions found "
            f"for process_key: {process_key}, version_tag: {version_tag} and tenant_id: {tenant_id}"
        )

    definition = process_definitions[0]
    process_definition_id = definition['id']
    version = definition['version']

    # compose the log message from byte-identical parts for both branches
    if len(process_definitions) > 1:
        prefix, qualifier = "multiple process definitions found", "latest "
    else:
        prefix, qualifier = "exactly one process definition found", ""
    logger.info(
        f"{prefix} for process_key: {process_key}, "
        f"version_tag: {version_tag} and tenant_id: {tenant_id}, "
        f"using {qualifier}process_definition_id: {process_definition_id} with version: {version}"
    )

    url = self.get_start_process_url(process_definition_id)
    request_body = {"variables": Variables.format(variables)}
    if business_key:
        request_body["businessKey"] = business_key

    response = requests.post(url, headers=self._get_headers(), json=request_body)
    raise_exception_if_not_ok(response)
    return response.json()
def get_process_instance(self, process_key=None, variables=frozenset([]), tenant_ids=frozenset([])):
    """
    Query process instances, optionally filtered by process key, variables and tenant ids.

    :param process_key: optional process definition key to filter by
    :param variables: optional variable filters
    :param tenant_ids: optional tenant id filters
    :return: response json
    """
    query_params = self.__get_process_instance_url_params(
        process_key, tenant_ids, variables)
    response = requests.get(
        f"{self.engine_base_url}/process-instance",
        headers=self._get_headers(),
        params=query_params,
    )
    raise_exception_if_not_ok(response)
    return response.json()
def complete(self, task_id, global_variables, local_variables=None):
    """
    Report an external task as completed, passing its result variables to the engine.

    :param task_id: id of the external task to complete
    :param global_variables: variables to set in the process scope
    :param local_variables: variables to set in the task's local scope
        (optional; None means no local variables)
    :return: True when the engine answered 204 No Content
    """
    url = self.get_task_complete_url(task_id)

    body = {
        "workerId": self.worker_id,
        "variables": Variables.format(global_variables),
        # None sentinel instead of a mutable {} default argument shared across calls
        "localVariables": Variables.format(local_variables or {})
    }

    response = requests.post(url, headers=self._get_headers(), json=body,
                             timeout=self.http_timeout_seconds)
    raise_exception_if_not_ok(response)
    return response.status_code == HTTPStatus.NO_CONTENT
def get_process_definitions(self, process_key, version_tag, tenant_ids,
                            sort_by="version", sort_order="desc", offset=0, limit=1):
    """
    Query process definitions matching the given key, version tag and tenant ids.

    :param process_key: process definition key to filter by
    :param version_tag: version tag to filter by
    :param tenant_ids: tenant ids to filter by
    :param sort_by: field to sort by (default "version")
    :param sort_order: "asc" or "desc"
    :param offset: zero-based index of the first result
    :param limit: maximum number of results to return
    :return: response json
    """
    query_params = self.get_process_definitions_url_params(
        process_key, version_tag, tenant_ids, sort_by, sort_order, offset, limit)
    response = requests.get(self.get_process_definitions_url(),
                            headers=self._get_headers(),
                            params=query_params)
    raise_exception_if_not_ok(response)
    return response.json()
def get_process_instance_variable(self, process_instance_id, variable_name, with_meta=False):
    """
    Fetch a single process instance variable together with its raw content.

    Reads the variable's metadata from the variable endpoint, then its binary
    content from the "/data" sub-resource, which is returned base64-encoded.

    :param process_instance_id: id of the process instance
    :param variable_name: name of the variable to read
    :param with_meta: when True, return the metadata dict with the encoded
        content merged in under "value"; otherwise return the encoded content only
    :return: base64-encoded content string, or metadata dict when with_meta is True
    """
    variable_url = f"{self.engine_base_url}/process-instance/{process_instance_id}/variables/{variable_name}"

    meta_response = requests.get(variable_url, headers=self._get_headers())
    raise_exception_if_not_ok(meta_response)
    meta = meta_response.json()

    data_response = requests.get(f"{variable_url}/data", headers=self._get_headers())
    raise_exception_if_not_ok(data_response)
    encoded_value = base64.encodebytes(data_response.content).decode("utf-8")

    if with_meta:
        return dict(meta, value=encoded_value)
    return encoded_value
def failure(self, task_id, error_message, error_details, retries, retry_timeout):
    """
    Report an external task failure to the engine, setting its retry behavior.

    :param task_id: id of the failed external task
    :param error_message: short error message to record on the task
    :param error_details: optional detailed error description (e.g. a stack trace)
    :param retries: number of retries to set on the task
    :param retry_timeout: delay in milliseconds before the task can be retried
    :return: True when the engine answered 204 No Content
    """
    url = self.get_task_failure_url(task_id)
    logger.info(f"setting retries to: {retries} for task: {task_id}")

    request_body = dict(workerId=self.worker_id,
                        errorMessage=error_message,
                        retries=retries,
                        retryTimeout=retry_timeout)
    # details are optional and only sent when present
    if error_details:
        request_body["errorDetails"] = error_details

    response = requests.post(url, headers=self._get_headers(), json=request_body,
                             timeout=self.http_timeout_seconds)
    raise_exception_if_not_ok(response)
    return response.status_code == HTTPStatus.NO_CONTENT
def start_process(self, process_key, variables, tenant_id=None, business_key=None):
    """
    Start a process instance with the process_key and variables passed.

    :param process_key: Mandatory
    :param variables: Mandatory - can be empty dict
    :param tenant_id: Optional
    :param business_key: Optional
    :return: response json
    """
    payload = {"variables": Variables.format(variables)}
    if business_key:
        payload["businessKey"] = business_key

    url = self.get_start_process_instance_url(process_key, tenant_id)
    response = requests.post(url, headers=self._get_headers(), json=payload)
    raise_exception_if_not_ok(response)
    return response.json()