Example #1
import json
from typing import Any, Dict, List

# QueryConfig, Logger, QJLogEvents, schemas, run_query, get_api_key and
# QJAPIClient are assumed to be importable from the surrounding QJ package.


def query(event: Dict[str, Any]) -> None:
    """Run the query portion of a QJ"""
    query_config = QueryConfig()
    logger = Logger()
    logger.info(event=QJLogEvents.InitConfig, config=query_config)

    # The query lambda is fed by an SQS event source with BatchSize=1, so the
    # event must carry exactly one record whose body is a JSON-serialized Job.
    records = event.get("Records", [])
    if not records:
        raise Exception("No records found")
    if len(records) > 1:
        raise Exception(
            f"More than one record. BatchSize is probably not 1. event: {event}"
        )
    body = records[0].get("body")
    if body is None:
        raise Exception(
            f"No record body found. BatchSize is probably not 1. event: {event}"
        )
    body = json.loads(body)
    job = schemas.Job(**body)
    logger.info(event=QJLogEvents.InitJob, job=job)

    logger.info(event=QJLogEvents.RunQueryStart)
    query_result = run_query(job=job, config=query_config)
    logger.info(event=QJLogEvents.RunQueryEnd,
                num_results=query_result.get_length())

    # Every result row must include the account-id field; the remaining
    # columns become that account's result payload.
    results: List[schemas.Result] = []
    if query_config.account_id_key not in query_result.query_result_set.fields:
        raise Exception(
            f"Query results must contain field '{query_config.account_id_key}'"
        )
    for q_r in query_result.to_list():
        account_id = q_r[query_config.account_id_key]
        result = schemas.Result(
            account_id=account_id,
            result={
                key: val
                for key, val in q_r.items()
                if key != query_config.account_id_key
            },
        )
        results.append(result)

    graph_spec = schemas.ResultSetGraphSpec(
        graph_uris_load_times=query_result.graph_uris_load_times)
    result_set = schemas.ResultSetCreate(job=job,
                                         graph_spec=graph_spec,
                                         results=results)

    api_key = get_api_key(region_name=query_config.region)
    qj_client = QJAPIClient(host=query_config.api_host,
                            port=query_config.api_port,
                            api_key=api_key)
    logger.info(event=QJLogEvents.CreateResultSetStart)
    qj_client.create_result_set(result_set=result_set)
    logger.info(event=QJLogEvents.CreateResultSetEnd)
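For reference, a minimal sketch of the SQS-style event envelope query() expects: exactly one record whose body is the JSON form of a schemas.Job. The job payload fields below are placeholders, not the real Job schema.

import json

job_payload = {"name": "example-job"}  # placeholder; the real fields are defined by schemas.Job
event = {"Records": [{"body": json.dumps(job_payload)}]}
query(event)  # assumes query() from this example is in scope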
Example #2
 def get_jobs(self, active_only: bool = True) -> List[schemas.Job]:
     """Get all jobs, by default only active jobs"""
     url = f"{self._base_url_v1}/jobs"
     try:
         response = requests.get(url, params={"active_only": active_only})
     except Exception as ex:
         raise QJAPIClientError(
             f"Error connecting to {url}: {str(ex)}") from ex
     try:
         response.raise_for_status()
     except Exception as ex:
         raise_client_error(response, exception=ex)
     raw_json = response.json()
     return [schemas.Job(**job_json) for job_json in raw_json]
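A hedged usage sketch, assuming get_jobs() lives on the QJAPIClient constructed in Example #1; the host, port and API key values are placeholders.

client = QJAPIClient(host="qj-api.example.com", port=443, api_key="...")  # placeholder values
active_jobs = client.get_jobs()                # active job versions only (the default)
all_jobs = client.get_jobs(active_only=False)  # include inactive versions as well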
Example #3
 def create_job(self, job_in: schemas.JobCreate) -> schemas.Job:
     """Create a Job"""
     url = f"{self._base_url_v1}/jobs"
     try:
         response = requests.post(url,
                                  json=json.loads(job_in.json()),
                                  headers=self._auth_header)
     except Exception as ex:
         raise QJAPIClientError(
             f"Error connecting to {url}: {str(ex)}") from ex
     try:
         response.raise_for_status()
     except Exception as ex:
         raise_client_error(response, exception=ex)
     raw_json = response.json()
     return schemas.Job(**raw_json)
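A hedged sketch of creating a job with the client from the previous sketch. The JobCreate fields shown are guesses inferred from the validation in Example #6; the authoritative schema is schemas.JobCreate.

job_in = schemas.JobCreate(
    name="unused-iam-roles",                   # assumed field name
    query="SELECT ?account_id WHERE { ... }",  # placeholder query; must project the account-id field (see Example #6)
    max_graph_age_sec=3600,                    # optional; a server-side default applies when omitted
)
job = client.create_job(job_in)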
Example #4
 def get_job(self, job_name: str) -> Optional[schemas.Job]:
     """Get the active version of a Job by name"""
     url = f"{self._base_url_v1}/jobs/{job_name}"
     try:
         response = requests.get(url)
         if response.status_code == 404:
             return None
     except Exception as ex:
         raise QJAPIClientError(
             f"Error connecting to {url}: {str(ex)}") from ex
     try:
         response.raise_for_status()
     except Exception as ex:
         raise_client_error(response, exception=ex)
     raw_json = response.json()
     return schemas.Job(**raw_json)
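A short sketch of the lookup path; a 404 is surfaced as None rather than as an exception, so callers branch on that.

job = client.get_job("unused-iam-roles")  # placeholder job name
if job is None:
    print("no active version of that job exists")
else:
    print(job)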
Example #5
 def update_job(self, job_name: str, job_created: datetime,
                job_in: schemas.JobUpdate) -> schemas.Job:
     """Update a Job"""
     url = f"{self._base_url_v1}/jobs/{job_name}/versions/{job_created.isoformat()}"
     try:
         response = requests.patch(url,
                                   json=json.loads(job_in.json()),
                                   headers=self._auth_header)
     except Exception as ex:
         raise QJAPIClientError(
             f"Error connecting to {url}: {str(ex)}") from ex
     try:
         response.raise_for_status()
     except Exception as ex:
         raise_client_error(response, exception=ex)
     raw_json = response.json()
     return schemas.Job(**raw_json)
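A hedged sketch of the update flow. The version to patch is addressed by the job's created timestamp, so the current version is fetched first; the created attribute and the JobUpdate field below are assumptions about the schema.

existing = client.get_job("unused-iam-roles")
if existing is not None:
    job_update = schemas.JobUpdate(active=False)  # assumed field
    client.update_job(
        job_name="unused-iam-roles",
        job_created=existing.created,  # assumed attribute holding the version's creation timestamp
        job_in=job_update,
    )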
Example #6
 def create(self, db_session: Session,
            job_create_in: schemas.JobCreate) -> Job:
     """Create a Job"""
     logger = Logger()
     logger.info(event=QJLogEvents.CreateJob, job_create=job_create_in)
     try:
         query = rdflib.Graph().query(job_create_in.query)
     except Exception as ex:
         raise JobQueryInvalid(
             f"Invalid query {job_create_in.query}: {str(ex)}") from ex
     query_fields = [str(query_var) for query_var in query.vars]
     if self._account_id_key not in query_fields:
         raise JobQueryMissingAccountId(
             f"Query {job_create_in.query} missing '{self._account_id_key}' field"
         )
     if job_create_in.result_expiration_sec is None:
         job_create_in.result_expiration_sec = self._result_expiration_sec_default
     if job_create_in.result_expiration_sec > self._result_expiration_sec_limit:
         raise JobInvalid(
             f"Field result_expiration_sec value {job_create_in.result_expiration_sec} "
             f"must be <= {self._result_expiration_sec_limit}")
     if job_create_in.max_graph_age_sec is None:
         job_create_in.max_graph_age_sec = self._max_graph_age_sec_default
     else:
         if job_create_in.max_graph_age_sec > self._max_graph_age_sec_limit:
             raise JobInvalid(
                 f"Field max_graph_age_sec value {job_create_in.max_graph_age_sec} must be "
                 f"<= {self._max_graph_age_sec_limit}")
     if job_create_in.max_result_age_sec is None:
         job_create_in.max_result_age_sec = self._max_result_age_sec_default
     else:
         if job_create_in.max_result_age_sec > self._max_result_age_sec_limit:
             raise JobInvalid(
                 f"Field max_result_age_sec value {job_create_in.max_result_age_sec} must be "
                 f"<= {self._max_result_age_sec_limit}")
     obj_in_data = job_create_in.dict()
     obj_in_data["query_fields"] = query_fields
     job_create = schemas.Job(**obj_in_data)
     db_obj = Job(**job_create.dict())  # type: ignore
     db_session.add(db_obj)
     db_session.commit()
     db_session.refresh(db_obj)
     return db_obj
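The query-field validation is the key step above: the SPARQL text is parsed by running it against an empty rdflib graph, and the projected variables become query_fields. A standalone sketch of that check, with placeholder predicate IRIs and "account_id" assumed as the account-id key:

import rdflib

sparql = (
    "SELECT ?account_id ?role_arn "
    "WHERE { ?role <urn:alti:account_id> ?account_id . ?role <urn:alti:arn> ?role_arn }"
)
result = rdflib.Graph().query(sparql)       # raises if the SPARQL is invalid
fields = [str(var) for var in result.vars]  # projected variable names, e.g. ['account_id', 'role_arn']
assert "account_id" in fields               # mirrors the _account_id_key check in create()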