def check_health():
    """Health-check endpoint: probe the Docker engine via the job service.

    Attempts to list containers through the injected ``job_service``.

    :return: JSON response of ``Health("UP")`` on success, otherwise
        ``Health("DOWN", {...})`` with a diagnostic ``error`` message.
    """
    try:
        docker_service = Autowired("job_service")
        # Cheap round-trip to the Docker daemon; raises if the engine/socket is down.
        docker_service.list_containers()
        return jsonify(Health("UP").to_dict())
    except ConnectionError:
        # The exception carries no extra detail worth returning, so it is not bound.
        return jsonify(
            Health(
                "DOWN", {
                    "error": "Docker ConnectionError, Check Docker Engine and Socket is up"
                }).to_dict())
    except APIError:
        return jsonify(
            Health(
                "DOWN", {
                    "error": "Docker APIError, Check Docker Engine is Running and API Socket is working"
                }).to_dict())
    except Exception as e:
        # Log the full traceback of unexpected failures (previously only the
        # exception class was printed to stdout, losing the stack trace).
        import logging
        logging.getLogger(__name__).exception("Unexpected error during health check")
        return jsonify(Health("DOWN", {"error": str(e)}).to_dict())
class AgentInterface:
    """
    Entry point into the agent package: lets callers schedule a new Run of a
    Job through the injected job service.
    """

    _job_service: JobService = Autowired("job_service")

    @staticmethod
    def run_job(uuid: str, name: str, url: str):
        """
        Schedule a new Run for the job described by the given fields.

        :param uuid: uuid of the Job so the callback functionality maps correctly
        :param name: Name of the Job
        :param url: url for the job
        :return: UUID of the Run which can be used to query status
        """
        requested_job = Job(uuid, name, url)
        return AgentInterface._job_service.process_request(requested_job)
from kodiak.server.papi._sqlite.step import StepDao, StepDto
from kodiak.server.papi.repos import JobRepository, RunRepository
from kodiak.utils.version import is_same_or_later_version

# Minimum SQLite library version this module supports.
# NOTE(review): the reason for 3.22.0 specifically is not documented here — confirm.
MIN_SQLITE_VERSION = "3.22.0"

LOGGER = logging.getLogger(__name__)

# Fail fast at import time if the linked SQLite library is too old.
sqlite_version = sqlite3.sqlite_version
LOGGER.info(f"Running SQLite Version {sqlite_version}")
if not is_same_or_later_version(sqlite_version, MIN_SQLITE_VERSION):
    raise Exception(
        f"SQLite version {sqlite_version} is not valid, requires minimum of {MIN_SQLITE_VERSION}"
    )

# Apply any pending schema updates before the repositories below are used.
schema_interface: SchemaInterface = Autowired("schema_interface")
schema_interface.check_for_updates()


@Repository(name="job_repository")
class SqliteJobRepository(JobRepository):
    """SQLite-backed JobRepository delegating persistence to JobDao."""

    def save(self, job: Job) -> Job:
        """Persist *job* via the DAO and return it converted back to a Job."""
        job_dto = JobDao.save(SqliteJobRepository._to_dto(job))
        return SqliteJobRepository._to_job(job_dto)

    def delete_by_uuid(self, uuid: str) -> None:
        """Look up the job by *uuid* and delete it through the DAO."""
        job: JobDto = JobDao.find_job_by_uuid(uuid)
        JobDao.delete(job)

    def find_all(self) -> List[Job]:
        return [
import logging

from fxq.core.beans.factory.annotation import Autowired

from kodiak.server.papi.repos import RunRepository

LOGGER = logging.getLogger(__name__)

# Repository injected once at module import time.
_run_repository: RunRepository = Autowired("run_repository")


def do_callback(run_task):
    """Persist the Run extracted from *run_task* to the run repository.

    :param run_task: task object exposing ``get_run()``; presumably supplied
        by the agent when a run progresses — confirm against the caller.
    """
    _run_repository.save(run_task.get_run())
from fxq.core.beans.factory.annotation import Autowired

from kodiak.agent import AgentInterface
from kodiak.model.job import Job
from kodiak.model.run import Run
from kodiak.server.gql.schema_adapter import to_gql_schema
from kodiak.server.papi.repos import JobRepository, RunRepository

# Repositories injected once at module import time; shared by all resolvers below.
_job_repository: JobRepository = Autowired("job_repository")
_run_repository: RunRepository = Autowired("run_repository")


def add_job(value, info, **args):
    """Create and persist a new Job from args["name"]/args["url"]; return its GQL form."""
    job = Job(name=args["name"], url=args["url"])
    job = _job_repository.save(job)
    return to_gql_schema(job)


def update_job(value, info, **args):
    """Save a Job built from args over the existing uuid; return its GQL form."""
    # NOTE(review): existing_job is never used — presumably this relies on
    # find_by_uuid raising for an unknown uuid as an existence check; confirm.
    existing_job = _job_repository.find_by_uuid(args["uuid"])
    job = Job(uuid=args["uuid"], name=args["name"], url=args["url"])
    # NOTE(review): returns the pre-save object, unlike add_job which returns
    # the entity produced by save() — confirm this is intentional.
    _job_repository.save(job)
    return to_gql_schema(job)


def remove_job(value, info, **args):
    """Delete the job identified by args["uuid"]; always returns True."""
    _job_repository.delete_by_uuid(args["uuid"])
    return True


def start_job(value, info, **args):
def __init__(self,
             run_service=Autowired("run_service"),
             docker_service=Autowired("docker_service")):
    """Wire up the run and docker service dependencies.

    :param run_service: run service; default is injected via Autowired when
        this method is defined (class-definition time).
    :param docker_service: docker service; default injected the same way.
    """
    self.run_service = run_service
    self.docker_service = docker_service
    # Lazy %-style args: let the logging framework format only if the record
    # is actually emitted (was an eagerly-formatted "%" expression).
    LOGGER.info("System Pipeline Base set to %s", working_dir)
def __init__(self, docker_service=Autowired("docker_service")):
    """Store the docker service dependency for later use.

    :param docker_service: DockerService instance; default is injected via
        Autowired when this method is defined (class-definition time).
    """
    self._docker_service: DockerService = docker_service