def update_job(
    job_id: str,
    job_def: JobDefinition,
    token: str = Depends(oauth2_scheme),
) -> JobWrapperDefinition:
    """Replace an existing job's task manifest and config files on disk.

    :param job_id: Job identifier; converted to a repo-relative path.
    :param job_def: New definition whose tasks/configs replace the old ones.
    :param token: OAuth2 bearer token (auth dependency).
    :return: The updated job definition, wrapped.
    """
    id_ = _convert_to_path(job_id)
    # compute the id of the tasks if needed
    tasks = [_to_dict(id_, s) for s in job_def.tasks]
    task_obj = {'tasks': tasks}

    # Ensure the job directory exists BEFORE writing any files into it.
    # The original performed this check only after writing the config files,
    # which would fail with FileNotFoundError for a not-yet-created location.
    job_loc = _get_job_loc(id_)
    if not os.path.exists(job_loc):
        LOGGER.info('Creating job location {}'.format(job_loc))
        os.makedirs(job_loc)

    # Write each config file and commit it to the job repo.
    for config in job_def.configs:
        file_to_write = _get_job_file(id_, config.name)
        with open(file_to_write, 'w') as wf:
            wf.write(config.content)
        _add_to_job_repo(file_to_write, 'via odin-http create_job')

    # Write the task manifest last and record the resulting commit sha.
    file_to_write = _get_job_file(id_, 'main.yml')
    with open(file_to_write, 'w') as wf:
        yaml.dump(task_obj, wf)
    sha = _add_to_job_repo(file_to_write, "via odin-http create_job")
    LOGGER.info(f'Updated git {sha}')

    job_def.tasks = tasks
    # Keyword arg to match the other wrapper constructions in this module;
    # pydantic models do not accept positional arguments.
    return JobWrapperDefinition(job=job_def)
def create_pipeline(
    pipe_def: PipelineWrapperDefinition,
    token: str = Depends(oauth2_scheme),
) -> PipelineWrapperDefinition:
    """Submit the job named in *pipe_def* and return the new pipeline handle.

    :param pipe_def: Wrapper naming the job to launch.
    :param token: OAuth2 bearer token (auth dependency).
    :return: Wrapper around the freshly created pipeline definition.
    """
    job_path = _convert_to_path(pipe_def.pipeline.job)
    # Pull the latest job definitions before submitting.
    _update_job_repo()
    new_id = _run_ws(_submit_job(get_ws_url(), job_path))
    definition = PipelineDefinition(name=new_id, id=new_id, job=job_path)
    return PipelineWrapperDefinition(pipeline=definition)
async def create_pipeline(
    pipe_def: PipelineWrapperDefinition,
    token: str = Depends(oauth2_scheme),
) -> PipelineWrapperDefinition:
    """Submit a pipeline job, expanding job templates first.

    :param pipe_def: Wrapper naming the job (possibly a template) to launch.
    :param token: OAuth2 bearer token (auth dependency).
    :return: Wrapper around the freshly created pipeline definition.
    """
    job_path = _convert_to_path(pipe_def.pipeline.job)
    # Pull the latest job definitions before submitting.
    _update_job_repo()
    # Templated jobs get their placeholders filled from the request context.
    if _is_template(job_path):
        job_path = _substitute_template(job_path, pipe_def.context or {})
    new_id = await _submit_job(get_ws_url(), job_path)
    definition = PipelineDefinition(name=new_id, id=new_id, job=job_path)
    return PipelineWrapperDefinition(pipeline=definition)
def get_jobs(q: Optional[str] = '*') -> JobResults:
    """List job definitions whose path matches the glob-style query *q*.

    :param q: Query prefix; expanded with a trailing ``*`` and globbed
        under ``ODIN_FS_ROOT``.
    :return: All matching job definitions.
    """
    _update_job_repo()
    pattern = '{}*'.format(_convert_to_path(q))
    found = []
    for path_value in glob.glob(os.path.join(ODIN_FS_ROOT, pattern)):
        # Only directories containing a main.yml manifest count as jobs
        # (isfile implies existence, so a separate exists() test is redundant).
        manifest = os.path.join(path_value, 'main.yml')
        if os.path.isfile(manifest):
            job_def = _job_def(os.path.basename(path_value))
            if job_def:
                found.append(job_def)
    return JobResults(found)
async def upload_job_file(
    job_id: str,
    filename: str,
    body: Request = Body(..., media_type="application/binary"),
    token: str = Depends(oauth2_scheme),
):
    """Store an uploaded file under a job's directory and commit it.

    :param job_id: Job identifier; converted to a repo-relative path.
    :param filename: Target file name (validated before use).
    :param body: Raw request whose payload is written verbatim.
    :param token: OAuth2 bearer token (auth dependency).
    :return: UploadDefinition with a ``location@sha`` string and byte count.
    """
    id_ = _convert_to_path(job_id)
    _validate_filename(filename)
    file_to_write = _get_job_file(id_, filename)
    if os.path.exists(file_to_write):
        # Use the module-level LOGGER for consistency with the other
        # handlers (the original called the root logging module here).
        LOGGER.warning("Found {}. Overwriting".format(filename))
    # Read the payload into a distinct name rather than rebinding the
    # `body` parameter, which the original shadowed with its own bytes.
    payload = await body.body()
    with open(file_to_write, 'wb') as wf:
        wf.write(payload)
    sha = _add_to_job_repo(file_to_write, "via odin-http upload_job_file")
    return UploadDefinition(
        location=f'{file_to_write}@{sha}',
        bytes=os.stat(file_to_write).st_size,
    )
def create_job(job_def: JobWrapperDefinition, token: str = Depends(oauth2_scheme)) -> JobWrapperDefinition:
    """Create a new, empty job: a directory holding a blank main.yml manifest.

    :param job_def: Wrapper supplying the new job's name.
    :param token: OAuth2 bearer token (auth dependency).
    :return: Wrapper around the freshly created job definition.
    :raises Exception: if a job already exists at the target location.
    """
    id_ = _convert_to_path(job_def.job.name)
    new_job_path = _get_job_loc(id_)
    if os.path.exists(new_job_path):
        # BUG FIX: the original f-string interpolated the builtin `id`
        # function ({id}) instead of the job id ({id_}).
        raise Exception(f"There is already a job at {id_}")
    os.makedirs(new_job_path)
    file_to_write = _get_job_file(id_, 'main.yml')
    # Seed the manifest with an empty task list.
    with open(file_to_write, 'w') as wf:
        yaml.dump({'tasks': []}, wf)
    sha = _add_to_job_repo(file_to_write, "via odin-http create_job")
    LOGGER.info(f'Updated git {sha}')
    updated_job_def = JobDefinition(
        id=id_,
        name=id_,
        location=new_job_path,
        creation_time=datetime.now(),
    )
    return JobWrapperDefinition(job=updated_job_def)
def download_job_file(job_id: str, filename: str) -> str:
    """Return the contents of *filename* stored under the given job.

    :param job_id: Job identifier; converted to a repo-relative path.
    :param filename: Name of the file to read from the job's directory.
    :return: The file contents as a string.
    """
    job_path = _convert_to_path(job_id)
    # Sync with the job repo so we serve the latest committed version.
    _update_job_repo()
    return _read_file(_get_job_file(job_path, filename))
def get_job(job_id: str) -> JobWrapperDefinition:
    """Fetch a single job definition by id.

    :param job_id: Job identifier; converted to a repo-relative path.
    :return: The job definition, wrapped.
    """
    job_path = _convert_to_path(job_id)
    # Sync with the job repo so we serve the latest committed version.
    _update_job_repo()
    return JobWrapperDefinition(job=_job_def(job_path))