def command_process(jobs):
    """Apply every queued job, after first dropping known duplicates."""
    deduped = remove_duplicates(jobs)
    log.info(f"Applying remaining queued jobs: {deduped}")
    for queued_job in deduped:
        process_job(queued_job)
    log.info("Finished applying queued jobs")
def remove_duplicates(jobs):
    """Return *jobs* minus any duplicates, deleting the duplicates from the queue."""
    log.info("Checking for duplicate jobs")
    dupes = duplicate_jobs()
    if dupes:
        log.warning(f"Removing duplicate jobs: {dupes}")
        delete_jobs("queued", dupes, "Deleting duplicated jobs")
    # Keep only the jobs that were not flagged as duplicates.
    kept = []
    for job in jobs:
        if job not in dupes:
            kept.append(job)
    return kept
def apply_job(job_name):
    """Apply a queued job manifest to the Kubernetes cluster.

    Loads the in-cluster configuration when the IN_CLUSTER environment
    variable is "true", otherwise falls back to the local kubeconfig,
    then creates the resources described in jobs/queued/<job_name>.

    Returns:
        True if the manifest was applied, False if anything raised.
    """
    try:
        log.info(f"Applying job {job_name}")
        # Single .get() lookup with an implicit None default replaces the
        # `in` membership test followed by a second indexing lookup.
        if environ.get("IN_CLUSTER") == "true":
            k8s_config = config.load_incluster_config()
        else:
            k8s_config = config.load_kube_config()
        # NOTE(review): in the official kubernetes client both loaders
        # return None and instead populate the default configuration, so
        # ApiClient(configuration=None) falls back to that default —
        # confirm this is the intended wiring.
        k8s_client = client.api_client.ApiClient(configuration=k8s_config)
        create_from_yaml(k8s_client, f"jobs/queued/{job_name}")
    except Exception as e:
        log.error(f"Failed to apply job {job_name}:")
        log.error(e)
        return False
    return True
def process_job(job):
    """Apply one queued job and transition it to its resulting state."""
    try:
        log.info(f"Applying queued job: {job}")
        # Success moves the job to "processing"; failure moves it to "failed".
        if apply_job(job):
            log_level = logging.INFO
            new_state = "processing"
        else:
            log_level = logging.WARNING
            new_state = "failed"
        log.log(log_level, f"Applied queued {job}, new state is {new_state}")
        move_job(job, "queued", new_state)
    except Exception as e:
        log.error(
            f"Encountered an unexpected exception whilst processing {job}")
        log.error(e)
def command_template(args):
    """Render a job template and queue it as jobs/queued/<args.filename>.

    If args.template names a known template, print its content instead of
    writing a file; otherwise treat args.template as a file path, format
    its contents with *args*, and write the result into the queue.
    """
    try:
        # Call templates() once: the original evaluated it twice (once for
        # the membership test, once for the value lookup).
        known_templates = templates(args)
        if args.template in known_templates:
            print(known_templates[args.template])
        else:
            with open(args.template, "r") as template_file:
                new_job = template_file.read().format(args=args)
            with open(os.path.join("jobs", "queued", args.filename), "w+") as job_file:
                job_file.write(new_job)
            log.info("Template created")
    except Exception as e:
        log.error("Template creation failed")
        log.error(e)
        log.error(e.args)
def command_init(repository):
    """Initialise the jobs directory: reuse it, clone it, or scaffold it."""
    # Guard clauses instead of an if/elif/else ladder.
    if os.path.exists('jobs'):
        log.info("jobs directory already exists... skipping")
        return
    if repository:
        os.makedirs("jobs")
        log.info("Cloning " + repository)
        git.Repo.clone_from(repository, "jobs")
        log.info("Done")
        return
    log.info("Scaffolding a jobs repository")
    for state in job_states():
        make_empty_git_dir(os.path.join("jobs", state))
    log.info('''Done, please run:
$ cd jobs
$ git init
$ git add .
$ git commit -m 'initial commit'
$ git remote origin add your-bare-repository
$ git push origin master''')
def move_job(filename, source_state, destination_state):
    """Move a job file between state directories and push the commit.

    Performs the move through the git index so the transition is recorded
    as a commit on the jobs repository, then pushes it to origin. On any
    failure, pulls the latest remote state so the next operation can
    re-push cleanly.
    """
    log.info(f"Moving {filename} from {source_state} to {destination_state}")
    # Bind up front so the except-block pull cannot hit an unbound name
    # when git.Repo("jobs") itself is what raised.
    repo = None
    try:
        repo = git.Repo("jobs")
        repo.index.move([
            os.path.join(source_state, filename),
            os.path.join(destination_state, filename)
        ])
        # All messages interpolate the job's filename; the previous code
        # logged and committed the literal text "(unknown)" instead.
        log.info(f"Committing state transition for {filename}")
        commit = repo.index.commit(
            f"{filename} transitioned from {source_state} to {destination_state}"
        )
        log.info(f"Pushing commit {commit} for {filename}")
        origin = repo.remote(name='origin')
        origin.push()
        log.info(f"Successfully moved {filename}")
    except Exception as e:
        log.error(f"Failed to move {filename}")
        log.error(e)
        # Pull the latest. In case of remote changes, next operation should re-push changes
        if repo is not None:
            repo.remotes.origin.pull()