class Dagobah(object):
    """ Top-level controller for all Dagobah usage.

    This is in control of all the jobs for a specific Dagobah
    instance, as well as top-level parameters such as the backend
    used for permanent storage.
    """

    def __init__(self, backend=None, event_handler=None):
        """ Construct a new Dagobah instance with a specified Backend.

        backend defaults to a fresh BaseBackend. The previous signature
        used a mutable default argument (backend=BaseBackend()), which
        evaluated once at definition time and silently shared a single
        backend object across every default-constructed instance.
        """
        self.backend = backend if backend is not None else BaseBackend()
        self.event_handler = event_handler
        self.dagobah_id = self.backend.get_new_dagobah_id()
        self.jobs = []
        self.hosts = []
        self.created_jobs = 0

        # scheduler runs as a daemon thread so it exits with the process
        self.scheduler = Scheduler(self)
        self.scheduler.daemon = True
        self.scheduler.start()

        self.commit()

    def __repr__(self):
        return '<Dagobah with Backend %s>' % self.backend

    def set_backend(self, backend):
        """ Manually set backend after construction. """
        self.backend = backend
        self.dagobah_id = self.backend.get_new_dagobah_id()

        # repoint every owned object at the new backend
        for job in self.jobs:
            job.backend = backend
            for task in job.tasks.values():
                task.backend = backend
        for host in self.hosts:
            host.backend = backend

        self.commit(cascade=True)

    def from_backend(self, dagobah_id):
        """ Reconstruct this Dagobah instance from the backend. """
        rec = self.backend.get_dagobah_json(dagobah_id)
        if not rec:
            raise DagobahError("dagobah with id %s does not exist "
                               "in backend" % dagobah_id)
        self._construct_from_json(rec)

    def _construct_from_json(self, rec):
        """ Construct this Dagobah instance from a JSON document. """
        # wipe any current state before rebuilding from the record
        self.delete()

        for required_key in ['dagobah_id', 'created_jobs']:
            setattr(self, required_key, rec[required_key])

        for job_json in rec.get('jobs', []):
            self._add_job_from_spec(job_json)

        for host_json in rec.get('hosts', []):
            self.add_host(host_name=host_json['host_name'],
                          host_id=host_json['host_id'])

        self.commit(cascade=True)

    def add_job_from_json(self, job_json, destructive=False):
        """ Construct a new Job from an imported JSON spec.

        If destructive is True, any existing job with the same name is
        deleted first.
        """
        rec = self.backend.decode_import_json(job_json)
        if destructive:
            try:
                self.delete_job(rec['name'])
            except DagobahError:  # expected if no job with this name
                pass
        self._add_job_from_spec(rec, use_job_id=False)
        self.commit(cascade=True)

    def _add_job_from_spec(self, job_json, use_job_id=True):
        """ Add a single job to the Dagobah from a spec. """
        job_id = (job_json['job_id'] if use_job_id
                  else self.backend.get_new_job_id())
        self.add_job(str(job_json['name']), job_id)
        job = self.get_job(job_json['name'])

        if job_json.get('cron_schedule', None):
            job.schedule(job_json['cron_schedule'])

        for task in job_json.get('tasks', []):
            self.add_task_to_job(job,
                                 str(task['command']),
                                 str(task['name']),
                                 soft_timeout=task.get('soft_timeout', 0),
                                 hard_timeout=task.get('hard_timeout', 0),
                                 host_id=task.get('host_id', None))

        dependencies = job_json.get('dependencies', {})
        # items() instead of the Python 2-only iteritems()
        for from_node, to_nodes in dependencies.items():
            for to_node in to_nodes:
                job.add_dependency(from_node, to_node)

    def commit(self, cascade=False):
        """ Commit this Dagobah instance to the backend.

        If cascade is True, all child Jobs are committed as well.
        """
        self.backend.commit_dagobah(self._serialize())
        if cascade:
            # plain loop: committing is a side effect, not a mapping
            for job in self.jobs:
                job.commit()

    def delete(self):
        """ Delete this Dagobah instance from the Backend. """
        self.jobs = []
        self.hosts = []
        self.created_jobs = 0
        self.backend.delete_dagobah(self.dagobah_id)

    def add_job(self, job_name, job_id=None):
        """ Create a new, empty Job.

        Raises DagobahError if the name is already in use.
        """
        if not self._name_is_available(job_name):
            raise DagobahError('name %s is not available' % job_name)

        if not job_id:
            job_id = self.backend.get_new_job_id()
            self.created_jobs += 1

        self.jobs.append(Job(self, self.backend, job_id, job_name))

        job = self.get_job(job_name)
        job.commit()

    def get_job(self, job_name):
        """ Returns a Job by name, or None if none exists. """
        for job in self.jobs:
            if job.name == job_name:
                return job
        return None

    def get_host(self, host_id):
        """ Returns a Host by id, or None if none exists. """
        for host in self.hosts:
            if host.id == host_id:
                return host
        return None

    def delete_job(self, job_name):
        """ Delete a job by name, or error out if no such job exists. """
        for idx, job in enumerate(self.jobs):
            if job.name == job_name:
                self.backend.delete_job(job.job_id)
                del self.jobs[idx]
                self.commit()
                return
        raise DagobahError('no job with name %s exists' % job_name)

    def add_task_to_job(self, job_or_job_name, task_command, task_name=None,
                        **kwargs):
        """ Add a task to a job owned by the Dagobah instance.

        job_or_job_name may be a Job instance or a job name; extra
        keyword arguments are forwarded to Job.add_task.
        """
        if isinstance(job_or_job_name, Job):
            job = job_or_job_name
        else:
            job = self.get_job(job_or_job_name)

        if not job:
            raise DagobahError('job %s does not exist' % job_or_job_name)

        if not job.state.allow_change_graph:
            raise DagobahError("job's graph is immutable in its current "
                               "state: %s" % job.state.status)

        job.add_task(task_command, task_name, **kwargs)
        job.commit()

    def add_host(self, host_name, host_id=None):
        """ Add a new host.

        Raises DagobahError if a host with this name already exists.
        """
        if not self._host_is_added(host_name=host_name):
            raise DagobahError('Host %s is already added.' % host_name)

        if not host_id:
            host_id = self.backend.get_new_host_id()

        self.hosts.append(Host(self, self.backend, host_id, host_name))

        host = self.get_host(host_id)
        host.commit()

    def delete_host(self, host_name):
        """ Delete a host by name, or error out if no such host exists. """
        for idx, host in enumerate(self.hosts):
            if host.name == host_name:
                self.backend.delete_host(host.id)
                del self.hosts[idx]
                self.commit()
                return
        raise DagobahError('no host with name %s exists' % host_name)

    def _host_is_added(self, host_name=None):
        """ Returns True if no host with this name exists yet.

        NOTE(review): despite the name, True means the host name is
        still available; False means it is already taken.
        """
        return not any(host.name == host_name for host in self.hosts)

    def _name_is_available(self, job_name):
        """ Returns True if no job with this name exists yet. """
        return not any(job.name == job_name for job in self.jobs)

    def _serialize(self, include_run_logs=False, strict_json=False):
        """ Serialize a representation of this Dagobah object to JSON. """
        result = {'dagobah_id': self.dagobah_id,
                  'created_jobs': self.created_jobs,
                  'jobs': [job._serialize(include_run_logs=include_run_logs,
                                          strict_json=strict_json)
                           for job in self.jobs],
                  'hosts': [host._serialize() for host in self.hosts]}
        if strict_json:
            # round-trip through StrictJSONEncoder to coerce any
            # non-JSON-native values in the document
            result = json.loads(json.dumps(result, cls=StrictJSONEncoder))
        return result
class Dagobah(object):
    """ Top-level controller for all Dagobah usage.

    This is in control of all the jobs for a specific Dagobah
    instance, as well as top-level parameters such as the backend
    used for permanent storage.
    """

    def __init__(self, backend=None, event_handler=None):
        """ Construct a new Dagobah instance with a specified Backend.

        backend defaults to a fresh BaseBackend. The previous signature
        used a mutable default argument (backend=BaseBackend()), which
        evaluated once at definition time and silently shared a single
        backend object across every default-constructed instance.
        """
        self.backend = backend if backend is not None else BaseBackend()
        self.event_handler = event_handler
        self.dagobah_id = self.backend.get_new_dagobah_id()
        self.jobs = []
        self.created_jobs = 0

        # scheduler runs as a daemon thread so it exits with the process
        self.scheduler = Scheduler(self)
        self.scheduler.daemon = True
        self.scheduler.start()

        self.commit()

    def __repr__(self):
        return '<Dagobah with Backend %s>' % self.backend

    def from_backend(self, dagobah_id):
        """ Reconstruct this Dagobah instance from the backend. """
        rec = self.backend.get_dagobah_json(dagobah_id)
        if not rec:
            raise DagobahError('dagobah with id %s does not exist in backend'
                               % dagobah_id)

        # delete current version of this Dagobah instance
        self.delete()

        for required_key in ['dagobah_id', 'created_jobs']:
            setattr(self, required_key, rec[required_key])

        for job_json in rec.get('jobs', []):
            self.add_job(str(job_json['name']), job_json['job_id'])
            job = self.get_job(job_json['name'])

            if job_json.get('cron_schedule', None):
                job.schedule(job_json['cron_schedule'])

            for task in job_json.get('tasks', []):
                self.add_task_to_job(job,
                                     str(task['command']),
                                     str(task['name']))

            dependencies = job_json.get('dependencies', {})
            # items() instead of the Python 2-only iteritems()
            for from_node, to_nodes in dependencies.items():
                for to_node in to_nodes:
                    job.add_dependency(from_node, to_node)

        self.commit(cascade=True)

    def commit(self, cascade=False):
        """ Commit this Dagobah instance to the backend.

        If cascade is True, all child Jobs are committed as well.
        """
        self.backend.commit_dagobah(self._serialize())
        if cascade:
            # plain loop: committing is a side effect, not a mapping
            for job in self.jobs:
                job.commit()

    def delete(self):
        """ Delete this Dagobah instance from the Backend. """
        self.jobs = []
        self.created_jobs = 0
        self.backend.delete_dagobah(self.dagobah_id)

    def add_job(self, job_name, job_id=None):
        """ Create a new, empty Job.

        Raises DagobahError if the name is already in use.
        """
        if not self._name_is_available(job_name):
            raise DagobahError('name %s is not available' % job_name)

        if not job_id:
            job_id = self.backend.get_new_job_id()
            self.created_jobs += 1

        self.jobs.append(Job(self, self.backend, job_id, job_name))

        job = self.get_job(job_name)
        job.commit()

    def get_job(self, job_name):
        """ Returns a Job by name, or None if none exists. """
        for job in self.jobs:
            if job.name == job_name:
                return job
        return None

    def delete_job(self, job_name):
        """ Delete a job by name, or error out if no such job exists. """
        for idx, job in enumerate(self.jobs):
            if job.name == job_name:
                self.backend.delete_job(job.job_id)
                del self.jobs[idx]
                # persist the removal; without this the backend's dagobah
                # record keeps referencing the deleted job
                self.commit()
                return
        raise DagobahError('no job with name %s exists' % job_name)

    def add_task_to_job(self, job_or_job_name, task_command, task_name=None):
        """ Add a task to a job owned by the Dagobah instance.

        job_or_job_name may be a Job instance or a job name.
        """
        if isinstance(job_or_job_name, Job):
            job = job_or_job_name
        else:
            job = self.get_job(job_or_job_name)

        if not job:
            raise DagobahError('job %s does not exist' % job_or_job_name)

        if not job.state.allow_change_graph:
            raise DagobahError("job's graph is immutable in its current "
                               "state: %s" % job.state.status)

        job.add_task(task_command, task_name)
        job.commit()

    def _name_is_available(self, job_name):
        """ Returns True if no job with this name exists yet. """
        return not any(job.name == job_name for job in self.jobs)

    def _serialize(self, include_run_logs=False):
        """ Serialize a representation of this Dagobah object to JSON. """
        return {'dagobah_id': self.dagobah_id,
                'created_jobs': self.created_jobs,
                'jobs': [job._serialize(include_run_logs=include_run_logs)
                         for job in self.jobs]}
class Dagobah(object):
    """ Top-level controller for all Dagobah usage.

    This is in control of all the jobs for a specific Dagobah
    instance, as well as top-level parameters such as the backend
    used for permanent storage.
    """

    def __init__(self, backend=None, event_handler=None):
        """ Construct a new Dagobah instance with a specified Backend.

        backend defaults to a fresh BaseBackend. The previous signature
        used a mutable default argument (backend=BaseBackend()), which
        evaluated once at definition time and silently shared a single
        backend object across every default-constructed instance.
        """
        self.backend = backend if backend is not None else BaseBackend()
        self.event_handler = event_handler
        self.dagobah_id = self.backend.get_new_dagobah_id()
        self.jobs = []
        self.created_jobs = 0

        # scheduler runs as a daemon thread so it exits with the process
        self.scheduler = Scheduler(self)
        self.scheduler.daemon = True
        self.scheduler.start()

        self.commit()

    def __repr__(self):
        return '<Dagobah with Backend %s>' % self.backend

    def set_backend(self, backend):
        """ Manually set backend after construction. """
        self.backend = backend
        self.dagobah_id = self.backend.get_new_dagobah_id()

        # repoint every owned object at the new backend
        for job in self.jobs:
            job.backend = backend
            for task in job.tasks.values():
                task.backend = backend

        self.commit(cascade=True)

    def from_backend(self, dagobah_id):
        """ Reconstruct this Dagobah instance from the backend. """
        rec = self.backend.get_dagobah_json(dagobah_id)
        if not rec:
            raise DagobahError('dagobah with id %s does not exist '
                               'in backend' % dagobah_id)
        self._construct_from_json(rec)

    def _construct_from_json(self, rec):
        """ Construct this Dagobah instance from a JSON document. """
        # wipe any current state before rebuilding from the record
        self.delete()

        for required_key in ['dagobah_id', 'created_jobs']:
            setattr(self, required_key, rec[required_key])

        for job_json in rec.get('jobs', []):
            self._add_job_from_spec(job_json)

        self.commit(cascade=True)

    def add_job_from_json(self, job_json, destructive=False):
        """ Construct a new Job from an imported JSON spec.

        If destructive is True, any existing job with the same name is
        deleted first.
        """
        rec = self.backend.decode_import_json(job_json)
        if destructive:
            try:
                self.delete_job(rec['name'])
            except DagobahError:  # expected if no job with this name
                pass
        self._add_job_from_spec(rec, use_job_id=False)

    def _add_job_from_spec(self, job_json, use_job_id=True):
        """ Add a single job to the Dagobah from a spec. """
        job_id = (job_json['job_id'] if use_job_id
                  else self.backend.get_new_job_id())
        self.add_job(str(job_json['name']), job_id)
        job = self.get_job(job_json['name'])

        if job_json.get('cron_schedule', None):
            job.schedule(job_json['cron_schedule'])

        for task in job_json.get('tasks', []):
            self.add_task_to_job(job,
                                 str(task['command']),
                                 str(task['name']),
                                 soft_timeout=task.get('soft_timeout', 0),
                                 hard_timeout=task.get('hard_timeout', 0))

        dependencies = job_json.get('dependencies', {})
        # items() instead of the Python 2-only iteritems()
        for from_node, to_nodes in dependencies.items():
            for to_node in to_nodes:
                job.add_dependency(from_node, to_node)

    def commit(self, cascade=False):
        """ Commit this Dagobah instance to the backend.

        If cascade is True, all child Jobs are committed as well.
        """
        self.backend.commit_dagobah(self._serialize())
        if cascade:
            # plain loop: committing is a side effect, not a mapping
            for job in self.jobs:
                job.commit()

    def delete(self):
        """ Delete this Dagobah instance from the Backend. """
        self.jobs = []
        self.created_jobs = 0
        self.backend.delete_dagobah(self.dagobah_id)

    def add_job(self, job_name, job_id=None):
        """ Create a new, empty Job.

        Raises DagobahError if the name is already in use.
        """
        if not self._name_is_available(job_name):
            raise DagobahError('name %s is not available' % job_name)

        if not job_id:
            job_id = self.backend.get_new_job_id()
            self.created_jobs += 1

        self.jobs.append(Job(self, self.backend, job_id, job_name))

        job = self.get_job(job_name)
        job.commit()

    def get_job(self, job_name):
        """ Returns a Job by name, or None if none exists. """
        for job in self.jobs:
            if job.name == job_name:
                return job
        return None

    def delete_job(self, job_name):
        """ Delete a job by name, or error out if no such job exists. """
        for idx, job in enumerate(self.jobs):
            if job.name == job_name:
                self.backend.delete_job(job.job_id)
                del self.jobs[idx]
                self.commit()
                return
        raise DagobahError('no job with name %s exists' % job_name)

    def add_task_to_job(self, job_or_job_name, task_command, task_name=None,
                        **kwargs):
        """ Add a task to a job owned by the Dagobah instance.

        job_or_job_name may be a Job instance or a job name; extra
        keyword arguments are forwarded to Job.add_task.
        """
        if isinstance(job_or_job_name, Job):
            job = job_or_job_name
        else:
            job = self.get_job(job_or_job_name)

        if not job:
            raise DagobahError('job %s does not exist' % job_or_job_name)

        if not job.state.allow_change_graph:
            raise DagobahError("job's graph is immutable in its current "
                               "state: %s" % job.state.status)

        job.add_task(task_command, task_name, **kwargs)
        job.commit()

    def _name_is_available(self, job_name):
        """ Returns True if no job with this name exists yet. """
        return not any(job.name == job_name for job in self.jobs)

    def _serialize(self, include_run_logs=False, strict_json=False):
        """ Serialize a representation of this Dagobah object to JSON. """
        result = {'dagobah_id': self.dagobah_id,
                  'created_jobs': self.created_jobs,
                  'jobs': [job._serialize(include_run_logs=include_run_logs,
                                          strict_json=strict_json)
                           for job in self.jobs]}
        if strict_json:
            # round-trip through StrictJSONEncoder to coerce any
            # non-JSON-native values in the document
            result = json.loads(json.dumps(result, cls=StrictJSONEncoder))
        return result
class Dagobah(object):
    """ Top-level controller for all Dagobah usage.

    This is in control of all the jobs for a specific Dagobah
    instance, as well as top-level parameters such as the backend
    used for permanent storage.
    """

    def __init__(self, backend=None, event_handler=None):
        """ Construct a new Dagobah instance with a specified Backend.

        backend defaults to a fresh BaseBackend. The previous signature
        used a mutable default argument (backend=BaseBackend()), which
        evaluated once at definition time and silently shared a single
        backend object across every default-constructed instance.
        """
        self.backend = backend if backend is not None else BaseBackend()
        self.event_handler = event_handler
        self.dagobah_id = self.backend.get_new_dagobah_id()
        self.jobs = []
        self.created_jobs = 0

        # scheduler runs as a daemon thread so it exits with the process
        self.scheduler = Scheduler(self)
        self.scheduler.daemon = True
        self.scheduler.start()

        self.commit()

    def __repr__(self):
        return '<Dagobah with Backend %s>' % self.backend

    def from_backend(self, dagobah_id):
        """ Reconstruct this Dagobah instance from the backend. """
        rec = self.backend.get_dagobah_json(dagobah_id)
        if not rec:
            raise DagobahError('dagobah with id %s does not exist in backend'
                               % dagobah_id)

        # delete current version of this Dagobah instance
        self.delete()

        for required_key in ['dagobah_id', 'created_jobs']:
            setattr(self, required_key, rec[required_key])

        for job_json in rec.get('jobs', []):
            self.add_job(str(job_json['name']), job_json['job_id'])
            job = self.get_job(job_json['name'])

            if job_json.get('cron_schedule', None):
                job.schedule(job_json['cron_schedule'])

            for task in job_json.get('tasks', []):
                self.add_task_to_job(job,
                                     str(task['command']),
                                     str(task['name']))

            dependencies = job_json.get('dependencies', {})
            # items() instead of the Python 2-only iteritems()
            for from_node, to_nodes in dependencies.items():
                for to_node in to_nodes:
                    job.add_dependency(from_node, to_node)

        self.commit(cascade=True)

    def commit(self, cascade=False):
        """ Commit this Dagobah instance to the backend.

        If cascade is True, all child Jobs are committed as well.
        """
        self.backend.commit_dagobah(self._serialize())
        if cascade:
            # plain loop: committing is a side effect, not a mapping
            for job in self.jobs:
                job.commit()

    def delete(self):
        """ Delete this Dagobah instance from the Backend. """
        self.jobs = []
        self.created_jobs = 0
        self.backend.delete_dagobah(self.dagobah_id)

    def add_job(self, job_name, job_id=None):
        """ Create a new, empty Job.

        Raises DagobahError if the name is already in use.
        """
        if not self._name_is_available(job_name):
            raise DagobahError('name %s is not available' % job_name)

        if not job_id:
            job_id = self.backend.get_new_job_id()
            self.created_jobs += 1

        self.jobs.append(Job(self, self.backend, job_id, job_name))

        job = self.get_job(job_name)
        job.commit()

    def get_job(self, job_name):
        """ Returns a Job by name, or None if none exists. """
        for job in self.jobs:
            if job.name == job_name:
                return job
        return None

    def delete_job(self, job_name):
        """ Delete a job by name, or error out if no such job exists. """
        for idx, job in enumerate(self.jobs):
            if job.name == job_name:
                self.backend.delete_job(job.job_id)
                del self.jobs[idx]
                # persist the removal; without this the backend's dagobah
                # record keeps referencing the deleted job
                self.commit()
                return
        raise DagobahError('no job with name %s exists' % job_name)

    def add_task_to_job(self, job_or_job_name, task_command, task_name=None):
        """ Add a task to a job owned by the Dagobah instance.

        job_or_job_name may be a Job instance or a job name.
        """
        if isinstance(job_or_job_name, Job):
            job = job_or_job_name
        else:
            job = self.get_job(job_or_job_name)

        if not job:
            raise DagobahError('job %s does not exist' % job_or_job_name)

        if not job.state.allow_change_graph:
            raise DagobahError("job's graph is immutable in its current "
                               "state: %s" % job.state.status)

        job.add_task(task_command, task_name)
        job.commit()

    def _name_is_available(self, job_name):
        """ Returns True if no job with this name exists yet. """
        return not any(job.name == job_name for job in self.jobs)

    def _serialize(self, include_run_logs=False):
        """ Serialize a representation of this Dagobah object to JSON. """
        return {'dagobah_id': self.dagobah_id,
                'created_jobs': self.created_jobs,
                'jobs': [job._serialize(include_run_logs=include_run_logs)
                         for job in self.jobs]}