def create_job(self, **kwargs):
    '''
    Create and save a new job based on this job.

    Copies the allowed model fields onto a fresh instance of this job's
    class, applies any ``_eager_fields`` overrides directly, links the new
    job back to this one via the parent field, saves it, copies m2m
    relationships with the activity stream disabled, and records the
    launch-time prompts.

    :param kwargs: field overrides; keys outside the allowed field set are
        dropped with a warning. The special key ``_eager_fields`` is a dict
        of attributes set verbatim on the new instance before saving.
    :returns: the saved job instance.
    '''
    eager_fields = kwargs.pop('_eager_fields', None)
    job_class = self.__class__
    fields = self._get_job_field_names()
    unallowed_fields = set(kwargs.keys()) - set(fields)
    if unallowed_fields:
        # logger.warn is a deprecated alias; use logger.warning with lazy args.
        logger.warning('Fields %s are not allowed as overrides.', unallowed_fields)
        # BUG FIX: map() is lazy in Python 3, so the original
        # map(kwargs.pop, unallowed_fields) never ran and the disallowed
        # overrides were silently kept. Iterate explicitly to remove them.
        for field_name in unallowed_fields:
            kwargs.pop(field_name)
    job = copy_model_by_class(self, job_class, fields, kwargs)
    if eager_fields:
        for fd, val in eager_fields.items():
            setattr(job, fd, val)
    # Set the job back-link on the job so it knows its parent.
    parent_field_name = job_class._get_parent_field_name()
    setattr(job, parent_field_name, self)
    job.save()
    from cyborgbackup.main.signals import disable_activity_stream
    with disable_activity_stream():
        copy_m2m_relationships(self, job, fields, kwargs=kwargs)
    job.create_config_from_prompts(kwargs)
    return job
def create_prepare_repository(self, **kwargs):
    '''
    Create and save a new "prepare repository" job for this repository.

    Copies the allowed fields (``extra_vars``, ``job_type``) onto a new job
    instance, applies any ``_eager_fields`` overrides, links the job back to
    this repository, names it after the repository, saves it, and copies m2m
    relationships (none, for a prepare job) with the activity stream disabled.

    :param kwargs: field overrides; keys outside the allowed field set are
        dropped with a warning. The special key ``_eager_fields`` is a dict
        of attributes set verbatim on the new instance before saving.
    :returns: the saved prepare job instance.
    '''
    eager_fields = kwargs.pop('_eager_fields', None)
    job_class = self._get_job_class()
    fields = ('extra_vars', 'job_type')
    unallowed_fields = set(kwargs.keys()) - set(fields)
    if unallowed_fields:
        # logger.warn is a deprecated alias; use logger.warning with lazy args.
        logger.warning('Fields %s are not allowed as overrides.', unallowed_fields)
        # BUG FIX: map() is lazy in Python 3, so the original
        # map(kwargs.pop, unallowed_fields) never ran. Iterate explicitly
        # so disallowed overrides are really removed.
        for field_name in unallowed_fields:
            kwargs.pop(field_name)
    job = copy_model_by_class(self, job_class, fields, kwargs)
    if eager_fields:
        for fd, val in eager_fields.items():
            setattr(job, fd, val)
    # Set the job back-link on the job so it points at this repository.
    job.repository_id = self.pk
    job.name = "Prepare Repository {}".format(self.name)
    job.description = "Repository {} Borg Preparation".format(self.name)
    job.save()
    from cyborgbackup.main.signals import disable_activity_stream
    fields = ()  # a prepare job has no m2m fields to copy
    with disable_activity_stream():
        copy_m2m_relationships(self, job, fields, kwargs=kwargs)
    return job
def copy_job(self, limit=None):
    '''
    Return a saved copy of this unified job, including related fields.

    The copy is created for the purpose of relaunch: its launch_type is
    forced to 'relaunch' and, when given, the limit override is applied.
    '''
    cls = self.__class__
    parent_field_name = 'job'
    field_names = cls._get_job_field_names() | {parent_field_name}
    overrides = {"launch_type": "relaunch"}
    if limit:
        overrides["limit"] = limit
    new_job = copy_model_by_class(self, cls, field_names, {})
    for attr, value in overrides.items():
        setattr(new_job, attr, value)
    new_job.old_pk = self.pk
    new_job.save()
    # Labels are copied here as part of the m2m relationships.
    copy_m2m_relationships(self, new_job, field_names)
    return new_job
def create_restore_job(self, source_job, **kwargs):
    '''
    Create and save a restore job for this policy from a previous backup job.

    The new job restores the archive produced by ``source_job`` onto that
    job's client; it is created in 'waiting' status with launch_type
    'manual' and job_type 'restore'.

    :param source_job: the backup job whose client and archive_name the
        restore targets.
    :param kwargs: field overrides; keys outside the allowed field set
        (only ``extra_vars``) are dropped with a warning.
    :returns: the saved restore job instance.
    '''
    job_class = self._get_job_class()
    fields = ('extra_vars', )
    unallowed_fields = set(kwargs.keys()) - set(fields)
    if unallowed_fields:
        # logger.warn is a deprecated alias; use logger.warning with lazy args.
        logger.warning('Fields %s are not allowed as overrides.', unallowed_fields)
        # BUG FIX: map() is lazy in Python 3, so the original
        # map(kwargs.pop, unallowed_fields) never ran. Iterate explicitly
        # so disallowed overrides are really removed.
        for field_name in unallowed_fields:
            kwargs.pop(field_name)
    job = copy_model_by_class(self, job_class, fields, kwargs)
    job.launch_type = 'manual'
    job.job_type = 'restore'
    job.policy_id = self.pk
    job.client_id = source_job.client.pk
    job.archive_name = source_job.archive_name
    job.status = 'waiting'
    job.name = "Restore Job {} {}".format(self.name, source_job.client.hostname)
    job.description = "Restore Job for Policy {} of client {}".format(
        self.name, source_job.client.hostname)
    job.save()
    return job
def create_job(self, **kwargs):
    '''
    Create and save the backup jobs for every client of this policy.

    For each client a backup job is created, plus (depending on settings)
    a catalog job and a prune job, chained together through
    ``dependent_jobs`` so they run in sequence across clients. A summary
    notification task is dispatched before the jobs are created.

    :param kwargs: field overrides; keys outside the allowed field set
        (``extra_vars``, ``job_type``) are dropped with a warning.
    :returns: the first job of the chain (status set to 'new'), or None
        when the policy has no clients.
    '''
    def _setting_flag(key, default=True):
        # One Setting lookup -> bool; any failure falls back to `default`.
        try:
            return Setting.objects.get(key=key).value == 'True'
        except Exception:
            return default

    job_class = self._get_job_class()
    fields = ('extra_vars', 'job_type')
    unallowed_fields = set(kwargs.keys()) - set(fields)
    if unallowed_fields:
        # logger.warn is a deprecated alias; use logger.warning with lazy args.
        logger.warning('Fields %s are not allowed as overrides.', unallowed_fields)
        # BUG FIX: map() is lazy in Python 3, so the original
        # map(kwargs.pop, unallowed_fields) never ran. Iterate explicitly
        # so disallowed overrides are really removed.
        for field_name in unallowed_fields:
            kwargs.pop(field_name)
    catalog_enabled = _setting_flag('cyborgbackup_catalog_enabled')
    auto_prune_enabled = _setting_flag('cyborgbackup_auto_prune')
    app.send_task('cyborgbackup.main.tasks.cyborgbackup_notifier',
                  args=('summary', self.pk))
    # Pruning only makes sense when at least one keep_* retention is set.
    have_prune_info = (self.keep_hourly or self.keep_daily or
                       self.keep_weekly or self.keep_monthly or
                       self.keep_yearly)
    jobs = []
    previous_job = None
    for client in self.clients.all():
        job = copy_model_by_class(self, job_class, fields, kwargs)
        job.policy_id = self.pk
        job.client_id = client.pk
        job.status = 'waiting'
        job.name = "Backup Job {} {}".format(self.name, client.hostname)
        job.description = "Backup Job for Policy {} of client {}".format(
            self.name, client.hostname)
        job.save()
        if catalog_enabled:
            catalog_job = copy_model_by_class(self, job_class, fields, kwargs)
            catalog_job.policy_id = self.pk
            catalog_job.client_id = client.pk
            catalog_job.status = 'waiting'
            catalog_job.job_type = 'catalog'
            catalog_job.name = "Catalog Job {} {}".format(
                self.name, client.hostname)
            catalog_job.description = "Catalog Job for Policy {} of client {}".format(
                self.name, client.hostname)
            catalog_job.save()
            # The backup depends on its catalog job.
            job.dependent_jobs = catalog_job
            job.save()
        if auto_prune_enabled:
            if have_prune_info:
                prune_job = copy_model_by_class(self, job_class, fields, kwargs)
                prune_job.policy_id = self.pk
                prune_job.client_id = client.pk
                prune_job.status = 'waiting'
                prune_job.job_type = 'prune'
                prune_job.name = "Prune Job {} {}".format(
                    self.name, client.hostname)
                prune_job.description = "Prune Job for Policy {} of client {}".format(
                    self.name, client.hostname)
                prune_job.save()
                # The prune job is the deepest dependency of this client's chain.
                if catalog_enabled:
                    catalog_job.dependent_jobs = prune_job
                    catalog_job.save()
                else:
                    job.dependent_jobs = prune_job
                    job.save()
        # Chain this client's deepest job after the previous client's chain.
        # NOTE(review): when auto-prune is enabled but no keep_* retention is
        # configured, previous_job is left unchanged here (original behavior
        # preserved) — confirm that cross-client chaining gap is intended.
        if auto_prune_enabled:
            if have_prune_info:
                if previous_job:
                    previous_job.dependent_jobs = prune_job
                    previous_job.save()
                previous_job = prune_job
        elif catalog_enabled:
            if previous_job:
                previous_job.dependent_jobs = catalog_job
                previous_job.save()
            previous_job = catalog_job
        else:
            if previous_job:
                previous_job.dependent_jobs = job
                previous_job.save()
            previous_job = job
        jobs.append(job)
    # ROBUSTNESS FIX: a policy with no clients used to raise IndexError here.
    if not jobs:
        return None
    jobs[0].status = 'new'
    jobs[0].save()
    return jobs[0]