def get_engine_repositories():
    """Return (id, name) tuples for every repository in the local database."""
    with LocalSession() as session:
        return [(repo.id, repo.name) for repo in session.query(Repository)]
def insert_snapshots(items, repo_id):
    """Insert snapshot metadata rows for repository *repo_id*.

    Args:
        items: list of snapshot dicts as produced by the repository backend
            (keys 'id', 'short_id', 'time', 'hostname', 'username', 'tree',
            'paths', 'tags'). NOTE: each dict is mutated in place — the keys
            'id'/'short_id'/'time' are renamed to
            'snap_id'/'snap_short_id'/'snap_time'.
        repo_id: primary key of the owning Repository row.
    """
    with LocalSession() as session:
        for item in items:
            item['snap_id'] = item.pop('id')
            item['snap_short_id'] = item.pop('short_id')
            item['snap_time'] = item.pop('time')
            if item['snap_time']:
                # Drop the single character sitting just before the 6-char
                # UTC offset ("+HH:MM") — presumably trimming an excess
                # sub-second digit so the timestamp parses; TODO confirm
                # against the backend's actual timestamp format.
                main_time = item['snap_time'][:-7]
                extra = item['snap_time'][-6:]
                main_time = main_time + extra
                # item['snap_time'] = datetime.strptime(main_time, "%Y-%m-%dT%H:%M:%S.%f%z")
                item['snap_time'] = parser.parse(main_time)
            new_snapshot = Snapshot(
                snap_id=item.get('snap_id'),
                snap_short_id=item.get('snap_short_id'),
                snap_time=item.get('snap_time'),
                hostname=item.get('hostname'),
                username=item.get('username'),
                tree=item.get('tree'),
                repository_id=repo_id,
                # paths/tags are stored as JSON strings, not relations.
                paths=json.dumps(item.get('paths')),
                tags=json.dumps(item.get('tags'))
            )
            session.add(new_snapshot)
        session.commit()
def update_repository(info, repo_id, sync_db=False, unsync_db=False):
    """Update a Repository row from *info* and optionally queue follow-up jobs.

    Args:
        info: dict of new field values ('name', 'address', 'cache_repo',
            'concurrent_uses', 'timeout', 'parameters', optional 'description').
        repo_id: id of the Repository row to update.
        sync_db: when True, queue a full repository-sync job after saving.
        unsync_db: when True, queue a job that clears cached snapshot objects.

    Returns:
        repo_id, unchanged.
    """
    with LocalSession() as session:
        repository = session.query(Repository).filter_by(id=repo_id).first()
        if repository.name != info['name']:
            # Keep the credential store's service id in step with the rename.
            credential_manager.set_service_id(repository.credential_group_id, info['name'])
            repository.name = info['name']
        repository.description = info.get('description')
        repository.address = info['address']
        repository.cache_repo = info['cache_repo']
        repository.concurrent_uses = info['concurrent_uses']
        repository.timeout = info['timeout']
        # Parameters are persisted as a JSON string.
        repository.parameters = json.dumps(info['parameters'])
        session.commit()
        # Imported here rather than at module top — presumably to avoid a
        # circular import with resticweb.tools.job_build; TODO confirm.
        from resticweb.tools.job_build import JobBuilder
        if sync_db:
            job_builder = JobBuilder(job_name=f"Sync repo {repository.name}",
                                     job_class='repository_sync',
                                     parameters=dict(repository=repository.id, sync_type='full'))
            job_builder.run_job()
        if unsync_db:
            '''
            for snapshot in repository.snapshots:
                snapshot.snapshot_objects = []
            session.commit()
            '''
            job_builder = JobBuilder(job_name=f'Clear db from repo {repository.name}',
                                     job_class='clear_snapshot_objects',
                                     parameters=dict(repo_id=repository.id))
            job_builder.run_job()
    return repo_id
def get_repository_password(id):
    """Return the stored password for repository *id*, or None when unknown."""
    with LocalSession() as session:
        repo = session.query(Repository).filter_by(id=id).first()
        if repo is None:
            return None
        return credential_manager.get_credential(repo.credential_group_id, "repo_password")
def add_backup_set(data):
    """Create a BackupSet plus one BackupObject row per backed-up path.

    Args:
        data: dict with 'name', 'type', 'source', 'concurrent_uses',
            'timeout' and, for file/folder sets, 'backup_object_data' whose
            'file_data' value is a JSON string holding 'file_list' and 'state'.

    Raises:
        Exception: when data['type'] is not a supported backup-set type.
    """
    with LocalSession() as session:
        if data['type'] == BackupSetTypes.BS_TYPE_FILESFOLDERS:
            json_object = json.loads(data['backup_object_data']['file_data'])
            backup_object_list = json_object['file_list']
            # The UI's display state is persisted back as a JSON string.
            display_state = json.dumps(json_object['state'])
        else:
            raise Exception(f"Unsupported backup set {data['type']}")
        backup_set = (BackupSet(name=data['name'],
                                type=data['type'],
                                source=data['source'],
                                concurrent_uses=data['concurrent_uses'],
                                timeout=data['timeout'],
                                data=display_state))
        session.add(backup_set)
        # Commit now so backup_set.id is populated for the child rows below.
        session.commit()
        if platform.system() == 'Windows':
            remove_trailing_slash = True
        else:
            remove_trailing_slash = False
        for backup_object in backup_object_list:
            if remove_trailing_slash:
                # NOTE(review): despite the flag's name, this drops the
                # character at index 1 of the path (e.g. the ':' in 'C:\...'),
                # not a trailing slash — confirm this is intentional.
                backup_object = backup_object[:1] + backup_object[2:]
            new_backup_object = BackupObject(data=backup_object,
                                             backup_set_id=backup_set.id)
            session.add(new_backup_object)
        session.commit()
def get_backup_set_info(id, include_backup_objects=True):
    """Return an info dict describing backup set *id*.

    Args:
        id: backup set primary key.
        include_backup_objects: when True, also collect the raw data payload
            of every BackupObject in the set.

    Returns:
        info_dict, or (info_dict, object_data_list) when
        include_backup_objects is True. When the set does not exist, a
        placeholder dict of "UNDEFINED" values is returned instead.
    """
    with LocalSession() as session:
        backup_set = session.query(BackupSet).filter_by(id=id).first()
        if include_backup_objects:
            set_items = session.query(BackupObject).filter_by(backup_set_id=id)
            set_item_list_data = [item.data for item in set_items]
        if backup_set:
            info_dict = dict(
                id=backup_set.id,
                name=backup_set.name,
                source=backup_set.source,
                type_name=BackupSetList.BACKUP_SETS[backup_set.type],
                data=backup_set.data,
                type=backup_set.type,
                time_added=backup_set.time_added,
                concurrent_uses=backup_set.concurrent_uses,
                timeout=backup_set.timeout)
        else:
            # Bug fix: the placeholder previously omitted the 'data' key that
            # the found-branch provides, so callers indexing info_dict['data']
            # crashed on missing sets. Keep both shapes consistent.
            info_dict = dict(
                id="UNDEFINED", name="UNDEFINED", source="UNDEFINED",
                type_name="UNDEFINED", data="UNDEFINED", type="UNDEFINED",
                time_added="UNDEFINED", concurrent_uses=0, timeout=0)
        if include_backup_objects:
            return info_dict, set_item_list_data
        return info_dict
def get_repository_address(id):
    """Return the address of repository *id*, or None when no such repository."""
    with LocalSession() as session:
        repo = session.query(Repository).filter_by(id=id).first()
        return repo.address if repo else None
def get_backup_sets_tuple():
    """Return (id, name) pairs for all backup sets."""
    with LocalSession() as session:
        return [(bs.id, bs.name) for bs in session.query(BackupSet)]
def get_snapshot_objects(snap_id, use_cache=False):
    """Return the object listing of a snapshot.

    When the repository is reachable (and use_cache is False) the listing is
    read live from the repository; otherwise the cached SnapshotObject rows
    are returned as plain dicts.

    Args:
        snap_id: full snapshot id (Snapshot.snap_id).
        use_cache: force the cached database copy even when online.
    """
    with LocalSession() as session:
        snapshot = session.query(Snapshot).filter_by(snap_id=snap_id).first()
        repository_interface = get_formatted_repository_interface_from_id(
            snapshot.repository_id)
        # Cleanup: dropped an unused Repository lookup that only served
        # commented-out sync code, and the redundant nested LocalSession —
        # the outer session is reused for the cache branch.
        if not use_cache and repository_interface.is_online():
            # Live listing straight from the repository.
            return repository_interface.get_snapshot_ls(snap_id)
        snapshot_object_list = session.query(SnapshotObject).filter_by(
            snapshot_id=snap_id).all()
        return [snapshot_object.to_dict() for snapshot_object in snapshot_object_list]
def update_job_times(id, info):
    """Update a saved job's last-run timestamps from *info* (only keys present)."""
    with LocalSession() as session:
        job = session.query(SavedJobs).filter_by(id=id).first()
        attempted = info.get('last_attempted_run')
        if attempted:
            job.last_attempted_run = attempted
        successful = info.get('last_successful_run')
        if successful:
            job.last_successful_run = successful
        session.commit()
def validate_name(self, name):
    """WTForms hook: reject a saved-job name that is already taken."""
    with LocalSession() as session:
        existing = session.query(SavedJobs).filter_by(name=name.data).first()
    if existing is not None:
        raise ValidationError(
            f"There already exists a job with name '{name.data}'. Please pick a different name."
        )
def validate_name(self, name):
    """WTForms hook: repository-type names must be unique."""
    with LocalSession() as session:
        existing = session.query(RepositoryType).filter_by(name=name.data).first()
    if existing is not None:
        raise ValidationError(
            f"Repository type with name {name.data} already exists. Please pick a different name."
        )
def validate_name(self, name):
    """WTForms hook: repository names must be unique, except the repo being edited."""
    with LocalSession() as session:
        existing = session.query(Repository).filter_by(name=name.data).first()
        if existing is not None and existing.id != int(self.repository_id.data):
            raise ValidationError(
                f"Repository with name {name.data} already exists. Please pick a different name."
            )
def validate_name(self, name):
    """WTForms hook: saved-job names must be unique, except the job being edited.

    Raises:
        ValidationError: when another saved job already uses this name.
    """
    with LocalSession() as session:
        saved_job = session.query(SavedJobs).filter_by(
            name=name.data).first()
        if saved_job and saved_job.id != int(self.saved_job_id.data):
            # Bug fix: the message previously said "Backup set" even though
            # this validator checks SavedJobs (job names).
            raise ValidationError(
                f"There already exists a job with name '{name.data}'. Please pick a different name."
            )
def get_backup_set_objects(id):
    """Return the raw data payload of every object in backup set *id*."""
    with LocalSession() as session:
        members = session.query(BackupObject).filter_by(backup_set_id=id).all()
        return [member.data for member in members]
def validate_name(self, name):
    """WTForms hook: schedule names must be unique, except the schedule being edited."""
    with LocalSession() as session:
        existing = session.query(Schedule).filter_by(name=name.data).first()
        if existing is not None and existing.id != int(self.schedule_id.data):
            raise ValidationError(
                f"There already exists a schedule with name '{name.data}'. Please pick a different name."
            )
def validate_name(self, name):
    """WTForms hook: backup-set names must be unique, except the set being edited."""
    with LocalSession() as session:
        existing = session.query(BackupSet).filter_by(name=name.data).first()
        if existing is not None and existing.id != int(self.backup_set_id.data):
            raise ValidationError(
                f"Backup set with name {name.data} already exists. Please pick a different name."
            )
def delete_record(id):
    """Delete the BackupRecord with the given id; failures are logged, not raised."""
    with LocalSession() as session:
        try:
            doomed = session.query(BackupRecord).filter_by(id=id).first()
            session.delete(doomed)
            session.commit()
        except Exception as e:
            logger.error(
                f"Failed to delete Backup Record with id {id}. Reason {e}")
def delete_backup_set(id):
    """Delete a backup set, first detaching job parameters that reference it."""
    with LocalSession() as session:
        doomed = session.query(BackupSet).filter_by(id=id).first()
        # Null out any job parameter pointing at this set so jobs don't keep
        # a dangling reference after the delete.
        referencing = session.query(JobParameter).filter_by(
            param_name='backup_set', param_value=doomed.id)
        for parameter in referencing:
            parameter.param_value = None
        session.delete(doomed)
        session.commit()
def get_snapshot(repo_id, snapshot_id, use_cache=False):
    """Fetch one snapshot — live from the repository when possible, else from the DB cache."""
    repository_interface = get_formatted_repository_interface_from_id(repo_id)
    if use_cache or not repository_interface.is_online():
        with LocalSession() as session:
            return session.query(Snapshot).filter_by(
                repository_id=repo_id, snap_short_id=snapshot_id).first()
    snapshot = repository_interface.get_snapshots(snapshot_id)[0]
    return snapshot if snapshot else {}
def set_repository_type(id, info):
    """Overwrite the editable fields of a RepositoryType from *info*."""
    with LocalSession() as session:
        # Avoid shadowing the builtin `type` used in the original.
        repo_type = session.query(RepositoryType).filter_by(id=id).first()
        if repo_type is None:
            return
        repo_type.name = info.get('name')
        repo_type.type = info.get('type')
        repo_type.internal_binding = info.get('internal_binding')
        repo_type.description = info.get('description')
        session.commit()
def update_schedule(info_dict):
    """Update a Schedule row from *info_dict*.

    Args:
        info_dict: dict with 'schedule_id', 'name', 'time_unit', 'time_at',
            'missed_timeout' and optional 'description'/'time_interval'.
    """
    with LocalSession() as session:
        # Bug fix: filter_by takes keyword arguments; the positional call
        # raised a TypeError at runtime.
        schedule = session.query(Schedule).filter_by(
            id=info_dict['schedule_id']).first()
        # Bug fix: every assignment ended with a trailing comma, so each
        # field was being set to a one-element tuple instead of the value.
        schedule.name = info_dict['name']
        schedule.description = info_dict.get('description')
        schedule.time_unit = info_dict['time_unit']
        schedule.time_interval = info_dict.get('time_interval')
        schedule.time_at = info_dict['time_at']
        schedule.missed_timeout = info_dict['missed_timeout']
        # Bug fix: changes were never committed, matching the other update
        # helpers in this module.
        session.commit()
def get_backup_sets():
    """Return a list of dicts (id, name, human-readable type) for every backup set."""
    with LocalSession() as session:
        return [
            dict(id=bs.id,
                 name=bs.name,
                 type=BackupSetList.BACKUP_SETS[bs.type])
            for bs in session.query(BackupSet)
        ]
def get_jobs(type=None):
    """Return JobHistory rows, optionally restricted to a given job type."""
    with LocalSession() as session:
        query = session.query(JobHistory)
        if type:
            query = query.filter_by(type=type)
        return list(query)
def get_snapshots(id, use_cache=False):
    """List snapshots for a repository — live when online unless use_cache is set."""
    repository_interface = get_formatted_repository_interface_from_id(id)
    if use_cache or not repository_interface.is_online():
        with LocalSession() as session:
            return session.query(Snapshot).filter_by(repository_id=id).all()
    live = repository_interface.get_snapshots()
    # NOTE: falls back to an empty dict (not list) when nothing is returned,
    # preserving the original behavior.
    return live if live else {}
def delete_record_by_snap_id(repository_id, snapshot_id):
    """Delete the BackupRecord for a repo/snapshot pair; failures are logged."""
    with LocalSession() as session:
        try:
            doomed = session.query(BackupRecord).filter_by(
                repository_id=repository_id,
                snapshot_id=snapshot_id).first()
            session.delete(doomed)
            session.commit()
        except Exception as e:
            logger.error(
                f"Failed to delete Backup Record with snapshot_id {snapshot_id}. Reason {e}"
            )
def delete_records_by_repo_id(repository_id):
    """Delete every BackupRecord belonging to *repository_id*.

    Failures are logged rather than raised, matching the other delete
    helpers in this module.
    """
    with LocalSession() as session:
        try:
            records = session.query(BackupRecord).filter_by(
                repository_id=repository_id).all()
            for record in records:
                session.delete(record)
            # Bug fix: the deletions were never committed (unlike the other
            # delete helpers), so the records silently survived.
            session.commit()
        except Exception as e:
            logger.error(
                f"Failed to delete all Backup Records with repository_id {repository_id}. Reason {e}"
            )
def add_repository_type(info):
    """Insert a new RepositoryType row built from *info*."""
    with LocalSession() as session:
        # Named to avoid shadowing the builtin `type`.
        new_type = RepositoryType(
            name=info['name'],
            type=info['type'],
            internal_binding=info['internal_binding'],
            description=info.get('description'),
        )
        session.add(new_type)
        session.commit()
def run(self):
    """Run VACUUM against the local database and report success or failure."""
    super().run()
    with LocalSession() as session:
        try:
            session.execute("VACUUM")
        except Exception as e:
            self.log(e)
            self.status('error')
            return
        self.log('Successfully vacuumed the database')
        self.status('success')
def get_job_info(id):
    """Return a dict of a saved job's fields, or None when the id is unknown."""
    with LocalSession() as session:
        job = session.query(SavedJobs).filter_by(id=id).first()
        if job is None:
            return None
        return dict(
            name=job.name,
            notes=job.notes,
            engine_class=job.engine_class,
            params=job.params,
            last_attempted_run=job.last_attempted_run,
            last_successful_run=job.last_successful_run,
            time_added=job.time_added,
        )