def add_job_to_queue(id):
    """Instantiate the engine class for saved job *id* and enqueue it.

    Raises:
        ValueError: if no SavedJobs row exists for *id*.
    """
    with LocalSession() as session:
        job = session.query(SavedJobs).filter_by(id=id).first()
        # Fail with a clear message instead of an AttributeError on None
        # when the id is unknown.
        if job is None:
            raise ValueError(f"No saved job with id {id}")
        job_class = plugin_tools.get_engine_class(
            job.engine_name, job.engine_class)
        process_object = job_class()
        job_object = job_tools.JobObject(
            name=job.name, process=process_object, engine=job.engine_name)
        job_queue.add(job=job_object)
def add_backup_set(data):
    """Create a BackupSet row plus one BackupObject row per file entry.

    Args:
        data: dict with 'name', 'type', 'source' and
            'backup_object_data' -> {'file_list': JSON string holding
            'file_list' and 'state'}.

    Raises:
        Exception: for unsupported backup set types.
    """
    with LocalSession() as session:
        if data['type'] == BackupSetTypes.BS_TYPE_FILESFOLDERS:
            json_object = json.loads(data['backup_object_data']['file_list'])
            backup_object_list = json_object['file_list']
            display_state = json.dumps(json_object['state'])
        else:
            raise Exception(f"Unsupported backup set {data['type']}")
        backup_set = BackupSet(
            name=data['name'],
            type=data['type'],
            source=data['source'],
            data=display_state)
        # Removed stray debug print of display_state.
        session.add(backup_set)
        # Commit so backup_set.id is populated for the child rows below.
        session.commit()
        # Add all children first, then commit once — the original
        # committed once per BackupObject.
        session.add_all(
            BackupObject(data=backup_object, backup_set_id=backup_set.id)
            for backup_object in backup_object_list)
        session.commit()
def get_backup_set_info(id):
    """Return (info_dict, item_data_list) for backup set *id*.

    Falls back to a dict of "UNDEFINED" placeholders when the set does
    not exist; item_data_list is the raw `data` of each BackupObject.
    """
    with LocalSession() as session:
        backup_set = session.query(BackupSet).filter_by(id=id).first()
        set_item_list = session.query(BackupObject).filter_by(backup_set_id=id)
        set_item_list_data = [item.data for item in set_item_list]
        if backup_set:
            info_dict = dict(
                id=backup_set.id,
                name=backup_set.name,
                source=backup_set.source,
                type_name=BackupSetList.BACKUP_SETS[backup_set.type],
                data=backup_set.data,
                type=backup_set.type,
                time_added=backup_set.time_added)
        else:
            info_dict = dict(
                id="UNDEFINED",
                name="UNDEFINED",
                source="UNDEFINED",
                type_name="UNDEFINED",
                # Bug fix: the placeholder dict was missing the 'data'
                # key that the found-branch provides.
                data="UNDEFINED",
                type="UNDEFINED",
                time_added="UNDEFINED")
    return info_dict, set_item_list_data
def get_engine_repositories(engine_name):
    """Return [(id, name), ...] for every repository using *engine_name*."""
    with LocalSession() as session:
        rows = session.query(Repository).filter_by(engine=engine_name)
        return [(row.id, row.name) for row in rows]
def set_location_type(id, info):
    """Update name/subtype/description of PhysicalLocationType *id*."""
    with LocalSession() as session:
        location_type = session.query(
            PhysicalLocationType).filter_by(id=id).first()
        if location_type is not None:
            location_type.name = info.get('name')
            location_type.type = info.get('subtype')
            location_type.description = info.get('description')
            session.commit()
def update_job_times(id, info):
    """Update run timestamps for SavedJobs *id* from *info*.

    Only keys present (and truthy) in *info* are applied:
    'last_attempted_run' and 'last_successful_run'.
    """
    with LocalSession() as session:
        job = session.query(SavedJobs).filter_by(id=id).first()
        # Guard: an unknown id used to raise AttributeError on None.
        if job is None:
            return
        if info.get('last_attempted_run'):
            job.last_attempted_run = info['last_attempted_run']
        if info.get('last_successful_run'):
            job.last_successful_run = info['last_successful_run']
        session.commit()
def validate_ub_name(self, ub_name):
    """Form validator: reject a repository name already used by a
    different repository (same id means we're editing that row)."""
    with LocalSession() as session:
        existing = session.query(Repository).filter_by(
            name=ub_name.data).first()
        duplicate = (
            existing is not None
            and existing.id != int(self.repository_id.data)
        )
        if duplicate:
            raise ValidationError(
                f"Location with name {ub_name.data} already exists. Please pick a different name."
            )
def validate_name(self, name):
    # Form validator for PhysicalLocation names: rejects the submitted
    # name when a different location already uses it.
    #
    # NOTE(review): the duplicate check compares location.id against
    # self.location_type_id.data — presumably that field holds the id of
    # the location being edited, but the name suggests a *type* id.
    # Verify this is not a copy/paste slip (cf. validate_ub_name, which
    # compares against repository_id).
    with LocalSession() as session:
        location = session.query(PhysicalLocation).filter_by(
            name=name.data).first()
        if location and location.id != int(self.location_type_id.data):
            raise ValidationError(
                f"Location with name {name.data} already exists. Please pick a different name."
            )
def get_backup_sets_tuple():
    """Return [(id, name), ...] for all backup sets (e.g. form choices)."""
    with LocalSession() as session:
        return [(bs.id, bs.name) for bs in session.query(BackupSet)]
def set_location_info(id, info):
    """Overwrite the editable fields of PhysicalLocation *id* from *info*."""
    with LocalSession() as session:
        location = session.query(PhysicalLocation).filter_by(id=id).first()
        if location is not None:
            for attr in ('name', 'address', 'type', 'concurrent_jobs'):
                setattr(location, attr, info.get(attr))
            session.commit()
def get_jobs(type=None):
    """Return all JobHistory rows, optionally filtered by *type*."""
    with LocalSession() as session:
        query = session.query(JobHistory)
        if type:
            query = query.filter_by(type=type)
        return list(query)
def add_repository(info):
    """Persist a new Repository row described by the *info* dict."""
    with LocalSession() as session:
        new_repository = Repository(
            name=info['name'],
            description=info.get('description'),
            data=info['data'],
            engine=info['engine'],
            physical_location_id=info['physical_location_id'],
        )
        session.add(new_repository)
        session.commit()
def add_location_type(info):
    """Persist a new PhysicalLocationType built from the *info* dict."""
    with LocalSession() as session:
        new_type = PhysicalLocationType(
            name=info['name'],
            subtype=info['subtype'],
            description=info.get('description'),
        )
        session.add(new_type)
        session.commit()
def add_job(info):
    # Persist a SavedJobs row with params stored inline on the row.
    #
    # NOTE(review): a second `def add_job(info)` appears later in this
    # module (using 'engine_name'/'engine_class' keys and separate
    # JobParameter rows). At import time the later definition shadows
    # this one, making this version dead code — confirm which is
    # intended and remove the other. Also note the hyphenated keys
    # ('engine-name'/'engine-class') differ from the later version.
    with LocalSession() as session:
        jobs = SavedJobs(
            name=info['name'],
            notes=info.get('notes'),
            engine_name=info['engine-name'],
            engine_class=info['engine-class'],
            params=info.get('params')
        )
        session.add(jobs)
        session.commit()
def get_location_type(id):
    """Return a dict describing PhysicalLocationType *id*, or None
    when no such row exists."""
    return_dict = None
    with LocalSession() as session:
        row = session.query(PhysicalLocationType).filter_by(id=id).first()
        if row is not None:
            return_dict = {
                'id': row.id,
                'name': row.name,
                'subtype': row.subtype,
                'description': row.description,
            }
    return return_dict
def get_info(id):
    """Return a dict describing Repository *id*.

    Returns:
        dict with the repository's fields, including its physical
        location's name; an empty dict when the id is unknown (the
        original crashed with AttributeError on None in that case —
        the `info_dict = {}` initializer shows the intended fallback).
    """
    info_dict = {}
    with LocalSession() as session:
        repository = session.query(Repository).filter_by(id=id).first()
        if repository is not None:
            info_dict = dict(
                name=repository.name,
                description=repository.description,
                data=repository.data,
                engine=repository.engine,
                physical_location_id=repository.physical_location_id,
                physical_location_name=repository.physical_location.name)
    return info_dict
def get_location_types():
    """Return every PhysicalLocationType as a dict, sorted by name
    in descending order."""
    with LocalSession() as session:
        ordered = session.query(PhysicalLocationType).order_by(
            PhysicalLocationType.name.desc())
        return [
            {
                'id': t.id,
                'name': t.name,
                'subtype': t.subtype,
                'description': t.description,
            }
            for t in ordered
        ]
def add_location(info):
    """Persist a new PhysicalLocation built from the *info* dict."""
    fields = ('name', 'address', 'type', 'concurrent_jobs')
    new_location = PhysicalLocation(
        **{field: info.get(field) for field in fields})
    with LocalSession() as session:
        session.add(new_location)
        session.commit()
def get_backup_sets():
    """Return [{'id', 'name', 'type'}, ...] for every BackupSet, with
    'type' resolved to its display name via BackupSetList.BACKUP_SETS."""
    with LocalSession() as session:
        return [
            {
                'id': bs.id,
                'name': bs.name,
                'type': BackupSetList.BACKUP_SETS[bs.type],
            }
            for bs in session.query(BackupSet)
        ]
def add_job(info):
    """Persist a SavedJobs row plus one JobParameter row per entry in
    info['params'].

    Args:
        info: dict with 'name', 'engine_name', 'engine_class' and
            optional 'notes' and 'params' (mapping of name -> value).
    """
    with LocalSession() as session:
        job = SavedJobs(
            name=info['name'],
            notes=info.get('notes'),
            engine_name=info['engine_name'],
            engine_class=info['engine_class'])
        session.add(job)
        # Commit so job.id is assigned before creating parameter rows.
        session.commit()
        # Bug fix: info.get('params') returns None when 'params' is
        # absent, which crashed on .items(); default to an empty map.
        # Also commit once after the loop instead of per parameter.
        for key, value in (info.get('params') or {}).items():
            session.add(JobParameter(
                param_name=key, param_value=value, job_id=job.id))
        session.commit()
def get_job_info(id):
    """Return a dict describing SavedJobs *id*, or None when not found.

    Bug fix: the original raised UnboundLocalError for an unknown id,
    because info_dict was assigned only inside the `if job:` branch.
    """
    info_dict = None
    with LocalSession() as session:
        job = session.query(SavedJobs).filter_by(id=id).first()
        if job:
            info_dict = dict(
                name=job.name,
                notes=job.notes,
                engine_name=job.engine_name,
                engine_class=job.engine_class,
                params=job.params,
                last_attempted_run=job.last_attempted_run,
                last_successful_run=job.last_successful_run,
                time_added=job.time_added)
    return info_dict
def get_location_status(id):
    """Return 'Online'/'Offline' for localhost locations — based on
    whether the stored address is an existing directory — and
    'Unknown Type' for anything else (including unknown ids)."""
    address = None
    location_type = None
    with LocalSession() as session:
        physical_location = session.query(
            PhysicalLocation).filter_by(id=id).first()
        if physical_location:
            address = physical_location.address
            # Relationship access must happen while the session is open.
            location_type = physical_location.physical_location_type.name
    if location_type != 'localhost':
        return 'Unknown Type'
    reachable = os.path.exists(address) and os.path.isdir(address)
    return 'Online' if reachable else 'Offline'
def get_physical_locations(get_status=False):
    """Return all PhysicalLocation rows as dicts.

    Args:
        get_status: when True, also resolve each location's status via
            get_location_status() (one extra lookup per row).
    """
    with LocalSession() as session:
        physical_locations = session.query(PhysicalLocation)
        return_list = []
        for location in physical_locations:
            status = get_location_status(location.id) if get_status else ''
            return_list.append(
                dict(
                    id=location.id,
                    name=location.name,
                    address=location.address,
                    type=location.type,
                    # 'concurent_jobs' (sic) is kept for any existing
                    # consumer of the misspelled key; the correctly
                    # spelled key is now provided alongside it.
                    concurent_jobs=location.concurrent_jobs,
                    concurrent_jobs=location.concurrent_jobs,
                    status=status
                )
            )
    return return_list
def get_location_info(id):
    """Return a dict describing PhysicalLocation *id*, or a dict of
    UNDEFINED placeholders (id=-1) when no such row exists."""
    with LocalSession() as session:
        location = session.query(PhysicalLocation).filter_by(id=id).first()
        if location is None:
            return dict(
                id=-1,
                name="UNDEFINED",
                address="UNDEFINED",
                type="UNDEFINED",
                concurrent_jobs=0)
        return dict(
            id=location.id,
            name=location.name,
            address=location.address,
            type=location.type,
            concurrent_jobs=location.concurrent_jobs)
def init_physical_location(name, address, type, concurrent_jobs=1):
    """Create the UB working folder under *address* and register the
    location in the database.

    Args:
        name: display name for the location.
        address: base directory; the UB folder is created inside it.
        type: location type value stored on the row.
        concurrent_jobs: max jobs allowed to run here at once.

    Raises:
        Exception: when the UB path exists but is a file, or the
            folder cannot be created.
    """
    # The location where the ub folder will go.
    ub_directory = os.path.join(address, PL.UB_FOLDER_NAME)
    # We check if a folder already exists for ub usage.
    if os.path.exists(ub_directory):
        # A pre-existing *file* with the reserved name is unrecoverable.
        if not os.path.isdir(ub_directory):
            raise Exception(
                f"A file named {PL.UB_FOLDER_NAME} "
                "exists, but it's not a folder.")
    # Create a new folder in case one does not already exist.
    else:
        try:
            os.mkdir(ub_directory)
        # Narrowed from bare `except Exception`; os.mkdir raises OSError.
        except OSError as exc:
            # Chain the cause so the original error is not lost.
            raise Exception("Failed to initialize UB directory") from exc
    physical_location = PhysicalLocation(
        name=name,
        type=type,
        address=ub_directory,
        concurrent_jobs=concurrent_jobs)
    with LocalSession() as session:
        session.add(physical_location)
        session.commit()
def delete_repositories(ids):
    """Delete every Repository whose id is in *ids*."""
    with LocalSession() as session:
        for repository_id in ids:
            session.query(Repository).filter_by(id=repository_id).delete()
        session.commit()
def delete_backup_sets(ids):
    """Delete the given BackupSet rows and their BackupObject children."""
    with LocalSession() as session:
        for set_id in ids:
            session.query(BackupSet).filter_by(id=set_id).delete()
            session.query(BackupObject).filter_by(
                backup_set_id=set_id).delete()
        session.commit()
def delete_backup_set(id):
    """Delete BackupSet *id* together with its BackupObject rows.

    Consistency fix: delete_backup_sets() removes the child
    BackupObject rows, but this single-id variant left them orphaned.
    """
    with LocalSession() as session:
        session.query(BackupSet).filter_by(id=id).delete()
        session.query(BackupObject).filter_by(backup_set_id=id).delete()
        session.commit()
def init_repository(engine, name):
    # TODO: unimplemented stub — opens a session and does nothing.
    # Presumably intended to create/register a repository for *engine*
    # named *name*; confirm intent before relying on it.
    with LocalSession() as session:
        pass
def delete_jobs(ids):
    # Delete the JobHistory rows whose ids are in *ids*.
    #
    # NOTE(review): despite the name, this deletes from JobHistory, not
    # SavedJobs, and JobParameter rows (created by add_job) are never
    # removed here. Confirm whether saved jobs and their parameters
    # should also be deleted by this function.
    with LocalSession() as session:
        for id in ids:
            session.query(JobHistory).filter_by(id=id).delete()
        session.commit()