def set_up(self):
    self.user = User.objects.create_user('tethys_super', '*****@*****.**', 'pass')

    self.scheduler = CondorScheduler(
        name='test_scheduler',
        host='localhost',
        username='******',
        password='******',
    )
    self.scheduler.save()

    path = os.path.dirname(__file__)
    self.workspace_dir = os.path.join(path, 'workspace')

    self.condorjob = CondorJob(
        name='test condorbase',
        description='test_description',
        user=self.user,
        label='test_label',
        cluster_id='1',
        remote_id='test_machine',
        workspace=self.workspace_dir,
        scheduler=self.scheduler,
        condorpyjob_id='99',
        _attributes={'foo': 'bar'},
        _remote_input_files=['test_file1.txt', 'test_file2.txt'],
    )
    self.condorjob.save()

    self.id_val = TethysJob.objects.get(name='test condorbase').id
def set_up(self):
    self.condor_py = CondorPyJob(
        condorpyjob_id='99',
        _attributes={'foo': 'bar'},
        _remote_input_files=['test_file1.txt', 'test_file2.txt'],
    )
    self.condor_py.save()

    user = User.objects.create_user('tethys_super', '*****@*****.**', 'pass')

    scheduler = CondorScheduler(
        name='test_scheduler',
        host='localhost',
        username='******',
        password='******',
    )

    self.condorjob = CondorJob(
        name='test condorbase',
        description='test_description',
        user=user,
        label='test_label',
        workspace='test_workspace',
        scheduler=scheduler,
        condorpyjob_id='98',
    )
def set_up(self):
    test_models_dir = os.path.dirname(__file__)
    self.workspace_dir = os.path.join(test_models_dir, 'workspace')
    self.user = User.objects.create_user('tethys_super', '*****@*****.**', 'pass')

    files_dir = os.path.join(os.path.dirname(test_models_dir), 'files')
    self.private_key = os.path.join(files_dir, 'keys', 'testkey')
    self.private_key_pass = '******'

    self.scheduler = CondorScheduler(
        name='test_scheduler',
        host='localhost',
        username='******',
        password='******',
        private_key_path=self.private_key,
        private_key_pass=self.private_key_pass,
    )
    self.scheduler.save()

    self.condorworkflow = CondorWorkflow(
        _max_jobs={'foo': 10},
        _config='test_config',
        name='test name',
        workspace=self.workspace_dir,
        user=self.user,
        scheduler=self.scheduler,
    )
    self.condorworkflow.save()

    self.id_value = CondorWorkflow.objects.get(name='test name').condorpyworkflow_ptr_id
    self.condorpyworkflow = CondorPyWorkflow.objects.get(condorpyworkflow_id=self.id_value)

    self.condorworkflowjobnode_child = CondorWorkflowJobNode(
        name='Node_child',
        workflow=self.condorpyworkflow,
        _attributes={'test': 'one'},
        _num_jobs=1,
        _remote_input_files=['test1.txt'],
    )
    self.condorworkflowjobnode_child.save()

    self.condorworkflowjobnode = CondorWorkflowJobNode(
        name='Node_1',
        workflow=self.condorpyworkflow,
        _attributes={'test': 'one'},
        _num_jobs=1,
        _remote_input_files=['test1.txt'],
    )
    self.condorworkflowjobnode.save()

    # Django model many to many relationship add method
    self.condorworkflowjobnode.parent_nodes.add(self.condorworkflowjobnode_child)

    self.condorbase_id = CondorWorkflow.objects.get(name='test name').condorbase_ptr_id
    self.condorpyworkflow_id = CondorWorkflow.objects.get(name='test name').condorpyworkflow_ptr_id
def set_up(self):
    self.user = User.objects.create_user('tethys_super', '*****@*****.**', 'pass')

    self.scheduler = CondorScheduler(
        name='test_scheduler',
        host='localhost',
        username='******',
        password='******',
        private_key_path='test_path',
        private_key_pass='******',
    )
    self.scheduler.save()

    self.condorbase = CondorBase(
        name='test_condorbase',
        description='test_description',
        user=self.user,
        label='test_label',
        cluster_id='1',
        remote_id='test_machine',
        scheduler=self.scheduler,
    )
    self.condorbase.save()

    self.condorbase_exe = CondorBase(
        name='test_condorbase_exe',
        description='test_description',
        user=self.user,
        label='test_label',
        execute_time=timezone.now(),
        cluster_id='1',
        remote_id='test_machine',
        scheduler=self.scheduler,
    )
    self.condorbase_exe.save()
def set_up(self):
    self.tz = pytz_timezone('America/Denver')
    self.user = User.objects.create_user('tethys_super', '*****@*****.**', 'pass')

    self.scheduler = CondorScheduler(
        name='test_scheduler',
        host='localhost',
    )
    self.scheduler.save()

    self.tethysjob = TethysJob(
        name='test_tethysjob',
        description='test_description',
        user=self.user,
        label='test_label',
    )
    self.tethysjob.save()

    self.tethysjob_execute_time = TethysJob(
        name='test_tethysjob_execute_time',
        description='test_description',
        user=self.user,
        label='test_label',
        execute_time=datetime(year=2018, month=1, day=1, tzinfo=self.tz),
        completion_time=datetime(year=2018, month=1, day=1, hour=1, tzinfo=self.tz),
        _status='VAR',
        _process_results_function=test_function,
    )
    self.tethysjob_execute_time.save()
def condor_scheduler_create_command(args):
    load_apps()
    from tethys_compute.models.condor.condor_scheduler import CondorScheduler

    name = args.name
    host = args.endpoint
    username = args.username
    password = args.password
    private_key_path = args.private_key_path
    private_key_pass = args.private_key_pass

    existing_scheduler = CondorScheduler.objects.filter(name=name).first()
    if existing_scheduler:
        with pretty_output(FG_YELLOW) as p:
            p.write('A Condor Scheduler with name "{}" already exists. Command aborted.'.format(name))
        exit(0)

    scheduler = CondorScheduler(
        name=name,
        host=host,
        username=username,
        password=password,
        private_key_path=private_key_path,
        private_key_pass=private_key_pass,
    )
    scheduler.save()

    with pretty_output(FG_GREEN) as p:
        p.write('Condor Scheduler created successfully!')
    exit(0)
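For reference, the handler above can be driven directly with an argparse-style namespace; note that it reads the scheduler host from args.endpoint. A minimal sketch, assuming the handler and its module-level helpers (load_apps, pretty_output, FG_YELLOW, FG_GREEN) are importable; the attribute values below are illustrative assumptions, not values from the snippet:

from argparse import Namespace

# Illustrative values only; the handler maps args.endpoint to the scheduler's host field.
args = Namespace(
    name='my_scheduler',
    endpoint='condor.example.com',
    username='condoruser',
    password=None,
    private_key_path='/home/condoruser/.ssh/id_rsa',
    private_key_pass=None,
)
condor_scheduler_create_command(args)  # exits with status 0 after saving the scheduler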
def create_condor_scheduler(name, host, username=None, password=None, private_key_path=None,
                            private_key_pass=None):
    """
    Creates a new condor scheduler

    Args:
        name (str): The name of the scheduler
        host (str): The hostname or IP address of the scheduler
        username (str, optional): The username to use when connecting to the scheduler
        password (str, optional): The password for the username
        private_key_path (str, optional): The path to the location of the SSH private key file
        private_key_pass (str, optional): The passphrase for the private key

    Returns:
        The newly created condor scheduler

    Note:
        The newly created condor scheduler object is not committed to the database.
    """
    condor_scheduler = CondorScheduler(name, host, username=username, password=password,
                                       private_key_path=private_key_path,
                                       private_key_pass=private_key_pass)
    return condor_scheduler
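Because the function returns an unsaved model instance (per its docstring), the caller is responsible for committing it. A minimal usage sketch; the hostname, username, and key path are illustrative assumptions:

# Hypothetical values for illustration only.
scheduler = create_condor_scheduler(
    name='my_scheduler',
    host='condor.example.com',
    username='condoruser',
    private_key_path='/home/condoruser/.ssh/id_rsa',
)
scheduler.save()  # persist explicitly; create_condor_scheduler does not commit to the database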
def setUpClass(cls):
    cls.app_model = TethysApp(
        name='test_app_job_manager',
        package='test_app_job_manager',
    )
    cls.app_model.save()

    cls.user_model = User.objects.create_user(
        username='******',
        email='*****@*****.**',
        password='******',
    )

    cls.group_model = Group.objects.create(name='test_group_job_manager')
    cls.group_model.user_set.add(cls.user_model)

    cls.scheduler = CondorScheduler(
        name='test_scheduler',
        host='localhost',
    )
    cls.scheduler.save()

    cls.tethysjob = TethysJob(
        name='test_tethysjob',
        description='test_description',
        user=cls.user_model,
        label='test_app_job_manager',
    )
    cls.tethysjob.save()
    cls.tethysjob.groups.add(cls.group_model)
def set_up(self):
    path = os.path.dirname(__file__)
    self.workspace_dir = os.path.join(path, 'workspace')
    self.user = User.objects.create_user('tethys_super', '*****@*****.**', 'pass')

    self.scheduler = CondorScheduler(
        name='test_scheduler',
        host='localhost',
        username='******',
        password='******',
        private_key_path='test_path',
        private_key_pass='******',
    )
    self.scheduler.save()

    self.condorworkflow = CondorWorkflow(
        _max_jobs={'foo': 10},
        _config='test_config',
        name='test name',
        workspace=self.workspace_dir,
        user=self.user,
        scheduler=self.scheduler,
    )
    self.condorworkflow.save()

    self.id_value = CondorWorkflow.objects.get(name='test name').condorpyworkflow_ptr_id
    self.condorpyworkflow = CondorPyWorkflow.objects.get(condorpyworkflow_id=self.id_value)

    # One node can have many children nodes
    self.condorworkflowjobnode_child = CondorWorkflowJobNode(
        name='Job1',
        workflow=self.condorpyworkflow,
        _attributes={'test': 'one'},
        _num_jobs=1,
        _remote_input_files=['test1.txt'],
    )
    self.condorworkflowjobnode_child.save()

    # One node can have many children nodes
    self.condorworkflowjobnode_child2 = CondorWorkflowJobNode(
        name='Job2',
        workflow=self.condorpyworkflow,
        _attributes={'test': 'one'},
        _num_jobs=1,
        _remote_input_files=['test1.txt'],
    )
    self.condorworkflowjobnode_child2.save()

    self.condorworkflownode = CondorWorkflowNode(
        name='test_condorworkflownode',
        workflow=self.condorpyworkflow,
    )
    self.condorworkflownode.save()
def set_up(self):
    test_models_dir = os.path.dirname(__file__)
    self.workspace_dir = os.path.join(test_models_dir, 'workspace')
    files_dir = os.path.join(os.path.dirname(test_models_dir), 'files')
    self.private_key = os.path.join(files_dir, 'keys', 'testkey')
    self.private_key_pass = '******'

    self.user = User.objects.create_user('tethys_super', '*****@*****.**', 'pass')

    self.scheduler = CondorScheduler(
        name='test_scheduler',
        host='localhost',
        username='******',
        password='******',
        private_key_path=self.private_key,
        private_key_pass=self.private_key_pass,
    )
    self.scheduler.save()

    self.condorworkflow = CondorWorkflow(
        _max_jobs={'foo': 10},
        _config='test_config',
        name='test name',
        workspace=self.workspace_dir,
        user=self.user,
        scheduler=self.scheduler,
    )
    self.condorworkflow.save()

    self.id_value = CondorWorkflow.objects.get(name='test name').condorpyworkflow_ptr_id
    self.condorpyworkflow = CondorPyWorkflow.objects.get(condorpyworkflow_id=self.id_value)
    self.condorpyworkflow.condor_object = mock.MagicMock()

    self.condorworkflowjobnode_a = CondorWorkflowJobNode(
        name='Job1_a',
        workflow=self.condorpyworkflow,
        _attributes={'foo': 'one'},
        _num_jobs=1,
        _remote_input_files=['test1.txt'],
    )
    self.condorworkflowjobnode_a.save()

    self.condorworkflowjobnode_a1 = CondorWorkflowJobNode(
        name='Job1_a1',
        workflow=self.condorpyworkflow,
        _attributes={'foo': 'one'},
        _num_jobs=1,
        _remote_input_files=['test1.txt'],
    )
    self.condorworkflowjobnode_a1.save()