def test_get_system_envs(self):
    """get_system_envs() should emit the fixed pod/resource env entries
    followed by the ES/DB/KVSTORE entries taken from the environment."""
    env_guard = EnvironmentVarGuard()
    # Stage every environment variable the function is expected to pick up.
    overrides = [
        ('ES_HOST', 'test es host'),
        ('ES_PORT', '9200'),
        ('DB_HOST', 'test db host'),
        ('DB_PORT', '3306'),
        ('DB_DATABASE', 'fedlearner'),
        ('DB_USERNAME', 'username'),
        ('DB_PASSWORD', 'password'),
        ('KVSTORE_TYPE', 'mysql'),
    ]
    for key, value in overrides:
        env_guard.set(key, value)
    expected = ','.join([
        '{"name": "POD_IP", "valueFrom": {"fieldRef": {"fieldPath": "status.podIP"}}}',
        '{"name": "POD_NAME", "valueFrom": {"fieldRef": {"fieldPath": "metadata.name"}}}',
        '{"name": "CPU_REQUEST", "valueFrom": {"resourceFieldRef": {"resource": "requests.cpu"}}}',
        '{"name": "MEM_REQUEST", "valueFrom": {"resourceFieldRef": {"resource": "requests.memory"}}}',
        '{"name": "CPU_LIMIT", "valueFrom": {"resourceFieldRef": {"resource": "limits.cpu"}}}',
        '{"name": "MEM_LIMIT", "valueFrom": {"resourceFieldRef": {"resource": "limits.memory"}}}',
        '{"name": "ES_HOST", "value": "test es host"}',
        '{"name": "ES_PORT", "value": "9200"}',
        '{"name": "DB_HOST", "value": "test db host"}',
        '{"name": "DB_PORT", "value": "3306"}',
        '{"name": "DB_DATABASE", "value": "fedlearner"}',
        '{"name": "DB_USERNAME", "value": "username"}',
        '{"name": "DB_PASSWORD", "value": "password"}',
        '{"name": "KVSTORE_TYPE", "value": "mysql"}',
    ])
    with env_guard:
        self.assertEqual(get_system_envs(), expected)
def generate_job_run_yaml(job):
    """Render the job's YAML template into a parsed object.

    Builds three template contexts — ``system`` (basic env vars),
    ``workflow`` (the job's workflow, its variables, and every sibling
    job keyed by config name) and ``project`` (project dict plus
    flattened per-participant egress settings) — then formats the
    template and parses the result as JSON.

    Args:
        job: project job object carrying ``workflow``, ``project`` and
            ``yaml_template`` attributes.

    Returns:
        The parsed (JSON-loaded) rendered template.
    """
    system_dict = {'basic_envs': get_system_envs()}

    workflow = job.workflow.to_dict()
    workflow['variables'] = _make_variables_dict(
        job.workflow.get_config().variables)
    workflow['jobs'] = {}
    for sibling in job.workflow.get_jobs():
        # Compute variables first, then attach them to the serialized job.
        sibling_vars = _make_variables_dict(sibling.get_config().variables)
        sibling_dict = sibling.to_dict()
        sibling_dict['variables'] = sibling_vars
        workflow['jobs'][sibling.get_config().name] = sibling_dict

    project = job.project.to_dict()
    project['variables'] = _make_variables_dict(
        job.project.get_config().variables)
    # Expose each participant's egress settings under a flat
    # 'participants[i]' key so the template can index them directly.
    for index, participant in enumerate(project['config']['participants']):
        project[f'participants[{index}]'] = {
            'egress_domain': participant['domain_name'],
            'egress_host': participant['grpc_spec']['authority'],
        }

    rendered = format_yaml(job.yaml_template,
                           workflow=workflow,
                           project=project,
                           system=system_dict)
    return json.loads(rendered)
def test_get_available_envs(self):
    """Without any env overrides, only the fixed pod/resource
    entries should be emitted by get_system_envs()."""
    expected = ','.join([
        '{"name": "POD_IP", "valueFrom": {"fieldRef": {"fieldPath": "status.podIP"}}}',
        '{"name": "POD_NAME", "valueFrom": {"fieldRef": {"fieldPath": "metadata.name"}}}',
        '{"name": "CPU_REQUEST", "valueFrom": {"resourceFieldRef": {"resource": "requests.cpu"}}}',
        '{"name": "MEM_REQUEST", "valueFrom": {"resourceFieldRef": {"resource": "requests.memory"}}}',
        '{"name": "CPU_LIMIT", "valueFrom": {"resourceFieldRef": {"resource": "limits.cpu"}}}',
        '{"name": "MEM_LIMIT", "valueFrom": {"resourceFieldRef": {"resource": "limits.memory"}}}',
    ])
    self.assertEqual(get_system_envs(), expected)
def _schedule_job(self, job_id):
    """Attempt to launch the WAITING job ``job_id`` on Kubernetes.

    Bails out (returning the current state) when the job is not
    WAITING or when any upstream dependency has not completed.
    Otherwise renders the job's YAML template with workflow/project/
    system contexts, submits it as an FLApp custom object, marks the
    job started and commits.

    Args:
        job_id: primary key of the Job row to schedule.

    Returns:
        The job's state after this scheduling attempt.
    """
    # NOTE(review): `assert` is stripped under `python -O`; these act as
    # internal invariants rather than input validation — confirm intent.
    job = Job.query.get(job_id)
    assert job is not None, 'Job %d not found' % job_id
    if job.state != JobState.WAITING:
        return job.state

    # Every upstream dependency must be complete before we may start.
    deps = JobDependency.query.filter(
        JobDependency.dst_job_id == job.id).all()
    for dep in deps:
        src_job = Job.query.get(dep.src_job_id)
        assert src_job is not None, 'Job %d not found' % dep.src_job_id
        if not src_job.is_complete():
            return job.state

    k8s_client = get_client()
    system_dict = {'basic_envs': get_system_envs()}

    workflow = job.workflow.to_dict()
    workflow['variables'] = self._make_variables_dict(
        job.workflow.get_config().variables)
    workflow['jobs'] = {}
    for sibling in job.workflow.get_jobs():
        sibling_vars = self._make_variables_dict(
            sibling.get_config().variables)
        sibling_dict = sibling.to_dict()
        sibling_dict['variables'] = sibling_vars
        workflow['jobs'][sibling.get_config().name] = sibling_dict

    project = job.project.to_dict()
    project['variables'] = self._make_variables_dict(
        job.project.get_config().variables)

    rendered = format_yaml(job.yaml_template,
                           workflow=workflow,
                           project=project,
                           system=system_dict)
    crd_spec = json.loads(rendered)
    try:
        k8s_client.create_or_replace_custom_object(CrdKind.FLAPP, crd_spec)
    except RuntimeError as e:
        logging.error('Start job %d has Runtime error msg: %s',
                      job_id, e.args)
        return job.state

    job.start()
    db.session.commit()
    return job.state
def test_get_system_envs(self):
    """get_system_envs() should emit the fixed pod/resource entries,
    the ES/DB/KVSTORE entries, and the ETCD defaults."""
    expected = ','.join([
        '{"name": "POD_IP", "valueFrom": {"fieldRef": {"fieldPath": "status.podIP"}}}',
        '{"name": "POD_NAME", "valueFrom": {"fieldRef": {"fieldPath": "metadata.name"}}}',
        '{"name": "CPU_REQUEST", "valueFrom": {"resourceFieldRef": {"resource": "requests.cpu"}}}',
        '{"name": "MEM_REQUEST", "valueFrom": {"resourceFieldRef": {"resource": "requests.memory"}}}',
        '{"name": "CPU_LIMIT", "valueFrom": {"resourceFieldRef": {"resource": "limits.cpu"}}}',
        '{"name": "MEM_LIMIT", "valueFrom": {"resourceFieldRef": {"resource": "limits.memory"}}}',
        '{"name": "ES_HOST", "value": "test es host"}',
        '{"name": "ES_PORT", "value": "9200"}',
        '{"name": "DB_HOST", "value": "test db host"}',
        '{"name": "DB_PORT", "value": "3306"}',
        '{"name": "DB_DATABASE", "value": "fedlearner"}',
        '{"name": "DB_USERNAME", "value": "username"}',
        '{"name": "DB_PASSWORD", "value": "password"}',
        '{"name": "KVSTORE_TYPE", "value": "mysql"}',
        '{"name": "ETCD_NAME", "value": "fedlearner"}',
        '{"name": "ETCD_ADDR", "value": "fedlearner-stack-etcd.default.svc.cluster.local:2379"}',
        '{"name": "ETCD_BASE_DIR", "value": "fedlearner"}',
    ])
    self.assertEqual(get_system_envs(), expected)
def generate_system_dict():
    """Build the ``system`` template context: the basic env-var entries
    produced by get_system_envs() under the 'basic_envs' key."""
    basic_envs = get_system_envs()
    return {'basic_envs': basic_envs}