Ejemplo n.º 1
0
	def _init_pool_interface_remote(self, config, sched, collector, host):
		"""Set up the (GSI)SSH process factory and remote base work directory.

		Creates an SSH or GSISSH process handler for *host*, checks that the
		remote Condor tools are reachable and determines (or creates) the
		base work directory on the remote pool.

		Raises:
			BackendError: if the remote Condor tools cannot be executed or the
				remote work directory cannot be determined/created.
		"""
		if self._remote_type == PoolType.SSH:
			self._proc_factory = ProcessHandler.create_instance('SSHProcessHandler',
				remote_host=host, sshLink=config.get_work_path('.ssh', self._name + host))
		else:
			self._proc_factory = ProcessHandler.create_instance('GSISSHProcessHandler',
				remote_host=host, sshLink=config.get_work_path('.gsissh', self._name + host))
		# ssh type instructions rely on commands being available on remote pool
		self._submit_exec = 'condor_submit'
		self._transfer_exec = 'false'  # disabled for this type
		# test availability of commands
		version_proc = self._proc_factory.logged_execute('condor_version')
		if version_proc.wait() != 0:
			version_proc.log_error(self._error_log_fn)
			raise BackendError('Failed to access remote Condor tools! ' +
				'The pool you are submitting to is very likely not configured properly.')
		# get initial workdir on remote pool
		remote_work_dn = config.get('remote workdir', '')
		if remote_work_dn:
			remote_user_name = self._proc_factory.logged_execute('whoami').get_output().strip()
			self._pool_work_dn = os.path.join(remote_work_dn, remote_user_name)
			# NOTE(review): the path is interpolated unquoted into the remote
			# command - paths containing spaces or shell metacharacters would
			# break; confirm whether logged_execute performs any quoting.
			remote_dn_proc = self._proc_factory.logged_execute('mkdir -p %s' % self._pool_work_dn)
		else:
			remote_dn_proc = self._proc_factory.logged_execute('pwd')
			self._pool_work_dn = remote_dn_proc.get_output().strip()
		# cache the exit code - the original called wait() a second time just
		# to format the log message
		exit_code = remote_dn_proc.wait()
		if exit_code != 0:
			self._log.critical('Code: %d\nOutput Message: %s\nError Message: %s',
				exit_code, remote_dn_proc.get_output(), remote_dn_proc.get_error())
			raise BackendError('Failed to determine, create or verify base work directory on remote host')
Ejemplo n.º 2
0
	def _init_pool_interface_remote(self, config, sched, collector, host):
		"""Prepare the (GSI)SSH process factory and the base work directory on the remote pool."""
		handler_name, link_dn = 'GSISSHProcessHandler', '.gsissh'
		if self._remote_type == PoolType.SSH:
			handler_name, link_dn = 'SSHProcessHandler', '.ssh'
		self._proc_factory = ProcessHandler.create_instance(handler_name,
			remote_host=host, sshLink=config.get_work_path(link_dn, self._name + host))
		# ssh type instructions rely on commands being available on remote pool
		self._submit_exec = 'condor_submit'
		self._transfer_exec = 'false'  # disabled for this type
		# verify that the remote condor tools respond before doing anything else
		check_proc = self._proc_factory.logged_execute('condor_version')
		if check_proc.wait() != 0:
			check_proc.log_error(self._error_log_fn)
			raise BackendError('Failed to access remote Condor tools! ' +
				'The pool you are submitting to is very likely not configured properly.')
		# determine the initial workdir on the remote pool
		base_dn = config.get('remote workdir', '')
		if not base_dn:
			workdir_proc = self._proc_factory.logged_execute('pwd')
			self._pool_work_dn = workdir_proc.get_output().strip()
		else:
			user_name = self._proc_factory.logged_execute('whoami').get_output().strip()
			self._pool_work_dn = os.path.join(base_dn, user_name)
			workdir_proc = self._proc_factory.logged_execute('mkdir -p %s' % self._pool_work_dn)
		if workdir_proc.wait() != 0:
			self._log.critical('Code: %d\nOutput Message: %s\nError Message: %s',
				workdir_proc.wait(), workdir_proc.get_output(), workdir_proc.get_error())
			raise BackendError('Failed to determine, create or verify base work directory on remote host')
Ejemplo n.º 3
0
	def _initPoolInterfaces(self, config):
		"""Configure condor commands and the process handler for the selected pool type.

		For LOCAL/SPOOL pools the condor executables are resolved locally;
		for SSH/GSISSH pools plain command names are used on the remote host,
		connectivity is verified and the remote base work directory is set up.

		Raises:
			BackendError: if the remote condor tools cannot be reached or the
				remote work directory cannot be determined/created.
		"""
		# check submissal type
		self.remoteType = config.getEnum('remote Type', PoolType, PoolType.LOCAL)
		self.debugOut("Selected pool type: %s" % PoolType.enum2str(self.remoteType))

		# get remote destination features
		user, sched, collector = self._getDestination(config)
		nice_user = user or "<local default>"
		nice_sched = sched or "<local default>"
		nice_collector = collector or "<local default>"
		self.debugOut("Destination:\n")
		self.debugOut("\tuser:%s @ sched:%s via collector:%s" % (nice_user, nice_sched, nice_collector))
		# prepare commands appropriate for pool type
		if self.remoteType in (PoolType.LOCAL, PoolType.SPOOL):
			self.user = user
			# fixed: original contained a redundant double assignment
			# (self.Pool=self.Pool=ProcessHandler...)
			self.Pool = ProcessHandler.createInstance("LocalProcessHandler")
			# local and remote use condor tools installed locally - get them
			self.submitExec = utils.resolveInstallPath('condor_submit')
			self.historyExec = utils.resolveInstallPath('condor_history')	# completed/failed jobs are stored outside the queue
			self.cancelExec = utils.resolveInstallPath('condor_rm')
			self.transferExec = utils.resolveInstallPath('condor_transfer_data')	# submission might spool to another schedd and need to fetch output
			self.configValExec = utils.resolveInstallPath('condor_config_val')	# service is better when being able to adjust to pool settings
			if self.remoteType == PoolType.SPOOL:
				# remote requires adding instructions for accessing remote pool
				self.submitExec += " %s %s" % (utils.QM(sched, "-remote %s" % sched, ""), utils.QM(collector, "-pool %s" % collector, ""))
				self.historyExec = "false"	# disabled for this type
				self.cancelExec += " %s %s" % (utils.QM(sched, "-name %s" % sched, ""), utils.QM(collector, "-pool %s" % collector, ""))
				self.transferExec += " %s %s" % (utils.QM(sched, "-name %s" % sched, ""), utils.QM(collector, "-pool %s" % collector, ""))
		else:
			# ssh type instructions are passed to the remote host via regular ssh/gsissh
			host = "%s%s" % (utils.QM(user, "%s@" % user, ""), sched)
			if self.remoteType == PoolType.SSH:
				self.Pool = ProcessHandler.createInstance("SSHProcessHandler", remoteHost=host, sshLink=config.getWorkPath(".ssh", self._name + host))
			else:
				self.Pool = ProcessHandler.createInstance("GSISSHProcessHandler", remoteHost=host, sshLink=config.getWorkPath(".gsissh", self._name + host))
			# ssh type instructions rely on commands being available on remote pool
			self.submitExec = 'condor_submit'
			self.historyExec = 'condor_history'
			self.cancelExec = 'condor_rm'
			self.transferExec = "false"	# disabled for this type
			self.configValExec = 'condor_config_val'
			# test availability of commands
			testProcess = self.Pool.LoggedExecute("condor_version")
			self.debugOut("*** Testing remote connectivity:\n%s" % testProcess.cmd)
			if testProcess.wait() != 0:
				testProcess.logError(self.errorLog)
				raise BackendError("Failed to access remote Condor tools! The pool you are submitting to is very likely not configured properly.")
			# get initial workdir on remote pool
			remote_workdir = config.get("remote workdir", '')
			if remote_workdir:
				uName = self.Pool.LoggedExecute("whoami").getOutput().strip()
				self.poolWorkDir = os.path.join(remote_workdir, uName)
				pwdProcess = self.Pool.LoggedExecute("mkdir -p %s" % self.poolWorkDir)
			else:
				pwdProcess = self.Pool.LoggedExecute("pwd")
				self.poolWorkDir = pwdProcess.getOutput().strip()
			# cache the exit code - original invoked wait() a second time for logging
			exit_code = pwdProcess.wait()
			if exit_code != 0:
				self._log.critical("Code: %d\nOutput Message: %s\nError Message: %s", exit_code, pwdProcess.getOutput(), pwdProcess.getError())
				raise BackendError("Failed to determine, create or verify base work directory on remote host")
Ejemplo n.º 4
0
	def _initPoolInterfaces(self, config):
		"""Select the pool access mode and prepare the matching condor command set.

		LOCAL/SPOOL pools use locally installed condor tools; SSH/GSISSH pools
		run the plain command names through an (GSI)SSH process handler after a
		connectivity check, and establish a base work directory on the remote
		side.

		Raises:
			BackendError: on unreachable remote condor tools or failure to
				set up the remote work directory.
		"""
		# check submissal type
		self.remoteType = config.getEnum('remote Type', PoolType, PoolType.LOCAL)
		self.debugOut("Selected pool type: %s" % PoolType.enum2str(self.remoteType))

		# get remote destination features
		user, sched, collector = self._getDestination(config)
		nice_user = user or "<local default>"
		nice_sched = sched or "<local default>"
		nice_collector = collector or "<local default>"
		self.debugOut("Destination:\n")
		self.debugOut("\tuser:%s @ sched:%s via collector:%s" % (nice_user, nice_sched, nice_collector))
		# prepare commands appropriate for pool type
		if self.remoteType == PoolType.LOCAL or self.remoteType == PoolType.SPOOL:
			self.user = user
			# bug fix: dropped the duplicated 'self.Pool=self.Pool=' assignment
			self.Pool = ProcessHandler.createInstance("LocalProcessHandler")
			# local and remote use condor tools installed locally - get them
			self.submitExec = utils.resolveInstallPath('condor_submit')
			self.historyExec = utils.resolveInstallPath('condor_history')	# completed/failed jobs are stored outside the queue
			self.cancelExec = utils.resolveInstallPath('condor_rm')
			self.transferExec = utils.resolveInstallPath('condor_transfer_data')	# submission might spool to another schedd and need to fetch output
			self.configValExec = utils.resolveInstallPath('condor_config_val')	# service is better when being able to adjust to pool settings
			if self.remoteType == PoolType.SPOOL:
				# remote requires adding instructions for accessing remote pool
				self.submitExec += " %s %s" % (utils.QM(sched, "-remote %s" % sched, ""), utils.QM(collector, "-pool %s" % collector, ""))
				self.historyExec = "false"	# disabled for this type
				self.cancelExec += " %s %s" % (utils.QM(sched, "-name %s" % sched, ""), utils.QM(collector, "-pool %s" % collector, ""))
				self.transferExec += " %s %s" % (utils.QM(sched, "-name %s" % sched, ""), utils.QM(collector, "-pool %s" % collector, ""))
		else:
			# ssh type instructions are passed to the remote host via regular ssh/gsissh
			host = "%s%s" % (utils.QM(user, "%s@" % user, ""), sched)
			if self.remoteType == PoolType.SSH:
				self.Pool = ProcessHandler.createInstance("SSHProcessHandler", remoteHost=host, sshLink=config.getWorkPath(".ssh", self._name + host))
			else:
				self.Pool = ProcessHandler.createInstance("GSISSHProcessHandler", remoteHost=host, sshLink=config.getWorkPath(".gsissh", self._name + host))
			# ssh type instructions rely on commands being available on remote pool
			self.submitExec = 'condor_submit'
			self.historyExec = 'condor_history'
			self.cancelExec = 'condor_rm'
			self.transferExec = "false"	# disabled for this type
			self.configValExec = 'condor_config_val'
			# test availability of commands
			testProcess = self.Pool.LoggedExecute("condor_version")
			self.debugOut("*** Testing remote connectivity:\n%s" % testProcess.cmd)
			if testProcess.wait() != 0:
				testProcess.logError(self.errorLog)
				raise BackendError("Failed to access remote Condor tools! The pool you are submitting to is very likely not configured properly.")
			# get initial workdir on remote pool
			remote_workdir = config.get("remote workdir", '')
			if remote_workdir:
				uName = self.Pool.LoggedExecute("whoami").getOutput().strip()
				self.poolWorkDir = os.path.join(remote_workdir, uName)
				pwdProcess = self.Pool.LoggedExecute("mkdir -p %s" % self.poolWorkDir)
			else:
				pwdProcess = self.Pool.LoggedExecute("pwd")
				self.poolWorkDir = pwdProcess.getOutput().strip()
			# bug fix: evaluate wait() once instead of calling it again while logging
			pwd_exit_code = pwdProcess.wait()
			if pwd_exit_code != 0:
				self._log.critical("Code: %d\nOutput Message: %s\nError Message: %s", pwd_exit_code, pwdProcess.getOutput(), pwdProcess.getError())
				raise BackendError("Failed to determine, create or verify base work directory on remote host")
Ejemplo n.º 5
0
	def _init_pool_interface_local(self, config, sched, collector):
		"""Resolve local condor tools and create the local process handler."""
		# submission might spool to another schedd and need to fetch output
		submit_cmd = resolve_install_path('condor_submit')
		transfer_cmd = resolve_install_path('condor_transfer_data')
		if self._remote_type == PoolType.SPOOL:
			# (submit flag, transfer flag, target) pairs for schedd and collector
			for (submit_opt, transfer_opt, target) in [('-remote', '-name', sched), ('-pool', '-pool', collector)]:
				if target:
					submit_cmd += ' %s %s' % (submit_opt, target)
					transfer_cmd += ' %s %s' % (transfer_opt, target)
		self._submit_exec = submit_cmd
		self._transfer_exec = transfer_cmd
		self._proc_factory = ProcessHandler.create_instance('LocalProcessHandler')
Ejemplo n.º 6
0
	def _init_pool_interface_local(self, config, sched, collector):
		"""Set up condor commands and the process factory for a local/spooled pool."""
		# submission might spool to another schedd and need to fetch output
		self._submit_exec = resolve_install_path('condor_submit')
		self._transfer_exec = resolve_install_path('condor_transfer_data')
		if self._remote_type == PoolType.SPOOL:
			submit_args = []
			transfer_args = []
			if sched:
				submit_args.append(' -remote %s' % sched)
				transfer_args.append(' -name %s' % sched)
			if collector:
				submit_args.append(' -pool %s' % collector)
				transfer_args.append(' -pool %s' % collector)
			self._submit_exec += ''.join(submit_args)
			self._transfer_exec += ''.join(transfer_args)
		self._proc_factory = ProcessHandler.create_instance('LocalProcessHandler')