Example #1
0
def findGenericPilotCredentials( vo = False, group = False ):
  if not group and not vo:
    return S_ERROR( "Need a group or a VO to determine the Generic pilot credentials" )
  if not vo:
    vo = Registry.getVOForGroup( group )
    if not vo:
      return S_ERROR( "Group %s does not have a VO associated" % group )
  opsHelper = Operations.Operations( vo = vo )
  pilotGroup = opsHelper.getValue( "Pilot/GenericPilotGroup", "" )
  pilotDN = opsHelper.getValue( "Pilot/GenericPilotDN", "" )
  if pilotDN and pilotGroup:
    gLogger.verbose( "Pilot credentials have been defined in the CS. Using %s@%s" % ( pilotDN, pilotGroup ) )
    result = gProxyManager.userHasProxy( pilotDN, pilotGroup, 86400 )
    if not result[ 'OK' ]:
      return S_ERROR( "%s@%s has no proxy uploaded to the ProxyManager" )
    return S_OK( ( pilotDN, pilotGroup ) )
  #Auto discover
  gLogger.verbose( "Pilot credentials are not defined. Autodiscovering..." )
  if pilotGroup:
    pilotGroups = [ pilotGroup ]
  else:
    result = Registry.getGroupsWithProperty( Properties.GENERIC_PILOT )
    if not result[ 'OK' ]:
      return result
    pilotGroups = []
    groups = result[ 'Value' ]
    if not groups:
      return S_ERROR( "No group with %s property defined" % Properties.GENERIC_PILOT )
    result = Registry.getGroupsForVO( vo )
    if not result[ 'OK' ]:
      return result
    for voGroup in result[ 'Value' ]:
      if voGroup in groups:
        pilotGroups.append( voGroup )
  if not pilotGroups:
    return S_ERROR( "No group for VO %s is a generic pilot group" % vo )
  for pilotGroup in pilotGroups:
    DNs = Registry.getDNsInGroup( pilotGroup )
    if not DNs:
      continue
    if pilotDN:
      if pilotDN not in DNs:
        continue
      result = gProxyManager.userHasProxy( pilotDN, pilotGroup, 86400 )
      if result[ 'OK' ] and result[ 'Value' ]:
        gLogger.verbose( "Discovered pilot credentials are %s@%s" % ( pilotDN, pilotGroup ) )
        return S_OK( ( pilotDN, pilotGroup ) )
    else:
      for DN in DNs:
        result = gProxyManager.userHasProxy( DN, pilotGroup, 86400 )
        if result[ 'OK' ] and result[ 'Value' ]:
          gLogger.verbose( "Discovered pilot credentials are %s@%s" % ( DN, pilotGroup ) )
          return S_OK( ( DN, pilotGroup ) )

  if pilotDN:
    return S_ERROR( "DN %s does not have group %s" % ( pilotDN, pilotGroups ) )
  return S_ERROR( "No generic proxy in the Proxy Manager with groups %s" % pilotGroups )
Example #2
0
def findGenericPilotCredentials( vo = False, group = False ):
  if not group and not vo:
    return S_ERROR( "Need a group or a VO to determine the Generic pilot credentials" )
  if not vo:
    vo = Registry.getVOForGroup( group )
    if not vo:
      return S_ERROR( "Group %s does not have a VO associated" % group )
  opsHelper = Operations.Operations( vo = vo )
  pilotGroup = opsHelper.getValue( "Pilot/GenericPilotGroup", "" )
  pilotDN = opsHelper.getValue( "Pilot/GenericPilotDN", "" )
  if pilotDN and pilotGroup:
    gLogger.verbose( "Pilot credentials from CS: %s@%s" % ( pilotDN, pilotGroup ) )
    result = gProxyManager.userHasProxy( pilotDN, pilotGroup, 86400 )
    if not result[ 'OK' ]:
      return S_ERROR( "%s@%s has no proxy in ProxyManager" )
    return S_OK( ( pilotDN, pilotGroup ) )
  #Auto discover
  gLogger.verbose( "Pilot credentials are not defined. Autodiscovering..." )
  if pilotGroup:
    pilotGroups = [ pilotGroup ]
  else:
    result = Registry.getGroupsWithProperty( Properties.GENERIC_PILOT )
    if not result[ 'OK' ]:
      return result
    pilotGroups = []
    groups = result[ 'Value' ]
    if not groups:
      return S_ERROR( "No group with %s property defined" % Properties.GENERIC_PILOT )
    result = Registry.getGroupsForVO( vo )
    if not result[ 'OK' ]:
      return result
    for voGroup in result[ 'Value' ]:
      if voGroup in groups:
        pilotGroups.append( voGroup )
  if not pilotGroups:
    return S_ERROR( "No generic pilot group for VO %s" % vo )
  for pilotGroup in pilotGroups:
    DNs = Registry.getDNsInGroup( pilotGroup )
    if not DNs:
      continue
    if pilotDN:
      if pilotDN not in DNs:
        continue
      result = gProxyManager.userHasProxy( pilotDN, pilotGroup, 86400 )
      if result[ 'OK' ] and result[ 'Value' ]:
        gLogger.verbose( "Discovered pilot credentials: %s@%s" % ( pilotDN, pilotGroup ) )
        return S_OK( ( pilotDN, pilotGroup ) )
    else:
      for DN in DNs:
        result = gProxyManager.userHasProxy( DN, pilotGroup, 86400 )
        if result[ 'OK' ] and result[ 'Value' ]:
          gLogger.verbose( "Discovered pilot credentials: %s@%s" % ( DN, pilotGroup ) )
          return S_OK( ( DN, pilotGroup ) )

  if pilotDN:
    return S_ERROR( "DN %s does not have group %s" % ( pilotDN, pilotGroups ) )
  return S_ERROR( "No generic proxy in the Proxy Manager with groups %s" % pilotGroups )
Example #3
0
 def __checkIfProxyUploadIsRequired( self ):
   result = gProxyManager.userHasProxy( self.ownerDN, self.ownerGroup, validSeconds = 18000 )
   if not result[ 'OK' ]:
     gLogger.error( "Can't check if the user has proxy uploaded:", result[ 'Message' ] )
     return True
   #Check if an upload is required
   return result[ 'Value' ] == False
Example #4
0
 def __checkIfProxyUploadIsRequired(self):
   result = gProxyManager.userHasProxy(self.ownerDN, self.ownerGroup, validSeconds=18000)
   if not result['OK']:
     gLogger.error("Can't check if the user has proxy uploaded:", result['Message'])
     return True
   # Check if an upload is required
   return not result['Value']
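For reference, the check wrapped by the method above can be sketched inline; the DN and group below are placeholder values, while the 18000-second validity window is the one used in the original code.

# Placeholder DN/group; userHasProxy returns an S_OK/S_ERROR structure,
# with the boolean answer in result['Value'].
result = gProxyManager.userHasProxy("/DC=org/CN=Some User", "some_group", validSeconds=18000)
uploadRequired = True if not result['OK'] else not result['Value']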
Example #5
0
def findGenericPilotCredentials(vo=False, group=False, pilotDN='', pilotGroup=''):
  """ Looks into the Operations/<>/Pilot section of CS to find the pilot credentials.
      Then check if the user has a registered proxy in ProxyManager.

      if pilotDN or pilotGroup are specified, use them
  """
  if not group and not vo:
    return S_ERROR("Need a group or a VO to determine the Generic pilot credentials")
  if not vo:
    vo = Registry.getVOForGroup(group)
    if not vo:
      return S_ERROR("Group %s does not have a VO associated" % group)
  opsHelper = Operations.Operations(vo=vo)
  if not pilotGroup:
    pilotGroup = opsHelper.getValue("Pilot/GenericPilotGroup", "")
  if not pilotDN:
    pilotDN = opsHelper.getValue("Pilot/GenericPilotDN", "")
  if not pilotDN:
    pilotUser = opsHelper.getValue("Pilot/GenericPilotUser", "")
    if pilotUser:
      result = Registry.getDNForUsername(pilotUser)
      if result['OK']:
        pilotDN = result['Value'][0]
  if pilotDN and pilotGroup:
    gLogger.verbose("Pilot credentials: %s@%s" % (pilotDN, pilotGroup))
    result = gProxyManager.userHasProxy(pilotDN, pilotGroup, 86400)
    if not result['OK']:
      return S_ERROR("%s@%s has no proxy in ProxyManager")
    return S_OK((pilotDN, pilotGroup))

  if pilotDN:
    return S_ERROR("DN %s does not have group %s" % (pilotDN, pilotGroup))
  return S_ERROR("No generic proxy in the Proxy Manager with groups %s" % pilotGroup)
Example #6
0
def findGenericCloudCredentials(vo=False, group=False):
    if not group and not vo:
        return S_ERROR(
            "Need a group or a VO to determine the Generic cloud credentials")
    if not vo:
        vo = Registry.getVOForGroup(group)
        if not vo:
            return S_ERROR("Group %s does not have a VO associated" % group)
    opsHelper = Operations.Operations(vo=vo)
    cloudGroup = opsHelper.getValue("Cloud/GenericCloudGroup", "")
    cloudDN = opsHelper.getValue("Cloud/GenericCloudDN", "")
    if not cloudDN:
        cloudUser = opsHelper.getValue("Cloud/GenericCloudUser", "")
        if cloudUser:
            result = Registry.getDNForUsername(cloudUser)
            if result['OK']:
                cloudDN = result['Value'][0]
    if cloudDN and cloudGroup:
        gLogger.verbose("Cloud credentials from CS: %s@%s" %
                        (cloudDN, cloudGroup))
        result = gProxyManager.userHasProxy(cloudDN, cloudGroup, 86400)
        if not result['OK']:
            return S_ERROR("%s@%s has no proxy in ProxyManager")
        return S_OK((cloudDN, cloudGroup))
    return S_ERROR("Cloud credentials not found")
Example #7
0
def findGenericCloudCredentials(vo=False, group=False):
    """Get the cloud credentials to use for a specific VO and/or group."""
    if not group and not vo:
        return S_ERROR(
            "Need a group or a VO to determine the Generic cloud credentials")
    if not vo:
        vo = Registry.getVOForGroup(group)
        if not vo:
            return S_ERROR("Group %s does not have a VO associated" % group)
    opsHelper = Operations.Operations(vo=vo)
    cloudGroup = opsHelper.getValue("Cloud/GenericCloudGroup", "")
    cloudDN = opsHelper.getValue("Cloud/GenericCloudDN", "")
    if not cloudDN:
        cloudUser = opsHelper.getValue("Cloud/GenericCloudUser", "")
        if cloudUser:
            result = Registry.getDNForUsername(cloudUser)
            if result["OK"]:
                cloudDN = result["Value"][0]
            else:
                return S_ERROR("Failed to find suitable CloudDN")
    if cloudDN and cloudGroup:
        gLogger.verbose("Cloud credentials from CS: %s@%s" %
                        (cloudDN, cloudGroup))
        result = gProxyManager.userHasProxy(cloudDN, cloudGroup, 86400)
        if not result["OK"]:
            return result
        return S_OK((cloudDN, cloudGroup))
    return S_ERROR("Cloud credentials not found")
Example #8
0
    def __checkIfProxyUploadIsRequired(self):
        """Check if an upload is required

        :return: bool
        """
        result = gProxyManager.userHasProxy(self.ownerDN,
                                            self.ownerGroup,
                                            validSeconds=18000)
        if not result["OK"]:
            self.log.error("Can't check if the user has proxy uploaded",
                           result["Message"])
            return True
        # Check if an upload is required
        return not result["Value"]
Example #9
0
    def checkProxyUploaded(self, userDN, userGroup, requiredTime):
        """Set the persistence of a proxy in the Proxy Manager

       Example usage:

         >>> print diracAdmin.setProxyPersistency( 'some DN', 'dirac group', True )
         {'OK': True, 'Value' : True/False }

       :param userDN: User DN
       :type userDN: string
       :param userGroup: DIRAC Group
       :type userGroup: string
       :param requiredTime: Required life time of the uploaded proxy
       :type requiredTime: boolean
       :return: S_OK,S_ERROR
    """
        return gProxyManager.userHasProxy(userDN, userGroup, requiredTime)
Example #10
0
  def checkProxyUploaded( self, userDN, userGroup, requiredTime ):
    """Set the persistence of a proxy in the Proxy Manager

       Example usage:

       >>> print diracAdmin.setProxyPersistency( 'some DN', 'dirac group', True )
       {'OK': True, 'Value' : True/False }

       @param userDN: User DN
       @type userDN: string
       @param userGroup: DIRAC Group
       @type userGroup: string
       @param requiredTime: Required life time of the uploaded proxy
       @type requiredTime: boolean
       @return: S_OK,S_ERROR
    """
    return gProxyManager.userHasProxy( userDN, userGroup, requiredTime )
Example #11
0
def findGenericPilotCredentials(vo=False,
                                group=False,
                                pilotDN="",
                                pilotGroup=""):
    """Looks into the Operations/<>/Pilot section of CS to find the pilot credentials.
    Then check if the user has a registered proxy in ProxyManager.

    if pilotDN or pilotGroup are specified, use them

    :param str vo: VO name
    :param str group: group name
    :param str pilotDN: pilot DN
    :param str pilotGroup: pilot group

    :return: S_OK(tuple)/S_ERROR()
    """
    if not group and not vo:
        return S_ERROR(
            "Need a group or a VO to determine the Generic pilot credentials")
    if not vo:
        vo = Registry.getVOForGroup(group)
        if not vo:
            return S_ERROR("Group %s does not have a VO associated" % group)
    opsHelper = Operations.Operations(vo=vo)
    if not pilotGroup:
        pilotGroup = opsHelper.getValue("Pilot/GenericPilotGroup", "")
    if not pilotDN:
        pilotDN = opsHelper.getValue("Pilot/GenericPilotDN", "")
    if not pilotDN:
        pilotUser = opsHelper.getValue("Pilot/GenericPilotUser", "")
        if pilotUser:
            result = Registry.getDNForUsername(pilotUser)
            if result["OK"]:
                pilotDN = result["Value"][0]
    if pilotDN and pilotGroup:
        gLogger.verbose("Pilot credentials: %s@%s" % (pilotDN, pilotGroup))
        result = gProxyManager.userHasProxy(pilotDN, pilotGroup, 86400)
        if not result["OK"]:
            return S_ERROR("%s@%s has no proxy in ProxyManager")
        return S_OK((pilotDN, pilotGroup))

    if pilotDN:
        return S_ERROR("DN %s does not have group %s" % (pilotDN, pilotGroup))
    return S_ERROR("No generic proxy in the Proxy Manager with groups %s" %
                   pilotGroup)
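A sketch of calling the variant above with explicit credentials; the DN and group strings are placeholders used only for illustration.

# Hypothetical explicit-credentials call; the DN and group are placeholders.
result = findGenericPilotCredentials(vo="myVO",
                                     pilotDN="/DC=org/CN=Generic Pilot",
                                     pilotGroup="generic_pilot")
if result["OK"]:
    pilotDN, pilotGroup = result["Value"]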
Example #12
0
  def submitNewBigJob( self ):

    result = jobDB.getJobJDL( str( self.__jobID ) , True )
    classAdJob = ClassAd( result['Value'] )
    executableFile = ""
    if classAdJob.lookupAttribute( 'Executable' ):
      executableFile = classAdJob.getAttributeString( 'Executable' )

    tempPath = self.__tmpSandBoxDir
    dirac = Dirac()
    if not os.path.exists( tempPath ):
      os.makedirs( tempPath )

    settingJobSandBoxDir = dirac.getInputSandbox( self.__jobID, tempPath )
    self.log.info( 'Writing temporary SandboxDir on Server', settingJobSandBoxDir )
    moveData = self.__tmpSandBoxDir + "/InputSandbox" + str( self.__jobID )

    HiveV1Cli = HiveV1Client( self.__User , self.__publicIP )
    returned = HiveV1Cli.dataCopy( moveData, self.__tmpSandBoxDir )
    self.log.info( 'Copied the job content to the Hadoop Master with HIVE: ', returned )

    jobInfo = jobDB.getJobAttributes( self.__jobID )
    if not jobInfo['OK']:
      return S_ERROR( jobInfo['Message'] )
    proxy = ""
    jobInfo = jobInfo['Value']
    if gProxyManager.userHasProxy( jobInfo["OwnerDN"], jobInfo["OwnerGroup"] ):
      proxy = gProxyManager.downloadProxyToFile( jobInfo["OwnerDN"], jobInfo["OwnerGroup"] )
    else:
      proxy = self.__requestProxyFromProxyManager( jobInfo["OwnerDN"], jobInfo["OwnerGroup"] )

    HiveJob = "InputSandbox" + str( self.__jobID ) + "/" + executableFile
    HiveJobOutput = str( self.__jobID ) + "_" + executableFile + "_out"

    returned = HiveV1Cli.jobSubmit( tempPath, HiveJob, proxy['chain'], HiveJobOutput )
    self.log.info( 'Launch Hadoop-Hive job to the Master: ', returned )

    if not returned['OK']:
      return S_ERROR( returned['Message'] )
    else:
      self.log.info( 'Hadoop-Hive Job ID: ', returned['Value'] )

    return S_OK( returned['Value'] )
Example #13
0
 def _getPilotProxyFromDIRACGroup( self, ownerDN, ownerGroup, requiredTimeLeft ):
   """
   Download a limited pilot proxy with VOMS extensions depending on the group
   """
   if gProxyManager.userHasProxy( ownerDN, ownerGroup ):
     return gProxyManager.downloadProxyToFile( ownerDN, ownerGroup )
   #Assign VOMS attribute
   vomsAttr = CS.getVOMSAttributeForGroup( ownerGroup )
   if not vomsAttr:
     self.log.info( "Downloading a proxy without VOMS extensions for %s@%s" % ( ownerDN, ownerGroup ) )
     return gProxyManager.downloadProxy( ownerDN, ownerGroup, limited = True,
                                         requiredTimeLeft = requiredTimeLeft )
   else:
     self.log.info( "Downloading a proxy with '%s' VOMS extension for %s@%s" % ( vomsAttr, ownerDN, ownerGroup ) )
     return gProxyManager.downloadVOMSProxy( ownerDN,
                                    ownerGroup,
                                    limited = True,
                                    requiredTimeLeft = requiredTimeLeft,
                                    requiredVOMSAttribute = vomsAttr )
Example #14
0
    def submitNewBigPilot(self):

        tempPath = self.__tmpSandBoxDir + str(self.__jobID)
        dirac = Dirac()
        if not os.path.exists(tempPath):
            os.makedirs(tempPath)

        settingJobSandBoxDir = dirac.getInputSandbox(self.__jobID, tempPath)
        self.log.info("Writting temporal SandboxDir in Server", settingJobSandBoxDir)

        jobXMLName = "job:" + str(self.__jobID) + ".xml"
        with open(os.path.join(tempPath, jobXMLName), "wb") as temp_file:
            temp_file.write(self.jobWrapper())
        self.log.info("Writting temporal Hadoop Job.xml")

        HadoopV1cli = HadoopV1Client(self.__User, self.__publicIP, self.__Port)
        # returned = HadoopV1cli.dataCopy( tempPath, self.__tmpSandBoxDir )
        # self.log.info( 'Copy the job contain to the Hadoop Master: ', returned )

        jobInfo = jobDB.getJobAttributes(self.__jobID)
        if not jobInfo["OK"]:
            return S_ERROR(jobInfo["Value"])
        proxy = ""
        jobInfo = jobInfo["Value"]
        if gProxyManager.userHasProxy(jobInfo["OwnerDN"], jobInfo["OwnerGroup"]):
            proxy = gProxyManager.downloadProxyToFile(jobInfo["OwnerDN"], jobInfo["OwnerGroup"])
        else:
            proxy = self.__requestProxyFromProxyManager(jobInfo["OwnerDN"], jobInfo["OwnerGroup"])

        returned = HadoopV1cli.submitPilotJob(tempPath, jobXMLName, proxy["chain"])

        self.log.info("Launch Hadoop pilot to the Hadoop Master: ", returned)

        if not returned["OK"]:
            return S_ERROR(returned["Value"])
        else:
            self.log.info("Hadoop Job ID: ", returned["Value"])

        return S_OK(returned["Value"])
Example #15
0
  def submitNewBigJob( self ):

    tempPath = self.__tmpSandBoxDir + str( self.__jobID )
    dirac = Dirac()
    if not os.path.exists( tempPath ):
      os.makedirs( tempPath )

    settingJobSandBoxDir = dirac.getInputSandbox( self.__jobID, tempPath )
    self.log.info( 'Writing temporary SandboxDir on Server', settingJobSandBoxDir )

    jobXMLName = "job:" + str( self.__jobID ) + '.xml'
    with open( os.path.join( tempPath, jobXMLName ), 'wb' ) as temp_file:
        temp_file.write( self.jobWrapper() )
    self.log.info( 'Writing temporary Hadoop Job.xml' )

    HadoopV1cli = HadoopV2Client( self.__User , self.__publicIP )
    returned = HadoopV1cli.dataCopy( tempPath, self.__tmpSandBoxDir )
    self.log.info( 'Copied the job content to the Hadoop Master: ', returned )

    jobInfo = jobDB.getJobAttributes( self.__jobID )
    if not jobInfo['OK']:
      return S_ERROR( jobInfo['Message'] )
    proxy = ""
    jobInfo = jobInfo['Value']
    if gProxyManager.userHasProxy( jobInfo["OwnerDN"], jobInfo["OwnerGroup"] ):
      proxy = gProxyManager.downloadProxyToFile( jobInfo["OwnerDN"], jobInfo["OwnerGroup"] )
    else:
      proxy = self.__requestProxyFromProxyManager( jobInfo["OwnerDN"], jobInfo["OwnerGroup"] )

    returned = HadoopV1cli.jobSubmit( tempPath, jobXMLName, proxy['chain'] )
    self.log.info( 'Launch Hadoop job to the Hadoop Master: ', returned )

    if not returned['OK']:
      return S_ERROR( returned['Message'] )
    else:
      self.log.info( 'Hadoop Job ID: ', returned['Value'] )

    return S_OK( returned['Value'] )
Example #16
0
  def submitNewBigJob( self ):

    #1.- Create the temporary folder
    self.log.debug( 'Step1::: mkdir temp folder' )
    tempPath = self.__tmpSandBoxDir + str( self.__jobID ) + "/"
    dirac = Dirac()
    if not os.path.exists( tempPath ):
      os.makedirs( tempPath )

    #2.- Put the input sandbox content into the temporary folder
    self.log.debug( 'Step2::: download input sandbox to temp folder' )
    settingJobSandBoxDir = dirac.getInputSandbox( self.__jobID, tempPath )
    self.log.info( 'Writing temporary SandboxDir on Server', settingJobSandBoxDir )
    moveData = tempPath + "/InputSandbox" + str( self.__jobID )

    #3.- Move the data to client
    self.log.debug( 'Step3::: move the data to the client' )
    HadoopV1InteractiveCli = HadoopV1InteractiveClient( self.__User , self.__publicIP, self.__Port )
    returned = HadoopV1InteractiveCli.dataCopy( tempPath, self.__tmpSandBoxDir )
    self.log.debug( 'Result of copying the job content to the Hadoop Master with HadoopInteractive::: ', returned )

    #3.- Get executable file
    result = jobDB.getJobJDL( str( self.__jobID ) , True )
    classAdJob = ClassAd( result['Value'] )
    executableFile = ""
    if classAdJob.lookupAttribute( 'Executable' ):
      executableFile = classAdJob.getAttributeString( 'Executable' )
    self.log.debug( 'Step3::: Get executable file: ', executableFile )

    jobInfo = jobDB.getJobAttributes( self.__jobID )
    if not jobInfo['OK']:
      return S_ERROR( jobInfo['Message'] )
    proxy = ""
    jobInfo = jobInfo['Value']
    if gProxyManager.userHasProxy( jobInfo["OwnerDN"], jobInfo["OwnerGroup"] ):
      proxy = gProxyManager.downloadProxyToFile( jobInfo["OwnerDN"], jobInfo["OwnerGroup"] )
    else:
      proxy = self.__requestProxyFromProxyManager( jobInfo["OwnerDN"], jobInfo["OwnerGroup"] )

    HadoopInteractiveJob = "InputSandbox" + str( self.__jobID ) + "/" + executableFile
    HadoopInteractiveJobCommand = "InputSandbox" + str( self.__jobID ) + "/" + executableFile + " " + self.__JobName
    HadoopInteractiveJobOutput = tempPath + str( self.__jobID ) + "_" + executableFile + "_out"

    #4.- Creating second part of the job name
    if ( len( re.split( " ", self.__JobName ) ) > 1 ):
    #(name for random writter with -D)name_job = re.split( " ", self.__JobName )[0] + " " + re.split( " ", self.__JobName )[1] + " " + re.split( " ", self.__JobName )[2]
      name_job = re.split( " ", self.__JobName )[0] + " " + re.split( " ", self.__JobName )[1]
    #(name for random writter with -D)output_job = moveData + "/" + re.split( " ", self.__JobName )[3]

    #(name for random writter with -D)cfg_job = ""
    #(name for random writter with -D)if ( len( re.split( " ", self.__JobName ) ) > 4 ):
    #(name for random writter with -D)  cfg_job = moveData + "/" + re.split( " ", self.__JobName )[4]

    #5.- Parsing execution command
    #cmd = "hadoop jar " + tempPath + HadoopInteractiveJob + " " + name_job + " " + output_job + " " + cfg_job
      cmd = "hadoop jar " + tempPath + HadoopInteractiveJob + " " + name_job + " " + tempPath + "/InputSandbox" + str( self.__jobID ) + "/" + "/dataset-USC-a-grep '[and]+'"
    else:
      dataset = re.split( "/", self.__Dataset )
      count = 0
      datasetname = ""
      for dir in dataset:
        count = count + 1
        if ( count > 2 ):
          datasetname = datasetname + "/" + dir
      cmd = "hadoop jar " + tempPath + HadoopInteractiveJob + " " + self.__JobName + " " + datasetname + " " + tempPath + "/" + self.__JobName.replace( " ", "" ) + "_" + str( self.__jobID )
    self.log.debug( 'Step4::: Making CMD for submission: ', cmd )

    self.log.debug( 'Step5::: Submit file to hadoop: ' )
    returned = HadoopV1InteractiveCli.jobSubmit( tempPath, HadoopInteractiveJob, proxy['chain'],
                                                 HadoopInteractiveJobOutput, cmd )
    self.log.info( 'Launch Hadoop-HadoopInteractive job to the Master: ', returned )

    if not returned['OK']:
      return S_ERROR( returned['Message'] )
    else:
      self.log.info( 'Hadoop-HadoopInteractive Job ID: ', returned['Value'] )

    return S_OK( returned['Value'] )
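The big-job examples above all repeat the same proxy-fetching step. A minimal, self-contained sketch of that pattern is shown below; the owner DN and group are placeholder values. Note that gProxyManager.userHasProxy returns an S_OK/S_ERROR structure, so the boolean answer has to be read from result[ 'Value' ] rather than from the returned dictionary itself.

# Placeholder owner DN/group; the pattern mirrors the submitNewBigJob examples.
ownerDN = "/DC=org/CN=Some Owner"
ownerGroup = "some_group"
result = gProxyManager.userHasProxy( ownerDN, ownerGroup )
if result[ 'OK' ] and result[ 'Value' ]:
  proxy = gProxyManager.downloadProxyToFile( ownerDN, ownerGroup )
else:
  proxy = None  # e.g. fall back to requesting a proxy from the ProxyManager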