def getAttemptIdsForJobIdAndStoreInFile(jobId, myTask="map"):
    """Query the running attempt ids for a job and persist them to a file.

    Runs ``hadoop job -list-attempt-ids <jobId> <myTask> running`` and writes
    the command's stdout to ``<artifactsDir>/AttemptIdFile`` for later parsing.

    :param jobId: the Hadoop job id to query (string)
    :param myTask: task type to list attempts for, "map" or "reduce"
    """
    artifactsDir = CommonHadoopEnv.getArtifactsDir()
    saveFilePath = os.path.join(artifactsDir, "AttemptIdFile")
    listAttemptCmd = " job -list-attempt-ids "+ jobId +" "+ myTask + " running "
    # Hadoop.run returns (exit_code, stdout); only stdout is persisted.
    # Fix: removed a dead local (`buf = StringIO.StringIO(out[1])`) that was
    # created and never used.
    out = Hadoop.run(listAttemptCmd)
    util.writeToFile(out[1], saveFilePath)
def checkJobCreatedTempFileInTT(logFileDir, currentUser, currentJobId, currentAttemptId, logfile, taskTrackerHost):
    """Return True if the job created ``logfile`` in the TaskTracker's
    jobcache work directory for the given attempt.

    On Windows, or on secure (Kerberized) clusters, the file is checked
    directly with os.path.isfile. Otherwise the check is performed as the
    mapred user via a sudo'd ``ls | wc -l`` and succeeds when the command
    exits 0 and reports exactly one matching entry.
    """
    pathFile = os.path.join(logFileDir, 'taskTracker', currentUser, 'jobcache', currentJobId, currentAttemptId, 'work', logfile)
    logger.info("path file: " + pathFile)

    # Direct filesystem check is sufficient on Windows and secure clusters.
    if platform.system() == 'Windows':
        return os.path.isfile(pathFile)
    if CommonHadoopEnv.getIsSecure():
        return os.path.isfile(pathFile)

    # Insecure Linux cluster: the file is owned by the mapred user, so probe
    # it with a sudo'd shell pipeline and count the matching entries.
    cmd = "ls %s" % pathFile
    sudocmd = Machine.sudocmd(cmd, MAPRED_USER) + "|wc -l"
    logger.info("sudocmd = " + sudocmd)
    out = Machine.run(sudocmd)
    return out[0] == 0 and out[1] == "1"
from beaver.component.hadoop import Hadoop, HDFS, MAPRED
from beaver.config import Config
from beaver.machine import Machine
from beaver import component
from beaver import util
import os
import logging
import pytest
import sys
import time
import StringIO
import platform
from beaver.component.common_hadoop_env import CommonHadoopEnv

#Get user from config file
# Cluster service accounts resolved from the shared Hadoop test environment.
HADOOPQA_USER = CommonHadoopEnv.getHadoopQAUser()
HDFS_USER = CommonHadoopEnv.getHDFSUser()
MAPRED_USER = CommonHadoopEnv.getMapredUser()

# Local filesystem layout: this script's directory and its bundled test data.
SCRIPT_PATH = os.path.dirname(os.path.abspath(__file__))
DATA_PATH = os.path.join(SCRIPT_PATH, "data")
# Streaming-job scripts: local copies under data/ and their upload targets
# in the qa user's HDFS home directory.
CREATE_FILE = "CreateFile.py"
CREATE_FILE_PATH_IN_HADOOP = '/user/' + HADOOPQA_USER + '/' + CREATE_FILE
CREATE_FILE_PATH_IN_LOCAL = os.path.join(SCRIPT_PATH, "data", CREATE_FILE)
CREATE_FILE_2 = "CreateFile2.py"
CREATE_FILE_2_PATH_IN_HADOOP = '/user/' + HADOOPQA_USER + '/' + CREATE_FILE_2
CREATE_FILE_2_PATH_IN_LOCAL = os.path.join(SCRIPT_PATH, "data", CREATE_FILE_2)
# HDFS output directory used by the test jobs.
OUT_PATH_IN_HADOOP = '/user/' + HADOOPQA_USER + '/out1'
# Path to the Hadoop streaming jar, taken from the framework config.
HADOOP_STREAMING_JAR = Config.get('hadoop', 'HADOOP_STREAMING_JAR')

# Module-level logger for this test module.
logger = logging.getLogger(__name__)