def do_mergefet_worker(job_q, result_q, sftp):
    print 'process %d start working...'%(os.getpid())
    #construct feature file name base on MAC address and process id
    mac=uuid.UUID(int = uuid.getnode()).hex[-12:]
    feature_file_name = 'feature_%s_%d.tbl' %(mac,os.getpid())
    remote_feature_file = ''
    task_feature = {}
    while True:
        try:
            job = job_q.get_nowait()
            print 'will process %d jobs...,pid=%d'%(len(job),os.getpid())
            for remote_fet_file in job:
                if remote_fet_file.find('White') >= 0:
                    label = 1
                else:
                    label =0  
                if remote_feature_file == '':
                    remote_feature_file = remote_fet_file[:remote_fet_file.find(common.g_sample_fet)]
                    if remote_feature_file[-1] != '/':
                        remote_feature_file += '/'
                    remote_feature_file += common.g_feature_dir
                    remote_feature_file += '/'
                    remote_feature_file += feature_file_name
                md5_file = remote_fet_file[remote_fet_file.rfind('/')+1 : len(remote_fet_file)]
                local_fet_file = MERGE_FET_FET_CACHE_DIR + md5_file
                #sftp.get(remote_fet_file, local_fet_file)
                if robust_sftp(sftp,remote_fet_file,local_fet_file,True) == False:
                    continue
                try:
                    MergeFile_single.merge_fet_file(task_feature,local_fet_file,label)
                except Exception,ex:
                    print ex
                    print traceback.format_exc()

                os.remove(local_fet_file)
            result_q.put(len(job))
        except Exception,e:
            if job_q.empty():
                print 'Queue.Empty, will exit loop. pid=%d'%(os.getpid())
                break
            else:
                print 'Is not job_q.empty() exception, pid=%d'%(os.getpid())
                print e
                print traceback.format_exc()
                    print traceback.format_exc()

                os.remove(local_fet_file)
            result_q.put(len(job))
        except Exception,e:
            if job_q.empty():
                print 'Queue.Empty, will exit loop. pid=%d'%(os.getpid())
                break
            else:
                print 'Is not job_q.empty() exception, pid=%d'%(os.getpid())
                print e
                print traceback.format_exc()
    
    if len(task_feature) > 0:
        local_feature_file = MERGE_FET_FEATURE_CACHE_DIR + feature_file_name
        MergeFile_single.write_feature_file(task_feature, local_feature_file)
        if remote_feature_file != '':
            robust_sftp(sftp,remote_feature_file,local_feature_file,False)
            #sftp.put(local_feature_file,remote_feature_file)
        print 'local_feature_file is: ',local_feature_file
        print 'remote_feature_file is: ', remote_feature_file
        os.remove(local_feature_file)
    
    print 'process %d exit.'%(os.getpid())

def process_file_worker(job_q, result_q, server_ip, operation, user_name, db_ip, db_name, ngram):
    """ A worker function to be launched in a separate process. Takes jobs from
        job_q - each job is a list of files(one chunk of files) to process. When the job is done,
        the result (number of files processed) is placed into
        result_q. Runs until job_q is empty.
    """
    # NOTE(review): everything from here to the end of this function appears to
    # be paste-corrupted residue duplicated from do_mergefet_worker. The first
    # statements below start at a deeper indent than the function body (a
    # syntax error), and the code references names this function never defines
    # (ex, local_fet_file, job, task_feature, sftp, remote_feature_file,
    # feature_file_name, output_incremental_fet). The real body of
    # process_file_worker is missing from this copy -- recover it from version
    # control rather than trying to repair this fragment. TODO confirm.
                    print ex
                    print traceback.format_exc()

                os.remove(local_fet_file)
            result_q.put(len(job))
        except Exception,e:
            if job_q.empty():
                print 'Queue.Empty, will exit loop. pid=%d'%(os.getpid())
                break
            else:
                print 'Is not job_q.empty() exception, pid=%d'%(os.getpid())
                print e
                print traceback.format_exc()

    if output_incremental_fet == True:
        MergeFile_single.get_inc_feature(task_feature,db_ip,db_name,os.getpid())

    if len(task_feature) > 0:
        local_feature_file = MERGE_FET_FEATURE_CACHE_DIR + feature_file_name
        MergeFile_single.write_feature_file(task_feature, local_feature_file)
        if remote_feature_file != '':
            robust_sftp(sftp,remote_feature_file,local_feature_file,False)
            #sftp.put(local_feature_file,remote_feature_file)
        print 'local_feature_file is: ',local_feature_file
        print 'remote_feature_file is: ', remote_feature_file
        os.remove(local_feature_file)
    
    print 'process %d exit.'%(os.getpid())

def process_file_worker(job_q, result_q, server_ip, operation, user_name, db_ip, db_name, ngram):
    """ A worker function to be launched in a separate process. Takes jobs from