def main():
    result_information = []
    load_key = {}
    
    ####################
    #command line parser
    """
    if len(sys.argv) <5:
        print 'cannot get any legal files, argv < 5 %d'%(len(sys.argv))
        return 
    """
    #python uehprocess.py -o "G:\UEH\Origin.json" -r "G:\UEH\Test.json" -f "G:\UEH\source\ECR28_0427.log"
    parser = OptionParser()
    parser.add_option('-o','--output',
                      action = 'store', 
                      type = 'string',
                      dest = 'output_str')
    parser.add_option('-f','--file',
                      action = 'store',
                      type = 'string',
                      dest = 'file_str')
    parser.add_option('-r','--return',
                      action = 'store',
                      type = 'string',
                      dest = 'return_file')
    parser.add_option('-l','--logdir',
                      action = 'store',
                      type = 'string',
                      dest = 'log_path')
    parser.add_option('-s','--savedir',
                      action = 'store',
                      type = 'string',
                      dest = 'save_path')   
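    # Option summary (as used below): -o is the JSON describing the target table
    # (TableID/Columns), -f the source log to parse, -r the JSON file the results
    # are written back to, -l and -s the log and result directories.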
    print sys.argv     
    args_test = ['-f',"D:\\ECR09_0407.log",
                 '-r',"D:\\UEH\\Test.json",
                 '-o',"D:\\724d4958-7aa6-4b34-aa26-c1c66ae2a904_Out.json",
                 '-l',"D:\\UEH\\log\\",
                 '-s',"D:\\UEH\\result"]
    # use the real command-line arguments when any are given, otherwise fall back
    # to the hard-coded test arguments above
    (options,args) = parser.parse_args(sys.argv[1:] if len(sys.argv) > 1 else args_test)
    
    print options.output_str
    print options.file_str
    print options.return_file
    print options.log_path
    print options.save_path
    if not os.path.exists(options.log_path):
        os.mkdir(options.log_path)
    if not os.path.exists(options.save_path):
        os.mkdir(options.save_path)
    ############
    #JSON format parsing
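    # Assumed layout of the JSON referenced by -o, inferred from the reads below
    # (not confirmed against a schema): a list whose first element is an object
    # with a "TableID" value and a "Columns" list; each Columns entry is either
    # null (an unnamed column) or one or more names joined by field_colums_split.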
    json_fp = open(options.output_str)
    json_val = simplejson.load(json_fp)
    json_fp.close()
    #TODO: delete the JSON file after parsing
    table_id = json_val[0].get("TableID")
    field_colums = json_val[0].get("Columns")
    cnt = 0
    for field in field_colums:
        if field is None:
            # unnamed column: register a positional placeholder key
            tmp = "___" + str(cnt) + "___"
            load_key[tmp] = cnt
        else:
            keys = [key.strip(" ") for key in field.split(field_colums_split)]
            for key in keys:
                load_key[key] = cnt
        cnt = cnt + 1
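    # load_key now maps each column name (or the "___<index>___" placeholder used
    # for unnamed columns) to its zero-based position in the Columns list.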
    
    print load_key
    print table_id
    
    ############
    #start parsing
    file_str = options.file_str
    return_file = options.return_file
    log_file = options.log_path
    save_file = options.save_path
    
    print 'Start Time:' + time.ctime()
    start = time.clock()
    
    files = []
    files.append(file_str)
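    # Only a single input file is handled per run: the list holds one entry and
    # the loop below breaks after the first file anyway.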
    for f in files:
        if not os.path.exists(f):
            print "cannot find %s"%f
            return 
        #p = processStr(f,DBInfo.loadkey(table_id))
        p = processStr(f,load_key,table_id,log_file,save_file)
        ret = p.parseProcess()
        result_information.append(ret)
        break
    
    print "End Time", time.ctime()
    print "total time(seconds)", str(time.clock() - start)
    
    #####################
    #Write into JSON file
    json_object = []
    for i in range(0, MAX_TIME_SPAN + 1):
        if ret[i]['DataStartTime'] is not None and ret[i]['DataStartTime'] != "":
            json_object.append(ret[i])
        else:
            break
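    # Keep only the leading time-span slots that were actually populated; the
    # first slot with an empty DataStartTime ends the parsed series.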
    print json_object
    target_file = open(return_file,"w")
    simplejson.dump(json_object,target_file)
    target_file.close()
    DBInfo.close()
def main():
    result_information = []
    load_key = {}
    
    ####################
    #command line parser
    
    parser = OptionParser()
    parser.add_option('-f','--file',
                      action = 'store',
                      type = 'string',
                      dest = 'file_str')
    parser.add_option('-s','--savedir',
                      action = 'store',
                      type = 'string',
                      dest = 'save_path')
    parser.add_option('-r','--return',
                      action = 'store',
                      type = 'string',
                      dest = 'return_file')
    
    #ATUTEST2210465292330515664201206271450_120627_145004.tmp
    #ATUTEST3059711936502054577201206271451_120627_145103.tmp
    #ATUTEST5219307231012006613201206271720_120627_172001.tmp
    #eventData_120419_085606.txt
    #eventData_120419_113412.txt
    
    args_test = ['-f',FILE_SOURCE,
                 '-s','D:\\EricssonData\\ParsedData',
                 '-r','D:\\EricssonData\\output.json',
                 ]

    # use the real command-line arguments when any are given, otherwise fall back
    # to the hard-coded test arguments above
    args_in = sys.argv[1:] if len(sys.argv) > 1 else args_test

    (options,args) = parser.parse_args(args_in)
    
    print options.save_path
    print options.file_str
    print options.return_file
    
    ############
    #start parsing
    file_str = options.file_str
    save_file = options.save_path
    if not os.path.exists(save_file):
        os.mkdir(save_file)             
    return_file = options.return_file 
    
    print 'Start Time:' + time.ctime()
    start = time.clock()
    
    files = []
    files.append(file_str)
    
    for f in files:
        if not os.path.exists(f):
            print "cannot find %s"%f
            raise Exception("cannot find %s"%f)
        p = processStr(filename = f,save_path = save_file)
        ret = p.parseProcess()
        result_information.append(ret)
        break
    
    json_object = p.merge_json()
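    # merge_json() is assumed to combine the parsed results into one
    # JSON-serialisable object (mirroring the per-file-type MR/event/call
    # grouping used in the variant of main() below); this is an inference,
    # not verified against processStr.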
    
    print "End Time", time.ctime()
    print "total time(seconds)", str(time.clock() - start)
    
    #print ret
    #if json_object == None:
    #   return
    #print json_object
    target_file = open(return_file,"w")
    simplejson.dump(json_object,target_file)
    target_file.close()
    DBInfo.close()   
def main():
    result_information = []
    load_key = {}
    
    ####################
    #command line parser
    
    parser = OptionParser()
    parser.add_option('-f','--file',
                      action = 'store',
                      type = 'string',
                      dest = 'file_str')
    parser.add_option('-s','--savedir',
                      action = 'store',
                      type = 'string',
                      dest = 'save_path')
    parser.add_option('-r','--return',
                      action = 'store',
                      type = 'string',
                      dest = 'return_file')
    
    args_test = ['-f','G:\\MR\\source\\eventData_120419_113412.txt',
                 '-s','G:\\MR\\result',
                 '-r','G:\\MR\\output.json',
                 ]

    # use the real command-line arguments when any are given, otherwise fall back
    # to the hard-coded test arguments above
    args_in = sys.argv[1:] if len(sys.argv) > 1 else args_test

    (options,args) = parser.parse_args(args_in)
    
    print options.save_path
    print options.file_str
    print options.return_file
    
    ############
    #start parsing
    file_str = options.file_str
    save_file = options.save_path
    # create the result directory if it does not exist yet (matching the other
    # variants of main() above)
    if not os.path.exists(save_file):
        os.mkdir(save_file)
    return_file = options.return_file
    
    print 'Start Time:' + time.ctime()
    start = time.clock()
    
    files = []
    files.append(file_str)
    for f in files:
        if not os.path.exists(f):
            print "cannot find %s"%f
            return 
        p = processStr(filename = f,save_path = save_file)
        ret = p.parseProcess()
        print ret
        result_information.append(ret)
        break
    
    print "End Time", time.ctime()
    print "total time(seconds)", str(time.clock() - start)
    
    #####################
    #Write into JSON file
    json_object = {
                   MR_FILE:[],
                   EVENT_FILE:[],
                   CALL_FILE:[]
                   }
    for f in FILE_SETS:
        for i in range(0, MAX_TIME_SPAN + 1):
            if ret[f][i]['DataStartTime'] is not None and ret[f][i]['DataStartTime'] != "":
                json_object[f].append(ret[f][i])
            else:
                break
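    # For each file type (MR/event/call), keep the leading time-span slots whose
    # DataStartTime is filled in; the first empty slot ends that type's series.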

    print json_object
    target_file = open(return_file,"w")
    simplejson.dump(json_object,target_file)
    target_file.close()
    DBInfo.close()