Example #1
def main_ex(output):
    tokenized_text = commonUtility.word_tokenize(output)
    classified_text = commonUtility.st.tag(tokenized_text)
    data = {}
    data['docType'] = "Driving Licence"
    data['name'] = commonUtility.nameex(classified_text)
    date_info = dateex(output)  # call once; in this variant index 0 is age, index 1 is dob
    data['dob'] = date_info[1]
    data['age'] = date_info[0]
    data['address'] = addex(reg(output))
    data['bloodGroup'] = commonUtility.bloodGroup(output)
    return commonUtility.jsonify(data)
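These extractors all lean on a shared commonUtility module wrapping NLTK and a Stanford NER tagger. A minimal sketch of the interface the calls above imply is given below; the implementations are hypothetical stand-ins, not the project's actual module, and the model/jar paths are placeholders.

import json
from nltk import word_tokenize  # exposed as commonUtility.word_tokenize
from nltk.tag import StanfordNERTagger

# commonUtility.st is presumably a StanfordNERTagger instance
st = StanfordNERTagger('english.all.3class.distsim.crf.ser.gz', 'stanford-ner.jar')

def nameex(classified_text):
    """Join the tokens tagged PERSON into a candidate name."""
    return ' '.join(tok for tok, tag in classified_text if tag == 'PERSON')

def jsonify(data):
    """Serialise the extracted fields as a JSON string."""
    return json.dumps(data)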
Example #2
def main_ex(output):
    tokenized_text = commonUtility.word_tokenize(output)
    classified_text = commonUtility.st.tag(tokenized_text)
    data = {}
    data['docType'] = "PanCard"
    data['name'] = commonUtility.nameex(classified_text)
    data['dob'] = commonUtility.dateex(output)  # here dateex returns the date string directly
    data['age'] = commonUtility.age(data['dob'])
    # Fields not extracted for this document type are left blank.
    data['gender'] = ""
    data['bloodGroup'] = ""
    data['address'] = ""
    return commonUtility.jsonify(data)
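Example #2 additionally relies on commonUtility.dateex and commonUtility.age. A hedged sketch of what those helpers might look like, assuming DD/MM/YYYY or DD-MM-YYYY dates in the OCR text (the regex and date format are assumptions, not confirmed by the source):

import re
from datetime import datetime

def dateex(text):
    """Return the first DD/MM/YYYY-style date found in the text, or ''."""
    m = re.search(r'\b(\d{2}[/-]\d{2}[/-]\d{4})\b', text)
    return m.group(1) if m else ""

def age(dob):
    """Whole years elapsed since a DD/MM/YYYY date of birth."""
    try:
        born = datetime.strptime(dob.replace('-', '/'), '%d/%m/%Y')
        return (datetime.now() - born).days // 365
    except ValueError:
        return ""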
Example #3
def main_ex(output):
    tokenized_text = commonUtility.word_tokenize(output)
    classified_text = commonUtility.st.tag(tokenized_text)
    data = {}
    data['docType'] = "Passport"
    data['name'] = commonUtility.nameex(classified_text)
    date_info = dateex(output)  # call once; in this variant index 0 is dob, index 1 is age
    data['dob'] = date_info[0]
    data['age'] = date_info[1]
    # Fields not extracted for this document type are left blank.
    data['gender'] = ""
    data['bloodGroup'] = ""
    data['address'] = ""
    return commonUtility.jsonify(data)
Example #4
import re

def addex(c3):
    """
    In: processed text
    Out: address
    """
    try:
        tkn_add = commonUtility.word_tokenize(reg(c3))
        add = ""
        capturing = False
        pin = re.compile(r'^\d{6}$')  # Indian PIN codes are six digits
        for token in tkn_add:
            if capturing:
                add += token + ' '
            if token in ('ADDRESS', 'Address', 'Add'):
                capturing = True   # start collecting after the keyword
            elif pin.search(token):
                capturing = False  # stop once the PIN code has been collected
        return add.strip()
    except Exception:
        return "None"