Example #1
def unresolved_case_list():
    try:
        record = CourtCase.query.filter_by(is_closed=False).all()
        ret_dict = {}
        ret_dict['confirm'] = '1'
        ret_dict['case_list'] = []
        for i in record:
            temp_dict = {}
            temp_dict['cin'] = str(i.cin)
            temp_dict['def_name'] = i.defendent_name
            temp_dict['def_addr'] = i.defendent_address
            temp_dict['crime_type'] = i.crime_type
            temp_dict['crime_date'] = {}
            temp_dict['crime_date']['month'] = str(i.crime_date.month)
            temp_dict['crime_date']['day'] = str(i.crime_date.day)
            temp_dict['crime_date']['year'] = str(i.crime_date.year)
            temp_dict['arrest_date'] = {}
            temp_dict['arrest_date']['month'] = str(i.date_of_arrest.month)
            temp_dict['arrest_date']['day'] = str(i.date_of_arrest.day)
            temp_dict['arrest_date']['year'] = str(i.date_of_arrest.year)
            temp_dict['starting_date'] = {}
            temp_dict['starting_date']['month'] = str(i.starting_date.month)
            temp_dict['starting_date']['day'] = str(i.starting_date.day)
            temp_dict['starting_date']['year'] = str(i.starting_date.year)
            temp_dict['crime_loc'] = i.crime_location
            temp_dict['arresting_off_name'] = i.arresting_officer_name
            temp_dict['name_pres_judge'] = i.judge_name
            temp_dict['public_prosecutor_name'] = i.public_prosecutor_name
            ret_dict['case_list'].append(temp_dict)
        ret_json = json.dumps(ret_dict)
        return ret_json
    except Exception:
        db.session.rollback()
        return json.dumps({"confirm": "0", "message": "Some error occurred"})
Example #2
def add_lawyer_judge(json_obj):
    try:
        y = json_obj
        user_type = y['usr_type']
        username = y["username"]
        name = y["name"]
        passw = y["password"]
        address = y['usr_addr']
        hashed_password = bcrypt.generate_password_hash(passw).decode('utf-8')
        usr = User.query.filter_by(username=username).first()
        if usr:
            return json.dumps({"add_status": "0", "err_msg": "Username Already Exists"})
        user = User(username=username, address=address, name=name,
                    password=hashed_password, user_type=user_type)

        db.session.add(user)
        db.session.commit()
        ret_val = {}
        ret_val['add_status'] = "1"
        ret_val['err_msg'] = "The account of has been created successfully!!"
        ret_json = json.dumps(ret_val)
        return ret_json

    except Exception:
        db.session.rollback()
        ret_val = {}
        ret_val['add_status'] = "0"
        ret_val['err_msg'] = "Sorry!! We were unable to create the account!! The username probably already exists!!"
        ret_json = json.dumps(ret_val)
        return ret_json
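The handler hashes the password with the app's Flask-Bcrypt instance before storing it. For reference, a standalone sketch of the same hash-then-verify idea using the plain bcrypt package (an assumption for illustration only; the code above depends on the Flask app's bcrypt object and db session):

import bcrypt

password = "s3cret"
# Hash once at account creation; store the resulting UTF-8 string on the user row.
hashed = bcrypt.hashpw(password.encode('utf-8'), bcrypt.gensalt()).decode('utf-8')
# Verify a login attempt against the stored hash.
assert bcrypt.checkpw(password.encode('utf-8'), hashed.encode('utf-8'))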
Example #3
def search_by_id(cin_str, username):
    try:
        search_by_id_charge = 100
        ret_dict = {}
        ret_dict['due_amt'] = "0"
        cin = int(cin_str)
        i = CourtCase.query.get(cin)
        if i is None:
            ret_dict["confirm"] = "0"
            ret_dict["message"] = "Please search for a valid CIN number!!"
        elif not i.is_closed:
            ret_dict["confirm"] = "0"
            ret_dict["message"] = "This is a pending Case!!"
        else:
            ret_dict['case_details'] = {}
            ret_dict['case_details']['CIN'] = str(i.cin)
            ret_dict['case_details']['def_name'] = i.defendent_name
            ret_dict['case_details']['def_addr'] = i.defendent_address
            ret_dict['case_details']['crime_type'] = i.crime_type
            ret_dict['case_details']['crime_date'] = str(
                i.crime_date.day)+"-"+str(i.crime_date.month)+"-"+str(i.crime_date.year)
            ret_dict['case_details']['date_arrest'] = str(
                i.date_of_arrest.day)+"-"+str(i.date_of_arrest.month)+"-"+str(i.date_of_arrest.year)
            ret_dict['case_details']['start_date'] = str(
                i.starting_date.day)+"-"+str(i.starting_date.month)+"-"+str(i.starting_date.year)
            ret_dict['case_details']['latest_hearing_date'] = str(
                i.hearing_date.day)+"-"+str(i.hearing_date.month)+"-"+str(i.hearing_date.year)
            ret_dict['case_details']['expected_completion_date'] = str(
                i.expected_completion_date.day)+"-"+str(i.expected_completion_date.month)+"-"+str(i.expected_completion_date.year)
            ret_dict['case_details']['crime_loc'] = i.crime_location
            ret_dict['case_details']['arresting_off_name'] = i.arresting_officer_name
            ret_dict['case_details']['name_pres_judge'] = i.judge_name
            ret_dict['case_details']['pros_name'] = i.public_prosecutor_name
            ret_dict['case_details']['adj_details'] = []
            adj = i.hearing_details
            if adj is not None:
                for x in adj.split('|'):
                    jobj = json.loads(x)
                    temp_dict = {}
                    temp_dict["date"] = jobj["date"]
                    temp_dict["reason"] = jobj["reason"]
                    ret_dict['case_details']['adj_details'].append(temp_dict)
            record = User.query.filter_by(username=username).first()
            if record is None:
                ret_dict2 = {}
                ret_dict2["confirm"] = "0"
                ret_dict2["messaage"] = "Please enter a valid username!!"
                ret_json2 = json.dumps(ret_dict2)
                return ret_json2
            else:
                if record.user_type == 'Lawyer':
                    record.due_amount = record.due_amount + search_by_id_charge
                    db.session.commit()
                    ret_dict['due_amt'] = str(record.due_amount)
        ret_json = json.dumps(ret_dict)
        return ret_json
    except Exception:
        db.session.rollback()
        return json.dumps({"confirm": "0", "message": "Some error occurred"})
Example #4
    def on_get(self, request, resp, **kwargs):
        """
        Falcon resource method, for handling HTTP request GET method

        Falcon request provides: parameters embedded in URL via a keyword args
        dict, as well as convenience class variables falcon.HTTP_*
        """
        api_config = loader.get_api_config()
        request_url = ResourceUtil.get_request_url(request, api_config)

        with warehouse.get_source_model_session() as model:
            sources = source.SourceUtil.get_list_of_data_sources(
                request_url, auth.get_user_id(request), model)
            str_dataset_id = source_parameters.get_requested_dataset_id(
                sources, request, resp, kwargs)
            dict_variables = ResourceUtil.get_self_dict(request_url)
            try:
                dwsupport_variables = get_list_of_variables(str_dataset_id)
            except NotImplementedError as e:
                #error; not a warehouse request & Dataset does not match requested ID
                logging.exception(e)
                raise falcon.HTTPError(falcon.HTTP_NOT_IMPLEMENTED,
                                       'Not Implemented', str(e))
            # format the DWSupport info, for publishing
            tables, associations = model['tables'], model['associations']
            list_variables = [
                to_web_variable(v, tables, associations)
                for v in dwsupport_variables
            ]
            # alphabetize the fields
            list_variables.sort(key=lambda v: v['id'])
            dict_variables[str_route] = list_variables
            str_nesting_indent_char = '\t'
            resp.body = json.dumps(dict_variables,
                                   indent=str_nesting_indent_char)
Example #5
def close_case(json_obj):
    ret_dict = {}
    try:
        y = json_obj
        cin = int(y['cin'])
        summary = y['case_summary']
        record = CourtCase.query.filter_by(cin=cin).first()
        if record is None:
            ret_dict['confirm'] = "0"
            ret_dict['message'] = "Sorry!! The given CIN does not exist!!"
        elif record.is_closed == True:
            ret_dict['confirm'] = "0"
            ret_dict['message'] = "The case has already been closed!!"
        else:
            record.is_closed = True
            record.summary = summary
            db.session.commit()
            ret_dict['confirm'] = "1"
            ret_dict['message'] = "The case has been successfully closed!!"
    except Exception:
        db.session.rollback()
        ret_dict['confirm'] = "0"
        ret_dict['message'] = "There was some problem closing the case!!"
    ret_json = json.dumps(ret_dict)
    return ret_json
Example #6
 def on_get(self, request, resp, **kwargs):
     """
     respond with Python module representation of the current config
     """
     session_user = auth.get_user_id(request)
     with warehouse.get_source_model_session() as dwsupport_model:
         if not management_auth.is_management_permitted(
                 session_user, dwsupport_model):
             msg = 'Warehouse management not authorized'
             raise falcon.HTTPUnauthorized(title='401', description=msg)
         #else
         requested_format = kwargs['format'].lower()
         if requested_format not in formats:
             msg = 'Dump format not found. Leave blank for Python (py) or include a supported format in dump URL: {}'.format(
                 formats)
             raise falcon.HTTPNotFound(title='404', description=msg)
         #else
         if requested_format == 'py':
             conf_module_text = DWSUPPORT_CONFIG_HEADER
             conf_module_text += 'model = {}'.format(get_model_string())
             conf_module_text += '\n'
             resp.body = conf_module_text
             return
         if requested_format == 'json':
             resp.body = json.dumps({'model': dwsupport_model})
             return
Example #7
def schedule_case(jsonobj):
    ret_dict = {}
    try:
        y = jsonobj
        cin = int(y['cin'])
        slot = int(y['slot'])
        new_hearing_date = datetime.datetime(
            int(y["date"]["year"]), int(y["date"]["month"]), int(y["date"]["day"]))
        record = CourtCase.query.filter_by(cin=cin).first()
        if record is None:
            ret_dict['confirm'] = "0"
            ret_dict['message'] = "The given CIN does not exist!!"
        elif record.hearing_date >= new_hearing_date:
            ret_dict['confirm'] = "0"
            ret_dict['message'] = "The new hearing date cannot be older than the previous one!!"
        else:
            record.hearing_date = new_hearing_date

            record.hearing_slot = slot
            db.session.commit()
            print(record.hearing_date)  # DEBUG
            add_to_slotlist(cin, slot, record.hearing_date.year,
                            record.hearing_date.month, record.hearing_date.day)

            ret_dict['confirm'] = "1"
            ret_dict["message"] = "New hearing date assigned successfully!!"

    except Exception:
        db.session.rollback()
        ret_dict['confirm'] = "0"
        ret_dict['message'] = "There was a problem assigning a new hearing date!"

    ret_json = json.dumps(ret_dict)
    return ret_json
Example #8
def resolved_case_list(jsonobj):
    ret_dict = {}
    try:
        y = jsonobj
        beg_date = datetime.datetime(int(y["beg_date"]["year"]), int(
            y["beg_date"]["month"]), int(y["beg_date"]["day"]))
        end_date = datetime.datetime(int(y["end_date"]["year"]), int(
            y["end_date"]["month"]), int(y["end_date"]["day"]))
        record = CourtCase.query.filter(CourtCase.starting_date.between(
            beg_date, end_date)).filter_by(is_closed=True).order_by(CourtCase.starting_date).all()
        ret_dict["confirm"] = 1
        ret_dict["case_list"] = []
        for i in record:
            temp_dict = {}
            temp_dict['cin'] = i.cin
            temp_dict['name_pres_judge'] = i.judge_name
            temp_dict['starting_date'] = {}
            temp_dict['starting_date']['month'] = str(i.starting_date.month)
            temp_dict['starting_date']['day'] = str(i.starting_date.day)
            temp_dict['starting_date']['year'] = str(i.starting_date.year)
            temp_dict['latest_date'] = {}
            temp_dict['latest_date']['month'] = str(i.hearing_date.month)
            temp_dict['latest_date']['day'] = str(i.hearing_date.day)
            temp_dict['latest_date']['year'] = str(i.hearing_date.year)
            temp_dict['case_summary'] = i.summary
            ret_dict["case_list"].append(temp_dict)
    except Exception:
        db.session.rollback()
        ret_dict["confirm"] = "0"
        ret_dict["message"] = "Sorry!! There was a problem in viewing the resolved case list!!"
    ret_json = json.dumps(ret_dict)
    return ret_json
Example #9
 def on_get(self, request, resp):
     """
     return JSON object, representing the current session's user info
     """
     user_id = auth.get_user_id(request)
     # return JSON user representation
     user = get_user(user_id)
     json_user = json.dumps(user)
     resp.body = json_user
Example #10
def search_by_key(key, username):
    try:
        search_by_key_charge = 100
        record = User.query.filter_by(username=username).first()
        ret_dict = {}
        if record is None:
            ret_dict2 = {}
            ret_dict2["confirm"] = "0"
            ret_dict2["message"] = "Please enter a valid username!!"
            ret_json2 = json.dumps(ret_dict2)
            return ret_json2
        else:
            if record.user_type == 'Lawyer':
                record.due_amount = record.due_amount + search_by_key_charge
                db.session.commit()
                ret_dict['due_amt'] = str(record.due_amount)
            rec = CourtCase.query.all()
            ret_dict["confirm"] = "1"
            ret_dict["message"] = "The search was successful!!"
            ret_dict['cin_list'] = []
            for i in rec:

                str_row = ""
                str_row = str_row + i.defendent_name + " " + i.defendent_address + " " + i.judge_name + " " + i.crime_type + \
                    " " + i.crime_location + " " + i.arresting_officer_name + \
                    " " + i.public_prosecutor_name + " "
                if i.hearing_details is not None:
                    str_row = str_row + i.hearing_details
                if i.summary is not None:
                    str_row = str_row + i.summary
                if key in str_row:
                    temp_dict = {}
                    temp_dict['cin'] = i.cin
                    temp_dict['crime_type'] = i.crime_type
                    temp_dict['name_pres_judge'] = i.judge_name
                    temp_dict['start_date'] = str(
                        i.starting_date.day)+"-"+str(i.starting_date.month)+"-"+str(i.starting_date.year)
                    ret_dict['cin_list'].append(temp_dict)
            ret_json = json.dumps(ret_dict)
            return ret_json
    except Exception:
        db.session.rollback()
        return json.dumps({"confirm": "0", "message": "Some error occurred"})
Example #11
def get_user_list():
    ret_dict = {}
    ret_dict['usr_list'] = []
    recr = User.query.all()
    for i in recr:
        if i.user_type != "Registrar":
            dct = {}
            dct['username'] = i.username
            dct['name'] = i.name
            dct['usr_type'] = i.user_type
            ret_dict['usr_list'].append(dct)
    ret_json = json.dumps(ret_dict)
    return ret_json
Example #12
def adjournment_details_add(jsonobj):
    ret_dict = {}
    try:
        y = jsonobj
        cin = int(y['cin'])
        reason = y['reason']
        record = CourtCase.query.filter_by(cin=cin).first()
        if record is None:
            ret_dict['confirm'] = "0"
            ret_dict['message'] = "The entered CIN is invalid!!"
        elif record.is_closed == True:
            ret_dict['confirm'] = "0"
            ret_dict['message'] = "The case has already been closed!!"
        elif record.hearing_date <= record.latest_adjournment_date:
            ret_dict['confirm'] = "0"
            ret_dict['message'] = "Please assign the next hearing of the case!"
        else:
            adj_dict = {}
            adj_dict["date"] = str(record.hearing_date.day)+"-"+str(
                record.hearing_date.month)+"-"+str(record.hearing_date.year)
            adj_dict["reason"] = reason
            adj_json = json.dumps(adj_dict)
            record.latest_adjournment_date = record.hearing_date
            record.latest_adjournment_slot = record.hearing_slot
            if record.hearing_details is None:
                record.hearing_details = adj_json
            else:
                record.hearing_details = record.hearing_details + "|" + adj_json
            db.session.commit()
            ret_dict['confirm'] = "1"
            ret_dict['message'] = "Adjournment details added successfully!!"
    except Exception:
        db.session.rollback()
        ret_dict['confirm'] = "0"
        ret_dict['message'] = "There was some problem adding the adjournment details!!"
    ret_json = json.dumps(ret_dict)
    return ret_json
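hearing_details is stored as pipe-separated JSON fragments: each adjournment is serialized with json.dumps and appended with a '|' separator here in Example #12, and search_by_id (Example #3) splits on '|' and parses each piece back. A self-contained sketch of that round trip:

import json

adjournments = [
    {"date": "14-3-2021", "reason": "Witness unavailable"},
    {"date": "2-4-2021", "reason": "Counsel requested time"},
]
# Encode: one JSON object per adjournment, joined with '|' (as in Example #12).
hearing_details = "|".join(json.dumps(a) for a in adjournments)
# Decode: split on '|' and parse each fragment (as in Example #3).
decoded = [json.loads(part) for part in hearing_details.split("|")]
assert decoded == adjournments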
Example #13
def search_vacant_slot(jsonobj):
    y = jsonobj
    date = datetime.datetime(int(y["year"]), int(y["month"]), int(y["day"]))
    list_of_case = SlotList.query.filter_by(date_of_hearing=date).all()
    slot_list = ['1', '1', '1', '1', '1']
    for i in list_of_case:
        slot_list[i.slot_of_hearing-1] = '0'
    ret_dict = {}
    ret_dict['free_slot'] = {}
    ret_dict['free_slot']['slot1'] = slot_list[0]
    ret_dict['free_slot']['slot2'] = slot_list[1]
    ret_dict['free_slot']['slot3'] = slot_list[2]
    ret_dict['free_slot']['slot4'] = slot_list[3]
    ret_dict['free_slot']['slot5'] = slot_list[4]
    ret_val = json.dumps(ret_dict)
    return ret_val
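The lookup above marks five hearing slots as free ('1') and then flips any slot already booked for that date to '0'. A minimal standalone sketch of that bitmap step, with the occupied slot numbers hard-coded in place of the SlotList query:

import json

occupied_slots = [2, 5]  # stand-in for SlotList.query results on a given date
slot_list = ['1'] * 5    # '1' means the slot is free
for slot in occupied_slots:
    slot_list[slot - 1] = '0'
free_slot = {'slot{}'.format(i + 1): slot_list[i] for i in range(5)}
print(json.dumps({'free_slot': free_slot}))
# {"free_slot": {"slot1": "1", "slot2": "0", "slot3": "1", "slot4": "1", "slot5": "0"}}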
Example #14
    def on_get(self, request, resp):
        """
        Falcon resource method, for HTTP request method: GET

        Falcon request provides a request.url convenience instance variable
        """
        api_config = api.config_loader.get_api_config()
        request_url = SourceUtil.get_request_url(request, api_config)

        with warehouse.get_source_model_session() as model:
            sources = SourceUtil.get_list_of_data_sources(
                request_url, auth.get_user_id(request), model)
            # Build a dict, representing the Source RESTful entity/endpoint
            dictSource = SourceUtil.get_source(sources, request_url)
            str_nesting_indent_char = '\t'
            resp.body = json.dumps(dictSource, indent=str_nesting_indent_char)
Example #15
    def on_get(self, request, resp):
        """
        terminate API login session
        """
        # Retrieve WSGI session
        session = request.env['beaker.session']

        # Check if there was a login
        try:
            user_id = session['user_id']
        except KeyError:
            user_id = None  #no login, still close session though
        # In either case, delete the existing (or empty) session
        session.delete()
        # Return a JSON response
        logout_message = get_logout(user_id)
        json_message = json.dumps(logout_message)
        resp.body = json_message  #Return
Example #16
def remove_lawyer_judge(json_obj):
    ret_dict = {}
    try:
        y = json_obj
        username = y["username"]
        recr = User.query.filter_by(username=username).first()
        if recr is None:
            ret_dict['removed_status'] = "0"
            ret_dict['err_msg'] = "Sorry!! The username does not exist!!"
        else:
            db.session.delete(recr)
            db.session.commit()
            ret_dict['removed_status'] = "1"
            ret_dict['err_msg'] = "Username removed successfully!!"
    except Exception:
        db.session.rollback()
        ret_dict['removed_status'] = "0"
        ret_dict['err_msg'] = "Sorry!! There was some problem!!"
    ret_json = json.dumps(ret_dict)
    return ret_json
Example #17
def get_user_list():
    ret_dict = {}
    try:
        ret_dict['usr_list'] = []
        recr = User.query.all()
        for i in recr:
            if i.user_type != "Registrar":
                dct = {}
                dct['username'] = i.username
                dct['name'] = i.name
                dct['usr_type'] = i.user_type
                ret_dict['usr_list'].append(dct)
        ret_dict['confirm'] = "1"
        ret_dict['message'] = "Success!! Here is the list of users!!"
    except Exception:
        db.session.rollback()
        ret_dict['confirm'] = "0"
        ret_dict['message'] = "Sorry!! There was a problem getting the user list!!"
    ret_json = json.dumps(ret_dict)
    return ret_json
Example #18
 def on_get(self, request, resp, **kwargs):
     """
     return JSON representing referenced table's associated columns
     """
     session_user = auth.get_user_id(request)
     with warehouse.get_source_model_session() as dwsupport_model:
         if not management_auth.is_management_permitted(
                 session_user, dwsupport_model):
             msg = 'Warehouse management not authorized'
             raise falcon.HTTPUnauthorized(title='401', description=msg)
         #else
         sources = source.SourceUtil.get_list_of_data_sources(
             request.url, auth.get_user_id(request), dwsupport_model)
         requested_source_id = selection.get_requested_dataset_id(
             sources, request, resp, kwargs)
         rows_for_management_app = get_variable_identifier_queries_dicts(
             requested_source_id, dwsupport_model)
         resp.body = json.dumps({'variables': rows_for_management_app},
                                indent='\t')
         return
Example #19
def enter_details_into_db(jsonobj):
    try:
        y = jsonobj
        def_name = y["def_name"]
        def_addr = y["def_addr"]
        crime_type = y["crime_type"]
        crime_date = datetime.datetime(int(y["crime_date"]["year"]), int(
            y["crime_date"]["month"]), int(y["crime_date"]["day"]))
        crime_loc = y["crime_loc"]
        arresting_off_name = y["arresting_off_name"]
        arrest_date = datetime.datetime(int(y["arrest_date"]["year"]), int(
            y["arrest_date"]["month"]), int(y["arrest_date"]["day"]))
        name_pres_judge = y["name_pres_judge"]
        pub_pros_name = y["public_prosecutor_name"]
        starting_date = datetime.datetime(int(y["starting_date"]["year"]), int(
            y["starting_date"]["month"]), int(y["starting_date"]["day"]))
        expected_completion_date = datetime.datetime(int(y["expected_completion_date"]["year"]), int(
            y["expected_completion_date"]["month"]), int(y["expected_completion_date"]["day"]))
        if int(y["hearing_slot"]) != -1:
            hearing_date = datetime.datetime(int(y["hearing_date"]["year"]), int(
                y["hearing_date"]["month"]), int(y["hearing_date"]["day"]))
            hearing_slot = y["hearing_slot"]
            case = CourtCase(defendent_name=def_name, defendent_address=def_addr, crime_type=crime_type, crime_date=crime_date, crime_location=crime_loc, arresting_officer_name=arresting_off_name, date_of_arrest=arrest_date,
                             judge_name=name_pres_judge, public_prosecutor_name=pub_pros_name, starting_date=starting_date, expected_completion_date=expected_completion_date, hearing_date=hearing_date, hearing_slot=hearing_slot)
        else:
            case = CourtCase(defendent_name=def_name, defendent_address=def_addr, crime_type=crime_type, crime_date=crime_date, crime_location=crime_loc, arresting_officer_name=arresting_off_name,
                             date_of_arrest=arrest_date, judge_name=name_pres_judge, public_prosecutor_name=pub_pros_name, starting_date=starting_date, expected_completion_date=expected_completion_date)
        db.session.add(case)
        db.session.commit()
        if int(y["hearing_slot"]) != -1:
            add_to_slotlist(case.cin, case.hearing_slot, case.hearing_date.year,
                            case.hearing_date.month, case.hearing_date.day)
        data_ret = {}
        data_ret['is_added'] = "1"
        data_ret['cin'] = str(case.cin)
        data_ret['message'] = "The Case has been added successfully!!"
        json_data_ret = json.dumps(data_ret)
        return json_data_ret
    except Exception:
        db.session.rollback()
        data_ret = {}
        data_ret['is_added'] = "0"
        data_ret['message'] = "Sorry!! There was a problem adding the case!!"
        json_data_ret = json.dumps(data_ret)
        return json_data_ret
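For reference, a payload that satisfies the key names and nested-date layout the function above expects; the values are purely illustrative, and a hearing_slot of -1 is treated as "no hearing scheduled yet":

sample_case = {
    "def_name": "J. Doe",
    "def_addr": "12 Example Street",
    "crime_type": "Theft",
    "crime_date": {"year": "2021", "month": "3", "day": "14"},
    "crime_loc": "Market Road",
    "arresting_off_name": "Officer K. Smith",
    "arrest_date": {"year": "2021", "month": "3", "day": "15"},
    "name_pres_judge": "Judge A. Rao",
    "public_prosecutor_name": "P. Prosecutor",
    "starting_date": {"year": "2021", "month": "4", "day": "1"},
    "expected_completion_date": {"year": "2022", "month": "4", "day": "1"},
    "hearing_date": {"year": "2021", "month": "4", "day": "20"},
    "hearing_slot": "2",
}
# enter_details_into_db(sample_case) would create the CourtCase and return a JSON
# confirmation string (requires the Flask app, db session and models to be set up).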
Example #20
def case_status_query(json_obj):
    ret_dict = {}
    try:
        y = json_obj
        cin = int(y['cin'])
        record = CourtCase.query.get(cin)
        if record is None:
            ret_dict['confirm'] = "0"
            ret_dict['message'] = "The input CIN does not exist!!"
        else:
            ret_dict['confirm'] = "1"
            ret_dict["message"] = "Success!!!"
            if record.is_closed == True:
                ret_dict["case_status"] = "Resolved"
            else:
                ret_dict["case_status"] = "Pending"
    except Exception:
        db.session.rollback()
        ret_dict["confirm"] = "0"
        ret_dict["message"] = "Sorry! There was a problem with the query!!"
    ret_json = json.dumps(ret_dict)
    return ret_json
Example #21
def get_datatables_editor_exception(falcon_http_error):
    """
    Make Falcon HTTPError 'to_json' method DataTables Editor compatible

    Returns the referenced HTTPError, with modifications

    Keyword Parameters:
    falcon_http_error  -- Falcon HTTPError exception

    >>> # Check unmodified Exception
    >>> plain = falcon.HTTPError(status=200, description='Plain!')
    >>> plain.to_json()
    '{\\n    "description": "Plain!"\\n}'
    >>> # Check modified
    >>> out = get_datatables_editor_exception(plain)
    >>> out.to_json()
    '{"data": [], "error": "{\\'description\\': \\'Plain!\\'}"}'
    """
    falcon_http_error.to_json = lambda:json.dumps({
         'error': pformat(falcon_http_error.to_dict()) #value must be String
        ,'data': [] #data must be present & be an empty Array
    }, sort_keys=True)
    return falcon_http_error
Example #22
def search_vacant_slot(jsonobj):
    ret_dict = {}
    try:
        y = jsonobj
        date = datetime.datetime(int(y["year"]), int(y["month"]), int(y["day"]))
        list_of_case = SlotList.query.filter_by(date_of_hearing=date).all()
        slot_list = ['1', '1', '1', '1', '1']
        for i in list_of_case:
            slot_list[i.slot_of_hearing-1] = '0'
        ret_dict['free_slot'] = {}
        ret_dict['free_slot']['slot1'] = slot_list[0]
        ret_dict['free_slot']['slot2'] = slot_list[1]
        ret_dict['free_slot']['slot3'] = slot_list[2]
        ret_dict['free_slot']['slot4'] = slot_list[3]
        ret_dict['free_slot']['slot5'] = slot_list[4]
        ret_dict["confirm"] = "1"
        ret_dict["message"] = "Success!!!"
    except Exception:
        db.session.rollback()
        ret_dict['confirm'] = "0"
        ret_dict['message'] = "Sorry!! There was a problem finding the vacant slots!!"
    ret_val = json.dumps(ret_dict)
    return ret_val
Example #23
def court_cases_by_date(json_obj):
    ret_dict = {}
    try:
        y = json_obj
        query_date = datetime.datetime(int(y["query_date"]["year"]), int(
            y["query_date"]["month"]), int(y["query_date"]["day"]))
        record = CourtCase.query.filter_by(
            hearing_date=query_date, is_closed=False).all()
        ret_dict['confirm'] = "1"
        ret_dict['case_list'] = []
        for i in record:
            temp_dict = {}
            temp_dict['cin'] = str(i.cin)
            temp_dict['slot'] = i.hearing_slot
            temp_dict['name_pres_judge'] = i.judge_name
            temp_dict['crime_type'] = i.crime_type
            ret_dict['case_list'].append(temp_dict)
    except Exception:
        db.session.rollback()
        ret_dict["confirm"] = "0"
        ret_dict["message"] = "Sorry!! There was an issue with the date!!"
    ret_json = json.dumps(ret_dict)
    return ret_json
Example #24
def get_result_from_db(str_source_id,
                       variables=[],
                       filters=[],
                       columns=[],
                       empty_cell_dimensions=[],
                       user_id=None):
    """
    returns list of db result rows, w/ 1x 'header' row prepended

    Keyword Parameters:
    str_source_id -- String, representing requested dataset's API id
    variables  -- list of requested variable names
    filters  -- list of specified filter expression strings
    columns  -- list of names for requested dimension variables to
      be pivoted as additional columns,for all requested value variables
    empty_cell_dimensions  -- list of Strings representing Dimension
       tables (or OLAP-Roles) which are to be OUTER JOINED to produce
       empty Fact value cells for all Dimensional values not found in
       the fact.
    user_id  -- String, representing an authenticated User principal

    Exceptions:
    FilterVariableError -- filters variable not found in header
    NoSourceException -- raised when no dataset matches str_source_id
    NotAuthorizedException  -- user_id not authorized to select data
      from the specified source.

    """
    #generate query
    tables = warehouse.get_source_tables()
    # break dataset identifier down into project/source substrings
    project_name, source_name = str_source_id.split('.')
    for warehouse_table in tables:
        if warehouse_table['name'] == source_name:
            # get connection
            connection = util._get_source_connection(warehouse.dict_source)
            with warehouse.get_source_model_session() as cached_model:
                if warehouse_table['confidential']:
                    # Attempt to obtain a sensitive connection IF user is authorized
                    if not auth.is_select_permitted(user_id, warehouse_table,
                                                    cached_model):
                        raise NotAuthorizedException()
                    connection.close()
                    connection = util._get_source_connection({
                        'id':
                        'Fake .ini source',
                        'db_file':
                        'db_dwsensitive.ini'
                    })
                # retrieve filter info
                if warehouse_table['type'] == 'fact':
                    two_dicts = warehouse.get_fact_variables(
                        warehouse_table, cached_model)
                    variable_by_field, unused = two_dicts
                if warehouse_table['type'] == 'dimension':
                    variable_by_field = warehouse.get_variables(
                        warehouse_table)
                if warehouse_table['type'] == 'dimension role':
                    variable_by_field = warehouse.get_role_variables(
                        warehouse_table)
                python_types = {}
                for field in variable_by_field:
                    var = variable_by_field[field]
                    python_types[field] = var['python_type']
                json_python_types = json.dumps(python_types)
                # get sql & binds
                try:
                    table_type.validate(warehouse_table['type'])
                except table_type.ValidateUnexpectedValue as e:
                    raise NotImplementedError(
                        'No SQL Generation method, for type: {}'.format(
                            warehouse_table)
                    ) from e  #TODO: make this into a local class
                sql_with_filters, binds = warehouse.get_sql_filtered(
                    warehouse_table, json_python_types, filters,
                    empty_cell_dimensions)
                db_config_file_name = warehouse.dict_source['db_file']
                break  # source found, exit!
    else:
        raise NoSourceException(str_source_id)
    if len(binds) > 0:
        result = connection.execution_options(stream_results=True).execute(
            sql_with_filters, binds)
    else:
        result = connection.execution_options(
            stream_results=True).execute(sql_with_filters)
    # compose results list
    result_generator = database_row_generator(result, connection)

    subset_generator = parameters.get_result_subset(result_generator,
                                                    variables)
    if columns:
        # pivot, i.e.: replace 'columns' fields & all measured value
        # fields with new measured-value breakdowns for the 'columns'
        # field values.
        all_variables = warehouse.get_source_variables()
        fact_variables = [
            v for v in all_variables if v['table'] == source_name
        ]
        return pivot.get_result(subset_generator, columns, fact_variables)
    #else, no pivot needed - just return
    return subset_generator
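The source lookup above relies on Python's for/else construct: the else branch runs only when the loop finishes without hitting break, which is what turns "no matching warehouse table" into NoSourceException. A tiny self-contained illustration of that control flow, with made-up table names:

tables = [{'name': 'catch_fact'}, {'name': 'depth_dim'}]
requested = 'operation_fact'
for table in tables:
    if table['name'] == requested:
        print('found', requested)
        break  # source found; the else clause is skipped
else:
    # runs only when the loop completed without a break
    print('no table named', requested)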
Example #25
    def on_post(self, request, resp, **kwargs):
        """
        Create/update table column, associated with referenced source_id

        TODO: add a JSON response body,compatible with DataTables Editor
        TODO2: improve documentation, unit test coverage
        """
        session_user = auth.get_user_id(request)
        with warehouse.get_source_model_session() as dwsupport_model:
            if not management_auth.is_management_permitted(
                    session_user, dwsupport_model):
                msg = 'Warehouse management not authorized'
                raise falcon.HTTPUnauthorized(title='401', description=msg)
            #else
            sources = source.SourceUtil.get_list_of_data_sources(
                request.url, auth.get_user_id(request), dwsupport_model)
            requested_source_id = selection.get_requested_dataset_id(
                sources, request, resp, kwargs)
            source_project, source_table = requested_source_id.split(
                '.')  #TODO: refactor this
            # Add DTO variable (if needed)
            get_func = util.get_dwsupport_connection
            if request.params['action'] == 'create':
                table_name = request.params['data[0][table]']
                column_name = request.params['data[0][column]']
                python_type = request.params['data[0][python_type]']
                column_title = request.params['data[0][title]']
                variable_dto = {
                    'table': table_name,
                    'column': column_name,
                    'title': column_title,
                    'python_type': python_type,
                    'physical_type': None,
                    'units': None,
                    'max_length': None,
                    'precision': None,
                    'allowed_values': None,
                    'description': None
                }
                dto.variable.save([variable_dto], connection_func=get_func)
            # get new default Queries this column should be in
            ## DataTables editor returns URLEncoded table,column defaults
            ## in format: data[{table_name}.{column_name}][defaults] = '{query1}(,{queryN})'
            key_prefix = 'data['
            key_suffix = '][column]'
            column_key_generator = (key for key in request.params
                                    if key.endswith(key_suffix))
            column_key = next(column_key_generator)
            # get column details
            table_dot_column_plus_suffix = column_key[len(key_prefix):]
            table_dot_column = table_dot_column_plus_suffix[:len(key_suffix) *
                                                            -1]
            table_name, column_name = table_dot_column.split('.')
            # get query details
            defaults_key = 'data[{}.{}][defaults]'.format(
                table_name, column_name)
            try:
                defaults_text = request.params[defaults_key]
                default_queries = {
                    query.strip().lower()  #parse text
                    for query in defaults_text.split(',')
                }
            except KeyError as defaults_empty_or_missing:
                default_queries = set()
            query_variable_table = table_name
            # get table_name for a role
            association_key = 'data[{}.{}][association]'.format(
                table_name, column_name)
            try:
                association_column = request.params[association_key]
                association_dto = next(
                    (association
                     for association in dwsupport_model['associations']
                     if association['table'] == source_table
                     and association['column'] == association_column))
                query_variable_table = association_dto['parent']
            except KeyError as association_empty_or_missing:
                pass  # done; no association was specified
            # update DTOs
            changes = dict()  #track changes
            changes['add'] = list()
            changes['update'] = list()
            for query_name in default_queries:
                try:
                    # add column
                    query_dto = next((query
                                      for query in dwsupport_model['queries']
                                      if query['name'] == query_name
                                      and query['table'] == source_table))
                    try:
                        query_dto['variables'][table_name].append(column_name)
                    except KeyError as new_table:
                        query_dto['variables'][table_name] = [column_name]
                    dto.query.update_by_table_and_name(
                        source_table,
                        query_name,
                        query_dto,
                        connection_func=get_func)
                    changes['update'].append(query_dto)
                except StopIteration as no_query_exists:
                    query_dto = {
                        'name': query_name,
                        'table': source_table,
                        'variables': {
                            table_name: [column_name]
                        }
                    }
                    dto.query.save([query_dto], connection_func=get_func)
                    changes['add'].append(query_dto)
            if default_queries == set():
                # remove column
                for query_dto in (query for query in dwsupport_model['queries']
                                  if query['table'] == source_table):
                    variable_tables = query_dto['variables'].keys()
                    if (query_variable_table in variable_tables and column_name
                            in query_dto['variables'][query_variable_table]):
                        query_dto['variables'][query_variable_table].remove(
                            column_name)
                        if len(query_dto['variables']
                               [query_variable_table]) == 0:
                            del (query_dto['variables'][query_variable_table])
                        dto.query.update_by_table_and_name(
                            source_table,
                            query_dto['name'],
                            query_dto,
                            connection_func=get_func)
                        changes['update'].append(query_dto)
            # JSON response per https://editor.datatables.net/manual/server
            msg = None
            if not changes['add'] and not changes['update']:
                msg = "No changes made"
            resp.body = json.dumps({'data': [changes], "error": msg})
            return
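The handler above recovers the table.column pair by slicing the DataTables Editor parameter key between its 'data[' prefix and '][column]' suffix. A self-contained sketch of that slicing step, using a made-up key for illustration:

params = {'data[catch_fact.retained_kg][column]': 'retained_kg'}

key_prefix = 'data['
key_suffix = '][column]'
column_key = next(key for key in params if key.endswith(key_suffix))
# Drop the prefix, then drop the suffix (negative slice, as in the handler above).
table_dot_column = column_key[len(key_prefix):][:len(key_suffix) * -1]
table_name, column_name = table_dot_column.split('.')
assert (table_name, column_name) == ('catch_fact', 'retained_kg')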
Example #26
def _copy_fact_table( source_id, new_project_name, new_table_name
                     ,new_custom_id_by_old_ids, dwsupport_model):
    """
    Return table & related objects,copied from DWSupport source_id table

    Returns 5-tuple, representing new Table DTO and lists of new
      association DTOs, variable DTOs, variable_custom_identifier DTOs
      & query DTOs

    Keyword Parameters:
    source_id  -- Sting, identifying the project Fact table to copy
    new_project_name  -- String, representing project name copy will
      belong to
    new_table_name  -- String, representing name for the new table
    new_custom_id_by_old_ids  -- Dict, representing new custom
      variable IDs, mapped by the existing custom IDs they're replacing
    dwsupport_model  -- Dict, representing current DWSupport schema

    Exceptions:
    CopyTableUnsupportedTableType  -- unsupported source_id
    CopyTableDuplicateCopyName  -- new_table_name already exists
    CopyTableMissingVariableCustomIdentifiers  -- missing values in
      new_custom_id_by_old_ids
    CopyTableNonuniqueVariableCustomIdentifiers  -- values in
      new_custom_id_by_old_ids are not globally unique

    >>> from pprint import pprint
    >>> # Check generated objects
    >>> source = 'trawl.catch_fact'
    >>> proj = 'trawl' #same project
    >>> table = 'new_catch_fact' #different table name
    >>> custom_ids = {} #none
    >>> model = { 'tables': [ { 'name': 'catch_fact', 'type': 'fact'
    ...                        ,'project': 'trawl'}
    ...                      ,{ 'name': 'depth_dim', 'type': 'dimension'
    ...                        ,'project': 'warehouse'}
    ...                      ,{ 'name': 'operation_fact', 'type': 'fact'
    ...                        ,'project': 'acoustics'}]
    ...          ,'associations': [ { 'table': 'catch_fact'
    ...                              ,'parent': 'depth_dim'}
    ...                            ,{ 'table': 'operation_fact'
    ...                              ,'parent': 'depth_dim'}]
    ...          ,'variables': [ { 'table': 'catch_fact'
    ...                           ,'column': 'retained_kg'}
    ...                         ,{ 'table': 'catch_fact'
    ...                           ,'column': 'retained_ct'}
    ...                         ,{ 'table': 'depth_dim'
    ...                           ,'column': 'meters'}
    ...                         ,{ 'table': 'depth_dim'
    ...                           ,'column': 'fathoms'}
    ...                         ,{ 'table': 'operation_fact'
    ...                           ,'column': 'frequency_mhz'}]
    ...          ,'variable_custom_identifiers': [
    ...               { 'id': 'freq', 'table': 'operation_fact'
    ...                ,'column': 'frequency_mhz'}]
    ...          ,'queries': [
    ...               {'name': 'core',
    ...                'table': 'catch_fact',
    ...                'variables': {'depth_dim': ['meters'],
    ...                              'catch_fact': ['retained_kg']}
    ...               }]
    ... }
    >>> out = _copy_fact_table(source, proj, table, custom_ids, model)
    >>> new_table, new_assocs, new_vars, new_customs, new_queries = out
    >>> pprint(new_table)
    {'name': 'new_catch_fact', 'project': 'trawl', 'type': 'fact'}
    >>> pprint(new_assocs)
    [{'parent': 'depth_dim', 'table': 'new_catch_fact'}]
    >>> pprint(new_vars)
    [{'column': 'retained_kg', 'table': 'new_catch_fact'},
     {'column': 'retained_ct', 'table': 'new_catch_fact'}]
    >>> pprint(new_customs)
    []
    >>> pprint(new_queries)
    [{'name': 'core',
      'table': 'new_catch_fact',
      'variables': {'depth_dim': ['meters'], 'new_catch_fact': ['retained_kg']}}]
    >>> # Check table with a customized field identifier
    >>> source = 'acoustics.operation_fact'
    >>> proj = 'cancelled' #different project
    >>> table = 'bad_operation_fact' # and new, unique table name
    >>> custom_ids = {'freq': 'bad_freq', 'extra_mapping': 'ignored'}
    >>> out = _copy_fact_table(source, proj, table, custom_ids, model)
    >>> new_table, new_assocs, new_vars, new_customs, new_queries = out
    >>> pprint(new_table)
    {'name': 'bad_operation_fact', 'project': 'cancelled', 'type': 'fact'}
    >>> pprint(new_assocs)
    [{'parent': 'depth_dim', 'table': 'bad_operation_fact'}]
    >>> pprint(new_vars)
    [{'column': 'frequency_mhz', 'table': 'bad_operation_fact'}]
    >>> pprint(new_customs)
    [{'column': 'frequency_mhz', 'id': 'bad_freq', 'table': 'bad_operation_fact'}]
    >>> pprint(new_queries)
    []
    >>> # Check ambiguous table name
    >>> table = 'operation_fact' #same name as 'acoustics' project table
    >>> _copy_fact_table(source, proj, table, custom_ids, model)
    Traceback (most recent call last):
       ...
    api.resources.source.warehouse.support.configure.CopyTableDuplicateCopyName: operation_fact
    """
    source_project, source_table = source_id.split('.')
    # create Table DTO
    source_table_generator = (t for t in dwsupport_model['tables']
                              if t['name'] == source_table
                              and t['project'] == source_project)
    all_table_names_generator = (t['name'] for t in dwsupport_model['tables'])
    new_table = dict(next(source_table_generator)) #construct a new dict
    new_table['name'] = new_table_name
    new_table['project'] = new_project_name
    if new_table['type'] != 'fact':
        raise CopyTableUnsupportedTableType(new_table['type'])
    if new_table_name in all_table_names_generator:
        raise CopyTableDuplicateCopyName(new_table_name)
    # create Variable DTOs
    def _set_table(dto_with_table):
        dto_with_table['table'] = new_table_name
        return dto_with_table
    new_variables = [_set_table(v) for v
                     in dwsupport_model['variables']
                     if v['table'] == source_table]
    # create Variable Custom Identifier DTOs
    new_variable_custom_identifiers = [
        custom for custom
        in dwsupport_model['variable_custom_identifiers']
        if custom['table'] == source_table]
    # (error if a custom identifier not in input parameters)
    existing_ids = {c['id'] for c in new_variable_custom_identifiers}
    unmapped_existing_ids = existing_ids.difference(
        set(new_custom_id_by_old_ids.keys())
    )
    if unmapped_existing_ids != set():
        needs_values = json.dumps(
            {key: "PROVIDE_NEW_UNIQUE_VALUE" #example, to prompt the user
             for key in unmapped_existing_ids}
        )
        raise CopyTableMissingVariableCustomIdentifiers(needs_values)
    # (error if any proposed IDs are not unique)
    nonunique_ids = set()
    input_repeats_ids = set()
    for new_id, count_in_input in Counter(new_custom_id_by_old_ids.values()).items():
        if new_id in existing_ids:
            nonunique_ids.add(new_id)
        if count_in_input > 1:
            input_repeats_ids.add(new_id)
    all_duplicates = nonunique_ids.union(input_repeats_ids)
    if all_duplicates != set():
        raise CopyTableNonuniqueVariableCustomIdentifiers(all_duplicates)
    for custom in new_variable_custom_identifiers:
        key = custom['id']#existing custom variable identifier
        new_id = new_custom_id_by_old_ids[key]
        custom['id'] = new_id
        custom['table'] = new_table_name
    # create Association DTOs
    new_associations = [_set_table(a) for a
                        in dwsupport_model['associations']
                        if a['table'] == source_table]
    # create Query DTOs
    new_queries = [_set_table(q) for q
                        in dwsupport_model['queries']
                        if q['table'] == source_table]
    for new_query in new_queries:
        # update the variable table names too (if any)
        for table_name, column_list in new_query['variables'].items():
            if table_name == source_table:
                # transfer columns to new table name
                new_query['variables'][new_table_name] = column_list
                # delete query's list of columns from original table
                del(new_query['variables'][source_table])
    return new_table, new_associations, new_variables, \
        new_variable_custom_identifiers, new_queries
Example #27
 def on_post(self, request, resp, **kwargs):
     """
     Make copy of referenced DWSupport table, with specified changes
     """
     session_user = auth.get_user_id(request)
     with warehouse.get_source_model_session() as dwsupport_model:
         if not management_auth.is_management_permitted(session_user, dwsupport_model):
             msg = 'Warehouse management not authorized'
             raise falcon.HTTPUnauthorized(title='401', description=msg)
         #else
         sources = source.SourceUtil.get_list_of_data_sources(
              request.url
             ,auth.get_user_id(request)
             ,dwsupport_model)
         requested_source_id = selection.get_requested_dataset_id(sources, request, resp, kwargs)
         try:
             new_table = request.params['name']
             new_project = request.params['project-name']
             new_variable_custom_identifiers = request.params['variable-custom-identifiers']
         except KeyError as error:
             raise falcon.HTTPBadRequest( #TODO: add functional test coverage
                     title="Missing Parameter"
                     ,description=(
                        "Unable to make copy of"
                        " data source: '{}'."
                        " (Copy request must specify HTTP POST parameter: {})"
                        ).format(requested_source_id, error))
         try:
             new_custom_ids_by_old_id = json.loads(new_variable_custom_identifiers)
          except json.JSONDecodeError as e:
             msg = ("Unable to make copy of"
                    " data source: '{}'."
                    " (Parameter is not valid JSON object: {})"
                   ).format(requested_source_id, e)
             raise falcon.HTTPInvalidParam(msg, 'variable-custom-identifiers')
         if type(new_custom_ids_by_old_id) != dict:
             msg = ("Unable to make copy of"
                    " data source: '{}'."
                    ' Parameter must be a JSON object: {{"existing_table_custom_variable_id": "new_id"}}'
                   ).format(requested_source_id)
             raise falcon.HTTPInvalidParam(msg, 'variable-custom-identifiers')
         try:
             new_dto_tuple = configure.copy_table(
                  requested_source_id
                 ,new_project
                 ,new_table
                 ,new_custom_ids_by_old_id
             )
             new_table, new_associations, new_variables, \
             new_variable_custom_identifiers, new_queries = new_dto_tuple
             resp.body = json.dumps(
                  { 'table': new_table, 'associations': new_associations
                   ,'variables': new_variables
                   ,'variable_custom_identifiers': new_variable_custom_identifiers
                   ,'queries': new_queries}
                 ,indent='\t'
             )
             return
         except configure.CopyTableUnsupportedTableType as e:
             raise falcon.HTTPBadRequest( #TODO: add functional test coverage
                     title="Bad Path"
                     ,description=("Copy only supported for tables of type"
                                   " 'fact'. (The '{}' data source in URL is"
                                   " type: '{}')"
                                  ).format(requested_source_id, e)
             )
         except configure.CopyTableDuplicateCopyName as e:
             msg = ("Unable to make copy of"
                    " data source: '{}'."
                    " (Please specify a new table name, a table with"
                    " the provided name already exists: {})"
                   ).format(requested_source_id, e)
             raise falcon.HTTPInvalidParam(msg, 'name')
         except configure.CopyTableNonuniqueVariableCustomIdentifiers as e:
             msg = ("Unable to make copy of"
                    " data source: '{}'."
                    " (The following new IDs must not duplicate any other"
                    " variable custom IDs: {})"
                   ).format(requested_source_id, e)
             raise falcon.HTTPInvalidParam(msg, 'variable-custom-identifiers')
         except configure.CopyTableMissingVariableCustomIdentifiers as e:
             msg = ("Unable to make copy of"
                    " data source: '{}'."
                    " (Copy request parameter must include new, unique"
                    " IDs for these existing variable custom IDs: {})"
                   ).format(requested_source_id, e)
             raise falcon.HTTPInvalidParam(msg, 'variable-custom-identifiers')
Example #28
def get_requested_filters_implicit(request):
    """
    Extracts the requested dataset equality filters, specified directly in the
    URL query string.

    Keyword parameters:
    request -- Falcon request

    >>> import falcon.testing #Set up fake Falcon app
    >>> tb = falcon.testing.TestBase()
    >>> tb.setUp()
    >>> tr = falcon.testing.TestResource()
    >>> tb.api.add_route(tb.test_route, tr)
    >>> wsgi_iterable = tb.simulate_request(tb.test_route) #populate req
    >>> tr.req.query_string = 'variables=var1&var2=42'
    >>> get_requested_filters_implicit( tr.req)
    ['var2=42']
    >>> tr.req.query_string = 'variables=var1&filters=var2=42'
    >>> get_requested_filters_implicit( tr.req)
    []
    >>> tr.req.query_string = ''
    >>> get_requested_filters_implicit( tr.req)
    []
    >>> tr.req.query_string = 'var2=42&var3=9'
    >>> l = get_requested_filters_implicit( tr.req)
    >>> l.sort()  # filter order isn't stable; sort, for testing
    >>> l
    ['var2=42', 'var3=9']
    >>> tr.req.query_string = 'variables=v1&var2=42&var3=9'
    >>> l = get_requested_filters_implicit( tr.req)
    >>> l.sort()  # filter order isn't stable; sort, for testing
    >>> l
    ['var2=42', 'var3=9']
    >>> tr.req.query_string = 'var2=42&var2=9'
    >>> l = get_requested_filters_implicit( tr.req)
    >>> l
    ['var2|=["42", "9"]']
    """
    bool_use_empty_string_for_empty_field_values = True  #falcon parameter
    dict_query_string = falcon.util.uri.parse_query_string(
        request.query_string, bool_use_empty_string_for_empty_field_values)
    # get all candidate filter params (query params, without defined functions)
    candidate_filter_keys = []
    for key in dict_query_string.keys():
        if key == '':
            continue  #skip any empty key.
        if key not in ReservedParameterNames.get_all():
            candidate_filter_keys.append(key)
    # construct a list of filter strings
    filter_strings = []
    for key in candidate_filter_keys:
        #TODO: check if key is a valid field name
        parameter_value = dict_query_string[key]
        # translate multiple values, into an 'OR' filter string
        if isinstance(parameter_value, list):
            #Falcon has converted multiple parameter values into a list
            parameter_value.sort()  # sort strings ('88' < '9') to make things pretty
            json_value = json.dumps(parameter_value)
            new_filter_string = '{}|={}'.format(key, json_value)
            filter_strings.append(new_filter_string)
            continue
        # translate single parameter values, into a single equality filter.
        if isinstance(parameter_value, str):
            new_filter_string = '{}={}'.format(key, parameter_value)
            filter_strings.append(new_filter_string)
            continue
    return filter_strings
Example #29
        db.session.commit()
        if int(y["hearing_slot"]) != -1:
            add_to_slotlist(case.cin, case.hearing_slot, case.hearing_date.year,
                            case.hearing_date.month, case.hearing_date.day)
        data_ret = {}
        data_ret['is_added'] = "1"
        data_ret['cin'] = str(case.cin)
        data_ret['message'] = "The Case has been added successfully!!"
        json_data_ret = json.dumps(data_ret)
        return json_data_ret
    except Exception:
        db.session.rollback()
        data_ret = {}
        data_ret['is_added'] = "0"
        data_ret['message'] = "Sorry!! There was a problem adding the case!!"
        json_data_ret = json.dumps(data_ret)
        return json_data_ret


def get_user_list():
    ret_dict = {}
    ret_dict['usr_list'] = []
    recr = User.query.all()
    for i in recr:
        if i.user_type != "Registrar":
            dct = {}
            dct['username'] = i.username
            dct['name'] = i.name
            dct['usr_type'] = i.user_type
            ret_dict['usr_list'].append(dct)
    ret_json = json.dumps(ret_dict)