def POST(self, name): """Login - obtain a token""" logger.debug(web.data()) try: data = json.loads(web.data().decode('utf-8')) except json.decoder.JSONDecodeError: raise Error(BADPARAMS, msg="Could not decode JSON.") email = data.get('email') passwd = data.get('password') try: assert email and is_valid_email(email) except AssertionError: raise Error(BADPARAMS, msg="Invalid email provided.") try: assert passwd account = Account(email, passwd) assert account.is_valid() except AssertionError: raise Error(BADAUTH) account.reload() result = account.__dict__ result['token'] = account.issue_token() del result['password'] del result['hash'] return [result]
def POST(self, name): """Create an account""" logger.debug(web.data()) try: data = json.loads(web.data().decode('utf-8')) except json.decoder.JSONDecodeError: raise Error(BADPARAMS, msg="Could not decode JSON.") account_id = data.get('account_id') email = data.get('email') passwd = data.get('password') name = data.get('name') surname = data.get('surname') authority = data.get('authority') if not passwd or not name or not surname: raise Error(BADPARAMS) account = AccountController.create_acccount(email, passwd, account_id, name, surname, authority) account = account.__dict__ del account['password'] del account['hash'] return [account]
def search_by_id(cin_str, username):
    try:
        search_by_id_charge = 100
        ret_dict = {'due_amt': "0"}
        cin = int(cin_str)
        case = CourtCase.query.get(cin)
        if case is None:
            ret_dict["confirm"] = "0"
            ret_dict["message"] = "Please search for a valid CIN number!!"
        elif not case.is_closed:
            ret_dict["confirm"] = "0"
            ret_dict["message"] = "This is a pending case!!"
        else:
            def fmt(d):
                # keep the original "D-M-YYYY" (non-zero-padded) date format
                return "{}-{}-{}".format(d.day, d.month, d.year)

            ret_dict['case_details'] = {
                'CIN': str(case.cin),
                'def_name': case.defendent_name,
                'def_addr': case.defendent_address,
                'crime_type': case.crime_type,
                'crime_date': fmt(case.crime_date),
                'date_arrest': fmt(case.date_of_arrest),
                'start_date': fmt(case.starting_date),
                'latest_hearing_date': fmt(case.hearing_date),
                'expected_completion_date': fmt(case.expected_completion_date),
                'crime_loc': case.crime_location,
                'arresting_off_name': case.arresting_officer_name,
                'name_pres_judge': case.judge_name,
                'pros_name': case.public_prosecutor_name,
                'adj_details': [],
            }
            # hearing details are stored as '|'-separated JSON objects
            adj = case.hearing_details
            if adj is not None:
                for entry in adj.split('|'):
                    jobj = json.loads(entry)
                    ret_dict['case_details']['adj_details'].append(
                        {"date": jobj["date"], "reason": jobj["reason"]})

        record = User.query.filter_by(username=username).first()
        if record is None:
            return json.dumps({
                "confirm": "0",
                "message": "Please enter a valid username!!",
            })
        if record.user_type == 'Lawyer':
            # lawyers are billed per search
            record.due_amount = record.due_amount + search_by_id_charge
            db.session.commit()
            ret_dict['due_amt'] = str(record.due_amount)
        return json.dumps(ret_dict)
    except Exception:
        db.session.rollback()
        return json.dumps({"confirm": "0", "message": "Some error occurred"})
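
# `search_by_id` parses `hearing_details` as a single string of
# '|'-separated JSON objects. A sketch of how such a value could be produced
# (field names inferred from the parser above; the real writer is not shown
# in this section, and the values are made up):
import json

adjournments = [
    {"date": "3-1-2020", "reason": "Witness unavailable"},
    {"date": "17-2-2020", "reason": "Counsel requested adjournment"},
]
hearing_details = '|'.join(json.dumps(entry) for entry in adjournments)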
def get_list_of_variables(str_dataset_id):
    """
    Return a list of dicts representing all of a source's available variables

    Keyword Parameters:
    str_dataset_id -- String, representing the API id for the dataset
    """
    if warehouse.is_warehouse(str_dataset_id):
        return warehouse.get_list_of_warehouse_variables()

    list_variables = []
    for dict_source in loader.get_list_of_etl_dicts():
        if dict_source['id'] == str_dataset_id:
            # retrieve & decode the configured list of fields + types
            str_field_types_json = dict_source['python_types']
            dict_field_types = json.loads(str_field_types_json)
            # add the field names to our list
            list_variables.extend(dict_field_types.keys())
            return list_variables
    # if the loop did not return, continue searching the db-configured
    # sources; break the dataset identifier down into project/source parts
    with warehouse.get_source_model_session() as current_model:
        project_name, source_name = str_dataset_id.split('.')
        source_tables = warehouse.get_source_tables()
        for source_table in source_tables:
            if source_table['name'] == source_name:
                variables_by_field = {}
                source_type = source_table['type']
                if source_type not in ['fact', 'dimension', 'dimension role']:
                    #TODO: make this exception a locally defined class
                    raise NotImplementedError(
                        'no method to list variables for {} tables: {}'.format(
                            source_type, source_name))
                if source_type == 'fact':
                    two_dicts = warehouse.get_fact_variables(
                        source_table, current_model)
                    variables_by_field, unused = two_dicts
                if source_type == 'dimension':
                    # retrieve the fields-to-types mapping
                    variables_by_field = warehouse.get_variables(source_table)
                if source_type == 'dimension role':
                    # retrieve aliased versions of the underlying dimension's
                    # mapping
                    variables_by_field = warehouse.get_role_variables(
                        source_table)
                # add the variable dicts to our list
                list_variables.extend(variables_by_field.values())
                return list_variables
        else:
            # for-else: no matching source table was found
            str_msg = 'Unable to list variables, source id {} not found.'
            raise falcon.HTTPNotFound(
                description=str_msg.format(str_dataset_id))
def POST(self, name): """Add titles to an existing work""" data = json.loads(web.data().decode('utf-8')) title = data.get('title') work_id = data.get('UUID') or data.get('uuid') titles = strtolist(title) require_params_or_fail([work_id], "a (work) UUID") require_params_or_fail([titles], "at least a title") work = Work.find_or_fail(work_id, titles=titles) work.save() work.load_titles() work.load_identifiers() return [work.__dict__]
def get_list_of_warehouse_variables():
    """
    Return a list of names representing all available Warehouse variables
    """
    list_variables = []
    loader = api.config_loader
    for dict_source in loader.get_list_of_etl_dicts():
        str_dataset_id = dict_source['id']
        # retrieve & decode the configured list of fields + types
        str_field_types_json = dict_source['python_types']
        dict_field_types = json.loads(str_field_types_json)
        # prefix each field name with its dataset id, then add it to our list
        for str_source_variable in dict_field_types.keys():
            str_warehouse_variable = util.prefix_field_name(
                str_source_variable, str_dataset_id)
            list_variables.append(str_warehouse_variable)
    return list_variables
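
# `util.prefix_field_name` is assumed to namespace a source field with its
# dataset id. A hedged sketch of the likely behaviour; the separator is a
# guess, as the real helper is not shown in this section:
def prefix_field_name(field_name, dataset_id):
    """Qualify a field name with its dataset id, e.g. 'trawl.catch.weight_kg'."""
    return '{}.{}'.format(dataset_id, field_name)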
def POST(self, name): """Create a work relation""" data = json.loads(web.data().decode('utf-8')) parent_uuid = data.get('parent_UUID') or data.get('parent_uuid') child_uuid = data.get('child_UUID') or data.get('child_uuid') require_params_or_fail([parent_uuid, child_uuid], 'a parent and a child UUID') parent = Work.find_or_fail(parent_uuid) child = Work.find_or_fail(child_uuid) parent.set_children([child.UUID]) parent.save() parent.load_titles() parent.load_identifiers() parent.load_children() parent.load_parents() return [parent.__dict__]
def POST(self, name): """Create a work""" data = json.loads(web.data().decode('utf-8')) wtype = data.get('type', '') title = data.get('title') uri = data.get('URI') or data.get('uri') parent = data.get('parent') child = data.get('child') titles = strtolist(title) uris = strtolist(uri) require_params_or_fail([wtype], 'a (work) type') require_params_or_fail(titles, 'at least one title') require_params_or_fail(uris, 'at least one URI') WorkType.find_or_fail(wtype) for i in uris: # attempt to get scheme from URI try: ident = i.get('URI') or i.get('uri') scheme, value = Identifier.split_uri(ident) try: i['canonical'] = i['canonical'] in (True, "true", "True") except Exception: i['canonical'] = False except Exception: identifier = ident if ident else '' raise Error(BADPARAMS, msg="Invalid URI '%s'" % (identifier)) # check whether the URI scheme exists in the database UriScheme.find_or_fail(scheme) # instantiate a new work with the input data uuid = generate_uuid() work = Work(uuid, wtype, titles, uris) # check relatives and associate them with the work work.check_and_set_relatives(parent, child) work.save() return [work.__dict__]
def POST(self, name): """Add identifiers to an existing work""" data = json.loads(web.data().decode('utf-8')) uri = data.get('URI') or data.get('uri') canonical = data.get('canonical') in (True, "true", "True") work_id = data.get('UUID') or data.get('uuid') require_params_or_fail([uri, work_id], "a (work) UUID and a URI") try: scheme, value = Identifier.split_uri(uri) uris = [{'URI': uri, 'canonical': canonical}] except Exception: raise Error(BADPARAMS, msg="Invalid URI '%s'" % (uri)) UriScheme.find_or_fail(scheme) work = Work.find_or_fail(work_id, uris=uris) work.save() work.load_identifiers() return [work.__dict__]
def get_filter_processed_3tuple(str_json_python_types, str_filter_urlencoded):
    """
    Return a (field, operator string, Object bind value) tuple from a
    URL-decoded filter

    Keyword Parameters:
    str_json_python_types -- String representing a JSON Object, denominating
        data set fields & associated Python types.

    >>> test_func = get_filter_processed_3tuple
    >>> test_func( '{"var": "str", "baz": "float"}', 'var=1')
    ('var', '=', '1')
    >>> test_func( '{"var": "str", "baz": "float"}', 'var>=1')
    ('var', '>=', '1')
    >>> test_func( '{"var": "str", "baz": "float"}', 'var<=1')
    ('var', '<=', '1')
    >>> test_func( '{"var": "str", "baz": "float"}', 'var!=1')
    ('var', '!=', '1')
    >>> test_func( '{"var": "str", "baz": "float"}', 'var<1')
    ('var', '<', '1')
    >>> test_func( '{"var": "str", "baz": "float"}', 'var>1')
    ('var', '>', '1')
    >>> test_func( '{"var": "str", "baz": "float"}', 'var~=1')
    ('var', '~=', '1')
    >>> test_func( '{"name": "str", "score": "float"}', 'name=bob')
    ('name', '=', 'bob')
    >>> test_func( '{"name": "str", "score": "float"}', 'name=1.2')
    ('name', '=', '1.2')
    >>> test_func( '{"name": "str", "score": "float"}', 'score=1.2')
    ('score', '=', 1.2)
    >>> test_func( '{"name": "str", "score": "float"}', 'name|=["bob", "bif"]')
    ('name', '|=', ['bob', 'bif'])
    >>> test_func( '{"name": "str", "score": "float"}', 'score|=["1.2", "2.1"]')
    ('score', '|=', [1.2, 2.1])
    """
    #FIXME: above tests aren't very good
    # a FilterParseError from the partitioner propagates to the caller
    list_parsed_elements = get_list_filter_partitioned(str_filter_urlencoded)
    # now process the elements
    #FIXME: validate 'list_parsed_elements' assumptions below
    str_left, str_operator, str_right = list_parsed_elements
    # copy the left-hand element & operator, verbatim
    list_filter_parsed = [str_left, str_operator]
    # identify the Python type of the left-hand element
    try:
        dict_types_by_field_name = json.loads(str_json_python_types)
        list_names_unknown_case = list(dict_types_by_field_name.keys())
        # assume the left-hand element is the field name
        int_index = get_index_caseinsensitive(str_left,
                                              list_names_unknown_case)
    except ValueError:
        # not found, abort!
        str_msg = 'Filters variable "{}" does not exist in data source'
        raise FilterVariableError(str_msg.format(str_left))
    # coerce the right-hand filter value to the type of the left-hand field
    try:
        str_field_name = list_names_unknown_case[int_index]
        str_field_type = dict_types_by_field_name[str_field_name]
        # locate the constructor function
        type_constructor = pydoc.locate(str_field_type)
        if type_constructor is None:
            # unknown type!
            raise SourceTypeError(str_field_type)
        if str_operator == '~=':
            # for RegExp, always attempt string comparison
            list_filter_parsed.append(str_right)
            return tuple(list_filter_parsed)
        if str_operator == '|=':
            # for OR equality, coerce each listed value to the field's type
            str_values = json.loads(str_right)
            values = [str_to_python_obj(x, type_constructor)
                      for x in str_values]
            list_filter_parsed.append(values)
            return tuple(list_filter_parsed)
        # else, attempt dynamic type coercion
        obj_right = str_to_python_obj(str_right, type_constructor)
        list_filter_parsed.append(obj_right)
        return tuple(list_filter_parsed)
    except TypeError as e:
        raise FilterValueError(str(e))
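
# `get_index_caseinsensitive` is assumed to return the index of a name in a
# list of names, ignoring case, and to raise ValueError when the name is
# absent (which the caller above converts into FilterVariableError).
# A minimal sketch consistent with that usage:
def get_index_caseinsensitive(name, names):
    """Return the index of `name` in `names`, compared case-insensitively."""
    lowered = [n.lower() for n in names]
    return lowered.index(name.lower())  # raises ValueError if not found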
def base_callback(_, response):
    if raise_for_status:
        response.raise_for_status()
    return response.content if raw else json.loads(response.text)
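
# `base_callback` closes over `raise_for_status` and `raw`, so it is
# presumably defined inside an enclosing factory. A hedged sketch of what
# that factory might look like (the `make_callback` name and signature are
# assumptions; the enclosing function is not shown in this section):
import json

def make_callback(raise_for_status=True, raw=False):
    def base_callback(_, response):
        # optionally surface HTTP errors, then decode the payload
        if raise_for_status:
            response.raise_for_status()
        return response.content if raw else json.loads(response.text)
    return base_callback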
def POST(self, name=None):
    """Create a new event"""
    data = json.loads(web.data().decode('utf-8'))
    return save_event(data)
def on_post(self, request, resp, **kwargs):
    """
    Make a copy of the referenced DWSupport table, with specified changes
    """
    session_user = auth.get_user_id(request)
    with warehouse.get_source_model_session() as dwsupport_model:
        if not management_auth.is_management_permitted(session_user,
                                                       dwsupport_model):
            msg = 'Warehouse management not authorized'
            raise falcon.HTTPUnauthorized(title='401', description=msg)
        #else
        sources = source.SourceUtil.get_list_of_data_sources(
            request.url
            ,auth.get_user_id(request)
            ,dwsupport_model)
        requested_source_id = selection.get_requested_dataset_id(
            sources, request, resp, kwargs)
        try:
            new_table = request.params['name']
            new_project = request.params['project-name']
            new_variable_custom_identifiers = request.params[
                'variable-custom-identifiers']
        except KeyError as error:
            raise falcon.HTTPBadRequest( #TODO: add functional test coverage
                title="Missing Parameter"
                ,description=(
                    "Unable to make copy of"
                    " data source: '{}'."
                    " (Copy request must specify HTTP POST parameter: {})"
                ).format(requested_source_id, error))
        try:
            new_custom_ids_by_old_id = json.loads(
                new_variable_custom_identifiers)
        except json.JSONDecodeError as e:
            msg = ("Unable to make copy of"
                   " data source: '{}'."
                   " (Parameter is not valid JSON object: {})"
                   ).format(requested_source_id, e)
            raise falcon.HTTPInvalidParam(msg, 'variable-custom-identifiers')
        if not isinstance(new_custom_ids_by_old_id, dict):
            msg = ("Unable to make copy of"
                   " data source: '{}'."
                   ' Parameter must be a JSON object:'
                   ' {{"existing_table_custom_variable_id": "new_id"}}'
                   ).format(requested_source_id)
            raise falcon.HTTPInvalidParam(msg, 'variable-custom-identifiers')
        try:
            new_dto_tuple = configure.copy_table(
                requested_source_id
                ,new_project
                ,new_table
                ,new_custom_ids_by_old_id
            )
            new_table, new_associations, new_variables, \
                new_variable_custom_identifiers, new_queries = new_dto_tuple
            resp.body = json.dumps(
                {'table': new_table
                 ,'associations': new_associations
                 ,'variables': new_variables
                 ,'variable_custom_identifiers': new_variable_custom_identifiers
                 ,'queries': new_queries}
                ,indent='\t'
            )
            return
        except configure.CopyTableUnsupportedTableType as e:
            raise falcon.HTTPBadRequest( #TODO: add functional test coverage
                title="Bad Path"
                ,description=("Copy only supported for tables of type"
                              " 'fact'. (The '{}' data source in URL is"
                              " type: '{}')"
                              ).format(requested_source_id, e)
            )
        except configure.CopyTableDuplicateCopyName as e:
            msg = ("Unable to make copy of"
                   " data source: '{}'."
                   " (Please specify a new table name; a table with"
                   " the provided name already exists: {})"
                   ).format(requested_source_id, e)
            raise falcon.HTTPInvalidParam(msg, 'name')
        except configure.CopyTableNonuniqueVariableCustomIdentifiers as e:
            msg = ("Unable to make copy of"
                   " data source: '{}'."
                   " (The following new IDs must not duplicate any other"
                   " variable custom IDs: {})"
                   ).format(requested_source_id, e)
            raise falcon.HTTPInvalidParam(msg, 'variable-custom-identifiers')
        except configure.CopyTableMissingVariableCustomIdentifiers as e:
            msg = ("Unable to make copy of"
                   " data source: '{}'."
                   " (Copy request parameter must include new, unique"
                   " IDs for these existing variable custom IDs: {})"
                   ).format(requested_source_id, e)
            raise falcon.HTTPInvalidParam(msg, 'variable-custom-identifiers')
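
# An illustrative copy request this handler would accept. The parameter
# names come from the `request.params` lookups above; the endpoint path and
# all values are made up for the example:
#
#   POST /source/trawl.catch/copy
#       name=catch_copy
#       project-name=trawl
#       variable-custom-identifiers={"old_custom_id": "new_custom_id"}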