def get_table(self, full_table_name):
    """get table from schemata
    """
    schema, table_name = full_table_name.replace('`', '').split('.')

    schema_module = self['schemata'].get(schema, None)

    if schema_module is None:
        raise LorisError(
            f'schema {schema} not in database; refresh database')

    table_name = table_name.strip('_#')
    table_name_list = table_name.split('__')

    if len(table_name_list) == 1:
        table_name = to_camel_case(table_name)
        try:
            return getattr(schema_module, table_name)
        except AttributeError:
            raise LorisError(f'table {table_name} not in schema {schema}; '
                             'refresh database')
    else:
        assert len(table_name_list) == 2, \
            f'invalid table name {table_name}.'
        table_name = to_camel_case(table_name_list[0])
        part_table_name = to_camel_case(table_name_list[1])
        try:
            return getattr(getattr(schema_module, table_name), part_table_name)
        except AttributeError:
            raise LorisError(
                f'table {table_name} not in schema {schema} '
                f'or part table {part_table_name} not in table {table_name}'
                '; refresh database')
def list_tables(jwt_payload: dict, schema_name: str):
    """
    List all tables and their type given a schema.

    :param jwt_payload: Dictionary containing databaseAddress, username and password
        strings
    :type jwt_payload: dict
    :param schema_name: Name of schema to list all tables from
    :type schema_name: str
    :return: Contains a key for each table type and its corresponding table names
    :rtype: dict
    """
    DJConnector.set_datajoint_config(jwt_payload)

    # Get list of table names
    tables_name = dj.Schema(schema_name, create_schema=False).list_tables()

    # Dict to store the list of table names for each type
    tables_dict_list = dict(manual_tables=[], lookup_tables=[], computed_tables=[],
                            imported_tables=[], part_tables=[])

    # Loop through each table name to figure out what type it is and add it to
    # tables_dict_list
    for table_name in tables_name:
        table_type = dj.diagram._get_tier(
            '`' + schema_name + '`.`' + table_name + '`').__name__
        if table_type == 'Manual':
            tables_dict_list['manual_tables'].append(
                dj.utils.to_camel_case(table_name))
        elif table_type == 'Lookup':
            tables_dict_list['lookup_tables'].append(
                dj.utils.to_camel_case(table_name))
        elif table_type == 'Computed':
            tables_dict_list['computed_tables'].append(
                dj.utils.to_camel_case(table_name))
        elif table_type == 'Imported':
            tables_dict_list['imported_tables'].append(
                dj.utils.to_camel_case(table_name))
        elif table_type == 'Part':
            table_name_parts = table_name.split('__')
            tables_dict_list['part_tables'].append(
                to_camel_case(table_name_parts[-2]) + '.' +
                to_camel_case(table_name_parts[-1]))
        else:
            raise UnsupportedTableType(table_name + ' is of unknown table type')

    return tables_dict_list
def _list_tables(jwt_payload: dict, schema_name: str) -> dict:
    """
    List all tables and their type given a schema.

    :param jwt_payload: Dictionary containing databaseAddress, username, and password
        strings
    :type jwt_payload: dict
    :param schema_name: Name of schema to list all tables from
    :type schema_name: str
    :return: Contains a key for each table type where values are the respective list of
        table names
    :rtype: dict
    """
    _DJConnector._set_datajoint_config(jwt_payload)

    # Get list of table names
    tables_name = dj.Schema(schema_name, create_schema=False).list_tables()

    # Dict to store the list of table names for each type
    tables_dict_list = dict(manual=[], lookup=[], computed=[], imported=[], part=[])

    # Loop through each table name to figure out what type it is and add it to
    # tables_dict_list
    for table_name in tables_name:
        table_type = dj.diagram._get_tier(
            "`" + schema_name + "`.`" + table_name + "`").__name__
        if table_type == "Manual":
            tables_dict_list["manual"].append(
                dj.utils.to_camel_case(table_name))
        elif table_type == "Lookup":
            tables_dict_list["lookup"].append(
                dj.utils.to_camel_case(table_name))
        elif table_type == "Computed":
            tables_dict_list["computed"].append(
                dj.utils.to_camel_case(table_name))
        elif table_type == "Imported":
            tables_dict_list["imported"].append(
                dj.utils.to_camel_case(table_name))
        elif table_type == "Part":
            table_name_parts = table_name.split("__")
            tables_dict_list["part"].append(
                to_camel_case(table_name_parts[-2]) + "." +
                to_camel_case(table_name_parts[-1]))
        else:
            raise UnsupportedTableType(table_name + " is of unknown table type")

    return tables_dict_list
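# A hedged usage sketch for the listing helpers above; it needs a reachable database,
# and the credentials, schema name, and resulting table names below are made up. Only
# the shape of the returned dict follows from the code above.
example_payload = {
    "databaseAddress": "db.example.com",
    "username": "user",
    "password": "secret",
}
tables = _list_tables(example_payload, "alpha_company")
# e.g. {'manual': ['Computer'], 'lookup': [], 'computed': [],
#       'imported': [], 'part': ['Computer.Accessory']}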
def create_userrelation_from_table(self, table_name):
    """
    Creates the appropriate python user relation classes from tables in a database.
    The tier of the class is inferred from the table name.

    Schema stores the class objects in a class dictionary and returns those when
    prompted for the same table from the same database again. This way, the id of
    both returned class objects is the same and comparison with python's "is" works
    correctly.
    """
    class_name = to_camel_case(table_name)

    def _make_tuples(other, key):
        raise NotImplementedError(
            "This is an automatically created class. _make_tuples is not implemented."
        )

    if (self.database, table_name) in Schema.table2class:
        class_name, class_obj = Schema.table2class[self.database, table_name]
    else:
        if re.fullmatch(Part._regexp, table_name):
            groups = re.fullmatch(Part._regexp, table_name).groupdict()
            master_table_name = groups['master']
            master_name, master_class = self.create_userrelation_from_table(
                master_table_name)
            class_name = to_camel_case(groups['part'])
            class_obj = type(class_name, (Part, ), dict(definition=...))
            setattr(master_class, class_name, class_obj)
            class_name, class_obj = master_name, master_class
        elif re.fullmatch(Computed._regexp, table_name):
            class_obj = type(
                class_name, (Computed, ),
                dict(definition=..., _make_tuples=_make_tuples))
        elif re.fullmatch(Imported._regexp, table_name):
            class_obj = type(
                class_name, (Imported, ),
                dict(definition=..., _make_tuples=_make_tuples))
        elif re.fullmatch(Lookup._regexp, table_name):
            class_obj = type(class_name, (Lookup, ), dict(definition=...))
        elif re.fullmatch(Manual._regexp, table_name):
            class_obj = type(class_name, (Manual, ), dict(definition=...))
        else:
            class_obj = None
        Schema.table2class[self.database, table_name] = class_name, class_obj

    return class_name, class_obj
def get_table(self, full_table_name, as_string=False):
    """get table from schemata using full_table_name
    """
    schema, table_name = full_table_name.replace('`', '').split('.')

    schema_module = self['schemata'].get(schema, None)

    if schema_module is None:
        raise LorisError(
            f'schema {schema} not in database; refresh database')

    table_name = table_name.strip('_#')
    table_name_list = table_name.split('__')

    if as_string:
        output = schema
    else:
        output = schema_module

    for tname in table_name_list:
        tname = to_camel_case(tname)
        try:
            if as_string:
                output = '.'.join([output, tname])
            else:
                output = getattr(output, tname)
        except AttributeError:
            raise LorisError(f'table {table_name} not in schema {schema}; '
                             'refresh database')

    return output
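# A small, runnable sketch of the name handling in get_table above (as_string branch
# only). FakeConfig is a stand-in, an assumption, for whatever loris object carries the
# 'schemata' mapping, and the schema/table names are made up; it only needs
# to_camel_case (from datajoint.utils), which the snippet above already relies on.
class FakeConfig(dict):
    get_table = get_table  # reuse the function above as a method

fake = FakeConfig(schemata={'experiment': object()})
assert fake.get_table('`experiment`.`__session__recording`',
                      as_string=True) == 'experiment.Session.Recording'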
def spawn_missing_classes(self):
    """
    Creates the appropriate python user relation classes from tables in
    the database and places them in the context.
    """
    tables = [row[0] for row in self.connection.query(
        'SHOW TABLES in `%s`' % self.database)]

    # declare master relation classes
    master_classes = {}
    part_tables = []
    for table_name in tables:
        class_name = to_camel_case(table_name)
        if class_name not in self.context:
            try:
                cls = next(cls for cls in (Lookup, Manual, Imported, Computed)
                           if re.fullmatch(cls.tier_regexp, table_name))
            except StopIteration:
                if re.fullmatch(Part.tier_regexp, table_name):
                    part_tables.append(table_name)
            else:
                master_classes[table_name] = type(class_name, (cls,), dict())

    # attach parts to masters
    for part_table in part_tables:
        groups = re.fullmatch(Part.tier_regexp, part_table).groupdict()
        class_name = to_camel_case(groups['part'])
        try:
            master_class = master_classes[groups['master']]
        except KeyError:
            # if master not found among the spawned classes, check in the context
            master_class = self.context[to_camel_case(groups['master'])]
            if not hasattr(master_class, class_name):
                part_class = type(class_name, (Part,), dict(definition=...))
                part_class._master = master_class
                self.process_relation_class(part_class, context=self.context,
                                            assert_declared=True)
                setattr(master_class, class_name, part_class)
        else:
            setattr(master_class, class_name, type(class_name, (Part,), dict()))

    # place classes in context upon decorating them with the schema
    for cls in master_classes.values():
        self.context[cls.__name__] = self(cls)
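# A hedged usage sketch for spawn_missing_classes. The call below follows the stock
# DataJoint pattern (a reachable, configured database is assumed and the schema name is
# hypothetical); in the variant above, the spawned classes end up in schema.context
# rather than in the caller's namespace.
import datajoint as dj

schema = dj.schema('alpha_company')
schema.spawn_missing_classes()
# Afterwards, CamelCase class names are available, e.g. Computer for `computer`
# and Computer.Accessory for `computer__accessory`.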
def delete_tuple(jwt_payload: dict):
    """
    Route to delete a specific record.

    Expects:
        (html:GET:Authorization): Must include in format of: bearer <JWT-Token>
        (html:POST:JSON): {"schemaName": <schema_name>, "tableName": <table_name>,
            "restrictionTuple": <tuple_to_restrict_table_by>}
        NOTE: Table name must be in CamelCase

    :param jwt_payload: Dictionary containing databaseAddress, username and password
        strings.
    :type jwt_payload: dict
    :return: If successful then returns "Delete Successful" otherwise returns error
    :rtype: dict
    """
    try:
        # Attempt to delete tuple
        DJConnector.delete_tuple(
            jwt_payload,
            request.json["schemaName"],
            request.json["tableName"],
            request.json["restrictionTuple"],
            **{
                k: v.lower() == 'true'
                for k, v in request.args.items() if k == 'cascade'
            },
        )
        return "Delete Successful"
    except IntegrityError as e:
        match = foreign_key_error_regexp.match(e.args[0])
        return dict(
            error=e.__class__.__name__,
            error_msg=str(e),
            child_schema=match.group('child').split('.')[0][1:-1],
            child_table=to_camel_case(
                match.group('child').split('.')[1][1:-1]),
        ), 409
    except Exception as e:
        return str(e), 500
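# delete_tuple above relies on a module-level foreign_key_error_regexp that pulls the
# child table out of MySQL's foreign-key error message. The exact pattern is not shown
# in these snippets; the one below is an assumption, written against the message format
# quoted in the record-route docs ("... a foreign key constraint fails
# (`schema`.`table`, CONSTRAINT ...").
import re

foreign_key_error_regexp = re.compile(
    r".*a foreign key constraint fails\s*"
    r"\((?P<child>`[^`]+`\.`[^`]+`), CONSTRAINT.*"
)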
def create_param_expansion(
    f_name,
    container_table,
    fn_field=None,
    config_field=None,
    resolver=None,
    suffix="Param",
    default_to_str=False,
):
    """
    Given a function name `f_name` as would be found in the `container_table` class,
    this will create a new DataJoint computed table subclass with the correct
    definition to expand blobs corresponding to the `f_name`'s arguments.

    The `container_table` must be a class (not an instance), and is also expected to
    implement a `resolve_fn` method that can be used to resolve the name of the
    function to a specific function object. The names of the attributes for the
    function name and function argument object inside `container_table` are
    automatically inferred based on attribute names (i.e. ending with `_fn` and
    `_config`, respectively). Alternatively, you can supply a `resolver` function to
    resolve the function name, and also specify the names of the function and config
    object attributes via `fn_field` and `config_field`. Resolver functions are
    implemented in nnfabrik.builder and can be imported from there (`resolve_model`).

    The resulting computed table will have a name of the form `MyFunctionNameParam`
    for a function named `my_function_name`. In other words, the name is converted
    from snake_case to CamelCase and `suffix` (defaults to 'Param') is appended.
    """
    if fn_field is None:
        fn_field = next(
            v for v in container_table.heading.attributes.keys() if v.endswith("_fn")
        )
    if config_field is None:
        config_field = next(
            v for v in container_table.heading.attributes.keys() if v.endswith("_config")
        )

    resolver = resolver or (lambda x: container_table.resolve_fn(x))
    f = resolver(f_name)
    def_str = make_definition(f, default_to_str=default_to_str)

    class NewTable(dj.Computed):
        definition = """
        -> {}
        ---
        {}
        """.format(
            container_table.__name__, def_str
        )

        @property
        def key_source(self):
            return container_table & '{}="{}"'.format(fn_field, f_name)

        def make(self, key):
            entries = (container_table & key).fetch1(config_field)
            entries = cleanup_numpy_scalar(entries)
            key = dict(key, **entries)
            if default_to_str:
                for k, v in key.items():
                    if type(v) in [list, tuple]:
                        key[k] = str(v)
            self.insert1(key, ignore_extra_fields=True)

    NewTable.__name__ = to_camel_case(f.__name__) + suffix

    return NewTable
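# A hedged usage sketch for create_param_expansion above. Model stands for a
# hypothetical container table with model_fn/model_config attributes, schema for an
# existing dj.Schema, and the stored function name is made up; only the resolve_model
# import is taken from the docstring above.
from nnfabrik.builder import resolve_model

ModelParam = create_param_expansion(
    "models.my_model_fn",          # hypothetical value stored in Model.model_fn
    Model,                         # hypothetical container table class
    fn_field="model_fn",
    config_field="model_config",
    resolver=resolve_model,
)
schema(ModelParam)                 # register the generated class with the schema
ModelParam().populate()            # expand the config blobs into columns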
def record(jwt_payload: dict,
           schema_name: str,
           table_name: str) -> Union[dict, str, tuple]:
    ("""
    Handler for ``/schema/{schema_name}/table/{table_name}/record`` route.

    :param jwt_payload: Dictionary containing databaseAddress, username, and password
        strings.
    :type jwt_payload: dict
    :param schema_name: Schema name.
    :type schema_name: str
    :param table_name: Table name.
    :type table_name: str
    :return: If successful performs desired operation based on HTTP method, otherwise
        returns error.
    :rtype: :class:`~typing.Union[dict, str, tuple]`

    .. http:get:: /schema/{schema_name}/table/{table_name}/record

        Route to fetch records.

        **Example request**:

        .. sourcecode:: http

            GET /schema/alpha_company/table/Computer/record?limit=1&page=2&"""
     "order=computer_id%20DESC&restriction=W3siYXR0cmlidXRlTmFtZSI6ICJjb21wdXRlcl9tZW1vcnk"
     "iLCAib3BlcmF0aW9uIjogIj49IiwgInZhbHVlIjogMTZ9XQo="
     """ HTTP/1.1
            Host: fakeservices.datajoint.io
            Authorization: Bearer <token>

        **Example successful response**:

        .. sourcecode:: http

            HTTP/1.1 200 OK
            Vary: Accept
            Content-Type: application/json

            {
                "recordHeader": [
                    "computer_id", "computer_serial", "computer_brand",
                    "computer_built", "computer_processor", "computer_memory",
                    "computer_weight", "computer_cost", "computer_preowned",
                    "computer_purchased", "computer_updates", "computer_accessories"
                ],
                "records": [
                    [
                        "4e41491a-86d5-4af7-a013-89bde75528bd", "DJS1JA17G", "Dell",
                        1590364800, 2.2, 16, 4.4, "700.99", 0, 1603181061, null,
                        "=BLOB="
                    ]
                ],
                "totalCount": 2
            }

        **Example unexpected response**:

        .. sourcecode:: http

            HTTP/1.1 500 Internal Server Error
            Vary: Accept
            Content-Type: text/plain

            400 Bad Request: The browser (or proxy) sent a request that this server
            could not understand.

        :query schema_name: Schema name.
        :query table_name: Table name.
        :query limit: Limit of how many records per page. Defaults to ``1000``.
        :query page: Page requested. Defaults to ``1``.
        :query order: Sort order. Defaults to ``KEY ASC``.
        :query restriction: Base64-encoded ``AND`` sequence of restrictions. For
            example, you could restrict as
            ``[{"attributeName": "computer_memory", "operation": ``-
            ``">=", "value": 16}]`` with this param set as
            ``W3siYXR0cmlidXRlTmFtZSI6ICJjb21wdXRlcl9tZW1vcnkiLCAib3Bl``-
            ``cmF0aW9uIjogIj49IiwgInZhbHVlIjogMTZ9XQo=``. Defaults to no restriction.
        :reqheader Authorization: Bearer <OAuth2_token>
        :resheader Content-Type: text/plain, application/json
        :statuscode 200: No error.
        :statuscode 500: Unexpected error encountered. Returns the error message as a
            string.

    .. http:post:: /schema/{schema_name}/table/{table_name}/record

        Route to insert a record. Omitted attributes utilize the default if set.

        **Example request**:

        .. sourcecode:: http

            POST /schema/alpha_company/table/Computer/record HTTP/1.1
            Host: fakeservices.datajoint.io
            Accept: application/json
            Authorization: Bearer <token>

            {
                "records": [
                    {
                        "computer_id": "ffffffff-86d5-4af7-a013-89bde75528bd",
                        "computer_serial": "ZYXWVEISJ",
                        "computer_brand": "HP",
                        "computer_built": "2021-01-01",
                        "computer_processor": 2.7,
                        "computer_memory": 32,
                        "computer_weight": 3.7,
                        "computer_cost": 599.99,
                        "computer_preowned": 0,
                        "computer_purchased": "2021-02-01 13:00:00",
                        "computer_updates": 0
                    }
                ]
            }

        **Example successful response**:

        .. sourcecode:: http

            HTTP/1.1 200 OK
            Vary: Accept
            Content-Type: text/plain

            Insert Successful

        **Example unexpected response**:

        .. sourcecode:: http

            HTTP/1.1 500 Internal Server Error
            Vary: Accept
            Content-Type: text/plain

            400 Bad Request: The browser (or proxy) sent a request that this server
            could not understand.

        :reqheader Authorization: Bearer <OAuth2_token>
        :resheader Content-Type: text/plain
        :statuscode 200: No error.
        :statuscode 500: Unexpected error encountered. Returns the error message as a
            string.

    .. http:patch:: /schema/{schema_name}/table/{table_name}/record

        Route to update a record. Omitted attributes utilize the default if set.

        **Example request**:

        .. sourcecode:: http

            PATCH /schema/alpha_company/table/Computer/record HTTP/1.1
            Host: fakeservices.datajoint.io
            Accept: application/json
            Authorization: Bearer <token>

            {
                "records": [
                    {
                        "computer_id": "ffffffff-86d5-4af7-a013-89bde75528bd",
                        "computer_serial": "ZYXWVEISJ",
                        "computer_brand": "HP",
                        "computer_built": "2021-01-01",
                        "computer_processor": 2.7,
                        "computer_memory": 32,
                        "computer_weight": 3.7,
                        "computer_cost": 601.01,
                        "computer_preowned": 0,
                        "computer_purchased": "2021-02-01 13:00:00",
                        "computer_updates": 0
                    }
                ]
            }

        **Example successful response**:

        .. sourcecode:: http

            HTTP/1.1 200 OK
            Vary: Accept
            Content-Type: text/plain

            Update Successful

        **Example unexpected response**:

        .. sourcecode:: http

            HTTP/1.1 500 Internal Server Error
            Vary: Accept
            Content-Type: text/plain

            400 Bad Request: The browser (or proxy) sent a request that this server
            could not understand.

        :reqheader Authorization: Bearer <OAuth2_token>
        :resheader Content-Type: text/plain
        :statuscode 200: No error.
        :statuscode 500: Unexpected error encountered. Returns the error message as a
            string.

    .. http:delete:: /schema/{schema_name}/table/{table_name}/record

        Route to delete a specific record.

        **Example request**:

        .. sourcecode:: http

            DELETE /schema/alpha_company/table/Computer/record?cascade=false&"""
     "restriction=W3siYXR0cmlidXRlTmFtZSI6ICJjb21wdXRlcl9tZW1vcnkiLCAib3BlcmF0aW9uIjogIj49"
     "IiwgInZhbHVlIjogMTZ9XQo="
     """ HTTP/1.1
            Host: fakeservices.datajoint.io
            Authorization: Bearer <token>

        **Example successful response**:

        .. sourcecode:: http

            HTTP/1.1 200 OK
            Vary: Accept
            Content-Type: text/plain

            Delete Successful

        **Example conflict response**:

        .. sourcecode:: http

            HTTP/1.1 409 Conflict
            Vary: Accept
            Content-Type: application/json

            {
                "error": "IntegrityError",
                "error_msg": "Cannot delete or update a parent row: a foreign key
                    constraint fails (`alpha_company`.`#employee`, CONSTRAINT
                    `#employee_ibfk_1` FOREIGN KEY (`computer_id`) REFERENCES
                    `computer` (`computer_id`) ON DELETE RESTRICT ON UPDATE CASCADE",
                "child_schema": "alpha_company",
                "child_table": "Employee"
            }

        **Example unexpected response**:

        .. sourcecode:: http

            HTTP/1.1 500 Internal Server Error
            Vary: Accept
            Content-Type: text/plain

            400 Bad Request: The browser (or proxy) sent a request that this server
            could not understand.

        :query cascade: Enable cascading delete. Accepts ``true`` or ``false``.
            Defaults to ``false``.
        :query restriction: Base64-encoded ``AND`` sequence of restrictions. For
            example, you could restrict as
            ``[{"attributeName": "computer_memory", "operation": ``-
            ``">=", "value": 16}]`` with this param set as
            ``W3siYXR0cmlidXRlTmFtZSI6ICJjb21wdXRlcl9tZW1vcnkiLCAib3Bl``-
            ``cmF0aW9uIjogIj49IiwgInZhbHVlIjogMTZ9XQo=``. Defaults to no restriction.
        :reqheader Authorization: Bearer <OAuth2_token>
        :resheader Content-Type: text/plain, application/json
        :statuscode 200: No error.
        :statuscode 409: Attempting to delete a record with dependents while
            ``cascade`` set to ``false``.
        :statuscode 500: Unexpected error encountered. Returns the error message as a
            string.
""") if request.method in {"GET", "HEAD"}: try: _DJConnector._set_datajoint_config(jwt_payload) schema_virtual_module = dj.VirtualModule(schema_name, schema_name) # Get table object from name dj_table = _DJConnector._get_table_object(schema_virtual_module, table_name) record_header, table_tuples, total_count = _DJConnector._fetch_records( query=dj_table, **{ k: (int(v) if k in ("limit", "page") else (v.split(",") if k == "order" else loads( b64decode(v.encode("utf-8")).decode("utf-8")))) for k, v in request.args.items() }, ) return dict(recordHeader=record_header, records=table_tuples, totalCount=total_count) except Exception as e: return str(e), 500 elif request.method == "POST": try: # Attempt to insert _DJConnector._insert_tuple(jwt_payload, schema_name, table_name, request.json["records"]) return "Insert Successful" except Exception as e: return str(e), 500 elif request.method == "PATCH": try: # Attempt to insert _DJConnector._update_tuple(jwt_payload, schema_name, table_name, request.json["records"]) return "Update Successful" except Exception as e: return str(e), 500 elif request.method == "DELETE": try: # Attempt to delete tuple _DJConnector._delete_records( jwt_payload, schema_name, table_name, **{ k: loads(b64decode(v.encode("utf-8")).decode("utf-8")) for k, v in request.args.items() if k == "restriction" }, **{ k: v.lower() == "true" for k, v in request.args.items() if k == "cascade" }, ) return "Delete Sucessful" except IntegrityError as e: match = foreign_key_error_regexp.match(e.args[0]) return ( dict( error=e.__class__.__name__, errorMessage=str(e), childSchema=match.group("child").split(".")[0][1:-1], childTable=to_camel_case( match.group("child").split(".")[1][1:-1]), ), 409, ) except Exception as e: return str(e), 500
def test_to_camel_case():
    assert_equal(to_camel_case("all_groups"), "AllGroups")
    assert_equal(to_camel_case("hello"), "Hello")
    assert_equal(to_camel_case("this_is_a_sample_case"), "ThisIsASampleCase")
    assert_equal(to_camel_case("This_is_Mixed"), "ThisIsMixed")
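# The tests above pin down the expected behavior of to_camel_case. A minimal
# implementation consistent with them (not necessarily DataJoint's exact one):
def _to_camel_case_sketch(s):
    """Convert 'snake_or_Mixed_case' to 'CamelCase'."""
    return "".join(part[:1].upper() + part[1:] for part in s.split("_"))

assert _to_camel_case_sketch("this_is_a_sample_case") == "ThisIsASampleCase"
assert _to_camel_case_sketch("This_is_Mixed") == "ThisIsMixed"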
# Presumably nested inside a method of a form-building class, so ``self`` here refers
# to the enclosing instance (class bodies can read names from the enclosing scope).
class FkForm(ManualLookupForm):
    parent_table_name = to_camel_case(self.foreign_table.table_name)
    existing_entries = self.create_dropdown_field(kwargs)