def handle(self):
    """Generate a new model file from the stub, optionally with a migration.

    Derives a singular module name from the model name, refuses to
    overwrite an existing file, ensures the target package exists, and
    finally delegates to ``make:migration`` when ``--migration`` is set.
    """
    name = self.argument('name')
    module_name = inflection.singularize(inflection.tableize(name))
    target_dir = self._get_path()
    target_file = self._get_path(module_name + '.py')
    if os.path.exists(target_file):
        raise RuntimeError('The model file already exists.')

    mkdir_p(target_dir)

    # Make sure the models directory is an importable package.
    init_file = os.path.join(target_dir, '__init__.py')
    if not os.path.exists(init_file):
        open(init_file, 'w').close()

    contents = self._populate_stub(name, self._get_stub())
    with open(target_file, 'w') as f:
        f.write(contents)

    self.info('Model <comment>%s</> successfully created.' % name)

    if self.option('migration'):
        table = inflection.tableize(name)
        self.call('make:migration', [
            ('name', 'create_%s_table' % table),
            ('--table', table),
            ('--create', True),
        ])
def __new__(cls, name, bases, dct):
    """Build a field-handler class from the declared relations.

    Converts the four relation declarations ('has_one', 'belongs_to',
    'has_many', 'has_and_belongs_to_many') into relation descriptors on
    the new class and registers the secondary indexes each relation
    needs. Also populates ``dct['related']`` (descriptor field names)
    and ``dct['restricted']`` (foreign-key fields managed only through
    their relation descriptor).

    :raises ValueError: if any relation declaration is not a tuple
    """
    if not all(isinstance(dct[rel_type], tuple) for rel_type in remodel.models.REL_TYPES):
        raise ValueError('Related models must be passed as a tuple')
    # TODO: Find a way to pass model class to its field handler class
    model = dct.pop('model')
    dct['restricted'], dct['related'] = set(), set()
    for rel in dct.pop('has_one'):
        if isinstance(rel, tuple):
            # 4-tuple relation supplied
            other, field, lkey, rkey = rel
        else:
            # Just the related model supplied; derive field and key names
            other = rel
            field, lkey, rkey = other.lower(), 'id', '%s_id' % model.lower()
        dct[field] = HasOneDescriptor(other, lkey, rkey)
        dct['related'].add(field)
        # the other model carries the foreign key, so index it there
        index_registry.register(other, rkey)
    for rel in dct.pop('belongs_to'):
        if isinstance(rel, tuple):
            other, field, lkey, rkey = rel
        else:
            other = rel
            field, lkey, rkey = other.lower(), '%s_id' % other.lower(), 'id'
        dct[field] = BelongsToDescriptor(other, lkey, rkey)
        dct['related'].add(field)
        # the local FK is writable only through the descriptor
        dct['restricted'].add(lkey)
        index_registry.register(model, lkey)
    for rel in dct.pop('has_many'):
        if isinstance(rel, tuple):
            other, field, lkey, rkey = rel
        else:
            other = rel
            field, lkey, rkey = tableize(other), 'id', '%s_id' % model.lower()
        dct[field] = HasManyDescriptor(other, lkey, rkey)
        dct['related'].add(field)
        index_registry.register(other, rkey)
    for rel in dct.pop('has_and_belongs_to_many'):
        if isinstance(rel, tuple):
            other, field, lkey, rkey = rel
        else:
            other = rel
            field, lkey, rkey = tableize(other), 'id', 'id'
        # deterministic join-model name, identical from either end
        join_model = '_' + ''.join(sorted([model, other]))
        try:
            remodel.models.ModelBase(join_model, (remodel.models.Model,), {})
        except AlreadyRegisteredError:
            # HABTM join_model model has been registered, probably from the
            # other end of the relation
            pass
        mlkey, mrkey = '%s_id' % model.lower(), '%s_id' % other.lower()
        dct[field] = HasAndBelongsToManyDescriptor(other, lkey, rkey, join_model, mlkey, mrkey)
        dct['related'].add(field)
        index_registry.register(join_model, mlkey)
        index_registry.register(join_model, mrkey)
    return super(FieldHandlerBase, cls).__new__(cls, name, bases, dct)
def __new__(mcs, name, bases, dct): super_new = super(ModelBase, mcs).__new__ # Ensure the following are not done for the Model class itself parents = [b for b in bases if isinstance(b, ModelBase)] if not parents: return super_new(mcs, name, bases, dct) # Set metadata dct['_table'] = tableize(name) rel_attrs = {rel: dct.setdefault(rel, ()) for rel in REL_TYPES} dct['_field_handler_cls'] = FieldHandlerBase( '%sFieldHandler' % name, (FieldHandler,), dict(rel_attrs, model=name)) object_handler_cls = dct.setdefault('object_handler', ObjectHandler) # Register callbacks dct['_callbacks'] = {callback: [] for callback in CALLBACKS} for callback in CALLBACKS: # Callback-named methods if callback in dct: dct['_callbacks'][callback].append(callback) # Callback-decorated methods dct['_callbacks'][callback].extend([key for key, value in dct.items() if hasattr(value, callback)]) new_class = super_new(mcs, name, bases, dct) model_registry.register(name, new_class) setattr(new_class, 'objects', object_handler_cls(new_class)) return new_class
def __init__(self, model, lkey, rkey, join_model, mlkey, mrkey):
    """Record the relation metadata for a has-and-belongs-to-many link.

    ``related_cache`` names the per-instance attribute used to cache
    the fetched related objects.
    """
    self.related_cache = '_%s_cache' % tableize(model)
    self.model = model
    self.join_model = join_model
    self.lkey = lkey
    self.rkey = rkey
    self.mlkey = mlkey
    self.mrkey = mrkey
def _get_tablename(cls):
    """Derive this Document class's table name from its class name.

    Subclasses may override this to use a different table name; note
    the caveats around further subclassing and sharing one table
    between different Document classes.
    """
    derived = inflection.tableize(cls.__name__)
    return derived
def _populate_stub(self, name, stub):
    """
    Populate the placeholders in the migration stub.

    :param name: The name of the model
    :type name: str

    :param stub: The stub
    :type stub: str

    :rtype: str
    """
    replacements = {
        "DummyClass": name,
        "dummy_table": inflection.tableize(name),
    }
    for placeholder, value in replacements.items():
        stub = stub.replace(placeholder, value)
    return stub
def handle(self):
    """Generate a new cog extension module from the stub template.

    Refuses to overwrite an existing file; creates the target directory
    if needed.
    """
    name = self.argument("name")
    module_name = inflection.singularize(inflection.tableize(name))
    target_dir = self._get_path()
    target_file = self._get_path(module_name + ".py")
    if os.path.exists(target_file):
        raise RuntimeError("The cog extension already exists.")

    mkdir_p(target_dir)

    contents = self._populate_stub(name, self._get_stub())
    with open(target_file, "w") as f:
        f.write(contents)

    self.info(f"Cog <comment>'{name}'</> successfully created.")
def process_worksheet(gsheets_loader, sheet_name, worksheet, start_from_row, config):
    """Read one worksheet from Google Sheets and emit it as a Singer stream.

    :param gsheets_loader: loader exposing get_schema / get_records_as_json
    :param sheet_name: name of the spreadsheet
    :param worksheet: worksheet name, or None for the default sheet
    :param start_from_row: first data row to read
    :param config: tap config; honours 'singular_table_name' and
        'underscore_columns' flags
    """
    if worksheet is None:
        name_with_worksheet = sheet_name
    else:
        name_with_worksheet = sheet_name + "_" + worksheet

    # config.get(...) is the idiomatic, single-lookup form of
    # "'key' in config and config['key']"
    if config.get('singular_table_name'):
        stream_name = underscore(parameterize(name_with_worksheet))
    else:
        stream_name = tableize(parameterize(name_with_worksheet))

    schema = gsheets_loader.get_schema(sheet_name, worksheet, start_from_row)
    records = gsheets_loader.get_records_as_json(sheet_name, worksheet, start_from_row)

    # additional data transformations: optionally underscore column names,
    # remembering the original -> transformed mapping for the records
    column_mapping = None
    if config.get('underscore_columns'):
        column_mapping = {'id': 'id'}
        props = {}
        for k, v in schema['properties'].items():
            kt = underscore(parameterize(k))
            props[kt] = v
            column_mapping[k] = kt
        schema['properties'] = props

    # synthesize a sequential integer primary key for every record
    schema['properties']['id'] = {'type': 'integer'}
    for i, record in enumerate(records, start=1):
        record['id'] = i

    # write stuff
    singer.write_schema(
        stream_name=stream_name,
        schema=schema,
        key_properties=['id']
    )
    for record in records:
        if column_mapping is not None:
            record_transformed = {column_mapping[k]: v for k, v in record.items()}
        else:
            record_transformed = record
        singer.write_record(stream_name, record_transformed)
def __new__(cls, clsname, bases, dct):
    """Collect declared fields, derive the table name, and build a
    marshmallow schema (via ``Schema.from_dict``) for the model class.
    """
    super_new = super(ModelBase, cls).__new__
    new_class = super_new(cls, clsname, bases, dct)
    fields_copy = {}
    new_class._fields = {}
    # NOTE(review): 'id' is injected into dct *after* the class object was
    # created above, so it only reaches _fields / _schema and never becomes
    # a class attribute — confirm this is intentional.
    dct['id'] = GUID(missing=lambda: str(guid.GUID().slug))
    for key, value in dct.items():
        # dunder attributes are not fields
        if not key.startswith('__'):
            new_class._fields[key] = value
            fields_copy[key] = value
    new_class._table = tableize(clsname)
    new_class._table_exists = False
    new_class._schema = ma.Schema.from_dict(fields_copy, name=clsname + 'Schema')
    return new_class
def handle(self):
    """Create a new discord.py bot project.

    With ``--simple``, writes a single ``<name>.py`` file. Otherwise
    downloads and unzips the requested (or latest) template branch and,
    when ``--token`` is given, writes the token into ``config.yaml``.

    Fixes: removed the unused ``singular`` local (its value was never
    read) and corrected the "retrieveing" typo in the final warning.
    """
    branch = None
    name = self.argument("name")
    cwd = Path()
    TOKEN = self.option("token")
    if self.option("simple"):
        cwd.joinpath(f"{name}.py").write_text(simple(TOKEN))
        if TOKEN:
            self.info("Token has been added")
        return self.info(
            f"Simple bot: {name}.py has been successfully created\nYou'll need to install discord.py if it's not already installed!"
        )
    handler = GitHandler(cwd)
    version = self.option("branch")
    if version:
        branch = handler.get_branch(version)
        if not branch:
            self.warning(f"Unable to retrieve branch {version}")
            return
    else:
        branch = handler.get_latest_version()
    if branch:
        self.info(f"Using branch {branch.name}")
        handler.unzip(name, branch)
        self.info(f"Bot <comment>'{name}'</> successfully created.")
        if self.option("token"):
            # splice the token into the freshly unpacked config file
            path = cwd.joinpath(name, "config.yaml")
            with open(path, "r") as file:
                text = file.read()
            with open(path, "w") as file:
                file.write(text.replace("TOKEN:\n", f"TOKEN: {TOKEN}"))
            self.info("Token has been successfully added.")
        self.info(
            "You can now CD into the directory to install the requirements"
        )
    else:
        self.warning(f"Ran into an error retrieving branch {version}")
def __init__(cls, name, bases, attrs):
    """Register each model subclass in the shared registry and apply
    defaults for filename, directory, denormalization, and pipeline.
    """
    # fully-dotted path as object identity
    cls.__qname__ = f'{cls.__module__}.{cls.__name__}'
    if not hasattr(cls, '_registry'):
        # create reference to single registry across all subclasses
        cls._registry = ModelMeta.__registry
    else:
        # ensure registry entry exists for all subclasses (base class excluded)
        ModelMeta.__registry.setdefault(
            cls.__qname__,
            dict(cls=cls, version=None, pipeline=None,
                 pipeline_config=dict(
                     upload_accept=None,
                     upload_role=None,
                     upload_meta_schema=None,
                     **{f'has_{stage}': False for stage in TASK_KEYS},
                     **{f'{stage}_sha256': None for stage in TASK_KEYS}
                 ))
        )
    # default filename if none supplied
    if '__filename__' not in attrs:
        cls.__filename__ = inflection.tableize(name)
    # default parent directory if none supplied
    # (second segment of the module path; assumes a pkg.subpkg.module layout)
    if '__directory__' not in attrs:
        cls.__directory__ = cls.__module__.split('.')[1]
    # store denormalize field name if present
    if '_denormalize' in attrs:
        cls._denormalize = True
        cls._denormalize_on = attrs['_denormalize']
    else:
        cls._denormalize = False
    # instantiate and register pipeline if defined
    if '__pipeline__' in attrs:
        cls._register_pipeline(attrs['__pipeline__'])
    super(ModelMeta, cls).__init__(name, bases, attrs)
def __new__(cls, clsname, bases, dct):
    """Collect declared fields, derive the table name, and build a
    marshmallow schema class for the model.

    Fix: removed a leftover debug ``print`` that dumped every class
    attribute at class-creation time.
    """
    super_new = super(ModelBase, cls).__new__
    new_class = super_new(cls, clsname, bases, dct)
    fields_copy = {}
    new_class._fields = {}
    for key, value in dct.items():
        # dunder attributes are not fields
        if not key.startswith('__'):
            new_class._fields[key] = value
            fields_copy[key] = value
    new_class._table = tableize(clsname)
    new_class._table_exists = False
    # every model gets a required UUID primary key in its schema
    fields_copy['id'] = ma.fields.UUID(required=True)
    new_class._schema = type(
        clsname + 'Schema',
        (ma.Schema,),
        fields_copy
    )
    return new_class
def __tablename__(cls):
    """Table name derived from the class name."""
    derived_name = tableize(cls.__name__)
    return derived_name
def define_table_name(model_class):
    """Return the conventional (tableized) table name for *model_class*."""
    return inflection.tableize(model_class.__name__)
def parent_id(cls):
    """Nullable self-referential foreign key to this table's own ``id``."""
    fk_target = '{}.id'.format(inflection.tableize(cls.__name__))
    return db.Column(db.Integer, db.ForeignKey(fk_target), nullable=True)
def tableize(word):
    """Coerce *word* to ``str`` and delegate to :func:`inflection.tableize`."""
    return inflection.tableize(str(word))


# The original assigned a *local* variable named __doc__ inside the body,
# which has no effect on the function's docstring. Copy the wrapped
# function's docstring explicitly instead.
tableize.__doc__ = inflection.tableize.__doc__ or tableize.__doc__
def __new__(cls, name, bases, dct):
    """Build a field-handler class from the declared relations.

    Converts 'has_one', 'belongs_to', 'has_many' and
    'has_and_belongs_to_many' declarations into relation descriptors on
    the new class and registers the secondary indexes each relation
    needs. Populates ``dct['related']`` (descriptor field names) and
    ``dct['restricted']`` (foreign keys managed only via descriptors).

    :raises ValueError: if any relation declaration is not a tuple
    """
    if not all(
            isinstance(dct[rel_type], tuple)
            for rel_type in modernremodel.models.REL_TYPES):
        raise ValueError('Related models must be passed as a tuple')
    # TODO: Find a way to pass model class to its field handler class
    model = dct.pop('model')
    dct['restricted'], dct['related'] = set(), set()
    for rel in dct.pop('has_one'):
        if isinstance(rel, tuple):
            # 4-tuple relation supplied
            other, field, lkey, rkey = rel
        else:
            # Just the related model supplied; derive field and key names
            other = rel
            field, lkey, rkey = other.lower(
            ), 'id', '%s_id' % model.lower()
        dct[field] = HasOneDescriptor(other, lkey, rkey)
        dct['related'].add(field)
        # the other model carries the foreign key, so index it there
        index_registry.register(other, rkey)
    for rel in dct.pop('belongs_to'):
        if isinstance(rel, tuple):
            other, field, lkey, rkey = rel
        else:
            other = rel
            field, lkey, rkey = other.lower(
            ), '%s_id' % other.lower(), 'id'
        dct[field] = BelongsToDescriptor(other, lkey, rkey)
        dct['related'].add(field)
        # the local FK is writable only through the descriptor
        dct['restricted'].add(lkey)
        index_registry.register(model, lkey)
    for rel in dct.pop('has_many'):
        if isinstance(rel, tuple):
            other, field, lkey, rkey = rel
        else:
            other = rel
            field, lkey, rkey = tableize(
                other), 'id', '%s_id' % model.lower()
        dct[field] = HasManyDescriptor(other, lkey, rkey)
        dct['related'].add(field)
        index_registry.register(other, rkey)
    for rel in dct.pop('has_and_belongs_to_many'):
        if isinstance(rel, tuple):
            other, field, lkey, rkey = rel
        else:
            other = rel
            field, lkey, rkey = tableize(other), 'id', 'id'
        # deterministic join-model name, identical from either end
        join_model = '_' + ''.join(sorted([model, other]))
        try:
            modernremodel.models.ModelBase(join_model,
                                           (modernremodel.models.Model, ),
                                           {})
        except AlreadyRegisteredError:
            # HABTM join_model model has been registered, probably from the
            # other end of the relation
            pass
        mlkey, mrkey = '%s_id' % model.lower(), '%s_id' % other.lower()
        dct[field] = HasAndBelongsToManyDescriptor(other, lkey, rkey,
                                                   join_model, mlkey, mrkey)
        dct['related'].add(field)
        index_registry.register(join_model, mlkey)
        index_registry.register(join_model, mrkey)
    return super(FieldHandlerBase, cls).__new__(cls, name, bases, dct)
def _api_type_for_model(self, model):
    """JSON-API type string (tableized then dasherized) for *model*."""
    type_name = tableize(model.__name__)
    return dasherize(type_name)
def __new__(cls, name, bases, attrs):
    """Attach an ObjectManager and derived table name to each model class."""
    model_cls = super().__new__(cls, name, bases, attrs)
    derived_table = tableize(name)
    model_cls.objects = ObjectManager(model_cls, derived_table)
    model_cls.table_name = derived_table
    return model_cls
def test_tableize(string, tableized):
    """inflection.tableize should map *string* to *tableized*."""
    result = inflection.tableize(string)
    assert result == tableized
def _fqtn(entity: Type[ffd.Entity]):
    """Fully-qualified table name: tableized FQN with dots turned into underscores."""
    tableized = inflection.tableize(entity.get_fqn())
    return tableized.replace('.', '_')
def get_table_name(cls):
    """Explicit ``__table__`` when set (truthy); otherwise the tableized class name."""
    if cls.__table__:
        return cls.__table__
    return tableize(cls.__name__)
def table_name(self):
    """Table name derived from the wrapped entity's class name."""
    entity_cls = self.entity
    return inflection.tableize(entity_cls.__name__)
def _schema(self, request: Request):
    """Return the column schema and foreign-key map for the requested model.

    Supports sqlite, mysql and postgres connections (selected via the
    DB_CONNECTION env var).

    :param request: incoming request; its ``model`` param names the model
    :return: dict with 'schema' (list of {'id', 'column', 'type'} rows)
        and 'foreign_keys' (column name -> related table's data)

    Fix: the postgres branch used ``str.strip('_foreign')`` /
    ``str.strip('_id')``, which strips a *character set* from both ends
    (mangling names that start or end with those letters), not a suffix;
    replaced with explicit suffix removal.
    """
    model_name = request.param('model')
    # NOTE(review): table_name is interpolated into SQL text below.
    # Identifiers cannot be bound as parameters, and model_name comes from
    # the request — validate it against the known models before trusting it.
    table_name = inflection.tableize(model_name)
    row = self.get_model_row_by_model_name(model_name)
    if env('DB_CONNECTION') == 'sqlite':
        # Get Schema in SQLite with Python
        # https://www.tomordonez.com/get-schema-sqlite-python.html
        db = sqlite3.connect(env('DB_DATABASE'))
        cursor = db.cursor()
        cursor.execute(f"PRAGMA table_info('{table_name}')")
        schema = list(cursor.fetchall())
        for i, row in enumerate(schema):
            row = list(row)
            schema[i] = {'id': row[0], 'column': row[1], 'type': row[2]}
        cursor.execute(f"PRAGMA foreign_key_list('{table_name}')")
        foreign_list = cursor.fetchall()
    elif env('DB_CONNECTION') == 'mysql':
        conn = pymysql.connect(host=env('DB_HOST'),
                               user=env('DB_USERNAME'),
                               password=env('DB_PASSWORD'),
                               db=env('DB_DATABASE'),
                               charset='utf8mb4',
                               cursorclass=pymysql.cursors.DictCursor)
        with conn.cursor() as cursor:
            sql = f'SELECT * FROM {table_name} LIMIT 1'
            cursor.execute(sql)
            schema = cursor.description
        schema = list(schema)
        for i, row in enumerate(schema):
            new_row = {'id': i, 'column': row[0], 'type': row[1]}
            # map numeric MySQL field-type codes to readable names
            for k, v in MYSQL_FIELD_TYPE.items():
                if str(new_row['type']) == k:
                    new_row['type'] = v
            schema[i] = new_row
        foreign_list = []
        with conn.cursor() as cursor:
            sql = f'SHOW CREATE TABLE {table_name}'
            cursor.execute(sql)
            filedata = cursor.fetchone()['Create Table']
        # scrape FOREIGN KEY lines out of the CREATE TABLE DDL
        filedata_arr = filedata.splitlines()
        for data in filedata_arr:
            row_in_foreign_list = [0, 0, 'table', 'key']
            if 'FOREIGN KEY' in data:
                row = data.split('`')
                row_in_foreign_list[3] = row[3]  # local column name
                row_in_foreign_list[2] = row[5]  # referenced table name
                foreign_list += [row_in_foreign_list]
    elif env('DB_CONNECTION') == 'postgres':
        conn = psycopg2.connect(host=env('DB_HOST'),
                                database=env('DB_DATABASE'),
                                user=env('DB_USERNAME'),
                                password=env('DB_PASSWORD'),
                                port=env('DB_PORT'))
        with conn.cursor() as cursor:
            sql = f"select column_name, data_type from information_schema.columns where table_name = '{table_name}'"
            cursor.execute(sql)
            schema = cursor.fetchall()
            for i, row in enumerate(schema):
                new_row = {
                    'id': i,
                    'column': row[0],
                    'type': row[1].split(' ')[0]
                }
                for k, v in PGSQL_FIELD_TYPE.items():
                    if str(new_row['type']) == k:
                        new_row['type'] = v
                        break
                schema[i] = new_row
        foreign_list = []
        with conn.cursor() as cursor:
            sql = "SELECT table_name, constraint_name FROM information_schema.table_constraints WHERE table_schema = 'public' AND constraint_type = 'FOREIGN KEY'"
            cursor.execute(sql)
            filedata = cursor.fetchall()
            for i, v in enumerate(filedata):
                # constraint names look like '<table>_<column>_foreign'
                key_name = v[1]
                key_name = key_name.replace(f'{table_name}_', '')
                if key_name.endswith('_foreign'):
                    key_name = key_name[:-len('_foreign')]
                if v[0] == table_name:
                    base = key_name[:-len('_id')] if key_name.endswith('_id') else key_name
                    foreign_list += [
                        (i, 0, inflection.pluralize(base), key_name)
                    ]
    foreign = {}
    for row in foreign_list:
        # NOTE(review): foreign_data is called with self passed explicitly
        # in addition to the bound self — confirm its signature.
        data = self.foreign_data(self, row[2])  # row[2] = table_name
        foreign[row[3]] = data  # row[3] = column_name
    return {'schema': schema, 'foreign_keys': foreign}
def parent_id(cls):
    """Nullable self-referential FK pointing at this table's ``id`` column."""
    target = '{}.id'.format(inflection.tableize(cls.__name__))
    return db.Column(db.Integer, db.ForeignKey(target), nullable=True)
def __tablename__(cls):
    """Derive the table name from the class name."""
    class_name = cls.__name__
    return inflection.tableize(class_name)
def __init__(self, model, lkey, rkey):
    """Record the relation keys and the per-instance cache attribute name."""
    self.related_cache = '_%s_cache' % tableize(model)
    self.model = model
    self.lkey = lkey
    self.rkey = rkey
def __new__(mcs, name, bases, attrs):
    """
    Detects all fields and wires everything up. These class attributes are
    defined here:

    *   *type*
        The JSON API typename

    *   *_declared_fields*
        Maps the key (schema property name) to the associated
        :class:`BaseField`.

    *   *_fields_by_sp*
        Maps the source pointer of a field to the associated
        :class:`BaseField`.

    *   *_attributes*
        Maps the JSON API attribute name to the :class:`Attribute`
        instance.

    *   *_relationships*
        Maps the JSON API relationship name to the :class:`Relationship`
        instance.

    *   *_links*
        Maps the JSON API link name to the :class:`Link` instance.

    *   *_meta*
        Maps the (top level) JSON API meta member to the associated
        :class:`Attribute` instance.

    *   *_toplevel*
        A list with all JSON API top level fields (attributes, ..., meta).

    :arg str name:
        The name of the schema class
    :arg tuple bases:
        The direct bases of the schema class
    :arg dict attrs:
        A dictionary with all properties defined on the schema class
        (attributes, methods, ...)
    """
    cls_fields = _get_fields(attrs, abc.FieldABC, pop=True)
    klass = super(SchemaMeta, mcs).__new__(mcs, name, bases, attrs)
    inherited_fields = _get_fields_by_mro(klass, abc.FieldABC)
    declared_fields = OrderedDict()

    for key, prop in inherited_fields + cls_fields:
        prop.key = key
        # fall back to the (possibly inflected) key when no explicit name
        prop.name = \
            prop.name or (klass.inflect(key) if callable(klass.inflect)
                          else key)
        if not ALLOWED_MEMBER_NAME_REGEX.fullmatch(prop.name):
            raise ValueError('Field name "{}" is not allowed.'.format(
                prop.name))
        prop.mapped_key = prop.mapped_key or key
        declared_fields[prop.key] = prop

    # Find nested fields (link_of, ...) and link them with
    # their parent.
    for key, field in declared_fields.items():
        if getattr(field, 'link_of', None):
            relationship = declared_fields[field.link_of]
            assert isinstance(relationship, Relationship), \
                'Links can be added only for relationships.'
            relationship.add_link(field)

    klass._id = declared_fields.pop('id', None)

    # Find the *top-level* attributes, relationships, links and meta fields.
    attributes = OrderedDict(
        (key, field) for key, field in declared_fields.items()
        if isinstance(field, Attribute) and not field.meta)
    mcs._assign_sp(attributes.values(), JSONPointer('/attributes'))
    klass._attributes = MappingProxyType(attributes)

    relationships = OrderedDict(
        (key, field) for key, field in declared_fields.items()
        if isinstance(field, Relationship))
    # TODO: Move default links to class initializer
    # It will allow to use custom namespace for Links
    for relationship in relationships.values():
        # Add the default links.
        relationship.links.update({
            'self': Link('jsonapi.relationships', name='self',
                         link_of=relationship.name),
            'related': Link('jsonapi.related', name='related',
                            link_of=relationship.name)
        })
    mcs._assign_sp(relationships.values(), JSONPointer('/relationships'))
    klass._relationships = MappingProxyType(relationships)

    links = OrderedDict(
        (key, field) for key, field in declared_fields.items()
        if isinstance(field, Link) and not field.link_of)
    mcs._assign_sp(links.values(), JSONPointer('/links'))
    klass._links = MappingProxyType(links)

    meta = OrderedDict(
        (key, field) for key, field in declared_fields.items()
        if isinstance(field, Attribute) and field.meta)
    # BUG FIX: assign the '/meta' source pointers to the meta fields
    # themselves; the original passed links.values() here, re-pointing the
    # link fields at '/meta' and leaving meta fields without a pointer.
    mcs._assign_sp(meta.values(), JSONPointer('/meta'))
    klass._meta = MappingProxyType(meta)

    # Collect all top level fields in a list.
    toplevel = tuple(
        itertools.chain(klass._attributes.values(),
                        klass._relationships.values(),
                        klass._links.values(),
                        klass._meta.values()))
    klass._toplevel = toplevel

    # Create the source pointer map.
    klass._fields_by_sp = mcs._sp_to_field(toplevel)

    # Determine 'type' name.
    if not attrs.get('type') and attrs.get('resource_class'):
        klass.type = inflection.dasherize(
            inflection.tableize(attrs['resource_class'].__name__))
    if not attrs.get('type'):
        klass.type = name
    if not ALLOWED_MEMBER_NAME_REGEX.fullmatch(klass.type):
        raise ValueError('Type "{}" is not allowed.'.format(klass.type))

    klass._declared_fields = MappingProxyType(declared_fields)
    return klass
def _endpoint(self):
    """Relative API endpoint: dasherized table name with a trailing slash."""
    resource = inflection.tableize(self._class_name)
    return inflection.dasherize(resource) + "/"
def get_tableized_name(cls):
    """Tableized form of the server-side model name."""
    server_name = cls.get_server_model_name()
    return tableize(server_name)
def _fqtn(entity: Type[ffd.Entity]):
    """Tableized fully-qualified name of *entity* (dots preserved)."""
    fqn = entity.get_fqn()
    return inflection.tableize(fqn)
def __tablename__(cls):
    """Use the pluralized (tableized) class name as a natural table name."""
    natural_name = inflection.tableize(cls.__name__)
    return natural_name
def _table_name(cls):
    """Explicit ``__table__`` when set (truthy); otherwise the tableized class name."""
    explicit = cls.__table__
    if explicit:
        return explicit
    return inflection.tableize(cls.__name__)
def __bind__(cls, skip_table_setup=False):
    """Bind this model class (and its subclasses) to its DynamoDB table.

    Builds the effective Meta class, resolves the connection and table
    name, binds attributes and indexes, validates hash/range keys, then
    creates or updates the table unless *skip_table_setup* is true.

    :raises ValueError: if the resolved connection is not a Connection
    :raises SchemaError: on missing/duplicate hash keys, multiple range
        keys, or attribute/index name collisions
    """
    if _is_bound(cls):
        return
    # bind subclasses
    for model in cls.__subclasses__():
        model.__bind__(skip_table_setup)
    # create meta class (user Meta if declared on this class, else dummy)
    class_ = cls.Meta if 'Meta' in cls.__dict__ else _Dummy

    class Meta(class_, IMeta):
        pass
    # abstract models are never bound to a table
    if Meta.abstract:
        return
    cls.Meta = Meta
    # connection
    connection = Meta.connection
    if not connection:
        # fall back to the class-level default connection
        connection = cls.__connection__
        Meta.connection = connection
    if not isinstance(connection, Connection):
        raise ValueError('Invalid connection {}'.format(connection))
    # set table name
    table_name = Meta.table_name
    if is_empty(table_name):
        table_name = tableize(cls.__name__)
    Meta.table_name = Meta.connection.prefixed_table_name(table_name)
    # init attributes
    Meta.schema = cls.get_raw_attributes()
    for name, attr in Meta.schema.items():
        attr.__bind__(name, cls)
    # List of attributes
    attrs = Meta.schema.values()
    # validate hash key: exactly one attribute must declare hash_key
    hash_keys = [x for x in attrs if x.hash_key]
    size = len(hash_keys)
    if size == 1:
        hash_key = hash_keys.pop()
        Meta.hash_key = hash_key
    elif size == 0:
        raise SchemaError('Missing hash_key for model {}'.format(cls))
    else:
        # NOTE(review): sorted() requires the attribute objects to be
        # orderable — confirm they implement comparison.
        raise SchemaError(
            'Multiple attributes {} declared as hash_key for model {}'
            .format(sorted(hash_keys), cls)
        )
    # validate range key: at most one attribute may declare range_key
    range_keys = [x for x in attrs if x.range_key]
    size = len(range_keys)
    if size == 1:
        range_key = range_keys.pop()
        Meta.range_key = range_key
    elif size > 1:
        raise SchemaError(
            'Multiple attributes {} declared as range_key for model {}'
            .format(sorted(range_keys), cls)
        )
    # Init indexes
    Meta.indexes = cls.get_raw_indexes()
    for name, index in Meta.indexes.items():
        index.__bind__(name, cls)
    # validation for collisions in local attributes/indexes
    names = [x.name for x in list(Meta.schema.values()) + list(Meta.indexes.values())]
    duplicates = [item for item, count in collections.Counter(names).items() if count > 1]
    if len(duplicates):
        raise SchemaError(
            'Duplicate attribute or index names {} found in model {}'.format(sorted(set(duplicates)), cls)
        )
    # finalize Meta
    _BOUND_MODELS.add(cls)
    # Create table
    if not skip_table_setup:
        try:
            cls.__create_table__()
        except ClientError as e:
            if e.response['Error']['Code'] != 'ResourceInUseException':
                raise
            # Table exists. update the table
            cls.__update_table__()