def default(self, obj):
    """
    Serialize an SQLAlchemy declarative instance to a JSON-encodable dict.

    :param obj: object to encode; instances of SQLAlchemy declarative
        classes are converted to a dict of their column attributes,
        anything else is delegated to the base encoder.
    :return: Dictionary of JSON serializable objects.
    :rtype: dict
    """
    if isinstance(obj.__class__, DeclarativeMeta):
        # An SQLAlchemy class: serialize its column properties only
        # (relationships are not ColumnProperty and are skipped).
        fields = {}
        column_keys = [
            col.key
            for col in class_mapper(type(obj)).iterate_properties
            if isinstance(col, sa.orm.ColumnProperty)
        ]
        for field in column_keys:
            # Skip private attributes and the modification timestamp
            # BEFORE touching the instance, so we never trigger a lazy
            # load (and a possible DetachedInstanceError) for values
            # we would throw away anyway.
            if field == 'dt_modify' or field.startswith('_'):
                continue
            try:
                data = getattr(obj, field)
            except DetachedInstanceError:
                # The owning session is gone; the value cannot be
                # loaded any more. Best effort: emit a placeholder.
                data = 'UNKNOWN'
            try:
                # this will fail on non-encodable values, like other classes
                json.dumps(data)
                fields[field] = data
            except TypeError:
                # Special handlers for unusual data types.
                # Handler: datetime / time -> ISO-8601 string.
                if isinstance(data, (datetime, time)):
                    fields[field] = data.isoformat()
                else:
                    # Non-encodable and no special handler: drop value.
                    fields[field] = None
        # a json-encodable dict
        return fields
    # Not an SQLAlchemy object: let the base class raise TypeError.
    return json.JSONEncoder.default(self, obj)
def copy_sqla_object(obj: object, omit_fk: bool = True, omit_pk: bool = True, omit_attrs: List[str] = None, debug: bool = False) -> object:
    """
    Given an SQLAlchemy object, creates a new object (FOR WHICH THE OBJECT
    MUST SUPPORT CREATION USING ``__init__()`` WITH NO PARAMETERS), and
    copies across all attributes, omitting PKs (by default), FKs (by
    default), and relationship attributes (always omitted).

    Args:
        obj: the object to copy
        omit_fk: omit foreign keys (FKs)?
        omit_pk: omit primary keys (PKs)?
        omit_attrs: attributes (by name) not to copy
        debug: be verbose

    Returns:
        a new copy of the object
    """
    omit_attrs = omit_attrs or []  # type: List[str]
    cls = type(obj)
    mapper = class_mapper(cls)
    newobj = cls()  # not: cls.__new__(cls)
    # Relationship attributes are always excluded from the copy.
    prohibited = {rel.key for rel in mapper.relationships}
    if omit_pk:
        prohibited |= {col.key for col in mapper.primary_key}
    if omit_fk:
        prohibited |= {col.key for col in mapper.columns if col.foreign_keys}
    prohibited |= set(omit_attrs)
    if debug:
        log.debug("copy_sqla_object: skipping: {}", prohibited)
    for k in [p.key for p in mapper.iterate_properties
              if p.key not in prohibited]:
        try:
            value = getattr(obj, k)
            if debug:
                log.debug("copy_sqla_object: processing attribute {} = {}",
                          k, value)
            setattr(newobj, k, value)
        except AttributeError:
            # Attribute not readable on the source object; skip it
            # (best-effort copy, mirrors the original behaviour).
            if debug:
                log.debug("copy_sqla_object: failed attribute {}", k)
    return newobj
def __call__(self, entity_class, options):
    """
    Creates a query for the given entity class, passing the given option
    map to the constructor, and processes loader options, if configured.
    """
    # The "query_class" option is used to inject the optimized
    # counting query class which fetches the total result count and
    # the first result page in one call.
    query_cls = options.pop('query_class', Query)
    q = query_cls([entity_class], self.__session, **options)
    entity_attr_names = self.loader_option_map.get(entity_class)
    # PEP 8: "x is not None" rather than "not x is None".
    if entity_attr_names is not None:
        q = self.__process_loader_options(entity_attr_names,
                                          class_mapper(entity_class), q)
    return q
def _migrate(self, locales=False):
    """
    Migrate documents (or their locales, when ``locales`` is True) from
    the source connection into the target session in batches, rebuilding
    archive/version rows along the way.

    NOTE(review): the grouping logic assumes the source query returns all
    versions of one document contiguously, with the latest version
    flagged via ``is_latest_version`` — confirm against the query.
    NOTE(review): this function appears duplicated elsewhere in this file.
    """
    self.start('{0}{1}'.format('locales of ' if locales else '', self.get_name()))
    query_count = self.get_count_query_locales() if locales else \
        self.get_count_query()
    total_count = self.connection_source.execute(
        text(query_count)).fetchone()[0]
    print('Total: {0} rows'.format(total_count))
    query = text(self.get_query_locales() if locales else self.get_query())
    model_document = self.get_model_document(locales)
    # make sure that the version is not managed by SQLAlchemy, the value
    # that is provided as version should be used
    document_mapper = class_mapper(model_document)
    document_mapper.version_id_prop = None
    document_mapper.version_id_col = None
    document_mapper.version_id_generator = None
    if not locales:
        # geometries are only migrated for full documents, not locales
        document_mapper = class_mapper(DocumentGeometry)
        document_mapper.version_id_prop = None
        document_mapper.version_id_col = None
        document_mapper.version_id_generator = None
    batch = DocumentBatch(self.session_target, self.batch_size,
                          model_document,
                          self.get_model_archive_document(locales),
                          self.get_model_geometry(),
                          self.get_model_archive_geometry())
    with transaction.manager, batch:
        # Running state for the current group of document versions.
        count = 0
        current_document_id = None
        current_locale = None
        version = 1
        archives = []
        geometry_archives = []
        for document_in in self.connection_source.execute(query):
            count += 1
            if current_document_id is None:
                # First row of a new document group.
                current_document_id = document_in.id
                version = 1
                if locales:
                    current_locale = document_in.culture
            else:
                if current_document_id != document_in.id:
                    # The previous group ended without a row flagged as
                    # latest version: discard its pending archives and
                    # start a fresh group.
                    print('WARNING: no latest version for {0}'.format(
                        current_document_id))
                    archives = []
                    geometry_archives = []
                    version = 1
                    current_document_id = document_in.id
                    if locales:
                        current_locale = document_in.culture
            if locales:
                document_archive = self.get_document_locale_archive(
                    document_in, version)
            else:
                document_archive = self.get_document_archive(
                    document_in, version)
                # do not migrate any empty geom for non geom types
                geometry_archive = self.get_document_geometry_archive(
                    document_in, version)
                if geometry_archive is not None:
                    geometry_archives.append(geometry_archive)
            archives.append(document_archive)
            if document_in.is_latest_version:
                # Group complete: emit the current document plus all
                # accumulated archives, then reset the running state.
                if locales:
                    if current_locale != document_in.culture:
                        raise Exception(
                            'locale of the latest version does not match '
                            'locale of the first version {0}'.format(
                                current_document_id))
                    document = self.get_document_locale(
                        document_in, version)
                else:
                    document = self.get_document(document_in, version)
                    geometry = self.get_document_geometry(
                        document_in, version)
                    # do not migrate any empty geom for non geom types
                    if geometry is not None:
                        batch.add_geometry(geometry)
                    batch.add_geometry_archives(geometry_archives)
                batch.add_archive_documents(archives)
                batch.add_document(document)
                archives = []
                geometry_archives = []
                version = 1
                current_document_id = None
                current_locale = None
            else:
                version += 1
            self.progress(count, total_count)
        # the transaction will not be commited automatically when doing
        # a bulk insertion. `mark_changed` forces a commit.
        zope.sqlalchemy.mark_changed(self.session_target)
    self.stop()
def uselist_relationships(
        Model: Union[type, DeclarativeMeta]) -> Mapping[str, bool]:
    """
    Inspect a model and return a map of {relationship name => uselist}
    """
    uselist_by_name = {}
    for relationship in class_mapper(Model).relationships:
        uselist_by_name[relationship.key] = relationship.uselist
    return uselist_by_name
def _migrate(self, locales=False):
    """
    Migrate documents (or their locales, when ``locales`` is True) from
    the source connection into the target session in batches, rebuilding
    archive/version rows along the way.

    NOTE(review): the grouping logic assumes the source query returns all
    versions of one document contiguously, with the latest version
    flagged via ``is_latest_version`` — confirm against the query.
    NOTE(review): this function appears duplicated elsewhere in this file.
    """
    self.start('{0}{1}'.format(
        'locales of ' if locales else '', self.get_name()))
    query_count = self.get_count_query_locales() if locales else \
        self.get_count_query()
    total_count = self.connection_source.execute(
        text(query_count)).fetchone()[0]
    print('Total: {0} rows'.format(total_count))
    query = text(self.get_query_locales() if locales else self.get_query())
    model_document = self.get_model_document(locales)
    # make sure that the version is not managed by SQLAlchemy, the value
    # that is provided as version should be used
    document_mapper = class_mapper(model_document)
    document_mapper.version_id_prop = None
    document_mapper.version_id_col = None
    document_mapper.version_id_generator = None
    if not locales:
        # geometries are only migrated for full documents, not locales
        document_mapper = class_mapper(DocumentGeometry)
        document_mapper.version_id_prop = None
        document_mapper.version_id_col = None
        document_mapper.version_id_generator = None
    batch = DocumentBatch(
        self.session_target, self.batch_size,
        model_document,
        self.get_model_archive_document(locales),
        self.get_model_geometry(),
        self.get_model_archive_geometry())
    with transaction.manager, batch:
        # Running state for the current group of document versions.
        count = 0
        current_document_id = None
        current_locale = None
        version = 1
        archives = []
        geometry_archives = []
        for document_in in self.connection_source.execute(query):
            count += 1
            if current_document_id is None:
                # First row of a new document group.
                current_document_id = document_in.id
                version = 1
                if locales:
                    current_locale = document_in.culture
            else:
                if current_document_id != document_in.id:
                    # The previous group ended without a row flagged as
                    # latest version: discard its pending archives and
                    # start a fresh group.
                    print('WARNING: no latest version for {0}'.format(
                        current_document_id))
                    archives = []
                    geometry_archives = []
                    version = 1
                    current_document_id = document_in.id
                    if locales:
                        current_locale = document_in.culture
            if locales:
                document_archive = self.get_document_locale_archive(
                    document_in, version)
            else:
                document_archive = self.get_document_archive(
                    document_in, version)
                # do not migrate any empty geom for non geom types
                geometry_archive = self.get_document_geometry_archive(
                    document_in, version)
                if geometry_archive is not None:
                    geometry_archives.append(geometry_archive)
            archives.append(document_archive)
            if document_in.is_latest_version:
                # Group complete: emit the current document plus all
                # accumulated archives, then reset the running state.
                if locales:
                    if current_locale != document_in.culture:
                        raise Exception(
                            'locale of the latest version does not match '
                            'locale of the first version {0}'.format(
                                current_document_id))
                    document = self.get_document_locale(
                        document_in, version)
                else:
                    document = self.get_document(
                        document_in, version)
                    geometry = self.get_document_geometry(
                        document_in, version)
                    # do not migrate any empty geom for non geom types
                    if geometry is not None:
                        batch.add_geometry(geometry)
                    batch.add_geometry_archives(geometry_archives)
                batch.add_archive_documents(archives)
                batch.add_document(document)
                archives = []
                geometry_archives = []
                version = 1
                current_document_id = None
                current_locale = None
            else:
                version += 1
            self.progress(count, total_count)
        # the transaction will not be commited automatically when doing
        # a bulk insertion. `mark_changed` forces a commit.
        zope.sqlalchemy.mark_changed(self.session_target)
    self.stop()