def metadata(self):
    """Return the (lazily computed) metadata EntitySet for this EntitySet.

    The metadata is a structural copy produced by round-tripping this
    EntitySet through its serialized description. It is cached on
    ``self._data_description`` and only rebuilt when that cache is empty.
    """
    # Serve the cached copy when one already exists.
    if self._data_description is not None:
        return self._data_description
    # Cache miss: serialize this EntitySet and rebuild a metadata-only copy.
    described = serialize.entityset_to_description(self)
    self._data_description = deserialize.description_to_entityset(described)
    return self._data_description
def test_all_ww_logical_types():
    """Round-trip an EntitySet containing every woodwork logical type."""
    type_names = list_logical_types()['type_string'].to_list()
    empty_frame = pd.DataFrame(columns=type_names)
    entityset = EntitySet()
    # Map every column to the logical type of the same name; Ordinal needs
    # an instantiated type because it carries an ``order`` parameter.
    type_map = dict(zip(type_names, type_names))
    type_map['ordinal'] = Ordinal(order=[])
    entityset.add_dataframe(dataframe=empty_frame,
                            dataframe_name='all_types',
                            index='integer',
                            logical_types=type_map)
    # Serialize to a description and rebuild; the copy must deep-equal the original.
    round_tripped = deserialize.description_to_entityset(
        serialize.entityset_to_description(entityset))
    assert entityset.__eq__(round_tripped, deep=True)
def test_with_custom_ww_logical_type():
    """Round-trip an EntitySet whose schema uses a user-registered LogicalType."""
    class CustomLogicalType(LogicalType):
        pass

    # Register the custom type so woodwork can resolve it by name.
    ww_type_system.add_type(CustomLogicalType)

    column_names = ['integer', 'natural_language', 'custom_logical_type']
    empty_frame = pd.DataFrame(columns=column_names)
    entityset = EntitySet()
    type_map = {
        'integer': 'integer',
        'natural_language': 'natural_language',
        'custom_logical_type': CustomLogicalType,
    }
    entityset.add_dataframe(dataframe=empty_frame,
                            dataframe_name='custom_type',
                            index='integer',
                            logical_types=type_map)
    round_tripped = deserialize.description_to_entityset(
        serialize.entityset_to_description(entityset))
    # The rebuilt column must carry the custom type, and the whole set must match.
    rebuilt_type = round_tripped['custom_type'].ww.logical_types['custom_logical_type']
    assert isinstance(rebuilt_type, CustomLogicalType)
    assert entityset.__eq__(round_tripped, deep=True)
def to_dictionary(self):
    """Serialize this EntitySet into its dictionary description."""
    description = serialize.entityset_to_description(self)
    return description
def __dask_tokenize__(self): return (EntitySet, serialize.entityset_to_description(self.metadata))
def test_dask_entityset_description(dask_es):
    """A dask-backed EntitySet round-trips through its description."""
    round_tripped = deserialize.description_to_entityset(
        serialize.entityset_to_description(dask_es))
    # Descriptions omit the data, so compare against the metadata copy.
    assert dask_es.metadata.__eq__(round_tripped, deep=True)