def parse_header(self, elem):
    """Parse a message <Header> element.

    Returns a list containing a :class:`Header`, followed by a
    :class:`DataflowDefinition` when the header references a
    DataStructureDefinition; see .initialize().
    """
    values = self._collect('header', elem)

    # Translate any reference to a DataStructureDefinition into keyword
    # arguments for _maintained()
    attrs = {}
    for key in ('id', 'agencyid', 'version', 'urn'):
        ref_value = values.pop('structure_ref_' + key, None)
        if not ref_value:
            continue
        if key == 'agencyid':
            attrs['maintainer'] = Agency(id=ref_value)
        else:
            attrs[key] = ref_value

    # A bare URN reference carries no ID of its own; borrow the message's
    # structure ID
    if set(attrs.keys()) == {'urn'}:
        attrs['id'] = values['structure_id']

    if 'id' not in attrs:
        extra = []
    else:
        # Create the DSD and DFD
        dsd = self._maintained(DataStructureDefinition, **attrs)
        dfd = DataflowDefinition(id=values.pop('structure_id'),
                                 structure=dsd)

        # Also store the dimension at observation
        self._set_obs_dim(values.pop('dim_at_obs'))
        extra = [dfd]

    # Maybe return the DFD; see .initialize()
    return [Header(**values)] + extra
class DataMessage(Message):
    """Data Message.

    .. note:: A DataMessage may contain zero or more :class:`.DataSet`, so
       :attr:`data` is a list. To retrieve the first (and possibly only)
       data set in the message, access the first element of the list:
       ``msg.data[0]``.
    """
    #: :class:`list` of :class:`pandasdmx.model.DataSet`
    data: List[DataSet] = []

    #: :class:`.DataflowDefinition` that contains the data.
    dataflow: DataflowDefinition = DataflowDefinition()

    # TODO infer the observation dimension from the DSD, e.g.
    # - If a *TimeSeriesDataSet, it's the TimeDimension,
    # - etc.
    observation_dimension: Union[_AllDimensions, List[Dimension]] = None

    # Convenience access
    @property
    def structure(self):
        """The DataStructureDefinition used in the DataMessage.dataflow."""
        return self.dataflow.structure

    def __repr__(self):
        """String representation."""
        lines = [super().__repr__()]

        # DataMessage contents; truthiness check instead of len() == 0
        if self.data:
            lines.append('DataSet ({})'.format(len(self.data)))
        lines.extend(_summarize(self, ('dataflow', 'observation_dimension')))

        return '\n '.join(lines)
class DataMessage(Message):
    """Data Message.

    .. note:: A DataMessage may contain zero or more :class:`.DataSet`, so
       :attr:`data` is a list. To retrieve the first (and possibly only)
       data set in the message, access the first element of the list:
       ``msg.data[0]``.
    """
    #: :class:`list` of :class:`.DataSet`.
    data: List[DataSet] = []

    #: :class:`.DataflowDefinition` that contains the data.
    dataflow: DataflowDefinition = DataflowDefinition()

    # TODO infer the observation dimension from the DSD, e.g.
    # - If a *TimeSeriesDataSet, it's the TimeDimension,
    # - etc.
    observation_dimension: Union[_AllDimensions, List[Dimension]] = None

    @property
    def structure(self):
        """DataStructureDefinition used in the :attr:`dataflow`."""
        return self.dataflow.structure

    def __repr__(self):
        """String representation."""
        # Start from the base Message repr, then append contents
        parts = [super().__repr__()]
        if self.data:
            parts.append('DataSet ({})'.format(len(self.data)))
        parts += _summarize(self, ('dataflow', 'observation_dimension'))
        return '\n '.join(parts)
def parse_header(self, elem):
    """Parse a message <Header> element.

    Returns a list containing a :class:`Header` built from the collected
    values, followed by a :class:`DataflowDefinition` when the header
    references a DataStructureDefinition; see .initialize().
    """
    # Collect values from *elem* and its children using XPath
    values = {}
    for key, xpath in HEADER_XPATH.items():
        matches = xpath(elem)
        if len(matches) == 0:
            continue
        # Scalar for a single match; list when the XPath matched repeatedly
        values[key] = matches[0] if len(matches) == 1 else matches

    # Handle a reference to a DataStructureDefinition
    attrs = {}
    for k in ['id', 'agencyid', 'version', 'urn']:
        value = values.pop('structure_ref_' + k, None)
        if not value:
            continue
        elif k == 'agencyid':
            # Wrap the agency ID as a maintainer object
            attrs['maintainer'] = Agency(id=value)
        else:
            attrs[k] = value

    # A bare URN reference carries no ID of its own; borrow the message's
    # structure ID
    if set(attrs.keys()) == {'urn'}:
        attrs['id'] = values['structure_id']

    extra = []
    if 'id' in attrs:
        # Create or retrieve the DSD. NB if the dsd argument was provided
        # to read_message(), this should be the same DSD
        dsd = self._maintained(DataStructureDefinition, **attrs)

        if 'structure_id' in values:
            # Add the DSD to the index a second time, using the message
            # -specific structure ID (rather that the DSD's own ID).
            key = ('DataStructureDefinition', values['structure_id'])
            self._index[key] = dsd

        # Create a DataflowDefinition
        dfd = DataflowDefinition(id=values.pop('structure_id'),
                                 structure=dsd)
        extra.append(dfd)

        # Store the observation at dimension level
        dim_at_obs = values.pop('dim_at_obs')
        if dim_at_obs == 'AllDimensions':
            self._obs_dim = AllDimensions
        else:
            # Retrieve or create the Dimension
            args = dict(id=dim_at_obs, order=1e9)
            if 'TimeSeries' in self._stack[0]:
                # {,StructureSpecific}TimeSeriesData message → the
                # dimension at observation level is a TimeDimension
                args['cls'] = TimeDimension
            self._obs_dim = dsd.dimensions.get(**args)

    # Maybe return the DFD; see .initialize()
    return [Header(**values)] + extra
def test_contentconstraint():
    """A ContentConstraint can be constructed and populated."""
    role = ConstraintRole(role=ConstraintRoleType['allowable'])
    constraint = ContentConstraint(role=role)
    constraint.content = {DataflowDefinition()}
    constraint.data_content_region = CubeRegion(included=True, member={})