Example #1
0
 def get_subdatasets(self, pattern=None, fulfilled=None, absolute=False,
                     recursive=False, recursion_limit=None, edges=False):
     """DEPRECATED: use `subdatasets()`"""
     # TODO wipe this function out completely once we are comfortable
     # with it. Internally we don't need or use it anymore.
     import inspect
     # report which caller still relies on this deprecated shim
     caller_name = inspect.stack()[1][3]
     lgr.warning('%s still uses Dataset.get_subdatasets(). RF to use `subdatasets` command', caller_name)
     from datalad.coreapi import subdatasets
     # arguments shared by both delegation paths
     common_kwargs = dict(
         dataset=self,
         fulfilled=fulfilled,
         recursive=recursive,
         recursion_limit=recursion_limit,
         bottomup=True,
     )
     if not edges:
         # plain path listing; let the command do the (rel)path conversion
         return subdatasets(
             result_xfm='{}paths'.format('' if absolute else 'rel'),
             **common_kwargs)

     def _adjust(path):
         # convert to a dataset-relative path unless absolute was requested
         return path if absolute else relpath(path, start=self.path)

     # (parent, child) edge tuples for every reported subdataset
     return [(_adjust(rec['parentpath']), _adjust(rec['path']))
             for rec in subdatasets(**common_kwargs)]
Example #2
0
 def get_subdatasets(self, pattern=None, fulfilled=None, absolute=False,
                     recursive=False, recursion_limit=None, edges=False):
     """DEPRECATED: use `subdatasets()`"""
     # TODO wipe this function out completely once we are comfortable
     # with it. Internally we don't need or use it anymore.
     import inspect
     lgr.warning(
         '%s still uses Dataset.get_subdatasets(). RF to use `subdatasets` command',
         inspect.stack()[1][3])
     from datalad.coreapi import subdatasets
     if not edges:
         # simple case: hand back (rel)paths straight from the command
         return subdatasets(
             dataset=self,
             fulfilled=fulfilled,
             recursive=recursive,
             recursion_limit=recursion_limit,
             bottomup=True,
             result_xfm='{}paths'.format('' if absolute else 'rel'))
     # edge mode: build (parent, child) tuples from the result records
     edge_list = []
     for rec in subdatasets(
             dataset=self,
             fulfilled=fulfilled,
             recursive=recursive,
             recursion_limit=recursion_limit,
             bottomup=True):
         parent = rec['parentpath']
         child = rec['path']
         if not absolute:
             # report paths relative to this dataset's root
             parent = relpath(parent, start=self.path)
             child = relpath(child, start=self.path)
         edge_list.append((parent, child))
     return edge_list
Example #3
0
    def _get_dataset_metadata(self):
        """
        Returns
        -------
        dict
          keys are homogenized datalad metadata keys, values are arbitrary
        """
        # start from the on-disk dataset-level metadata file, if present
        meta_fpath = opj(self.ds.path, self._dataset_metadata_filename)
        meta = jsonload(meta_fpath, fixup=True) if exists(meta_fpath) else {}
        # map the legacy 'definition' key onto the JSON-LD '@context' key
        if 'definition' in meta:
            meta['@context'] = meta.pop('definition')
        meta['@id'] = self.ds.id
        parts = []
        for sds in subdatasets(
                dataset=self.ds,
                recursive=False,
                return_type='generator',
                result_renderer='disabled'):
            parts.append({
                # this version would change anytime we aggregate metadata, let's not
                # do this for now
                #'version': sds['revision'],
                'type': sds['type'],
                'name': sds['gitmodule_name'],
            })
        if parts:
            meta['haspart'] = parts
        superds = self.ds.get_superdataset(registered_only=True, topmost=False)
        if superds:
            meta['ispartof'] = {
                '@id': superds.id,
                'type': 'dataset',
            }

        return meta
Example #4
0
    def _get_dataset_metadata(self):
        """
        Returns
        -------
        dict
          keys are homogenized datalad metadata keys, values are arbitrary
        """
        # load any dataset-level metadata stored on disk
        metadata_path = opj(self.ds.path, DATASET_METADATA_FILE)
        metadata = {}
        if exists(metadata_path):
            metadata = jsonload(metadata_path, fixup=True)
        # translate the legacy 'definition' key into JSON-LD '@context'
        if 'definition' in metadata:
            metadata['@context'] = metadata.pop('definition')
        metadata['@id'] = self.ds.id
        # collect linkage records for all immediate subdatasets
        haspart = []
        for rec in subdatasets(dataset=self.ds,
                               recursive=False,
                               return_type='generator',
                               result_renderer='disabled',
                               on_failure='ignore'):
            haspart.append({
                # this version would change anytime we aggregate metadata, let's not
                # do this for now
                #'version': rec['revision'],
                'type': rec['type'],
                'name': rec['gitmodule_name'],
            })
        if haspart:
            metadata['haspart'] = haspart
        superds = self.ds.get_superdataset(registered_only=True, topmost=False)
        if superds:
            metadata['ispartof'] = {
                '@id': superds.id,
                'type': 'dataset',
            }

        return metadata