def make_column(spec):
    if isinstance(spec, string_types):
        if spec in TERMS.by_uri:
            return TERMS.by_uri[spec].to_column()
        return Column(name=spec, datatype='string')
    if isinstance(spec, dict):
        return Column.fromvalue(spec)
    if isinstance(spec, Column):
        return spec
    raise TypeError(spec)
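
# Usage sketch for make_column (illustrative only): the term URI below is an
# assumption about how TERMS.by_uri is keyed, and the example column specs are
# made up; the function accepts a term URI, a plain name, a dict, or a Column
# instance, as the branches above show.
#
#   make_column('http://cldf.clld.org/v1.0/terms.rdf#id')   # known CLDF term -> its canonical Column
#   make_column('Gloss')                                     # unknown name -> string-typed Column
#   make_column({'name': 'Count', 'datatype': 'integer'})    # dict -> Column.fromvalue(...)
#   make_column(Column(name='Comment'))                      # Column instance is returned unchanged
#   make_column(42)                                          # anything else raises TypeError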
def __str__(self):
    # We override the base class' method to fix the order of columns.
    tg = TableGroup.fromvalue(self.MD)
    for col in sorted(
            self.column_labels,
            key=lambda t: (t == IPA_COLUMN, t.lower()),
            reverse=True):
        if col != self.GRAPHEME_COL:
            tg.tables[0].tableSchema.columns.append(
                Column.fromvalue({"name": col, "null": self.NULL}))
    return tg.tables[0].write(self.iteritems(), fname=None).decode('utf8').strip()
def create(self, req, filename=None, verbose=True, outfile=None):
    cldf_cfg = req.registry.getUtility(ICldfConfig)
    with TemporaryDirectory() as tmpd:
        cls = getattr(dataset, cldf_cfg.module)
        ds = cls.in_dir(tmpd)
        ds.properties['dc:bibliographicCitation'] = text_citation(req, req.dataset)
        ds.properties['dc:publisher'] = '%s, %s' % (
            req.dataset.publisher_name, req.dataset.publisher_place)
        ds.properties['dc:license'] = req.dataset.license
        ds.properties['dc:issued'] = req.dataset.published.isoformat()
        ds.properties['dc:title'] = req.dataset.name
        ds.properties['dc:creator'] = req.dataset.formatted_editors()
        ds.properties['dc:identifier'] = req.resource_url(req.dataset)
        ds.properties['dcat:accessURL'] = req.route_url('download')
        if DBSession.query(Sentence).count():
            ds.add_component('ExampleTable')
        if DBSession.query(DomainElement).count():
            ds.add_component('CodeTable', {'name': 'Number', 'datatype': 'integer'})
        ds.add_component('ParameterTable')
        ds.add_component('LanguageTable')
        ds.add_table('contributions.csv', 'ID', 'Name', 'Description', 'Contributors')
        ds.add_columns(
            ds.primary_table,
            Column.fromvalue({
                'name': 'Contribution_ID',
                'datatype': 'string',
                'valueUrl': url_template(req, 'contribution', 'contribution').uri,
            }))
        ds.add_foreign_key(ds.primary_table, 'Contribution_ID', 'contributions.csv', 'ID')
        ds['LanguageTable'].aboutUrl = url_template(req, 'language', 'ID')
        ds['ParameterTable'].aboutUrl = url_template(req, 'parameter', 'ID')
        ds[ds.primary_table].aboutUrl = url_template(req, 'value', 'ID')
        cldf_cfg.custom_schema(req, ds)

        for src in cldf_cfg.query(Source):
            ds.sources.add(cldf_cfg.convert(Source, src, req))
        fname = outfile or self.abspath(req)

        transaction.abort()
        tabledata = defaultdict(list)
        for table, model in [
            ('ParameterTable', Parameter),
            ('CodeTable', DomainElement),
            ('LanguageTable', Language),
            ('ExampleTable', Sentence),
            ('contributions.csv', Contribution),
            (ds.primary_table, Value),
        ]:
            if verbose:
                print('exporting {0} ...'.format(model))
            transaction.begin()
            for item in cldf_cfg.query(model):
                tabledata[table].append(cldf_cfg.convert(model, item, req))
            transaction.abort()
            if verbose:
                print('... done')
        transaction.begin()
        ds.write(**cldf_cfg.custom_tabledata(req, tabledata))
        ds.validate()
        shutil.make_archive(str(fname.parent / fname.stem), 'zip', str(tmpd))
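
# Usage sketch (an assumption, not taken from this module): in a clld app the
# CLDF export is typically produced by calling ``create`` on the registered
# download adapter with a request object; the class name and arguments below
# are illustrative only.
#
#   dl = CldfDownload(Dataset, 'myapp')
#   dl.create(req, verbose=True, outfile=pathlib.Path('/tmp/myapp-cldf.zip'))
#
# As the code above shows, ``create`` builds the CLDF dataset in a temporary
# directory, fills each table from the ICldfConfig queries, validates the
# result, and writes the zip archive at ``outfile`` (or ``self.abspath(req)``
# when no outfile is given).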