def init():
    '''Initialize or update data and indexes'''
    # NOTE: the docstring doubles as the CLI help text — keep it user-facing.
    # Run (and record) any pending DB migration scripts first.
    log.info('Apply DB migrations if needed')
    migrate(record=True)
    # Create/update the ElasticSearch index mappings.
    log.info('Initialize or update ElasticSearch mappings')
    es.initialize()
    # Remaining bootstrap steps are not implemented yet — logged as TODOs.
    log.info('%s: Feed initial data if needed', yellow('TODO'))
    log.info('%s: Create an administrator', yellow('TODO'))
def status_label(record):
    # Render a colored, human-readable status for a migration record:
    # green date when applied OK, yellow when never applied, red otherwise.
    if record.ok:
        applied_on = record.last_date.strftime(DATE_FORMAT)
        return green(applied_on)
    if not record.exists():
        return yellow('Not applied')
    return red(record.status)
def status():
    '''Display the database migrations status'''
    # One output line per known migration script, colored by its state.
    for plugin, package, filename in available_migrations():
        record = get_migration(plugin, filename)
        label = (green(record['date'].strftime(DATE_FORMAT))
                 if record else yellow('Not applied'))
        log_status(plugin, filename, label)
def init():
    '''Initialize or update data and indexes'''
    # NOTE: the docstring doubles as the CLI help text — keep it user-facing.
    # Create/update the ElasticSearch index mappings.
    log.info('Initialize or update ElasticSearch mappings')
    es.initialize()
    # Populate the database with generated sample data.
    log.info('Build sample fixture data')
    generate_fixtures()
    # Run (and record) any pending DB migration scripts.
    log.info('Apply DB migrations if needed')
    migrate(record=True)
    # Initial data feeding is not implemented yet — logged as a TODO.
    log.info('%s: Feed initial data if needed', yellow('TODO'))
def migrate(record):
    '''Perform database migrations'''
    # With record=True the script is only marked as applied;
    # otherwise it is actually executed against the database.
    apply_one = record_migration if record else execute_migration
    for plugin, package, filename in available_migrations():
        # Skip scripts already present in the migrations collection.
        if get_migration(plugin, filename):
            log_status(plugin, filename, cyan('Skipped'))
            continue
        label = purple('Recorded') if record else yellow('Apply')
        log_status(plugin, filename, label)
        script = resource_string(package, join('migrations', filename))
        apply_one(plugin, filename, script)
def parse_url(url, quiet=False, rid=''):
    '''Parse the datasets in a DCAT format located at URL (debug)'''
    if quiet:
        # Silence the chattiest loggers so the debug output stays readable.
        verbose_loggers = ['rdflib', 'udata.core.dataset']
        [logging.getLogger(l).setLevel(logging.ERROR) for l in verbose_loggers]

    class MockSource:
        # Minimal stand-in for a harvest source: only .url is read here.
        url = ''

    class MockJob:
        # Minimal stand-in for a harvest job: just collects parsed items.
        items = []

    class MockDatasetFactory(DatasetFactory):
        '''Use DatasetFactory without .save()'''
        @classmethod
        def _create(cls, model_class, *args, **kwargs):
            # Build the model instance but never persist it.
            instance = model_class(*args, **kwargs)
            return instance

    echo(cyan('Parsing url {}'.format(url)))
    source = MockSource()
    source.url = url
    backend = DcatBackend(source, dryrun=True)
    backend.job = MockJob()
    format = backend.get_format()
    echo(yellow('Detected format: {}'.format(format)))
    graph = backend.parse_graph(url, format)

    # serialize/unserialize graph like in the job mechanism
    _graph = graph.serialize(format=format, indent=None)
    graph = Graph(namespace_manager=namespace_manager)
    graph.parse(data=_graph, format=format)

    for item in backend.job.items:
        # rid acts as a substring filter on remote ids; empty matches all.
        if not rid or rid in item.remote_id:
            echo(magenta('Processing item {}'.format(item.remote_id)))
            echo('Item kwargs: {}'.format(yellow(item.kwargs)))
            node = backend.get_node_from_item(item)
            dataset = MockDatasetFactory()
            dataset = dataset_from_rdf(graph, dataset, node=node)
            echo('')
            echo(green('Dataset found!'))
            echo('Title: {}'.format(yellow(dataset)))
            echo('License: {}'.format(yellow(dataset.license)))
            echo('Description: {}'.format(yellow(dataset.description)))
            echo('Tags: {}'.format(yellow(dataset.tags)))
            echo('Resources: {}'.format(yellow([(r.title, r.format, r.url) for r in dataset.resources])))
            try:
                # Validation errors are logged, not raised: this is a debug tool.
                dataset.validate()
            except mongoengine.errors.ValidationError as e:
                log.error(e, exc_info=True)
            else:
                echo(green('Dataset is valid ✅'))
            echo('')
def info(plugin_or_specs, filename):
    '''
    Display detailed info about a migration
    '''
    migration = migrations.get(plugin_or_specs, filename)
    # Header line: migration identity plus its colored status.
    log_status(migration, status_label(migration.record))
    # Loading the module may fail (file removed/renamed) — degrade gracefully.
    try:
        echo(migration.module.__doc__)
    except migrations.MigrationError:
        echo(yellow('Module not found'))
    # Then dump every recorded operation, if any.
    for operation in migration.record.get('ops', []):
        display_op(operation)
def migrate(record, dryrun=False):
    '''Perform database migrations'''
    # With record=True the script is only marked as applied;
    # otherwise it is actually executed against the database.
    handler = record_migration if record else execute_migration
    success = True
    for plugin, package, filename in available_migrations():
        # Skip scripts already applied, and everything after a first failure.
        if get_migration(plugin, filename) or not success:
            log_status(plugin, filename, cyan('Skipped'))
            continue
        log_status(plugin, filename,
                   purple('Recorded') if record else yellow('Apply'))
        script = resource_string(package, join('migrations', filename))
        success &= handler(plugin, filename, script, dryrun=dryrun)
def migrate(record, dry_run=False):
    '''Perform database migrations'''
    success = True
    for migration in migrations.list_available():
        # Skip migrations already applied, and everything after a first failure.
        if migration.record.ok or not success:
            log_status(migration, cyan('Skipped'))
        else:
            status = magenta('Recorded') if record else yellow('Apply')
            log_status(migration, status)
            try:
                # recordonly=True only marks the migration as applied;
                # dryrun=True runs without persisting — presumably mutually
                # exclusive flags, handled inside Migration.execute.
                output = migration.execute(recordonly=record, dryrun=dry_run)
            except migrations.RollbackError as re:
                # Migration failed and a rollback was attempted: show the
                # failed migration's output first, then the rollback outcome
                # (green only if the rollback itself raised no exception).
                format_output(re.migrate_exc.output, False)
                log_status(migration, red('Rollback'))
                format_output(re.output, not re.exc)
                success = False
            except migrations.MigrationError as me:
                format_output(me.output, False, traceback=me.traceback)
                success = False
            else:
                format_output(output, True)
    return success