# CLI entry points for setting up the CKAN DataStore (paster/click command group).
import os
from ckan.lib.cli import (
    load_config,
    parse_db_config,
    paster_click_group,
    click_config_option,
)
from ckanext.datastore.backend.postgres import identifier
from ckanext.datastore.controller import DUMP_FORMATS, dump_to
import click

# Click group under which the datastore commands below are registered.
datastore_group = paster_click_group(
    summary=u'Perform commands to set up the datastore')


@datastore_group.command(
    u'set-permissions',
    help=u'Emit an SQL script that will set the permissions for the '
    u'datastore users as configured in your configuration file.')
@click.help_option(u'-h', u'--help')
@click_config_option
@click.pass_context
def set_permissions(ctx, config):
    """Emit SQL that sets up datastore user permissions.

    :param ctx: click context; ``ctx.obj['config']`` is the fallback CKAN
        config path used when ``-c/--config`` is not supplied.
    :param config: path to the CKAN configuration file (may be ``None``).
    """
    load_config(config or ctx.obj['config'])

    # Resolve the three database connections involved: the datastore
    # write/read URLs and the main CKAN database.
    write_url = parse_db_config(u'ckan.datastore.write_url')
    read_url = parse_db_config(u'ckan.datastore.read_url')
    db_url = parse_db_config(u'sqlalchemy.url')
    # NOTE(review): this chunk is truncated here — the code that actually
    # renders and prints the SQL script is not visible in this view.
# CLI command group for the ckanext-matomo extension.
import click
from ckan.lib.cli import (
    load_config,
    paster_click_group,
    click_config_option,
)

from ckanext.matomo import commands

# Click group under which the Matomo commands are registered.
matomo = paster_click_group(summary=u'Matomo related commands')


@matomo.command(u'fetch', help='Fetches data from Matomo to local database')
@click_config_option
@click.pass_context
@click.option(u'--dryrun', is_flag=True,
              help="Prints what would be updated without making any changes.")
@click.option(
    u'--since',
    help=
    "First date to fetch in YYYY-MM-DD format. Default: latest PackageStats entry date."
)
@click.option(
    u'--until',
    help="Last date to fetch in YYYY-MM-DD format. Default: current date.")
def fetch(ctx, config, dryrun, since, until):
    """Fetch Matomo analytics data into the local database.

    Thin CLI wrapper: loads the CKAN configuration, then delegates all
    actual work to ``ckanext.matomo.commands.fetch``.

    :param ctx: click context; ``ctx.obj['config']`` is the fallback
        CKAN config path.
    :param config: path to the CKAN configuration file (may be ``None``).
    :param dryrun: when set, report changes without applying them.
    :param since: first date to fetch (YYYY-MM-DD), or ``None`` for default.
    :param until: last date to fetch (YYYY-MM-DD), or ``None`` for default.
    """
    load_config(config or ctx.obj['config'])
    commands.fetch(dryrun, since, until)
# CLI tools for crawling the PRH API and saving responses as JSON.
import os
import shutil

try:
    from pathlib import Path
except ImportError:
    from pathlib2 import Path  # python 2 backport

from ckan.lib.cli import (load_config, paster_click_group,
                          click_config_option)
import click

from scripts.prh.get_prh_data import PRHData
from scripts.prh.make_csv_of_prh_data import make_csv_of_prh_data

# Click group under which the PRH commands are registered.
prh_group = paster_click_group(summary="Tools for PRH api")


@prh_group.command(
    u'fetch-data',
    help='Crawls through PRH api and saves responses as parsed json.')
@click.argument(u'BASE_DIR', nargs=1, required=True)
@click.help_option(u'-h', u'--help')
@click_config_option
@click.option(u'-y', u'--year', default=1978, type=click.INT)
@click.option(u'-s', u'--start-from-beginning', is_flag=True)
@click.option(u'-pid', u'--package_id', required=True)
@click.pass_context
def fetch(ctx, base_dir, year, start_from_beginning, package_id, config):
    """Crawl the PRH API and store parsed JSON under ``base_dir``.

    :param base_dir: directory under which ``data/json/prh_data`` lives.
    :param year: starting year for the crawl (default 1978).
    :param start_from_beginning: flag; presumably restarts the crawl from
        scratch instead of resuming — TODO confirm against full source.
    :param package_id: required package identifier the data belongs to.
    :param config: path to the CKAN configuration file (may be ``None``).
    """
    load_config(config or ctx.obj['config'])
    # NOTE(review): this chunk is truncated mid-expression below; the
    # remainder of the function is not visible in this view.
    if Path(os.path.join(base_dir, 'data', 'json', 'prh_data',
        # NOTE(review): this chunk begins mid-method — the enclosing
        # ``def`` is not visible here; indentation below is reconstructed.
        if found != len(terms):
            # Python 2 print statement — this module is py2-only syntax.
            print "Term not found"
            return 1
        # Register the English term as its own translation.
        for term in terms:
            translated.append(('en', term, term))
        context = self._create_context()
        # Persist every non-empty translation through the helper.
        for locale, term, translation in translated:
            if translation:
                self._add_term(context, locale, term, translation)


# Click group under which the dataset migration commands are registered.
ytp_dataset_group = paster_click_group(
    summary=u'Dataset related commands.'
)


@ytp_dataset_group.command(
    u'migrate_author_email',
    help=u'Migrates empty author emails that caused problems in updating datasets'
)
@click_config_option
@click.option(u'--dryrun', is_flag=True)
@click.pass_context
def migrate_author_email(ctx, config, dryrun):
    """Collect patches for datasets whose author email caused update failures.

    Iterates all packages from the search index; truncated in this chunk
    before the patch logic is visible.

    :param config: path to the CKAN configuration file (may be ``None``).
    :param dryrun: when set, presumably report without patching — TODO
        confirm against the non-visible remainder of this function.
    """
    load_config(config or ctx.obj['config'])
    package_patches = []
    # Iterate all packages ('*:*') in batches of 1000.
    for old_package_dict in package_generator('*:*', 1000):
        # NOTE(review): this chunk begins mid-method — the enclosing
        # ``def`` is not visible here; indentation below is reconstructed.
        found += 1
        if found != len(terms):
            # Python 2 print statement — this module is py2-only syntax.
            print "Term not found"
            return 1
        # Register the English term as its own translation.
        for term in terms:
            translated.append(('en', term, term))
        context = self._create_context()
        # Persist every non-empty translation through the helper.
        for locale, term, translation in translated:
            if translation:
                self._add_term(context, locale, term, translation)


# Click group under which the dataset migration commands are registered.
ytp_dataset_group = paster_click_group(summary=u'Dataset related commands.')


@ytp_dataset_group.command(
    u'migrate_author_email',
    help=
    u'Migrates empty author emails that caused problems in updating datasets')
@click_config_option
@click.option(u'--dryrun', is_flag=True)
@click.pass_context
def migrate_author_email(ctx, config, dryrun):
    """Collect patches for datasets whose author email caused update failures.

    Iterates all packages from the search index, selecting those whose
    ``author_email`` is ``None``; truncated in this chunk before the
    patch logic is visible.

    :param config: path to the CKAN configuration file (may be ``None``).
    :param dryrun: when set, presumably report without patching — TODO
        confirm against the non-visible remainder of this function.
    """
    load_config(config or ctx.obj['config'])
    package_patches = []
    # Iterate all packages ('*:*') in batches of 1000.
    for old_package_dict in package_generator('*:*', 1000):
        if old_package_dict.get('author_email') is None: