Code example #1
File: views.py Project: carolynjanie/Revenue
    def get(self, request, ftid):
        footprint = get_object_or_404(Footprints, id=ftid)
        floors = Floors.objects.filter(footprint=footprint)
        # filter() never raises on an empty result, so the original
        # try/except around it could never fire; check explicitly instead.
        if not floors.exists():
            raise CommandError(
                'No Floors registered for this footprint. Kindly register and try again'
            )
        fls = []
        for floor in floors:
            fls.append({
                'name': floor.floor_name,
                'no': floor.floor_no,
                'rooms': Rooms.objects.filter(floor=floor).count()
            })
        return JsonResponse({'objs': fls})
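Note: CommandError lives in django.core.management and is designed for management commands, where Django catches it, prints the message to stderr, and exits non-zero; raised inside a view's get() as above, it surfaces as an ordinary 500 error. A minimal sketch of the conventional pattern, reusing the Floors model from the example (the command and argument names are illustrative assumptions):

from django.core.management.base import BaseCommand, CommandError

class Command(BaseCommand):
    help = "List the floors registered for a footprint"

    def add_arguments(self, parser):
        parser.add_argument("footprint_id", type=int)

    def handle(self, *args, **options):
        # Floors is the model used in the example above.
        floors = Floors.objects.filter(footprint_id=options["footprint_id"])
        if not floors.exists():
            raise CommandError("No Floors registered for this footprint.")
        for floor in floors:
            self.stdout.write(f"{floor.floor_no}: {floor.floor_name}")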
Code example #2
    def handle(self, app_label=None, revision=None, **kwargs):
        appconfig = self.lookup_app(app_label)

        # Guard against revision=None (the default) before the substring test.
        if revision and ":" in revision:
            raise CommandError("Range revision is not allowed")

        self.stdout.write(
            self.style.SUCCESS("Stamping revision for %s on database %s" %
                               (appconfig.name, appconfig.db.alias)))

        with alembic.context.EnvironmentContext(
                appconfig.config,
                appconfig.script,
                fn=partial(self.stamp, appconfig=appconfig, revision=revision),
                destination_rev=revision,
        ) as context:
            self.run_env(context, appconfig)
Code example #3
    def handle(self, *args, **options):
        if settings.DEBUG:
            self.stdout.write('Start parsing')

            country_parser = CountryParser()
            country_parser.get_countries()
            country_parser.get_alternate_names()
            country_parser.get_alternate_locale_names('ru')

            city_parser = CityParser()
            city_parser.get_cities()
            city_parser.get_alternate_names()
            city_parser.get_alternate_locale_names('ru')

            self.stdout.write('Successful parsing!')
        else:
            raise CommandError('ERROR: Please set debug mode')
Code example #4
    def handle_one_table(self, table_name):
        if table_name not in self.tables_to_export:
            raise CommandError('table "%s" is not supported' % table_name)

        count = 0
        with closing(connection.cursor()) as cursor:
            # table_name was validated against tables_to_export above, so
            # plain string concatenation cannot inject arbitrary SQL here.
            cursor.execute('SELECT COUNT(*) FROM ' + table_name)
            count, = cursor.fetchone()
        print('processing %s, %d items' % (table_name, count))

        sql = 'SELECT * FROM ' + table_name
        filename_csv = settings.MEDIA_ROOT + '/raw/' + table_name + '.csv'
        for offset in range(0, count, self.BATCH_SIZE):
            with closing(connection.cursor()) as cursor:
                cursor.execute(sql + ' LIMIT ' + str(self.BATCH_SIZE) +
                               ' OFFSET ' + str(offset))
                # Append after the first batch so every batch extends the same CSV.
                self.dump_cursor(cursor, filename_csv, append=(offset > 0))
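The snippet relies on a dump_cursor helper that is not shown. A plausible sketch of such a helper (an assumption, not the project's actual code):

import csv

def dump_cursor(self, cursor, filename_csv, append=False):
    mode = 'a' if append else 'w'
    with open(filename_csv, mode, newline='') as fh:
        writer = csv.writer(fh)
        if not append:
            # Emit a header row from the cursor metadata on the first batch.
            writer.writerow(col[0] for col in cursor.description)
        writer.writerows(cursor.fetchall())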
Code example #5
    def handle(self, *args, **options):
        options.setdefault('interactive', False)
        database = options.get('database')
        email = options.get('email')
        password = options.get('password')

        if not password or not email:
            raise CommandError("--email and --password are required options")

        user_data = {
            'email': email,
            'password': password,
        }

        exists = self.UserModel._default_manager.db_manager(database).filter(email=email).exists()
        if not exists:
            self.UserModel._default_manager.db_manager(database).create_superuser(**user_data)
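The snippet reads --email and --password out of options; a matching add_arguments() might look like this (a sketch, not the project's actual code):

    def add_arguments(self, parser):
        super().add_arguments(parser)
        parser.add_argument('--email', default=None, help='Email for the superuser (required)')
        parser.add_argument('--password', default=None, help='Password for the superuser (required)')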
Code example #6
File: seed_options.py Project: uees/tdxStock
    def handle(self, *args, **options):
        if not Option.objects.exists():
            Option.objects.create(name="site_name", value='TdxStock')
            Option.objects.create(name="site_host", value='localhost')
            # Seed value kept as-is; it means "Shanghai/Shenzhen stock analysis".
            Option.objects.create(name="site_description", value='沪深股票分析')
            # Use a name that does not shadow the **options argument above.
            empty_options = [
                Option(name=name, value=None) for name in [
                    'site_seo_description', 'site_keywords',
                    'open_site_comment', 'beian_code', 'gongan_beian_code',
                    'show_gongan_code'
                ]
            ]
            Option.objects.bulk_create(empty_options)
            self.stdout.write(self.style.SUCCESS('Seeded the options table successfully'))
        else:
            raise CommandError('The options table already contains data')
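An alternative sketch for idempotent seeding: get_or_create never raises on a re-run, at the cost of silently keeping whatever rows already exist (an assumption about the desired behavior, not the project's code):

for name, value in [("site_name", "TdxStock"), ("site_host", "localhost")]:
    Option.objects.get_or_create(name=name, defaults={"value": value})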
Code example #7
    def execute_deposits(cls):
        pending_deposits = Transaction.objects.filter(
            kind=Transaction.KIND.deposit,
            status=Transaction.STATUS.pending_user_transfer_start,
        )
        try:
            ready_transactions = rdi.poll_pending_deposits(pending_deposits)
        except NotImplementedError as e:
            raise CommandError(e)
        for transaction in ready_transactions:
            try:
                success = execute_deposit(transaction)
            except ValueError as e:
                logger.error(f"poll_pending_transactions: {str(e)}")
                continue
            if success:
                rdi.after_deposit(transaction)
Code example #8
File: base.py Project: City-of-Lappeenranta/Respa
def select_resources(resources, selected_resources):
    ret = []
    for res_id in selected_resources:
        for res in resources:
            try:
                if int(res_id) == res.id:
                    break
            except ValueError:
                pass  # res_id is not numeric; fall through to the other matches
            if res_id == res.principal_email:
                break
            if res_id == res.resource.id:
                break
        else:
            # for/else: this branch runs only when the inner loop finished
            # without a break, i.e. no resource matched res_id.
            raise CommandError('Resource with ID "%s" not found' % res_id)
        ret.append(res)
    return ret
Code example #9
def render_field_variations(kwargs):
    kwargs["storage"] = get_storage_class(kwargs["storage"])()
    ignore_missing = kwargs.pop("ignore_missing")
    do_render = kwargs.pop("do_render")
    try:
        if callable(do_render):
            kwargs.pop("field_class")
            do_render = do_render(**kwargs)
        if do_render:
            render_variations(**kwargs)
    except FileNotFoundError as e:
        if not ignore_missing:
            raise CommandError(
                "Source file was not found, terminating. "
                "Use -i/--ignore-missing to skip this error.") from e
    return kwargs["file_name"]
Code example #10
def create_tables(  # noqa:C901
    command: BaseCommand, datasets: Iterable[Dataset], allow_unmanaged=False
):
    """Create tables for all updated datasets.

    This is a separate function to allow easy reuse.
    """
    errors = 0
    command.stdout.write("Creating tables")

    # First create all models. This allows Django to resolve model relations.
    models = []
    for dataset in datasets:
        models.extend(schema_models_factory(dataset.schema, base_app_name="dso_api.dynamic_api"))

    # Create all tables
    with connection.schema_editor() as schema_editor:
        for model in models:
            # Only create the table if migration is allowed:
            # - the router allows it (i.e. not some external database)
            # - the model is managed (dynamic models are not, by default),
            #   unless the caller overrides this (e.g. a developer run)
            db_table_name = model._meta.db_table
            router_allows = router.allow_migrate_model(model._meta.app_label, model)
            if not router_allows:
                command.stdout.write(
                    f"  Skipping externally managed table: {db_table_name}"
                )
                continue

            if not allow_unmanaged and not model._meta.can_migrate(connection):
                command.stderr.write(
                    f"  Skipping non-managed model: {model._meta.db_table}"
                )
                continue

            try:
                command.stdout.write(f"* Creating table {model._meta.db_table}")
                with transaction.atomic():
                    schema_editor.create_model(model)
            except (DatabaseError, ValueError) as e:
                command.stderr.write(f"  Tables not created: {e}")
                if not re.search(r'relation "[^"]+" already exists', str(e)):
                    errors += 1

    if errors:
        raise CommandError("Not all tables could be created")
Code example #11
    def handle(self, **options):
        from django.utils import translation
        translation.activate(settings.LANGUAGE_CODE)

        self.backends = options.get('using')
        if not self.backends:
            self.backends = list(haystack_connections.connections_info.keys())

        alias_mappings = []

        # Use a timestamped index instead of the default in settings.
        for backend_name in self.backends:
            connection = haystack_connections[backend_name]
            backend = connection.get_backend()
            record_count = self.get_record_count(backend.conn,
                                                 backend.index_name)
            alias, index_name = self.prepare_backend_index(backend)
            alias_mappings.append((backend, index_name, alias, record_count))

        # Set the alias (from settings) to the timestamped catalog.
        run_attempts = 0
        indexes_pending = {key: '' for key in [x[1] for x in alias_mappings]}
        while indexes_pending and run_attempts < 2:
            run_attempts += 1
            super(Command, self).handle(**options)

            for backend, index, alias, record_count in alias_mappings:
                # Run a sanity check to ensure we aren't drastically changing the
                # index, which could be indicative of a bug.
                if index in indexes_pending and not options.get(
                        'disable_change_limit', False):
                    record_count_is_sane, index_info_string = self.sanity_check_new_index(
                        backend.conn, index, record_count)
                    if record_count_is_sane:
                        self.set_alias(backend, alias, index)
                        indexes_pending.pop(index, None)
                    else:
                        indexes_pending[index] = index_info_string
                else:
                    self.set_alias(backend, alias, index)
                    indexes_pending.pop(index, None)

        if indexes_pending:
            raise CommandError(
                'Sanity check failed for new index(es): {}'.format(
                    indexes_pending))
Code example #12
    def handle(self, *args, **options):
        course_ids_file = options['course_ids_file']
        batch_limit = options['batch_limit']

        if course_ids_file:
            if not os.path.exists(course_ids_file):
                raise CommandError('Pass the correct absolute path to course ids file as --course_ids_file argument.')

            total_courses, failed_courses = self._generate_enrollment_codes_from_file(course_ids_file)
        else:
            total_courses, failed_courses = self._generate_enrollment_codes_from_db(batch_limit)

        if failed_courses:
            logger.error('Completed enrollment codes generation. %d of %d failed.', len(failed_courses), total_courses)
            logger.error('\n'.join(['Failed courses:'] + failed_courses))
        else:
            logger.info('Successfully generated enrollment codes for the batch of %s courses.', total_courses)
Code example #13
    def handle(self, *args, **options):
        programs_config = ProgramsApiConfig.current()
        self.client = Client.objects.get(name=programs_config.OAUTH2_CLIENT_NAME)

        if self.client.user is None:
            msg = (
                'No user is associated with the {} OAuth2 client. '
                'A service user is necessary to make requests to the Programs API. '
                'No tasks have been enqueued. '
                'Associate a user with the client and try again.'
            ).format(programs_config.OAUTH2_CLIENT_NAME)

            raise CommandError(msg)

        self._load_run_modes()

        logger.info('Looking for users who may be eligible for a program certificate.')

        self._load_usernames()

        if options.get('commit'):
            logger.info('Enqueuing program certification tasks for %d candidates.', len(self.usernames))
        else:
            logger.info(
                'Found %d candidates. To enqueue program certification tasks, pass the -c or --commit flags.',
                len(self.usernames)
            )
            return

        succeeded, failed = 0, 0
        for username in self.usernames:
            try:
                award_program_certificates.delay(username)
            except:  # pylint: disable=bare-except
                failed += 1
                logger.exception('Failed to enqueue task for user [%s]', username)
            else:
                succeeded += 1
                logger.debug('Successfully enqueued task for user [%s]', username)

        logger.info(
            'Done. Successfully enqueued tasks for %d candidates. '
            'Failed to enqueue tasks for %d candidates.',
            succeeded,
            failed
        )
Code example #14
    def handle(self, *args, **options):
        # Get the backend to use
        channel_backend = channel_backends[DEFAULT_CHANNEL_BACKEND]
        auto_import_consumers()
        if channel_backend.local_only:
            raise CommandError(
                "You have a process-local channel backend configured, and so cannot run separate interface servers.\n"
                "Configure a network-based backend in CHANNEL_BACKENDS to use this command."
            )
        # Run the interface
        port = options.get("port", None) or 9000
        self.stdout.write(
            "Running Twisted/Autobahn WebSocket interface server")
        self.stdout.write(" Channel backend: %s" % channel_backend)
        self.stdout.write(" Listening on: ws://0.0.0.0:%i" % port)
        WebsocketTwistedInterface(channel_backend=channel_backend,
                                  port=port).run()
Code example #15
File: check_blob_logs.py Project: ye-man/commcare-hq
    def handle(self, files, migrate=False, num_workers=10, **options):
        set_max_connections(num_workers)
        blob_db = get_blob_db()
        if not isinstance(blob_db, MigratingBlobDB):
            raise CommandError(
                "Expected to find migrating blob db backend (got %r)" %
                blob_db)
        old_db = blob_db.old_db
        new_db = blob_db.new_db
        ignored = 0

        try:
            pool = Pool(size=num_workers)
            for filepath in files:
                print("Processing {}".format(filepath))
                with open(filepath, encoding='utf-8') as fh:
                    for line in fh:
                        if not line:
                            continue
                        try:
                            rec = json.loads(line)
                        except ValueError:
                            ignored += 1
                            print("Ignored {}".format(line))
                            continue
                        pool.spawn(process, rec, old_db, new_db, migrate)

            print("CTRL+C to abort")
            while not pool.join(timeout=10):
                print("waiting for {} workers to finish...".format(len(pool)))
        except KeyboardInterrupt:
            pass

        if ignored:
            print("Ignored {} malformed records".format(ignored))
        for type_code, stats in sorted(Stats.items.items()):
            try:
                group = BLOB_MIXIN_MODELS[type_code].__name__
            except KeyError:
                group = CODES.name_of(type_code, "type_code %s" % type_code)
            total = stats.new + stats.old + stats.noref + stats.lost
            print("{}: checked {} records".format(group, total))
            print("  Found in new db: {}".format(stats.new))
            print("  Found in old db: {}".format(stats.old))
            print("  Not referenced: {}".format(stats.noref))
            print("  Not found: {}".format(stats.lost))
Code example #16
    def handle(self, *args: Any, **kwargs: str) -> None:
        if settings.WARN_NO_EMAIL:
            raise CommandError("Outgoing email not yet configured, see\n  "
                               "https://zulip.readthedocs.io/en/latest/production/email.html")
        message = ("Success!  If you receive this message, you've "
                   "successfully configured sending email from your "
                   "Zulip server.  Remember that you need to restart "
                   "the Zulip server with /home/zulip/deployments/current/scripts/restart-server "
                   "after changing the settings in /etc/zulip before your changes will take effect.")
        send_mail("Zulip email test", message, FromAddress.SUPPORT, kwargs['email'])
        send_mail("Zulip noreply email test", message, FromAddress.tokenized_no_reply_address(), kwargs['email'])

        if kwargs['managers']:
            mail_managers("Zulip manager email test", "This email was sent to the site managers.")

        if kwargs['admins']:
            mail_admins("Zulip admins email test", "This email was sent to the site admins.")
Code example #17
File: publish.py Project: legitishan/cc-licenses
    def run_django_distill(self):
        """Outputs static files into the output dir."""
        if not os.path.isdir(settings.STATIC_ROOT):
            e = "Static source directory does not exist, run collectstatic"
            raise CommandError(e)
        output_dir = self.output_dir
        if os.path.isdir(output_dir):
            rmtree(output_dir)
        os.makedirs(output_dir)

        self.stdout.write(f"\n{self.output_dir}")
        save_url_as_static_file(output_dir, "/status/", "status/index.html")
        tbranches = TranslationBranch.objects.filter(complete=False)
        for tbranch_id in tbranches.values_list("id", flat=True):
            save_url_as_static_file(
                output_dir,
                f"/status/{tbranch_id}/",
                f"status/{tbranch_id}.html",
            )

        legalcodes = LegalCode.objects.validgroups()
        for group in legalcodes.keys():
            self.stdout.write(f"\n{self.output_dir}")
            for legalcode in legalcodes[group]:
                # deed
                filepath, symlinks = legalcode.get_file_and_links("deed")
                save_url_as_static_file(
                    output_dir,
                    legalcode.deed_url,
                    filepath,
                )
                for symlink in symlinks:
                    relative_symlink(output_dir, filepath, symlink)
                # legalcode
                filepath, symlinks = legalcode.get_file_and_links("legalcode")
                save_url_as_static_file(
                    output_dir,
                    legalcode.license_url,
                    filepath,
                )
                for symlink in symlinks:
                    relative_symlink(output_dir, filepath, symlink)

        self.stdout.write(f"\n{self.output_dir}")
        save_url_as_static_file(output_dir, reverse("metadata"),
                                "licenses/metadata.yaml")
Code example #18
    def handle(self, *args, **options):
        site_id = options.get('site_id')
        site_domain = options.get('site_domain')
        site_name = options.get('site_name')

        enable_facebook_sharing = options.get('enable_facebook_sharing')
        facebook_app_id = options.get('facebook_app_id')
        if enable_facebook_sharing and not facebook_app_id:
            raise CommandError(
                'A Facebook app ID must be supplied to enable Facebook sharing'
            )

        try:
            site = Site.objects.get(id=site_id)
        except Site.DoesNotExist:
            site, site_created = Site.objects.get_or_create(domain=site_domain)
            if site_created:
                logger.info('Created Site [%d] with domain [%s]', site.id,
                            site.domain)

        site.domain = site_domain
        site.name = site_name
        site.save()

        SiteConfiguration.objects.update_or_create(
            site=site,
            defaults={
                'platform_name': options.get('platform_name'),
                'lms_url_root': options.get('lms_url_root'),
                'catalog_api_url': options.get('catalog_api_url'),
                'tos_url': options.get('tos_url'),
                'privacy_policy_url': options.get('privacy_policy_url'),
                'homepage_url': options.get('homepage_url'),
                'company_name': options.get('company_name'),
                'certificate_help_url': options.get('certificate_help_url'),
                'records_help_url': options.get('records_help_url'),
                'twitter_username': options.get('twitter_username'),
                'enable_linkedin_sharing': options.get('enable_linkedin_sharing'),
                'enable_twitter_sharing': options.get('enable_twitter_sharing'),
                'enable_facebook_sharing': enable_facebook_sharing,
                'facebook_app_id': facebook_app_id,
                'segment_key': options.get('segment_key'),
                'theme_name': options.get('theme_name').lower(),
            })
Code example #19
    def get_dmd_data_path(self):
        """Return path to most recent directory of unzipped dm+d data, without
        the trailing slash.

        It expects to find this at data/dmd/[datestamp]/nhsbsa_dmd_[release].
        """

        # The extra slash ('') at the end of glob_pattern is to ensure we don't
        # capture any .zip files.
        glob_pattern = os.path.join(settings.PIPELINE_DATA_BASEDIR, "dmd", "*",
                                    "nhsbsa_dmd_*", "")
        paths = sorted(glob.glob(glob_pattern))
        if not paths:
            raise CommandError("No dmd data found")

        # Remove the extra slash.
        return paths[-1][:-1]
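A small illustration of the trailing-slash trick: joining an empty final path component makes the pattern end in a separator, and glob then matches directories only, skipping sibling .zip files (the paths shown are hypothetical):

import glob
import os

pattern = os.path.join("data", "dmd", "*", "nhsbsa_dmd_*", "")
print(sorted(glob.glob(pattern)))  # e.g. ['data/dmd/20240101/nhsbsa_dmd_1/']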
Code example #20
File: generate_admin.py Project: Elbar/djazz
    def handle(self, *args, **options):
        package = options['app_name']
        models_names = [a.name for a in pyclbr.readmodule(package + '.models').values()]

        package_dir = os.path.join(os.getcwd(), package)
        # Validate the package before opening any files, so a bad package
        # name does not leave a stray admin.py behind.
        if not os.path.exists(package_dir):
            raise CommandError("Given package %s doesn't exist!" % package)

        tpl_path = os.path.join(akoikelov.djazz.__path__[0], 'conf', 'tpl', 'admin.py-tpl')
        with open(tpl_path) as tpl_file:
            admin_skeleton = tpl_file.read()

        with open(os.path.join(package_dir, 'admin.py'), 'a') as admin_file_resource:
            for m in models_names:
                generator = AdminGenerator(m, admin_file_resource, admin_skeleton, package)
                generator.generate()

        self.stdout.write(self.style.SUCCESS('Admin classes for models %s successfully generated!' % models_names))
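pyclbr.readmodule parses a module's source without importing it and returns a mapping of class names to class descriptors, which is how the command above discovers model names. A quick illustration (the "shop" app is hypothetical):

import pyclbr

classes = pyclbr.readmodule("shop.models")
print(list(classes))  # e.g. ['Product', 'Order']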
Code example #21
    def _report(self, allowed_checksums):
        if allowed_checksums:
            allowed_checksums = allowed_checksums.split(",")
            if "sha256" not in allowed_checksums:
                raise CommandError(_("Checksums must contain sha256"))
        else:
            allowed_checksums = settings.ALLOWED_CONTENT_CHECKSUMS

        forbidden_checksums = set(
            constants.ALL_KNOWN_CONTENT_CHECKSUMS).difference(
                allowed_checksums)

        self.stderr.write(
            _("Warning: the handle-artifact-checksums report is in "
              "tech preview and may change in the future."))
        self._show_on_demand_content(forbidden_checksums)
        self._show_immediate_content(forbidden_checksums)
Code example #22
    def handle(self, check_app_name=None, **options):
        runner = simple.DjangoTestSuiteRunner(verbosity=0)
        err_msg = "Failed to migrate %s; see output for hints at missing dependencies:\n"
        hacks.patch_flush_during_test_db_creation()
        failures = 0
        if check_app_name is None:
            app_names = settings.INSTALLED_APPS
        else:
            app_names = [check_app_name]
        for app_name in app_names:
            app_label = app_name.split(".")[-1]
            if app_name == 'south':
                continue

            try:
                Migrations(app_name)
            except (NoMigrations, ImproperlyConfigured):
                continue
            app = loading.get_app(app_label)

            verbosity = int(options.get('verbosity', 1))
            if verbosity >= 1:
                self.stderr.write("processing %s\n" % app_name)

            old_config = runner.setup_databases()
            try:
                call_command('migrate',
                             app_label,
                             noinput=True,
                             verbosity=verbosity)
                for model in loading.get_models(app):
                    # Touch each model to force a query and surface schema problems.
                    model._default_manager.exists()
            except (KeyboardInterrupt, SystemExit):
                raise
            except Exception as e:
                failures += 1
                if verbosity >= 1:
                    self.stderr.write(err_msg % app_name)
                    self.stderr.write("%s\n" % e)
            finally:
                runner.teardown_databases(old_config)
        if failures > 0:
            raise CommandError("Missing depends_on found in %s app(s)." %
                               failures)
        self.stderr.write("No missing depends_on found.\n")
Code example #23
    def handle(self, *args, **options):
        super(Command, self).handle(*args, **options)
        both_list_and_endpoints = options.get("doc_id") is not None and (
            options.get("start_id") is not None
            or options.get("end_id") is not None
            or options.get("filed_after") is not None
        )
        # True only when the user supplied none of the selection options.
        no_option = not any(
            [
                options.get("doc_id") is not None,
                options.get("start_id") is not None,
                options.get("end_id") is not None,
                options.get("filed_after") is not None,
                options.get("all") is not False,
            ]
        )
        if both_list_and_endpoints or no_option:
            raise CommandError(
                "Please specify either a list of documents, a "
                "range of ids, a range of dates, or "
                "everything."
            )

        self.index = options["index"]

        # Use query chaining to build the query
        query = Opinion.objects.all().order_by("pk")
        if options.get("doc_id"):
            query = query.filter(pk__in=options["doc_id"])
        if options.get("end_id"):
            query = query.filter(pk__lte=options["end_id"])
        if options.get("start_id"):
            query = query.filter(pk__gte=options["start_id"])
        if options.get("filed_after"):
            query = query.filter(
                cluster__date_filed__gte=options["filed_after"]
            )
        if options.get("all"):
            query = Opinion.objects.all()
        self.count = query.count()
        self.average_per_s = 0
        self.timings = []
        opinion_pks = query.values_list("pk", flat=True).iterator()
        self.update_documents(opinion_pks, options["queue"])
        self.add_to_solr(options["queue"])
Code example #24
    def _update(self, models, options):
        """
        Update indices with sanity check.

        Will be created a new index and populate with data.
        The index will be masked with previous one to prevent missing data.
        """

        alias_mappings = []
        for document in registry.get_documents(models):
            # pylint: disable=protected-access
            index = document._index
            record_count = self.get_record_count(document)
            alias, new_index_name = self.prepare_backend_index(index)
            alias_mappings.append(AliasMapper(document, index, new_index_name, alias, record_count))
        # Set the alias (from settings) to the timestamped catalog.
        run_attempts = 0
        indexes_pending = {key: '' for key in [x.new_index_name for x in alias_mappings]}
        conn = get_connection()
        while indexes_pending and run_attempts < 1:  # Only try once, as retries gave buggy results. See VAN-391
            run_attempts += 1
            self._populate(models, options)
            for doc, __, new_index_name, alias, record_count in alias_mappings:
                # Run a sanity check to ensure we aren't drastically changing the
                # index, which could be indicative of a bug.
                if new_index_name in indexes_pending and not options.get('disable_change_limit', False):
                    record_count_is_sane, index_info_string = self.sanity_check_new_index(
                        run_attempts, doc, new_index_name, record_count
                    )
                    if record_count_is_sane:
                        ElasticsearchUtils.set_alias(conn, alias, new_index_name)
                        indexes_pending.pop(new_index_name, None)
                    else:
                        indexes_pending[new_index_name] = index_info_string
                else:
                    ElasticsearchUtils.set_alias(conn, alias, new_index_name)
                    indexes_pending.pop(new_index_name, None)

        for index_alias_mapper in alias_mappings:
            index_alias_mapper.registered_index._name = index_alias_mapper.alias  # pylint: disable=protected-access

        if indexes_pending:
            raise CommandError('Sanity check failed for the new index(es): {}'.format(indexes_pending))

        return True
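ElasticsearchUtils.set_alias is not shown here. With the elasticsearch-py client, an atomic alias swap is typically a single update_aliases call; a sketch of what such a helper might do (an assumption, not the project's code):

def set_alias(conn, alias, new_index_name):
    actions = [
        # Detach the alias from whichever index currently holds it
        # (a first-ever run may need to skip this step)...
        {"remove": {"alias": alias, "index": "*"}},
        # ...and attach it to the freshly built index in the same request,
        # so readers never see a moment without the alias.
        {"add": {"alias": alias, "index": new_index_name}},
    ]
    conn.indices.update_aliases(body={"actions": actions})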
Code example #25
    def handle(self, *args, **options):

        freeze_name = options["freeze"]
        dummy = options["dummy"]

        if not dummy:
            self.prepare_harvest(freeze_name)

        harvest_queryset = EdurepHarvest.objects.filter(
            freeze__name=freeze_name, stage=HarvestStages.NEW)
        if not harvest_queryset.exists():
            raise EdurepHarvest.DoesNotExist(
                f"There are no NEW EdurepHarvest objects for '{freeze_name}'")

        self.header("EDUREP SEEDS HARVEST", options)

        # Call the Edurep OAI-PMH interface to fetch metadata about learning materials
        self.info("Fetching metadata for sources ...")
        send_config = create_config("http_resource", {
            "resource": "edurep.EdurepOAIPMH",
            "continuation_limit": 1000,
        })
        current_time = now()
        successes = defaultdict(int)
        fails = defaultdict(int)
        for harvest in self.progress(harvest_queryset,
                                     total=harvest_queryset.count()):
            set_specification = harvest.source.collection_name
            scc, err = send(set_specification,
                            f"{harvest.latest_update_at:%Y-%m-%d}",
                            config=send_config,
                            method="get")
            if len(err):
                raise CommandError(
                    "Failed to harvest seeds from Edurep OAI-PMH")
            successes[set_specification] += len(scc)
            fails[set_specification] += len(err)
            if not dummy:
                harvest.harvested_at = current_time
                harvest.save()
        self.info('Failed OAI-PMH calls: ', fails)
        self.info('Successful OAI-PMH calls: ', successes)
        success_count = sum(successes.values())
        fail_count = sum(fails.values())
        return f'OAI-PMH: {success_count}/{success_count+fail_count}'
Code example #26
File: initial_data.py Project: wayneburlingame/ralph
    def handle(self, *args, **options):
        try:
            parent_network = ipaddress.ip_network(
                options.get('parent_network'))
            dc_name = options.get('dc_name')
            server_room_name = options.get('server_room_name')
            network_address = parent_network.network_address
            dns_1 = ipaddress.ip_address(options.get('dns_1'))
            dns_2 = ipaddress.ip_address(options.get('dns_2'))
            number_of_subnets = int(options.get('number_of_subnets'))
            region = options.get('region')
            configuration_path = options.get('configuration_path')
        except ValueError as e:
            raise CommandError(e)

        self._validate_network(parent_network, number_of_subnets)
        self._validate_configuration_path(configuration_path)

        self.create_users(region)
        self.create_configuration_path(configuration_path)
        TransitionsCommand.create_data_center_asset_transitions()

        NetworkCommand.create_network(dc_name=dc_name,
                                      dns1_address=dns_1,
                                      dns2_address=dns_2,
                                      gateway_address=network_address + 1,
                                      network=parent_network,
                                      server_room_name=server_room_name)
        for _ in range(0, number_of_subnets):
            network = ipaddress.ip_network('{}/{}'.format(network_address, 24))
            NetworkCommand.create_network(dc_name=dc_name,
                                          dns1_address=dns_1,
                                          dns2_address=dns_2,
                                          gateway_address=network_address + 1,
                                          network=network,
                                          server_room_name=server_room_name,
                                          create_rack=True)
            network_address += 256

        for name in ["A", "B", "C"]:
            ServerModelCommand.create_model(model_name="Model {}".format(name))
            ServerModelCommand.create_model(
                model_name="Blade server model {}".format(name), is_blade=True)

        call_command('sitetree_resync_apps')
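The subnet loop above carves consecutive /24 networks out of the parent by advancing network_address 256 addresses per iteration; the ipaddress module can enumerate the same subnets directly:

import ipaddress

parent = ipaddress.ip_network("10.0.0.0/16")
for subnet in list(parent.subnets(new_prefix=24))[:3]:
    print(subnet)  # 10.0.0.0/24, 10.0.1.0/24, 10.0.2.0/24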
Code example #27
    def handle(self, *args, **options):
        after = date_argument(options['after'])
        before = date_argument(options['before'])
        if after and before and before <= after:
            raise CommandError('"--before" must be after "--after"')

        record_type = options['type']
        serialiser: Serialiser = Serialiser.serialisers[record_type]()

        with open(options['path'], 'wt') as jsonl_file:
            records = serialiser.get_modified_records(after, before)

            for record in records:
                jsonl_file.write(
                    json.dumps(serialiser.serialise(record),
                               default=str,
                               ensure_ascii=False))
                jsonl_file.write('\n')
Code example #28
    def handle(self, *args, **options):
        try:
            # Forward only the options that load_data() accepts and that
            # carry a truthy value.
            load_data_params = inspect.signature(load_data).parameters
            params = {
                k: v
                for k, v in options.items() if k in load_data_params and v
            }

            LOGGER_INFO.debug(params)
            load_data(**params)
        except Exception:
            LOGGER.exception(f'Error while running {self.nom_proc}')
            raise CommandError(f'The {self.nom_proc} could not be completed')
        else:
            msg_info = f'{self.nom_proc.capitalize()} completed successfully'
            LOGGER_INFO.info(msg_info)
            self.stdout.write(self.style.SUCCESS(msg_info))
Code example #29
    def handle(self, export_file, config_file, domain, user_id, **options):
        start = datetime.utcnow()

        if '@' in user_id:
            user = WebUser.get_by_username(user_id)
        else:
            user = WebUser.get(user_id)
        if not user.is_member_of(domain):
            raise CommandError("%s can't access %s" % (user, domain))

        with open(config_file, 'r', encoding='utf-8') as f:
            config = ImporterConfig.from_json(f.read())

        config.couch_user_id = user._id
        with get_spreadsheet(export_file) as spreadsheet:
            print(json.dumps(do_import(spreadsheet, config, domain),
                             default=json_handler))
            print('finished in %s seconds' % (datetime.utcnow() - start).seconds)
Code example #30
    def handle(self, *args, **options):
        username = options.get('username')
        password = options.get('password')
        email = options.get('email')

        database = DEFAULT_DB_ALIAS
        if not password or not username:
            raise CommandError("--username and --password are required")

        data = {'username': username, 'password': password, 'email': email}
        try:
            self.UserModel._default_manager.db_manager(database).create_superuser(**data)
        except django.db.utils.IntegrityError:
            # The user already exists: reset the password (and email) instead.
            user = self.UserModel._default_manager.db_manager(database).get(username=username)
            user.set_password(password)
            if email:
                user.email = email
            user.save()
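Driving a command like this from code goes through call_command, with the options passed as keyword arguments (the command name "ensure_superuser" is an assumption for illustration):

from django.core.management import call_command

call_command("ensure_superuser", username="admin", password="s3cret", email="admin@example.com")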