def handle(self, *app_labels, **options):
    ser_format = options.get('format')
    outfilename = options.get('output')
    if outfilename is None:
        raise CommandError(
            'No --outfile specified (this is a required option)')
    self.target_dir = join(dirname(abspath(outfilename)), 'media')
    for modelclass in models_with_filefields():
        pre_dump.connect(self.save_images_for_signal, sender=modelclass)
    self.set_up_serializer(ser_format)
    with File(open(outfilename, 'w')) as self.stdout:
        super(Command, self).handle(*app_labels, **options)

def parse_data(self, object_generator, total_rows):
    if self.progress:
        pbar = self.__initialize_progress_bar(total_rows)
    for index, raw_row in enumerate(object_generator):
        if len(raw_row) != len(self.fields):
            raise CommandError('Incorrect parsed file! Stopping parsing! {} != {}'.format(
                len(raw_row), len(self.fields)))
        if self.is_csv:
            row = map(lambda s: s.strip(), raw_row)
        else:
            row = raw_row
        # TODO: invent great way to ignore last row when there is header
        if self.header and not self.is_csv and index == total_rows:
            continue
        self.process_row(row, index)
        if self.progress:
            pbar.update(index + 1)

def handle(self, **options):
    if not settings.DEBUG:
        raise CommandError("This command cannot be run in a production environment")
    models = options["models"]
    if not models:
        for app in apps.get_app_configs():
            self.truncate_app(app)
    else:
        for name in models:
            if "." in name:
                app_label, model_name = name.split(".", 1)
                model = apps.get_model(app_label, model_name)
                self.truncate_model(model)
            else:
                app = apps.get_app_config(name)
                self.truncate_app(app)

def handle(self, *args, **options):
    # Get the backend to use
    channel_backend = channel_backends[DEFAULT_CHANNEL_BACKEND]
    if channel_backend.local_only:
        raise CommandError(
            "You have a process-local channel backend configured, and so cannot run separate interface servers.\n"
            "Configure a network-based backend in CHANNEL_BACKENDS to use this command."
        )
    # Run the interface
    port = int(options.get("port", None) or 8000)
    from channels.interfaces.http_twisted import HttpTwistedInterface
    self.stdout.write(
        "Running twisted/Autobahn HTTP & WebSocket interface server")
    self.stdout.write("  Channel backend: %s" % channel_backend)
    self.stdout.write("  Listening on: 0.0.0.0:%i" % port)
    HttpTwistedInterface(channel_backend=channel_backend, port=port).run()

def get_pk_from_class(model_class):
    """Return the names of the primary-key fields of ``model_class``.

    :param model_class: the model class to inspect
    :return: list of primary-key field names
    """
    pks = []
    for field in model_class._meta.fields:
        if field.primary_key:
            pks.append(field.name)
    if not pks:
        raise CommandError(
            "Model class {} has no primary key".format(model_class))
    return pks

def authenticate(self):
    """Authenticate; CitySDK uses session-based username/password auth."""
    username = settings.CITYSDK_API_SETTINGS['USERNAME']
    password = settings.CITYSDK_API_SETTINGS['PASSWORD']
    # noinspection PyUnusedLocal
    session_response = requests.get('%sauth?username=%s&password=%s' %
                                    (BASE_API_URL, username, password))
    if session_response.status_code == 200:
        self.session_cookies = session_response.cookies
        print("Authentication successful with response: %s"
              % session_response.text)
    else:
        raise CommandError("Authentication failed with credentials %s:%s"
                           % (username, password))

def handle(self, migrator_slug, initial=None, continuous=None, cleanup=None,
           stats=None, erase_continuous_progress=None, **options):
    try:
        migrator = get_migrator_by_slug(migrator_slug)
    except KeyError:
        raise CommandError(USAGE)

    if not any((initial, continuous, cleanup, stats, erase_continuous_progress)):
        raise CommandError('initial, continuous, cleanup, stats, or '
                           'erase_continuous_progress must be set')
    if cleanup and (initial or continuous):
        raise CommandError('cleanup must be run alone')

    if stats:
        self.handle_stats(migrator)

    if initial:
        if migrator.last_seq:
            raise CommandError(MAYBE_YOU_WANT_TO_RUN_CONTINUOUS.format(migrator_slug))
        self.handle_initial(migrator)

    if erase_continuous_progress:
        if not migrator.original_seq:
            raise CommandError(MAYBE_YOU_WANT_TO_RUN_INITIAL.format(migrator_slug))
        if migrator.cleanup_complete:
            raise CommandError(CANNOT_RUN_CONTINUOUS_AFTER_CLEANUP)
        self.handle_erase_continuous_progress(migrator)

    if continuous:
        if not migrator.last_seq:
            raise CommandError(MAYBE_YOU_WANT_TO_RUN_INITIAL.format(migrator_slug))
        if migrator.cleanup_complete:
            raise CommandError(CANNOT_RUN_CONTINUOUS_AFTER_CLEANUP)
        self.handle_continuous(migrator)

    if cleanup:
        confirmation = input(
            "Cleanup will remove doc_types ({}) from db {}\n"
            "I recommend running './manage.py delete_doc_conflicts' "
            "first or some docs might not actually be deleted.\n"
            "Are you sure you want to proceed? [y/n]"
            .format(', '.join(migrator.doc_types), migrator.source_db))
        if confirmation == 'y':
            if migrator.docs_are_replicating():
                self.stdout.write(
                    "It looks like replication is still happening, please track "
                    "down and cancel before attempting to cleanup, lest you "
                    "replicate the deletions. Yikes!")
                return
            self.handle_cleanup(migrator)

def get_project_directory(self, name, target):
    """
    This code is copied verbatim from Django's TemplateCommand.handle(),
    but with the directory creation code removed.
    """
    # if some directory is given, make sure it's nicely expanded.
    if target is None:
        top_dir = os.path.join(os.getcwd(), name)
    else:
        top_dir = os.path.abspath(os.path.expanduser(target))
        if not os.path.exists(top_dir):
            raise CommandError("Destination directory '%s' does not "
                               "exist, please create it first." % top_dir)
    return top_dir

def _update_index(self, models):
    """Update the search index, optionally restricted to the given models."""
    selected_models = []
    for input_model in models:
        try:
            app_name, model_name = input_model.split(".")
            model_class = get_model(app_name, model_name)
            selected_models.append(model_class)
        except (ValueError, LookupError):
            raise CommandError("Model not found for indexing")

    restrict_to = selected_models if selected_models else None
    self.stdout.write('Initializing index...', ending='')
    indexer.initialize_index()
    self.stdout.write(' OK')
    indexer.update_index(stdout=self.stdout, restrict_to=restrict_to)

def handle_user_login(self, *args, **options):
    password = '******'
    username = '******'
    email = '*****@*****.**'
    if password and not username:
        raise CommandError("--username is required")
    if username:
        exists = User.objects.filter(username=username).exists()
        if exists:
            self.stdout.write("User exists, Try again")
            return
    user = User.objects.create_user(username, email, password)
    user.save()

def handle(self, *args, **options):
    oldest_supported = options.get('oldest_supported')
    token = settings.GITHUB_API_TOKEN
    base_url = settings.GITHUB_API_BASE_URL
    client = GitHubClient(base_url, token)
    try:
        releases = get_supported_releases(client, oldest_supported)
    except requests.HTTPError as e:
        raise CommandError('Could not get releases: ' + str(e))
    if options['print']:
        for release in releases:
            self.stdout.write(release)
    else:
        sync_releases(releases)

def handle(self, *args, **options): """Main logic for the actual command.""" sso_email_user_id = options['sso_email_user_id'] token = options['token'] or token_urlsafe() hours = options['hours'] timeout = hours * 60 * 60 try: adviser = Advisor.objects.get(sso_email_user_id=sso_email_user_id) except Advisor.DoesNotExist: raise CommandError(f'No adviser with SSO email user ID {sso_email_user_id} found.') add_token_data_to_cache(token, adviser.email, adviser.sso_email_user_id, timeout) msg = f'The token {token} was successfully added and will expire in {hours} hours.' return self.style.SUCCESS(msg)
def __process_options(self, **options):
    password = options.get('password')
    username = options.get('username')
    database = options.get('database')
    if password and not username:
        raise CommandError(
            "--username is required if specifying --password")
    if username and options.get('preserve') \
            and self.UserModel._default_manager.db_manager(database).filter(username=username).exists():
        self.stdout.write(
            "User exists, exiting normally due to --preserve")
        return False
    return True

def handle(self, *args, **kwargs):
    if os.environ['DJANGO_SETTINGS_MODULE'] != \
            'openprescribing.settings.e2etest':
        raise CommandError('Command must run with e2etest settings')
    try:
        run_end_to_end()
    except:
        import traceback
        msg = 'End-to-end test failed (seed: %s)\n\n' % settings.BQ_NONCE
        msg += traceback.format_exc()
        notify_slack(msg)
        raise
    msg = 'Pipeline tests ran to completion (seed: %s)' % settings.BQ_NONCE
    notify_slack(msg)

def handle(self, *args, **kwargs):
    if Order.objects.count() > 0:
        raise CommandError('The database is already populated')

    users = [
        factories.create_user(name)
        for name in ['Alice', 'Beatrice', 'Benedict']
    ]

    with override_settings(
            EMAIL_BACKEND='django.core.mail.backends.dummy.EmailBackend'):
        factories.create_ticket(users[0], num_days=5)
        factories.create_pending_order_for_others(users[0])
        factories.create_confirmed_order_for_self_and_others(
            users[1], rate='corporate')
        factories.create_confirmed_order_for_self(users[2], num_days=5)

def _minify(config, verbose=False):
    modules = config['modules']
    if verbose:
        modules = with_progress_bar(modules, prefix="Minifying", oneline=False)
    else:
        print("Minifying Javascript bundles (estimated wait time: 5min)")
    for module in modules:
        rel_path = Path(module['name'] + ".js")
        path = os.path.join(ROOT_DIR, 'staticfiles', rel_path)
        ret = call([
            "node",
            "node_modules/uglify-js/bin/uglifyjs",
            path,
            "--compress",
            "--mangle",
            "--output", path,
            "--source-map", f"url={rel_path.name}.map",
        ])
        if ret:
            raise CommandError(f"Failed to minify {rel_path}")

def handle(self, *args, **options):
    password = options.get("password")
    username = options.get("username")
    database = options.get("database")

    if password and not username:
        raise CommandError("--username is required if specifying --password")

    super().handle(*args, **options)

    if password:
        user = self.UserModel._default_manager.db_manager(database).get(
            username=username
        )
        user.set_password(password)
        user.save()

def handle(self, *args, **options):
    if not options.get('file'):
        raise CommandError('File (--file) argument must be specified!')
    try:
        path_to_json_file = options.get('file')
        with open(path_to_json_file, mode="w+", encoding='utf-8') as json_file:
            exp = self.get_exporter()
            exp.export_to_file(json_file)
            self.stdout.write(self.style.SUCCESS("Export complete!"))
    except Exception as e:
        msg = f"Something went wrong during the export ({__file__}:class {self.__class__.__name__})! Details:"
        self.stderr.write(self.style.ERROR(msg))
        self.stderr.write(self.style.ERROR(str(e)))

def set_up_db():
    """Set up the in-memory database so that we can avoid clobbering existing data.

    Clear the cached database connection, set up a connection to an in-memory
    sqlite3 database, and migrate.
    """
    databases = connections.databases
    assert databases["default"]["ENGINE"] == "django.db.backends.sqlite3"
    databases["default"]["NAME"] = ":memory:"
    del connections.databases
    connections.__init__(databases=databases)
    call_command("migrate")

    # This is a belt-and-braces check to ensure that the above hackery has worked.
    if Concept.objects.count() > 0:
        raise CommandError("Must be run against empty database")

def get_instrument_type_value(instrument_type_name):
    """
    Search for an instrument type within the choices of InstrumentType.

    :param instrument_type_name: A string representing the instrument type.
    :return: The value corresponding to instrument_type_name, if it exists.
    """
    for value, label in InstrumentType.choices:
        if label == instrument_type_name:
            return value
    raise CommandError(
        'Instrument type "{}" not found'.format(instrument_type_name))

def handle(self, *args, tour, **options):
    if tour == 1:
        df = self.import_tour_1()
    elif tour == 2:
        df = self.import_tour_2()
    else:
        # "There are only two rounds of voting."
        raise CommandError("Il n'y a que deux tours de scrutin.")

    df["insee"] = df["departement"].astype(str).str.zfill(2) + df["commune"].str.zfill(3)
    df["liste_code"] = df["insee"] + "-" + df["liste_numero"].astype(str).str.zfill(2)
    df["conseil_communautaire"] = df["conseil_communautaire"] == "O"

    listes = (
        df[["liste_code", "insee", "liste_nom", "nuance"]]
        .drop_duplicates()
        .set_index(["liste_code"])
    )
    listes["nuance"] = (
        listes["nuance"].cat.remove_unused_categories().cat.add_categories([""]).fillna("")
    )
    listes["candidats_noms"] = df.groupby(["liste_code"])["candidat_nom"].agg(list)
    listes["candidats_prenoms"] = df.groupby(["liste_code"])["candidat_prenom"].agg(list)
    listes["candidats_communautaire"] = df.groupby(["liste_code"])["conseil_communautaire"].agg(list)

    communes_ids = {
        c["code"]: c["id"]
        for c in CommunePage.objects.filter(
            code__in=listes["insee"].unique()).values("id", "code")
    }

    for l in tqdm(listes.itertuples(), total=len(listes)):
        Liste.objects.update_or_create(
            code=l.Index,
            tour=tour,
            defaults={
                "nom": l.liste_nom,
                "nuance": l.nuance,
                "candidats_noms": l.candidats_noms,
                "candidats_prenoms": l.candidats_prenoms,
                "candidats_communautaire": l.candidats_communautaire,
                "commune_id": communes_ids.get(l.insee),
            },
        )

def handle(self, *args, **options):
    site_id = options.get("site_id")
    site_domain = options.get("site_domain")
    site_name = options.get("site_name")
    enable_facebook_sharing = options.get("enable_facebook_sharing")
    facebook_app_id = options.get("facebook_app_id")

    if enable_facebook_sharing and not facebook_app_id:
        raise CommandError(
            "A Facebook app ID must be supplied to enable Facebook sharing"
        )

    try:
        site = Site.objects.get(id=site_id)
    except Site.DoesNotExist:
        site, site_created = Site.objects.get_or_create(domain=site_domain)
        if site_created:
            logger.info("Created Site [%d] with domain [%s]", site.id, site.domain)

    site.domain = site_domain
    site.name = site_name
    site.save()

    SiteConfiguration.objects.update_or_create(
        site=site,
        defaults={
            "platform_name": options.get("platform_name"),
            "lms_url_root": options.get("lms_url_root"),
            "catalog_api_url": options.get("catalog_api_url"),
            "tos_url": options.get("tos_url"),
            "privacy_policy_url": options.get("privacy_policy_url"),
            "homepage_url": options.get("homepage_url"),
            "company_name": options.get("company_name"),
            "certificate_help_url": options.get("certificate_help_url"),
            "records_help_url": options.get("records_help_url"),
            "twitter_username": options.get("twitter_username"),
            "enable_linkedin_sharing": options.get("enable_linkedin_sharing"),
            "enable_twitter_sharing": options.get("enable_twitter_sharing"),
            "enable_facebook_sharing": enable_facebook_sharing,
            "facebook_app_id": facebook_app_id,
            "segment_key": options.get("segment_key"),
            "theme_name": options.get("theme_name").lower(),
        },
    )

def _report(self, allowed_checksums):
    if allowed_checksums:
        allowed_checksums = allowed_checksums.split(",")
        if "sha256" not in allowed_checksums:
            raise CommandError(_("Checksums must contain sha256"))
    else:
        allowed_checksums = settings.ALLOWED_CONTENT_CHECKSUMS
    forbidden_checksums = set(constants.ALL_KNOWN_CONTENT_CHECKSUMS).difference(
        allowed_checksums)
    self.stderr.write(
        _("Warning: the handle-artifact-checksums report is in "
          "tech preview and may change in the future."))
    self._show_on_demand_content(forbidden_checksums)
    self._show_immediate_content(forbidden_checksums)

def handle(self, *args, **options): """ This is the base function of this child Command class Through which class only all the actions to be performed :param args: :param options: :return: """ print('Sending incomplete registration partner notifications!') try: from background_scripts.partner.partner_registration_incomplete import PartnerRegistrationIncomplete notify_to_incomplete_partner = PartnerRegistrationIncomplete() notify_to_incomplete_partner.perform_actions() except Exception: raise CommandError( 'Sending incomplete registration partner notifications failed!' )
def handle(self, *args, **options):
    username = options['username']
    password = options['password']
    email = None
    if User.objects.filter(username=username).exists():
        raise CommandError("A user with this username already exists; choose another username")
    else:
        user = User.objects.create_superuser(
            username=username,
            email=email,
            password=password,
        )
        user.save()
        self.stdout.write(self.style.SUCCESS('Successfully created superuser'))

def handle_one_table(self, table_name):
    if table_name not in self.tables_to_export:
        raise CommandError('table "%s" is not supported' % table_name)

    count = 0
    with closing(connection.cursor()) as cursor:
        cursor.execute('SELECT COUNT(*) FROM ' + table_name)
        count, = cursor.fetchone()
    print('processing %s' % table_name, ',', count, 'items')

    sql = 'SELECT * FROM ' + table_name
    filename_csv = settings.MEDIA_ROOT + '/raw/' + table_name + '.csv'
    for offset in range(0, count, self.BATCH_SIZE):
        with closing(connection.cursor()) as cursor:
            cursor.execute(sql + ' LIMIT ' + str(self.BATCH_SIZE) + ' OFFSET ' + str(offset))
            self.dump_cursor(cursor, filename_csv, append=(offset > 0))

def get_dmd_data_path(self):
    """Return path to most recent directory of unzipped dm+d data, without the
    trailing slash.

    It expects to find this at data/dmd/[datestamp]/nhsbsa_dmd_[release].
    """
    # The extra slash ('') at the end of glob_pattern is to ensure we don't
    # capture any .zip files.
    glob_pattern = os.path.join(
        settings.PIPELINE_DATA_BASEDIR, "dmd", "*", "nhsbsa_dmd_*", "")
    paths = sorted(glob.glob(glob_pattern))
    if not paths:
        raise CommandError("No dmd data found")

    # Remove the extra slash.
    return paths[-1][:-1]

def render_field_variations(kwargs):
    kwargs["storage"] = get_storage_class(kwargs["storage"])()
    ignore_missing = kwargs.pop("ignore_missing")
    do_render = kwargs.pop("do_render")
    try:
        if callable(do_render):
            kwargs.pop("field_class")
            do_render = do_render(**kwargs)
        if do_render:
            render_variations(**kwargs)
    except FileNotFoundError as e:
        if not ignore_missing:
            raise CommandError(
                "Source file was not found, terminating. "
                "Use -i/--ignore-missing to skip this error.") from e
    return kwargs["file_name"]

def select_resources(resources, selected_resources):
    ret = []
    for res_id in selected_resources:
        for res in resources:
            try:
                if int(res_id) == res.id:
                    break
            except ValueError:
                pass
            if res_id == res.principal_email:
                break
            if res_id == res.resource.id:
                break
        else:
            raise CommandError('Resource with ID "%s" not found' % res_id)
        ret.append(res)
    return ret

def run_django_distill(self):
    """Outputs static files into the output dir."""
    if not os.path.isdir(settings.STATIC_ROOT):
        e = "Static source directory does not exist, run collectstatic"
        raise CommandError(e)
    output_dir = self.output_dir
    if os.path.isdir(output_dir):
        rmtree(output_dir)
    os.makedirs(output_dir)

    self.stdout.write(f"\n{self.output_dir}")
    save_url_as_static_file(output_dir, "/status/", "status/index.html")
    tbranches = TranslationBranch.objects.filter(complete=False)
    for tbranch_id in tbranches.values_list("id", flat=True):
        save_url_as_static_file(
            output_dir,
            f"/status/{tbranch_id}/",
            f"status/{tbranch_id}.html",
        )

    legalcodes = LegalCode.objects.validgroups()
    for group in legalcodes.keys():
        self.stdout.write(f"\n{self.output_dir}")
        for legalcode in legalcodes[group]:
            # deed
            filepath, symlinks = legalcode.get_file_and_links("deed")
            save_url_as_static_file(
                output_dir,
                legalcode.deed_url,
                filepath,
            )
            for symlink in symlinks:
                relative_symlink(output_dir, filepath, symlink)
            # legalcode
            filepath, symlinks = legalcode.get_file_and_links("legalcode")
            save_url_as_static_file(
                output_dir,
                legalcode.license_url,
                filepath,
            )
            for symlink in symlinks:
                relative_symlink(output_dir, filepath, symlink)

    self.stdout.write(f"\n{self.output_dir}")
    save_url_as_static_file(output_dir, reverse("metadata"), "licenses/metadata.yaml")