Example #1
 def handle(self, username, **options):
     traceback = options.get('traceback', False)
     try:
         user = BakeryUser.objects.get(username=username)
         user.is_staff = True
         user.is_superuser = True
         user.save(update_fields=['is_staff', 'is_superuser'])
         self.stdout.write('Updated {0} to superuser status'.format(username))
     except (BakeryUser.DoesNotExist, ValueError) as exc:
         ce = CommandError(exc)
         if traceback:
             tb = sys.exc_info()[2]
             raise ce.with_traceback(tb)
         else:
             raise ce
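Example #1 reads a positional username plus the --traceback flag from options. A sketch of the argument setup such a command presumes (names inferred from the code above, not taken from the original project):

    from django.core.management.base import BaseCommand

    class Command(BaseCommand):
        help = 'Promote an existing user to superuser status.'

        def add_arguments(self, parser):
            # Consumed as handle(self, username, **options); --traceback needs
            # no declaration because Django's BaseCommand defines it for every
            # management command.
            parser.add_argument('username')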
Example #2
 def handle(self, url, *args, **options):
     verbosity = int(options.get('verbosity', 1))
     traceback = options.get('traceback', False)
     if verbosity > 1:
         self.stdout.write('Importing {0}'.format(url))
     try:
         Cookie.objects.import_from_url(url)
         self.stdout.write('Imported {0}'.format(url))
     except ValueError as exc:
         ce = CommandError(str(exc).format(url))
         if traceback:
             tb = sys.exc_info()[2]
             raise ce.with_traceback(tb)
         else:
             raise ce
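Both examples above re-raise through with_traceback() so the original traceback survives. Under Python 3, exception chaining gives a terser equivalent; a sketch assuming the same Cookie model:

    try:
        Cookie.objects.import_from_url(url)
    except ValueError as exc:
        # 'from exc' chains the original as __cause__, preserving its
        # traceback without touching sys.exc_info().
        raise CommandError(str(exc)) from exc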
Example #3
def call_es(path, *args, **kw):
    method = kw.pop('method', 'GET')
    status = kw.pop('status', 200)
    if isinstance(status, int):
        status = [status]

    if not path.startswith('/'):
        path = '/' + path

    method = getattr(requests, method.lower())
    res = method(url(path), *args, **kw)

    if res.status_code not in status:
        error = CommandError('Call on %r failed.\n%s' % (path, res.content))
        error.content = res.content
        error.json = res.json()
        raise error

    return res
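call_es() bolts content and json attributes onto the CommandError before raising it. A usage sketch (the path and accepted statuses are illustrative):

    try:
        res = call_es('cookies/_search', status=[200, 404])
    except CommandError as exc:
        # The raising code stored the raw body and decoded JSON on the
        # exception, so callers can log structured details.
        print(exc.content)
        print(exc.json)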
Example #4
    def handle(self, *args, **options):
        try:
            cmd = args[0]
        except IndexError:
            raise CommandError('No command passed.')

        if cmd == 'add':
            email, carrier, region = self.get_ecr(args)
            try:
                OperatorPermission.objects.create(
                    user=self.get_user(email),
                    region=self.get_region_id(region),
                    carrier=self.get_carrier_id(carrier))
                self.stdout.write('Created %s/%s permission for %s' % (
                    region, carrier, email))
            except DatabaseError as e:
                exception = CommandError('Unable to grant permission.')
                exception.args = e.args
                raise exception
Example #5
    def handle(self, *args, **options):

        try:
            gas_pk = int(args[0])
            csv_filename = args[1]
        except (IndexError, ValueError):
            raise CommandError("Usage import_gasmembers: %s" % (self.args))

        if len(args) > 2:
            delimiter = args[2]
        else:
            delimiter = ";"

        if len(args) == 4:
            tmpl = args[3]
        else:
            tmpl = "%(name)s %(surname)s %(email)s %(city)s"

        # STEP 0: prepare data in dicts
        with open(csv_filename, "rb") as f:
            csvdata = f.read()

        fieldnames = get_params_from_template(tmpl)
        m = CSVManager(fieldnames=fieldnames, delimiter=delimiter, encoding=ENCODING)
        data = m.read(csvdata)
        log.debug(pformat(data))  # needs 'from pprint import pformat'; pprint() prints and returns None

        # Data prepared

        g = GAS.objects.get(pk=gas_pk)
        g.config.auto_populate_products = True
        g.config.save()

        # STEP 2: process data and create instances
        with transaction.commit_on_success():
            for d in data:
                log.info("#### ---- start new user import... ----####")
                try:
                    user, updated = self._get_or_create_user(d)
                    try:
                        pers = user.person
                    except Person.DoesNotExist:
                        contacts = self._get_or_create_contacts(d)
                        place = self._get_or_create_place(d)
                        pers = self._get_or_create_person(d, contacts, place)
                        pers.user = user
                        pers.save()
                    else:
                        # This is a user of an already created person
                        log.info(("PERSON %s ALREADY EXISTENT" % user.person).decode(ENCODING)) 
                        if updated:
                            log.debug("UPDATE PERSON DETAILS")

                            contacts = self._update_contacts(user.person, d)
                            place = self._update_place(user.person, d)
                            pers = self._update_person(user.person, d, contacts, place, force=True)
                        else:
                            log.debug("SKIP IT")


                except KeyError as e:
                    key = e.args[0]
                    if key not in self.allowed_keys:
                        raise CommandError("Invalid key '%s' provided. Allowed keys in python template are: %s" % (key, self.allowed_keys))
                    else:
                        raise CommandError("Key '%s' is REQUIRED." % key)

                gm, created = GASMember.objects.get_or_create(person=pers, gas=g)
                gm.save()
                log.info(("CREATED GASMEMBER %s" % gm).decode(ENCODING)) 
Example #6
    def handle(self, *args, **options):

        # Pick up the options
        database = options["database"]
        if database not in settings.DATABASES:
            raise CommandError("No database settings known for '%s'" % database)
        continuous = options["continuous"]

        # Use the test database if we are running the test suite
        if "FREPPLE_TEST" in os.environ:
            connections[database].close()
            settings.DATABASES[database]["NAME"] = settings.DATABASES[database]["TEST"][
                "NAME"
            ]

        # Check if a worker already exists
        if checkActive(database):
            if "FREPPLE_TEST" not in os.environ:
                logger.info(
                    "Worker for database '%s' already active"
                    % settings.DATABASES[database]["NAME"]
                )
            return

        # Spawn a worker-alive thread
        WorkerAlive(database).start()

        # Process the queue
        if "FREPPLE_TEST" not in os.environ:
            logger.info(
                "Worker %s for database '%s' starting to process jobs"
                % (os.getpid(), settings.DATABASES[database]["NAME"])
            )
        idle_loop_done = False
        setattr(_thread_locals, "database", database)
        while True:
            try:
                task = (
                    Task.objects.all()
                    .using(database)
                    .filter(status="Waiting")
                    .order_by("id")[0]
                )
                idle_loop_done = False
            except:
                # No more tasks found
                if continuous:
                    time.sleep(5)
                    continue
                else:
                    # Special case: we need to permit a single idle loop before shutting down
                    # the worker. If we shut down immediately, a newly launched task could think
                    # that a worker is already running - while it just shut down.
                    if idle_loop_done:
                        break
                    else:
                        idle_loop_done = True
                        time.sleep(5)
                        continue
            try:
                if "FREPPLE_TEST" not in os.environ:
                    logger.info(
                        "Worker %s for database '%s' starting task %d at %s"
                        % (
                            os.getpid(),
                            settings.DATABASES[database]["NAME"],
                            task.id,
                            datetime.now(),
                        )
                    )
                background = False
                task.started = datetime.now()
                # Verify the command exists
                exists = False
                for commandname in get_commands():
                    if commandname == task.name:
                        exists = True
                        break

                if not exists:
                    # No such task exists
                    logger.error("Task %s not recognized" % task.name)
                    task.status = "Failed"
                    task.processid = None
                    task.save(using=database)
                else:
                    # Close all database connections to assure the parent and child
                    # process don't share them.
                    connections.close_all()
                    # Spawn a new command process
                    args = []
                    kwargs = {"database": database, "task": task.id, "verbosity": 0}
                    background = (
                        "background" in task.arguments if task.arguments else False
                    )
                    if task.arguments:
                        for i in shlex.split(task.arguments):
                            if "=" in i:
                                key, val = i.split("=")
                                kwargs[key.strip("--").replace("-", "_")] = val
                            else:
                                args.append(i)
                    child = Process(
                        target=runCommand,
                        args=(task.name, *args),
                        kwargs=kwargs,
                        name="frepplectl %s" % task.name,
                    )
                    child.start()

                    # Normally, the child will update the processid.
                    # Just to make sure, we do it also here.
                    task.processid = child.pid
                    task.save(update_fields=["processid"], using=database)

                    # Wait for the child to finish
                    child.join()

                    # Read the task again from the database and update it
                    task = Task.objects.all().using(database).get(pk=task.id)
                    task.processid = None
                    if (
                        task.status not in ("Done", "Failed")
                        or not task.finished
                        or not task.started
                    ):
                        now = datetime.now()
                        if not task.started:
                            task.started = now
                        if not background:
                            if not task.finished:
                                task.finished = now
                            if task.status not in ("Done", "Failed"):
                                task.status = "Done"
                        task.save(using=database)
                    if "FREPPLE_TEST" not in os.environ:
                        logger.info(
                            "Worker %s for database '%s' finished task %d at %s: success"
                            % (
                                os.getpid(),
                                settings.DATABASES[database]["NAME"],
                                task.id,
                                datetime.now(),
                            )
                        )
            except Exception as e:
                # Read the task again from the database and update.
                task = Task.objects.all().using(database).get(pk=task.id)
                task.status = "Failed"
                now = datetime.now()
                if not task.started:
                    task.started = now
                task.finished = now
                task.message = str(e)
                task.save(using=database)
                if "FREPPLE_TEST" not in os.environ:
                    logger.info(
                        "Worker %s for database '%s' finished task %d at %s: failed"
                        % (
                            os.getpid(),
                            settings.DATABASES[database]["NAME"],
                            task.id,
                            datetime.now(),
                        )
                    )
        # Remove the parameter again
        try:
            Parameter.objects.all().using(database).get(pk="Worker alive").delete()
        except:
            pass
        setattr(_thread_locals, "database", None)

        # Remove log files exceeding the configured disk space allocation
        totallogs = 0
        filelist = []
        for x in os.listdir(settings.FREPPLE_LOGDIR):
            if x.endswith(".log"):
                size = 0
                creation = 0
                filename = os.path.join(settings.FREPPLE_LOGDIR, x)
                # needs try/catch because log files may still be open or being used and Windows does not like it
                try:
                    size = os.path.getsize(filename)
                    creation = os.path.getctime(filename)
                    filelist.append(
                        {"name": filename, "size": size, "creation": creation}
                    )
                except:
                    pass
                totallogs += size
        todelete = totallogs - settings.MAXTOTALLOGFILESIZE * 1024 * 1024
        filelist.sort(key=operator.itemgetter("creation"))
        for fordeletion in filelist:
            if todelete > 0:
                try:
                    os.remove(fordeletion["name"])
                    todelete -= fordeletion["size"]
                except:
                    pass

        # Exit
        if "FREPPLE_TEST" not in os.environ:
            logger.info(
                "Worker %s for database '%s' finished all jobs in the queue and exits"
                % (os.getpid(), settings.DATABASES[database]["NAME"])
            )
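The single extra idle pass before shutdown is the subtle part of the polling loop above. A framework-free sketch of just that pattern (the queue and poll interval are placeholders):

    import time

    def worker_loop(queue, poll_seconds=5, continuous=False):
        idle_loop_done = False
        while True:
            task = queue.pop(0) if queue else None
            if task is None:
                if continuous:
                    time.sleep(poll_seconds)
                    continue
                # Permit exactly one idle pass before exiting, so a task
                # launched during shutdown still sees an 'active' worker.
                if idle_loop_done:
                    break
                idle_loop_done = True
                time.sleep(poll_seconds)
                continue
            idle_loop_done = False
            task()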
Example #7
    def handle(self, *args, **options):

        self.verbosity = options["verbosity"]
        self.interactive = options["interactive"]

        # Import the 'management' module within each installed app, to register
        # dispatcher events.
        for app_config in apps.get_app_configs():
            if module_has_submodule(app_config.module, "management"):
                import_module(".management", app_config.name)

        # Get the database we're operating from
        db = options["database"]
        connection = connections[db]

        # Hook for backends needing any database preparation
        connection.prepare_database()
        # Work out which apps have migrations and which do not
        executor = MigrationExecutor(connection,
                                     self.migration_progress_callback)

        # Raise an error if any migrations are applied before their dependencies.
        executor.loader.check_consistent_history(connection)

        # Before anything else, see if there's conflicting apps and drop out
        # hard if there are any
        conflicts = executor.loader.detect_conflicts()
        if conflicts:
            name_str = "; ".join("%s in %s" % (", ".join(names), app)
                                 for app, names in conflicts.items())
            raise CommandError(
                "Conflicting migrations detected; multiple leaf nodes in the "
                "migration graph: (%s).\nTo fix them run "
                "'python manage.py makemigrations --merge'" % name_str)

        # If they supplied command line arguments, work out what they mean.
        target_app_labels_only = True
        if options["app_label"] and options["migration_name"]:
            app_label, migration_name = options["app_label"], options[
                "migration_name"]
            if app_label not in executor.loader.migrated_apps:
                raise CommandError("App '%s' does not have migrations." %
                                   app_label)
            if migration_name == "zero":
                targets = [(app_label, None)]
            else:
                try:
                    migration = executor.loader.get_migration_by_prefix(
                        app_label, migration_name)
                except AmbiguityError:
                    raise CommandError(
                        "More than one migration matches '%s' in app '%s'. "
                        "Please be more specific." %
                        (migration_name, app_label))
                except KeyError:
                    raise CommandError(
                        "Cannot find a migration matching '%s' from app '%s'."
                        % (migration_name, app_label))
                targets = [(app_label, migration.name)]
            target_app_labels_only = False
        elif options["app_label"]:
            app_label = options["app_label"]
            if app_label not in executor.loader.migrated_apps:
                raise CommandError("App '%s' does not have migrations." %
                                   app_label)
            targets = [
                key for key in executor.loader.graph.leaf_nodes()
                if key[0] == app_label
            ]
        else:
            targets = executor.loader.graph.leaf_nodes()

        plan = executor.migration_plan(targets)
        run_syncdb = options["run_syncdb"] and executor.loader.unmigrated_apps

        # Print some useful info
        if self.verbosity >= 1:
            self.stdout.write(
                self.style.MIGRATE_HEADING("Operations to perform:"))
            if run_syncdb:
                self.stdout.write(
                    self.style.MIGRATE_LABEL("  Synchronize unmigrated apps: ")
                    + (", ".join(sorted(executor.loader.unmigrated_apps))))
            if target_app_labels_only:
                self.stdout.write(
                    self.style.MIGRATE_LABEL("  Apply all migrations: ") +
                    (", ".join(sorted({a
                                       for a, n in targets})) or "(none)"))
            else:
                if targets[0][1] is None:
                    self.stdout.write(
                        self.style.MIGRATE_LABEL("  Unapply all migrations: ")
                        + "%s" % (targets[0][0], ))
                else:
                    self.stdout.write(
                        self.style.MIGRATE_LABEL(
                            "  Target specific migration: ") + "%s, from %s" %
                        (targets[0][1], targets[0][0]))

        pre_migrate_state = executor._create_project_state(
            with_applied_migrations=True)
        pre_migrate_apps = pre_migrate_state.apps
        emit_pre_migrate_signal(
            self.verbosity,
            self.interactive,
            connection.alias,
            apps=pre_migrate_apps,
            plan=plan,
        )

        # Run the syncdb phase.
        if run_syncdb:
            if self.verbosity >= 1:
                self.stdout.write(
                    self.style.MIGRATE_HEADING(
                        "Synchronizing apps without migrations:"))
            self.sync_apps(connection, executor.loader.unmigrated_apps)

        # Migrate!
        if self.verbosity >= 1:
            self.stdout.write(
                self.style.MIGRATE_HEADING("Running migrations:"))
        if not plan:
            if self.verbosity >= 1:
                self.stdout.write("  No migrations to apply.")
                # If there's changes that aren't in migrations yet, tell them how to fix it.
                autodetector = MigrationAutodetector(
                    executor.loader.project_state(),
                    ProjectState.from_apps(apps),
                )
                changes = autodetector.changes(graph=executor.loader.graph)
                if changes:
                    self.stdout.write(
                        self.style.NOTICE(
                            "  Your models have changes that are not yet reflected "
                            "in a migration, and so won't be applied."))
                    self.stdout.write(
                        self.style.NOTICE(
                            "  Run 'manage.py makemigrations' to make new "
                            "migrations, and then re-run 'manage.py migrate' to "
                            "apply them."))
            fake = False
            fake_initial = False
        else:
            fake = options["fake"]
            fake_initial = options["fake_initial"]
        post_migrate_state = executor.migrate(
            targets,
            plan=plan,
            state=pre_migrate_state.clone(),
            fake=fake,
            fake_initial=fake_initial,
        )
        # post_migrate signals have access to all models. Ensure that all models
        # are reloaded in case any are delayed.
        post_migrate_state.clear_delayed_apps_cache()
        post_migrate_apps = post_migrate_state.apps

        # Re-render models of real apps to include relationships now that
        # we've got a final state. This wouldn't be necessary if real apps
        # models were rendered with relationships in the first place.
        with post_migrate_apps.bulk_update():
            model_keys = []
            for model_state in post_migrate_apps.real_models:
                model_key = model_state.app_label, model_state.name_lower
                model_keys.append(model_key)
                post_migrate_apps.unregister_model(*model_key)
        post_migrate_apps.render_multiple([
            ModelState.from_model(apps.get_model(*model))
            for model in model_keys
        ])

        # Send the post_migrate signal, so individual apps can do whatever they need
        # to do at this point.
        emit_post_migrate_signal(
            self.verbosity,
            self.interactive,
            connection.alias,
            apps=post_migrate_apps,
            plan=plan,
        )
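For completeness, a sketch of driving this command from code rather than the CLI (the app label, migration name and database alias are illustrative):

    from django.core.management import call_command

    # Equivalent to: python manage.py migrate myapp 0003 --database=default
    call_command('migrate', 'myapp', '0003', database='default', verbosity=1)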
Example #8
def import_tg_file(self, options):
    '''Import talkgroups using the talkgroup file from trunk-recorder.'''
    file_name = options['file']
    system_id = options['system']
    truncate = options['truncate']
    try:
        system = System.objects.get(pk=system_id)
    except System.DoesNotExist:
        self.stdout.write("Valid systems")
        for system in System.objects.all():
            self.stdout.write("#{} - {}".format(system.pk, system.name))
        raise CommandError(
            'System #{} was not a valid system'.format(system_id))
    self.stdout.write("Importing talkgroups for system #{} - {}".format(
        system.pk, system.name))
    if truncate:
        mode_max_length = TalkGroup._meta.get_field('mode').max_length
        alpha_tag_max_length = TalkGroup._meta.get_field(
            'alpha_tag').max_length
        description_max_length = TalkGroup._meta.get_field(
            'description').max_length
    with open(file_name) as tg_file:
        tg_info = csv.reader(tg_file, delimiter=',', quotechar='"')
        line_number = 0
        for row in tg_info:
            line_number += 1
            try:
                if truncate:
                    if len(row[2]) > mode_max_length:
                        row[2] = row[2][:mode_max_length]
                        self.stdout.write(
                            "Truncating mode from line ({}) TG {}".format(
                                line_number, row[3]))
                    if len(row[3]) > alpha_tag_max_length:
                        row[3] = row[3][:alpha_tag_max_length]
                        self.stdout.write(
                            "Truncating alpha_tag from line ({}) TG {}".format(
                                line_number, row[3]))
                    if len(row[4]) > description_max_length:
                        row[4] = row[4][:description_max_length]
                        self.stdout.write(
                            "Truncating description from line ({}) TG {}".
                            format(line_number, row[3]))
                #print('LEN ' + str(len(row)))
                priority = 3
                try:
                    priority = row[6]
                except IndexError:
                    pass
                try:
                    priority = int(priority)
                except ValueError:
                    priority = 3
                obj, create = TalkGroup.objects.update_or_create(
                    dec_id=row[0],
                    system=system,
                    defaults={
                        'mode': row[2],
                        'alpha_tag': row[3],
                        'description': row[4],
                        'priority': priority
                    })
                obj.service_type = row[5][:20]
                obj.save()
            except (IntegrityError, IndexError):
                pass
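The positional indexing above implies this column layout for the talkgroup CSV, reconstructed from the code itself rather than from trunk-recorder documentation:

    # row[0] = dec_id        decimal talkgroup id (the update_or_create key)
    # row[1]                 unused here
    # row[2] = mode          truncated to the model field's max_length
    # row[3] = alpha_tag     truncated likewise
    # row[4] = description   truncated likewise
    # row[5] = service_type  always truncated to 20 characters
    # row[6] = priority      optional; falls back to 3 if absent or non-integer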
Example #9
            connection = Database.connect(conn_string)
            connection.set_isolation_level(0)  # 0 = autocommit, required for DROP/CREATE DATABASE
            cursor = connection.cursor()
            drop_query = 'DROP DATABASE %s' % db_name
            logging.info('Executing... "' + drop_query + '"')

            try:
                cursor.execute(drop_query)
            except Database.ProgrammingError as e:
                logging.info("Error: %s" % str(e))

            # Encoding should be SQL_ASCII (7-bit postgres default) or
            # preferably UTF8 (8-bit)
            create_query = "CREATE DATABASE %s WITH OWNER = %s ENCODING = \
            'UTF8' " % (db_name, user)

            if postgis.match(engine):
                create_query += 'TEMPLATE = template_postgis '
            if settings.DEFAULT_TABLESPACE:
                create_query += 'TABLESPACE = %s;' % (
                    settings.DEFAULT_TABLESPACE
                )
            else:
                create_query += ';'
            logging.info('Executing... "' + create_query + '"')
            cursor.execute(create_query)

        else:
            raise CommandError("Unknown database engine %s" % engine)
Example #10
    def handle(self, *args, **options):

        self.verbosity = int(options.get('verbosity'))
        self.interactive = options.get('interactive')
        self.show_traceback = options.get('traceback')
        self.load_initial_data = options.get('load_initial_data')
        self.test_database = options.get('test_database', False)

        # Import the 'management' module within each installed app, to register
        # dispatcher events.
        for app_config in apps.get_app_configs():
            if module_has_submodule(app_config.module, "management"):
                import_module('.management', app_config.name)

        # Get the database we're operating from
        db = options.get('database')
        connection = connections[db]

        # If they asked for a migration listing, quit main execution flow and show it
        if options.get("list", False):
            return self.show_migration_list(connection, args)

        # Work out which apps have migrations and which do not
        executor = MigrationExecutor(connection,
                                     self.migration_progress_callback)

        # Before anything else, see if there's conflicting apps and drop out
        # hard if there are any
        conflicts = executor.loader.detect_conflicts()
        if conflicts:
            name_str = "; ".join("%s in %s" % (", ".join(names), app)
                                 for app, names in conflicts.items())
            raise CommandError(
                "Conflicting migrations detected (%s).\nTo fix them run 'python manage.py makemigrations --merge'"
                % name_str)

        # If they supplied command line arguments, work out what they mean.
        run_syncdb = False
        target_app_labels_only = True
        if len(args) > 2:
            raise CommandError(
                "Too many command-line arguments (expecting 'app_label' or 'app_label migrationname')"
            )
        elif len(args) == 2:
            app_label, migration_name = args
            if app_label not in executor.loader.migrated_apps:
                raise CommandError(
                    "App '%s' does not have migrations (you cannot selectively sync unmigrated apps)"
                    % app_label)
            if migration_name == "zero":
                targets = [(app_label, None)]
            else:
                try:
                    migration = executor.loader.get_migration_by_prefix(
                        app_label, migration_name)
                except AmbiguityError:
                    raise CommandError(
                        "More than one migration matches '%s' in app '%s'. Please be more specific."
                        % (migration_name, app_label))
                except KeyError:
                    raise CommandError(
                        "Cannot find a migration matching '%s' from app '%s'."
                        % (app_label, migration_name))
                targets = [(app_label, migration.name)]
            target_app_labels_only = False
        elif len(args) == 1:
            app_label = args[0]
            if app_label not in executor.loader.migrated_apps:
                raise CommandError(
                    "App '%s' does not have migrations (you cannot selectively sync unmigrated apps)"
                    % app_label)
            targets = [
                key for key in executor.loader.graph.leaf_nodes()
                if key[0] == app_label
            ]
        else:
            targets = executor.loader.graph.leaf_nodes()
            run_syncdb = True

        plan = executor.migration_plan(targets)

        # Print some useful info
        if self.verbosity >= 1:
            self.stdout.write(
                self.style.MIGRATE_HEADING("Operations to perform:"))
            if run_syncdb:
                self.stdout.write(
                    self.style.MIGRATE_LABEL("  Synchronize unmigrated apps: ")
                    + (", ".join(executor.loader.unmigrated_apps) or "(none)"))
            if target_app_labels_only:
                self.stdout.write(
                    self.style.MIGRATE_LABEL("  Apply all migrations: ") +
                    (", ".join(set(a for a, n in targets)) or "(none)"))
            else:
                if targets[0][1] is None:
                    self.stdout.write(
                        self.style.MIGRATE_LABEL("  Unapply all migrations: ")
                        + "%s" % (targets[0][0], ))
                else:
                    self.stdout.write(
                        self.style.MIGRATE_LABEL(
                            "  Target specific migration: ") + "%s, from %s" %
                        (targets[0][1], targets[0][0]))

        # Run the syncdb phase.
        # If you ever manage to get rid of this, I owe you many, many drinks.
        # Note that pre_migrate is called from inside here, as it needs
        # the list of models about to be installed.
        if run_syncdb:
            if self.verbosity >= 1:
                self.stdout.write(
                    self.style.MIGRATE_HEADING(
                        "Synchronizing apps without migrations:"))
            created_models = self.sync_apps(connection,
                                            executor.loader.unmigrated_apps)
        else:
            created_models = []

        # Migrate!
        if self.verbosity >= 1:
            self.stdout.write(
                self.style.MIGRATE_HEADING("Running migrations:"))
        if not plan:
            if self.verbosity >= 1:
                self.stdout.write("  No migrations needed.")
                # If there's changes that aren't in migrations yet, tell them how to fix it.
                autodetector = MigrationAutodetector(
                    executor.loader.graph.project_state(),
                    ProjectState.from_apps(apps),
                )
                changes = autodetector.changes(graph=executor.loader.graph)
                if changes:
                    self.stdout.write(
                        self.style.NOTICE(
                            "  Your models have changes that are not yet reflected in a migration, and so won't be applied."
                        ))
                    self.stdout.write(
                        self.style.NOTICE(
                            "  Run 'manage.py makemigrations' to make new migrations, and then re-run 'manage.py migrate' to apply them."
                        ))
        else:
            executor.migrate(targets, plan, fake=options.get("fake", False))

        # Send the post_migrate signal, so individual apps can do whatever they need
        # to do at this point.
        emit_post_migrate_signal(created_models, self.verbosity,
                                 self.interactive, connection.alias)
Example #11
def _confirm(message):
    if input('{} [y/n]'.format(message)).lower() == 'y':
        return True
    else:
        raise CommandError('abort')
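A sketch of how _confirm() gates a destructive command (the prompt text is illustrative):

    def handle(self, *args, **options):
        _confirm('This will delete all records. Continue?')
        # Reached only on a 'y' answer; anything else raised CommandError('abort').
        ...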
Example #12
    def collect(self):
        """
        Perform the bulk of the work of collectstatic.
        Split off from handle() to facilitate testing.
        """
        if self.symlink and not self.local:
            raise CommandError("Can't symlink to a remote destination.")

        if self.clear:
            self.clear_dir('')

        if self.symlink:
            handler = self.link_file
        else:
            handler = self.copy_file

        found_files = {}
        for finder in get_finders():
            for path, storage in finder.list(self.ignore_patterns):
                # Prefix the relative path if the source storage contains it
                if getattr(storage, 'prefix', None):
                    prefixed_path = os.path.join(storage.prefix, path)
                else:
                    prefixed_path = path

                if prefixed_path not in found_files:
                    found_files[prefixed_path] = (storage, path)
                    handler(path, prefixed_path, storage)
                else:
                    self.log(
                        "Found another file with the destination path '%s'. It "
                        "will be ignored since only the first encountered file "
                        "is collected. If this is not what you want, make sure "
                        "every static file has a unique path." % prefixed_path,
                        level=1,
                    )

        # Storage backends may define a post_process() method.
        if self.post_process and hasattr(self.storage, 'post_process'):
            processor = self.storage.post_process(found_files,
                                                  dry_run=self.dry_run)
            for original_path, processed_path, processed in processor:
                if isinstance(processed, Exception):
                    self.stderr.write("Post-processing '%s' failed!" %
                                      original_path)
                    # Add a blank line before the traceback, otherwise it's
                    # too easy to miss the relevant part of the error message.
                    self.stderr.write("")
                    raise processed
                if processed:
                    self.log("Post-processed '%s' as '%s'" %
                             (original_path, processed_path),
                             level=2)
                    self.post_processed_files.append(original_path)
                else:
                    self.log("Skipped post-processing '%s'" % original_path)

        return {
            'modified': self.copied_files + self.symlinked_files,
            'unmodified': self.unmodified_files,
            'post_processed': self.post_processed_files,
        }
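collect() expects a storage post_process() to yield (original_path, processed_path, processed) triples, where processed may be an Exception. A minimal conforming sketch, a pass-through that processes nothing:

    def post_process(self, paths, dry_run=False, **options):
        # paths maps prefixed_path -> (storage, path), as built in collect().
        for original_path, (storage, path) in paths.items():
            if dry_run:
                continue
            # A falsy third element means 'skipped', not 'failed'.
            yield original_path, original_path, False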
Example #13
    def _import(
        self, path, release_info, table, import_info=True, service=False, force=False, bulk=True
    ):
        """Bulk data into table and add entry to ImportInfo table.

        :param table_path: Path to TSV file to import
        :param release_info: Content of release info as dict
        :param table: Django model object of table to import
        :param null: Null value for bulk import
        :return: Boolean if import happened (True) or not (False)
        """

        self.stdout.write(
            "{table} -- Importing {table} {version} ({genomebuild}, source: {path}) ...".format(
                **release_info, path=path
            )
        )

        if not service and release_info["table"] != table.__name__:
            raise CommandError("Table name in release_info file does not match table name.")

        # Skip importing table if record already exists in import info table and re-import is not forced.
        if import_info and not force and self._get_import_info_record(release_info).exists():
            self.stdout.write(
                self.style.WARNING(
                    "Skipping {table} {version} ({genomebuild}). Already imported.".format(
                        **release_info
                    )
                )
            )
            return False

        # Service table should just create an import info record, no actual data is imported.
        if not service:
            # Clear out any existing entries for this release/database.
            if import_info:
                self.stdout.write("{table} -- Removing old {table} results.".format(**release_info))
                sa_table = aldjemy.core.get_meta().tables[table._meta.db_table]
                if "release" in sa_table.c:
                    SQLALCHEMY_ENGINE.execute(
                        sa_table.delete().where(sa_table.c.release == release_info["genomebuild"])
                    )
                else:
                    SQLALCHEMY_ENGINE.execute(sa_table.delete())
                self.stdout.write("{table} -- Importing new {table} data".format(**release_info))

            # Import data
            if bulk:
                try:
                    table.objects.from_csv(
                        path,
                        delimiter="\t",
                        null=release_info["null_value"],
                        ignore_conflicts=False,
                        drop_constraints=True,
                        drop_indexes=True,
                    )
                except Exception as e:
                    self.stderr.write(
                        "Error during import to table %s:\n%s" % (table._meta.db_table, e)
                    )
                    # Remove already imported data.
                    sa_table = aldjemy.core.get_meta().tables[table._meta.db_table]
                    if "release" in sa_table.c:
                        SQLALCHEMY_ENGINE.execute(
                            sa_table.delete().where(
                                sa_table.c.release == release_info["genomebuild"]
                            )
                        )
                    else:
                        SQLALCHEMY_ENGINE.execute(sa_table.delete())
                    # Continue with remaining tables.
                    return False
            else:  # no bulk import
                with transaction.atomic():
                    for record in tsv_reader(path):
                        table.objects.create(**record)

        if import_info:
            # Create import info record. Existence already checked above.
            self._create_import_info_record(release_info)

        self.stdout.write(
            self.style.SUCCESS(
                "{table} -- Finished importing {table} {version} ({path})".format(
                    **release_info, path=os.path.basename(path)
                )
            )
        )
        return True
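The format calls in _import() imply that release_info carries at least these keys; the shape is reconstructed from usage and the values are illustrative:

    release_info = {
        'table': 'Hgnc',          # must equal table.__name__ unless service=True
        'version': '2020-01',
        'genomebuild': 'GRCh37',  # matched against the 'release' column on delete
        'null_value': '',         # passed to from_csv() as the NULL marker
    }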
Example #14
    def handle(self, *args, **options):
        """Iterate over genomebuilds, database folders and versions to gather all required information for import.
        """

        if not options["service"] and options["tables_path"] is None:
            raise CommandError("Please set either --tables-path or --service")
        if options["tables_path"] and options["service"]:
            raise CommandError("Please set either --tables-path or --service")
        if options["service"] and (
            options["service_name"] is None
            or options["service_version"] is None
            or options["service_genomebuild"] is None
        ):
            raise CommandError(
                "Please set --service-name, --service-version and --service-genomebuild when using flag --service"
            )

        if options["service"]:
            return self._import(
                None,
                {
                    "table": options["service_name"],
                    "genomebuild": options["service_genomebuild"],
                    "version": options["service_version"],
                },
                None,
                import_info=True,
                service=True,
            )

        path_import_versions = options.get("import_versions_path") or os.path.join(
            options["tables_path"], "import_versions.tsv"
        )

        if not os.path.isfile(path_import_versions):
            raise CommandError("Require version import info file {}.".format(path_import_versions))

        self._switch_vacuum(enable=False)

        import_infos = list(tsv_reader(path_import_versions))
        if options["threads"] == 0:  # sequential
            for import_info in import_infos:
                if import_info["table_group"] in TABLES[import_info["build"]]:
                    self._handle_import(import_info, options)
                else:
                    self.stderr.write(
                        "Table group {} is no registered table group.".format(
                            import_info["table_group"]
                        )
                    )
        else:
            pool = ThreadPool(processes=options["threads"])
            for import_info in import_infos:
                if import_info["table_group"] in TABLES[import_info["build"]]:
                    pool.apply_async(self._handle_import_try_catch, (import_info, options))
                else:
                    self.stderr.write(
                        "Table group {} is no registered table group.".format(
                            import_info["table_group"]
                        )
                    )
            pool.close()
            pool.join()

        self._switch_vacuum(enable=True)
Example #15
    def handle_import(self, options):
        """
        Gets the posts from either the provided URL or the path if it
        is local.
        """

        url = options.get("url")
        if url is None:
            raise CommandError("Usage is import_wordpress %s" % self.args)
        try:
            import feedparser
        except ImportError:
            raise CommandError("Could not import the feedparser library.")
        feed = feedparser.parse(url)

        # We use the minidom parser as well because feedparser won't
        # interpret WXR comments correctly and ends up munging them.
        # xml.dom.minidom is used simply to pull the comments when we
        # get to them.
        xml = parse(url)
        xmlitems = xml.getElementsByTagName("item")

        for (i, entry) in enumerate(feed["entries"]):
            # Get a pointer to the right position in the minidom as well.
            xmlitem = xmlitems[i]
            content = linebreaks(self.wp_caption(entry.content[0]["value"]))

            # Get the time struct of the published date if possible and
            # the updated date if we can't.
            pub_date = getattr(entry, "published_parsed", entry.updated_parsed)
            if pub_date:
                pub_date = datetime.fromtimestamp(mktime(pub_date))
                pub_date -= timedelta(seconds=timezone)

            # Tags and categories are all under "tags" marked with a scheme.
            terms = defaultdict(set)
            for item in getattr(entry, "tags", []):
                terms[item.scheme].add(item.term)

            if entry.wp_post_type == "post":
                post = self.add_post(
                    title=entry.title,
                    content=content,
                    pub_date=pub_date,
                    tags=terms["tag"],
                    categories=terms["category"],
                    old_url=entry.get("link", entry.id),
                )

                # Get the comments from the xml doc.
                for c in xmlitem.getElementsByTagName("wp:comment"):
                    name = self.get_text(c, "author")
                    email = self.get_text(c, "author_email")
                    url = self.get_text(c, "author_url")
                    body = self.get_text(c, "content")
                    pub_date = self.get_text(c, "date_gmt")
                    fmt = "%Y-%m-%d %H:%M:%S"
                    pub_date = datetime.strptime(pub_date, fmt)
                    pub_date -= timedelta(seconds=timezone)
                    self.add_comment(
                        post=post,
                        name=name,
                        email=email,
                        body=body,
                        website=url,
                        pub_date=pub_date,
                    )

            elif entry.wp_post_type == "page":
                old_id = getattr(entry, "wp_post_id")
                parent_id = getattr(entry, "wp_post_parent")
                self.add_page(
                    title=entry.title,
                    content=content,
                    tags=terms["tag"],
                    old_id=old_id,
                    old_parent_id=parent_id,
                )
Example #16
    def handle(self, *args, **options):
        if len(args) < 1:
            raise CommandError('Usage is rebuild_indicator_table %s' % self.args)

        config_id = args[0]
        tasks.rebuild_indicators(config_id)
Example #17
    def handle(self, *args, **options):
        """

        """

        # update party data

        if options['party-mode']:
            print "it's party time!"
            list_of_parties = Party.current_knesset.all()
            list_of_party_ids = [party.id for party in list_of_parties]
            list_of_party_ids = self.inspect_list_of_all_objects(list_of_party_ids, 'party', options)

            for i, party_id in enumerate(list_of_party_ids):
                print('working on %d of %d: party: %s' % (i + 1, len(list_of_party_ids), party_id))
                self.update_party_instance(party_id, options)
                sleep(SLEEP_TIME)

        # update members data
        else:
            list_of_members = list()
            test_for_all_members = False

            # Case no args - fetch all Members
            if len(args) == 0:
                list_of_members = list(Member.objects.all())
                test_for_all_members = True

            # Case arg exists - fetch Member by id supplied
            elif len(args) == 1:
                member_id = args[0]
                try:
                    member = Member.objects.get(id=member_id)
                    list_of_members.append(member)

                except Member.DoesNotExist:
                    warning_msg = "Status #({0}) does not exist.".format(member_id)
                    logger = logging.getLogger('django')
                    logger.warning(warning_msg)
                    raise CommandError(
                        'Member "%s" does not exist. If you know it to exist in oknesset.org, run with no parameters and it will be added automatically.' % member_id)

            # Case invalid args
            else:
                raise CommandError('Please enter a valid member id')

            list_of_member_ids = [member.id for member in list_of_members]

            # if executed as update for all members, test for gaps between source and local.
            if test_for_all_members:
                list_of_member_ids = self.inspect_list_of_all_objects(list_of_member_ids, 'member', options)
                print(list_of_member_ids)

            # Iterate over list_of_members of direct update on selected members
            for i, member_id in enumerate(list_of_member_ids[LIST_INDEX_START:]):
                print('working on %d of %d: member: %s' % (i + LIST_INDEX_START + 1, len(list_of_member_ids[LIST_INDEX_START:]), member_id), end=' ')
                self.update_member_instance(member_id)
                print('sleep for %d secs' % SLEEP_TIME)
                sleep(SLEEP_TIME)

        print('Done.')
Example #18
    def handle(self, *args, **options):
        try:
            ytc = YoutubeConnection.objects.filter(deleted=False)[0]
        except IndexError:
            self.stdout.write(u'Error: there is no YoutubeConnection')
            ytc = None

        if ytc:
            storage = Storage(YoutubeConnection, 'email', ytc.email,
                              'credential')
            credential = storage.get()
            if credential is None or credential.invalid:
                raise CommandError(u'YoutubeConnection is invalid')

            http = httplib2.Http()
            http = credential.authorize(http)
            service = build('youtube', 'v3', http=http)

            if args:
                aycs = AdvertiserYoutubeChannel.objects.filter(
                    youtube_id__in=args, first_load=True, deleted=False)
            else:
                aycs = AdvertiserYoutubeChannel.objects.filter(first_load=True,
                                                               deleted=False)

            # do first load
            for ayc in list(aycs):
                channels = service.channels().list(
                    part='statistics', id=ayc.youtube_id).execute()

                if not channels[u'items']:
                    continue

                ayc.views_count = channels[u'items'][0][u'statistics'][
                    u'viewCount']
                ayc.videos_count = channels[u'items'][0][u'statistics'][
                    u'videoCount']
                ayc.subscribers_count = channels[u'items'][0][u'statistics'][
                    u'subscriberCount']
                ayc.save()

                evolution = EvolutionYoutubeChannel(
                    youtube_channel=ayc,
                    views_count=channels[u'items'][0][u'statistics']
                    [u'viewCount'],
                    videos_count=channels[u'items'][0][u'statistics']
                    [u'videoCount'],
                    subscribers_count=channels[u'items'][0][u'statistics']
                    [u'subscriberCount'],
                )
                evolution.save()

                self.stdout.write(
                    u'Successfully updated Advertiser Youtube Channel: %s \n\n'
                    % ayc.youtube_id)

                main_loop = True
                page_token = None

                while main_loop:
                    search = service.search().list(
                        part='id',
                        channelId=ayc.youtube_id,
                        maxResults=50,
                        order='date',
                        pageToken=page_token,
                        type='video',
                    ).execute()

                    items = [
                        item[u'id'][u'videoId'] for item in search[u'items']
                        if item[u'kind'] == u'youtube#searchResult'
                        and item[u'id'][u'kind'] == u'youtube#video'
                    ]

                    videos = service.videos().list(
                        part='id,snippet,statistics',
                        id=','.join(items),
                        maxResults=50,
                    ).execute()

                    for row in videos[u'items']:
                        # saving video
                        if row[u'kind'] == u'youtube#video':
                            try:
                                ytv = YoutubeVideo.objects.get(
                                    youtube_channel=ayc,
                                    youtube_id__exact=row[u'id'],
                                    deleted=False,
                                )
                            except YoutubeVideo.DoesNotExist:
                                try:
                                    ytv = YoutubeVideo(
                                        youtube_channel=ayc,
                                        youtube_id=row[u'id'],
                                        title=row[u'snippet'][u'title'],
                                        description=row[u'snippet']
                                        [u'description'],
                                        created_time=datetime.datetime.
                                        strptime(
                                            row[u'snippet'][u'publishedAt'],
                                            '%Y-%m-%dT%H:%M:%S.000Z'),
                                        views=row[u'statistics'][u'viewCount'],
                                        likes=row[u'statistics'][u'likeCount'],
                                        dislikes=row[u'statistics']
                                        [u'dislikeCount'],
                                        favorites=row[u'statistics']
                                        [u'favoriteCount'],
                                        comments=row[u'statistics']
                                        [u'commentCount'],
                                    )
                                except Exception as e:
                                    err = APIError(
                                        app_name=u'metrics_social',
                                        model_name='YoutubeVideo',
                                        error=u'%s: %s' % (type(e).__name__, e),
                                        response=row,
                                    )
                                    err.save()
                                    self.stdout.write(
                                        u'Inserted error video: %s %s' %
                                        (type(e).__name__, e))
                                else:
                                    ytv.save()
                                    self.stdout.write(u'Inserted video %s' %
                                                      ytv.youtube_id)
                            else:
                                main_loop = False
                                self.stdout.write(u'Video already exists: %s' %
                                                  ytv.youtube_id)
                                break

                    if u'nextPageToken' in search:
                        page_token = search[u'nextPageToken']
                    else:
                        main_loop = False
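The outer while-loop implements page-token pagination against the YouTube Data API. The bare pattern, stripped of the model bookkeeping (list_page and process are hypothetical helpers):

    page_token = None
    while True:
        response = list_page(page_token)  # wraps service.search().list(...).execute()
        process(response['items'])
        page_token = response.get('nextPageToken')
        if page_token is None:
            break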
Example #19
    def handle(self, *args, **options):
        if args:
            appname, = args

        if options.get('no_color', False):
            style = no_style()
        else:
            style = color_style()

        if getattr(settings, 'ADMIN_FOR', None):
            settings_modules = [__import__(m, {}, {}, ['']) for m in settings.ADMIN_FOR]
        else:
            settings_modules = [settings]

        self.LANGUAGES = getattr(settings, 'LANGUAGES', ((None, None), ))

        language = options.get('language', None)
        if language is not None:
            translation.activate(language)
            self.LANGUAGES = [(code, name) for code, name in self.LANGUAGES if code == language]

        decorator = options.get('decorator')
        if not decorator:
            decorator = ['login_required']

        format_style = options.get('format_style')
        if format_style not in FMTR:
            raise CommandError("Format style '%s' does not exist. Options: %s" % (format_style, FMTR.keys()))
        pretty_json = format_style == 'pretty-json'
        if pretty_json:
            format_style = 'json'
        fmtr = FMTR[format_style]

        urlconf = options.get('urlconf')

        views = []
        for settings_mod in settings_modules:
            if not hasattr(settings_mod, urlconf):
                raise CommandError("Settings module {} does not have the attribute {}.".format(settings_mod, urlconf))

            try:
                # Bind the module to a new name; rebinding 'urlconf' would break
                # the hasattr() check on the next settings module.
                urlconf_mod = __import__(getattr(settings_mod, urlconf), {}, {}, [''])
            except Exception as e:
                if options.get('traceback', None):
                    import traceback
                    traceback.print_exc()
                print(style.ERROR("Error occurred while trying to load %s: %s" % (getattr(settings_mod, urlconf), str(e))))
                continue

            view_functions = self.extract_views_from_urlpatterns(urlconf_mod.urlpatterns)
            for (func, regex, url_name) in view_functions:
                if hasattr(func, '__globals__'):
                    func_globals = func.__globals__
                elif hasattr(func, 'func_globals'):
                    func_globals = func.func_globals
                else:
                    func_globals = {}

                decorators = [d for d in decorator if d in func_globals]

                if isinstance(func, functools.partial):
                    func = func.func
                    decorators.insert(0, 'functools.partial')

                if hasattr(func, '__name__'):
                    func_name = func.__name__
                elif hasattr(func, '__class__'):
                    func_name = '%s()' % func.__class__.__name__
                else:
                    func_name = re.sub(r' at 0x[0-9a-f]+', '', repr(func))

                module = '{0}.{1}'.format(func.__module__, func_name)
                url_name = url_name or ''
                url = simplify_regex(regex)
                decorator_str = ', '.join(decorators)

                if format_style == 'json':
                    views.append({"url": url, "module": module, "name": url_name, "decorators": decorator})
                else:
                    views.append(fmtr.format(
                        module='{0}.{1}'.format(style.MODULE(func.__module__), style.MODULE_NAME(func_name)),
                        url_name=style.URL_NAME(url_name),
                        url=style.URL(url),
                        decorator=decorator_str,
                    ))

        if not options.get('unsorted', False) and format_style != 'json':
            views = sorted(views)

        if format_style == 'aligned':
            views = [row.split(',', 3) for row in views]
            widths = [len(max(columns, key=len)) for columns in zip(*views)]
            views = [
                '   '.join('{0:<{1}}'.format(cdata, width) for width, cdata in zip(widths, row))
                for row in views
            ]
        elif format_style == 'table':
            # Reformat all data and show in a table format

            views = [row.split(',', 3) for row in views]
            widths = [len(max(columns, key=len)) for columns in zip(*views)]
            table_views = []

            header = (style.MODULE_NAME('URL'), style.MODULE_NAME('Module'), style.MODULE_NAME('Name'), style.MODULE_NAME('Decorator'))
            table_views.append(
                ' | '.join('{0:<{1}}'.format(title, width) for width, title in zip(widths, header))
            )
            table_views.append('-+-'.join('-' * width for width in widths))

            for row in views:
                table_views.append(
                    ' | '.join('{0:<{1}}'.format(cdata, width) for width, cdata in zip(widths, row))
                )

            # Replace original views so we can return the same object
            views = table_views

        elif format_style == 'json':
            if pretty_json:
                return json.dumps(views, indent=4)
            return json.dumps(views)

        return "\n".join([v for v in views]) + "\n"
Example #20
0
    def handle(self, **options):
        self.set_options(**options)

        message = ['\n']
        if self.dry_run:
            message.append(
                'You have activated the --dry-run option so no files will be modified.\n\n'
            )

        message.append(
            'The staticfiles directories {static_file_directories} will be '
            'looked up and minified.\n\n'.format(
                static_file_directories=self.static_dir))

        message.append(
            'You have requested to collect static files at the destination\n'
            'location as specified in your settings')

        if self.is_local_storage() and self.storage.location:
            destination_path = self.storage.location
            message.append(':\n\n    %s\n\n' % destination_path)
            should_warn_user = (self.storage.exists(destination_path) and any(
                self.storage.listdir(destination_path)))
        else:
            destination_path = None
            message.append('.\n\n')
            # Destination files existence not checked; play it safe and warn.
            should_warn_user = True

        if self.interactive and should_warn_user:
            if self.clear:
                message.append(
                    'This will DELETE ALL FILES in this location!\n')
            else:
                message.append('This will overwrite existing files!\n')

            message.append('Are you sure you want to do this?\n\n'
                           "Type 'yes' to continue, or 'no' to cancel: ")
            if input(''.join(message)) != 'yes':
                raise CommandError("Collecting static files cancelled.")

            # Delete the static folder
            if os.path.exists(self.static_root) and os.path.isdir(
                    self.static_root):
                shutil.rmtree(self.static_root)

        for directory in self.static_dir:
            for root, dirs, files in os.walk(directory):
                for file in files:
                    self._create_json_file(file, root)

        self._json_creation()

        message.append('Initialized {file_name} json file'.format(
            file_name=self.json_file_name))

        collected = self.collect()
        modified_count = len(collected['modified'])
        unmodified_count = len(collected['unmodified'])
        post_processed_count = len(collected['post_processed'])

        if self.verbosity >= 1:
            template = ("\n%(modified_count)s %(identifier)s %(action)s"
                        "%(destination)s%(unmodified)s%(post_processed)s.\n")
            summary = template % {
                'modified_count':
                modified_count,
                'identifier':
                'static file' + ('' if modified_count == 1 else 's'),
                'action':
                'symlinked' if self.symlink else 'copied',
                'destination':
                (" to '%s'" % destination_path if destination_path else ''),
                'unmodified':
                (', %s unmodified' %
                 unmodified_count if collected['unmodified'] else ''),
                'post_processed':
                (collected['post_processed']
                 and ', %s post-processed' % post_processed_count or ''),
            }
            return summary
Example #21
0
    def handle(self, *args, **options):
        now = datetime.datetime.now(tzlocal())
        if get_container_id() != cache.get(get_cronjob_worker_cache_key()):
            raise CommandError("You're not the worker!")
        print('%s: %s' %
              (now.strftime('%Y-%m-%d %H:%M'), self.__module__.split('.')[-1]))
        site = Site.objects.get_current()
        if site.instelling:
            sg = sendgrid.SendGridAPIClient(settings.SENDGRID_API_KEY)
            now = now + dateutil.relativedelta.relativedelta(months=-1)
            regeling_nieuw = Regeling.objects.filter(
                **{'datum_gecreeerd__gt': now})
            gebruikers_nieuw = User.objects.filter(**{
                'date_joined__gt': now,
            }).exclude(profiel=None)
            regeling_gewijzigd = Regeling.objects.filter(
                **{
                    'datum_gecreeerd__lte': now,
                    'datum_opgeslagen__gt': now,
                })

            regeling_nieuw_str = [[
                r.titel,
                'https://%s%s' %
                (site.domain, reverse('detail_regeling', kwargs={'pk': r.id}))
            ] for r in regeling_nieuw]
            regeling_gewijzigd_str = [[
                r.titel,
                'https://%s%s' %
                (site.domain, reverse('detail_regeling', kwargs={'pk': r.id}))
            ] for r in regeling_gewijzigd]
            gebruikers_nieuw_str = [[
                r.profiel.naam_volledig,
                'https://%s%s' %
                (site.domain, reverse('detail_contact', kwargs={'pk': r.id}))
            ] for r in gebruikers_nieuw]

            django_engine = engines['django']
            maand = maanden[int(now.strftime('%-m')) - 1]
            subject = 'VraagMij - updates maand %s' % maand
            data = {
                'regeling_nieuw': regeling_nieuw_str,
                'regeling_gewijzigd': regeling_gewijzigd_str,
                'gebruikers_nieuw': gebruikers_nieuw_str,
                'subject': subject,
            }

            for u in User.objects.exclude(profiel=None):
                if u.profiel.hou_me_op_de_hoogte_mail:
                    o = {
                        'naam': u.profiel.naam_volledig,
                        'profiel': u.profiel,
                    }
                    o.update(data)
                    o.update(app_settings())
                    template = django_engine.from_string(
                        site.instelling.update_mail_content)
                    template_html = django_engine.from_string(
                        site.instelling.update_mail_content_html)
                    o.update({
                        'content': template_html.render(o),
                    })
                    body_html = render_to_string('email/update_mail.html', o)
                    body = template.render(o)

                    mail_settings = MailSettings()

                    mail = Mail(
                        from_email=('noreply@%s' % site.domain, 'VraagMij'),
                        to_emails=(u.email, u.profiel.naam_volledig),
                        subject=subject,
                        plain_text_content=body,
                        html_content=body_html,
                    )
                    mail.mail_settings = mail_settings

                    if settings.ENV != 'develop':
                        try:
                            response = sg.send(mail)
                            print(response.status_code)
                            print(response.body)
                            print(response.headers)
                        except Exception as e:
                            print(str(e))
                    else:
                        print(body_html)
Example #22
0
    def handle(self, *args, **options):
        if not options["interactive"]:
            options["hostname"] = options["hostname"] or get_host_name()

        # blank line allows ansible scripts to dump errors cleanly.
        print("                                     ")
        print("   _   __  ___    _     _ _          ")
        print("  | | / / / _ \  | |   (_) |         ")
        print("  | |/ / / /_\ \ | |    _| |_ ___    ")
        print("  |    \ |  _  | | |   | | __/ _ \   ")
        print("  | |\  \| | | | | |___| | ||  __/   ")
        print("  \_| \_/\_| |_/ \_____/_|\__\___|   ")
        print("                                     ")
        print("https://learningequality.org/ka-lite/")
        print("                                     ")
        print("         version %s" % VERSION)
        print("                                     ")

        if sys.version_info >= (2, 8) or sys.version_info < (2, 6):
            raise CommandError(
                "You must have Python version 2.6.x or 2.7.x installed. Your version is: %d.%d.%d\n"
                % sys.version_info[:3])
        if sys.version_info < (2, 7, 9):
            logging.warning(
                "It's recommended that you install Python version 2.7.9. Your version is: %d.%d.%d\n"
                % sys.version_info[:3])
            if sys.version_info < (2, 7):
                warnings.warn(
                    "Support for Python 2.6 will be discontinued in 0.16, please upgrade.",
                    RemovedInKALite_v016_Warning)

        if options["interactive"]:
            print(
                "--------------------------------------------------------------------------------"
            )
            print(
                "This script will configure the database and prepare it for use."
            )
            print(
                "--------------------------------------------------------------------------------"
            )
            raw_input("Press [enter] to continue...")

        # Tried not to be os-specific, but ... hey. :-/
        # benjaoming: This doesn't work, why is 502 hard coded!? Root is normally
        # '0' And let's not care about stuff like this, people can be free to
        # run this as root if they want :)
        if not is_windows() and hasattr(os, "getuid") and os.getuid() == 502:
            print(
                "-------------------------------------------------------------------"
            )
            print("WARNING: You are installing KA-Lite as root user!")
            print(
                "\tInstalling as root may cause some permission problems while running"
            )
            print("\tas a normal user in the future.")
            print(
                "-------------------------------------------------------------------"
            )
            if options["interactive"]:
                if not raw_input_yn(
                        "Do you wish to continue and install it as root?"):
                    raise CommandError("Aborting script.\n")

        git_migrate_path = options["git_migrate_path"]

        if git_migrate_path:
            call_command("gitmigrate",
                         path=git_migrate_path,
                         interactive=options["interactive"])

        # TODO(benjaoming): This is used very loosely, what does it mean?
        # Does it mean that the installation path is clean or does it mean
        # that we should remove (clean) items from a previous installation?
        install_clean = not kalite.is_installed()

        database_kind = settings.DATABASES["default"]["ENGINE"]
        database_file = ("sqlite" in database_kind
                         and settings.DATABASES["default"]["NAME"]) or None

        if database_file and os.path.exists(database_file):
            # We found an existing database file.  By default,
            #   we will upgrade it; users really need to work hard
            #   to delete the file (but it's possible, which is nice).
            print(
                "-------------------------------------------------------------------"
            )
            print("WARNING: Database file already exists!")
            print(
                "-------------------------------------------------------------------"
            )
            if not options["interactive"] \
               or raw_input_yn("Keep database file and upgrade to KA Lite version %s? " % VERSION) \
               or not raw_input_yn("Remove database file '%s' now? " % database_file) \
               or not raw_input_yn("WARNING: all data will be lost!  Are you sure? "):
                install_clean = False
                print("Upgrading database to KA Lite version %s" % VERSION)
            else:
                install_clean = True
                print("OK.  We will run a clean install; ")
                # After all, don't delete--just move.
                print(
                    "the database file will be moved to a deletable location.")

        if not install_clean and not database_file and not kalite.is_installed(
        ):
            # Make sure that, for non-sqlite installs, the database exists.
            raise Exception(
                "For databases not using SQLite, you must set up your database before running setup."
            )

        # Do all input at once, at the beginning
        if install_clean and options["interactive"]:
            if not options["username"] or not options["password"]:
                print(
                    "Please choose a username and password for the admin account on this device."
                )
                print(
                    "\tYou must remember this login information, as you will need"
                )
                print(
                    "\tto enter it to administer this installation of KA Lite."
                )
            (username,
             password) = get_username_password(options["username"],
                                               options["password"])
            email = options["email"]
            (hostname, description) = get_hostname_and_description(
                options["hostname"], options["description"])
        else:
            username = options["username"] = (options["username"] or getattr(
                settings, "INSTALL_ADMIN_USERNAME", None)
                                              or get_clean_default_username())
            password = options["password"] or getattr(
                settings, "INSTALL_ADMIN_PASSWORD", None)
            email = options["email"]  # default is non-empty
            hostname = options["hostname"]
            description = options["description"]

        if username and not validate_username(username):
            raise CommandError(
                "Username must contain only letters, digits, and underscores, and start with a letter.\n"
            )

        ########################
        # Now do stuff
        ########################

        # Clean *pyc files if we are in a git repo
        if settings.IS_SOURCE:
            clean_pyc(settings.SOURCE_DIR)
        else:
            # Because we install dependencies as data_files, we run into problems,
            # namely that the pyc files are left dangling.
            distributed_packages = [
                os.path.join(kalite.ROOT_DATA_PATH, 'dist-packages'),
                os.path.join(kalite.ROOT_DATA_PATH, 'python-packages'),
            ]
            # Try locating django
            for dir_to_clean in distributed_packages:
                clean_pyc(dir_to_clean)

        # Move database file (if exists)
        if install_clean and database_file and os.path.exists(database_file):
            # This is an overwrite install; destroy the old db
            dest_file = tempfile.mkstemp()[1]
            print(
                "(Re)moving database file to temp location, starting clean install. Recovery location: %s"
                % dest_file)
            shutil.move(database_file, dest_file)

        # benjaoming: Commented out, this hits the wrong directories currently
        # and should not be necessary.
        # If we have problems with pyc files, we're doing something else wrong.
        # See https://github.com/learningequality/ka-lite/issues/3487

        # Should clean_pyc for (clean) reinstall purposes
        # call_command("clean_pyc", interactive=False, verbosity=options.get("verbosity"), path=os.path.join(settings.PROJECT_PATH, ".."))

        # Migrate the database
        call_command("syncdb",
                     interactive=False,
                     verbosity=options.get("verbosity"))
        call_command("migrate", merge=True, verbosity=options.get("verbosity"))
        Settings.set("database_version", VERSION)

        # download assessment items
        # This can take a long time and lead to Travis stalling. None of this
        # is required for tests, and does not apply to the central server.
        if options.get("no-assessment-items", False):

            logging.warning(
                "Skipping assessment item downloading and configuration.")

        else:

            call_command("syncdb",
                         interactive=False,
                         verbosity=options.get("verbosity"),
                         database="assessment_items")

            # Outdated location of assessment items - move assessment items from their
            # old location (CONTENT_ROOT/khan where they were mixed with other content
            # items)

            # TODO(benjaoming) for 0.15, remove the "move assessment items"
            # mechanism
            writable_assessment_items = os.access(KHAN_ASSESSMENT_ITEM_ROOT,
                                                  os.W_OK)

            # Remove old assessment items
            if os.path.exists(OLD_ASSESSMENT_ITEMS_LOCATION) and os.access(
                    OLD_ASSESSMENT_ITEMS_LOCATION, os.W_OK):
                logging.info("Deleting old assessment items")
                shutil.rmtree(OLD_ASSESSMENT_ITEMS_LOCATION)

            if writable_assessment_items and options[
                    'force-assessment-item-dl']:
                call_command("unpack_assessment_zip", ASSESSMENT_ITEMS_ZIP_URL)
            elif options['force-assessment-item-dl']:
                raise RuntimeError(
                    "Got force-assessment-item-dl but directory not writable")
            elif not settings.ASSESSMENT_ITEMS_SYSTEM_WIDE and not settings.RUNNING_IN_TRAVIS and options[
                    'interactive']:
                print(
                    "\nStarting in version 0.13, you will need an assessment items package in order to access many of the available exercises."
                )
                print(
                    "If you have an internet connection, you can download the needed package. Warning: this may take a long time!"
                )
                print(
                    "If you have already downloaded the assessment items package, you can specify the file in the next step."
                )
                print("Otherwise, we will download it from {url}.".format(
                    url=ASSESSMENT_ITEMS_ZIP_URL))

                if raw_input_yn(
                        "Do you wish to download the assessment items package now?"
                ):
                    ass_item_filename = ASSESSMENT_ITEMS_ZIP_URL
                elif raw_input_yn(
                        "Have you already downloaded the assessment items package?"
                ):
                    ass_item_filename = get_assessment_items_filename()
                else:
                    ass_item_filename = None

                if not ass_item_filename:
                    logging.warning(
                        "No assessment items package file given. You will need to download and unpack it later."
                    )
                else:
                    call_command("unpack_assessment_zip", ass_item_filename)

            elif options[
                    'interactive'] and not settings.ASSESSMENT_ITEMS_SYSTEM_WIDE:
                logging.warning(
                    "Assessment item directory not writable, skipping download."
                )
            elif not settings.ASSESSMENT_ITEMS_SYSTEM_WIDE:
                logging.warning(
                    "No assessment items package file given. You will need to download and unpack it later."
                )
            else:
                print("Found bundled assessment items")

        # Individually generate any prerequisite models/state that is missing
        if not Settings.get("private_key"):
            call_command("generatekeys", verbosity=options.get("verbosity"))
        if not Device.objects.count():
            call_command("initdevice",
                         hostname,
                         description,
                         verbosity=options.get("verbosity"))
        if not Facility.objects.count():
            Facility.initialize_default_facility()

        # Create the admin user
        # blank password (non-interactive) means don't create a superuser
        if password:
            admin = get_object_or_None(User, username=username)
            if not admin:
                call_command("createsuperuser",
                             username=username,
                             email=email,
                             interactive=False,
                             verbosity=options.get("verbosity"))
                admin = User.objects.get(username=username)
            admin.set_password(password)
            admin.save()

        # Now deploy the static files
        logging.info("Copying static media...")
        call_command("collectstatic", interactive=False, verbosity=0)
        call_command("collectstatic_js_reverse", interactive=False)

        # This is not possible in a distributed env
        if not settings.CENTRAL_SERVER:

            kalite_executable = 'kalite'
            if not spawn.find_executable('kalite'):
                if os.name == 'posix':
                    start_script_path = os.path.realpath(
                        os.path.join(settings.PROJECT_PATH, "..", "bin",
                                     kalite_executable))
                else:
                    start_script_path = os.path.realpath(
                        os.path.join(settings.PROJECT_PATH, "..", "bin",
                                     "windows", "kalite.bat"))
            else:
                start_script_path = kalite_executable

            # Run videoscan, on the distributed server.
            print("Scanning for video files in the content directory (%s)" %
                  settings.CONTENT_ROOT)
            call_command("videoscan")

            # done; notify the user.
            print(
                "\nCONGRATULATIONS! You've finished setting up the KA Lite server software."
            )
            print(
                "You can now start KA Lite with the following command:\n\n\t%s start\n\n"
                % start_script_path)

            if options['interactive']:
                if raw_input_yn("Do you wish to start the server now?"):
                    print("Running {0} start".format(start_script_path))
                    p = subprocess.Popen([start_script_path, "start"],
                                         env=os.environ)
                    p.wait()
Example #23
0
    def handle(self, **options):
        force_exec = options.get('force_exec')
        source_address = options.get('source_address')
        target_address = options.get('target_address')

        if not source_address or len(source_address) == 0:
            raise CommandError(
                "Source Address '--source-address' is mandatory")

        if not target_address or len(target_address) == 0:
            raise CommandError(
                "Target Address '--target-address' is mandatory")

        print(f"This will change all Maps, Layers, \
Styles and Links Base URLs from [{source_address}] to [{target_address}].")
        print(
            "The operation may take some time, depending on the amount of Layer on GeoNode."
        )
        message = 'Do you want to proceed?'

        if force_exec or helpers.confirm(prompt=message, resp=False):
            try:
                _cnt = Map.objects.filter(
                    thumbnail_url__icontains=source_address).update(
                        thumbnail_url=Func(F('thumbnail_url'),
                                           Value(source_address),
                                           Value(target_address),
                                           function='replace'))
                logger.info(f"Updated {_cnt} Maps")

                _cnt = MapLayer.objects.filter(
                    ows_url__icontains=source_address).update(
                        ows_url=Func(F('ows_url'),
                                     Value(source_address),
                                     Value(target_address),
                                     function='replace'))
                MapLayer.objects.filter(
                    layer_params__icontains=source_address).update(
                        layer_params=Func(F('layer_params'),
                                          Value(source_address),
                                          Value(target_address),
                                          function='replace'))
                logger.info(f"Updated {_cnt} MapLayers")

                _cnt = Layer.objects.filter(
                    thumbnail_url__icontains=source_address).update(
                        thumbnail_url=Func(F('thumbnail_url'),
                                           Value(source_address),
                                           Value(target_address),
                                           function='replace'))
                logger.info(f"Updated {_cnt} Layers")

                _cnt = Style.objects.filter(
                    sld_url__icontains=source_address).update(
                        sld_url=Func(F('sld_url'),
                                     Value(source_address),
                                     Value(target_address),
                                     function='replace'))
                logger.info(f"Updated {_cnt} Styles")

                _cnt = Link.objects.filter(
                    url__icontains=source_address).update(
                        url=Func(F('url'),
                                 Value(source_address),
                                 Value(target_address),
                                 function='replace'))
                logger.info(f"Updated {_cnt} Links")

                _cnt = ResourceBase.objects.filter(
                    thumbnail_url__icontains=source_address).update(
                        thumbnail_url=Func(F('thumbnail_url'),
                                           Value(source_address),
                                           Value(target_address),
                                           function='replace'))
                _cnt += ResourceBase.objects.filter(
                    csw_anytext__icontains=source_address).update(
                        csw_anytext=Func(F('csw_anytext'),
                                         Value(source_address),
                                         Value(target_address),
                                         function='replace'))
                _cnt += ResourceBase.objects.filter(
                    metadata_xml__icontains=source_address).update(
                        metadata_xml=Func(F('metadata_xml'),
                                          Value(source_address),
                                          Value(target_address),
                                          function='replace'))
                logger.info(f"Updated {_cnt} ResourceBases")

                site = Site.objects.get_current()
                if site:
                    site.name = site.name.replace(source_address,
                                                  target_address)
                    site.domain = site.domain.replace(source_address,
                                                      target_address)
                    site.save()
                    print("Updated 1 Site")

                if check_ogc_backend(geoserver.BACKEND_PACKAGE):
                    if Application.objects.filter(name='GeoServer').exists():
                        _cnt = Application.objects.filter(
                            name='GeoServer').update(
                                redirect_uris=Func(F('redirect_uris'),
                                                   Value(source_address),
                                                   Value(target_address),
                                                   function='replace'))
                        logger.info(f"Updated {_cnt} OAUth2 Redirect URIs")

            finally:
                print("...done!")
Example #24
0
    def handle(self, **options):
        # Pick up the options
        database = options["database"]
        if database not in settings.DATABASES:
            raise CommandError("No database settings known for '%s'" % database)
        if options["user"]:
            try:
                user = User.objects.all().using(database).get(username=options["user"])
            except Exception:
                raise CommandError("User '%s' not found" % options["user"])
        else:
            user = None

        now = datetime.now()
        task = None
        try:
            # Initialize the task
            setattr(_thread_locals, "database", database)
            if "task" in options and options["task"]:
                try:
                    task = Task.objects.all().using(database).get(pk=options["task"])
                except Exception:
                    raise CommandError("Task identifier not found")
                if (
                    task.started
                    or task.finished
                    or task.status != "Waiting"
                    or task.name not in ("frepple_backup", "backup")
                ):
                    raise CommandError("Invalid task identifier")
                task.status = "0%"
                task.started = now
            else:
                task = Task(
                    name="backup", submitted=now, started=now, status="0%", user=user
                )

            # Choose the backup file name
            backupfile = now.strftime("database.%s.%%Y%%m%%d.%%H%%M%%S.dump" % database)
            task.message = "Backup to file %s" % backupfile

            # Run the backup command
            # Commenting the next line is a little more secure, but requires you to
            # create a .pgpass file.
            os.environ["PGPASSWORD"] = settings.DATABASES[database]["PASSWORD"]
            args = [
                "pg_dump",
                "-Fc",
                "-w",
                "--username=%s" % settings.DATABASES[database]["USER"],
                "--file=%s"
                % os.path.abspath(os.path.join(settings.FREPPLE_LOGDIR, backupfile)),
            ]
            if settings.DATABASES[database]["HOST"]:
                args.append("--host=%s" % settings.DATABASES[database]["HOST"])
            if settings.DATABASES[database]["PORT"]:
                args.append("--port=%s" % settings.DATABASES[database]["PORT"])
            args.append(settings.DATABASES[database]["NAME"])
            with subprocess.Popen(args) as p:
                try:
                    task.processid = p.pid
                    task.save(using=database)
                    p.wait()
                except Exception:
                    p.kill()
                    p.wait()
                    raise Exception("Run of run pg_dump failed")

            # Task update
            task.processid = None
            task.status = "99%"
            task.save(using=database)

            # Delete backups older than a month
            pattern = re.compile("database.*.*.*.dump")
            for f in os.listdir(settings.FREPPLE_LOGDIR):
                if os.path.isfile(os.path.join(settings.FREPPLE_LOGDIR, f)):
                    # Note this is NOT 100% correct on UNIX. st_ctime is not always the creation date...
                    created = datetime.fromtimestamp(
                        os.stat(os.path.join(settings.FREPPLE_LOGDIR, f)).st_ctime
                    )
                    if pattern.match(f) and (now - created).days > 31:
                        try:
                            os.remove(os.path.join(settings.FREPPLE_LOGDIR, f))
                        except Exception:
                            pass

            # Task update
            task.status = "Done"
            task.finished = datetime.now()
            task.processid = None

        except Exception as e:
            if task:
                task.status = "Failed"
                task.message = "%s" % e
                task.finished = datetime.now()
                task.processid = None
            raise e

        finally:
            if task:
                task.save(using=database)
            setattr(_thread_locals, "database", None)
Example #25
0
    def handle(self, **options):
        # Pick up the options
        self.database = options['database']
        if self.database not in settings.DATABASES:
            raise CommandError("No database settings known for '%s'" %
                               self.database)
        if options['user']:
            try:
                self.user = User.objects.all().using(
                    self.database).get(username=options['user'])
            except Exception:
                raise CommandError("User '%s' not found" % options['user'])
        else:
            try:
                self.user = User.objects.all().using(
                    self.database).filter(is_superuser=True)[0]
            except Exception:
                self.user = None

        now = datetime.now()

        task = None
        self.logfile = None
        errors = 0
        try:
            # Initialize the task
            if options['task']:
                try:
                    task = Task.objects.all().using(
                        self.database).get(pk=options['task'])
                except Exception:
                    raise CommandError("Task identifier not found")
                if task.started or task.finished or task.status != "Waiting" or task.name != 'import from folder':
                    raise CommandError("Invalid task identifier")
                task.status = '0%'
                task.started = now
            else:
                task = Task(name='import from folder',
                            submitted=now,
                            started=now,
                            status='0%',
                            user=self.user)
            task.save(using=self.database)

            # Choose the right self.delimiter and language
            self.delimiter = ';' if get_format(
                'DECIMAL_SEPARATOR', settings.LANGUAGE_CODE, True) == ',' else ','
            translation.activate(settings.LANGUAGE_CODE)

            # Execute
            if os.path.isdir(
                    settings.DATABASES[self.database]['FILEUPLOADFOLDER']):

                # Open the logfile
                self.logfile = open(
                    os.path.join(
                        settings.DATABASES[self.database]['FILEUPLOADFOLDER'],
                        'importfromfolder.log'), "a")
                print("%s Started import from folder\n" % datetime.now(),
                      file=self.logfile,
                      flush=True)

                all_models = [(ct.model_class(), ct.pk)
                              for ct in ContentType.objects.all()
                              if ct.model_class()]
                models = []
                for ifile in os.listdir(
                        settings.DATABASES[self.database]['FILEUPLOADFOLDER']):
                    if not ifile.lower().endswith(
                            '.csv') and not ifile.lower().endswith('.csv.gz'):
                        continue
                    filename0 = ifile.split('.')[0]

                    model = None
                    contenttype_id = None
                    for m, ct in all_models:
                        if filename0.lower() in (
                                m._meta.model_name.lower(),
                                m._meta.verbose_name.lower(),
                                m._meta.verbose_name_plural.lower()):
                            model = m
                            contenttype_id = ct
                            print("%s Matched a model to file: %s" %
                                  (datetime.now(), ifile),
                                  file=self.logfile,
                                  flush=True)
                            break

                    if not model or model in EXCLUDE_FROM_BULK_OPERATIONS:
                        print("%s Ignoring data in file: %s" %
                              (datetime.now(), ifile),
                              file=self.logfile,
                              flush=True)
                    elif self.user and not self.user.has_perm(
                            '%s.%s' %
                        (model._meta.app_label,
                         get_permission_codename('add', model._meta))):
                        # Check permissions
                        print("%s You don't have permissions to add: %s" %
                              (datetime.now(), ifile),
                              file=self.logfile,
                              flush=True)
                    else:
                        deps = set([model])
                        GridReport.dependent_models(model, deps)

                        models.append((ifile, model, contenttype_id, deps))

                # Sort the list of models, based on dependencies between models
                models = GridReport.sort_models(models)

                i = 0
                cnt = len(models)
                for ifile, model, contenttype_id, dependencies in models:
                    task.status = str(int(10 + i / cnt * 80)) + '%'
                    task.message = 'Processing data file %s' % ifile
                    task.save(using=self.database)
                    i += 1
                    filetoparse = os.path.join(
                        os.path.abspath(settings.DATABASES[self.database]
                                        ['FILEUPLOADFOLDER']), ifile)
                    print("%s Started processing data in file: %s" %
                          (datetime.now(), ifile),
                          file=self.logfile,
                          flush=True)
                    errors += self.parseCSVloadfromfolder(model, filetoparse)

            else:
                errors += 1
                cnt = 0
                print("%s Failed, folder does not exist" % datetime.now(),
                      file=self.logfile,
                      flush=True)

            # Task update
            if errors:
                task.status = 'Failed'
                if not cnt:
                    task.message = "Destination folder does not exist"
                else:
                    task.message = "Uploaded %s data files with %s errors" % (
                        cnt, errors)
            else:
                task.status = 'Done'
                task.message = "Uploaded %s data files" % cnt
            task.finished = datetime.now()

        except KeyboardInterrupt:
            if task:
                task.status = 'Cancelled'
                task.message = 'Cancelled'
            if self.logfile:
                print('%s Cancelled\n' % datetime.now(),
                      file=self.logfile,
                      flush=True)

        except Exception as e:
            print("%s Failed" % datetime.now(), file=self.logfile, flush=True)
            if task:
                task.status = 'Failed'
                task.message = '%s' % e
            raise e

        finally:
            if task:
                if not errors:
                    task.status = '100%'
                else:
                    task.status = 'Failed'
            task.finished = datetime.now()
            task.save(using=self.database)
            if self.logfile:
                print('%s End of import from folder\n' % datetime.now(),
                      file=self.logfile,
                      flush=True)
                self.logfile.close()
Example #26
0
    def handle(self, *labels, **options):
        verbosity = int(options.get('verbosity'))

        # Django 1.4 compatibility fix
        stdout = options.get('stdout', None)
        stdout = stdout if stdout else sys.stdout

        stderr = options.get('stderr', None)
        stderr = stderr if stderr else sys.stderr

        if not labels:
            self.print_help('thumbnail', '')
            sys.exit(1)

        if len(labels) != 1:
            raise CommandError('`%s` is not a valid argument' % labels)

        label = labels[0]

        if label not in [
                'cleanup', 'clear', 'clear_delete_referenced',
                'clear_delete_all'
        ]:
            raise CommandError('`%s` unknown action' % label)

        if label == 'cleanup':
            if verbosity >= 1:
                print("Cleanup thumbnails", end=' ... ', file=stdout)

            default.kvstore.cleanup()

            if verbosity >= 1:
                print("[Done]", file=stdout)

            return

        if label == 'clear_delete_referenced':
            if verbosity >= 1:
                print("Delete all thumbnail files referenced in " +
                      "Key Value Store",
                      end=' ... ',
                      file=stdout)

            default.kvstore.delete_all_thumbnail_files()

            if verbosity >= 1:
                print('[Done]', file=stdout)

        if verbosity >= 1:
            print("Clear the Key Value Store", end=' ... ', file=stdout)

        default.kvstore.clear()

        if verbosity >= 1:
            print('[Done]', file=stdout)

        if label == 'clear_delete_all':
            if verbosity >= 1:
                print("Delete all thumbnail files in THUMBNAIL_PREFIX",
                      end=' ... ',
                      file=stdout)

            delete_all_thumbnails()

            if verbosity >= 1:
                print('[Done]', file=stdout)
Example #27
0
    def handle(self, app_or_project, name, target=None, **options):
        self.app_or_project = app_or_project
        self.a_or_an = 'an' if app_or_project == 'config' else 'a'
        self.paths_to_remove = []
        self.verbosity = options['verbosity']

        self.validate_name(name)

        # if some directory is given, make sure it's nicely expanded
        if target is None:
            top_dir = os.path.join(os.getcwd(), name)
            try:
                os.makedirs(top_dir)
            except FileExistsError:
                raise CommandError("'%s' already exists" % top_dir)
            except OSError as e:
                raise CommandError(e)
        else:
            if app_or_project == 'config':
                self.validate_name(os.path.basename(target), 'directory')
            top_dir = os.path.abspath(os.path.expanduser(target))
            if not os.path.exists(top_dir):
                raise CommandError("Destination directory '%s' does not "
                                   "exist, please create it first." % top_dir)

        extensions = tuple(handle_extensions(options['extensions']))
        extra_files = []
        for file in options['files']:
            extra_files.extend(map(lambda x: x.strip(), file.split(',')))
        if self.verbosity >= 2:
            self.stdout.write(
                'Rendering %s template files with extensions: %s' %
                (app_or_project, ', '.join(extensions)))
            self.stdout.write(
                'Rendering %s template files with filenames: %s' %
                (app_or_project, ', '.join(extra_files)))
        base_name = '%s_name' % app_or_project
        base_subdir = '%s_template' % app_or_project
        base_directory = '%s_directory' % app_or_project
        camel_case_name = 'camel_case_%s_name' % app_or_project
        camel_case_value = ''.join(x for x in name.title() if x != '_')

        context = Context(
            {
                **options,
                base_name: name,
                base_directory: top_dir,
                camel_case_name: camel_case_value,
                'docs_version': get_docs_version(),
                'django_version': django.__version__,
            },
            autoescape=False)

        # Set up a stub settings environment for template rendering
        if not settings.configured:
            settings.configure()
            django.setup()

        template_dir = self.handle_template(options['template'], base_subdir)
        prefix_length = len(template_dir) + 1

        for root, dirs, files in os.walk(template_dir):

            path_rest = root[prefix_length:]
            relative_dir = path_rest.replace(base_name, name)
            if relative_dir:
                target_dir = os.path.join(top_dir, relative_dir)
                os.makedirs(target_dir, exist_ok=True)

            for dirname in dirs[:]:
                if dirname.startswith('.') or dirname == '__pycache__':
                    dirs.remove(dirname)

            for filename in files:
                if filename.endswith(('.pyo', '.pyc', '.py.class')):
                    # Ignore some files as they cause various breakages.
                    continue
                old_path = os.path.join(root, filename)
                new_path = os.path.join(top_dir, relative_dir,
                                        filename.replace(base_name, name))
                for old_suffix, new_suffix in self.rewrite_template_suffixes:
                    if new_path.endswith(old_suffix):
                        new_path = new_path[:-len(old_suffix)] + new_suffix
                        break  # Only rewrite once

                if os.path.exists(new_path):
                    raise CommandError(
                        "%s already exists. Overlaying %s %s into an existing "
                        "directory won't replace conflicting files." % (
                            new_path,
                            self.a_or_an,
                            app_or_project,
                        ))

                # Only render the Python files, as we don't want to
                # accidentally render Django templates files
                if new_path.endswith(extensions) or filename in extra_files:
                    with open(old_path, encoding='utf-8') as template_file:
                        content = template_file.read()
                    template = Engine().from_string(content)
                    content = template.render(context)
                    with open(new_path, 'w', encoding='utf-8') as new_file:
                        new_file.write(content)
                else:
                    shutil.copyfile(old_path, new_path)

                if self.verbosity >= 2:
                    self.stdout.write('Creating %s' % new_path)
                try:
                    shutil.copymode(old_path, new_path)
                    self.make_writeable(new_path)
                except OSError:
                    self.stderr.write(
                        "Notice: Couldn't set permission bits on %s. You're "
                        "probably using an uncommon filesystem setup. No "
                        "problem." % new_path, self.style.NOTICE)

        if self.paths_to_remove:
            if self.verbosity >= 2:
                self.stdout.write('Cleaning up temporary files.')
            for path_to_remove in self.paths_to_remove:
                if os.path.isfile(path_to_remove):
                    os.remove(path_to_remove)
                else:
                    shutil.rmtree(path_to_remove)
Example #28
0
    def parse_options(self, repo, options):
        """Parse parameters"""
        self.filemask = options['filemask']
        self.vcs = options['vcs']
        if options['push_url_same']:
            self.push_url = repo
        else:
            self.push_url = options['push_url']
        self.file_format = options['file_format']
        self.language_regex = options['language_regex']
        self.main_component = options['main_component']
        self.name_template = options['name_template']
        if '%s' in self.name_template:
            self.name_template = self.name_template.replace(
                '%s', '{{ component }}'
            )
        self.license = options['license']
        self.license_url = options['license_url']
        self.push_on_commit = options['push_on_commit']
        self.base_file_template = options['base_file_template']
        self.new_base_template = options['new_base_template']
        if '%s' in self.base_file_template:
            self.base_file_template = self.base_file_template.replace(
                '%s', '{{ component }}'
            )

        # Is file format supported?
        if self.file_format not in FILE_FORMATS:
            raise CommandError(
                'Invalid file format: {0}'.format(options['file_format'])
            )

        # Is vcs supported?
        if self.vcs not in VCS_REGISTRY:
            raise CommandError(
                'Invalid vcs: {0}'.format(options['vcs'])
            )

        # Do we have a correct mask?
        # - if there is **, it's a simple glob mask (** is invalid in a regexp)
        # - otherwise validate it as a regexp
        if '**' in self.filemask and '*' in self.filemask.replace('**', ''):
            match = re.escape(self.filemask)
            match = match.replace(r'\*\*', '(?P<component>[[WILDCARD]])', 1)
            match = match.replace(r'\*\*', '(?P=component)')
            match = match.replace(r'\*', '(?P<language>[[WILDCARD]])', 1)
            match = match.replace(r'\*', '(?P=language)')
            match = match.replace('[[WILDCARD]]', '[^/]*')
            self.filemask = match
        else:
            try:
                compiled = re.compile(self.filemask)
            except re.error as error:
                raise CommandError(
                    'Failed to compile regular expression "{0}": {1}'.format(
                        self.filemask, error
                    )
                )
            if ('component' not in compiled.groupindex or
                    'language' not in compiled.groupindex):
                raise CommandError(
                    'Component regular expression lacks named group '
                    '"component" and/or "language"'
                )
Example #29
0
    def handle(self, *args, **options):
        if len(args) < 1:
            raise CommandError('At least one "package_name" is required!')

        objs = []
        argsIter = iter(args)
        if options['all']:
            argsIter = ['all']
        for package in argsIter:
            if options['all']:
                wids = AbstractWidget.objects.all()
            else:
                wids = AbstractWidget.objects.filter(package=package)
            inps = AbstractInput.objects.filter(
                widget__in=[x.id for x in wids])
            outs = AbstractOutput.objects.filter(
                widget__in=[x.id for x in wids])
            opts = AbstractOption.objects.filter(
                abstract_input__in=[x.id for x in inps])
            cats = Category.objects.filter(
                id__in=[x.category.id for x in wids])

            # retrieve all parent categories
            catNum = len(cats)
            while True:
                cats = cats | Category.objects.filter(
                    id__in=[x.parent.id for x in cats if x.parent is not None])
                if catNum == len(cats):
                    break
                else:
                    catNum = len(cats)

            objs.extend(cats)
            objs.extend(wids)
            objs.extend(outs)
            objs.extend(inps)
            objs.extend(opts)

            if len(wids) > 0:
                self.stdout.write('Package "%s" contains:\n' % package)
                self.stdout.write('    % 4i AbstractWidget(s)\n' % len(wids))
                self.stdout.write('    % 4i AbstractInput(s)\n' % len(inps))
                self.stdout.write('    % 4i AbstractOutput(s)\n' % len(outs))
                self.stdout.write('    % 4i AbstractOption(s)\n' % len(opts))
                self.stdout.write('    % 4i Category(s)\n' % len(cats))
            else:
                self.stdout.write('Package "%s" was not found!\n' % package)

        # note: uid is changed on these instances in memory and is only written to the database when the renew option is set
        for a in objs:
            a.uid = str(uuid.uuid4())
            if options['renew']:
                a.save()

        self.stdout.write('UID renew procedure successfully finished. ')
        if options['renew']:
            self.stdout.write('Database was modified!\n')
        else:
            self.stdout.write(
                'Database was NOT modified! See the command help, especially the YES_SAVE_TO_DB option.\n'
            )
Example #30
0
    def handle(self, *args, **options):
        try:
            contest = Contest.objects.get(id=options['contest_id'])
        except Contest.DoesNotExist:
            raise CommandError(_("Contest %s does not exist") % options['contest_id'])

        rcontroller = contest.controller.registration_controller()
        print(rcontroller)
        if not issubclass(
            getattr(rcontroller, 'participant_admin', None),
            OnsiteRegistrationParticipantAdmin,
        ):
            raise CommandError(_("Wrong type of contest"))

        arg = options['filename_or_url']

        if arg.startswith('http://') or arg.startswith('https://'):
            self.stdout.write(_("Fetching %s...\n") % (arg,))
            stream = six.moves.urllib.request.urlopen(arg)
        else:
            if not os.path.exists(arg):
                raise CommandError(_("File not found: ") + arg)
            stream = open(arg, 'r')

        reader = csv.reader(stream)
        header = next(reader)
        if header != self.COLUMNS:
            raise CommandError(
                _(
                    "Missing header or invalid columns: "
                    "%(header)s\nExpected: %(expected)s"
                )
                % {'header': ', '.join(header), 'expected': ', '.join(self.COLUMNS)}
            )

        with transaction.atomic():
            ok = True
            all_count = 0
            for row in reader:
                all_count += 1

                for i, _column in enumerate(self.COLUMNS):
                    if not isinstance(row[i], six.text_type):
                        row[i] = row[i].decode('utf8')

                try:
                    user = User.objects.get(username=row[1])
                    region = Region.objects.get(short_name=row[2], contest=contest)

                    participant, created = Participant.objects.get_or_create(
                        contest=contest, user=user
                    )

                    reg = OnsiteRegistration(
                        participant=participant,
                        number=row[0],
                        local_number=row[3],
                        region=region,
                    )

                    reg.full_clean()
                    reg.save()
                except User.DoesNotExist:
                    self.stdout.write(
                        _("Error for user=%(user)s: user does" " not exist\n")
                        % {'user': row[1]}
                    )
                    ok = False
                except Region.DoesNotExist:
                    self.stdout.write(
                        _("Error for user=%(user)s: region %(region)s does not exist\n")
                        % {'user': row[1], 'region': row[2]}
                    )
                    ok = False
                except DatabaseError as e:
                    # Render the error as text; the exact message encoding is
                    # not guaranteed, but this is much better than ascii.
                    message = six.text_type(e)
                    self.stdout.write(
                        _("DB Error for user=%(user)s: %(message)s\n")
                        % {'user': row[1], 'message': message}
                    )
                    ok = False
                except ValidationError as e:
                    for k, v in six.iteritems(e.message_dict):
                        for message in v:
                            if k == '__all__':
                                self.stdout.write(
                                    _("Error for user=%(user)s: %(message)s\n")
                                    % {'user': row[1], 'message': message}
                                )
                            else:
                                self.stdout.write(
                                    _(
                                        "Error for user=%(user)s, "
                                        "field %(field)s: %(message)s\n"
                                    )
                                    % {'user': row[1], 'field': k, 'message': message}
                                )
                    ok = False

            if ok:
                self.stdout.write(_("Processed %d entries") % (all_count))
            else:
                raise CommandError(
                    _("There were some errors. Database not changed.\n")
                )
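
The import above uses a standard Django pattern: perform all writes inside transaction.atomic() and raise at the end if any row failed, so the whole batch rolls back. A minimal, self-contained sketch of that pattern (the command and its import_row helper are placeholders, not from the source):

    from django.core.management.base import BaseCommand, CommandError
    from django.db import transaction

    class Command(BaseCommand):
        help = 'All-or-nothing import sketch'

        def handle(self, *args, **options):
            ok = True
            with transaction.atomic():
                for row in options.get('rows', []):
                    try:
                        self.import_row(row)  # placeholder per-row import
                    except ValueError:
                        self.stdout.write('Error for row: %s\n' % (row,))
                        ok = False
                if not ok:
                    # Raising inside the atomic block rolls back every
                    # write performed above.
                    raise CommandError('There were some errors. Database not changed.')
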
Example #31
0
    def import_initial(self, project, repo, branch):
        """Import the first repository of a project"""
        # Checkout git to temporary dir
        workdir = self.checkout_tmp(project, repo, branch)
        # Create fake discovery without existing component
        discovery = self.get_discovery(None, workdir)

        components = project.component_set.all()

        component = None

        # Create first component (this one will get full git repo)
        if self.main_component:
            match = None
            for match in discovery.matched_components.values():
                if match['slug'] == self.main_component:
                    break
            if match is None or match['slug'] != self.main_component:
                raise CommandError(
                    'Specified --main-component was not found in matches!'
                )
        else:
            # Check whether a matching component already exists
            for match in discovery.matched_components.values():
                try:
                    component = components.get(
                        repo=repo, filemask=match['mask']
                    )
                    break
                except Component.DoesNotExist:
                    continue
            # Fall back to the first discovered match
            if component is None:
                match = list(discovery.matched_components.values())[0]

        try:
            if component is None:
                component = components.get(slug=match['slug'])
            self.logger.warning(
                'Component %s already exists, skipping and using it '
                'as a main component',
                match['slug']
            )
            shutil.rmtree(workdir)
        except Component.DoesNotExist:
            self.logger.info(
                'Creating component %s as main one', match['slug']
            )

            # Rename gitrepository to new name
            os.rename(workdir, os.path.join(project.full_path, match['slug']))

            # Create new component
            component = discovery.create_component(
                None,
                match,
                project=project,
                repo=repo,
                branch=branch,
                vcs=self.vcs,
                push_on_commit=self.push_on_commit,
                license=self.license,
                license_url=self.license_url,
            )

        return component
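
The selection above tries three sources for the main component in order: an explicit --main-component value, an existing component whose filemask matches a discovered one, and finally the first discovered match. A dependency-free sketch of that precedence (function and argument names are illustrative, not Weblate's API):

    def pick_main(discovered, existing_slugs, preferred=None):
        """Pick the match to use as the main component.

        discovered: dict of slug -> match dict, in discovery order.
        existing_slugs: slugs of components that already exist.
        preferred: optional slug requested explicitly.
        """
        if preferred is not None:
            if preferred not in discovered:
                raise ValueError('preferred slug not found in matches')
            return discovered[preferred]
        for slug, match in discovered.items():
            if slug in existing_slugs:
                return match  # reuse the existing component's match
        return next(iter(discovered.values()))  # first discovered

    matches = {'app': {'slug': 'app'}, 'docs': {'slug': 'docs'}}
    assert pick_main(matches, existing_slugs={'docs'})['slug'] == 'docs'
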
Example #32
0
def call_command(command_name, *args, **options):
    """
    Call the given command, with the given options and args/kwargs.

    This is the primary API you should use for calling specific commands.

    `command_name` may be a string or a command object. Using a string is
    preferred unless the command object is required for further processing or
    testing.

    Some examples:
        call_command('migrate')
        call_command('shell', plain=True)
        call_command('sqlmigrate', 'myapp')

        from django.core.management.commands import flush
        cmd = flush.Command()
        call_command(cmd, verbosity=0, interactive=False)
        # Do something with cmd ...
    """
    if isinstance(command_name, BaseCommand):
        # Command object passed in.
        command = command_name
        command_name = command.__class__.__module__.split('.')[-1]
    else:
        # Load the command object by name.
        try:
            app_name = get_commands()[command_name]
        except KeyError:
            raise CommandError("Unknown command: %r" % command_name)

        if isinstance(app_name, BaseCommand):
            # If the command is already loaded, use it directly.
            command = app_name
        else:
            command = load_command_class(app_name, command_name)

    # Simulate argument parsing to get the option defaults (see #10080 for details).
    parser = command.create_parser('', command_name)
    # Use the `dest` option name from the parser option
    opt_mapping = {
        min(s_opt.option_strings).lstrip('-').replace('-', '_'): s_opt.dest
        for s_opt in parser._actions if s_opt.option_strings
    }
    arg_options = {
        opt_mapping.get(key, key): value
        for key, value in options.items()
    }
    parse_args = [str(a) for a in args]
    # Any required arguments which are passed in via **options must be passed
    # to parse_args().
    parse_args += [
        '{}={}'.format(min(opt.option_strings), arg_options[opt.dest])
        for opt in parser._actions if opt.required and opt.dest in options
    ]
    defaults = parser.parse_args(args=parse_args)
    defaults = dict(defaults._get_kwargs(), **arg_options)
    # Raise an error if any unknown options were passed.
    stealth_options = set(command.base_stealth_options +
                          command.stealth_options)
    dest_parameters = {action.dest for action in parser._actions}
    valid_options = (dest_parameters | stealth_options).union(opt_mapping)
    unknown_options = set(options) - valid_options
    if unknown_options:
        raise TypeError("Unknown option(s) for %s command: %s. "
                        "Valid options are: %s." % (
                            command_name,
                            ', '.join(sorted(unknown_options)),
                            ', '.join(sorted(valid_options)),
                        ))
    # Move positional args out of options to mimic legacy optparse
    args = defaults.pop('args', ())
    if 'skip_checks' not in options:
        defaults['skip_checks'] = True

    return command.execute(*args, **defaults)
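
Note the block that appends required options to parse_args: it lets a caller supply an argument declared with required=True as a keyword instead of an argv string. A short usage sketch (the second command and its option are hypothetical):

    from django.core.management import call_command

    # Positional args become argv strings; keyword options are mapped to
    # the parser's dest names.
    call_command('migrate', 'myapp', verbosity=0, interactive=False)

    # An option declared with required=True in add_arguments() can still
    # be passed as a kwarg; call_command forwards it to the parser.
    call_command('import_data', source='/tmp/data.csv')  # hypothetical command
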
Example #33
0
    def handle(self, *args, **options):

        errors = []

        module_store = modulestore()

        print("Starting Swap from Auto Track Cohort Pilot command")

        verified_track_cohorts_setting = self._latest_settings()

        if not verified_track_cohorts_setting:
            raise CommandError("No MigrateVerifiedTrackCohortsSetting found")

        if not verified_track_cohorts_setting.enabled:
            raise CommandError(
                "No enabled MigrateVerifiedTrackCohortsSetting found")

        old_course_key = verified_track_cohorts_setting.old_course_key
        rerun_course_key = verified_track_cohorts_setting.rerun_course_key
        audit_cohort_names = verified_track_cohorts_setting.get_audit_cohort_names()

        # Verify that the MigrateVerifiedTrackCohortsSetting has all required fields
        if not old_course_key:
            raise CommandError(
                u"No old_course_key set for MigrateVerifiedTrackCohortsSetting with ID: '%s'"
                % verified_track_cohorts_setting.id)

        if not rerun_course_key:
            raise CommandError(
                u"No rerun_course_key set for MigrateVerifiedTrackCohortsSetting with ID: '%s'"
                % verified_track_cohorts_setting.id)

        if not audit_cohort_names:
            raise CommandError(
                u"No audit_cohort_names set for MigrateVerifiedTrackCohortsSetting with ID: '%s'"
                % verified_track_cohorts_setting.id)

        print(
            u"Running for MigrateVerifiedTrackCohortsSetting with old_course_key='%s' and rerun_course_key='%s'"
            % (verified_track_cohorts_setting.old_course_key,
               verified_track_cohorts_setting.rerun_course_key))

        # Get the CourseUserGroup IDs for the audit course names from the old course
        audit_course_user_group_ids = CourseUserGroup.objects.filter(
            name__in=audit_cohort_names,
            course_id=old_course_key,
            group_type=CourseUserGroup.COHORT,
        ).values_list('id', flat=True)

        if not audit_course_user_group_ids:
            raise CommandError(
                u"No Audit CourseUserGroup found for course_id='%s' with group_type='%s' for names='%s'"
                % (old_course_key, CourseUserGroup.COHORT, audit_cohort_names))

        # Get all of the audit CourseCohorts from the above IDs that are RANDOM
        random_audit_course_user_group_ids = CourseCohort.objects.filter(
            course_user_group_id__in=audit_course_user_group_ids,
            assignment_type=CourseCohort.RANDOM).values_list(
                'course_user_group_id', flat=True)

        if not random_audit_course_user_group_ids:
            raise CommandError(
                u"No Audit CourseCohorts found for course_user_group_ids='%s' with assignment_type='%s'"
                % (audit_course_user_group_ids, CourseCohort.RANDOM))

        # Get the CourseUserGroupPartitionGroup for the above IDs, these contain the partition IDs and group IDs
        # that are set for group_access inside of modulestore
        random_audit_course_user_group_partition_groups = list(
            CourseUserGroupPartitionGroup.objects.filter(
                course_user_group_id__in=random_audit_course_user_group_ids))

        if not random_audit_course_user_group_partition_groups:
            raise CommandError(
                u"No Audit CourseUserGroupPartitionGroup found for course_user_group_ids='%s'"
                % random_audit_course_user_group_ids)

        # Get the single VerifiedTrackCohortedCourse for the old course
        try:
            verified_track_cohorted_course = VerifiedTrackCohortedCourse.objects.get(
                course_key=old_course_key)
        except VerifiedTrackCohortedCourse.DoesNotExist:
            raise CommandError(
                u"No VerifiedTrackCohortedCourse found for course: '%s'" %
                old_course_key)

        if not verified_track_cohorted_course.enabled:
            raise CommandError(
                u"VerifiedTrackCohortedCourse not enabled for course: '%s'" %
                old_course_key)

        # Get the single CourseUserGroupPartitionGroup for the verified_track
        # based on the verified_track name for the old course
        try:
            verified_course_user_group = CourseUserGroup.objects.get(
                course_id=old_course_key,
                group_type=CourseUserGroup.COHORT,
                name=verified_track_cohorted_course.verified_cohort_name)
        except CourseUserGroup.DoesNotExist:
            raise CommandError(
                u"No Verified CourseUserGroup found for course_id='%s' with group_type='%s' for names='%s'"
                % (old_course_key, CourseUserGroup.COHORT,
                   verified_track_cohorted_course.verified_cohort_name))

        try:
            verified_course_user_group_partition_group = CourseUserGroupPartitionGroup.objects.get(
                course_user_group_id=verified_course_user_group.id)
        except CourseUserGroupPartitionGroup.DoesNotExist:
            raise CommandError(
                u"No Verified CourseUserGroupPartitionGroup found for course_user_group_id='%s'"
                % verified_course_user_group.id)

        # Verify the enrollment track CourseModes exist for the new course
        try:
            CourseMode.objects.get(course_id=rerun_course_key,
                                   mode_slug=CourseMode.AUDIT)
        except CourseMode.DoesNotExist:
            raise CommandError(
                u"Audit CourseMode is not defined for course: '%s'" %
                rerun_course_key)

        try:
            CourseMode.objects.get(course_id=rerun_course_key,
                                   mode_slug=CourseMode.VERIFIED)
        except CourseMode.DoesNotExist:
            raise CommandError(
                u"Verified CourseMode is not defined for course: '%s'" %
                rerun_course_key)

        items = module_store.get_items(rerun_course_key)
        if not items:
            raise CommandError(u"Items for Course with key '%s' not found." %
                               rerun_course_key)

        items_to_update = []

        all_cohorted_track_group_ids = set()
        for audit_course_user_group_partition_group in random_audit_course_user_group_partition_groups:
            all_cohorted_track_group_ids.add(
                audit_course_user_group_partition_group.group_id)
        all_cohorted_track_group_ids.add(
            verified_course_user_group_partition_group.group_id)

        for item in items:
            # Verify that there exists group access for this xblock, otherwise skip these checks
            if item.group_access:
                set_audit_enrollment_track = False
                set_verified_enrollment_track = False

                # Check the partition and group IDs for the audit course groups, if they exist in
                # the xblock's access settings then set the audit track flag to true
                for audit_course_user_group_partition_group in random_audit_course_user_group_partition_groups:
                    audit_partition_group_access = item.group_access.get(
                        audit_course_user_group_partition_group.partition_id,
                        None)
                    if (audit_partition_group_access and
                            audit_course_user_group_partition_group.group_id
                            in audit_partition_group_access):
                        print(
                            u"Queueing XBlock at location: '%s' for Audit Content Group update "
                            % item.location)
                        set_audit_enrollment_track = True

                # Check the partition and group IDs for the verified course group, if it exists in
                # the xblock's access settings then set the verified track flag to true
                verified_partition_group_access = item.group_access.get(
                    verified_course_user_group_partition_group.partition_id,
                    None)
                if verified_partition_group_access:
                    non_verified_track_access_groups = (
                        set(verified_partition_group_access) -
                        all_cohorted_track_group_ids)
                    # If the item has group_access that is not the
                    # verified or audit group IDs then raise an error
                    # This only needs to be checked for this partition_group once
                    if non_verified_track_access_groups:
                        errors.append(
                            u"Non audit/verified cohorted content group set for xblock, location '%s' with IDs '%s'"
                            %
                            (item.location, non_verified_track_access_groups))
                    if verified_course_user_group_partition_group.group_id in verified_partition_group_access:
                        print(
                            u"Queueing XBlock at location: '%s' for Verified Content Group update "
                            % item.location)
                        set_verified_enrollment_track = True

                # Add the enrollment track ids to a group access array
                enrollment_track_group_access = []
                if set_audit_enrollment_track:
                    enrollment_track_group_access.append(
                        settings.COURSE_ENROLLMENT_MODES['audit']['id'])
                if set_verified_enrollment_track:
                    enrollment_track_group_access.append(
                        settings.COURSE_ENROLLMENT_MODES['verified']['id'])

                # If there are no errors, and either the audit track, or verified
                #  track needed an update, set the access, update and publish
                if set_verified_enrollment_track or set_audit_enrollment_track:
                    # Determine whether the xblock has unpublished (draft) changes
                    has_changes = module_store.has_changes(item)

                    # Check that the xblock does not have changes and add it to be updated, otherwise add an error
                    if not has_changes:
                        item.group_access = {
                            ENROLLMENT_TRACK_PARTITION_ID:
                            enrollment_track_group_access
                        }
                        items_to_update.append(item)
                    else:
                        errors.append(
                            u"XBlock '%s' with location '%s' needs access changes, but is a draft"
                            % (item.display_name, item.location))

        partitions_to_delete = random_audit_course_user_group_partition_groups
        partitions_to_delete.append(verified_course_user_group_partition_group)

        # If there are no errors iterate over and update all of the items that had the access changed
        if not errors:
            for item in items_to_update:
                module_store.update_item(item,
                                         ModuleStoreEnum.UserID.mgmt_command)
                module_store.publish(item.location,
                                     ModuleStoreEnum.UserID.mgmt_command)
                print(u"Updated and published XBlock at location: '%s'" %
                      item.location)

        # Check if we should delete any partition groups if there are no errors.
        # If there are errors, none of the xblock items will have been updated,
        # so this section will throw errors for each partition in use
        if partitions_to_delete and not errors:
            partition_service = PartitionService(rerun_course_key)
            course = partition_service.get_course()
            for partition_to_delete in partitions_to_delete:
                # Get the user partition, and the index of that partition in the course
                partition = partition_service.get_user_partition(
                    partition_to_delete.partition_id)
                if partition:
                    partition_index = course.user_partitions.index(partition)
                    group_id = int(partition_to_delete.group_id)

                    # Sanity check to verify that all of the groups being deleted are empty,
                    # since they should have been converted to use enrollment tracks instead.
                    # Taken from contentstore/views/course.py.remove_content_or_experiment_group
                    usages = GroupConfiguration.get_partitions_usage_info(
                        module_store, course)
                    used = group_id in usages[partition.id]
                    if used:
                        errors.append(
                            u"Content group '%s' is in use and cannot be deleted."
                            % partition_to_delete.group_id)

                    # If there are not errors, proceed to update the course and user_partitions
                    if not errors:
                        # Remove the group matching the group ID of the
                        # partition being deleted; if no groups remain,
                        # remove the whole user partition instead.
                        matching_groups = [
                            group for group in partition.groups
                            if group.id == group_id
                        ]
                        if matching_groups:
                            group_index = partition.groups.index(
                                matching_groups[0])
                            partition.groups.pop(group_index)
                            # Update the course user partition with the updated groups
                            if partition.groups:
                                course.user_partitions[
                                    partition_index] = partition
                            else:
                                course.user_partitions.pop(partition_index)
                        module_store.update_item(
                            course, ModuleStoreEnum.UserID.mgmt_command)

        # If there are any errors, join them together and raise the CommandError
        if errors:
            raise CommandError(
                (u"Error for MigrateVerifiedTrackCohortsSetting with ID='%s'\n"
                 % verified_track_cohorts_setting.id) + "\t\n".join(errors))

        print(u"Finished for MigrateVerifiedTrackCohortsSetting with ID='%s" %
              verified_track_cohorts_setting.id)
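
Structurally, the command validates the whole batch first, collects error strings, and only mutates (or raises a single CommandError) at the end, so one run reports every problem instead of stopping at the first. A condensed sketch of that two-phase pattern (validate and apply_update are supplied by the caller):

    from django.core.management.base import CommandError

    def migrate_items(items, validate, apply_update):
        """Validate everything first; mutate only if the batch is clean."""
        errors = [problem for problem in (validate(i) for i in items) if problem]
        if errors:
            raise CommandError('\n'.join(errors))
        for item in items:
            apply_update(item)
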
    def process_multipart(self, dmarcemail):
        """Extract multipart report"""
        report = FBReport()
        dmarc_reporter = None
        try:
            dmarc_reporter = dmarcemail.get('from')
            report.reporter = FBReporter.objects.get(email=dmarc_reporter)
        except ObjectDoesNotExist:
            try:
                report.reporter = FBReporter.objects.create(
                    org_name=dmarc_reporter,
                    email=dmarc_reporter,
                )
            except Exception:
                msg = 'Failed to find or create reporter {}'.format(
                    dmarc_reporter)
                logger.error(msg)
                raise CommandError(msg)
        except Exception:
            msg = 'Unable to get rfc822 report'
            logger.error(msg)
            tf = tempfile.mkstemp(prefix='dmarc-', suffix='.eml')
            tmpf = os.fdopen(tf[0], 'w')
            # get_payload() on a multipart message returns a list of parts,
            # so serialize the whole message instead.
            tmpf.write(dmarcemail.as_string())
            tmpf.close()
            msg = 'Saved as: {}'.format(tf[1])
            logger.error(msg)
            raise CommandError(msg)
        fp = StringIO()
        g = Generator(fp, maxheaderlen=0)
        g.flatten(dmarcemail)
        report.feedback_source = fp.getvalue()
        g = None
        fp = None

        # Get the human readable part
        try:
            mimepart = dmarcemail.get_payload(0)
            if mimepart.get_content_type() == 'text/plain':
                # get the human-readable part of the message
                report.description = mimepart
        except Exception:
            msg = 'Unable to get human-readable part'
            logger.warning(msg)

        # Get the feedback report
        try:
            mimepart = dmarcemail.get_payload(1)
            if mimepart.get_content_type() == 'message/feedback-report':
                fp = StringIO()
                g = Generator(fp, maxheaderlen=0)
                g.flatten(mimepart)
                report.feedback_report = fp.getvalue()
                g = None
                fp = None
            else:
                msg = 'Found {} instead of message/feedback-report'.format(
                    mimepart.get_content_type())
                logger.error(msg)
        except Exception:
            msg = 'Unable to get feedback-report part'
            logger.error(msg)

        if report.feedback_report:
            for line in report.feedback_report.splitlines():
                line = line.lstrip()
                (ls0, ls1, ls2) = line.partition(':')
                ls0 = ls0.strip()
                ls2 = ls2.strip()
                if ls1:
                    if not report.domain:
                        if ls0 == 'Reported-Domain':
                            report.domain = ls2
                    if not report.source_ip:
                        if ls0 == 'Source-IP':
                            report.source_ip = ls2
                    if not report.email_from:
                        if ls0 == 'Original-Mail-From':
                            report.email_from = ls2
                    if not report.date:
                        if ls0 == 'Arrival-Date':
                            try:
                                # get tuples
                                t = parsedate_tz(ls2)
                                # get timestamp
                                t = mktime_tz(t)
                                report.date = datetime.fromtimestamp(t)
                                tz_utc = pytz.timezone('UTC')
                                report.date = report.date.replace(
                                    tzinfo=tz_utc)
                            except Exception:
                                msg = 'Unable to get date from: {}'.format(ls2)
                                logger.error(msg)
                                report.date = datetime.now()
                    if not report.dmarc_result:
                        if ls0 == 'Delivery-Result':
                            report.dmarc_result = ls2
                    if ls0 == 'Authentication-Results':
                        ar = ls2.split()
                        for r in ar:
                            (r0, r1, r2) = r.partition('=')
                            if r1:
                                if not report.dkim_alignment and r0 == 'dkim':
                                    report.dkim_alignment = r2.rstrip(';')
                                if not report.spf_alignment and r0 == 'spf':
                                    report.spf_alignment = r2.rstrip(';')

        # Get the rfc822 headers and any message
        fp = StringIO()
        g = Generator(fp, maxheaderlen=0)
        try:
            mimepart = dmarcemail.get_payload(2, False)
            mimepart_type = mimepart.get_content_type()
            g.flatten(mimepart)
            if mimepart_type in ('message/rfc822', 'message/rfc822-headers',
                                 'text/rfc822', 'text/rfc822-headers'):
                report.email_source = fp.getvalue()
            else:
                msg = 'Found {} instead of rfc822'.format(mimepart_type)
                logger.debug(msg)
        except Exception:
            msg = 'Unable to get rfc822 part'
            logger.warning(msg)
        g = None
        fp = None
        if report.email_source:
            for line in report.email_source.splitlines():
                line = line.lstrip()
                (ls0, ls1, ls2) = line.partition(':')
                ls0 = ls0.strip()
                ls2 = ls2.strip()
                if ls1:
                    if not report.email_subject:
                        if ls0 == 'Subject':
                            report.email_subject = ls2

        try:
            report.save()
        except Exception:
            msg = 'Failed save from {}'.format(report.reporter)
            logger.error(msg)
            tf = tempfile.mkstemp(prefix='dmarc-', suffix='.eml')
            tmpf = os.fdopen(tf[0], 'w')
            # Serialize the whole message; get_payload() would return a list here.
            tmpf.write(dmarcemail.as_string())
            tmpf.close()
            msg = 'Saved as: {}'.format(tf[1])
            logger.error(msg)