def handle(self, *args, **options):
        log = Logger(path=__file__,
            force_verbose=options.get('verbose'),
            force_silent=options.get('silent')
        )

        log.log('Building blurbs... Please be patient as this can take some time.')

        for cat, users in settings.AUTO_USERS.items():
            for u in users:
                blurb = u.get('blurb')
                if blurb:
                    text = blurb.get('text')
                    workshop = blurb.get('workshop')
                    if text and workshop:
                        SAVE_DIR = f'{settings.BUILD_DIR}_workshops/{workshop}'

                        if find_dir(workshop):
                            with open(f'{SAVE_DIR}/{DATA_FILE}', 'w+') as file:
                                file.write(yaml.dump({
                                    'workshop': workshop,
                                    'user': u.get('username'),
                                    'text': PARSER.fix_html(text)
                                }))

                                log.log(f'Saved blurb datafile: {SAVE_DIR}/{DATA_FILE}.')
                        else:
                            log.error(
                                f'No directory available for `{workshop}` ({SAVE_DIR}). Did you run `python manage.py build --repo {workshop}` before running this script?', kill=True)

        if log._save(data='buildblurbs', name='warnings.md', warnings=True) or log._save(data='buildblurbs', name='logs.md', warnings=False, logs=True) or log._save(data='buildblurbs', name='info.md', warnings=False, logs=False, info=True):
            log.log(f'Log files with any warnings and logging information is now available in: `{log.LOG_DIR}`', force=True)
Example #2
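    # Context (assumed, not shown on this page): this handle() appears to belong to a
    # buildworkshop-style management command. It reads repository names from
    # settings.AUTO_REPOS (or the --name/--all/--branch/--force options), loads each
    # workshop through WorkshopCache, and dumps the result as YAML under
    # settings.BUILD_DIR. Logger, WorkshopCache, check_for_cancel, yaml, and pathlib
    # are assumed to be imported at module level.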
    def handle(self, *args, **options):
        log = Logger(path=__file__,
                     force_verbose=options.get('verbose'),
                     force_silent=options.get('silent'))

        if options.get('all'):
            options['name'] = [x[0] for x in settings.AUTO_REPOS]

        if not options.get('name'):
            log.error(
                'No workshop names provided. Use any of the following settings:\n    --name [repository name]\n    --all'
            )

        branch = options.get('branch') or 'v2.0'

        log.log(
            'Building workshop files... Please be patient as this can take some time.'
        )

        for workshop in options.get('name'):
            SAVE_DIR = f'{settings.BUILD_DIR}_workshops/{workshop}'
            DATA_FILE = f'{workshop}.yml'
            if not options.get('force'):
                check_for_cancel(SAVE_DIR, workshop, log=log)

            if not pathlib.Path(SAVE_DIR).exists():
                pathlib.Path(SAVE_DIR).mkdir(parents=True)

            # branch = 'v2.0'  # TODO: #467 fix this...
            loader = WorkshopCache(repository=workshop, branch=branch, log=log)
            data = loader.data
            del data['raw']
            data['sections'] = loader.sections
            data['parent_branch'] = loader.branch
            data['parent_repo'] = workshop
            data['parent_backend'] = 'Github'

            # Save all data
            with open(f'{SAVE_DIR}/{DATA_FILE}', 'w+') as file:
                file.write(yaml.dump(data))

                log.log(f'Saved workshop datafile: `{SAVE_DIR}/{DATA_FILE}`')

            if log._save(data=workshop, name='warnings.md',
                         warnings=True) or log._save(data=workshop,
                                                     name='logs.md',
                                                     warnings=False,
                                                     logs=True) or log._save(
                                                         data=workshop,
                                                         name='info.md',
                                                         warnings=False,
                                                         logs=False,
                                                         info=True):
                log.log(
                    f'Log files with any warnings and logging information is now available in: `{log.LOG_DIR}`',
                    force=True)
Example #3
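    # Context (assumed): this command re-ingests "fragile" data (workshop view counts,
    # user progress, and issues) from the datafiles listed in the module-level
    # built_data mapping, then deletes those files. Its own error message names it
    # ingestfragile (backend.management.commands.ingestfragile).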
    def handle(self, *args, **options):
        log = Logger(path=__file__,
                     force_verbose=options.get('verbose'),
                     force_silent=options.get('silent'))

        files = {
            x: y['data_file']
            for x, y in built_data.items() if os.path.exists(y['data_file'])
        }
        raw = get_settings(files)

        for cat, data in raw.items():
            model = built_data[cat]['model']
            if model == Workshop:
                for obj in data:
                    Workshop.objects.filter(name=obj['fields']['name']).update(
                        views=obj['fields']['views'])
                log.log(f'Loaded Workshop fragile data ({len(data)} objects).')
            elif model == Progress:
                for obj in data:
                    profile, created = Profile.objects.get_or_create(
                        user__first_name=obj['fields']['profile'][0],
                        user__last_name=obj['fields']['profile'][1])
                    workshop = Workshop.objects.get_by_natural_key(
                        obj['fields']['workshop'])
                    Progress.objects.update_or_create(
                        profile=profile,
                        workshop=workshop,
                        defaults={
                            'page': obj['fields']['page'],
                            'modified': obj['fields']['modified']
                        })
                log.log(f'Loaded Progress fragile data ({len(data)} objects).')
            elif model == Issue:
                for obj in data:
                    lesson = Lesson.objects.get_by_natural_key(
                        obj['fields']['lesson'])
                    user = User.objects.get(username=obj['fields']['user'][0])
                    issue, created = Issue.objects.get_or_create(
                        # Note: `workshop` here is whatever was last resolved in the
                        # Progress branch above; the issue's own workshop reference
                        # is not read from obj['fields'].
                        workshop=workshop,
                        lesson=lesson,
                        user=user,
                        website=obj['fields']['website'],
                        open=obj['fields']['open'],
                        comment=obj['fields']['comment'])
                log.log(f'Loaded Issue fragile data ({len(data)} objects).')
            else:
                log.error(
                    f'Could not process some of the fragile data. This likely means that you have created ways to save fragile data but not built a way to ingest the fragile data back into the database. Revisit the code for ingestfragile command (backend.management.commands.ingestfragile) and ensure all is well.'
                )

        # Delete all files with fragile data
        for file in files.values():
            file.unlink()

        log.log(
            f'Ingested all the fragile data back into the database. ({len(files)} files processed.)'
        )
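    # Context (assumed): the next handle() ingests permission groups from a YAML setup
    # file (FULL_PATH), creating each Group and attaching the Permission objects listed
    # for it; the log._save calls below label it ingestgroups.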
    def handle(self, *args, **options):
        log = Logger(path=__file__,
                     force_verbose=options.get('verbose'),
                     force_silent=options.get('silent'))
        input = Input(path=__file__)

        test_for_required_files(REQUIRED_PATHS=REQUIRED_PATHS, log=log)
        data = get_yaml(FULL_PATH, log=log)

        for group_name, permission_set in data.items():
            group, created = Group.objects.get_or_create(name=group_name)

            if not created and not options.get('force'):
                choice = input.ask(
                    f'Group `{group_name}` already exists. Update with new information? [y/N]'
                )
                if choice.lower() != 'y':
                    continue

            for codename in permission_set:
                try:
                    # Find permission object and add to group
                    perm = Permission.objects.get(codename=codename)
                    group.permissions.add(perm)
                    log.log(f'Adding {codename} to group {group.__str__()}.')
                except Permission.DoesNotExist:
                    log.error(f'{codename} not found.')

        if log._save(data='ingestgroups', name='warnings.md',
                     warnings=True) or log._save(data='ingestgroups',
                                                 name='logs.md',
                                                 warnings=False,
                                                 logs=True) or log._save(
                                                     data='ingestgroups',
                                                     name='info.md',
                                                     warnings=False,
                                                     logs=False,
                                                     info=True):
            log.log(
                f'Log files with any warnings and logging information is now available in: `{log.LOG_DIR}`',
                force=True)
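    # Context (assumed): the next handle() links prerequisites (workshops, installs,
    # insights, or external links) to each workshop's Frontmatter, based on the
    # sections.frontmatter.prerequisites block of each workshop's YAML datafile; the
    # log._save calls below label it ingestprerequisites.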
    def handle(self, *args, **options):
        log = Logger(path=__file__,
            force_verbose=options.get('verbose'),
            force_silent=options.get('silent')
        )

        workshops = get_all_existing_workshops()

        if options.get('name'):
            workshops = get_all_existing_workshops(options.get('name'))

        for slug, path in workshops:
            workshop, frontmatter = None, None
            DATAFILE = f'{path}/{slug}.yml'

            superdata = get_yaml(DATAFILE, log=log)

            # Separate out data
            frontmatterdata = superdata.get('sections').get('frontmatter')
            name = superdata.get('name')

            # 1. FIND WORKSHOP
            try:
                workshop = Workshop.objects.get(name=name)
            except Workshop.DoesNotExist:
                log.error(f'The workshop `{slug}` could not be found. Make sure you ran python manage.py ingestworkshop --name {slug} before running this command.')

            # 2. FIND FRONTMATTER
            try:
                frontmatter = Frontmatter.objects.get(workshop=workshop)
            except Frontmatter.DoesNotExist:
                log.error(f'Frontmatter for the workshop `{slug}` could not be found. Make sure you ran python manage.py ingestworkshop --name {slug} before running this command.')

            for prereqdata in frontmatterdata.get('prerequisites'):
                linked_workshop, linked_installs, linked_insight = None, None, None
                url = prereqdata.get('url')
                category = Prerequisite.EXTERNAL_LINK

                if prereqdata.get('type') == 'workshop':
                    linked_workshop = search_workshop(prereqdata.get(
                        'potential_name'), name, log, DATAFILE)
                    q = f'Prerequisite workshop `{linked_workshop.name}`'
                    category = Prerequisite.WORKSHOP
                    log.log(
                        f'Linking workshop prerequisite for `{name}`: {linked_workshop.name}')
                elif prereqdata.get('type') == 'install':
                    # currently, not using prereqdata.get('potential_slug_fragment') - might be something we want to do in the future
                    linked_installs = search_install(prereqdata.get(
                        'potential_name'), name, log, DATAFILE)
                    q = f'Prerequisite installations ' + \
                        ', '.join([f'`{x.software}`' for x in linked_installs])
                    category = Prerequisite.INSTALL
                    log.log(
                        f'Linking installation prerequisite for `{name}`: {[x.software for x in linked_installs]}')
                elif prereqdata.get('type') == 'insight':
                    linked_insight = search_insight(prereqdata.get('potential_name'), prereqdata.get(
                        'potential_slug_fragment'), name, log, DATAFILE)
                    q = f'Prerequisite insight `{linked_insight.title}`'
                    category = Prerequisite.INSIGHT
                    log.log(
                        f'Linking insight prerequisite for `{name}`: {linked_insight.title}')

                if category == Prerequisite.EXTERNAL_LINK:
                    label = prereqdata.get('url_text')
                else:
                    label = ''

                clean_up(category, linked_workshop, linked_insight, url)
                prerequisite, created = Prerequisite.objects.update_or_create(
                    category=category, 
                    linked_workshop=linked_workshop, 
                    linked_insight=linked_insight, 
                    url=url,
                    defaults={
                        'text': prereqdata.get('text', ''), 
                        'required': prereqdata.get('required'), 
                        'recommended': prereqdata.get('recommended'),
                        'label': label
                    }
                )

                if linked_installs:
                    for software in linked_installs:
                        through = PrerequisiteSoftware(prerequisite=prerequisite, software=software, required=prereqdata.get(
                            'required'), recommended=prereqdata.get('recommended'))
                        through.save()

                frontmatter.prerequisites.add(prerequisite)

        log.log(
            'Added/updated requirements for workshops: ' + ', '.join([x[0] for x in workshops]))

        if log._save(data='ingestprerequisites', name='warnings.md', warnings=True) or log._save(data='ingestprerequisites', name='logs.md', warnings=False, logs=True) or log._save(data='ingestprerequisites', name='info.md', warnings=False, logs=False, info=True):
            log.log(f'Log files with any warnings and logging information is now available in: `{log.LOG_DIR}`', force=True)
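# Context (assumed): the fragment below comes from a module-level settings check that
# runs at import time; log, AUTO_USERS, SETUP_FILES, NORMALIZING_SECTIONS, and the
# other _check_* helpers are defined elsewhere in the same module.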
def _check_users(REQUIRED_IN_USERS=['first_name', 'last_name', 'username'],
                 AUTO_USERS=AUTO_USERS):
    for cat, userlist in AUTO_USERS.items():
        for u in userlist:
            for section in REQUIRED_IN_USERS:
                if not u.get(section):
                    log.error(
                        f'User setup file does not contain section `{section}` (in user with username `{u.get("username")}`). Make sure all the users in the `{SETUP_FILES["users.yml"]}` file contains all the required sections: `{"`, `".join(REQUIRED_IN_USERS)}`.'
                    )
    return True


if not _check_normalizer():
    log.error(
        'An unknown error occurred while checking for sections in NORMALIZING_SECTIONS.'
    )
if not _check_dirs_existence():
    log.error(
        'An unknown error occurred while ensuring that all cache directories exist.'
    )
if not _check_dirs_existence(DIRS=IMAGE_CACHE):
    log.error(
        'An unknown error occurred while ensuring that all image cache directories exist.'
    )
if not _check_dirs_existence(DIRS=STATIC_IMAGES):
    log.error(
        'An unknown error occurred while ensuring that static image directories exist.'
    )
if not _check_users():
    log.error(
        'An unknown error occurred while checking the user setup in AUTO_USERS.'
    )
Example #7
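    # Context (assumed): this is the ingestworkshop command referenced elsewhere on
    # this page. It reads each workshop's YAML datafile and creates or updates the
    # Workshop, Frontmatter, Praxis, and Lesson records, along with images, resources,
    # contributors, challenges, solutions, evaluations, and glossary term links.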
    def handle(self, *args, **options):
        log = Logger(path=__file__,
                     force_verbose=options.get('verbose'),
                     force_silent=options.get('silent'))
        input = Input(path=__file__)

        workshops = get_all_existing_workshops()

        if options.get('name'):
            workshops = get_all_existing_workshops(options.get('name'))

        for slug, path in workshops:
            DATAFILE = f'{path}/{slug}.yml'

            d = get_yaml(DATAFILE, log=log)

            # Separate out data
            imagedata = d.get('image')
            frontmatterdata = d.get('sections').get('frontmatter')
            praxisdata = d.get('sections').get('theory-to-practice')
            lessondata = d.get('sections').get('lessons')

            full_name = d.get('name')
            parent_backend = d.get('parent_backend')
            parent_branch = d.get('parent_branch')
            parent_repo = d.get('parent_repo')

            # 1. ENTER WORKSHOP
            workshop, created = Workshop.objects.update_or_create(
                name=full_name,
                slug=dhri_slugify(full_name),
                defaults={
                    'parent_backend': parent_backend,
                    'parent_branch': parent_branch,
                    'parent_repo': parent_repo,
                    'image_alt': imagedata['alt'] if imagedata else ''
                })

            def _get_valid_name(filename):
                return filename.replace(
                    '@', '')  # TODO: is there a Django built-in for this?

            def _get_media_path(valid_filename):
                return settings.MEDIA_ROOT + '/' + Workshop.image.field.upload_to + valid_filename

            def _get_media_url(valid_filename):
                return Workshop.image.field.upload_to + valid_filename

            def _image_exists(valid_filename):
                media_path = _get_media_path(valid_filename)
                return os.path.exists(media_path)

            def _get_default_image():
                return Workshop.image.field.default

            if imagedata:
                source_file = imagedata['url']
                valid_filename = _get_valid_name(
                    slug + '-' + os.path.basename(imagedata['url']))
                if not _image_exists(valid_filename) or not filecmp.cmp(
                        source_file,
                        _get_media_path(valid_filename),
                        shallow=False):
                    try:
                        with open(source_file, 'rb') as f:
                            workshop.image = File(f, name=valid_filename)
                            workshop.save()
                    except FileNotFoundError:
                        log.error(
                            f'File `{source_file}` could not be found. Did you run `python manage.py buildworkshop` before you ran this command?'
                        )
                workshop.image.name = _get_media_url(valid_filename)
                workshop.save()
            else:
                log.warning(
                    f'Workshop {workshop.name} does not have an image assigned to it. Add filepaths to an existing file in your datafile ({DATAFILE}) if you want to update the specific workshop. Default workshop image (`{os.path.basename(_get_default_image())}`) will be assigned.'
                )
                workshop.image.name = Workshop.image.field.default
                workshop.save()

                if not _image_exists(
                        _get_valid_name(os.path.basename(
                            _get_default_image()))):
                    log.warning(
                        f'Default workshop image does not exist. You will want to add it manually to the correct folder: {_get_media_path("")}'
                    )

            # Saving the slug in a format that matches the GitHub repositories (special method `save_slug`)
            workshop.slug = slug
            workshop.save_slug()

            # 2. ENTER FRONTMATTER
            frontmatter, created = Frontmatter.objects.update_or_create(
                workshop=workshop,
                defaults={
                    'abstract': frontmatterdata.get('abstract'),
                    'estimated_time': frontmatterdata.get('estimated_time')
                })

            if frontmatterdata.get('ethical_considerations'):
                for point in frontmatterdata.get('ethical_considerations'):
                    _, created = EthicalConsideration.objects.update_or_create(
                        frontmatter=frontmatter, label=point.get('annotation'))

            if frontmatterdata.get('learning_objectives'):
                for point in frontmatterdata.get('learning_objectives'):
                    _, created = LearningObjective.objects.update_or_create(
                        frontmatter=frontmatter, label=point.get('annotation'))

            for cat in ['projects', 'readings', 'cheat_sheets', 'datasets']:
                if frontmatterdata.get(cat):
                    category, add_field = None, None
                    if cat == 'projects':
                        category = Resource.PROJECT
                        add_field = frontmatter.projects
                    elif cat == 'readings':
                        category = Resource.READING
                        add_field = frontmatter.readings
                    elif cat == 'cheat_sheets':
                        category = Resource.CHEATSHEET
                        add_field = frontmatter.cheat_sheets
                    elif cat == 'datasets':
                        category = Resource.DATASET
                        add_field = frontmatter.datasets

                    for point in frontmatterdata.get(cat):
                        if not add_field or not category:
                            log.error(
                                f'Cannot interpret category `{cat}`. Make sure the script is correct and corresponds with the database structure.'
                            )

                        obj, created = Resource.objects.update_or_create(
                            category=category,
                            title=point.get('linked_text'),
                            url=point.get('url'),
                            annotation=point.get('annotation'))
                        if obj not in add_field.all():
                            add_field.add(obj)

            if frontmatterdata.get('contributors'):
                for point in frontmatterdata.get('contributors'):
                    profile = None
                    try:
                        profile = Profile.objects.get(
                            user__first_name=point.get('first_name'),
                            user__last_name=point.get('last_name'))
                    except Profile.DoesNotExist:
                        for p in Profile.objects.all():
                            if f'{p.user.first_name} {p.user.last_name}' == point.get(
                                    'full_name'):
                                profile = p
                                log.info(
                                    f'In-depth search revealed a profile matching the full name for `{workshop.name}` contributor `{point.get("first_name")} {point.get("last_name")}`. It may or may not be the correct person, so make sure you verify it manually.'
                                )

                        if not profile:
                            log.info(
                                f'Could not find user profile on the curriculum website for contributor `{point.get("full_name")}` (searching by first name `{point.get("first_name")}` and last name `{point.get("last_name")}`).'
                            )

                    contributor, created = Contributor.objects.update_or_create(
                        first_name=point.get('first_name'),
                        last_name=point.get('last_name'),
                        defaults={
                            'url': point.get('link'),
                            'profile': profile
                        })

                    collaboration, created = Collaboration.objects.update_or_create(
                        frontmatter=frontmatter,
                        contributor=contributor,
                        defaults={
                            'current': point.get('current'),
                            'role': point.get('role')
                        })

            # 3. ENTER PRAXIS
            praxis, created = Praxis.objects.update_or_create(
                workshop=workshop,
                defaults={
                    'intro': praxisdata.get('intro'),
                })

            for cat in ['discussion_questions', 'next_steps']:
                if praxisdata.get(cat):
                    obj = None
                    if cat == 'discussion_questions':
                        obj = DiscussionQuestion
                    elif cat == 'next_steps':
                        obj = NextStep

                    for order, point in enumerate(
                            praxisdata[cat], start=1
                    ):  # TODO: Should we pull out order manually here? Not necessary, right?
                        obj.objects.update_or_create(
                            praxis=praxis,
                            label=point.get('annotation'),
                            defaults={'order': order})

            for cat in ['further_readings', 'further_projects', 'tutorials']:
                if praxisdata.get(cat):
                    category, add_field = None, None
                    if cat == 'further_readings':
                        category = Resource.READING
                        add_field = praxis.further_readings
                    elif cat == 'further_projects':
                        category = Resource.PROJECT
                        add_field = praxis.further_projects
                    elif cat == 'tutorials':
                        category = Resource.TUTORIAL
                        add_field = praxis.tutorials

                    for point in praxisdata.get(cat):
                        if not add_field or not category:
                            log.error(
                                f'Cannot interpret category `{cat}`. Make sure the script is correct and corresponds with the database structure.'
                            )

                        try:
                            obj, created = Resource.objects.update_or_create(
                                category=category,
                                title=point.get('linked_text'),
                                url=point.get('url'),
                                annotation=point.get('annotation'))
                            if obj not in add_field.all():
                                add_field.add(obj)
                        except IntegrityError:
                            obj = Resource.objects.get(
                                category=category,
                                title=point.get('linked_text'),
                                url=point.get('url'),
                            )
                            obj.annotation = point.get('annotation')
                            if obj not in add_field.all():
                                add_field.add(obj)
                            log.info(
                                f'Another resource with the same URL, title, and category already existed so updated with a new annotation: **{point.get("linked_text")} (old)**\n{point.get("annotation")}\n-------\n**{obj.title} (new)**\n{obj.annotation}'
                            )

            # 4. ENTER LESSONS

            for lessoninfo in lessondata:
                lesson, created = Lesson.objects.update_or_create(
                    workshop=workshop,
                    title=lessoninfo.get('header'),
                    defaults={
                        'order': lessoninfo.get('order'),
                        'text': lessoninfo.get('content'),
                    })

                for image in lessoninfo.get('lesson_images') or []:
                    LessonImage.objects.update_or_create(url=image.get('path'),
                                                         lesson=lesson,
                                                         alt=image.get('alt'))

                if not lessoninfo.get('challenge') and lessoninfo.get(
                        'solution'):
                    log.error(
                        f'Lesson `{lesson.title}` (in workshop {workshop}) has a solution but no challenge. Correct the files on GitHub and rerun the buildworkshop command and then re-attempt the ingestworkshop command. Alternatively, you can change the datafile content manually.'
                    )

                if lessoninfo.get('challenge'):
                    challenge, created = Challenge.objects.update_or_create(
                        lesson=lesson,
                        title=lessoninfo['challenge'].get('header'),
                        defaults={
                            'text': lessoninfo['challenge'].get('content')
                        })

                    if lessoninfo.get('solution'):
                        solution, created = Solution.objects.update_or_create(
                            challenge=challenge,
                            title=lessoninfo['solution'].get('header'),
                            defaults={
                                'text': lessoninfo['solution'].get('content')
                            })

                if lessoninfo.get('evaluation'):
                    evaluation, created = Evaluation.objects.get_or_create(
                        lesson=lesson)
                    for point in lessoninfo['evaluation'].get('content'):
                        question, created = Question.objects.update_or_create(
                            evaluation=evaluation, label=point.get('question'))
                        for is_correct, answers in point.get(
                                'answers').items():
                            is_correct = is_correct == 'correct'
                            for answertext in answers:
                                answer, created = Answer.objects.update_or_create(
                                    question=question,
                                    label=answertext,
                                    defaults={'is_correct': is_correct})

                if lessoninfo.get('keywords'):
                    # lessoninfo['keywords'].get('header') # TODO: not doing anything with keyword header yet
                    for keyword in lessoninfo['keywords'].get('content'):
                        terms = Term.objects.filter(term__iexact=keyword)
                        if terms.count() == 1:
                            lesson.terms.add(terms[0])
                        elif terms.count() == 0:
                            log.warning(
                                f'Keyword `{keyword}` (used in lesson `{lesson.title}`, workshop `{workshop}`) cannot be found in the existing glossary. Are you sure it is in the glossary and synchronized with the database? Make sure the data file for glossary is available ({GLOSSARY_FILE}) and that the term is defined in the file. Then run python manage.py ingestglossary.'
                            )
                        else:
                            log.error(
                                f'Multiple definitions of `{keyword}` exists in the database. Try resetting the glossary and rerun python manage.py ingestglossary before you run the ingestworkshop command again.'
                            )

        log.log('Added/updated workshops: ' +
                ', '.join([x[0] for x in workshops]))
        if not options.get('no_reminder'):
            log.log(
                'Do not forget to run `ingestprerequisites` after running the `ingestworkshop` command (without the --name flag).',
                color='yellow')

        if log._save(data='ingestworkshop', name='warnings.md',
                     warnings=True) or log._save(data='ingestworkshop',
                                                 name='logs.md',
                                                 warnings=False,
                                                 logs=True) or log._save(
                                                     data='ingestworkshop',
                                                     name='info.md',
                                                     warnings=False,
                                                     logs=False,
                                                     info=True):
            log.log(
                f'Log files with any warnings and logging information is now available in: `{log.LOG_DIR}`',
                force=True)
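    # Context (assumed): the next handle() ingests per-workshop blurbs from blurb.yml
    # datafiles (built by buildblurbs) and attaches each one to a User and a Workshop;
    # the log._save calls below label it ingestblurbs.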
    def handle(self, *args, **options):
        log = Logger(
            path=__file__,
            force_verbose=options.get('verbose'),
            force_silent=options.get('silent')
        )
        input = Input(path=__file__)

        workshops = get_all_existing_workshops()

        if options.get('name'):
            workshops = get_all_existing_workshops(options.get('name'))

        for name, path in workshops:
            DATAFILE = f'{path}/blurb.yml'

            try:
                data = get_yaml(DATAFILE, log=log, catch_error=True)
            except Exception as e:
                log.warning(f'Found no blurb for workshop `{name}`. Skipping and moving ahead...')
                continue

            if not data.get('user'):
                log.error(
                    f'Username was not defined for the blurb for workshop `{name}`. Check the datafile {DATAFILE} to verify the username attributed to the blurb.')

            if not data.get('workshop'):
                log.warning(
                    f'Blurb had no workshop assigned, but will proceed with the blurb\'s parent folder ({name}) as assumed workshop. To fix this warning, you can try running python manage.py buildblurbs before running ingestblurbs.')
                data['workshop'] = name

            if not data.get('text'):
                log.error(
                    f'Blurb has no text assigned, and thus could not be ingested. Check the datafile {DATAFILE} to verify the workshop attributed to the blurb.')

            try:
                user = User.objects.get(username=data.get('user'))
            except User.DoesNotExist:
                log.error(
                    f'The user attributed to the blurb ({data.get("user")}) was not found in the database. Did you try running python manage.py ingestusers before running ingestblurbs?')

            try:
                workshop = Workshop.objects.get(slug=data.get('workshop'))
            except Workshop.DoesNotExist:
                log.error(
                    f'The blurb\'s attached workshop ({data.get("workshop")}) was not found in the database. Did you try running python manage.py ingestworkshop --name {data.get("workshop")} before running ingestblurbs?')

            blurb, created = Blurb.objects.get_or_create(user=user, workshop=workshop, defaults={
                                                         'text': PARSER.fix_html(data.get('text'))})

            if not created and not options.get('force'):
                choice = input.ask(
                    f'Blurb for workshop `{workshop}` already exists. Update with new content? [y/N]')
                if choice.lower() != 'y':
                    continue

            blurb.text = PARSER.fix_html(data.get('text'))
            blurb.save()

        log.log('Added/updated blurbs for workshops: ' + ', '.join([x[0] for x in workshops]))

        if log._save(data='ingestblurbs', name='warnings.md', warnings=True) or log._save(data='ingestblurbs', name='logs.md', warnings=False, logs=True) or log._save(data='ingestblurbs', name='info.md', warnings=False, logs=False, info=True):
            log.log(f'Log files with any warnings and logging information is now available in: `{log.LOG_DIR}`', force=True)
Example #9
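    # Context (assumed): this is the ingestusers command referenced elsewhere on this
    # page. It creates or updates Django users from a YAML setup file (FULL_PATH),
    # sets their passwords, and builds their Profile records (bio, pronouns, image,
    # and links).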
    def handle(self, *args, **options):
        log = Logger(path=__file__,
                     force_verbose=options.get('verbose'),
                     force_silent=options.get('silent')
                     )
        input = Input(path=__file__)

        test_for_required_files(REQUIRED_PATHS=REQUIRED_PATHS, log=log)
        data = get_yaml(FULL_PATH, log=log)

        for userdata in data.get('users', []):
            if not userdata.get('username'):
                log.error(
                    f'Username is required. Check the datafile ({FULL_PATH}) to make sure that all the users in the file are assigned a username.')

            finder = User.objects.filter(username=userdata.get('username'))
            if finder.count():
                finder.update(
                    first_name=userdata.get('first_name'),
                    last_name=userdata.get('last_name'),
                    email=userdata.get('email'),
                    is_staff=userdata.get('staff')
                )
                
                user = User.objects.get(username=userdata.get('username'))
            else:
                func = User.objects.create_user
                if userdata.get('superuser'):
                    func = User.objects.create_superuser

                user = func(
                    username=userdata.get('username'),
                    first_name=userdata.get('first_name'),
                    last_name=userdata.get('last_name'),
                    email=userdata.get('email'),
                    is_staff=userdata.get('staff')
                )

                user.refresh_from_db()

            # if None, sets to unusable password, see https://docs.djangoproject.com/en/3.1/ref/contrib/auth/#django.contrib.auth.models.User.set_password
            if userdata.get('password'):
                user.set_password(userdata['password'])
            else:
                if options.get('nopass'):
                    user.set_unusable_password()
                else:
                    _password = input.ask(f'Password for `{userdata.get("username")}`?')
                    user.set_password(_password)
            user.save()

            if not userdata.get('profile'):
                log.error(f'User {userdata.get("username")} does not have profile information (bio, image, links, and/or pronouns) added. Make sure you add all this information for each user in the datafile before running this command ({FULL_PATH}).')

            profile, created = Profile.objects.update_or_create(
                user=user,
                defaults={
                    'bio': userdata.get('profile', {}).get('bio'),
                    'pronouns': userdata.get('profile', {}).get('pronouns')
                })

            if userdata.get('profile', {}).get('image'):
                profile_pic = userdata.get('profile', {}).get('image')
                if profile_picture_exists(profile_pic) and filecmp.cmp(profile_pic, get_profile_picture_path(profile_pic)):
                    profile.image.name = get_profile_picture_path(profile_pic, True)
                    profile.save()
                else:
                    with open(profile_pic, 'rb') as f:
                        profile.image = File(f, name=os.path.basename(f.name))
                        profile.save()
            else:
                profile.image.name = get_default_profile_picture()
                profile.save()

            if userdata.get('profile', {}).get('links'):
                for link in userdata.get('profile', {}).get('links'):
                    if link.get('cat') == 'personal':
                        link['cat'] = ProfileLink.PERSONAL
                    elif link.get('cat') == 'project':
                        link['cat'] = ProfileLink.PROJECT
                    else:
                        log.error(
                            f'Link {link.get("url")} is assigned a category that has no correspondence in the database model: {link.get("cat")}. Please set the category to either `personal` or `project`.')

                    _, _ = ProfileLink.objects.update_or_create(profile=profile, url=link.get('url'), defaults={
                        'cat': link.get('cat'),
                        'label': link.get('label')
                    })

        if not profile_picture_exists(get_default_profile_picture(full_path=True)):
            if data.get('default', False) and os.path.exists(data.get('default')):
                from shutil import copyfile

                copyfile(data.get('default'), get_default_profile_picture(full_path=True))
                log.log('Default profile picture added to the /media/ directory.')
            elif not data.get('default'):
                log.error(
                    f'No default profile picture was defined in your datafile (`{FULL_PATH}`). Add the file, and then add the path to the file (relative to the `django-app` directory) in a `default` dictionary in your `users.yml` file, like this:\n' + '`default: backend/setup/profile-pictures/default.jpg`')
            elif not os.path.exists(data.get('default')):
                log.error(
                    f'The default profile picture (`{data.get("default")}`) in your datafile (`{FULL_PATH}`) does not exist in its expected directory (`{os.path.dirname(data.get("default"))}`). Make sure it is in the directory or update the datafile accordingly, or add the file before running this command.')

        log.log('Added/updated users: ' +
                ', '.join([x.get('username') for x in data.get('users')]))

        if log._save(data='ingestusers', name='warnings.md', warnings=True) or log._save(data='ingestusers', name='logs.md', warnings=False, logs=True) or log._save(data='ingestusers', name='info.md', warnings=False, logs=False, info=True):
            log.log(f'Log files with any warnings and logging information is now available in: `{log.LOG_DIR}`', force=True)
Example #10
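    # Context (assumed): this command orchestrates a full (re)build of the database.
    # It can optionally wipe all model objects and/or users, then runs every ingest
    # command in order via Django's call_command.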
    def handle(self, *args, **options):
        log = Logger(path=__file__,
                     force_verbose=options.get('verbose'),
                     force_silent=options.get('silent'))

        if options.get('reset'):
            if options.get('force'):
                i = get_or_default(
                    f'Warning: This script is about to remove ALL OF THE OBJECTS from the database. Are you sure you want to continue?',
                    color='red',
                    default_variable='N')
                if i.lower() != 'y':
                    log.error('User opted to stop.')
            for model in all_models:
                name = model.__name__.replace('_', ' ')
                if not options.get('force'):
                    i = get_or_default(
                        f'Warning: This will remove all the `{name}` objects. Are you sure you want to continue?',
                        color='red',
                        default_variable='N')
                    if i.lower() != 'y':
                        continue
                model.objects.all().delete()

                log.log(f'Removed all `{name}` objects.')

        if options.get('resetusers'):
            if options.get('force'):
                i = get_or_default(
                    f'Warning: This script is about to remove ALL OF THE USERS from the database. Are you sure you want to continue?',
                    color='red',
                    default_variable='N')
                if i.lower() != 'y':
                    log.error('User opted to stop.')

            User.objects.all().delete()

            log.log(f'Removed all users.')

        call_command('ingestgroups',
                     force=True,
                     silent=options.get('silent'),
                     verbose=options.get('verbose'))
        call_command('ingestusers',
                     force=True,
                     silent=options.get('silent'),
                     verbose=options.get('verbose'))
        call_command('ingestglossary',
                     force=True,
                     silent=options.get('silent'),
                     verbose=options.get('verbose'))
        call_command('ingestinstalls',
                     force=True,
                     silent=options.get('silent'),
                     verbose=options.get('verbose'))
        call_command('ingestinsights',
                     force=True,
                     silent=options.get('silent'),
                     verbose=options.get('verbose'))
        call_command('ingestworkshop',
                     force=True,
                     silent=options.get('silent'),
                     verbose=options.get('verbose'),
                     no_reminder=True)
        call_command('ingestsnippets',
                     force=True,
                     silent=options.get('silent'),
                     verbose=options.get('verbose'))
        call_command('ingestblurbs',
                     force=True,
                     silent=options.get('silent'),
                     verbose=options.get('verbose'))
        call_command('ingestprerequisites',
                     force=True,
                     silent=options.get('silent'),
                     verbose=options.get('verbose'))
        call_command('ingestfragile',
                     force=True,
                     silent=options.get('silent'),
                     verbose=options.get('verbose'))
Example #11
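    # Context (assumed): this is the ingestglossary command referenced elsewhere on
    # this page. It creates or updates glossary Term objects, their explications, and
    # linked Resource objects from a YAML datafile (FULL_PATH).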
    def handle(self, *args, **options):
        log = Logger(path=__file__,
                     force_verbose=options.get('verbose'),
                     force_silent=options.get('silent'))
        input = Input(path=__file__)

        test_for_required_files(REQUIRED_PATHS=REQUIRED_PATHS, log=log)
        data = get_yaml(FULL_PATH, log=log)

        for termdata in data:
            try:
                term, created = Term.objects.get_or_create(
                    term=termdata.get('term'))
            except IntegrityError:
                try:
                    term = Term.objects.get(
                        slug=dhri_slugify(termdata.get('term')))
                    created = False
                except Term.DoesNotExist:
                    log.error(
                        'An unknown error occurred. Try resetting the glossary and rerunning python manage.py ingestglossary.')

            term.term = termdata.get('term')
            term.explication = termdata.get('explication')
            term.save()

            if not created and not options.get('force'):
                choice = input.ask(
                    f'Term `{termdata.get("term")}` already exists. Update with new definition? [y/N]'
                )
                if choice.lower() != 'y':
                    continue

            Term.objects.filter(term=termdata.get('term')).update(
                explication=termdata.get('explication'))

            term.refresh_from_db()

            for cat in ['tutorials', 'readings', 'cheat_sheets']:
                if termdata.get(cat):
                    category, add_field = None, None
                    if cat == 'tutorials':
                        category = Resource.TUTORIAL
                        add_field = term.tutorials
                    elif cat == 'readings':
                        category = Resource.READING
                        add_field = term.readings
                    elif cat == 'cheat_sheets':
                        category = Resource.CHEATSHEET
                        add_field = term.cheat_sheets

                    for point in termdata.get(cat):
                        if not add_field or not category:
                            log.error(
                                f'Cannot interpret category `{cat}`. Make sure the script is correct and corresponds with the database structure.'
                            )

                        try:
                            obj, created = Resource.objects.update_or_create(
                                category=category,
                                title=point.get('linked_text'),
                                url=point.get('url'),
                                annotation=point.get('annotation'))
                            if obj not in add_field.all():
                                add_field.add(obj)
                        except IntegrityError:
                            obj = Resource.objects.get(
                                category=category,
                                title=point.get('linked_text'),
                                url=point.get('url'),
                            )
                            obj.annotation = point.get('annotation')
                            if obj not in add_field.all():
                                add_field.add(obj)
                            log.info(
                                f'Another resource with the same URL, title, and category already existed so updated with a new annotation: **{point.get("linked_text")} (old)**\n{point.get("annotation")}\n-------\n**{obj.title} (new)**\n{obj.annotation}'
                            )

        log.log('Added/updated terms: ' +
                ', '.join([x.get('term') for x in data]))

        if log._save(data='ingestglossary', name='warnings.md',
                     warnings=True) or log._save(data='ingestglossary',
                                                 name='logs.md',
                                                 warnings=False,
                                                 logs=True) or log._save(
                                                     data='ingestglossary',
                                                     name='info.md',
                                                     warnings=False,
                                                     logs=False,
                                                     info=True):
            log.log(
                f'Log files with any warnings and logging information is now available in: `{log.LOG_DIR}`',
                force=True)