def get_ldap_value_for_field(model, field, attrs, dn, instance=None):
    """Get the value of a field in LDAP attributes.

    Looks at the site preference for sync fields to determine which LDAP
    field is associated with the model field, then gets this attribute and
    pythonises it.

    Raises KeyError if the desired field is not in the LDAP entry.
    Raises AttributeError if the requested field is not configured to be synced.
    """
    setting_name = "ldap__" + setting_name_from_field(model, field)

    # Only sync if the preference for this field is non-empty
    ldap_field = get_site_preferences()[setting_name].lower()
    if not ldap_field:
        raise AttributeError(f"Field {field.name} not configured to be synced.")

    if ldap_field not in attrs:
        raise KeyError(f"Field {ldap_field} not in attributes of {dn}")

    raw_value = attrs[ldap_field][0]

    # Apply regex replace from config
    raw_value = apply_templates(
        raw_value,
        get_site_preferences()[setting_name + "_re"],
        get_site_preferences()[setting_name + "_replace"],
    )

    # Opportunistically convert LDAP string value to Python object
    return from_ldap(raw_value, field, dn, ldap_field, instance)
def test_parse_dd_mm_yyyy():
    """Dotted dates parse day-first for German, but not for US English."""
    prefs = get_site_preferences()
    expected = {
        "12.01.2020": date(2020, 1, 12),
        "12.12.1912": date(1912, 12, 12),
    }

    prefs["csv_import__date_languages"] = "de"
    for raw, parsed in expected.items():
        assert parse_date(raw) == parsed

    prefs["csv_import__date_languages"] = "en-US"
    for raw, parsed in expected.items():
        assert parse_date(raw) != parsed
def process(self, instance: Model, value):
    """Assign the person to one department group per subject name in *value*.

    When the chronos app is installed, a chronos Subject is created or
    reused per name and linked to the group; otherwise the plain name is
    used for both the group's name and short name.
    """
    with_chronos = apps.is_installed("aleksis.apps.chronos")
    if with_chronos:
        Subject = apps.get_model("chronos", "Subject")

    prefs = get_site_preferences()
    group_type = prefs["csv_import__group_type_departments"]
    group_prefix = prefs["csv_import__group_prefix_departments"]

    groups = []
    for subject_name in value:
        if with_chronos:
            # Get department subject
            subject, __ = Subject.objects.get_or_create(
                short_name=subject_name, defaults={"name": subject_name}
            )
            name, short_name = subject.name, subject.short_name
        else:
            name = short_name = subject_name

        # Get department group
        group, __ = Group.objects.get_or_create(
            group_type=group_type,
            short_name=short_name,
            defaults={"name": with_prefix(group_prefix, name)},
        )
        if with_chronos:
            group.subject = subject
            group.save()

        groups.append(group)

    instance.member_of.add(*groups)
def test_parse_phone_number():
    """Numbers are resolved against the configured default country."""
    expected = PhoneNumber(country_code=49, national_number=1635550217)
    international = ["+49-163-555-0217", "+491635550217"]
    national = ["0163-555-0217", "01635550217"]

    get_site_preferences()["csv_import__phone_number_country"] = "DE"
    for raw in international + national:
        assert parse_phone_number(raw) == expected

    # With a British default country, national numbers resolve differently
    get_site_preferences()["csv_import__phone_number_country"] = "GB"
    for raw in national:
        assert parse_phone_number(raw) != expected
def __init__(self, request, *args, **kwargs):
    """Build the form, hiding the groups field when groups are pre-selected.

    Persons and groups offered by the form are restricted to those the
    requesting user is allowed to assign group roles for.
    """
    self.request = request
    initial = kwargs.get("initial", {})

    # Build layout with or without groups field
    base_layout = self.layout_base[:]
    if "groups" in initial:
        base_layout.remove("groups")
    self.layout = Layout(*base_layout)

    super().__init__(*args, **kwargs)

    if "groups" in initial:
        self.fields["groups"].required = False

    # Filter persons and groups by permissions
    if not self.request.user.has_perm("alsijil.assign_grouprole"):  # Global permission
        persons = Person.objects
        if initial.get("groups"):
            persons = persons.filter(member_of__in=initial["groups"])
        if get_site_preferences()["alsijil__group_owners_can_assign_roles_to_parents"]:
            # Owners may also assign roles to parents of their group members
            persons = persons.filter(
                Q(member_of__owners=self.request.user.person)
                | Q(children__member_of__owners=self.request.user.person))
        else:
            persons = persons.filter(member_of__owners=self.request.user.person)
        self.fields["person"].queryset = persons

        # Restrict selectable groups to groups owned by the requesting user
        if "groups" not in initial:
            groups = Group.objects.for_current_school_term_or_all().filter(
                owners=self.request.user.person)
            self.fields["groups"].queryset = groups
def groups_to_show(self) -> models.QuerySet:
    """Return the groups to display for this object.

    If the object belongs to exactly one group which itself has parent
    groups, and the site preference ``chronos__use_parent_groups`` is
    enabled, the parent groups are returned instead of the group itself.
    """
    groups = self.groups.all()
    # Evaluate the cheap preference flag first so the two extra database
    # queries (count + parent-group lookup) are skipped when it is disabled.
    if (get_site_preferences()["chronos__use_parent_groups"]
            and groups.count() == 1
            and groups[0].parent_groups.all()):
        return groups[0].parent_groups.all()
    return groups
def parse_date(value: str) -> Union[date, None]:
    """Parse string date."""
    languages_raw = get_site_preferences()["csv_import__date_languages"]
    languages = languages_raw.split(",") if languages_raw else []
    try:
        parsed = dateparser.parse(value, languages=languages)
        return parsed.date()
    except (ValueError, AttributeError):
        # AttributeError covers dateparser returning None for unparseable input
        return None
def parse_phone_number(value: str) -> Union[phonenumbers.PhoneNumber, None]:
    """Parse a phone number."""
    region = get_site_preferences()["csv_import__phone_number_country"]
    try:
        return phonenumbers.parse(value, region)
    except phonenumbers.NumberParseException:
        # Input is not a phone number in the configured region
        return None
def ldap_create_user(sender, request, user, **kwargs): """Create a user in LDAP upon registration through allauth.""" # Check if creation on registration is activated if not get_site_preferences()["ldap__user_create_on_register"]: return # Build attributes attrs = { attr: getattr(user, field) for field, attr in settings.AUTH_LDAP_USER_ATTR_MAP.items() } # Build DN for new object rdn_fields = get_site_preferences()["ldap__user_create_rdn_fields"] base_dn = settings.AUTH_LDAP_USER_SEARCH.base_dn rdn = "+".join( [f"{rdn_field}={attrs[rdn_field][0]}" for rdn_field in rdn_fields]) dn = f"{rdn},{base_dn}"
def ldap_sync_user_on_login(sender, user, ldap_user, **kwargs):
    """Synchronise Person meta-data and groups from ldap_user on User update."""
    # Check if sync on login is activated
    if not get_site_preferences()["ldap__person_sync_on_login"]:
        return

    Person = apps.get_model("core", "Person")

    # BUG FIX: person was only bound inside the enable_sync branch; with
    # enable_sync off but group sync (or the final save) on, the code below
    # raised NameError. Track it explicitly and guard on it.
    person = None

    if get_site_preferences()["ldap__enable_sync"]:
        try:
            with transaction.atomic():
                person = ldap_sync_from_user(user, ldap_user.dn, ldap_user.attrs.data)
        except Person.DoesNotExist:
            # logger.warn is deprecated in favour of logger.warning
            logger.warning(f"No matching person for user {user.username}")
            return
        except Person.MultipleObjectsReturned:
            logger.error(
                f"More than one matching person for user {user.username}")
            return
        except (DataError, IntegrityError, KeyError, ValueError) as e:
            logger.error(
                f"Data error while synchronising user {user.username}:\n{e}")
            return

    if person is not None and get_site_preferences()["ldap__enable_group_sync"]:
        # Get groups from LDAP
        groups = ldap_user._get_groups()
        group_infos = list(groups._get_group_infos())
        group_objects = ldap_sync_from_groups(group_infos)

        # Replace linked groups of logged-in user completely
        person.member_of.set(group_objects)
        logger.info(f"Replaced group memberships of {person}")

    if person is not None:
        try:
            person.save()
        except Exception as e:
            # Exceptions here are logged only because the synchronisation is optional
            # FIXME throw warning to user instead
            logger.error(f"Could not save person {person}:\n{e}")
def feedback(request):
    """Render the feedback form and process submissions.

    A valid submission registers an Activity for the user and mails the
    feedback to the configured recipient.
    """
    if request.method == "POST":
        form = FeedbackForm(request.POST)
        if form.is_valid():
            # Read out form data
            design_rating = form.cleaned_data["design_rating"]
            performance_rating = form.cleaned_data["performance_rating"]
            usability_rating = form.cleaned_data["usability_rating"]
            overall_rating = form.cleaned_data["overall_rating"]
            more = form.cleaned_data["more"]
            ideas = form.cleaned_data["ideas"]
            apps = form.cleaned_data["apps"]

            # Register activity
            # BUG FIX: the description was _(f"…{overall_rating}…"); an
            # f-string is interpolated before gettext sees it, so the msgid
            # could never match a translation catalogue. Translate the
            # template first, then interpolate.
            Activity.objects.create(
                title=_("You submitted feedback."),
                description=_(
                    "You rated AlekSIS with {} out of 5 stars."
                ).format(overall_rating),
                app="Feedback",
                user=request.user.person,
            )

            # Send mail to the configured feedback recipient
            context = {
                "design_rating": design_rating,
                "performance_rating": performance_rating,
                "usability_rating": usability_rating,
                "overall_rating": overall_rating,
                "more": more,
                "apps": apps,
                "ideas": ideas,
                "user": request.user,
            }
            send_templated_mail(
                template_name="feedback",
                from_email=request.user.person.mail_sender_via,
                headers={
                    "Reply-To": request.user.person.mail_sender,
                    "Sender": request.user.person.mail_sender,
                },
                recipient_list=[
                    get_site_preferences()["hjelp__feedback_recipient"]
                ],
                context=context,
            )
            return render(request, "hjelp/feedback_submitted.html")
    else:
        form = FeedbackForm()
    return render(request, "hjelp/feedback.html", {"form": form})
def ldap_change_password(request, user, **kwargs):
    """Propagate a Django/allauth password change to LDAP.

    Depending on configuration, the change is performed with the user's own
    (old) credentials, with dedicated admin credentials, or with the regular
    LDAP connection credentials.
    """
    enable_password_change = get_site_preferences()["ldap__enable_password_change"]
    admin_password_change = get_site_preferences()["ldap__admin_password_change"]
    admin_dn = get_site_preferences()["ldap__admin_dn"]
    admin_password = get_site_preferences()["ldap__admin_password"]

    if not enable_password_change:
        # Do nothing if password change in LDAP is disabled
        return

    if not hasattr(user, "ldap_user"):
        # Add ldap_user relation to user if not available yet
        # This can happen on password reset, when the user is acted upon
        # but was never logged-in
        from django_auth_ldap.backend import LDAPBackend, _LDAPUser  # noqa

        user.ldap_user = _LDAPUser(LDAPBackend(), user=user)

    # Get old and new password from submitted form
    # We rely on allauth already having validated the form before emitting the signal
    old = request.POST.get("oldpassword", None)
    new = request.POST["password1"]

    # Determine as which user to make the password change
    if old and not admin_password_change:
        # If we are changing a password as user, use their credentials
        # except if the preference mandates always using admin credentials
        bind_dn, password = user.ldap_user.dn, old
    elif admin_dn:
        # In all other cases, use admin credentials if available
        bind_dn, password = admin_dn, admin_password
    else:
        # If not available, try using the regular LDAP auth credentials
        bind_dn, password = None, None

    # NOTE(review): on password reset, old may be None here; passwd_s then
    # relies on the bound identity being allowed to set the new password.
    with TemporaryBind(user.ldap_user, bind_dn, password) as conn:
        conn.passwd_s(user.ldap_user.dn, old, new)
def import_rooms(
        validity_range: ValidityRange) -> Dict[int, chronos_models.Room]:
    """Import rooms.

    Creates or updates a chronos Room per Untis room and returns a mapping
    from Untis room id to the Room object.
    """
    ref = {}

    # Get rooms
    rooms = run_default_filter(validity_range, mysql_models.Room.objects)
    for room in tqdm(rooms, desc="Import rooms", **TQDM_DEFAULTS):
        if not room.name:
            raise RuntimeError(
                "Room ID {}: Cannot import room without short name.".format(
                    room.room_id))

        # Build values, truncated to the model field lengths
        short_name = room.name[:10]
        name = room.longname[:30] if room.longname else short_name
        import_ref = room.room_id

        logger.info("Import room {} …".format(short_name))

        new_room, created = chronos_models.Room.objects.get_or_create(
            short_name=short_name,
            defaults={"name": name, "import_ref_untis": import_ref},
        )
        if created:
            logger.info(" New room created")

        # Update fields that changed in Untis since the last import
        changed = False
        update_name = get_site_preferences()["untis_mysql__update_rooms_name"]
        if update_name and new_room.name != name:
            new_room.name = name
            changed = True
            logger.info(" Name updated")
        if new_room.import_ref_untis != import_ref:
            new_room.import_ref_untis = import_ref
            changed = True
            logger.info(" Import reference updated")
        if changed:
            new_room.save()

        ref[import_ref] = new_room

    return ref
def ask_faq(request):
    """Render the FAQ question form and mail submitted questions."""
    if request.method == "POST":
        form = FAQForm(request.POST)
        if form.is_valid():
            question = form.cleaned_data["question"]

            # Register activity
            Activity(
                title=_("You have submitted a question."),
                description=question,
                app="Hjelp",
                user=request.user.person,
            ).save()

            # Send mail to the configured FAQ recipient
            context = {
                "question": question,
                "user": request.user,
            }
            send_templated_mail(
                template_name="faq",
                from_email=request.user.person.mail_sender_via,
                headers={
                    "Reply-To": request.user.person.mail_sender,
                    "Sender": request.user.person.mail_sender,
                },
                recipient_list=[get_site_preferences()["hjelp__faq_recipient"]],
                context=context,
            )
            return render(request, "hjelp/question_submitted.html")
    else:
        form = FAQForm()
    return render(request, "hjelp/ask.html", {"form": form})
def test_parse_date_iso():
    """ISO dates parse without any configured date languages."""
    get_site_preferences()["csv_import__date_languages"] = ""
    parsed = parse_date("2020-11-12")
    assert parsed == date(2020, 11, 12)
def substitutions(
    request: HttpRequest,
    year: Optional[int] = None,
    month: Optional[int] = None,
    day: Optional[int] = None,
    is_print: bool = False,
) -> HttpResponse:
    """View all substitutions on a specified day."""
    context = {}

    # Resolve the requested day to the next relevant (school) day
    if day:
        wanted_day = timezone.datetime(year=year, month=month, day=day).date()
        wanted_day = TimePeriod.get_next_relevant_day(wanted_day)
    else:
        wanted_day = TimePeriod.get_next_relevant_day(timezone.now().date(),
                                                      datetime.now().time())

    day_number = get_site_preferences()["chronos__substitutions_print_number_of_days"]

    # The print view covers several consecutive relevant days
    day_contexts = {}
    if is_print:
        next_day = wanted_day
        for i in range(day_number):
            day_contexts[next_day] = {"day": next_day}
            next_day = TimePeriod.get_next_relevant_day(next_day + timedelta(days=1))
    else:
        day_contexts = {wanted_day: {"day": wanted_day}}

    for day in day_contexts:
        subs = build_substitutions_list(day)
        day_contexts[day]["substitutions"] = subs

        day_contexts[day]["announcements"] = (
            Announcement.for_timetables().on_date(day).filter(
                show_in_timetables=True))

        if get_site_preferences()["chronos__substitutions_show_header_box"]:
            # Header box: absent/affected teachers and groups for the day
            subs = LessonSubstitution.objects.on_day(day).order_by(
                "lesson_period__lesson__groups", "lesson_period__period")
            absences = Absence.objects.on_day(day)
            day_contexts[day]["absent_teachers"] = absences.absent_teachers()
            day_contexts[day]["absent_groups"] = absences.absent_groups()
            day_contexts[day]["affected_teachers"] = subs.affected_teachers()
            affected_groups = subs.affected_groups()
            if get_site_preferences()["chronos__affected_groups_parent_groups"]:
                # Show parent groups in place of groups that have them
                groups_with_parent_groups = affected_groups.filter(
                    parent_groups__isnull=False)
                groups_without_parent_groups = affected_groups.filter(
                    parent_groups__isnull=True)
                affected_groups = Group.objects.filter(
                    Q(child_groups__pk__in=groups_with_parent_groups.
                      values_list("pk", flat=True))
                    | Q(pk__in=groups_without_parent_groups.values_list(
                        "pk", flat=True))).distinct()
            day_contexts[day]["affected_groups"] = affected_groups

    if not is_print:
        context = day_contexts[wanted_day]
        context["datepicker"] = {
            "date": date_unix(wanted_day),
            "dest": reverse("substitutions"),
        }
        context["url_prev"], context[
            "url_next"] = TimePeriod.get_prev_next_by_day(
                wanted_day, "substitutions_by_date")

        return render(request, "chronos/substitutions.html", context)
    else:
        context["days"] = day_contexts

        return render_pdf(request, "chronos/substitutions_print.html", context)
def report_issue(request):
    """Render the issue report form and mail submitted reports."""
    if request.method == "POST":
        form = IssueForm(request.POST)
        if form.is_valid():
            # Read out form data
            category_1 = str(form.cleaned_data["category_1"])
            category_2 = str(form.cleaned_data["category_2"])
            category_3 = str(form.cleaned_data["category_3"])
            free_text = form.cleaned_data["free_text"]
            short_description = form.cleaned_data["short_description"]
            long_description = form.cleaned_data["long_description"]

            # All category parts in display order; reused below instead of
            # rebuilding the same list and arrow string a second time.
            categories = [category_1, category_2, category_3, free_text]
            desc_categories = add_arrows(categories)

            # Register activity
            act = Activity(
                title=_("You reported a problem."),
                description=f"{desc_categories} | {short_description}",
                app="Hjelp",
                user=request.user.person,
            )
            act.save()

            # Send mail to the configured issue report recipient
            context = {
                "categories": desc_categories,
                "categories_single": (
                    element for element in categories
                    if element and element != "None"
                ),
                "short_description": short_description,
                "long_description": long_description,
                "user": request.user,
            }
            send_templated_mail(
                template_name="rebus",
                from_email=request.user.person.mail_sender_via,
                headers={
                    "Reply-To": request.user.person.mail_sender,
                    "Sender": request.user.person.mail_sender,
                },
                recipient_list=[
                    get_site_preferences()["hjelp__issue_report_recipient"]
                ],
                context=context,
            )
            return render(request, "hjelp/issue_report_submitted.html")
    else:
        form = IssueForm()
    return render(request, "hjelp/issue_report.html", {"form": form})
def import_classes(
    validity_range: ValidityRange,
    teachers_ref: Dict[int, core_models.Person]) -> Dict[int, core_models.Group]:
    """Import classes.

    Creates or updates a core Group per Untis class in the validity range
    and returns a mapping from Untis class id to the Group.
    """
    classes_ref = {}

    # Get classes
    course_classes = run_default_filter(validity_range,
                                        mysql_models.Class.objects,
                                        filter_term=True)

    for class_ in tqdm(course_classes, desc="Import classes", **TQDM_DEFAULTS):
        # Check if needed data are provided
        if not class_.name:
            # BUG FIX: the message previously interpolated class_.teacher_id;
            # a "Class ID" message must report the class's own id.
            raise RuntimeError(
                "Class ID {}: Cannot import class without short name.".format(
                    class_.class_id))

        # Build values
        short_name = class_.name[:16]
        name = class_.longname if class_.longname else short_name
        teacher_ids = untis_split_first(class_.teacherids, int)
        owners = [teachers_ref[t_id] for t_id in teacher_ids]
        import_ref = class_.class_id

        logger.info("Import class {} (as group) …".format(short_name))

        try:
            new_group = core_models.Group.objects.get(
                short_name__iexact=short_name,
                school_term__in=[None, validity_range.school_term],
            )
        except core_models.Group.DoesNotExist:
            new_group = core_models.Group.objects.create(
                short_name=short_name,
                name=name,
                import_ref_untis=import_ref,
                school_term=validity_range.school_term,
            )
            logger.info(" New group created")

        # Update fields that changed in Untis since the last import
        changed = False
        if (get_site_preferences()["untis_mysql__update_groups_short_name"]
                and new_group.short_name != short_name):
            new_group.short_name = short_name
            changed = True
            logger.info(" Short name updated")
        if (get_site_preferences()["untis_mysql__update_groups_name"]
                and new_group.name != name):
            new_group.name = name
            changed = True
            logger.info(" Name updated")
        if new_group.import_ref_untis != import_ref:
            new_group.import_ref_untis = import_ref
            changed = True
            logger.info(" Import reference updated")
        if new_group.school_term != validity_range.school_term:
            new_group.school_term = validity_range.school_term
            changed = True
            logger.info(" School term updated")
        if changed:
            new_group.save()

        # Either replace or extend the group owners, per preference
        if get_site_preferences()["untis_mysql__overwrite_group_owners"]:
            new_group.owners.set(owners)
            logger.info(" Group owners set")
        else:
            new_group.owners.add(*owners)
            logger.info(" Group owners added")

        classes_ref[class_.class_id] = new_group

    return classes_ref
def save(self, carry_over=True, *args, **kwargs):
    """Save the record, optionally carrying its data over first."""
    has_content = self.topic or self.homework or self.group_note
    if (carry_over and has_content and self.lesson_period
            and get_site_preferences()["alsijil__carry_over"]):
        self._carry_over_data()
    super().save(*args, **kwargs)
def mass_ldap_import():
    """Add utility code for mass import from ldap."""
    from django_auth_ldap.backend import LDAPBackend, _LDAPUser  # noqa

    Person = apps.get_model("core", "Person")

    # Abuse pre-configured search object as general LDAP interface
    backend = LDAPBackend()
    connection = _LDAPUser(backend, "").connection

    # Synchronise all groups first
    if get_site_preferences()["ldap__enable_group_sync"]:
        ldap_groups = backend.settings.GROUP_SEARCH.execute(connection)
        group_objects = ldap_sync_from_groups(ldap_groups)

        # Create lookup table as cache for later code
        group_dict = {obj.ldap_dn: obj for obj in group_objects}

    # Guess LDAP username field from user filter
    uid_field = re.search(
        r"([a-zA-Z]+)=%\(user\)s",
        backend.settings.USER_SEARCH.searches[0].filterstr).group(1)

    # Synchronise user data for all found users
    ldap_users = backend.settings.USER_SEARCH.execute(connection,
                                                     {"user": "******"},
                                                     escape=False)
    for dn, attrs in tqdm(ldap_users, desc="Sync. user infos", **TQDM_DEFAULTS):
        uid = attrs[uid_field][0]

        # Prepare an empty LDAPUser object with the target username
        ldap_user = _LDAPUser(backend, username=uid)

        # Get existing or new User object and pre-populate
        user, created = backend.get_or_build_user(uid, ldap_user)
        ldap_user._user = user
        ldap_user._attrs = attrs
        ldap_user._dn = dn
        ldap_user._populate_user_from_attributes()
        user.save()

        try:
            with transaction.atomic():
                person = ldap_sync_from_user(user, dn, attrs)
        except Person.DoesNotExist:
            logger.warn(f"No matching person for user {user.username}")
            continue
        except Person.MultipleObjectsReturned:
            logger.error(
                f"More than one matching person for user {user.username}")
            continue
        except (DataError, IntegrityError, KeyError, ValueError) as e:
            logger.error(
                f"Data error while synchronising user {user.username}:\n{e}")
            continue
        else:
            logger.info(f"Successfully imported user {uid}")

    # Synchronise group memberships now
    if get_site_preferences()["ldap__enable_group_sync"]:
        member_attr = getattr(backend.settings.GROUP_TYPE,
                              "member_attr", "memberUid")
        owner_attr = get_site_preferences()["ldap__group_sync_owner_attr"]

        for ldap_group in tqdm(
                ldap_groups,
                desc="Sync. group members",
                total=len(ldap_groups),
                **TQDM_DEFAULTS,
        ):
            dn, attrs = ldap_group

            if dn not in group_dict:
                logger.warning(
                    f"Skip {dn} because there are no groups with this dn.")
                continue

            group = group_dict[dn]

            ldap_members = [_.lower() for _ in attrs[member_attr]
                            ] if member_attr in attrs else []

            # memberUid holds usernames; other member attrs hold DNs
            if member_attr.lower() == "memberuid":
                members = Person.objects.filter(
                    user__username__in=ldap_members)
            else:
                members = Person.objects.filter(ldap_dn__in=ldap_members)

            if get_site_preferences()["ldap__group_sync_owner_attr"]:
                ldap_owners = [_.lower() for _ in attrs[owner_attr]
                               ] if owner_attr in attrs else []
                # NOTE(review): if the owner attr type preference is neither
                # "uid" nor "dn", owners stays unbound and the set() below
                # raises NameError — presumably the preference is a choice
                # restricted to these two values; confirm.
                if get_site_preferences(
                )["ldap__group_sync_owner_attr_type"] == "uid":
                    owners = Person.objects.filter(
                        user__username__in=ldap_owners)
                elif get_site_preferences(
                )["ldap__group_sync_owner_attr_type"] == "dn":
                    owners = Person.objects.filter(ldap_dn__in=ldap_owners)

            group.members.set(members)
            if get_site_preferences()["ldap__group_sync_owner_attr"]:
                group.owners.set(owners)
            group.save()
            logger.info(f"Set group members of group {group}")

    # Synchronise primary groups
    all_persons = set(Person.objects.all())
    for person in tqdm(all_persons, desc="Sync. primary groups",
                       **TQDM_DEFAULTS):
        person.auto_select_primary_group()
    Person.objects.bulk_update(all_persons, ("primary_group", ))

    logger.info("Commiting transaction; this can take some time.")
def ldap_sync_from_groups(group_infos):
    """Synchronise group information from LDAP results to Django."""
    Group = apps.get_model("core", "Group")
    SchoolTerm = apps.get_model("core", "SchoolTerm")

    # Get current school term
    school_term = SchoolTerm.current

    # Resolve Group objects from LDAP group objects
    ldap_groups = {}
    for ldap_group in tqdm(group_infos, desc="Parsing group infos",
                           **TQDM_DEFAULTS):
        # Skip group if one of the name fields is missing
        # FIXME Throw exceptions and catch outside
        sync_field_short_name = get_site_preferences(
        )["ldap__group_sync_field_short_name"]
        if sync_field_short_name not in ldap_group[1]:
            logger.error(
                f"LDAP group with DN {ldap_group[0]} does not have field {sync_field_short_name}"
            )
            continue

        sync_field_name = get_site_preferences()["ldap__group_sync_field_name"]
        if sync_field_name not in ldap_group[1]:
            logger.error(
                f"LDAP group with DN {ldap_group[0]} does not have field {sync_field_name}"
            )
            continue

        # Apply regex replace from config
        short_name = apply_templates(
            ldap_group[1][get_site_preferences()
                          ["ldap__group_sync_field_short_name"]][0],
            get_site_preferences()["ldap__group_sync_field_short_name_re"],
            get_site_preferences()
            ["ldap__group_sync_field_short_name_replace"],
        )
        name = apply_templates(
            ldap_group[1][get_site_preferences()
                          ["ldap__group_sync_field_name"]][0],
            get_site_preferences()["ldap__group_sync_field_name_re"],
            get_site_preferences()["ldap__group_sync_field_name_replace"],
        )

        # Shorten names to fit into model fields
        short_name = short_name[:Group._meta.get_field("short_name").max_length]
        name = name[:Group._meta.get_field("name").max_length]

        # Keyed by lower-cased DN for later case-insensitive lookup
        ldap_groups[ldap_group[0].lower()] = {
            "short_name": short_name,
            "name": name
        }

    all_dns = set(ldap_groups.keys())

    # First, update all existing groups with known DNs
    existing = Group.objects.filter(
        ldap_dn__in=all_dns, school_term=school_term).select_related(None)
    existing_dns = set([v.ldap_dn for v in existing])
    for obj in existing:
        obj.name = ldap_groups[obj.ldap_dn]["name"]
        obj.short_name = ldap_groups[obj.ldap_dn]["short_name"]

    logger.info(f"Updating {len(existing)} Django groups")
    try:
        Group.objects.bulk_update(existing, ("name", "short_name"))
    except IntegrityError as e:
        logger.error(
            f"Integrity error while trying to import LDAP groups:\n{e}")
    else:
        logger.debug(f"Updated {len(existing)} Django groups")

    # Second, create all groups with unknown DNs
    nonexisting_dns = all_dns - existing_dns
    nonexisting = []
    for dn in nonexisting_dns:
        nonexisting.append(
            Group(
                ldap_dn=dn,
                name=ldap_groups[dn]["name"],
                short_name=ldap_groups[dn]["short_name"],
                school_term=school_term,
            ))

    logger.info(f"Creating {len(nonexisting)} Django groups")
    try:
        Group.objects.bulk_create(nonexisting)
    except IntegrityError as e:
        logger.error(
            f"Integrity error while trying to import LDAP groups:\n{e}")
    else:
        logger.debug(f"Created {len(nonexisting)} Django groups")

    # Return all groups ever touched
    return set(existing) | set(nonexisting)
def ldap_sync_from_user(user, dn, attrs):
    """Synchronise person information from a User object (with ldap_user) to Django."""
    Person = apps.get_model("core", "Person")

    # Check if there is an existing person connected to the user.
    if Person.objects.filter(user__username=user.username).exists():
        person = user.person
        created = False
        logger.info(
            f"Existing person {person} already linked to user {user.username}")
    # FIXME Also account for existing person with DN here
    else:
        # Build filter criteria depending on config
        matches = {}
        defaults = {}

        # Match on all fields selected in preferences
        fields_map = {f.name: f for f in Person.syncable_fields()}
        for field_name in get_site_preferences()["ldap__matching_fields"]:
            try:
                value = get_ldap_value_for_field(Person,
                                                 fields_map[field_name],
                                                 attrs, dn)
            except KeyError:
                # Field is not set in LDAP, match on remaining fields
                continue

            matches[field_name] = value

        if not matches:
            raise KeyError(f"No matching fields found for {dn}")

        # Pre-fill all mandatory non-matching fields from User object
        for missing_key in ("first_name", "last_name", "email"):
            if missing_key not in matches:
                defaults[missing_key] = getattr(user, missing_key)

        if get_site_preferences()["ldap__create_missing_persons"]:
            person, created = Person.objects.get_or_create(**matches,
                                                           defaults=defaults)
        else:
            person = Person.objects.get(**matches)
            created = False

        person.user = user
        status = "New" if created else "Existing"
        logger.info(f"{status} person {person} linked to user {user.username}")

    person.ldap_dn = dn.lower()
    # For existing persons, refresh the basic fields from the User object
    if not created:
        person.first_name = user.first_name
        person.last_name = user.last_name
        person.email = user.email

    # Synchronise additional fields if enabled
    for field in Person.syncable_fields():
        try:
            value = get_ldap_value_for_field(Person, field, attrs, dn, person)
        except (AttributeError, KeyError):
            # A syncable field is not configured to sync or missing in LDAP
            continue

        setattr(person, field.name, value)
        logger.debug(f"Field {field.name} set to {value} for {person}")

    person.save()
    return person
def import_subjects(
        validity_range: ValidityRange) -> Dict[int, chronos_models.Subject]:
    """Import subjects."""
    subjects_ref = {}

    # Get subjects
    subjects = run_default_filter(validity_range,
                                  mysql_models.Subjects.objects,
                                  filter_term=False)

    for subject in tqdm(subjects, desc="Import subjects", **TQDM_DEFAULTS):
        # Check if needed data are provided
        if not subject.name:
            raise RuntimeError(
                "Subject ID {}: Cannot import subject without short name.".
                format(subject.subject_id))

        # Build values (truncated to model field lengths)
        short_name = subject.name[:10]
        name = subject.longname if subject.longname else short_name
        colour_fg = untis_colour_to_hex(subject.forecolor)
        colour_bg = untis_colour_to_hex(subject.backcolor)
        import_ref = subject.subject_id

        logger.info("Import subject {} …".format(short_name))

        # Get or create subject object by short name
        new_subject, created = chronos_models.Subject.objects.get_or_create(
            short_name=short_name,
            defaults={
                "name": name,
                "colour_fg": colour_fg,
                "colour_bg": colour_bg,
                "import_ref_untis": import_ref,
            },
        )
        if created:
            logger.info(" New subject created")

        # Force sync
        changed = False
        if get_site_preferences()["untis_mysql__update_subjects"] and (
                new_subject.name != name or new_subject.colour_fg != colour_fg
                or new_subject.colour_bg != colour_bg):
            new_subject.name = name
            # Recomputes the same values as colour_fg/colour_bg above
            new_subject.colour_fg = untis_colour_to_hex(subject.forecolor)
            new_subject.colour_bg = untis_colour_to_hex(subject.backcolor)
            changed = True
            logger.info(" Name, foreground and background colour updated")
        if new_subject.import_ref_untis != import_ref:
            new_subject.import_ref_untis = import_ref
            changed = True
            logger.info(" Import reference updated")
        if changed:
            new_subject.save()

        subjects_ref[import_ref] = new_subject

    return subjects_ref
def import_supervision_areas(
        validity_range: ValidityRange, breaks_ref,
        teachers_ref) -> Dict[int, chronos_models.SupervisionArea]:
    """Import supervision areas.

    Creates or updates a SupervisionArea per Untis corridor, imports its
    break supervisions, and deletes supervisions no longer present in
    Untis. Returns a mapping from Untis corridor id to a dict with the
    area and its supervisions by weekday/period.
    """
    ref = {}

    # Get supervision areas
    areas = run_default_filter(validity_range,
                               mysql_models.Corridor.objects,
                               filter_term=False)

    for area in tqdm(areas, desc="Import supervision areas", **TQDM_DEFAULTS):
        if not area.name:
            raise RuntimeError(
                "Supervision area ID {}: Cannot import supervision area without short name."
                .format(area.corridor_id))

        # Build values (truncated to model field lengths)
        short_name = area.name[:10]
        name = area.longname[:50] if area.longname else short_name
        colour_fg = untis_colour_to_hex(area.forecolor)
        colour_bg = untis_colour_to_hex(area.backcolor)
        import_ref = area.corridor_id

        logger.info("Import supervision area {} …".format(short_name))

        new_area, created = chronos_models.SupervisionArea.objects.get_or_create(
            short_name=short_name,
            defaults={
                "name": name,
                "colour_fg": colour_fg,
                "colour_bg": colour_bg,
                "import_ref_untis": import_ref,
            },
        )
        if created:
            logger.info(" New supervision area created")

        changed = False
        # BUG FIX: the original compared new_area.name != new_area.name
        # (always False), so name/colour updates were never applied.
        if get_site_preferences()["untis_mysql__update_supervision_areas"] and (
                new_area.name != name or new_area.colour_fg != colour_fg
                or new_area.colour_bg != colour_bg):
            new_area.name = name
            new_area.colour_fg = colour_fg
            new_area.colour_bg = colour_bg
            changed = True
            logger.info(" Name, foreground and background colour updated")
        if new_area.import_ref_untis != import_ref:
            new_area.import_ref_untis = import_ref
            changed = True
            logger.info(" Import reference updated")
        if changed:
            new_area.save()

        logger.info(" Import supervisions for this area")

        # Parse raw data
        raw_supervisions = connect_untis_fields(area, "breaksupervision", 16)

        supervisions_ref = {}
        for raw_supervision in raw_supervisions:
            # Split more and get teacher id
            raw_supervision_2 = raw_supervision.split("~")
            teacher_id = int(raw_supervision_2[1])
            term_id = int(raw_supervision_2[0])

            # Only import supervisions belonging to this validity range
            if term_id != validity_range.import_ref_untis:
                continue

            if teacher_id in teachers_ref:
                # Get weekday, period after break and teacher
                weekday = int(raw_supervision_2[2]) - 1
                period_after_break = int(raw_supervision_2[3])
                teacher = teachers_ref[teacher_id]

                logger.info(
                    "Import supervision on weekday {} before the {}. period (teacher {})"
                    .format(weekday, period_after_break, teacher))

                # Get or create
                new_supervision, created = new_area.supervisions.get_or_create(
                    validity=validity_range,
                    break_item=breaks_ref[weekday][period_after_break],
                    teacher=teacher,
                )
                if created:
                    logger.info(" New supervision created")

                # Save supervisions in reference dict
                supervisions_ref.setdefault(weekday, {}).setdefault(
                    period_after_break, []).append(new_supervision)

        # Delete supervisions that are no longer present in Untis
        for supervision in new_area.supervisions.filter(
                validity=validity_range):
            weekday = supervision.break_item.weekday
            period_after_break = supervision.break_item.before_period_number
            still_present = supervision in supervisions_ref.get(
                weekday, {}).get(period_after_break, [])
            if not still_present:
                supervision.delete()
                logger.info(" Supervision {} deleted".format(supervision))

        ref[import_ref] = {"area": new_area, "supervisions": supervisions_ref}

    return ref
def test_parse_date_en():
    """Slash dates parse month-first by default, but not with German set."""
    prefs = get_site_preferences()

    prefs["csv_import__date_languages"] = ""
    assert parse_date("11/12/2020") == date(2020, 11, 12)

    prefs["csv_import__date_languages"] = "de"
    assert parse_date("11/12/2020") != date(2020, 11, 12)
def import_lessons(
    validity_range: ValidityRange,
    time_periods_ref,
    rooms_ref,
    subjects_ref,
    teachers_ref,
    classes_ref,
):
    """Import lessons from the UNTIS database into Chronos.

    For every UNTIS lesson with timetable data, each lesson element (course
    part) is imported as a Chronos Lesson with its groups, teachers and
    lesson periods; finally, lessons no longer present in UNTIS are deleted.

    :param validity_range: validity range to import lessons for
    :param time_periods_ref: nested mapping ``weekday -> hour -> time period``
    :param rooms_ref: mapping of UNTIS room id to room
    :param subjects_ref: mapping of UNTIS subject id to subject
    :param teachers_ref: mapping of UNTIS teacher id to person
    :param classes_ref: mapping of UNTIS class id to group
    """
    # Lessons
    lessons = run_default_filter(validity_range, mysql_models.Lesson.objects)

    # UNTIS lesson ids seen in this run; used by the deletion pass at the end
    existing_lessons = []
    for lesson in tqdm(lessons, desc="Import lessons", **TQDM_DEFAULTS):
        lesson_id = lesson.lesson_id
        logger.info(_("Import lesson {}").format(lesson_id))

        # Lessons without timetable data cannot be placed and are skipped.
        # NOTE(review): .format(lesson_id) is a no-op here — the message has
        # no placeholder; either add one or drop the call.
        if not lesson.lesson_tt:
            logger.warning(_(" Skip because missing times").format(lesson_id))
            continue

        existing_lessons.append(lesson_id)

        # Split data (,)
        raw_lesson_data = connect_untis_fields(lesson, "lessonelement", 10)
        raw_time_data = lesson.lesson_tt.split(",")

        raw_time_data_2 = []
        for el in raw_time_data:
            # Split data (~)
            raw_time_data_2.append(el.split("~"))

        # Get time periods and rooms for each scheduled slot
        time_periods = []
        rooms_per_periods = []
        for el in raw_time_data_2:
            # UNTIS weekdays are 1-based; ours are 0-based
            weekday = int(el[1]) - 1
            hour = int(el[2])
            room_ids = untis_split_third(el[3], conv=int)

            # Get rooms
            rooms = []
            for room_id in room_ids:
                r = rooms_ref[room_id]
                rooms.append(r)

            # Get time period
            time_period = time_periods_ref[weekday][hour]
            time_periods.append(time_period)
            rooms_per_periods.append(rooms)

        # Split data more (~)
        raw_lesson_data_2 = []
        for el in raw_lesson_data:
            raw_lesson_data_2.append(el.split("~"))

        # All part lessons (courses); i is the lesson element index
        for i, el in enumerate(raw_lesson_data_2):
            logger.info(" Lesson part {}".format(i))

            # Get plain ids
            teacher_id = int(el[0])
            subject_id = int(el[2])
            class_ids = untis_split_third(el[17], conv=int)

            # Get teacher; 0 means no teacher assigned
            if teacher_id != 0:
                teacher = teachers_ref[teacher_id]
            else:
                teacher = None
            teachers = [teacher] if teacher else []

            # Get subject; a part without subject cannot be imported
            if subject_id != 0:
                subject = subjects_ref[subject_id]
            else:
                logger.warning(_(" Skip because missing subject"))
                continue

            # Get classes
            course_classes = []
            for class_id in class_ids:
                c = classes_ref[class_id]
                course_classes.append(c)

            if get_site_preferences()["untis_mysql__use_course_groups"]:
                # Negative import_ref denotes a course group
                group_import_ref = -int("{}{}".format(lesson_id, i))

                # Search by parent groups and subject, limited to groups of
                # this school term or without a term
                qs = core_models.Group.objects.filter(
                    parent_groups__in=[c.id for c in course_classes],
                    subject_id=subject.id,
                ).filter(
                    Q(school_term__isnull=True) | Q(school_term=validity_range.school_term)
                )

                # Check if found groups match: the parent group sets must be
                # exactly equal, not just overlapping
                match = False
                for found_group in qs:
                    if compare_m2m(course_classes, found_group.parent_groups.all()):
                        match = True
                        course_group = found_group
                        logger.info(
                            " Course group found by searching by parent groups and subject"
                        )

                changed = False
                if not match:
                    # No matching group found
                    # Build names and refs for course groups
                    group_short_name = "{}-{}".format(
                        "".join([c.short_name for c in course_classes]),
                        subject.short_name,
                    )
                    group_name = "{}: {}".format(
                        ", ".join([c.short_name for c in course_classes]),
                        subject.short_name,
                    )

                    # Get or create course group
                    course_group, created = core_models.Group.objects.get_or_create(
                        short_name=group_short_name, defaults={"name": group_name}
                    )

                    # Log
                    if created:
                        logger.info(" Course group created")

                    # Update parent groups
                    course_group.parent_groups.set(course_classes)
                    logger.info(" Parent groups set")

                    # Update name
                    if course_group.name != group_name:
                        course_group.name = group_name
                        logger.info(" Name of course group updated")
                        changed = True

                # Update owners (also for matched, pre-existing groups)
                course_group.owners.set(teachers)

                # Update import ref
                if course_group.import_ref_untis != group_import_ref:
                    course_group.import_ref_untis = group_import_ref
                    logger.info(" Import reference of course group updated")
                    changed = True

                if course_group.subject != subject:
                    course_group.subject = subject
                    logger.info(" Subject reference of course group updated")
                    changed = True

                if course_group.school_term != validity_range.school_term:
                    course_group.school_term = validity_range.school_term
                    logger.info(" School term reference of course group updated")
                    changed = True

                if changed:
                    course_group.save()

                groups = [course_group]
            else:
                # Without course groups, the classes themselves are the groups
                groups = course_classes

            # Get old lesson, identified by UNTIS lesson id and element index
            old_lesson_qs = chronos_models.Lesson.objects.filter(
                lesson_id_untis=lesson_id, element_id_untis=i, validity=validity_range
            )

            if old_lesson_qs.exists():
                # Update existing lesson
                logger.info(" Existing lesson found")

                old_lesson = old_lesson_qs[0]
                if old_lesson.subject != subject:
                    old_lesson.subject = subject
                    old_lesson.save()
                    logger.info(" Subject updated")
                lesson = old_lesson
            else:
                # Create new lesson
                lesson = chronos_models.Lesson.objects.create(
                    subject=subject,
                    lesson_id_untis=lesson_id,
                    element_id_untis=i,
                    validity=validity_range,
                )
                logger.info(" New lesson created")

            # Sync groups
            lesson.groups.set(groups)

            # Sync teachers
            lesson.teachers.set(teachers)

            # All times for this course
            old_lesson_periods_qs = chronos_models.LessonPeriod.objects.filter(lesson=lesson)

            # If length has changed, delete all lesson periods and recreate
            if old_lesson_periods_qs.count() != len(time_periods):
                old_lesson_periods_qs.delete()
                logger.info(" Lesson periods deleted")

            # Sync time periods
            for j, time_period in enumerate(time_periods):
                logger.info(" Import lesson period {}".format(time_period))

                # Get room if provided
                # NOTE(review): rooms for slot j are indexed by the lesson
                # element index i — presumably each element has its own room
                # within a slot's room list; confirm this is intended rather
                # than rooms[j].
                rooms = rooms_per_periods[j]
                if i < len(rooms):
                    room = rooms[i]
                else:
                    room = None

                # Check if an old lesson period is provided
                old_lesson_period_qs = old_lesson_periods_qs.filter(element_id_untis=j)
                if old_lesson_period_qs.exists():
                    # Update old lesson period
                    old_lesson_period = old_lesson_period_qs[0]

                    if old_lesson_period.period != time_period or old_lesson_period.room != room:
                        old_lesson_period.period = time_period
                        old_lesson_period.room = room
                        old_lesson_period.save()
                        logger.info(" Time period and room updated")
                else:
                    # Create new lesson period
                    chronos_models.LessonPeriod.objects.create(
                        lesson=lesson, period=time_period, room=room, element_id_untis=j
                    )
                    logger.info(" New time period added")

    # Delete lessons no longer present in the UNTIS data for this range
    for lesson in chronos_models.Lesson.objects.filter(validity=validity_range):
        if lesson.lesson_id_untis and lesson.lesson_id_untis not in existing_lessons:
            logger.info("Lesson {} deleted".format(lesson.id))
            # NOTE(review): the save() inside create_revision() presumably
            # records a final version carrying the comment before deletion —
            # confirm against django-reversion semantics.
            with reversion.create_revision():
                set_comment(_("Deleted by UNTIS import"))
                lesson.save()
                lesson.delete()
def import_teachers(validity_range: ValidityRange) -> Dict[int, core_models.Person]:
    """Import teachers from the UNTIS database as persons.

    :param validity_range: validity range whose teachers are imported
    :return: mapping of UNTIS teacher id to the matching person
    :raises RuntimeError: if a teacher record lacks a short name
    """
    persons_by_id = {}

    # Teachers from the UNTIS database, limited by the default filter
    source_teachers = run_default_filter(validity_range, mysql_models.Teacher.objects)
    for teacher in tqdm(source_teachers, desc="Import teachers", **TQDM_DEFAULTS):
        # The short name is the only field we cannot do without
        if not teacher.name:
            raise RuntimeError(
                "Teacher ID {}: Cannot import teacher without short name.".
                format(teacher.teacher_id)
            )

        # Build values, falling back to placeholders where UNTIS has no data
        short_name = teacher.name
        first_name = teacher.firstname or "?"
        last_name = teacher.longname or teacher.name
        import_ref = teacher.teacher_id

        logger.info("Import teacher {} (as person) …".format(short_name))

        # Match an existing person case-insensitively by short name,
        # or create a fresh one from the UNTIS data
        try:
            person = core_models.Person.objects.get(short_name__iexact=short_name)
        except core_models.Person.DoesNotExist:
            person = core_models.Person.objects.create(
                short_name=short_name,
                first_name=first_name,
                last_name=last_name,
                import_ref_untis=import_ref,
            )
            logger.info(" New person created")

        dirty = False

        # Sync name fields only if the corresponding preference allows it
        update_names = get_site_preferences()["untis_mysql__update_persons_name"]
        if update_names and (person.first_name, person.last_name) != (first_name, last_name):
            person.first_name = first_name
            person.last_name = last_name
            dirty = True
            logger.info(" First and last name updated")

        update_short = get_site_preferences()["untis_mysql__update_persons_short_name"]
        if update_short and person.short_name != short_name:
            person.short_name = short_name
            dirty = True
            logger.info(" Short name updated")

        # The import reference is always kept in sync
        if person.import_ref_untis != import_ref:
            person.import_ref_untis = import_ref
            dirty = True
            logger.info(" Import reference updated")

        if dirty:
            person.save()

        persons_by_id[teacher.teacher_id] = person

    return persons_by_id