Exemplo n.º 1
0
def image ( config, picture_id, session = False ):
	"""Fetch a picture from Lectio's GetImage endpoint.

	Args:
		config: dict with at least "school_id".
		picture_id: id of the picture to download.
		session: optional session dict from authenticate.authenticate();
			False means a new session is created first.

	Returns:
		The raw response text on success, or
		{"status" : "error", "type" : "authenticate"} when login fails.
	"""
	if session is False:
		session = authenticate.authenticate(config)

	# authenticate() signals failure by returning False; use an identity
	# check like the sentinel test above instead of equality.
	if session is False:
		return {"status" : "error", "type" : "authenticate"}

	url = "https://www.lectio.dk/lectio/%s/GetImage.aspx?pictureid=%s&fullsize=1" % ( str(config["school_id"]), str(picture_id) )

	# Insert the session information from the auth function
	cookies = {
		"lecmobile" : "0",
		"ASP.NET_SessionId" : session["ASP.NET_SessionId"],
		"LastLoginUserName" : session["LastLoginUserName"],
		"lectiogsc" : session["lectiogsc"],
		"LectioTicket" : session["LectioTicket"]
	}

	# Insert User-agent headers and the cookie information
	headers = {
		"User-Agent" : "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1665.2 Safari/537.36",
		"Content-Type" : "application/x-www-form-urlencoded",
		"Host" : "www.lectio.dk",
		"Origin" : "https://www.lectio.dk",
		"Cookie" : functions.implode(cookies, "{{index}}={{value}}", "; ")
	}

	response = proxy.session.get(url, headers=headers)

	return response.text
Exemplo n.º 2
0
def image(config, session=False):
    """Fetch the student's absence chart image from Lectio.

    Args:
        config: dict with "school_id", "student_id", "year", and
            "start_date"/"end_date" objects supporting strftime().
        session: optional session dict from authenticate.authenticate();
            False means a new session is created first.

    Returns:
        The raw response text on success, or
        {"status": "error", "type": "authenticate"} when login fails.
    """
    url = "https://www.lectio.dk/lectio/%s/fravaer_billede.aspx?elevid=%s&year=%s&startdate=%s&enddate=%s" % (
        str(config["school_id"]), str(config["student_id"]), str(
            config["year"]), config["start_date"].strftime("%d-%m-%Y"),
        config["end_date"].strftime("%d-%m-%Y"))

    if session is False:
        session = authenticate.authenticate(config)

    # authenticate() signals failure by returning False; identity check
    # matches the sentinel test above.
    if session is False:
        return {"status": "error", "type": "authenticate"}

    # Insert the session information from the auth function
    cookies = {
        "lecmobile": "0",
        "ASP.NET_SessionId": session["ASP.NET_SessionId"],
        "LastLoginUserName": session["LastLoginUserName"],
        "lectiogsc": session["lectiogsc"],
        "LectioTicket": session["LectioTicket"]
    }

    # Insert User-agent headers and the cookie information
    headers = {
        "User-Agent":
        "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1665.2 Safari/537.36",
        "Content-Type": "application/x-www-form-urlencoded",
        "Host": "www.lectio.dk",
        "Origin": "https://www.lectio.dk",
        "Cookie": functions.implode(cookies, "{{index}}={{value}}", "; ")
    }

    response = proxy.session.get(url, headers=headers)

    return response.text
Exemplo n.º 3
0
def image ( config, session = False ):
	"""Fetch the student's absence chart image from Lectio.

	Args:
		config: dict with "school_id", "student_id", "year", and
			"start_date"/"end_date" objects supporting strftime().
		session: optional session dict from authenticate.authenticate();
			False means a new session is created first.

	Returns:
		The raw response text on success, or
		{"status" : "error", "type" : "authenticate"} when login fails.
	"""
	url = "https://www.lectio.dk/lectio/%s/fravaer_billede.aspx?elevid=%s&year=%s&startdate=%s&enddate=%s" % ( str(config["school_id"]), str(config["student_id"]), str(config["year"]), config["start_date"].strftime("%d-%m-%Y"), config["end_date"].strftime("%d-%m-%Y") )

	if session is False:
		session = authenticate.authenticate(config)

	# authenticate() signals failure by returning False
	if session == False:
		return {"status" : "error", "type" : "authenticate"}

	# Insert the session information from the auth function
	cookies = {
		"lecmobile" : "0",
		"ASP.NET_SessionId" : session["ASP.NET_SessionId"],
		"LastLoginUserName" : session["LastLoginUserName"],
		"lectiogsc" : session["lectiogsc"],
		"LectioTicket" : session["LectioTicket"]
	}

	# Insert User-agent headers and the cookie information
	headers = {
		"User-Agent" : "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1665.2 Safari/537.36",
		"Content-Type" : "application/x-www-form-urlencoded",
		"Host" : "www.lectio.dk",
		"Origin" : "https://www.lectio.dk",
		"Cookie" : functions.implode(cookies, "{{index}}={{value}}", "; ")
	}

	response = proxy.session.get(url, headers=headers)

	return response.text
Exemplo n.º 4
0
def field_of_study(config, session=False):
    """Scrape a field-of-study context card from Lectio.

    Args:
        config: dict with "school_id" and "context_card_id".
        session: optional session dict from authenticate.authenticate();
            False means a new session is created first.

    Returns:
        {"status": "ok", "field_of_study": {...}} on success,
        {"status": "error", "type": "authenticate"} when login fails, or
        {"status": False, "error": "Data not found"} when the card
        markup is missing.
    """
    url = "https://www.lectio.dk/lectio/%s/contextcard/contextcard.aspx?lectiocontextcard=%s" % (
        str(config["school_id"]), str(config["context_card_id"]))

    if session is False:
        session = authenticate.authenticate(config)

    # authenticate() signals failure by returning False
    if session is False:
        return {"status": "error", "type": "authenticate"}

    # Insert the session information from the auth function
    cookies = {
        "lecmobile": "0",
        "ASP.NET_SessionId": session["ASP.NET_SessionId"],
        "LastLoginUserName": session["LastLoginUserName"],
        "lectiogsc": session["lectiogsc"],
        "LectioTicket": session["LectioTicket"]
    }

    # Insert User-agent headers and the cookie information
    headers = {
        "User-Agent":
        "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1665.2 Safari/537.36",
        "Content-Type": "application/x-www-form-urlencoded",
        "Host": "www.lectio.dk",
        "Origin": "https://www.lectio.dk",
        "Cookie": functions.implode(cookies, "{{index}}={{value}}", "; ")
    }

    response = proxy.session.get(url, headers=headers)

    html = response.text

    soup = Soup(html)

    # The card title span is only present when the card actually loaded
    if soup.find("span", attrs={"id": "ctl00_Content_cctitle"}) is None:
        return {"status": False, "error": "Data not found"}

    tables = soup.findAll("table")
    # The field-of-study id is embedded in the card's first link href
    idProg = re.compile(
        r"\/lectio\/(?P<school_id>.*)\/studieretningSe.aspx\?sid=(?P<field_of_study_id>.*)"
    )
    idGroups = idProg.match(
        soup.find(
            attrs={"id": "ctl00_Content_linksrep_ctl00_somelink"})["href"])

    return {
        "status": "ok",
        "field_of_study": {
            "name": unicode(tables[1].findAll("td")[1].text),
            "field_of_study_id":
            idGroups.group("field_of_study_id") if idGroups is not None else "",
            "school_id": str(config["school_id"]),
            "context_card_id": str(config["context_card_id"])
        }
    }
Exemplo n.º 5
0
def image(config, picture_id, session=False):
    """Download a picture from Lectio's GetImage endpoint.

    Returns the response body, or an authentication error dict when no
    session could be established.
    """
    if session is False:
        session = authenticate.authenticate(config)

    # No valid session could be created: bail out early
    if session == False:
        return {"status": "error", "type": "authenticate"}

    target = "https://www.lectio.dk/lectio/%s/GetImage.aspx?pictureid=%s&fullsize=1" % (
        str(config["school_id"]), str(picture_id))

    # Session cookies Lectio expects on authenticated requests
    session_cookies = {
        "lecmobile": "0",
        "ASP.NET_SessionId": session["ASP.NET_SessionId"],
        "LastLoginUserName": session["LastLoginUserName"],
        "lectiogsc": session["lectiogsc"],
        "LectioTicket": session["LectioTicket"]
    }

    # Browser-like headers with the serialized cookie string attached
    request_headers = {
        "User-Agent":
        "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1665.2 Safari/537.36",
        "Content-Type": "application/x-www-form-urlencoded",
        "Host": "www.lectio.dk",
        "Origin": "https://www.lectio.dk",
        "Cookie": functions.implode(session_cookies, "{{index}}={{value}}", "; ")
    }

    return proxy.session.get(target, headers=request_headers).text
Exemplo n.º 6
0
def class_teams(config):
    """Scrape the list of teams for a class from its study-plan Gantt page.

    Args:
        config: dict with "school_id" and "class_id".

    Returns:
        {"status": "ok", "teams": [...]} on success, or
        {"status": False, "error": "Data not found"} when the interval
        table is missing from the page.
    """

    url = "https://www.lectio.dk/lectio/%s/studieplan.aspx?klasseid=%s&displaytype=ganttkalender&ganttdimension=hold" % (
        str(config["school_id"]), str(config["class_id"]))

    # Sorting settings
    # NOTE(review): `settings` is never used below — presumably a leftover.
    settings = {}

    # This page is fetched without session cookies
    cookies = {}

    # Insert User-agent headers and the cookie information
    headers = {
        "User-Agent":
        "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1665.2 Safari/537.36",
        "Content-Type": "application/x-www-form-urlencoded",
        "Host": "www.lectio.dk",
        "Origin": "https://www.lectio.dk",
        "Cookie": functions.implode(cookies, "{{index}}={{value}}", "; ")
    }

    response = proxy.session.get(url, headers=headers)

    html = response.text

    soup = Soup(html)

    if soup.find("div", attrs={"id": "s_m_Content_Content_IntervalTablePnl"
                               }) is None:
        return {"status": False, "error": "Data not found"}

    teams = []
    # Link titles look like "<abbrevation> - <name>"
    subjectProg = re.compile(ur"(?P<abbrevation>.*) - (?P<name>.*)")
    # Team element id is embedded in the study-plan link href
    idProg = re.compile(
        r"\/lectio\/(?P<school_id>.*)\/studieplan.aspx\?holdelementid=(?P<team_element_id>.*)&displaytype=ugeteksttabel"
    )

    # The first three table rows are headers, hence the [3:] slice
    for row in soup.find(attrs={
            "id": "s_m_Content_Content_IntervalTablePnl"
    }).find("table").findAll("tr")[3:]:
        subjectGroups = subjectProg.match(row.find("a")["title"])
        idGroups = idProg.match(row.find("a")["href"])
        teams.append({
            "name":
            unicode(row.find("a").text.strip()),
            "team_element_id":
            idGroups.group("team_element_id") if not idGroups is None else "",
            "subject": {
                "abbrevation":
                subjectGroups.group("abbrevation")
                if not subjectGroups is None else "",
                "name":
                subjectGroups.group("name")
                if not subjectGroups is None else ""
            }
        })

    return {"status": "ok", "teams": teams}
Exemplo n.º 7
0
def team_info ( config ):
	url = "https://www.lectio.dk/lectio/%s/aktivitet/AktivitetLinks.aspx?holdelementid=%s" % ( str(config["school_id"]), str(config["team_element_id"]) )

	# Insert the session information from the auth function
	cookies = {
	}

	# Insert User-agent headers and the cookie information
	headers = {
		"User-Agent" : "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1665.2 Safari/537.36",
		"Content-Type" : "application/x-www-form-urlencoded",
		"Host" : "www.lectio.dk",
		"Origin" : "https://www.lectio.dk",
		"Cookie" : functions.implode(cookies, "{{index}}={{value}}", "; ")
	}

	response = proxy.session.get(url, headers=headers)

	html = response.text

	soup = Soup(html)

	if soup.find("div", attrs={"id" : "s_m_HeaderContent_subnavigator_generic_tr"}) is None:
		return {
			"status" : False,
			"error" : "Data not found"
		}

	#soup.find(attrs={"id" : "s_m_HeaderContent_MainTitle"}).find("div").decompose()

	nameProg = re.compile(ur"(?P<type>[.^\S]*) (?P<name>.*) - (.*)")
	nameGroups = nameProg.match(soup.find(attrs={"id" : "s_m_HeaderContent_MainTitle"}).text)
	contextCards = []
	teamType = "team" if "Holdet" in soup.find(attrs={"id" : "s_m_HeaderContent_MainTitle"}).text else "group"
	links = soup.find(attrs={"id" : "s_m_HeaderContent_subnavigator_generic_tr"}).findAll("a")
	teamIdProg = re.compile(r"\/lectio\/(?P<school_id>.*)\/DokumentOversigt.aspx\?holdid=(?P<team_id>.*)&holdelementid=(?P<team_element_id>.*)")
	if teamType == "team":
		teamIdGroups = teamIdProg.match(soup.find(text="Dokumenter").parent["href"])
	else:
		teamIdGroups = teamIdProg.match(soup.find(text="Dokumenter").parent["href"])

	if not teamIdGroups is None:
		contextCards.append("H" + teamIdGroups.group("team_id"))

	information = {
		"context_cards" : contextCards,
		"team_element_id" : str(config["team_element_id"]),
		"type" : teamType,
		"name" : nameGroups.group("name") if not nameGroups is None else "",
		"team_id" : teamIdGroups.group("team_id") if not teamIdGroups is None else "",
		"name_text" : soup.find(attrs={"id" : "s_m_HeaderContent_MainTitle"}).text
	}

	return {
		"status" : "ok",
		"information" : information
	}
Exemplo n.º 8
0
def field_of_study(config, session=False):
    """Scrape the student's field-of-study choice page from Lectio.

    Args:
        config: dict with "school_id" and "student_id".
        session: optional session dict from authenticate.authenticate();
            False means a new session is created first.

    Returns:
        {"status": "ok", "information": {...}} on success,
        {"status": "error", "type": "authenticate"} when login fails, or
        {"status": False, "error": "Data not found"} when the page
        structure is missing.
    """
    url = "https://www.lectio.dk/lectio/%s/studieretningElevValg.aspx?elevid=%s" % (
        str(config["school_id"]), str(config["student_id"]))

    # Identity check for the False sentinel, consistent with the other
    # scraper functions in this module.
    if session is False:
        session = authenticate.authenticate(config)

    # authenticate() signals failure by returning False
    if session is False:
        return {"status": "error", "type": "authenticate"}

    # Session cookies required by Lectio
    cookies = {
        "lecmobile": "0",
        "ASP.NET_SessionId": session["ASP.NET_SessionId"],
        "LastLoginUserName": session["LastLoginUserName"],
        "lectiogsc": session["lectiogsc"],
        "LectioTicket": session["LectioTicket"]
    }

    # Insert User-agent headers and the cookie information
    headers = {
        "User-Agent":
        "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1665.2 Safari/537.36",
        "Content-Type": "application/x-www-form-urlencoded",
        "Host": "www.lectio.dk",
        "Origin": "https://www.lectio.dk",
        "Cookie": functions.implode(cookies, "{{index}}={{value}}", "; ")
    }

    response = proxy.session.get(url, headers=headers)

    html = response.text

    soup = Soup(html)

    if soup.find("div", attrs={"id": "s_m_Content_Content_id0_pa"}) is None:
        return {"status": False, "error": "Data not found"}

    elements = soup.find("div", attrs={
        "id": "s_m_Content_Content_id0_pa"
    }).findAll("td")

    information = {
        "student_type": elements[0].find("span").text,
        "student_start_term": elements[1].find("span").text,
        "field_of_study": {
            "name": unicode(elements[2].find("span").text),
            "context_card_id": elements[2].find("span")["lectiocontextcard"],
            # Field-of-study context card ids carry an "SR" prefix
            "field_of_study_id":
            elements[2].find("span")["lectiocontextcard"].replace("SR", "")
        },
        "student_id": str(config["student_id"])
    }

    return {"status": "ok", "information": information}
Exemplo n.º 9
0
def field_of_study ( config, session = False ):
	"""Scrape the student's field-of-study choice page from Lectio.

	Args:
		config: dict with "school_id" and "student_id".
		session: optional session dict from authenticate.authenticate();
			False means a new session is created first.

	Returns:
		{"status" : "ok", "information" : {...}} on success,
		{"status" : "error", "type" : "authenticate"} when login fails, or
		{"status" : False, "error" : "Data not found"} when the page
		structure is missing.
	"""
	url = "https://www.lectio.dk/lectio/%s/studieretningElevValg.aspx?elevid=%s" % ( str(config["school_id"]), str(config["student_id"]) )

	# Identity check for the False sentinel, consistent with the other
	# scraper functions in this module.
	if session is False:
		session = authenticate.authenticate(config)

	# authenticate() signals failure by returning False
	if session is False:
		return {"status" : "error", "type" : "authenticate"}

	cookies = {
		"lecmobile" : "0",
		"ASP.NET_SessionId" : session["ASP.NET_SessionId"],
		"LastLoginUserName" : session["LastLoginUserName"],
		"lectiogsc" : session["lectiogsc"],
		"LectioTicket" : session["LectioTicket"]
	}

	# Insert User-agent headers and the cookie information
	headers = {
		"User-Agent" : "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1665.2 Safari/537.36",
		"Content-Type" : "application/x-www-form-urlencoded",
		"Host" : "www.lectio.dk",
		"Origin" : "https://www.lectio.dk",
		"Cookie" : functions.implode(cookies, "{{index}}={{value}}", "; ")
	}

	response = proxy.session.get(url, headers=headers)

	html = response.text

	soup = Soup(html)

	if soup.find("div", attrs={"id" : "s_m_Content_Content_id0_pa"}) is None:
		return {
			"status" : False,
			"error" : "Data not found"
		}

	elements = soup.find("div", attrs={"id" : "s_m_Content_Content_id0_pa"}).findAll("td")

	information = {
		"student_type" : elements[0].find("span").text,
		"student_start_term" : elements[1].find("span").text,
		"field_of_study" : {
			"name" : unicode(elements[2].find("span").text),
			"context_card_id" : elements[2].find("span")["lectiocontextcard"],
			# Field-of-study context card ids carry an "SR" prefix
			"field_of_study_id" : elements[2].find("span")["lectiocontextcard"].replace("SR", "")
		},
		"student_id" : str(config["student_id"])
	}

	return {
		"status" : "ok",
		"information" : information
	}
Exemplo n.º 10
0
def private_activity ( config, session = False ):
	"""Scrape a private appointment ("privat aftale") from Lectio.

	Args:
		config: dict with "school_id" and "activity_id".
		session: optional session dict from authenticate.authenticate();
			False means a new session is created first.

	Returns:
		{"status" : "ok", "appointment" : {...}} on success,
		{"status" : "error", "type" : "authenticate"} when login fails, or
		{"status" : False, "error" : "Data not found"} when the page
		structure is missing.
	"""
	if session is False:
		session = authenticate.authenticate(config)

	# authenticate() signals failure by returning False
	if session == False:
		return {"status" : "error", "type" : "authenticate"}

	url = "https://www.lectio.dk/lectio/%s/privat_aftale.aspx?aftaleid=%s" % ( str(config["school_id"]), str(config["activity_id"]) )

	# Session cookies required by Lectio
	cookies = {
		"lecmobile" : "0",
		"ASP.NET_SessionId" : session["ASP.NET_SessionId"],
		"LastLoginUserName" : session["LastLoginUserName"],
		"lectiogsc" : session["lectiogsc"],
		"LectioTicket" : session["LectioTicket"]
	}

	# Insert User-agent headers and the cookie information
	headers = {
		"User-Agent" : "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1665.2 Safari/537.36",
		"Content-Type" : "application/x-www-form-urlencoded",
		"Host" : "www.lectio.dk",
		"Origin" : "https://www.lectio.dk",
		"Cookie" : functions.implode(cookies, "{{index}}={{value}}", "; ")
	}

	response = proxy.session.get(url, headers=headers)

	html = response.text

	soup = Soup(html)

	if soup.find("div", attrs={"id" : "m_Content_island1_pa"}) is None:
		return {
			"status" : False,
			"error" : "Data not found"
		}

	# The owning student's id is embedded in the page's start-url meta tag
	studentProg = re.compile(r"\/lectio\/(?P<school_id>.*)\/forside.aspx\?elevid=(?P<student_id>.*)")
	studentGroups = studentProg.match(soup.find("meta", attrs={"name" : "msapplication-starturl"})["content"])

	# Start/end are split across separate date ("%d/%m-%Y") and time
	# ("%H:%M") form inputs and recombined here.
	appointment = {
		"activity_id" : str(config["activity_id"]),
		"school_id" : str(config["school_id"]),
		"title" : unicode(soup.find("input", attrs={"id" : "m_Content_titelTextBox_tb"})["value"].replace("\r\n", "")),
		"comment" : unicode(soup.find("textarea", attrs={"id" : "m_Content_commentTextBox_tb"}).text.replace("\r\n", "")),
		"student_id" : studentGroups.group("student_id"),
		"start" : datetime.strptime("%s %s" % (soup.find("input", attrs={"id" : "m_Content_startdateCtrl__date_tb"})["value"], soup.find("input", attrs={"id" : "m_Content_startdateCtrl_startdateCtrl_time_tb"})["value"]), "%d/%m-%Y %H:%M"),
		"end" : datetime.strptime("%s %s" % (soup.find("input", attrs={"id" : "m_Content_enddateCtrl__date_tb"})["value"], soup.find("input", attrs={"id" : "m_Content_enddateCtrl_enddateCtrl_time_tb"})["value"]), "%d/%m-%Y %H:%M")
	}

	return {
		"status" : "ok",
		"appointment" : appointment
	}
Exemplo n.º 11
0
def school_info(config):
    """Fetch a school's front page and scrape its name, branch id and
    school-year terms.

    Returns {"status": "ok", "information": {...}} on success, or
    {"status": False, "error": "Data not found"} when the link list is
    missing from the page.
    """
    front_page = "https://www.lectio.dk/lectio/%s/default.aspx" % (str(
        config["school_id"]))

    # No session cookies are needed for the public front page
    empty_cookies = {}

    request_headers = {
        "User-Agent":
        "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1665.2 Safari/537.36",
        "Content-Type": "application/x-www-form-urlencoded",
        "Host": "www.lectio.dk",
        "Origin": "https://www.lectio.dk",
        "Cookie": functions.implode(empty_cookies, "{{index}}={{value}}", "; ")
    }

    soup = Soup(proxy.session.get(front_page, headers=request_headers).text)

    # The branch id is embedded in the second link of the link list
    branch_pattern = re.compile(
        r"\/lectio\/(?P<school_id>.*)\/SkemaUgeaendringer.aspx\?lecafdeling=(?P<branch_id>.*)"
    )

    link_list = soup.find("ul", attrs={"class": "linklist"})
    if link_list is None:
        return {"status": False, "error": "Data not found"}

    anchors = link_list.findAll("a")
    branch_match = branch_pattern.match(anchors[1]["href"])

    # One entry per option in the term chooser dropdown
    terms = [{
        "value": option["value"],
        "year_string": option.text,
        "current": "selected" in option.attrs
    } for option in soup.find(attrs={"id": "m_ChooseTerm_term"}).findAll("option")]

    school_name = soup.find(attrs={
        "id": "m_masterleftDiv"
    }).find(text=True).string.replace("\r\n", "").replace("\t", "").strip()

    return {
        "status": "ok",
        "information": {
            "name": school_name,
            "school_id": str(config["school_id"]),
            "branch_id":
            branch_match.group("branch_id") if branch_match is not None else "",
            "terms": terms
        }
    }
Exemplo n.º 12
0
def materials ( config ):
	"""Scrape the list of teaching materials for a team from Lectio.

	Args:
		config: dict with "school_id" and "team_element_id".

	Returns:
		{"status" : "ok", "materials" : [...]} on success, or
		{"status" : False, "error" : "Data not found"} when the materials
		table is missing from the page.
	"""
	url = "https://www.lectio.dk/lectio/%s/MaterialOverview.aspx?holdelement_id=%s" % ( str(config["school_id"]), str(config["team_element_id"]) )

	# This page is fetched without session cookies
	cookies = {}

	# Insert User-agent headers and the cookie information
	headers = {
		"User-Agent" : "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1665.2 Safari/537.36",
		"Content-Type" : "application/x-www-form-urlencoded",
		"Host" : "www.lectio.dk",
		"Origin" : "https://www.lectio.dk",
		"Cookie" : functions.implode(cookies, "{{index}}={{value}}", "; ")
	}

	response = proxy.session.get(url, headers=headers)

	html = response.text

	soup = Soup(html)

	if soup.find("table", attrs={"id" : "m_Content_MaterialsStudents"}) is None:
		return {
			"status" : False,
			"error" : "Data not found"
		}

	rows = soup.find("table", attrs={"id" : "m_Content_MaterialsStudents"}).findAll("tr")
	materialsList = []

	# More than one row means there is data beyond the header row
	if len(rows) > 1:
		rows.pop(0)
		# Titles look like "<authors>: <title>, <publisher>"
		titleProg = re.compile(ur"(?P<authors>.*): (?P<title>.*), (?P<publisher>.*)")

		for row in rows:
			elements = row.findAll("td")
			title = unicode(elements[0].text.replace("\n", ""))
			titleGroups = titleProg.match(title)
			# Fall back to the raw title text when it does not match the
			# "<authors>: <title>, <publisher>" pattern.
			materialsList.append({
				"title_text" : title,
				"title" : titleGroups.group("title") if not titleGroups is None else title,
				"publisher" : titleGroups.group("publisher") if not titleGroups is None else "",
				"authors" : titleGroups.group("authors").split(", ") if not titleGroups is None else "",
				"type" : "book" if unicode(elements[1].text.replace("\n", "")) == u"Bog" else unicode(elements[1].text.replace("\n", "")),
				"book_storage" : True if elements[2].text.replace("\n", "") == "Ja" else False,
				"comment" : unicode(elements[3].text.strip("\n").replace("\n", "")),
				"ebook" : elements[4].text.strip("\n").replace("\n", "")
			})

	return {
		"status" : "ok",
		"materials" : materialsList
	}
Exemplo n.º 13
0
def xprs_subject ( config ):
	"""Scrape an XPRS subject context card from Lectio.

	Args:
		config: dict with "school_id" and "context_card_id".

	Returns:
		{"status" : "ok", "xprs_subject" : {...}} on success, or
		{"status" : False, "error" : "Data not found"} when the card
		markup is missing.
	"""
	url = "https://www.lectio.dk/lectio/%s/contextcard/contextcard.aspx?lectiocontextcard=%s" % ( str(config["school_id"]), str(config["context_card_id"]) )

	# This page is fetched without session cookies
	cookies = {
	}

	# Insert User-agent headers and the cookie information
	headers = {
		"User-Agent" : "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1665.2 Safari/537.36",
		"Content-Type" : "application/x-www-form-urlencoded",
		"Host" : "www.lectio.dk",
		"Origin" : "https://www.lectio.dk",
		"Cookie" : functions.implode(cookies, "{{index}}={{value}}", "; ")
	}

	response = proxy.session.get(url, headers=headers)

	html = response.text

	soup = Soup(html)

	if soup.find("span", attrs={"id" : "ctl00_Content_cctitle"}) is None:
		return {
			"status" : False,
			"error" : "Data not found"
		}

	tables = soup.findAll("table")

	# The code cell reads "<code> <name>", e.g. a level letter inside code
	codeProg = re.compile(r"(?P<code>[.^\S]*) (?P<name>.*)")
	codeGroups = codeProg.match(tables[1].findAll("td")[1].text)

	level = "Unknown"

	# Level letter (A/B/C) is part of the subject code
	if not codeGroups is None:
		level = "A" if "A" in codeGroups.group("code") else "B" if "B" in codeGroups.group("code") else "C"

	return {
		"status" : "ok",
		"xprs_subject" : {
			"name" : soup.find(attrs={"id" : "ctl00_Content_cctitle"}).text.replace("XPRS-f*g - ", ""),
			"code" : codeGroups.group("code").replace("A", "").replace("B", "").replace("C", "") if not codeGroups is None else "",
			"subject_sub_type" : "none" if tables[1].findAll("td")[3].text == "Ingen underfag" else "differs" if tables[1].findAll("td")[3].text == "Variable underfag" else tables[1].findAll("td")[3].text,
			"context_card_id" : str(config["context_card_id"]),
			"level" : level,
			"xprs_subject_id" : str(config["context_card_id"]).replace("XF", ""),
			"code_full" : codeGroups.group("code") if not codeGroups is None else "",
			"notices" : tables[1].findAll("td")[5].text.split("\n"),
			"code_full_name" : tables[1].findAll("td")[1].text
		}
	}
Exemplo n.º 14
0
def field_of_study ( config, session = False ):
	"""Scrape a field-of-study context card from Lectio.

	Args:
		config: dict with "school_id" and "context_card_id".
		session: optional session dict from authenticate.authenticate();
			False means a new session is created first.

	Returns:
		{"status" : "ok", "field_of_study" : {...}} on success,
		{"status" : "error", "type" : "authenticate"} when login fails, or
		{"status" : False, "error" : "Data not found"} when the card
		markup is missing.
	"""
	url = "https://www.lectio.dk/lectio/%s/contextcard/contextcard.aspx?lectiocontextcard=%s" % ( str(config["school_id"]), str(config["context_card_id"]) )

	if session is False:
		session = authenticate.authenticate(config)

	# authenticate() signals failure by returning False
	if session is False:
		return {"status" : "error", "type" : "authenticate"}

	# Insert the session information from the auth function
	cookies = {
		"lecmobile" : "0",
		"ASP.NET_SessionId" : session["ASP.NET_SessionId"],
		"LastLoginUserName" : session["LastLoginUserName"],
		"lectiogsc" : session["lectiogsc"],
		"LectioTicket" : session["LectioTicket"]
	}

	# Insert User-agent headers and the cookie information
	headers = {
		"User-Agent" : "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1665.2 Safari/537.36",
		"Content-Type" : "application/x-www-form-urlencoded",
		"Host" : "www.lectio.dk",
		"Origin" : "https://www.lectio.dk",
		"Cookie" : functions.implode(cookies, "{{index}}={{value}}", "; ")
	}

	response = proxy.session.get(url, headers=headers)

	html = response.text

	soup = Soup(html)

	if soup.find("span", attrs={"id" : "ctl00_Content_cctitle"}) is None:
		return {
			"status" : False,
			"error" : "Data not found"
		}

	tables = soup.findAll("table")
	# The field-of-study id is embedded in the card's first link href
	idProg = re.compile(r"\/lectio\/(?P<school_id>.*)\/studieretningSe.aspx\?sid=(?P<field_of_study_id>.*)")
	idGroups = idProg.match(soup.find(attrs={"id" : "ctl00_Content_linksrep_ctl00_somelink"})["href"])

	return {
		"status" : "ok",
		"field_of_study" : {
			"name" : unicode(tables[1].findAll("td")[1].text),
			"field_of_study_id" : idGroups.group("field_of_study_id") if idGroups is not None else "",
			"school_id" : str(config["school_id"]),
			"context_card_id" : str(config["context_card_id"])
		}
	}
Exemplo n.º 15
0
def class_teams ( config ):
	"""Scrape the list of teams for a class from its study-plan Gantt page.

	Args:
		config: dict with "school_id" and "class_id".

	Returns:
		{"status" : "ok", "teams" : [...]} on success, or
		{"status" : False, "error" : "Data not found"} when the interval
		table is missing from the page.
	"""

	url = "https://www.lectio.dk/lectio/%s/studieplan.aspx?klasseid=%s&displaytype=ganttkalender&ganttdimension=hold" % ( str(config["school_id"]), str(config["class_id"]) )

	# Sorting settings
	# NOTE(review): `settings` is never used below — presumably a leftover.
	settings = {}

	# This page is fetched without session cookies
	cookies = {}

	# Insert User-agent headers and the cookie information
	headers = {
		"User-Agent" : "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1665.2 Safari/537.36",
		"Content-Type" : "application/x-www-form-urlencoded",
		"Host" : "www.lectio.dk",
		"Origin" : "https://www.lectio.dk",
		"Cookie" : functions.implode(cookies, "{{index}}={{value}}", "; ")
	}

	response = proxy.session.get(url, headers=headers)

	html = response.text

	soup = Soup(html)

	if soup.find("div", attrs={"id" : "s_m_Content_Content_IntervalTablePnl"}) is None:
		return {
			"status" : False,
			"error" : "Data not found"
		}

	teams = []
	# Link titles look like "<abbrevation> - <name>"
	subjectProg = re.compile(ur"(?P<abbrevation>.*) - (?P<name>.*)")
	# Team element id is embedded in the study-plan link href
	idProg = re.compile(r"\/lectio\/(?P<school_id>.*)\/studieplan.aspx\?holdelementid=(?P<team_element_id>.*)&displaytype=ugeteksttabel")

	# The first three table rows are headers, hence the [3:] slice
	for row in soup.find(attrs={"id" : "s_m_Content_Content_IntervalTablePnl"}).find("table").findAll("tr")[3:]:
		subjectGroups = subjectProg.match(row.find("a")["title"])
		idGroups = idProg.match(row.find("a")["href"])
		teams.append({
			"name" : unicode(row.find("a").text.strip()),
			"team_element_id" : idGroups.group("team_element_id") if not idGroups is None else "",
			"subject" : {
				"abbrevation" : subjectGroups.group("abbrevation") if not subjectGroups is None else "",
				"name" : subjectGroups.group("name") if not subjectGroups is None else ""
			}
		})

	return {
		"status" : "ok",
		"teams" : teams
	}
Exemplo n.º 16
0
def school_info ( config ):
	"""Fetch a school's front page and scrape its name, branch id and
	school-year terms.

	Args:
		config: dict with "school_id".

	Returns:
		{"status" : "ok", "information" : {...}} on success, or
		{"status" : False, "error" : "Data not found"} when the link list
		is missing from the page.
	"""
	url = "https://www.lectio.dk/lectio/%s/default.aspx" % ( str(config["school_id"]) )

	# The public front page needs no session cookies
	cookies = {}

	# Insert User-agent headers and the cookie information
	headers = {
		"User-Agent" : "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1665.2 Safari/537.36",
		"Content-Type" : "application/x-www-form-urlencoded",
		"Host" : "www.lectio.dk",
		"Origin" : "https://www.lectio.dk",
		"Cookie" : functions.implode(cookies, "{{index}}={{value}}", "; ")
	}

	response = proxy.session.get(url, headers=headers)

	html = response.text

	soup = Soup(html)
	# The branch id is embedded in the schedule-changes link href
	idProg = re.compile(r"\/lectio\/(?P<school_id>.*)\/SkemaUgeaendringer.aspx\?lecafdeling=(?P<branch_id>.*)")

	if soup.find("ul", attrs={"class" : "linklist"}) is None:
		return {
			"status" : False,
			"error" : "Data not found"
		}

	elements = soup.find("ul", attrs={"class" : "linklist"}).findAll("a")
	idGroups = idProg.match(elements[1]["href"])
	terms = []

	# One entry per option in the term chooser dropdown
	for row in soup.find(attrs={"id" : "m_ChooseTerm_term"}).findAll("option"):
		terms.append({
			"value" : row["value"],
			"year_string" : row.text,
			"current" : True if "selected" in row.attrs else False
		})

	information = {
		"name" : soup.find(attrs={"id" : "m_masterleftDiv"}).find(text=True).string.replace("\r\n", "").replace("\t","").strip(),
		"school_id" : str(config["school_id"]),
		"branch_id" : idGroups.group("branch_id") if not idGroups is None else "",
		"terms" : terms
	}

	return {
		"status" : "ok",
		"information" : information
	}
Exemplo n.º 17
0
def subject_list(start, end, school_id):
    """Probe Lectio's FindSkema page for each subject code in [start, end]
    and collect the subjects that exist.

    NOTE(review): `subjects` is built but this block contains no return
    statement — the function appears truncated (or the return was lost);
    verify against the original source.

    Args:
        start: first subject code to probe (inclusive).
        end: last subject code to probe (inclusive).
        school_id: Lectio school id used in the URL.
    """
    increase = 1
    subjects = []

    cards = []

    # Enumerate every code from start to end inclusive
    for code in range(0, end - start + 1):
        cards.append(start + (code * increase))

    for code in cards:
        url = "http://www.lectio.dk/lectio/%s/FindSkema.aspx?type=hold&f*g=%s" % (
            str(school_id), str(code))

        cookies = {}

        # Insert User-agent headers and the cookie information
        headers = {
            "User-Agent":
            "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1665.2 Safari/537.36",
            "Content-Type": "application/x-www-form-urlencoded",
            "Host": "www.lectio.dk",
            "Origin": "https://www.lectio.dk",
            "Cookie": functions.implode(cookies, "{{index}}={{value}}", "; ")
        }
        error = False
        # Best-effort: log the failing code and keep probing the rest
        try:
            response = proxy.session.get(url, headers=headers)
        except Exception, e:
            print code
            error = True

        if error == False:
            html = response.text

            soup = Soup(html)

            # The content table is only present when the code matched a subject
            if not soup.find("table", attrs={"id": "m_Content_contenttbl"
                                             }) is None:
                elements = soup.find("table",
                                     attrs={
                                         "id": "m_Content_contenttbl"
                                     }).find("span").text.split(" - ")

                subjects.append({
                    "abbrevation": elements[0].encode("utf8"),
                    "name": elements[1].encode("utf8"),
                    "subject_id": str(code)
                })
Exemplo n.º 18
0
def folders ( config, session = False ):
	"""Fetch the student's document folder tree from Lectio.

	Args:
		config: dict with "school_id" and "student_id".
		session: optional session dict from authenticate.authenticate();
			False means a new session is created first.

	Returns:
		{"status" : "ok", "folders" : ...} on success,
		{"status" : "error", "type" : "authenticate"} when login fails, or
		{"status" : False, "error" : "Data not found"} when the folder
		tree markup is missing.
	"""
	if session is False:
		session = authenticate.authenticate(config)

	# authenticate() signals failure by returning False; use an identity
	# check like the sentinel test above instead of equality.
	if session is False:
		return {"status" : "error", "type" : "authenticate"}

	# Session cookies required by Lectio
	cookies = {
		"lecmobile" : "0",
		"ASP.NET_SessionId" : session["ASP.NET_SessionId"],
		"LastLoginUserName" : session["LastLoginUserName"],
		"lectiogsc" : session["lectiogsc"],
		"LectioTicket" : session["LectioTicket"]
	}

	# Insert User-agent headers and the cookie information
	headers = {
		"User-Agent" : "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1665.2 Safari/537.36",
		"Content-Type" : "application/x-www-form-urlencoded",
		"Host" : "www.lectio.dk",
		"Origin" : "https://www.lectio.dk",
		"Cookie" : functions.implode(cookies, "{{index}}={{value}}", "; ")
	}

	url = "https://www.lectio.dk/lectio/%s/DokumentOversigt.aspx?elevid=%s" %( str(config["school_id"]), str(config["student_id"]) )

	response = proxy.session.get(url, headers=headers)

	html = response.text

	soup = Soup(html)

	if soup.find("div", attrs={"id" : "s_m_Content_Content_FolderTreeView"}) is None:
		return {
			"status" : False,
			"error" : "Data not found"
		}

	# Recursively walk the tree markup via the module's find_folders helper
	folders = find_folders(soup.find("div", attrs={"id" : "s_m_Content_Content_FolderTreeView"}), False, session, config)

	return {
		"status" : "ok",
		"folders" : folders
	}
Exemplo n.º 19
0
def folders(config, session=False):
    """Fetch the student's document folder tree from Lectio.

    Parameters:
        config  -- dict with at least "school_id" and "student_id"
        session -- session cookie dict from authenticate.authenticate();
                   False (default) authenticates using `config`

    Returns {"status": "ok", "folders": ...} on success,
    {"status": "error", "type": "authenticate"} when login fails, or
    {"status": False, "error": "Data not found"} when the folder tree
    div is absent from the page.
    """
    if session is False:
        session = authenticate.authenticate(config)

    if session == False:
        return {"status": "error", "type": "authenticate"}

    # Session cookies required by Lectio
    cookies = {
        "lecmobile": "0",
        "ASP.NET_SessionId": session["ASP.NET_SessionId"],
        "LastLoginUserName": session["LastLoginUserName"],
        "lectiogsc": session["lectiogsc"],
        "LectioTicket": session["LectioTicket"]
    }

    # Insert User-agent headers and the cookie information
    headers = {
        "User-Agent":
        "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1665.2 Safari/537.36",
        "Content-Type": "application/x-www-form-urlencoded",
        "Host": "www.lectio.dk",
        "Origin": "https://www.lectio.dk",
        "Cookie": functions.implode(cookies, "{{index}}={{value}}", "; ")
    }

    url = "https://www.lectio.dk/lectio/%s/DokumentOversigt.aspx?elevid=%s" % (
        str(config["school_id"]), str(config["student_id"]))

    response = proxy.session.get(url, headers=headers)

    html = response.text

    soup = Soup(html)

    # Bail out when the folder tree container is missing (e.g. bad page).
    if soup.find("div", attrs={"id": "s_m_Content_Content_FolderTreeView"
                               }) is None:
        return {"status": False, "error": "Data not found"}

    # Recursive parsing of the tree is delegated to find_folders.
    folders = find_folders(
        soup.find("div", attrs={"id": "s_m_Content_Content_FolderTreeView"}),
        False, session, config)

    return {"status": "ok", "folders": folders}
Exemplo n.º 20
0
def work_methods ( config ):
	# Fetch the list of work methods (arbejdsformer) for a team from the
	# study-plan page, along with the selected term. No authentication
	# cookies are sent for this page.
	requestUrl = "https://www.lectio.dk/lectio/%s/studieplan.aspx?holdelementid=%s&displaytype=holdogarbejdsform" % ( str(config["school_id"]), str(config["team_element_id"]) )

	cookies = {}

	# Insert User-agent headers and the cookie information
	headers = {
		"User-Agent" : "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1665.2 Safari/537.36",
		"Content-Type" : "application/x-www-form-urlencoded",
		"Host" : "www.lectio.dk",
		"Origin" : "https://www.lectio.dk",
		"Cookie" : functions.implode(cookies, "{{index}}={{value}}", "; ")
	}

	pageSoup = Soup(proxy.session.get(requestUrl, headers=headers).text)

	competenceTable = pageSoup.find("table", attrs={"id" : "s_m_Content_Content_CompetanceTbl"})

	if competenceTable is None:
		return {
			"status" : False,
			"error" : "Data not found"
		}

	# Skip the header row; the first cell of each remaining row holds
	# the method name.
	methods = [unicode(row.findAll("td")[0].text) for row in competenceTable.findAll("tr")[1:]]

	# The currently selected term option carries both the value and the
	# human-readable year span.
	selectedTerm = pageSoup.find("select", attrs={"id" : "s_m_ChooseTerm_term"}).select('option[selected="selected"]')[0]

	return {
		"status" : "ok",
		"methods" : methods,
		"term" : {
			"value" : selectedTerm["value"],
			"years_string" : selectedTerm.text
		}
	}
Exemplo n.º 21
0
def subject_list ( start, end, school_id ):
	"""Probe FindSkema pages for every subject code in [start, end].

	For each code in the inclusive range, fetches the schedule search
	page and, when the content table is present, records the subject's
	abbreviation, name and id.

	NOTE(review): this chunk builds `subjects` but never returns it —
	confirm against the full source whether a trailing return was lost.
	Python 2 only (`except Exception, e` syntax).
	"""
	increase = 1
	subjects = []

	cards = []

	# Expand the inclusive [start, end] range into the list of codes to probe.
	for code in range(0, end-start+1):
		cards.append(start + (code*increase))

	for code in cards:
		url = "http://www.lectio.dk/lectio/%s/FindSkema.aspx?type=hold&f*g=%s" % ( str(school_id), str(code) )

		cookies = {}

		# Insert User-agent headers and the cookie information
		headers = {
			"User-Agent" : "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1665.2 Safari/537.36",
			"Content-Type" : "application/x-www-form-urlencoded",
			"Host" : "www.lectio.dk",
			"Origin" : "https://www.lectio.dk",
			"Cookie" : functions.implode(cookies, "{{index}}={{value}}", "; ")
		}
		# Best-effort: a failed request only logs the code and skips it.
		error = False
		try:
			response = proxy.session.get(url, headers=headers)
		except Exception, e:
			print code
			error = True

		if error == False:
			html = response.text

			soup = Soup(html)

			# The content table's first span holds "abbreviation - name".
			if not soup.find("table", attrs={"id" : "m_Content_contenttbl"}) is None:
				elements = soup.find("table", attrs={"id" : "m_Content_contenttbl"}).find("span").text.split(" - ")

				subjects.append({
					"abbrevation" : elements[0].encode("utf8"),
					"name" : elements[1].encode("utf8"),
					"subject_id" : str(code)
				})
Exemplo n.º 22
0
def document(config, session=False):
    """Fetch metadata for a single document from its Lectio edit page.

    Parameters:
        config  -- dict with at least "school_id" and "document_id"
        session -- session cookie dict from authenticate.authenticate();
                   False (default) authenticates using `config`

    Returns {"status": "ok", "document": {...}} with name, extension,
    size, creator, changer, comment, visibility, affiliations and term,
    or an error dict when authentication or page parsing fails.
    """
    url = "https://www.lectio.dk/lectio/%s/dokumentrediger.aspx?dokumentid=%s" % (
        str(config["school_id"]), str(config["document_id"]))

    if session is False:
        session = authenticate.authenticate(config)

    if session == False:
        return {"status": "error", "type": "authenticate"}

    # Session cookies required by Lectio
    cookies = {
        "lecmobile": "0",
        "ASP.NET_SessionId": session["ASP.NET_SessionId"],
        "LastLoginUserName": session["LastLoginUserName"],
        "lectiogsc": session["lectiogsc"],
        "LectioTicket": session["LectioTicket"]
    }

    # Insert User-agent headers and the cookie information
    headers = {
        "User-Agent":
        "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1665.2 Safari/537.36",
        "Content-Type": "application/x-www-form-urlencoded",
        "Host": "www.lectio.dk",
        "Origin": "https://www.lectio.dk",
        "Cookie": functions.implode(cookies, "{{index}}={{value}}", "; ")
    }

    response = proxy.session.get(url, headers=headers)

    html = response.text

    soup = Soup(html)

    if soup.find("div", attrs={"id": "m_Content_Dokument_pa"}) is None:
        return {"status": False, "error": "Data not found"}

    # NOTE(review): when the info table has fewer than 7 cells, later
    # indexes are shifted left by one — presumably a column is omitted
    # for some document types; confirm against the page layout.
    offset = 0

    elements = soup.find("div", attrs={
        "id": "m_Content_Dokument_pa"
    }).findAll("td")

    if len(elements) < 7:
        offset = 1

    # Resolve the creator through their context card.
    creator = context_card.user(
        {
            "context_card_id":
            elements[3 - offset].find("span")["lectiocontextcard"],
            "school_id":
            config["school_id"]
        }, session)["user"]

    # The "changed by" cell holds a span with the changer's context card;
    # remove the span so only the "<date> af <user>" text remains, then
    # strip the " af " separator and parse the date portion.
    changer = elements[4 - offset].find("span")["lectiocontextcard"]
    elements[4 - offset].find("span").decompose()
    dateText = elements[4 - offset].text.replace(" af ", "").strip()
    dateTimeProg = re.compile(
        r"(?P<day>.*)/(?P<month>.*)-(?P<year>.*) (?P<hour>.*):(?P<minute>.*)")
    dateGroups = dateTimeProg.match(dateText)
    date = datetime.strptime(
        "%s/%s-%s %s:%s" % (functions.zeroPadding(dateGroups.group("day")),
                            functions.zeroPadding(dateGroups.group("month")),
                            dateGroups.group("year"), dateGroups.group("hour"),
                            dateGroups.group("minute")),
        "%d/%m-%Y %H:%M") if not dateGroups is None else ""

    # Affiliation rows: the teams/teachers/students this document is
    # shared with. The first row is the table header.
    connectionRows = soup.find("table",
                               attrs={
                                   "id": "m_Content_AffiliationsGV"
                               }).findAll("tr")
    connectionRows.pop(0)

    connections = []

    for row in connectionRows:
        rowElements = row.findAll("td")

        # The context card prefix encodes the entity type
        # (H = team, T = teacher, otherwise student).
        data = {
            "context_card_id":
            rowElements[0]["lectiocontextcard"],
            "type":
            "team" if "H" in rowElements[0]["lectiocontextcard"] else "teacher"
            if "T" in rowElements[0]["lectiocontextcard"] else "student",
            "name":
            unicode(rowElements[0].find("span").text),
            "can_edit":
            True if "checked" in rowElements[1].find("input").attrs else False
        }

        # Optional folder selector per affiliation.
        if rowElements[2].find("select"):
            folder_id = rowElements[2].find("select").select(
                'option[selected="selected"]')[0]["value"]
            data["folder_id"] = folder_id

        connections.append(data)

    # Assemble the final document description.
    document = {
        "name":
        unicode(elements[0].find("a").text).replace("\t", "").replace(
            "\n", "").replace("\r", "").strip(),
        "extension":
        os.path.splitext(elements[0].find("a").text.replace("\t", "").replace(
            "\n", "").replace("\r", "").strip())[1].replace(".", ""),
        "size":
        elements[2 - offset].text.replace(",", ".").replace("\t", "").replace(
            "\n", "").replace("\r", "").strip(),
        "document_id":
        str(config["document_id"]),
        "creator":
        creator,
        "changer": {
            "context_card_id": changer,
            "type": "teacher" if "T" in changer else "student",
            "date": date
        },
        "comment":
        soup.find("textarea", attrs={
            "id": "m_Content_EditDocComments_tb"
        }).text.replace("\r\n", ""),
        "public":
        True if "checked" in soup.find("input",
                                       attrs={
                                           "id": "m_Content_EditDocIsPublic"
                                       }).attrs else False,
        "connections":
        connections,
        "term": {
            "value":
            soup.find("select", attrs={
                "id": "m_ChooseTerm_term"
            }).select('option[selected="selected"]')[0]["value"],
            "years_string":
            soup.find("select", attrs={
                "id": "m_ChooseTerm_term"
            }).select('option[selected="selected"]')[0].text
        }
    }

    return {"status": "ok", "document": document}
Exemplo n.º 23
0
def activity_info(config, activity_id, session = False, modules = None ):
    """Scrape detailed information about a single timetable activity.

    Parameters:
        config      -- dict with at least "school_id"; also used for
                       authentication when session is True
        activity_id -- id of the activity (lesson) to fetch
        session     -- False: request without cookies; True: authenticate
                       from `config`; otherwise a session cookie dict
        modules     -- optional list of module time dicts with keys
                       "module", "start", "end", used to resolve a module
                       number into concrete start/end times

    Returns a dict with status "ok" and the parsed fields (teams,
    students, teachers, rooms, documents, homework, links, dates, term,
    creation/update metadata, ...), or
    {"status": "error", "type": "authenticate"} when login fails.
    """
    if not session == False:
        if session is True:
            session = authenticate.authenticate(config)

        if session == False:
            return {"status" : "error", "type" : "authenticate"}

    url = urls.activity_info.replace("{{SCHOOL_ID}}", str(config["school_id"])).replace("{{ACTIVITY_ID}}", str(activity_id))

    if not session == False:
        # Insert the session information from the auth function
        cookies = {
            "lecmobile" : "0",
            "ASP.NET_SessionId" : session["ASP.NET_SessionId"],
            "LastLoginUserName" : session["LastLoginUserName"],
            "lectiogsc" : session["lectiogsc"],
            "LectioTicket" : session["LectioTicket"]
        }

    else:
        cookies = {}

    settings = {}

    # Insert User-agent headers and the cookie information
    headers = {
        "User-Agent" : "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1665.2 Safari/537.36",
        "Content-Type" : "application/x-www-form-urlencoded",
        "Host" : "www.lectio.dk",
        "Origin" : "https://www.lectio.dk",
        "Cookie" : functions.implode(cookies, "{{index}}={{value}}", "; ")
    }

    response = proxy.session.post(url, data=settings, headers=headers)

    html = response.text

    soup = Soup(html)

    # Find all the different rows in the table
    rows = []

    for x in soup.find("div", attrs={"id" : "m_Content_LectioDetailIslandLesson_pa"}).find("table").findAll("tr", recursive=False):
        rows.append(x.find("td"))

    headers = soup.find("div", attrs={"id" : "m_Content_LectioDetailIslandLesson_pa"}).find("table").findAll("th")

    # Rename the 4th header so rowMap gets a distinct key for the
    # education-assigned students row (presumably a duplicate "Elever"
    # header otherwise — confirm against the page layout).
    headers[3].string.replace_with("EleverAs")

    # Make rows[n] match headers[n]
    # NOTE(review): deleting from `rows` while enumerating it skips the
    # element following each deletion — confirm this is intended.
    for index, element in enumerate(rows):
        table = element.find_parent("table")
        if table["class"][0] == u"NoFrame":
            del rows[index]

    # Generate a map of rows
    rowMap = functions.mapRows(headers, rows)

    # Retrieve the values
    showed_in_values = unicode(rowMap["Vises"].text).split(", ")
    showed_in = []

    type = unicode(rowMap["Type"].text)
    status = unicode(rowMap["Status"].text)
    students_resserved = unicode(rowMap["Deltagerereserveret"].text)

    teams = [] # Done
    students = [] # Done
    ressources = [] # Test Missing
    rooms = [] # Done
    teachers = [] # Done
    documents = [] # Done
    links = [] # Done
    students_education_assigned = [] # Missing Test
    homework = []

    # Extract resource (room-like) ids from their schedule URLs.
    ressourceProg = re.compile(r"\/lectio\/(?P<school_id>.*)\/SkemaNy.aspx\?type=lokale&nosubnav=1&id=(?P<ressource_id>.*)&week=(?P<week>.*)")

    for x in rowMap["Ressourcer"].findAll("a"):
        ressoureceGroups = ressourceProg.match(x["href"])

        ressources.append({
            "ressource_id" : ressoureceGroups.group("ressource_id") if not ressoureceGroups is None else ""
        })

    for x in rowMap["EleverAs"].findAll("a"):
        students_education_assigned.append({
            "student_id" : x["lectiocontextcard"].replace("S", "")
        })

    # Three time formats occur: "<day> <d>/<m> <module>. modul, uge <w>",
    # "<day> <d>/<m> <hh:mm> - <hh:mm>, uge <w>", and a multi-day span.
    dateProg = re.compile(r"(?P<day_name>.*) (?P<day>.*)\/(?P<month>.*) (?P<module>.*)\. modul, uge (?P<week>.*)")
    termValue = soup.find("select", attrs={"id" : "m_ChooseTerm_term"}).select('option[selected="selected"]')[0]["value"]
    alternativeDateProg = re.compile(r"(?P<day_name>.*) (?P<day>.*)\/(?P<month>.*) (?P<start_time>.*) - (?P<end_time>.*), uge (?P<week>.*)")
    multiDayProg = re.compile(r"(?P<start_day_name>.*) (?P<start_day>.*)\/(?P<start_month>.*) (?P<start_time>.*) - (?P<end_day_name>.*) (?P<end_day>.*)\/(?P<end_month>.*) (?P<end_time>.*), uge (?P<week>.*)")

    altDateGroups = alternativeDateProg.match(rowMap["Tidspunkt"].text.strip().replace("\r", "").replace("\n", "").replace("\t", ""))
    dateGroups = dateProg.match(rowMap["Tidspunkt"].text.strip().replace("\r", "").replace("\n", "").replace("\t", ""))
    multiDayGroups  = multiDayProg.match(rowMap["Tidspunkt"].text.strip().replace("\r", "").replace("\n", "").replace("\t", ""))

    startDate = None
    endDate = None

    # Months before August belong to the second calendar year of the term.
    if not dateGroups is None and not modules == None:
        if int(dateGroups.group("month")) < 8:
            year = int(termValue) + 1
        else:
            year = int(termValue)

        startTime = "12:00"
        endTime = "00:00"

        # Resolve the module number into concrete times via `modules`.
        for x in modules:
            if str(x["module"]) == str(dateGroups.group("module")):
                startTime = x["start"]
                endTime = x["end"]

        startDate = datetime.strptime("%s/%s-%s %s" % (functions.zeroPadding(dateGroups.group("day")), functions.zeroPadding(dateGroups.group("month")), year, startTime), "%d/%m-%Y %H:%M")
        endDate = datetime.strptime("%s/%s-%s %s" % (functions.zeroPadding(dateGroups.group("day")), functions.zeroPadding(dateGroups.group("month")), year, endTime), "%d/%m-%Y %H:%M")
    elif not multiDayGroups is None:
        if int(multiDayGroups.group("month")) < 8:
            year = int(termValue) + 1
        else:
            year = int(termValue)

        startDate = datetime.strptime("%s/%s-%s %s" % (functions.zeroPadding(multiDayGroups.group("day")), functions.zeroPadding(multiDayGroups.group("month")), year, multiDayGroups.group("start_time")), "%d/%m-%Y %H:%M")
        endDate = datetime.strptime("%s/%s-%s %s" % (functions.zeroPadding(multiDayGroups.group("day")), functions.zeroPadding(multiDayGroups.group("month")), year, multiDayGroups.group("end_time")), "%d/%m-%Y %H:%M")
    elif not altDateGroups is None:
        if int(altDateGroups.group("month")) < 8:
            year = int(termValue) + 1
        else:
            year = int(termValue)

        startDate = datetime.strptime("%s/%s-%s %s" % (functions.zeroPadding(altDateGroups.group("day")), functions.zeroPadding(altDateGroups.group("month")), year, altDateGroups.group("start_time")), "%d/%m-%Y %H:%M")
        endDate = datetime.strptime("%s/%s-%s %s" % (functions.zeroPadding(altDateGroups.group("day")), functions.zeroPadding(altDateGroups.group("month")), year, altDateGroups.group("end_time")), "%d/%m-%Y %H:%M")

    # Created and updated dates
    metaProg = re.compile(values.activity_updated_regex)

    metaElements = rowMap["Systeminformation"].text.strip().split("\n")
    metaString = ""
    for me in metaElements:
        metaString = metaString + " " + me.replace("\t\t\t\t", "").replace("\r", "").strip()

    metaGroups = metaProg.search(metaString)

    # Loop through the documents and append to the list
    documentTable = rowMap["Dokumenter"].find("table")
    if not documentTable == None:
        documentRows = documentTable.findAll("td")
        for documentRow in documentRows:
            # Split the size from the unit
            fileSizeProg = re.compile(values.file_size_regex)
            fileSizeGroups = fileSizeProg.search(documentRow.text)

            # Find the different document info elements
            elements = documentRow.findAll("a")

            if len(elements) > 0:
                # Filter the id from the document url
                documentProg = re.compile(values.document_url_regex)
                documentGroups = documentProg.search(elements[1]["href"])

                # Append to the list
                documents.append({
                    "name" : elements[1].text.encode("utf8"),
                    "size" : {
                        "size" : fileSizeGroups.group("size").replace(",", "."),
                        "unit" : fileSizeGroups.group("unit_name")
                    },
                    "type" : "timetable_document",
                    "document_id" : documentGroups.group("document_id")
                })

    # Loop through the students and append to the list
    # (links come in pairs: student link followed by class link)
    studentRows = rowMap["Elever"].findAll("a")
    for student,classObject in functions.grouped(studentRows,2):
        # Filter the id from the class URL
        studentClassProg = re.compile(values.class_url_regex)
        studentClassGroups = studentClassProg.search(classObject["href"])

        # Filter the student id from the URL
        studentIdProg = re.compile(values.student_url_regex)
        studentIdGroups = studentIdProg.search(student["href"])

        students.append({
            "name" : unicode(student.text),
            "class" : unicode(classObject.text),
            "context_card_id" : student["lectiocontextcard"],
            "student_id" : studentIdGroups.group("student_id"),
            "class_id" : studentClassGroups.group("class_id")
        })

    # Loop through the teams and append to the list
    for team in rowMap["Hold"].findAll("a"):
        # Filter the class name from the team name
        teamNameProg = re.compile(values.team_class_name_regex)
        teamNameGroups = teamNameProg.search(unicode(team.text))

        # Filter the id from the URL
        teamIdProg = re.compile(values.team_url_regex)
        teamIdGroups = teamIdProg.search(team["href"])

        if not teamIdGroups == None:
            # Append to the list
            teams.append({
                "class" : teamNameGroups.group("class_name"),
                "team" : teamNameGroups.group("team_name"),
                "name" : team.text,
                "team_id" : teamIdGroups.group("team_id")
            })

    # Loop through the values and append English and Computer easy readable values
    for value in showed_in_values:
        if value == u"i dags- og ugeændringer":
            showed_in.append("day_and_week_changes")
        elif value == u"Inde i berørte skemaer":
            showed_in.append("timetable")
        elif value == u"I toppen af berørte skemaer":
            showed_in.append("top_of_timetable")

    # Loop through the links and append them to the list
    for link in rowMap["Links"].findAll("a"):
        links.append({
            "url" : link["href"],
            "title" : unicode(link.text)
        })

    # Loop through the rooms and append them to the list
    for room in rowMap["Lokaler"].findAll("a"):
        # Initialize variables
        roomName = ""
        roomNumber = ""

        # Filter the number from the name
        roomNameProg = re.compile(values.room_name_regex)
        roomNameGroups = roomNameProg.search(unicode(room.text))

        if not roomNameGroups == None:
            roomName = roomNameGroups.group("room_name")
            roomNumber = roomNameGroups.group("room_number")

         # Initialize roomId RegEx
        roomIdProg = re.compile(values.room_url_regex)

        # Filter the id from the URL
        roomIdGroups = roomIdProg.search(room["href"])

        # Append the room to the list
        rooms.append({
            "name" : roomName,
            "number" : roomNumber,
            "room_id" : roomIdGroups.group("room_id")
        })

    # Loop through the teachers and append them to the list
    for teacher in rowMap["Laerere"].findAll("a"):
        # Filter the abbrevation from the name
        teacherNameProg = re.compile(values.name_with_abbrevation_regex)
        teacherNameGroups = teacherNameProg.search(unicode(teacher.text))

        # Filter the id from the URL
        teacherIdProg = re.compile(values.teacher_url_regex)
        teacherIdGroups = teacherIdProg.search(teacher["href"])

        # Append to the list
        teachers.append({
            "context_card_id" : teacher["lectiocontextcard"],
            "name" : teacherNameGroups.group("name"),
            "abbrevation" : teacherNameGroups.group("abbrevation"),
            "teacher_id" : teacherIdGroups.group("teacher_id"),
            "school_id" : teacherIdGroups.group("school_id")
        })

    # Loop over the diferent homework notes and append to the list
    for object in values.activity_homework_regexs:
        prog = re.compile(object["expression"])
        matches = prog.finditer(unicode(rowMap["Lektier"].text.replace("\t", "")))

        # Loop over the matches
        for element in matches:
            if object["name"] == "note":
                if not element.group("note") == "":
                    homework.append({
                        "note" : element.group("note"),
                        "type" : "note"
                    })
            else:
                homework.append({
                    "note" : element.group("note"),
                    "class" : element.group("class"),
                    "authors" : element.group("writers").split(", "),
                    "name" : element.group("name"),
                    "pages" : element.group("pages"),
                    "subject" : element.group("subject"),
                    "publisher" : element.group("publisher"),
                    "type" : "book"
            })
    # Initialize note variable
    note = unicode(rowMap["Note"].text)

    # Return all the information
    return {
        "status" : "ok",
        "time" : unicode(rowMap["Tidspunkt"].text),
        "teams" : teams,
        "type" : "school" if type == "Lektion" else "other_activity" if type == "Anden aktivitet" else "other",
        "students_education_assigned" : students_education_assigned,
        "teachers" : teachers,
        "rooms" : rooms,
        "ressources" : ressources,
        "note" : note.encode("utf8"),
        "documents" : documents,
        "homework" : homework, # Match books with the list of books
        "links" : links,
        "students_resserved" : "true" if students_resserved.strip() == "Ja" else "false",
        "showed_at" : showed_in,
        "activity_status" : "done" if status == "Afholdt" else "planned" if status == "Planlagt" else "cancelled" if status == "Aflyst" else "other",
        "students" : students,
        "created" : {
            "at" : datetime.strptime("%s/%s-%s %s:%s" % (functions.zeroPadding(metaGroups.group("created_date")),functions.zeroPadding(metaGroups.group("created_month")),functions.zeroPadding(metaGroups.group("created_year")),functions.zeroPadding(metaGroups.group("created_hour")),functions.zeroPadding(metaGroups.group("created_minute"))), "%d/%m-%Y %H:%M") if not metaGroups is None else "",
            "by" : metaGroups.group("created_teacher") if not metaGroups is None else ""
        },
        "updated" : {
            "at" : datetime.strptime("%s/%s-%s %s:%s" % (functions.zeroPadding(metaGroups.group("updated_date")),functions.zeroPadding(metaGroups.group("updated_month")),functions.zeroPadding(metaGroups.group("updated_year")),functions.zeroPadding(metaGroups.group("updated_hour")),functions.zeroPadding(metaGroups.group("updated_minute"))), "%d/%m-%Y %H:%M") if not metaGroups is None else "",
            "by" : metaGroups.group("updated_teacher") if not metaGroups is None else ""
        },
        "term" : {
            "value" : soup.find("select", attrs={"id" : "m_ChooseTerm_term"}).select('option[selected="selected"]')[0]["value"],
            "years_string" : soup.find("select", attrs={"id" : "m_ChooseTerm_term"}).select('option[selected="selected"]')[0].text
        },
        "date" : {
            "start" : startDate,
            "end" : endDate
        }
    }
Exemplo n.º 24
0
def student_surveys(config, session=False):
    """Fetch the surveys ("spoergeskemaer") listed for a student.

    Scrapes three islands of the survey report page:
      * answer island -- surveys currently open for answering
      * report island -- surveys open for reporting
      * own island    -- surveys owned by the student (closed)

    Rows referring to the same survey id are merged into one entry.

    Parameters:
        config  -- dict with at least "school_id" and "student_id"
        session -- session cookie dict from authenticate.authenticate();
                   False (default) authenticates using `config`

    Returns {"status": "ok", "surveys": [...]} on success,
    {"status": "error", "type": "authenticate"} when login fails, or
    {"status": False, "error": "Data not found"} when the page layout
    is not recognized.
    """
    url = "https://www.lectio.dk/lectio/%s/spoergeskema_rapport.aspx?type=mine&elevid=%s" % (
        str(config["school_id"]), str(config["student_id"]))

    if session is False:
        session = authenticate.authenticate(config)

    if session == False:
        return {"status": "error", "type": "authenticate"}

    # Insert the session information from the auth function
    cookies = {
        "lecmobile": "0",
        "ASP.NET_SessionId": session["ASP.NET_SessionId"],
        "LastLoginUserName": session["LastLoginUserName"],
        "lectiogsc": session["lectiogsc"],
        "LectioTicket": session["LectioTicket"]
    }

    # Insert User-agent headers and the cookie information
    headers = {
        "User-Agent":
        "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1665.2 Safari/537.36",
        "Content-Type": "application/x-www-form-urlencoded",
        "Host": "www.lectio.dk",
        "Origin": "https://www.lectio.dk",
        "Cookie": functions.implode(cookies, "{{index}}={{value}}", "; ")
    }

    response = proxy.session.get(url, headers=headers)

    soup = Soup(response.text)

    if soup.find("div", attrs={"id": "s_m_Content_Content_answer_island_pa"
                               }) is None:
        return {"status": False, "error": "Data not found"}

    surveys = []
    ids = []

    # URL patterns used to pull the survey id out of each row's link.
    openForAnsweringProg = re.compile(
        r"\/lectio\/(?P<school_id>.*)\/spoergeskema_besvar.aspx\?id=(?P<survey_id>.*)&prevurl=(?P<prev_url>.*)"
    )
    ownProg = re.compile(
        r"\/lectio\/(?P<school_id>.*)\/spoergeskema_rediger.aspx\?id=(?P<survey_id>.*)&prevurl=(?P<prev_url>.*)"
    )
    openForReportingProg = re.compile(
        r"\/lectio\/(?P<school_id>.*)\/spoergeskema\/spoergeskemarapportering.aspx\?id=(?P<survey_id>.*)&prevurl=(?P<prev_url>.*)"
    )
    dateTimeProg = re.compile(
        r"(?P<day>.*)/(?P<month>.*)-(?P<year>.*) (?P<hour>.*):(?P<minute>.*)")

    def _to_datetime(dateGroups):
        # Convert a matched dd/mm-yyyy hh:mm group into a datetime,
        # or "" when the cell text did not match the pattern.
        if dateGroups is None:
            return ""
        return datetime.strptime(
            "%s/%s-%s %s:%s" %
            (functions.zeroPadding(dateGroups.group("day")),
             functions.zeroPadding(dateGroups.group("month")),
             dateGroups.group("year"), dateGroups.group("hour"),
             dateGroups.group("minute")), "%d/%m-%Y %H:%M")

    def _cell_date(element):
        # Date cells sometimes wrap the text in a <span>; prefer it.
        if element.find("span") is not None:
            return _to_datetime(
                dateTimeProg.match(element.find("span").text.strip()))
        return _to_datetime(dateTimeProg.match(element.text))

    def _title(element):
        # Normalize the title cell to a clean utf-8 byte string.
        return element.text.strip().replace("\r", "").replace(
            "\n", "").replace("\t", "").encode("utf8")

    def _survey_id(prog, element):
        # Extract the survey id from the row's first link, "" if unmatched.
        idGroups = prog.match(element.find("a")["href"])
        return idGroups.group("survey_id") if not idGroups is None else ""

    # --- Surveys open for answering ---
    answerIsland = soup.find(
        attrs={"id": "s_m_Content_Content_answer_island_pa"})
    if answerIsland.find("table").find(attrs={"class": "noRecord"}) is None:
        for row in answerIsland.findAll("tr")[1:]:
            elements = row.findAll("td")
            id = _survey_id(openForAnsweringProg, elements[0])
            ids.append(id)
            surveys.append({
                "types": ["open_for_answering"],
                "survey_id": id,
                "anonymous": True if elements[2].text == "Ja" else False,
                "answer_date": _cell_date(elements[3]),
                "title": _title(elements[0])
            })

    # --- Surveys open for reporting ---
    reportIsland = soup.find(
        attrs={"id": "s_m_Content_Content_report_island_pa"})
    if reportIsland.find(attrs={"class": "noRecord"}) is None:
        for row in reportIsland.findAll("tr")[1:]:
            elements = row.findAll("td")
            answerDate = _cell_date(elements[2])
            reportDate = _to_datetime(dateTimeProg.match(elements[3].text))
            endDate = _to_datetime(dateTimeProg.match(elements[4].text))
            id = _survey_id(openForReportingProg, elements[0])

            # BUG FIX: `ids.append(id)` previously ran *before* this
            # membership test, so the else-branch below was unreachable.
            if id in ids:
                for x in surveys:
                    if x["survey_id"] == id:
                        x["answer_date"] = answerDate
                        x["report_date"] = reportDate
                        x["end_date"] = endDate
                        x["types"].append("open_for_reporting")
            else:
                ids.append(id)
                surveys.append({
                    # BUG FIX: was a bare string; kept as a list so the
                    # later sections can append further types, matching
                    # the other entries.
                    "types": ["open_for_reporting"],
                    "survey_id": id,
                    "answer_date": answerDate,
                    "report_date": reportDate,
                    "end_date": endDate,
                    "title": _title(elements[0])
                })

    # --- Surveys owned by the student ---
    ownIsland = soup.find(attrs={"id": "s_m_Content_Content_own_island_pa"})
    if ownIsland.find(attrs={"class": "noRecord"}) is None:
        for row in ownIsland.findAll("tr")[1:]:
            elements = row.findAll("td")
            answerDate = _cell_date(elements[1])
            reportDate = _to_datetime(dateTimeProg.match(elements[2].text))
            endDate = _to_datetime(dateTimeProg.match(elements[3].text))
            id = _survey_id(ownProg, elements[0])

            if id in ids:
                for x in surveys:
                    if x["survey_id"] == id:
                        x["owner_id"] = str(config["student_id"])
                        x["answer_date"] = answerDate
                        x["report_date"] = reportDate
                        x["end_date"] = endDate
            else:
                ids.append(id)
                surveys.append({
                    "types": ["closed"],
                    "survey_id": id,
                    "answer_date": answerDate,
                    "report_date": reportDate,
                    "end_date": endDate,
                    "title": _title(elements[0])
                })

    # BUG FIX: this return previously sat inside the last if-block, so the
    # function implicitly returned None whenever the "own" island had no
    # records, discarding surveys collected from the other two sections.
    return {"status": "ok", "surveys": surveys}
Exemplo n.º 25
0
def survey_answer_page(config, session=False):
    """Fetch and parse a Lectio 'answer survey' page into structured data.

    Scrapes spoergeskema_besvar.aspx for the survey given by
    config["survey_id"] at the school config["school_id"], returning the
    survey's header information (title, owner, anonymity, teachers, teams)
    plus its questions grouped into numbered sections.

    Parameters:
        config: dict with at least "school_id" and "survey_id"; also passed
            to authenticate.authenticate() when no session is supplied.
        session: existing Lectio session dict (cookie values), or False to
            authenticate from scratch.

    Returns:
        {"status": "ok", "information": {...}, "sections": [...]} on success,
        {"status": "error", "type": "authenticate"} if login fails, or
        {"status": False, "error": "Data not found"} when the expected
        info table is missing from the response HTML.
    """
    url = "https://www.lectio.dk/lectio/%s/spoergeskema_besvar.aspx?id=%s" % (
        str(config["school_id"]), str(config["survey_id"]))

    if session is False:
        session = authenticate.authenticate(config)

    if session == False:
        return {"status": "error", "type": "authenticate"}
    # Insert the session information from the auth function
    cookies = {
        "lecmobile": "0",
        "ASP.NET_SessionId": session["ASP.NET_SessionId"],
        "LastLoginUserName": session["LastLoginUserName"],
        "lectiogsc": session["lectiogsc"],
        "LectioTicket": session["LectioTicket"]
    }

    # Insert User-agent headers and the cookie information
    headers = {
        "User-Agent":
        "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1665.2 Safari/537.36",
        "Content-Type": "application/x-www-form-urlencoded",
        "Host": "www.lectio.dk",
        "Origin": "https://www.lectio.dk",
        "Cookie": functions.implode(cookies, "{{index}}={{value}}", "; ")
    }

    response = proxy.session.get(url, headers=headers)

    html = response.text

    soup = Soup(html)

    # The info table carries the survey metadata; its absence means the page
    # did not render as expected (bad id, no access, or layout change).
    if soup.find("table", attrs={"id": "m_Content_InfoTable"}) is None:
        return {"status": False, "error": "Data not found"}

    elements = soup.find("table", attrs={
        "id": "m_Content_InfoTable"
    }).findAll("td")

    # Resolve the owner via their context card (elements[1] holds the
    # owner's lectiocontextcard span).
    owner = context_card.user(
        {
            "context_card_id": elements[1].find("span")["lectiocontextcard"],
            "school_id": str(config["school_id"])
        }, session)["user"]

    ownerUser = {
        "context_cards": [
            elements[1].find("span")["lectiocontextcard"],
            owner["context_card_id"]
        ],
        "picture_id":
        owner["picture_id"],
        "name":
        owner["name"],
        "type":
        owner["type"]
    }

    if owner["type"] == "student":
        ownerUser["student_id"] = owner["student_id"]
    else:
        ownerUser["teacher_id"] = owner["teacher_id"]

    # Fixed cell order: title, owner, anonymous ("Ja"/other), teachers, teams.
    information = {
        "title": elements[0].text.encode("utf8"),
        "owner": ownerUser,
        "anonymous": True if elements[2].text == "Ja" else False,
        "teachers": elements[3].text.split(", "),
        "teams": elements[4].text.split(", ")
    }

    sections = []

    # Accumulator state for the section currently being built; flushed to
    # `sections` whenever a new section header (<h3>) row is encountered.
    section_number = None
    section_title = None
    section_elements = []
    section_description = None

    # Section headers look like "1 Title"; question headers like "1.2 Title".
    titleProg = re.compile(r"(?P<number>[\d]*) (?P<title>.*)")
    subTitleProg = re.compile(r"(?P<number>[\d\.\d\S]*) (?P<title>.*)")

    for row in soup.find(attrs={
            "id": "m_Content_questionIsland2_pa"
    }).findAll("table"):
        if row.find("h3") is None:
            # No <h3>: this table is a question. Detect the answer widget
            # from the input type attribute; anything else is free text.
            if not row.find(attrs={"type": "RADIO"}) is None:
                type = "radio"
            elif not row.find(attrs={"type": "CHECKBOX"}) is None:
                type = "checkbox"
            else:
                type = "text"

            # First two lines of the <h4> hold "number" and "title".
            lines = row.find("h4").text.replace("\t", "").replace(
                "\r", "").strip().split("\n")

            titleGroups = subTitleProg.match(
                str(lines[0]) + " " + str(lines[1]))

            options = []

            section_id = None

            if type == "text":
                # Free-text questions expose a single textarea; its name
                # (minus the "answer_" prefix) is the question id.
                section_id = row.find("textarea")["name"].replace(
                    "answer_", "")
                options.append({
                    "type": "text",
                    "name": row.find("textarea")["name"]
                })
            else:
                # Radio/checkbox questions have one <div> per option.
                for element in row.findAll("div"):
                    section_id = element.find("input")["name"].replace(
                        "answer_", "")
                    options.append({
                        "title":
                        element.find("label").text.encode("utf8"),
                        "value":
                        element.find("input")["value"],
                        "name":
                        element.find("input")["name"],
                        "type":
                        type
                    })

            section_elements.append({
                "type":
                type,
                "title":
                titleGroups.group("title") if not titleGroups is None else "",
                "description":
                row.find(attrs={
                    "class": "discreteCell"
                }).text.replace("\r", "").replace("\n",
                                                  "").replace("\t",
                                                              "").strip(),
                "number":
                titleGroups.group("number") if not titleGroups is None else "",
                "options":
                options,
                "section_id":
                section_id
            })
        else:
            # <h3> present: this row starts a new section. Flush the one in
            # progress (if any) before resetting the accumulators.
            if not section_number is None:
                sections.append({
                    "number": section_number,
                    "title": section_title,
                    "elements": section_elements,
                    "description": section_description
                })

                section_number = None
                section_title = None
                section_elements = []
                section_description = None

            lines = row.find("h3").text.replace("\t", "").replace(
                "\r", "").strip().split("\n")

            titleGroups = titleProg.match(str(lines[0]) + " " + str(lines[1]))

            section_number = titleGroups.group(
                "number") if not titleGroups is None else None
            section_title = titleGroups.group(
                "title") if not titleGroups is None else None
            section_description = row.find(attrs={
                "class": "discreteCell"
            }).text.replace("\r\n", "").replace("\t", "").strip()

    # Surveys without any explicit section header get a synthetic section 1.
    if section_number == None:
        section_number = 1
        section_title = ""
        section_description = ""

    # Flush the final (or synthetic) section.
    sections.append({
        "number": section_number,
        "title": section_title,
        "elements": section_elements,
        "description": section_description
    })

    return {"status": "ok", "information": information, "sections": sections}
Exemplo n.º 26
0
def survey_report(config, session=False):
    url = "https://www.lectio.dk/lectio/%s/spoergeskema/spoergeskemarapportering.aspx?id=%s" % (
        str(config["school_id"]), str(config["survey_id"]))

    if session is False:
        session = authenticate.authenticate(config)

    if session == False:
        return {"status": "error", "type": "authenticate"}
    # Insert the session information from the auth function
    cookies = {
        "lecmobile": "0",
        "ASP.NET_SessionId": session["ASP.NET_SessionId"],
        "LastLoginUserName": session["LastLoginUserName"],
        "lectiogsc": session["lectiogsc"],
        "LectioTicket": session["LectioTicket"]
    }

    # Insert User-agent headers and the cookie information
    headers = {
        "User-Agent":
        "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1665.2 Safari/537.36",
        "Content-Type": "application/x-www-form-urlencoded",
        "Host": "www.lectio.dk",
        "Origin": "https://www.lectio.dk",
        "Cookie": functions.implode(cookies, "{{index}}={{value}}", "; ")
    }

    response = proxy.session.get(url, headers=headers)

    html = response.text

    soup = Soup(html)

    if soup.find("div", attrs={"id": "m_Content_sdasd_pa"}) is None:
        return {"status": False, "error": "Data not found"}

    dateTimeProg = re.compile(
        r"(?P<day>.*)/(?P<month>.*)-(?P<year>.*) (?P<hour>.*):(?P<minute>.*)")

    informationTables = soup.find("div", attrs={
        "id": "m_Content_sdasd_pa"
    }).findAll("table")
    infoElements = informationTables[0].findAll("td")

    dateGroups = dateTimeProg.match(infoElements[2].text)
    answerDate = datetime.strptime(
        "%s/%s-%s %s:%s" % (functions.zeroPadding(dateGroups.group("day")),
                            functions.zeroPadding(dateGroups.group("month")),
                            dateGroups.group("year"), dateGroups.group("hour"),
                            dateGroups.group("minute")),
        "%d/%m-%Y %H:%M") if not dateGroups is None else ""

    owner = context_card.user(
        {
            "context_card_id":
            infoElements[1].find("span")["lectiocontextcard"],
            "school_id": str(config["school_id"])
        }, session)["user"]

    ownerUser = {
        "context_cards": [
            infoElements[1].find("span")["lectiocontextcard"],
            owner["context_card_id"]
        ],
        "picture_id":
        owner["picture_id"],
        "name":
        owner["name"],
        "type":
        owner["type"]
    }

    if owner["type"] == "student":
        ownerUser["student_id"] = owner["student_id"]
    else:
        ownerUser["teacher_id"] = owner["teacher_id"]

    information = {
        "title": infoElements[0].text.encode("utf8"),
        "answer_date": answerDate,
        "owner": ownerUser
    }

    statElements = informationTables[1].findAll("td")

    stats = {
        "teachers": {
            "registred": statElements[1].text,
            "submitted": statElements[2].text,
            "submitted_with_unsubscribed": statElements[3].text,
            "not_submitted": statElements[4].text
        },
        "students": {
            "registred": statElements[5].text,
            "submitted": statElements[6].text,
            "submitted_with_unsubscribed": statElements[7].text,
            "not_submitted": statElements[8].text
        },
        "total": {
            "registred": statElements[9].text,
            "submitted": statElements[10].text,
            "submitted_with_unsubscribed": statElements[11].text,
            "not_submitted": statElements[12].text
        }
    }

    sections = []

    section_number = None
    section_title = None
    section_elements = []
    section_description = None

    current_question_title = None
    current_question_number = None
    current_question_description = None

    titleProg = re.compile(r"(?P<number>[\d\.\d\S]*) (?P<title>.*)")

    type = "text"
    answerStats = []
    unanswered = 0
    unansweredPercent = 0

    for row in soup.find(attrs={
            "id": "m_Content_ctl00_pa"
    }).find("table").findAll("tr", recursive=False):
        elements = row.findAll("td")

        text = elements[0].text.strip().replace("\r", "").replace("\t", "")

        if len(text) > 0:
            if not elements[0].find("h3") is None:
                titleGroups = titleProg.match(elements[0].find("h3").text)

                if not "." in titleGroups.group("number"):
                    if not section_number is None:
                        sections.append({
                            "number": section_number,
                            "title": section_title,
                            "elements": section_elements,
                            "description": section_description
                        })

                        section_number = None
                        section_title = None
                        section_elements = []
                        section_description = None

                    section_number = titleGroups.group(
                        "number") if not titleGroups is None else None
                    section_title = titleGroups.group(
                        "title") if not titleGroups is None else None
                    elements[0].find("h3").decompose()
                    section_description = elements[0].text.replace(
                        "\r\n", "").replace("\t", "").strip().strip("\n")
                else:
                    current_question_number = titleGroups.group(
                        "number") if not titleGroups is None else None
                    current_question_title = titleGroups.group(
                        "title") if not titleGroups is None else None
                    elements[0].find("h3").decompose()
                    current_question_description = elements[0].text.replace(
                        "\r\n", "").replace("\t", "").strip().strip("\n")
            else:
                tables = row.findAll("table")
                answers = []

                if tables[0].find("img") is None:
                    for x in tables[0].findAll("tr"):
                        xElements = x.findAll("td")

                        if type == "checkbox":
                            options = xElements[3].text.split(", ")
                        else:
                            options = [xElements[3].text]

                        if xElements[2].text == "anonym":
                            answers.append({
                                "anonymous": True,
                                "respondent_id": xElements[0].text,
                                "options": options
                            })
                        else:
                            answers.append({
                                "anonymous":
                                False,
                                "options":
                                options,
                                "user_context_card_id":
                                xElements[0].find("span")["lectiocontextcard"],
                                "user_text_id":
                                xElements[1].text,
                                "user_team_text":
                                xElements[2].text
                            })

                    section_elements.append({
                        "number":
                        current_question_number.encode("utf8"),
                        "title":
                        current_question_title.encode("utf8"),
                        "description":
                        current_question_description.encode("utf8"),
                        "type":
                        type,
                        "answers":
                        answers,
                        "answer_stats":
                        answerStats,
                        "unanswered":
                        str(unanswered),
                        "unanswered_percent":
                        str(unansweredPercent)
                    })

                    type = "text"
                    answerStats = []
                    unanswered = 0
                    unansweredPercent = 0
                else:
                    for x in tables[0].findAll("tr"):
                        xElements = x.findAll("td")
                        if x.find("th").text == "Ubesvaret":
                            type = "radio"
                            unanswered = xElements[1].text
                            unansweredPercent = xElements[2].text.replace(
                                " %", "")
                        else:
                            type = "checkbox"
                            answerStats.append({
                                "text":
                                x.find("th").text.encode("utf8"),
                                "number":
                                xElements[1].text,
                                "percent":
                                xElements[2].text.replace(" %", "").replace(
                                    ",", ".")
                            })

    if section_number == None:
        section_number = 1
        section_title = ""
        section_description = ""

    sections.append({
        "number": section_number,
        "title": section_title,
        "elements": section_elements,
        "description": section_description
    })

    return {
        "status": "ok",
        "information": information,
        "stats": stats,
        "sections": sections
    }
Exemplo n.º 27
0
def messages_headers ( config, session = False ):
	"""Fetch the message-folder tree headers for a student.

	Scrapes beskeder2.aspx (list view) and splits the left-hand navigation
	links into three groups: team folders, built-in groups ("Indbyggede
	grupper"), and the student's own groups ("Egne grupper").

	Parameters:
		config: dict with at least "school_id" and "student_id"; also passed
			to authenticate.authenticate() when no session is supplied.
		session: existing Lectio session dict (cookie values), or False to
			authenticate from scratch.

	Returns:
		{"status": "ok", "teams": [...], "build_in_groups": [...],
		"own_groups": [...]} on success,
		{"status": "error", "type": "authenticate"} if login fails, or
		{"status": False, "error": "Data not found"} when the thread list
		panel is missing.
	"""
	url = "https://www.lectio.dk/lectio/%s/beskeder2.aspx?type=liste&elevid=%s" % ( str(config["school_id"]), str(config["student_id"]) )

	if session is False:
		session = authenticate.authenticate(config)

	if session == False:
		return {"status" : "error", "type" : "authenticate"}

	# Insert the session information from the auth function
	cookies = {
		"lecmobile" : "0",
		"ASP.NET_SessionId" : session["ASP.NET_SessionId"],
		"LastLoginUserName" : session["LastLoginUserName"],
		"lectiogsc" : session["lectiogsc"],
		"LectioTicket" : session["LectioTicket"]
	}

	settings = {}

	# Insert User-agent headers and the cookie information
	headers = {
		"User-Agent" : "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1665.2 Safari/537.36",
		"Content-Type" : "application/x-www-form-urlencoded",
		"Host" : "www.lectio.dk",
		"Origin" : "https://www.lectio.dk",
		"Cookie" : functions.implode(cookies, "{{index}}={{value}}", "; ")
	}

	# NOTE(review): passing data= on a GET is unusual (settings is empty);
	# presumably harmless — confirm against proxy.session's implementation.
	response = proxy.session.get(url, data=settings, headers=headers)

	html = response.text

	soup = Soup(html)

	if soup.find("div", attrs={"id" : "s_m_Content_Content_ThreadListPanel"}) is None:
		return {
			"status" : False,
			"error" : "Data not found"
		}

	headers = soup.findAll("a", attrs={"class": "s_m_Content_Content_ListGridSelectionTree_0"})

	# Drop the first 7 links; presumably fixed navigation entries that are
	# not folders — verify against the live page layout.
	indices = 0, 1, 2, 3, 4, 5, 6
	headers = [i for j, i in enumerate(headers) if j not in indices]

	# Links are listed as teams first, then switch groups when the
	# "Indbyggede grupper"/"Egne grupper" separator headers are reached.
	typeName = "team"

	teams = []
	build_in_groups = []
	own_groups = []

	# Extracts the postback target id used as the folder's message page id.
	pageIdProg = re.compile(r"javascript:__doPostBack\('__Page','TREECLICKED_(?P<page_id>.*)'\)")

	for header in headers:
		text = header.text.strip()
		include = True

		if text == "Indbyggede grupper":
			typeName = "build_in_groups"
			include = False
		elif text == "Egne grupper":
			typeName = "own_groups"
			include = False

		if include is True:
			pageGroups = pageIdProg.match(header["href"])
			data = {
				"name" : unicode(text),
				"message_page_id" : pageGroups.group("page_id") if not pageGroups is None else ""
			}

			if typeName == "team":
				# Enrich team folders with the team's context card details
				# ("H" + page id is the team context card id).
				team = context_card.team({"school_id" : str(config["school_id"]), "context_card_id" : "H" + data["message_page_id"]} ,session)
				data["team"] = team["team"]
				teams.append(data)
			elif typeName == "own_groups":
				own_groups.append(data)
			else:
				build_in_groups.append(data)

	return {
		"status" : "ok",
		"teams" : teams,
		"build_in_groups" : build_in_groups,
		"own_groups" : own_groups
	}
Exemplo n.º 28
0
def templates(config, session=False):
    """List the survey templates visible to a student.

    Scrapes skabeloner.aspx and collects templates from two islands:
    the school's own templates and templates shared from other schools.

    Parameters:
        config: dict with at least "school_id" and "student_id"; also passed
            to authenticate.authenticate() when no session is supplied.
        session: existing Lectio session dict (cookie values), or False to
            authenticate from scratch.

    Returns:
        {"status": "ok", "templates": [...]} on success,
        {"status": "error", "type": "authenticate"} if login fails, or
        {"status": False, "error": "Data not found"} when the own-templates
        island is missing from the page.
    """
    url = "https://www.lectio.dk/lectio/%s/spoergeskema/skabeloner.aspx?elevid=%s" % (
        str(config["school_id"]), str(config["student_id"]))

    if session is False:
        session = authenticate.authenticate(config)

    if session == False:
        return {"status": "error", "type": "authenticate"}

    # Session cookies taken from the auth result
    session_cookies = {
        "lecmobile": "0",
        "ASP.NET_SessionId": session["ASP.NET_SessionId"],
        "LastLoginUserName": session["LastLoginUserName"],
        "lectiogsc": session["lectiogsc"],
        "LectioTicket": session["LectioTicket"],
    }

    # Browser-like request headers carrying the serialized cookies
    request_headers = {
        "User-Agent":
        "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1665.2 Safari/537.36",
        "Content-Type": "application/x-www-form-urlencoded",
        "Host": "www.lectio.dk",
        "Origin": "https://www.lectio.dk",
        "Cookie": functions.implode(session_cookies, "{{index}}={{value}}", "; "),
    }

    page = proxy.session.get(url, headers=request_headers)
    soup = Soup(page.text)

    own_island = soup.find(
        "div", attrs={"id": "s_m_Content_Content_createQueryIsland_pa"})
    if own_island is None:
        return {"status": False, "error": "Data not found"}

    templates = []

    # Template links embed the school id, survey id and return URL.
    link_pattern = re.compile(
        r"\/lectio\/(?P<school_id>.*)\/spoergeskema_besvar.aspx\?mode=display&id=(?P<survey_id>.*)&prevurl=(?P<prev_url>.*)"
    )

    # Own-school templates; a "noRecord" marker means the list is empty.
    if not own_island.find(attrs={"class": "noRecord"}):
        for entry in own_island.findAll("tr")[1:]:
            link_match = link_pattern.match(entry.find("a")["href"])
            templates.append({
                "school_id": str(config["school_id"]),
                "branch_id": str(config["school_id"]),
                "title": entry.find("a").text.encode("utf8"),
                "survey_id":
                link_match.group("survey_id") if link_match is not None else "",
                "template": True,
            })

    # Templates shared by other schools (detail island); same skip-header,
    # skip-when-empty handling as above.
    shared_island = soup.find(
        "div", attrs={"id": "s_m_Content_Content_LectioDetailIsland1_pa"})
    if not shared_island.find(attrs={"class": "noRecord"}):
        for entry in shared_island.findAll("tr")[1:]:
            link_match = link_pattern.match(entry.find("a")["href"])
            cells = entry.findAll("td")
            templates.append({
                "title": entry.find("a").text.encode("utf8"),
                "survey_id":
                link_match.group("survey_id") if link_match is not None else "",
                "school_name": cells[1].text.encode("utf8"),
                "owner_name": cells[2].text.encode("utf8"),
                "template": True,
            })

    return {"status": "ok", "templates": templates}
Exemplo n.º 29
0
def documents(config, session=False):
    """List the documents inside a Lectio document folder.

    Scrapes DokumentOversigt.aspx for config["folder_id"] belonging to
    config["student_id"] at config["school_id"], parsing each row's name,
    extension, comment, size, change date and the user who changed it.

    Parameters:
        config: dict with at least "school_id", "student_id" and
            "folder_id"; also passed to authenticate.authenticate() when no
            session is supplied.
        session: existing Lectio session dict (cookie values), or False to
            authenticate from scratch.

    Returns:
        {"status": "ok", "documents": [...]} on success,
        {"status": "error", "type": "authenticate"} if login fails, or
        {"status": False, "error": "Data not found"} when the document grid
        is missing. A document's "date" is None when its timestamp cell
        matched none of the known formats.
    """
    if session is False:
        session = authenticate.authenticate(config)

    if session == False:
        return {"status": "error", "type": "authenticate"}

    cookies = {
        "lecmobile": "0",
        "ASP.NET_SessionId": session["ASP.NET_SessionId"],
        "LastLoginUserName": session["LastLoginUserName"],
        "lectiogsc": session["lectiogsc"],
        "LectioTicket": session["LectioTicket"]
    }

    # Insert User-agent headers and the cookie information
    headers = {
        "User-Agent":
        "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1665.2 Safari/537.36",
        "Content-Type": "application/x-www-form-urlencoded",
        "Host": "www.lectio.dk",
        "Origin": "https://www.lectio.dk",
        "Cookie": functions.implode(cookies, "{{index}}={{value}}", "; ")
    }

    url = "https://www.lectio.dk/lectio/%s/DokumentOversigt.aspx?elevid=%s&folderid=%s" % (
        str(config["school_id"]), str(
            config["student_id"]), str(config["folder_id"]))

    response = proxy.session.get(url, headers=headers)

    html = response.text

    soup = Soup(html)

    if soup.find("table",
                 attrs={"id": "s_m_Content_Content_DocumentGridView_ctl00"
                        }) is None:
        return {"status": False, "error": "Data not found"}

    rows = soup.find("table",
                     attrs={
                         "id": "s_m_Content_Content_DocumentGridView_ctl00"
                     }).findAll("tr")
    # Drop the header row.
    rows.pop(0)

    # Lectio abbreviates timestamps depending on their age; try the most
    # specific pattern first.
    shortDayTimeProg = re.compile(
        r"(?P<day_name>.*) (?P<hour>.*):(?P<minute>.*)")
    timeProg = re.compile(
        r"(?P<hour>.*):(?P<minute>.*)")  # Current day, month, year
    dayProg = re.compile(
        r"(?P<day_name>.*) (?P<day>.*)/(?P<month>.*)")  # Current year
    dateProg = re.compile(r"(?P<day>.*)/(?P<month>.*)-(?P<year>.*)")

    # Danish short weekday names -> strptime's English abbreviations.
    # NOTE(review): "Son" for Sunday looks like it should be "Sun" — kept
    # as-is since changing it alters parsed dates; confirm against %a.
    dayConversion = {
        u"Ma": "Mon",
        u"Ti": "Tue",
        u"On": "Wed",
        u"To": "Thu",
        u"Fr": "Fri",
        u"Lø": "Sat",
        u"Sø": "Son"
    }

    documents = []
    documentIdProg = re.compile(
        r"\/lectio\/(?P<school_id>.*)\/dokumenthent.aspx\?documentid=(?P<document_id>.*)"
    )

    for row in rows:
        elements = row.findAll("td")
        idGroups = documentIdProg.match(elements[1].find("a")["href"])

        # Resolve who last changed the document via their context card.
        changer = context_card.user(
            {
                "context_card_id": elements[3]["lectiocontextcard"],
                "school_id": config["school_id"]
            }, session)

        # Reset per row: previously `date` was only assigned inside the
        # if/elif chain, so an unrecognized timestamp raised NameError on
        # the first row or silently reused the previous row's date.
        date = None

        if shortDayTimeProg.match(elements[4].text):
            # "<weekday> HH:MM" — within the current week.
            timeGroups = shortDayTimeProg.match(elements[4].text)
            date = datetime.strptime(
                "%s/%s-%s %s:%s" %
                (dayConversion[unicode(
                    timeGroups.group("day_name").capitalize())],
                 today.strftime("%W"), today.strftime("%Y"),
                 timeGroups.group("hour"), timeGroups.group("minute")),
                "%a/%W-%Y %H:%M")
        elif timeProg.match(elements[4].text):
            # "HH:MM" — today.
            timeGroups = timeProg.match(elements[4].text)
            date = datetime.strptime(
                "%s/%s-%s %s:%s" %
                (today.strftime("%d"), today.strftime("%m"),
                 today.strftime("%Y"), timeGroups.group("hour"),
                 timeGroups.group("minute")), "%d/%m-%Y %H:%M")
        elif dayProg.match(elements[4].text):
            # "<weekday> D/M" — current year; time defaults to noon.
            dayGroups = dayProg.match(elements[4].text)
            date = datetime.strptime(
                "%s/%s-%s %s:%s" %
                (dayGroups.group("day"), dayGroups.group("month"),
                 today.strftime("%Y"), "12", "00"), "%d/%m-%Y %H:%M")
        elif dateProg.match(elements[4].text):
            # "D/M-Y" — full date; time defaults to noon.
            dateGroups = dateProg.match(elements[4].text)
            date = datetime.strptime(
                "%s/%s-%s %s:%s" %
                (dateGroups.group("day"), dateGroups.group("month"),
                 dateGroups.group("year"), "12", "00"), "%d/%m-%Y %H:%M")

        data = {
            "folder_id":
            str(config["folder_id"]),
            "name":
            unicode(elements[1].find("span")["title"].replace(
                "Fulde filnavn: ", "")),
            "extension":
            os.path.splitext(elements[1].find("span")["title"].replace(
                "Fulde filnavn: ", ""))[1].replace(".", ""),
            "comment":
            unicode(elements[2].find("span").text),
            "document_id":
            idGroups.group("document_id") if not idGroups is None else "",
            "size":
            elements[5].text.replace(",", "."),
            "date":
            date,
            "user":
            changer["user"]
        }

        documents.append(data)

    return {"status": "ok", "documents": documents}
Exemplo n.º 30
0
def assignment_info ( config, session = False ):
	url = urls.assignment_info.replace("{{SCHOOL_ID}}", str(config["school_id"])).replace("{{ASSIGNMENT_ID}}", str(config["assignment_id"])).replace("{{STUDENT_ID}}",str(config["student_id"]))

	if session is False:
		session = authenticate.authenticate(config)

	if session == False:
		return {"status" : "error", "type" : "authenticate"}

	# Insert the session information from the auth function
	cookies = {
		"lecmobile" : "0",
		"ASP.NET_SessionId" : session["ASP.NET_SessionId"],
		"LastLoginUserName" : session["LastLoginUserName"],
		"lectiogsc" : session["lectiogsc"],
		"LectioTicket" : session["LectioTicket"]
	}

	settings = {}

	# Insert User-agent headers and the cookie information
	headers = {
		"User-Agent" : "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1665.2 Safari/537.36",
		"Content-Type" : "application/x-www-form-urlencoded",
		"Host" : "www.lectio.dk",
		"Origin" : "https://www.lectio.dk",
		"Cookie" : functions.implode(cookies, "{{index}}={{value}}", "; ")
	}

	response = proxy.session.get(url, headers=headers)

	html = response.text

	soup = Soup(html)

	dateTime = re.compile(r"(?P<day>.*)/(?P<month>.*)-(?P<year>.*) (?P<hour>.*):(?P<minute>.*)")

	if soup.find("div", attrs={"id" : "m_Content_registerAfl_pa"}) is None:
		return {
			"status" : False,
			"error" : "Data not found"
		}

	teacherProg = re.compile(r"(?P<name>.*) \((?P<abbrevation>.*)\)")
	documentProg = re.compile(r"(?P<name>.*) \((?P<upload_date>.*)\)")
	teamProg = re.compile(r"(?P<class_name>.*) (?P<subject_name>.*)")

	rows = soup.find("div", attrs={"id" : "m_Content_registerAfl_pa"}).find("table").findAll("td")
	headers = soup.find("div", attrs={"id" : "m_Content_registerAfl_pa"}).find("table").findAll("th")
	rowMap = functions.mapRows(headers, rows)

	dateTimeGroups = dateTime.match(rowMap["Afleveringsfrist"].text)

	date = datetime.strptime("%s/%s-%s %s:%s" % (functions.zeroPadding(dateTimeGroups.group("day")), functions.zeroPadding(dateTimeGroups.group("month")), dateTimeGroups.group("year"), dateTimeGroups.group("hour"), dateTimeGroups.group("minute")), "%d/%m-%Y %H:%M")

	group_assignment = False
	members = []
	teachers = []
	teams = []
	documents = []
	comments = []

	uploadRows = soup.find("table", attrs={"id" : "m_Content_RecipientGV"}).findAll("tr")
	uploadRows.pop(0)
	uploadProg = re.compile(r"\/lectio/(?P<school_id>.*)\/ExerciseFileGet.aspx\?type=(?P<type>.*)&entryid=(?P<entry_id>.*)")

	for row in uploadRows:
		elements = row.findAll("td")
		context_card_id = elements[1].find("span")["lectiocontextcard"]
		dateTimeGroups = dateTime.match(elements[0].find("span").text)
		upload_type = ""
		entry_id = ""
		if not elements[3].find("a") is None:
			uploadGroups = uploadProg.match(elements[3].find("a")["href"])
			entry_id = uploadGroups.group("entry_id")
			upload_type = "student_assignment" if uploadGroups.group("type") == "elevopgave" else "other"


		uploadDate = datetime.strptime("%s/%s-%s %s:%s" % (functions.zeroPadding(dateTimeGroups.group("day")), functions.zeroPadding(dateTimeGroups.group("month")), dateTimeGroups.group("year"), dateTimeGroups.group("hour"), dateTimeGroups.group("minute")), "%d/%m-%Y %H:%M")

		comments.append({
			"file" : {
				"name" : elements[3].find("a").text.encode("utf8") if not elements[3].find("a") is None else "",
				"entry_id" : entry_id,
				"type" : upload_type
			},
			"comment" : functions.cleanText(elements[2].text).encode("utf8"),
			"uploader" : {
				"name" : elements[1].find("span")["title"].encode("utf8") if context_card_id[0] == "T" else elements[1].find("span").text.encode("utf8"),
				"type" : "teacher" if context_card_id[0] == "T" else "student",
				"person_id" : context_card_id.replace("T", "") if context_card_id[0] == "T" else context_card_id.replace("S", ""),
				"context_card_id" : context_card_id,
				"abbrevation" : elements[1].find("span").text.encode("utf8") if context_card_id[0] == "T" else ""
			},
			"date" : uploadDate
		})

	documentIdProg = re.compile(r"\/lectio\/(?P<school_id>.*)\/ExerciseFileGet.aspx\?type=(?P<type>.*)&exercisefileid=(?P<exercise_file_id>.*)")

	statusProg = re.compile(r"(?P<status>.*)\/ (.*): (?P<leave>.*)%")
	studentDataElements = soup.find("table", attrs={"id" : "m_Content_StudentGV"}).findAll("tr")[1].findAll("td")
	statusGroups = statusProg.match(functions.cleanText(studentDataElements[3].text).encode("utf8"))
	status = functions.cleanText(statusGroups.group("status")) if not statusGroups is None else ""
	studentData = {
		"student" : {
			"context_card_id" : studentDataElements[0].find("img")["lectiocontextcard"],
			"student_id" : studentDataElements[0].find("img")["lectiocontextcard"].replace("S", ""),
		},
		"status" : "handed" if status.strip() == "Afleveret" else "missing",
		"waiting_for" : "student" if functions.cleanText(studentDataElements[2].text) == "Elev" else "teacher" if unicode(functions.cleanText(studentDataElements[2].text)) == u"Lærer" else "none",
		"leave" : functions.cleanText(statusGroups.group("leave")) if not statusGroups is None else 0,
		"finished" : True if soup.find("input", attrs={"id" : "m_Content_StudentGV_ctl02_CompletedCB"}).has_attr("checked") and soup.find("input", attrs={"id" : "m_Content_StudentGV_ctl02_CompletedCB"})["checked"] == "checked" else False,
		"grade" : functions.cleanText(studentDataElements[5].text).encode("utf8"),
		"grade_note" : functions.cleanText(studentDataElements[6].text).encode("utf8"),
		"student_note" : functions.cleanText(studentDataElements[7].text).encode("utf8")
	}

	if u"Opgavebeskrivelse" in rowMap:
		for row in rowMap[u"Opgavebeskrivelse"].findAll("a"):
			fileNameGroups = documentProg.match(functions.cleanText(row.text.strip()))
			fileIdGroups = documentIdProg.match(row["href"])
			documentType = fileIdGroups.group("type") if not fileIdGroups is None else "",
			documents.append({
				"name" : fileNameGroups.group("name") if not fileNameGroups is None else "",
				"exercise_file_id" : fileIdGroups.group("exercise_file_id") if not fileIdGroups is None else "",
				"uploaded_date_string" : fileNameGroups.group("upload_date") if not fileNameGroups is None else "",
				"type" : "exercise_description",
				"school_id" : fileIdGroups.group("school_id") if not fileIdGroups is None else ""
			})

	for row in rowMap["Hold"].findAll("span"):
		#teamGroups = teamProg.match(row.text)
		teams.append({
			#"class_name" : unicode(teamGroups.group("class_name")) if not teamGroups is None else "",
			#"subject_name" : unicode(teamGroups.group("subject_name")) if not teamGroups is None else "",
			"team_element_name" : row.text,
			"team_element_id" : rowMap["Hold"].find("span")["lectiocontextcard"].replace("HE", ""),
			"context_card_id" : rowMap["Hold"].find("span")["lectiocontextcard"]
		})

	for row in rowMap["Ansvarlig"].findAll("span"):
		teacherGroups = teacherProg.match(row.text)
		teachers.append({
			"teacher_id" : row["lectiocontextcard"].replace("T", ""),
			"name" : teacherGroups.group("name").encode("utf8") if not teacherGroups is None else "",
			"context_card_id" : row["lectiocontextcard"],
			"abbrevation" : teacherGroups.group("abbrevation").encode("utf8") if not teacherGroups is None else ""
		})

	if soup.find("div", attrs={"id" : "m_Content_groupIsland_pa"}):
		group_assignment = True
		memberRows = soup.find("table", attrs={"id" : "m_Content_groupMembersGV"}).findAll("tr")
		memberRows.pop(0)
		memberProg = re.compile(r"(?P<name>.*), (?P<code>.*)")

		for row in memberRows:
			elements = row.findAll("td")
			memberGroups = memberProg.match(elements[0].find("span").text)
			members.append({
				"name" : memberGroups.group("name") if not memberGroups is None else "",
				"student_id" : elements[0].find("span")["lectiocontextcard"].replace("S", ""),
				"context_card_id" : elements[0].find("span")["lectiocontextcard"],
				"student_class_code" : memberGroups.group("code") if not memberGroups is None else ""
			})
	else:
		memberProg = re.compile(r"Eleven (?P<name>.*) \((?P<code>.*)\) - Opgaveaflevering")
		memberGroups = memberProg.match(soup.find(attrs={"id" : "m_HeaderContent_pageHeader"}).find("div").text)
		members.append({
			"student_id" : config["student_id"],
			"context_card_id" : soup.find(attrs={"id" : "m_HeaderContent_pageHeader"}).find("div")["lectiocontextcard"],
			"student_class_code" : memberGroups.group("code") if not memberGroups is None else "",
			"name" : memberGroups.group("name") if not memberGroups is None else "",
		})

	availableStudents = []
	availableStudentProg = re.compile(r"(?P<name>.*) \((?P<code>.*)\)")

	if not soup.find("select", attrs={"id" : "m_Content_groupStudentAddDD"}) is None:

		for row in soup.find("select", attrs={"id" : "m_Content_groupStudentAddDD"}).findAll("option"):
			progGroups = availableStudentProg.match(row.text)
			availableStudents.append({
				"name" : str(progGroups.group("name")).decode("utf8"),
				"student_id" : row["value"],
				"student_class_code" : progGroups.group("code"),
			})

	infomation = {
		"documents" : documents,
		"title" : rowMap[r"Opgavetitel"].find("span").text.encode("utf8"),
		"group_assignment" : group_assignment,
		"members" : members,
		"note" : rowMap[u"Opgavenote"].text.encode("utf8"),
		"team" : teams,
		"grading_scale" : "7-step" if rowMap[u"Karakterskala"].text == "7-trinsskala" else "13-step",
		"teachers" : teachers,
		"student_time" : rowMap[u"Elevtid"].text.replace(",", ".").replace("timer", ""),
		"date" : date,
		"in_instruction_detail" : True if rowMap[u"Iundervisningsbeskrivelse"].text == "Ja" else False,
		"comments" : comments,
		"group" : {
			"available_students" : availableStudents
		},
		"student" : studentData
	}

	#Delivered by, grade, grade_note, student_note, ended, awaiting, uploaded-documents

	return {
		"status" : "ok",
		"information" : infomation
	}
Exemplo n.º 31
0
def books( config, session = False ):
	"""Retrieve the student's book loans from Lectio's book database.

	config must contain "school_id", "student_id" and "branch_id". If
	session is False a new session is created via authenticate.authenticate();
	a failed login yields {"status" : "error", "type" : "authenticate"}.

	Returns {"status" : "ok", "loans" : [...], "term" : {...}} where each
	loan carries title, team/class/year info, lending/delivery dates
	(datetime objects) and the replacement price, and "term" describes the
	currently selected school term.
	"""
	if session is False:
		session = authenticate.authenticate(config)

	if session == False:
		return {"status" : "error", "type" : "authenticate"}
	else:
		url = urls.books_list.replace("{{SCHOOL_ID}}", str(config["school_id"])).replace("{{STUDENT_ID}}", str(config["student_id"])).replace("{{BRANCH_ID}}", str(config["branch_id"]))

		# Insert the session information from the auth function
		cookies = {
			"lecmobile" : "0",
			"ASP.NET_SessionId" : session["ASP.NET_SessionId"],
			"LastLoginUserName" : session["LastLoginUserName"],
			"lectiogsc" : session["lectiogsc"],
			"LectioTicket" : session["LectioTicket"]
		}

		# Sorting settings: an ASP.NET postback payload. __VIEWSTATEX and
		# __EVENTVALIDATION were captured from a recorded session.
		# NOTE(review): presumably Lectio accepts this stale viewstate for any
		# user -- confirm; if the server starts validating it this will break.
		settings = {
			"time" : "0",
			"__EVENTTARGET" : "s$m$Content$Content$ShowActiveOnlyCB",
			"__EVENTARGUMENT:" : "",
			"__VIEWSTATEX" : "kwUAAGlpZQk2Mjc5NjMyNjBpbARrAG1rAWcBbAJoaWlsBmsCfQGbc34dAQAAAGsDZwFrBGUaRWxldmVuIEJvIEhhbnMgVGhvbXNlbiwgMnFkbAJoaWRsAmcCaWwCawVlA29mZmwEZwJpZGwCZxVpZGwCZwFpbAJrBnFkZwNpZGwCZwFpZGwCZwVpZGwCZwFpZGwCZwNpbAJrB3BsBmhpZGwMZwFpZGwCZwFpbAJrCG1kZwNpbARrCG1rBnBkZwVpaWwCawZxZGwIZwFpbAJrCG1kZwNpbAJrCWwAZGcFaXMRaWwCawpxZGpsAGwAbABkZGRkZGRkZGRkcwBkZGRkZGcHaXMRaWwGawtxawpwawxoZGpsAGwAbABkZGRkZGRkZGRkcwBkZGRkbAJoaWlsBGsNZQlsaXN0IG1heFdrDmcCZGRnB2lkbAJnAWlsBGsIbWsGcGRnCWlpbAJrBnBkbAJnA2lzEWlsBGsKcWsGcGRqbABsAGwAZGRkZGRkZGRkZHMAZGRkZGRnC2lpbARrD2RrBnBkZGcBaWRsAmcBaWRsBGcBaWwCawhtZGcFaXMRaWwCawpxZGpsAGwAbABkZGRkZGRkZGRkcwBkZGRkZGcCaWRsAmcBaWRsCmcBaWpkZGwBZwFkZwNpamRkbAFoZGcFaWRsAmcDaXMRaWwCawpxZGpsAGwAbABkZGRkZGRkZGRkcwBkZGRkZGcHaWRsAmcDaXMRaWwCawpxZGpsAGwAbABkZGRkZGRkZGRkcwBkZGRkZGcJaWRsAmcDaXMRaWwCawpxZGpsAGwAbABkZGRkZGRkZGRkcwBkZGRkZHIKZRxzJG0kQ29udGVudCRDb250ZW50JG15cHVic0dWaXMMZGRkZGRkbgFlB0R1bW15SURkZ/////8PZGRkZGUeX19Db250cm9sc1JlcXVpcmVQb3N0QmFja0tleV9fbAFlJHMkbSRDb250ZW50JENvbnRlbnQkU2hvd0FjdGl2ZU9ubHlDQmUgcyRtJENvbnRlbnQkQ29udGVudCRCZEJvb2tMb2FuR1ZpcwxkZGRkZGRuAWUHRHVtbXlJRHMKcwF7NCcBAQAAAABzAXsvJwEBAAAAAHMBezYnAQEAAAAAcwF7OCcBAQAAAABzAXswJwEBAAAAAHMBezEnAQEAAAAAcwF7MicBAQAAAABzAXszJwEBAAAAAHMBezUnAQEAAAAAcwF7NycBAQAAAABnAWRkZGRlNXMkbSRDb250ZW50JENvbnRlbnQkcHVibGlzaGVyU2FsZXNDdHJsJHRvdGFsU2FsZXNHcmlkaXMMZGRkZGRkbgFlB0R1bW15SURkZ/////8PZGRkZGUccyRtJENvbnRlbnQkQ29udGVudCR0YWJzdHJpcGlpZGhkZSJzJG0kQ29udGVudCRDb250ZW50JHJlc2VydmF0aW9uc0dWaXMMZGRkZGRkbgFlB0R1bW15SURkZ/////8PZGRkZGUxcyRtJENvbnRlbnQkQ29udGVudCRwdWJsaXNoZXJTYWxlc0N0cmwkc2Nob29sR3JpZGlzDGRkZGRkZG4BZQdEdW1teUlEZGf/////D2RkZGRlKHMkbSRDb250ZW50JENvbnRlbnQkZWJvb2tzRm9yRmF2b3JpdGhvbGRpcwxkZGRkZGRuAWUHRHVtbXlJRGRn/////w9kZGRkZS9zJG0kQ29udGVudCRDb250ZW50JHB1Ymxpc2hlclNhbGVzQ3RybCRib29rR3JpZGlzDGRkZGRkZG4BZQdEdW1teUlEZGf/////D2RkZGRlL3MkbSRDb250ZW50JENvbnRlbnQkZWJvb2tzRm9yRmF2b3JpdEhvbGRTdHVkZW50aXMMZGRkZGRkbgFlB0R1bW15SURkaGRkZGQQAAAABXVzcmlkE1ZhbGlkYXRlUmVxdWVzdE1vZGUIbGVjdGlvaWQFbXR5cGUKRW50aXR5TmFtZQx
hdXRvY29tcGxldGUHVmlzaWJsZQtzaG93aGVhZGVycxFOYXZpZ2F0ZVVybExlY3RpbwtDdXJyZW50RGF0YRRfUmVxdWlyZXNEYXRhQmluZGluZwtfIURhdGFCb3VuZAtfIUl0ZW1Db3VudAhDc3NDbGFzcwRfIVNCCVRlYWNoZXJJZAEAAQAAAP////8BAAAAAAAAAAQBAAAAf1N5c3RlbS5Db2xsZWN0aW9ucy5HZW5lcmljLkxpc3RgMVtbU3lzdGVtLk9iamVjdCwgbXNjb3JsaWIsIFZlcnNpb249NC4wLjAuMCwgQ3VsdHVyZT1uZXV0cmFsLCBQdWJsaWNLZXlUb2tlbj1iNzdhNWM1NjE5MzRlMDg5XV0DAAAABl9pdGVtcwVfc2l6ZQhfdmVyc2lvbgUAAAgICQIAAAAGAAAABgAAABACAAAACAAAAAkDAAAACQQAAAAJBQAAAAkGAAAACQcAAAAJCAAAAA0CDAkAAABKTWFjb20uTGVjdGlvLkNvbW1vbiwgVmVyc2lvbj0xLjAuMC4wLCBDdWx0dXJlPW5ldXRyYWwsIFB1YmxpY0tleVRva2VuPW51bGwFAwAAABJNYWNvbS5MZWN0aW8uVXNySUQBAAAABl92YWx1ZQAJCQAAAJ9zfh0BAAAADAoAAABDTWFjb20uTGVjdGlvLCBWZXJzaW9uPTEuMC4wLjAsIEN1bHR1cmU9bmV1dHJhbCwgUHVibGljS2V5VG9rZW49bnVsbAUEAAAAGU1hY29tLkxlY3Rpby5MZWN0aW9SZWZVcmwBAAAABGRhdGEEKk1hY29tLkxlY3Rpby5MZWN0aW9SZWZVcmwrTGVjdGlvUmVmVXJsRGF0YQoAAAAKAAAACQsAAAABBQAAAAQAAAAJDAAAAAEGAAAABAAAAAkNAAAAAQcAAAAEAAAACQ4AAAABCAAAAAQAAAAJDwAAAAULAAAAKk1hY29tLkxlY3Rpby5MZWN0aW9SZWZVcmwrTGVjdGlvUmVmVXJsRGF0YQQAAAAGcmVmdXJsEHJlZnVybFVybEVuY29kZWQSX2lzU3RhdGljUmVmZXJlbmNlCkZvcmNlSFRUUFMBAQAAAQEKAAAABhAAAABjRWJvZy9EZWZhdWx0LmFzcHg/cHJldnVybD1CRCUyZlVzZXJSZXNlcnZhdGlvbnMuYXNweCUzZkVsZXZJRCUzZDQ3ODk3OTM2OTElMjZwcmV2dXJsJTNkZm9yc2lkZS5hc3B4CgAAAQwAAAALAAAABhEAAABfQkQvQm9va3MuYXNweD9wcmV2dXJsPUJEJTJmVXNlclJlc2VydmF0aW9ucy5hc3B4JTNmRWxldklEJTNkNDc4OTc5MzY5MSUyNnByZXZ1cmwlM2Rmb3JzaWRlLmFzcHgKAAABDQAAAAsAAAAGEgAAAGdFYm9nL0NyZWF0ZUVib29rLmFzcHg/cHJldnVybD1CRCUyZlVzZXJSZXNlcnZhdGlvbnMuYXNweCUzZkVsZXZJRCUzZDQ3ODk3OTM2OTElMjZwcmV2dXJsJTNkZm9yc2lkZS5hc3B4CgAAAQ4AAAALAAAABhMAAAB/QkQvU3R1ZGVudFJlc2VydmF0aW9ucy5hc3B4P0VsZXZJRD00Nzg5NzkzNjkxJnByZXZ1cmw9QkQlMmZVc2VyUmVzZXJ2YXRpb25zLmFzcHglM2ZFbGV2SUQlM2Q0Nzg5NzkzNjkxJTI2cHJldnVybCUzZGZvcnNpZGUuYXNweAoAAAEPAAAACwAAAAYUAAAAZ0Vib2cvQ3JlYXRlRWJvb2suYXNweD9wcmV2dXJsPUJEJTJmVXNlclJlc2VydmF0aW9ucy5hc3B4JTNmRWxldklEJTNkNDc4OTc5MzY5MSUyNnByZXZ1cmwlM2Rmb3JzaWRlLmFzcHgKAAAL/HO0KMmthSVEyM2CYSzxt4utYL4=",
			"__VIEWSTATE:" : "",
			"__EVENTVALIDATION" : "6OqghX6xZOeeKUKrRIw7ICUy+1VPS7casHOeKUMsfDj6sV1LAjCrokugRNNSYnB3AtE4D/xEDXGXEUMRV8fCTtH6dqeuxRYS3AAMtYA04et59bwlJNT3c0QGpPiKz05X2fng07YiA1EvrNnE7J2D0smysbBQFSyR2nfIDZ8f6eWBxsh2hXpJJp7aM7qydcWFGsMBDBO3OCBWoLKpI/4bHnW/GiUzfgOYqQ8qCIe91qkwdY2JO1s/Szu1CkSeWJPjohv4N2UiRhGyxtJDEXUAho/DmOUtCRMrJliE7WJBf6rGpwELcczgLirEbiGyhY9b2HAkUorn0H6Kc6FL/iHvAZNQqalZQJDp0RK0QCi/6Qzo1rrCU/sVzkB7S4zDO23hyoLFmOkwm6ILTN3hehLMVOgvq9TJqhcxUFHb47KrVEI2l94BmOGnWH1eiQlr24I35qsepc7/nNp5UvL5GX/MHg==",
			"__LASTFOCUS:" : "",
			"__VIEWSTATEENCRYPTED:" : "",
			"LectioPostbackId:" : "",
			"s$m$searchinputfield:" : ""
		}

		# Insert User-agent headers and the cookie information
		headers = {
			"User-Agent" : "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1665.2 Safari/537.36",
			"Content-Type" : "application/x-www-form-urlencoded",
			"Host" : "www.lectio.dk",
			"Origin" : "https://www.lectio.dk",
			"Cookie" : functions.implode(cookies, "{{index}}={{value}}", "; ")
		}

		response = proxy.session.post(url, data=settings, headers=headers)

		html = response.text

		soup = Soup(html)

		# All rows of the loan grid; header rows carry no <td> and are
		# filtered out by the len(columns) check below.
		loans = soup.find("table", attrs={"id" : "s_m_Content_Content_BdBookLoanGV"}).findAll("tr")
		loansList = []

		for loan in loans:
			columns = loan.findAll("td")
			if len(columns) > 0:
				# Define the short team pattern "2q KE" etc..
				shortTeamPreg = re.compile(r"(?P<class>\w.) (?P<team>\w.)")

				# Define the long team pattern "2012 FY/i" etc...
				yearTeamPreg = re.compile(r"(?P<year>[0-9]*) (?P<team>[0-9A-Z]*)\/(?P<class>\w*)")

				# Find and prepare the team string text
				teamText = columns[1].text.replace("\n", "").replace("\t", "").strip()
				teamRows = None
				preg = ""

				# Check which and if any pattern matches the string
				if shortTeamPreg.match(teamText):
					preg = "short"
					teamRows = shortTeamPreg.match(teamText)
				elif yearTeamPreg.match(teamText):
					preg = "long"
					teamRows = yearTeamPreg.match(teamText)

				# Delivery registration date is empty while the book is still
				# on loan; only parse it when present.
				delivery_reg_date = columns[3].text.replace("\n","").strip()
				if not delivery_reg_date == "":
					delivery_reg_date = datetime.strptime(delivery_reg_date, "%d-%m-%Y")

				loansList.append({
					"title" : columns[0].text.replace("\n", "").replace("\t", "").strip(),
					"team" : teamRows.group("team") if not teamRows == None else  "",
					# "year" only exists in the long pattern
					"year" : teamRows.group("year") if not teamRows == None and preg == "long" else  "",
					"class" : teamRows.group("class") if not teamRows == None else "",
					"lending_reg_date" : datetime.strptime(columns[2].text.replace("\n","").strip(), "%d-%m-%Y"),
					"delivery_reg_date" : delivery_reg_date,
					"delivery_date" : datetime.strptime(columns[4].text.replace("\n","").strip(), "%d-%m-%Y"),
					"price" : columns[5].text.replace("\n", "").replace(",", ".").replace("\t", "").replace(",", ".").strip() if False else columns[5].text.replace("\n", "").replace("\t", "").replace(",", ".").strip(),
					"team_name" : teamText
				})

		return {
			"status" : "ok",
			"loans" : loansList,
			# The term currently selected in the term dropdown
			"term" : {
				"value" : soup.find("select", attrs={"id" : "s_m_ChooseTerm_term"}).select('option[selected="selected"]')[0]["value"],
				"years_string" : soup.find("select", attrs={"id" : "s_m_ChooseTerm_term"}).select('option[selected="selected"]')[0].text
			}
		}
Exemplo n.º 32
0
def leave_reasons ( config, session = False ):
	url = "https://www.lectio.dk/lectio/%s/subnav/fravaerelev.aspx?elevid=%s&lectab=aarsager" % ( str(config["school_id"]), str(config["student_id"]) )

	if session is False:
		session = authenticate.authenticate(config)

	if session == False:
		return {"status" : "error", "type" : "authenticate"}

	cookies = {
		"lecmobile" : "0",
		"ASP.NET_SessionId" : session["ASP.NET_SessionId"],
		"LastLoginUserName" : session["LastLoginUserName"],
		"lectiogsc" : session["lectiogsc"],
		"LectioTicket" : session["LectioTicket"]
	}

	# Insert User-agent headers and the cookie information
	headers = {
		"User-Agent" : "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1665.2 Safari/537.36",
		"Content-Type" : "application/x-www-form-urlencoded",
		"Host" : "www.lectio.dk",
		"Origin" : "https://www.lectio.dk",
		"Cookie" : functions.implode(cookies, "{{index}}={{value}}", "; ")
	}

	response = proxy.session.get(url, headers=headers)

	html = response.text

	soup = Soup(html)

	if soup.find("table", attrs={"id" : "s_m_Content_Content_FatabAbsenceFravaerGV"}) is None:
		return {
			"status" : False,
			"error" : "Data not found"
		}

	missing = []
	reasons = []

	reasonKeys = {
		u"Andet" : "other",
		u"Kom for sent" : "too_late",
		u"Skolerelaterede aktiviteter" : "school_related",
		u"Private forhold" : "private",
		u"Sygdom" : "sick"
	}

	# TODO: Add Missing
	if soup.find(attrs={"id" : "s_m_Content_Content_FatabMissingAarsagerGV"}).find(attrs={"class" : "noRecord"}) is None:
		print "missing"

	if soup.find(attrs={"id" : "s_m_Content_Content_FatabAbsenceFravaerGV"}).find(attrs={"class" : "noRecord"}) is None:
		rows = soup.find(attrs={"id" : "s_m_Content_Content_FatabAbsenceFravaerGV"}).findAll("tr")
		rows.pop(0)

		activityProg = re.compile(r"\/lectio\/(?P<school_id>.*)\/aktivitet\/aktivitetinfo.aspx\?id=(?P<activity_id>.*)&prevurl=(?P<prev_url>.*)")
		datetimeProg = re.compile(r"(?P<day>.*)\/(?P<month>.*)-(?P<year>.*) (?P<time>.*)")

		for row in rows:
			elements = row.findAll("td")
			activityGroups = activityProg.match(elements[2].find("a")["href"])
			dateGroups = datetimeProg.match(elements[5].find("span").text.strip().replace("\r\n", "").replace("\t", ""))
			reasons.append({
				"type" : "lesson" if elements[0].text.strip().replace("\r\n", "").replace("\t", "") == "Lektion" else "other",
				"week" : elements[1].text.strip().replace("\r\n", "").replace("\t", ""),
				"activity_id" : activityGroups.group("activity_id") if not activityGroups is None else "",
				"leave" : elements[3].text.strip().replace("\r\n", "").replace("\t", "").replace("%", ""),
				"creadited" :True if elements[4].text.strip().replace("\r\n", "").replace("\t", "") == "Ja" else False,
				"registred" : datetime.strptime("%s/%s-%s %s" % (functions.zeroPadding(dateGroups.group("day")), functions.zeroPadding(dateGroups.group("month")), dateGroups.group("year"), dateGroups.group("time")), "%d/%m-%Y %H:%M"),
				"teacher" : {
					"abbrevation" : unicode(elements[6].text.strip().replace("\r\n", "").replace("\t", ""))
				},
				"team" : {
					"name" : unicode(elements[7].text.strip().replace("\r\n", "").replace("\t", ""))
				},
				"comment" : unicode(elements[8].text.strip().replace("\r\n", "").replace("\t", "")),
				"reason" : {
					"value" : unicode(elements[9].text.strip().replace("\r\n", "").replace("\t", "")),
					"key" : reasonKeys[unicode(elements[9].text.strip().replace("\r\n", "").replace("\t", ""))] if unicode(elements[9].text.strip().replace("\r\n", "").replace("\t", "")) in reasonKeys else "other",
					"note": unicode(elements[10].text.strip().replace("\r\n", "").replace("\t", ""))
				},

			})

	return {
		"status" : "ok",
		"reasons" : reasons,
		"missing" : missing
	}
Exemplo n.º 33
0
def assignment_info(config, session=False):
    """Fetch and parse the detail page of a single Lectio assignment.

    config must contain "school_id", "assignment_id" and "student_id".
    If session is False a new session is created through
    authenticate.authenticate(); a failed login yields
    {"status": "error", "type": "authenticate"}.

    Returns {"status": "ok", "information": {...}} with documents, team,
    teachers, members, hand-in comments and per-student status, or
    {"status": False, "error": "Data not found"} when the expected
    container is missing from the page.
    """
    url = urls.assignment_info.replace(
        "{{SCHOOL_ID}}", str(config["school_id"])).replace(
            "{{ASSIGNMENT_ID}}", str(config["assignment_id"])).replace(
                "{{STUDENT_ID}}", str(config["student_id"]))

    if session is False:
        session = authenticate.authenticate(config)

    if session == False:
        return {"status": "error", "type": "authenticate"}

    # Session cookies produced by the auth function
    cookies = {
        "lecmobile": "0",
        "ASP.NET_SessionId": session["ASP.NET_SessionId"],
        "LastLoginUserName": session["LastLoginUserName"],
        "lectiogsc": session["lectiogsc"],
        "LectioTicket": session["LectioTicket"]
    }

    # User-agent and cookie headers for the request
    headers = {
        "User-Agent":
        "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1665.2 Safari/537.36",
        "Content-Type": "application/x-www-form-urlencoded",
        "Host": "www.lectio.dk",
        "Origin": "https://www.lectio.dk",
        "Cookie": functions.implode(cookies, "{{index}}={{value}}", "; ")
    }

    response = proxy.session.get(url, headers=headers)

    soup = Soup(response.text)

    # Lectio renders timestamps as "d/m-Y H:M"
    dateTime = re.compile(
        r"(?P<day>.*)/(?P<month>.*)-(?P<year>.*) (?P<hour>.*):(?P<minute>.*)")

    def parseDateTime(groups):
        # Build a datetime from a dateTime regex match object.
        return datetime.strptime(
            "%s/%s-%s %s:%s" % (functions.zeroPadding(groups.group("day")),
                                functions.zeroPadding(groups.group("month")),
                                groups.group("year"), groups.group("hour"),
                                groups.group("minute")), "%d/%m-%Y %H:%M")

    if soup.find("div", attrs={"id": "m_Content_registerAfl_pa"}) is None:
        return {"status": False, "error": "Data not found"}

    teacherProg = re.compile(r"(?P<name>.*) \((?P<abbrevation>.*)\)")
    documentProg = re.compile(r"(?P<name>.*) \((?P<upload_date>.*)\)")

    # Label/value table of assignment metadata, keyed by the Danish labels
    infoTable = soup.find("div", attrs={"id": "m_Content_registerAfl_pa"}).find("table")
    rowMap = functions.mapRows(infoTable.findAll("th"), infoTable.findAll("td"))

    # Assignment deadline
    date = parseDateTime(dateTime.match(rowMap["Afleveringsfrist"].text))

    group_assignment = False
    members = []
    teachers = []
    teams = []
    documents = []
    comments = []

    # Hand-in log: one row per upload/comment by student or teacher
    uploadRows = soup.find("table", attrs={"id": "m_Content_RecipientGV"}).findAll("tr")
    uploadRows.pop(0)  # drop the header row
    uploadProg = re.compile(
        r"\/lectio/(?P<school_id>.*)\/ExerciseFileGet.aspx\?type=(?P<type>.*)&entryid=(?P<entry_id>.*)")

    for row in uploadRows:
        elements = row.findAll("td")
        context_card_id = elements[1].find("span")["lectiocontextcard"]
        dateTimeGroups = dateTime.match(elements[0].find("span").text)
        # Context cards start with "T" for teachers and "S" for students
        is_teacher = context_card_id[0] == "T"
        upload_type = ""
        entry_id = ""
        if not elements[3].find("a") is None:
            uploadGroups = uploadProg.match(elements[3].find("a")["href"])
            entry_id = uploadGroups.group("entry_id")
            upload_type = "student_assignment" if uploadGroups.group("type") == "elevopgave" else "other"

        comments.append({
            "file": {
                "name": elements[3].find("a").text.encode("utf8") if not elements[3].find("a") is None else "",
                "entry_id": entry_id,
                "type": upload_type
            },
            "comment": functions.cleanText(elements[2].text).encode("utf8"),
            "uploader": {
                # Teachers carry the full name in the span title attribute;
                # the span text then holds only the abbreviation.
                "name": elements[1].find("span")["title"].encode("utf8") if is_teacher else elements[1].find("span").text.encode("utf8"),
                "type": "teacher" if is_teacher else "student",
                "person_id": context_card_id.replace("T", "") if is_teacher else context_card_id.replace("S", ""),
                "context_card_id": context_card_id,
                "abbrevation": elements[1].find("span").text.encode("utf8") if is_teacher else ""
            },
            "date": parseDateTime(dateTimeGroups)
        })

    documentIdProg = re.compile(
        r"\/lectio\/(?P<school_id>.*)\/ExerciseFileGet.aspx\?type=(?P<type>.*)&exercisefileid=(?P<exercise_file_id>.*)")

    # Per-student status cell looks like "Afleveret / ...: <leave>%"
    statusProg = re.compile(r"(?P<status>.*)\/ (.*): (?P<leave>.*)%")
    studentDataElements = soup.find("table", attrs={"id": "m_Content_StudentGV"}).findAll("tr")[1].findAll("td")
    statusGroups = statusProg.match(
        functions.cleanText(studentDataElements[3].text).encode("utf8"))
    status = functions.cleanText(statusGroups.group("status")) if not statusGroups is None else ""
    completedCB = soup.find("input", attrs={"id": "m_Content_StudentGV_ctl02_CompletedCB"})
    studentData = {
        "student": {
            "context_card_id": studentDataElements[0].find("img")["lectiocontextcard"],
            "student_id": studentDataElements[0].find("img")["lectiocontextcard"].replace("S", ""),
        },
        "status": "handed" if status.strip() == "Afleveret" else "missing",
        "waiting_for": "student" if functions.cleanText(studentDataElements[2].text) == "Elev" else "teacher" if unicode(functions.cleanText(studentDataElements[2].text)) == u"Lærer" else "none",
        "leave": functions.cleanText(statusGroups.group("leave")) if not statusGroups is None else 0,
        "finished": completedCB.has_attr("checked") and completedCB["checked"] == "checked",
        "grade": functions.cleanText(studentDataElements[5].text).encode("utf8"),
        "grade_note": functions.cleanText(studentDataElements[6].text).encode("utf8"),
        "student_note": functions.cleanText(studentDataElements[7].text).encode("utf8")
    }

    # Exercise-description attachments (optional row)
    if u"Opgavebeskrivelse" in rowMap:
        for row in rowMap[u"Opgavebeskrivelse"].findAll("a"):
            fileNameGroups = documentProg.match(functions.cleanText(row.text.strip()))
            fileIdGroups = documentIdProg.match(row["href"])
            documents.append({
                "name": fileNameGroups.group("name") if not fileNameGroups is None else "",
                "exercise_file_id": fileIdGroups.group("exercise_file_id") if not fileIdGroups is None else "",
                "uploaded_date_string": fileNameGroups.group("upload_date") if not fileNameGroups is None else "",
                "type": "exercise_description",
                "school_id": fileIdGroups.group("school_id") if not fileIdGroups is None else ""
            })

    # Teams ("Hold") the assignment belongs to
    for row in rowMap["Hold"].findAll("span"):
        teams.append({
            "team_element_name": row.text,
            "team_element_id": rowMap["Hold"].find("span")["lectiocontextcard"].replace("HE", ""),
            "context_card_id": rowMap["Hold"].find("span")["lectiocontextcard"]
        })

    # Responsible teachers ("Ansvarlig"), rendered as "Name (ABBR)"
    for row in rowMap["Ansvarlig"].findAll("span"):
        teacherGroups = teacherProg.match(row.text)
        teachers.append({
            "teacher_id": row["lectiocontextcard"].replace("T", ""),
            "name": teacherGroups.group("name").encode("utf8") if not teacherGroups is None else "",
            "context_card_id": row["lectiocontextcard"],
            "abbrevation": teacherGroups.group("abbrevation").encode("utf8") if not teacherGroups is None else ""
        })

    if soup.find("div", attrs={"id": "m_Content_groupIsland_pa"}):
        # Group assignment: parse every member from the members grid
        group_assignment = True
        memberRows = soup.find("table", attrs={"id": "m_Content_groupMembersGV"}).findAll("tr")
        memberRows.pop(0)  # drop the header row
        memberProg = re.compile(r"(?P<name>.*), (?P<code>.*)")

        for row in memberRows:
            elements = row.findAll("td")
            memberGroups = memberProg.match(elements[0].find("span").text)
            members.append({
                "name": memberGroups.group("name") if not memberGroups is None else "",
                "student_id": elements[0].find("span")["lectiocontextcard"].replace("S", ""),
                "context_card_id": elements[0].find("span")["lectiocontextcard"],
                "student_class_code": memberGroups.group("code") if not memberGroups is None else ""
            })
    else:
        # Individual assignment: the only member is the current student,
        # read from the page header "Eleven <name> (<code>) - Opgaveaflevering"
        memberProg = re.compile(r"Eleven (?P<name>.*) \((?P<code>.*)\) - Opgaveaflevering")
        memberGroups = memberProg.match(
            soup.find(attrs={"id": "m_HeaderContent_pageHeader"}).find("div").text)
        members.append({
            "student_id": config["student_id"],
            "context_card_id": soup.find(attrs={"id": "m_HeaderContent_pageHeader"}).find("div")["lectiocontextcard"],
            "student_class_code": memberGroups.group("code") if not memberGroups is None else "",
            "name": memberGroups.group("name") if not memberGroups is None else "",
        })

    # Students that could still be added to the group (dropdown is only
    # present for group assignments with free slots)
    availableStudents = []
    availableStudentProg = re.compile(r"(?P<name>.*) \((?P<code>.*)\)")
    addDropdown = soup.find("select", attrs={"id": "m_Content_groupStudentAddDD"})

    if not addDropdown is None:
        for row in addDropdown.findAll("option"):
            progGroups = availableStudentProg.match(row.text)
            availableStudents.append({
                "name": str(progGroups.group("name")).decode("utf8"),
                "student_id": row["value"],
                "student_class_code": progGroups.group("code"),
            })

    information = {
        "documents": documents,
        "title": rowMap["Opgavetitel"].find("span").text.encode("utf8"),
        "group_assignment": group_assignment,
        "members": members,
        "note": rowMap[u"Opgavenote"].text.encode("utf8"),
        "team": teams,
        "grading_scale": "7-step" if rowMap[u"Karakterskala"].text == "7-trinsskala" else "13-step",
        "teachers": teachers,
        # "Elevtid" is rendered like "1,5 timer"; normalise to "1.5 "
        "student_time": rowMap[u"Elevtid"].text.replace(",", ".").replace("timer", ""),
        "date": date,
        "in_instruction_detail": rowMap[u"Iundervisningsbeskrivelse"].text == "Ja",
        "comments": comments,
        "group": {
            "available_students": availableStudents
        },
        "student": studentData
    }

    return {"status": "ok", "information": information}
Exemplo n.º 34
0
def leave(config, session=False):
    """Fetch a student's absence ("fravaer") overview from Lectio.

    config must contain "school_id" and "student_id". When session is False a
    session is created via authenticate.authenticate(config).

    Returns:
        {"status": "ok", "leave": [...], "term": {...}} on success,
        {"status": "error", "type": "authenticate"} if authentication fails,
        {"status": False, "error": "Data not found"} if the absence table is
        missing from the fetched page.
    """
    url = "https://www.lectio.dk/lectio/%s/subnav/fravaerelev.aspx?elevid=%s" % (
        str(config["school_id"]), str(config["student_id"]))

    if session is False:
        session = authenticate.authenticate(config)

    # Fail fast when authentication did not succeed. The sibling endpoints all
    # return this error dict; without the guard the cookie lookups below would
    # raise a KeyError on a False session.
    if session == False:
        return {"status": "error", "type": "authenticate"}

    # Insert the session information from the auth function
    cookies = {
        "lecmobile": "0",
        "ASP.NET_SessionId": session["ASP.NET_SessionId"],
        "LastLoginUserName": session["LastLoginUserName"],
        "lectiogsc": session["lectiogsc"],
        "LectioTicket": session["LectioTicket"]
    }

    # Insert User-agent headers and the cookie information
    headers = {
        "User-Agent":
        "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1665.2 Safari/537.36",
        "Content-Type": "application/x-www-form-urlencoded",
        "Host": "www.lectio.dk",
        "Origin": "https://www.lectio.dk",
        "Cookie": functions.implode(cookies, "{{index}}={{value}}", "; ")
    }

    response = proxy.session.get(url, headers=headers)

    html = response.text

    soup = Soup(html)

    if soup.find("table",
                 attrs={
                     "id": "s_m_Content_Content_SFTabStudentAbsenceDataTable"
                 }) is None:
        return {"status": False, "error": "Data not found"}

    rows = soup.find("table",
                     attrs={
                         "id":
                         "s_m_Content_Content_SFTabStudentAbsenceDataTable"
                     }).findAll("tr")
    # Drop the four leading header rows and the trailing summary row so that
    # only one row per team remains.
    rows.pop(0)
    rows.pop(0)
    rows.pop(0)
    rows.pop(0)
    rows.pop(len(rows) - 1)

    leaveRows = []
    teamIdProg = re.compile(
        r"\/lectio\/(?P<school_id>.*)\/fravaer_elevhold.aspx\?holdelementid=(?P<team_element_id>.*)"
    )
    # Absence cells are formatted "<number>/<modules>"; comma decimals are
    # normalised to dots before matching.
    leaveProg = re.compile(r"(?P<number>.*)\/(?P<modules>.*)")

    for row in rows:
        elements = row.findAll("td")
        teamIdGroups = teamIdProg.match(elements[0].find("a")["href"])
        # Odd-indexed cells hold percentages; even-indexed cells hold the
        # "number/modules" pairs parsed here (module absence first, then the
        # written-work columns).
        periodeGroups = leaveProg.match(elements[2].text.replace(",", "."))
        calculatedGroups = leaveProg.match(elements[4].text.replace(",", "."))
        yearGroups = leaveProg.match(elements[6].text.replace(",", "."))

        writtenPeriodeGroups = leaveProg.match(elements[8].text.replace(
            ",", "."))
        writtenCalculatedGroups = leaveProg.match(elements[10].text.replace(
            ",", "."))
        writtenYearGroups = leaveProg.match(elements[12].text.replace(
            ",", "."))
        data = {
            "team": {
                "name":
                unicode(elements[0].find("a").text),
                "team_id":
                teamIdGroups.group("team_element_id")
                if not teamIdGroups is None else ""
            },
            "leave": {
                "period": {
                    # Period boundaries come from the page-level date chooser
                    # inputs, formatted "d/m-Y".
                    "end_date":
                    datetime.strptime(
                        soup.find(
                            "input",
                            attrs={
                                "id":
                                "s_m_Content_Content_SFTabPeriodChooserCtrl_end__date_tb"
                            })["value"], "%d/%m-%Y"),
                    "start_date":
                    datetime.strptime(
                        soup.find(
                            "input",
                            attrs={
                                "id":
                                "s_m_Content_Content_SFTabPeriodChooserCtrl_start__date_tb"
                            })["value"], "%d/%m-%Y"),
                    "percent":
                    elements[1].text.replace(",", ".").replace("%", ""),
                    "modules":
                    periodeGroups.group("modules")
                    if not periodeGroups is None else "",
                    "leave":
                    periodeGroups.group("number")
                    if not periodeGroups is None else ""
                },
                "calculated": {
                    "percent":
                    elements[3].text.replace(",", ".").replace("%", ""),
                    "modules":
                    calculatedGroups.group("modules")
                    if not calculatedGroups is None else "",
                    "leave":
                    calculatedGroups.group("number")
                    if not calculatedGroups is None else ""
                },
                "year": {
                    "percent":
                    elements[5].text.replace(",", ".").replace("%", ""),
                    "modules":
                    yearGroups.group("modules")
                    if not yearGroups is None else "",
                    "leave":
                    yearGroups.group("number")
                    if not yearGroups is None else ""
                }
            },
            "written": {
                "period": {
                    "end_date":
                    datetime.strptime(
                        soup.find(
                            "input",
                            attrs={
                                "id":
                                "s_m_Content_Content_SFTabPeriodChooserCtrl_end__date_tb"
                            })["value"], "%d/%m-%Y"),
                    "start_date":
                    datetime.strptime(
                        soup.find(
                            "input",
                            attrs={
                                "id":
                                "s_m_Content_Content_SFTabPeriodChooserCtrl_start__date_tb"
                            })["value"], "%d/%m-%Y"),
                    "percent":
                    elements[7].text.replace(",", ".").replace("%", ""),
                    "hours":
                    writtenPeriodeGroups.group("modules")
                    if not writtenPeriodeGroups is None else "",
                    "leave":
                    writtenPeriodeGroups.group("number")
                    if not writtenPeriodeGroups is None else ""
                },
                "calculated": {
                    "percent":
                    elements[9].text.replace(",", ".").replace("%", ""),
                    "hours":
                    writtenCalculatedGroups.group("modules")
                    if not writtenCalculatedGroups is None else "",
                    "leave":
                    writtenCalculatedGroups.group("number")
                    if not writtenCalculatedGroups is None else ""
                },
                "year": {
                    "percent":
                    elements[11].text.replace(",", ".").replace("%", ""),
                    "hours":
                    writtenYearGroups.group("modules")
                    if not writtenYearGroups is None else "",
                    "leave":
                    writtenYearGroups.group("number")
                    if not writtenYearGroups is None else ""
                }
            }
        }

        leaveRows.append(data)

    return {
        "status": "ok",
        "leave": leaveRows,
        "term": {
            "value":
            soup.find("select", attrs={
                "id": "s_m_ChooseTerm_term"
            }).select('option[selected="selected"]')[0]["value"],
            "years_string":
            soup.find("select", attrs={
                "id": "s_m_ChooseTerm_term"
            }).select('option[selected="selected"]')[0].text,
            "start_date":
            datetime.strptime(
                soup.find(
                    "input",
                    attrs={
                        "id":
                        "s_m_Content_Content_SFTabPeriodChooserCtrl_start__date_tb"
                    })["value"], "%d/%m-%Y")
        },
    }
Exemplo n.º 35
0
def survey_report ( config, session = False ):
	"""Fetch and parse a survey report ("spoergeskemarapportering") from Lectio.

	config must contain "school_id" and "survey_id". When session is False a
	session is created via authenticate.authenticate(config).

	Returns {"status" : "ok", "information" : ..., "stats" : ..., "sections" : [...]}
	on success, {"status" : "error", "type" : "authenticate"} if authentication
	fails, or {"status" : False, "error" : "Data not found"} if the report
	container div is missing from the fetched page.
	"""
	url = "https://www.lectio.dk/lectio/%s/spoergeskema/spoergeskemarapportering.aspx?id=%s" % ( str(config["school_id"]), str(config["survey_id"]) )

	if session is False:
		session = authenticate.authenticate(config)

	if session == False:
		return {"status" : "error", "type" : "authenticate"}
	# Insert the session information from the auth function
	cookies = {
		"lecmobile" : "0",
		"ASP.NET_SessionId" : session["ASP.NET_SessionId"],
		"LastLoginUserName" : session["LastLoginUserName"],
		"lectiogsc" : session["lectiogsc"],
		"LectioTicket" : session["LectioTicket"]
	}

	# Insert User-agent headers and the cookie information
	headers = {
		"User-Agent" : "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1665.2 Safari/537.36",
		"Content-Type" : "application/x-www-form-urlencoded",
		"Host" : "www.lectio.dk",
		"Origin" : "https://www.lectio.dk",
		"Cookie" : functions.implode(cookies, "{{index}}={{value}}", "; ")
	}

	response = proxy.session.get(url, headers=headers)

	html = response.text

	soup = Soup(html)

	if soup.find("div", attrs={"id" : "m_Content_sdasd_pa"}) is None:
		return {
			"status" : False,
			"error" : "Data not found"
		}

	# The answer deadline is rendered as "d/m-Y H:M".
	dateTimeProg = re.compile(r"(?P<day>.*)/(?P<month>.*)-(?P<year>.*) (?P<hour>.*):(?P<minute>.*)")

	# First table: title / owner / deadline; second table: response stats.
	informationTables = soup.find("div", attrs={"id" : "m_Content_sdasd_pa"}).findAll("table")
	infoElements = informationTables[0].findAll("td")

	dateGroups = dateTimeProg.match(infoElements[2].text)
	answerDate = datetime.strptime("%s/%s-%s %s:%s" % (functions.zeroPadding(dateGroups.group("day")), functions.zeroPadding(dateGroups.group("month")), dateGroups.group("year"), dateGroups.group("hour"), dateGroups.group("minute")), "%d/%m-%Y %H:%M") if not dateGroups is None else ""

	# Resolve the survey owner through their Lectio context card.
	owner = context_card.user({
		"context_card_id" : infoElements[1].find("span")["lectiocontextcard"],
		"school_id" : str(config["school_id"])
	}, session)["user"]

	ownerUser = {
		"context_cards" : [infoElements[1].find("span")["lectiocontextcard"], owner["context_card_id"]],
		"picture_id" : owner["picture_id"],
		"name" : owner["name"],
		"type" : owner["type"]
	}

	if owner["type"] == "student":
		ownerUser["student_id"] = owner["student_id"]
	else:
		ownerUser["teacher_id"] = owner["teacher_id"]

	information = {
		"title" : infoElements[0].text.encode("utf8"),
		"answer_date" : answerDate,
		"owner" : ownerUser
	}

	# Response statistics, grouped as teachers / students / total by fixed
	# cell positions in the second information table.
	statElements = informationTables[1].findAll("td")

	stats = {
		"teachers" : {
			"registred" : statElements[1].text,
			"submitted" : statElements[2].text,
			"submitted_with_unsubscribed" : statElements[3].text,
			"not_submitted" : statElements[4].text
		},
		"students" : {
			"registred" : statElements[5].text,
			"submitted" : statElements[6].text,
			"submitted_with_unsubscribed" : statElements[7].text,
			"not_submitted" : statElements[8].text
		},
		"total" : {
			"registred" : statElements[9].text,
			"submitted" : statElements[10].text,
			"submitted_with_unsubscribed" : statElements[11].text,
			"not_submitted" : statElements[12].text
		}
	}

	# State machine over the report rows: section headings open a new section,
	# question headings set the "current question" fields, and the following
	# answer rows are attached to that question.
	sections = []

	section_number = None
	section_title = None
	section_elements = []
	section_description = None

	current_question_title = None
	current_question_number = None
	current_question_description = None

	# Headings look like "<number> <title>"; a number without "." is a section
	# (e.g. "1"), a dotted number is a question within it (e.g. "1.2").
	titleProg = re.compile(r"(?P<number>[\d\.\d\S]*) (?P<title>.*)")

	# NOTE: "type" shadows the builtin; kept as-is to preserve behaviour.
	type = "text"
	answerStats = []
	unanswered = 0
	unansweredPercent = 0

	for row in soup.find(attrs={"id" : "m_Content_ctl00_pa"}).find("table").findAll("tr", recursive=False):
		elements = row.findAll("td")

		text = elements[0].text.strip().replace("\r", "").replace("\t", "")

		if len(text) > 0:
			if not elements[0].find("h3") is None:
				titleGroups = titleProg.match(elements[0].find("h3").text)

				if not "." in titleGroups.group("number"):
					# New section heading: flush the previous section first.
					if not section_number is None:
						sections.append({
							"number" : section_number,
							"title" : section_title,
							"elements" : section_elements,
							"description" : section_description
						})

						section_number = None
						section_title = None
						section_elements = []
						section_description = None

					section_number = titleGroups.group("number") if not titleGroups is None else None
					section_title = titleGroups.group("title") if not titleGroups is None else None
					# Remove the heading so the remaining cell text is the
					# free-text description only.
					elements[0].find("h3").decompose()
					section_description = elements[0].text.replace("\r\n", "").replace("\t", "").strip().strip("\n")
				else:
					current_question_number = titleGroups.group("number") if not titleGroups is None else None
					current_question_title = titleGroups.group("title") if not titleGroups is None else None
					elements[0].find("h3").decompose()
					current_question_description = elements[0].text.replace("\r\n", "").replace("\t", "").strip().strip("\n")
			else:
				tables = row.findAll("table")
				answers = []

				# A table without images holds the individual answers; a table
				# with images holds the per-option bar-chart statistics and is
				# handled in the else-branch below (it always precedes the
				# answer table, priming "type" and the stats accumulators).
				if tables[0].find("img") is None:
					for x in tables[0].findAll("tr"):
						xElements = x.findAll("td")

						if type == "checkbox":
							options = xElements[3].text.split(", ")
						else:
							options = [xElements[3].text]

						# "anonym" in the third cell marks an anonymised
						# respondent row.
						if xElements[2].text == "anonym":
							answers.append({
								"anonymous" : True,
								"respondent_id" : xElements[0].text,
								"options" : options
							})
						else:
							answers.append({
								"anonymous" : False,
								"options" : options,
								"user_context_card_id" : xElements[0].find("span")["lectiocontextcard"],
								"user_text_id" : xElements[1].text,
								"user_team_text" : xElements[2].text
							})


					section_elements.append({
						"number" : current_question_number.encode("utf8"),
						"title" : current_question_title.encode("utf8"),
						"description" : current_question_description.encode("utf8"),
						"type" : type,
						"answers" : answers,
						"answer_stats" : answerStats,
						"unanswered" : str(unanswered),
						"unanswered_percent" : str(unansweredPercent)
					})

					# Reset the per-question accumulators for the next question.
					type = "text"
					answerStats = []
					unanswered = 0
					unansweredPercent = 0
				else:
					for x in tables[0].findAll("tr"):
						xElements = x.findAll("td")
						# "Ubesvaret" (unanswered) rows mark single-choice
						# questions; any other stats row implies checkboxes.
						if x.find("th").text == "Ubesvaret":
							type = "radio"
							unanswered = xElements[1].text
							unansweredPercent = xElements[2].text.replace(" %", "")
						else:
							type = "checkbox"
							answerStats.append({
								"text" : x.find("th").text.encode("utf8"),
								"number" : xElements[1].text,
								"percent" : xElements[2].text.replace(" %", "").replace(",", ".")
							})

	# If no section heading was ever seen, wrap everything in a default section.
	if section_number == None:
		section_number = 1
		section_title = ""
		section_description = ""

	sections.append({
		"number" : section_number,
		"title" : section_title,
		"elements" : section_elements,
		"description" : section_description
	})

	return {
		"status" : "ok",
		"information" : information,
		"stats" : stats,
		"sections" : sections
	}
Exemplo n.º 36
0
def phase(config, session=False):
    """Fetch and parse a study-plan phase ("forloeb") page from Lectio.

    config must contain "school_id" and "phase_id". When session is False a
    session is created via authenticate.authenticate(config).

    Returns {"status": "ok", "phase": {...}} on success,
    {"status": "error", "type": "authenticate"} if authentication fails, or
    {"status": False, "error": "Data not found"} if the phase container div
    is missing from the fetched page.
    """
    url = "https://www.lectio.dk/lectio/%s/studieplan/forloeb_vis.aspx?phaseid=%s" % (
        str(config["school_id"]), str(config["phase_id"]))

    if session is False:
        session = authenticate.authenticate(config)

    if session == False:
        return {"status": "error", "type": "authenticate"}

    # Insert the session information from the auth function
    cookies = {
        "lecmobile": "0",
        "ASP.NET_SessionId": session["ASP.NET_SessionId"],
        "LastLoginUserName": session["LastLoginUserName"],
        "lectiogsc": session["lectiogsc"],
        "LectioTicket": session["LectioTicket"]
    }

    # Insert User-agent headers and the cookie information
    headers = {
        "User-Agent":
        "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1665.2 Safari/537.36",
        "Content-Type": "application/x-www-form-urlencoded",
        "Host": "www.lectio.dk",
        "Origin": "https://www.lectio.dk",
        "Cookie": functions.implode(cookies, "{{index}}={{value}}", "; ")
    }

    response = proxy.session.get(url, headers=headers)

    html = response.text

    soup = Soup(html)

    if soup.find("div", attrs={"id": "m_Content_islandViewForløb_pa"}) is None:
        return {"status": False, "error": "Data not found"}

    # Build a label -> cell mapping from the two-column phase table.
    # NOTE: "headers" is reused here for the table header cells, shadowing the
    # HTTP headers dict above (no longer needed at this point).
    headers = []
    elements = []

    for row in soup.find("div", attrs={
            "id": "m_Content_islandViewForløb_pa"
    }).find("table").findAll("tr", recursive=False):
        headers.append(row.find("th", recursive=False))
        elements.append(row.find("td", recursive=False))

    rows = functions.mapRows(headers, elements)

    # "Changed"/"Created" cells look like "<date> af <teacher> (<abbr>)".
    changeProg = re.compile(
        r"(?P<date>.*) af (?P<teacher>.*) \((?P<abbrevation>.*)\)")
    teamProg = re.compile(ur"(?P<term>.*): (?P<team>.*)")

    teams = []
    periods = []
    focusPoints = []
    workMethods = []
    activities = []
    assignments = []

    # NOTE: the separators around "-" in this pattern are literal TAB
    # characters, matching the raw cell text — do not "fix" the whitespace.
    periodeProg = re.compile(r"(?P<start>.*)	-	(?P<end>.*)")
    activityProg = re.compile(
        r"\/lectio\/(?P<school_id>.*)\/aktivitet\/aktivitetinfo.aspx\?id=(?P<activity_id>.*)&prevurl=(?P<prev_url>.*)"
    )

    # Linked activities: extract the activity id from each row's link URL.
    if not rows["Aktiviteter"].find(
            attrs={"id": "m_Content_ActivitiesGV"}) is None:
        for row in rows["Aktiviteter"].find(attrs={
                "id": "m_Content_ActivitiesGV"
        }).findAll("tr")[1:]:
            elements = row.findAll("td")
            activityGroups = activityProg.match(elements[1].find("a")["href"])
            activities.append({
                "activity_id":
                activityGroups.group("activity_id")
                if not activityGroups is None else ""
            })

    # Written assignments ("Skriftligt arbejde"): name + due date per row,
    # skipping the header row.
    if not rows["Skriftligtarbejde"].find(
            attrs={"id": "m_Content_ExercisesGrid"}) is None:
        for row in rows["Skriftligtarbejde"].find(
                attrs={
                    "id": "m_Content_ExercisesGrid"
                }).findAll("tr")[1:]:
            elements = row.findAll("td")

            assignments.append({
                "name":
                unicode(elements[0].text),
                "date":
                datetime.strptime(elements[1].text.strip(), "%d-%m-%Y")
            })

    # One period per line, "start<TAB>-<TAB>end" with "d-m-Y" dates.
    for row in rows["Periode(r)"].text.strip().replace("\r\n", "").split("\n"):
        periodeGroups = periodeProg.match(row)

        periods.append({
            "start":
            datetime.strptime(
                periodeGroups.group("start").strip(), "%d-%m-%Y")
            if not periodeGroups is None else "",
            "end":
            datetime.strptime(periodeGroups.group("end").strip(), "%d-%m-%Y")
            if not periodeGroups is None else ""
        })

    for row in rows["Arbejdsformer"].findAll("span"):
        workMethods.append({"text": unicode(functions.cleanText(row.text))})

    termProg = re.compile(r"(?P<value>.*)\/(?P<end>.*)")

    # Teams are rendered as "<term>: <team>" spans carrying a context card id
    # of the form "HE<team_element_id>".
    for row in rows["Hold"].findAll("span"):
        teamGroups = teamProg.match(row.text)
        termGroups = termProg.match(
            teamGroups.group("term") if not teamGroups is None else "")
        teams.append({
            "context_card_id":
            row["lectiocontextcard"],
            "team_element_id":
            row["lectiocontextcard"].replace("HE", ""),
            "name":
            teamGroups.group("team") if not teamGroups is None else "",
            "term": {
                "years_string":
                teamGroups.group("term") if not teamGroups is None else "",
                "value":
                termGroups.group("value") if not termGroups is None else ""
            }
        })

    # Focus points: a nested <ul>; each <li> header may be followed by a
    # sub-list of elements.
    if not rows["Saerligefokuspunkter"].find("ul") is None:
        focusRows = rows["Saerligefokuspunkter"].find("ul").findAll(
            "li", recursive=False)

        if len(focusRows) > 0:
            for row in focusRows:
                header = unicode(row.text)
                focusPointElements = []
                if row.find_next().name == "ul":
                    for focusElement in row.find_next().findAll("li"):
                        focusPointElements.append(
                            focusElement.text.encode("utf8"))

                focusPoints.append({
                    "header": header,
                    "elements": focusPointElements
                })

    changedGroups = changeProg.match(rows["Sidstaendret"].text.strip().replace(
        "\r\n", "").replace("\t", ""))
    createdGroups = changeProg.match(rows["Oprettet"].text.strip().replace(
        "\r\n", "").replace("\t", ""))

    # Estimate cell reads e.g. "3,5 moduler" or "ingen" (none).
    estimate = rows["Estimat"].text.strip().replace("\r\n", "").replace(
        "\t", "").replace(" moduler", "").replace(",", ".")

    information = {
        "title":
        rows["Titel"].text.strip().replace("\r\n",
                                           "").replace("\t",
                                                       "").encode("utf8"),
        "note":
        rows["Note"].text.strip().replace("\r\n",
                                          "").replace("\t", "").encode("utf8"),
        "estimate": {
            "type": "modules",
            "length": "none" if estimate == "ingen" else estimate
        },
        "changed": {
            "date":
            datetime.strptime(changedGroups.group("date"), "%d/%m-%Y")
            if not changedGroups is None else "",
            "teacher": {
                "name":
                unicode(changedGroups.group("teacher"))
                if not changedGroups is None else "",
                "abbrevation":
                unicode(changedGroups.group("abbrevation"))
                if not changedGroups is None else ""
            }
        },
        "teams":
        teams,
        "created": {
            "date":
            datetime.strptime(createdGroups.group("date"), "%d/%m-%Y")
            if not createdGroups is None else "",
            "teacher": {
                "name":
                unicode(createdGroups.group("teacher"))
                if not createdGroups is None else "",
                "abbrevation":
                unicode(createdGroups.group("abbrevation"))
                if not createdGroups is None else ""
            }
        },
        "periods":
        periods,
        "focus_points":
        focusPoints,
        "methods":
        workMethods,
        "activities":
        activities,
        "assignments":
        assignments
    }

    return {"status": "ok", "phase": information}
Exemplo n.º 37
0
def messages ( config, session = False ):
	"""Fetch the message-thread list ("beskeder2") for a student from Lectio.

	config must contain "school_id", "student_id" and "page_id" (the folder
	node to open, see shownInMappings below). When session is False a session
	is created via authenticate.authenticate(config).

	Returns {"status" : "ok", "messages" : [...]} on success,
	{"status" : "error", "type" : "authenticate"} if authentication fails, or
	{"status" : False, ...} when the expected panels are missing from the page.

	Fix: dayConversion mapped the Danish Sunday abbreviation to "Son", which
	is not a valid strptime "%a" name, so threads changed on a Sunday of the
	current week raised ValueError. It now maps to "Sun".
	"""
	url = "https://www.lectio.dk/lectio/%s/beskeder2.aspx?type=liste&elevid=%s" % ( str(config["school_id"]), str(config["student_id"]) )

	if session is False:
		session = authenticate.authenticate(config)

	if session == False:
		return {"status" : "error", "type" : "authenticate"}

	# Insert the session information from the auth function
	cookies = {
		"lecmobile" : "0",
		"ASP.NET_SessionId" : session["ASP.NET_SessionId"],
		"LastLoginUserName" : session["LastLoginUserName"],
		"lectiogsc" : session["lectiogsc"],
		"LectioTicket" : session["LectioTicket"]
	}

	# Insert User-agent headers and the cookie information
	headers = {
		"User-Agent" : "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1665.2 Safari/537.36",
		"Content-Type" : "application/x-www-form-urlencoded",
		"Host" : "www.lectio.dk",
		"Origin" : "https://www.lectio.dk",
		"Cookie" : functions.implode(cookies, "{{index}}={{value}}", "; ")
	}

	response = proxy.session.get(url, headers=headers)

	html = response.text

	soup = Soup(html)

	# ASP.NET postback: replay the page's view state and fake a click on the
	# requested folder node in the selection tree.
	viewStateX = soup.find("input", attrs={"id" : "__VIEWSTATEX"})["value"]

	settings = {
		"__EVENTTARGET" : "__Page",
		"__EVENTARGUMENT" : "TREECLICKED_%s" % ( str(config["page_id"]) ),
		"__VIEWSTATEX" : viewStateX,
		#"s_m_Content_Content_ListGridSelectionTree_ExpandState" : "nnnnnnennnnnnnnnnnennenn",
		"s_m_Content_Content_ListGridSelectionTree_SelectedNode" : "s_m_Content_Content_ListGridSelectionTreet8",
		"s_m_Content_Content_ListGridSelectionTree_PopulateLog" : "",
		"s$m$Content$Content$MarkChkDD" : "-1"
	}

	response = proxy.session.post(url, data=settings, headers=headers)

	html = response.text

	soup = Soup(html)

	if soup.find("div", attrs={"id" : "s_m_Content_Content_ThreadListPanel"}) is None:
		return {
			"status" : False,
			"error" : "Data not found"
		}

	rows = soup.find("table", attrs={"id" : "s_m_Content_Content_threadGV_ctl00"}).findAll("tr")
	# If the first row carries a postback link in its fourth cell, a second
	# postback is required to reach the actual thread list (presumably a
	# pager/sort link — verify against the live page).
	if len(rows[0].findAll("td")) > 3 and not rows[0].findAll("td")[3].find("a") is None:
		viewStateKey = soup.find("input", attrs={"id" : "__VIEWSTATEY_KEY"})["value"]
		target = rows[0].findAll("td")[3].find("a")["href"]

		targetProg = re.compile(r'javascript:WebForm_DoPostBackWithOptions\(new WebForm_PostBackOptions\("(?P<target>.*)", "", true, "", "", false, true\)\)')
		targetGroups = targetProg.match(target)

		if targetGroups is None:
			return {
				"status" : False,
				"error" : "Missing target"
			}

		settings = {
			"__EVENTTARGET" : targetGroups.group("target"),
			"__EVENTARGUMENT" : "TREECLICKED_%s" % ( str(config["page_id"]) ),
			"__VIEWSTATEY_KEY" : viewStateKey,
			"s_m_Content_Content_ListGridSelectionTree_ExpandState" : "nnnnnnennnnnnnnnnnennenn",
			"s_m_Content_Content_ListGridSelectionTree_SelectedNode" : "s_m_Content_Content_ListGridSelectionTreet8",
			"s_m_Content_Content_ListGridSelectionTree_PopulateLog" : "",
			"s$m$Content$Content$MarkChkDD" : "-1"
		}

		response = proxy.session.post(url, data=settings, headers=headers)

		html = response.text

		soup = Soup(html)

	if soup.find("div", attrs={"id" : "s_m_Content_Content_ThreadListPanel"}) is None:
		return {
			"status" : False,
			"error" : "Data not found"
		}

	# No thread table at all means an empty folder, not an error.
	if soup.find("table", attrs={"id" : "s_m_Content_Content_threadGV_ctl00"}) is None:
		return {
			"status" : "ok",
			"messages" : []
		}

	rows = soup.find("table", attrs={"id" : "s_m_Content_Content_threadGV_ctl00"}).findAll("tr")
	rows.pop(0)

	messages = []

	# Lectio abbreviates the change timestamp depending on its age; try the
	# most specific pattern first (weekday + time), then time-only (today),
	# then day/month (this year), then full date.
	today = datetime.now()
	shortDayTimeProg = re.compile(r"(?P<day_name>.*) (?P<hour>.*):(?P<minute>.*)")
	timeProg = re.compile(r"(?P<hour>.*):(?P<minute>.*)") # Current day, month, year
	dayProg = re.compile(r"(?P<day_name>.*) (?P<day>.*)/(?P<month>.*)") # Current year
	dateProg = re.compile(r"(?P<day>.*)/(?P<month>.*)-(?P<year>.*)")

	studentProg = re.compile(r"(?P<name>.*) \((?P<class_name>.*)\)")
	teacherProg = re.compile(r"(?P<name>.*) \((?P<abbrevation>.*)\)")

	messageIdProg = re.compile(r"__doPostBack\('__Page','\$LB2\$_MC_\$_(?P<message_id>.*)'\); return false;")

	shownIn = []

	# Folder node ids -> symbolic folder names.
	shownInMappings = {
		"-40" : "all_unread",
		"-50" : "all_with_flag",
		"-70" : "newest",
		"-60" : "all_deleted",
		"-10" : "own_messages",
		"-80" : "sent_messages",
		"-20" : "teams",
		"-30" : "build_in_groups",
		"-35" : "own_groups"
	}

	# Danish day abbreviations mapped to strptime "%a" names. "Sø" must map
	# to "Sun"; the previous "Son" made strptime raise on Sunday dates.
	dayConversion = {
		u"Ma" : "Mon",
		u"Ti" : "Tue",
		u"On" : "Wed",
		u"To" : "Thu",
		u"Fr" : "Fri",
		u"Lø" : "Sat",
		u"Sø" : "Sun"
	}

	if str(config["page_id"]) in shownInMappings:
		shownIn.append(shownInMappings[str(config["page_id"])])
	else:
		shownIn.append(str(config["page_id"]))

	for row in rows:
		elements = row.findAll("td")
		if not elements is None and len(elements) > 0 and not elements[1].find("img") is None:
			# Cell layout: [1] flag icon, [2] read icon, [3] subject link
			# (+ attachment icon), [4] last sender, [5] first sender,
			# [6] recipients, [7] changed timestamp, [8] deleted icon.
			flagOn = True if elements[1].find("img")["src"] == "/lectio/img/flagon.gif" else False
			read = False if elements[2].find("img")["src"] == "/lectio/img/munread.gif" else True
			subject = unicode(elements[3].find("a").text)
			documentsAttached = True if not elements[3].find("img") is None else False
			deleted = True if elements[8].find("img")["src"] == "/lectio/img/add.auto" else False

			date = None

			messageGroups = messageIdProg.match(elements[3].find("a")["onclick"])
			message_id = messageGroups.group("message_id") if not messageGroups is None else ""

			if shortDayTimeProg.match(elements[7].text):
				timeGroups = shortDayTimeProg.match(elements[7].text)
				date = datetime.strptime("%s/%s-%s %s:%s" % (dayConversion[unicode(timeGroups.group("day_name").capitalize())], today.strftime("%W"), today.strftime("%Y"), timeGroups.group("hour"), timeGroups.group("minute")), "%a/%W-%Y %H:%M")
			elif timeProg.match(elements[7].text):
				timeGroups = timeProg.match(elements[7].text)
				date = datetime.strptime("%s/%s-%s %s:%s" % (today.strftime("%d"), today.strftime("%m"), today.strftime("%Y"), timeGroups.group("hour"), timeGroups.group("minute")), "%d/%m-%Y %H:%M")
			elif dayProg.match(elements[7].text):
				dayGroups = dayProg.match(elements[7].text)
				date = datetime.strptime("%s/%s-%s %s:%s" % (dayGroups.group("day"), dayGroups.group("month"), today.strftime("%Y"), "12", "00"), "%d/%m-%Y %H:%M")
			elif dateProg.match(elements[7].text):
				dateGroups = dateProg.match(elements[7].text)
				date = datetime.strptime("%s/%s-%s %s:%s" % (dateGroups.group("day"), dateGroups.group("month"), dateGroups.group("year"), "12", "00"), "%d/%m-%Y %H:%M")

			# Sender/recipient kind is inferred from the cell icons.
			lastSenderType = "teacher" if elements[4].find("img")["src"] == "/lectio/img/teacher.auto" else "student"
			firstSenderType = "teacher" if elements[5].find("img")["src"] == "/lectio/img/teacher.auto" else "student"
			recipientsType = "student" if elements[6].find("img")["src"] == "/lectio/img/student.auto" else "teacher" if elements[6].find("img")["src"] == "/lectio/img/teacher.auto" else "class"

			lastSender = {}
			firstSender = {}

			# Tooltips read "Name (abbr)" for teachers, "Name (class)" for
			# students.
			if lastSenderType == "teacher":
				teacherGroups = teacherProg.match(elements[4].find("span")["title"])
				lastSender["name"] = unicode(teacherGroups.group("name")) if not teacherGroups is None else ""
				lastSender["abbrevation"] = unicode(teacherGroups.group("abbrevation")) if not teacherGroups is None else ""
			else:
				studentGroups = studentProg.match(elements[4].find("span")["title"])
				lastSender["name"] = unicode(studentGroups.group("name")) if not studentGroups is None else ""
				lastSender["class_name"] = unicode(studentGroups.group("class_name")) if not studentGroups is None else ""

			if firstSenderType == "teacher":
				teacherGroups = teacherProg.match(elements[5].find("span")["title"])
				firstSender["name"] = unicode(teacherGroups.group("name")) if not teacherGroups is None else ""
				firstSender["abbrevation"] = unicode(teacherGroups.group("abbrevation")) if not teacherGroups is None else ""
			else:
				studentGroups = studentProg.match(elements[5].find("span")["title"])
				firstSender["name"] = unicode(studentGroups.group("name")) if not studentGroups is None else ""
				firstSender["class_name"] = unicode(studentGroups.group("class_name")) if not studentGroups is None else ""

			messages.append({
				"flagged" : flagOn,
				"read" : read,
				"documents_attached" : documentsAttached,
				"subject" : subject,
				"last_message" : {
					"sender_user_type" : lastSenderType,
					"sender" : lastSender
				},
				"first_message" : {
					"sender_user_type" : firstSenderType,
					"sender" : firstSender
				},
				"recipients" : {
					"type" : recipientsType
				},
				"changed" : date,
				"thread_id" : message_id,
				"shown_in" : shownIn,
				"deleted" : deleted
			})

	return {
		"status" : "ok",
		"messages" : messages
	}
Exemplo n.º 38
0
def class_members(config, session=False):
    """Fetch the members (teachers and students) of a Lectio class.

    config keys used: "school_id", "class_id".

    session semantics:
      * False  -- request the member list anonymously (adds reporttype=std).
      * True   -- create a fresh session via authenticate.authenticate(config).
      * dict   -- an already-authenticated session cookie dict.

    Returns {"status": "ok", "teachers": [...], "students": [...]} on
    success, or {"status": False, "error": "Data not found"} when the
    member table is missing from the response.
    """
    if session == False:
        # Anonymous request; reporttype=std presumably serves the public view.
        url = "https://www.lectio.dk/lectio/%s/subnav/members.aspx?klasseid=%s&showteachers=1&showstudents=1&reporttype=std" % (
            str(config["school_id"]), str(config["class_id"]))
        cookies = {}
    else:
        if session == True:
            session = authenticate.authenticate(config)

        url = "https://www.lectio.dk/lectio/%s/subnav/members.aspx?klasseid=%s&showteachers=1&showstudents=1" % (
            str(config["school_id"]), str(config["class_id"]))
        # Insert the session information from the auth function
        cookies = {
            "lecmobile": "0",
            "ASP.NET_SessionId": session["ASP.NET_SessionId"],
            "LastLoginUserName": session["LastLoginUserName"],
            "lectiogsc": session["lectiogsc"],
            "LectioTicket": session["LectioTicket"]
        }

    # Insert User-agent headers and the cookie information
    headers = {
        "User-Agent":
        "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1665.2 Safari/537.36",
        "Content-Type": "application/x-www-form-urlencoded",
        "Host": "www.lectio.dk",
        "Origin": "https://www.lectio.dk",
        "Cookie": functions.implode(cookies, "{{index}}={{value}}", "; ")
    }

    response = proxy.session.get(url, headers=headers)

    html = response.text

    soup = Soup(html)

    if soup.find("table",
                 attrs={"id": "s_m_Content_Content_laerereleverpanel_alm_gv"
                        }) is None:
        return {"status": False, "error": "Data not found"}

    rows = soup.find("table",
                     attrs={
                         "id": "s_m_Content_Content_laerereleverpanel_alm_gv"
                     }).findAll("tr")
    # NOTE(review): `headers` is rebound here from the HTTP-header dict to the
    # table's header cells; it is only used for the column count below.
    headers = rows[0].findAll("th")
    rows.pop(0)

    teachers = []
    students = []
    # Seven header columns means the table has an extra leading picture column,
    # so all data-cell indices shift right by one.
    pictureOffset = 1 if len(headers) == 7 else 0
    pictureProg = re.compile(
        r"\/lectio\/(?P<school_id>.*)\/GetImage.aspx\?pictureid=(?P<picture_id>.*)"
    )

    if len(rows) > 0:
        for row in rows:
            elements = row.findAll("td")
            # "Lærer" is Danish for "teacher"; anything else counts as a student.
            personType = "teacher" if unicode(
                elements[0 + pictureOffset].text) == u"Lærer" else "student"
            data = {
                "type":
                personType,
                "first_name":
                unicode(elements[2 +
                                 pictureOffset].find("a").text).encode("utf8"),
                "person_text_id":
                elements[1 + pictureOffset].find("span").text.encode("utf8"),
                "last_name":
                elements[3 + pictureOffset].find("span").text.strip().encode(
                    "utf8"),
                "full_name":
                unicode(
                    unicode(elements[2 + pictureOffset].find("a").text) + " " +
                    unicode(elements[3 + pictureOffset].find("span").text)).
                encode("utf8"),
                # Context card ids are prefixed "T" for teachers, "S" for students;
                # strip the prefix to get the raw person id.
                "person_id":
                elements[1 + pictureOffset]["lectiocontextcard"].replace(
                    "T", "") if personType == "teacher" else
                elements[1 +
                         pictureOffset]["lectiocontextcard"].replace("S", ""),
                "context_card_id":
                elements[1 + pictureOffset]["lectiocontextcard"]
            }
            if pictureOffset == 1:
                # Extract the picture id from the GetImage.aspx src URL.
                pictureGroups = pictureProg.match(
                    elements[0].find("img")["src"])
                data["picture_id"] = pictureGroups.group(
                    "picture_id") if not pictureGroups is None else ""

            if personType == "teacher":
                data["teams"] = elements[5 + pictureOffset].text.split(", ")
                teachers.append(data)
            else:
                data["field_of_study"] = {
                    "name":
                    unicode(elements[4 + pictureOffset].find(
                        "span").text).encode("utf8"),
                    "context_card_id":
                    elements[4 +
                             pictureOffset].find("span")["lectiocontextcard"],
                    "field_of_study_id":
                    elements[4 + pictureOffset].find("span")
                    ["lectiocontextcard"].replace("SR", "")
                }
                students.append(data)

    return {"status": "ok", "teachers": teachers, "students": students}
Exemplo n.º 39
0
def exams(config, session=False):
    """Fetch a student's exam overview ("proevehold") page from Lectio.

    config keys used: "school_id", "student_id".  When ``session`` is False
    a new session is created via authenticate.authenticate(); if that fails
    an authenticate error dict is returned.

    Returns {"status": "ok", "exams": [...], "information": {...}} on
    success, or {"status": False, "error": "Data not found"} when the
    detail island is missing.  NOTE(review): the exams list is never
    populated by this implementation.
    """
    if session is False:
        session = authenticate.authenticate(config)

    if session == False:
        return {"status": "error", "type": "authenticate"}

    url = "https://www.lectio.dk/lectio/%s/proevehold.aspx?type=elev&studentid=%s" % (
        str(config["school_id"]), str(config["student_id"]))

    # Session cookies produced by the auth function.
    cookies = {
        "lecmobile": "0",
        "ASP.NET_SessionId": session["ASP.NET_SessionId"],
        "LastLoginUserName": session["LastLoginUserName"],
        "lectiogsc": session["lectiogsc"],
        "LectioTicket": session["LectioTicket"]
    }

    # Browser-like headers plus the serialized cookie string.
    headers = {
        "User-Agent": "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1665.2 Safari/537.36",
        "Content-Type": "application/x-www-form-urlencoded",
        "Host": "www.lectio.dk",
        "Origin": "https://www.lectio.dk",
        "Cookie": functions.implode(cookies, "{{index}}={{value}}", "; ")
    }

    soup = Soup(proxy.session.get(url, headers=headers).text)

    island = soup.find("div", attrs={"id": "m_Content_LectioDetailIslandProevehold_pa"})
    if island is None:
        return {"status": False, "error": "Data not found"}

    exams = []

    # The first table of the island holds the student information cells.
    info_cells = island.findAll("table")[0].findAll("td")

    # "<class name> <student class id>" -- greedy first group splits on the
    # last space, matching the original behavior.
    class_pattern = re.compile(r"(?P<class_name>.*) (?P<student_class_id>.*)")
    class_match = class_pattern.match(info_cells[1].text)

    information = {
        "name": unicode(info_cells[3].text),
        "student_class_id_full": info_cells[1].text,
        "base_class": info_cells[5].text,
        "class_name": class_match.group("class_name") if class_match is not None else "",
        "student_class_id": class_match.group("student_class_id") if class_match is not None else "",
    }

    return {"status": "ok", "exams": exams, "information": information}
Exemplo n.º 40
0
def message(config, session=False):
    """Fetch and parse one message thread for a student from Lectio.

    config keys used: "school_id", "student_id", "thread_id".  When
    ``session`` is False a new session is created via
    authenticate.authenticate(); on failure an authenticate error dict is
    returned.

    Works in two requests: GET the message-list page to harvest the
    ASP.NET __VIEWSTATEX token, then POST a simulated __doPostBack that
    opens the thread identified by config["thread_id"].

    Returns {"status": "ok", "message": {...}} with the original subject,
    flagged state, original sender, recipients and the individual
    messages, or {"status": False, "error": "Data not found"} when the
    thread panel is missing.
    """
    url = "https://www.lectio.dk/lectio/%s/beskeder2.aspx?type=liste&elevid=%s" % (
        str(config["school_id"]), str(config["student_id"]))

    if session is False:
        session = authenticate.authenticate(config)

    if session == False:
        return {"status": "error", "type": "authenticate"}

    # Insert the session information from the auth function
    cookies = {
        "lecmobile": "0",
        "ASP.NET_SessionId": session["ASP.NET_SessionId"],
        "LastLoginUserName": session["LastLoginUserName"],
        "lectiogsc": session["lectiogsc"],
        "LectioTicket": session["LectioTicket"]
    }

    # Insert User-agent headers and the cookie information
    headers = {
        "User-Agent":
        "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1665.2 Safari/537.36",
        "Content-Type": "application/x-www-form-urlencoded",
        "Host": "www.lectio.dk",
        "Origin": "https://www.lectio.dk",
        "Cookie": functions.implode(cookies, "{{index}}={{value}}", "; ")
    }

    response = proxy.session.get(url, headers=headers)

    html = response.text

    soup = Soup(html)

    # Harvest the viewstate token required for the postback below.
    viewStateX = soup.find("input", attrs={"id": "__VIEWSTATEX"})["value"]

    # Simulate the ASP.NET __doPostBack event that opens the thread.
    settings = {
        "__EVENTTARGET": "__Page",
        "__EVENTARGUMENT": "$LB2$_MC_$_%s" % (str(config["thread_id"])),
        "__VIEWSTATEX": viewStateX,
    }

    response = proxy.session.post(url, data=settings, headers=headers)

    html = response.text

    soup = Soup(html)

    if soup.find("div",
                 attrs={"id": "s_m_Content_Content_ViewThreadPagePanel"
                        }) is None:
        return {"status": False, "error": "Data not found"}

    # The flag checkbox image encodes the thread's flagged state.
    flagged = False if soup.find(
        "input", attrs={"id": "s_m_Content_Content_FlagThisThreadBox"
                        })["src"] == "/lectio/img/flagoff.gif" else True

    originalElements = soup.find("table",
                                 attrs={
                                     "class": "ShowMessageRecipients"
                                 }).findAll("td")

    # Resolve the original sender via their context card.
    originalSenderUser = context_card.user(
        {
            "context_card_id":
            originalElements[8].find("span")["lectiocontextcard"],
            "school_id":
            config["school_id"]
        }, session)

    originalSenderUser["user"]["user_context_card_id"] = originalElements[
        8].find("span")["lectiocontextcard"]
    originalSenderUser["user"]["person_id"] = originalElements[8].find(
        "span")["lectiocontextcard"].replace("U", "")

    originalSubject = unicode(functions.cleanText(originalElements[2].text))

    recipients = []

    studentRecipientProg = re.compile(
        r"(?P<name>.*) \((?P<student_class_id>.*)\)")
    teacherRecipientProg = re.compile(r"(?P<name>.*) \((?P<abbrevation>.*)\)")

    # Fill in the single users, added as recipients
    for row in originalElements[11].findAll("span"):
        context_card_id = row["lectiocontextcard"]
        userType = ""
        data = {"context_card_id": context_card_id}

        # NOTE(review): substring tests -- presumably card ids start with the
        # type letter ("S123..." / "T123..."); an id containing both letters
        # would be classified as a student. Confirm against live ids.
        if "S" in context_card_id:
            userType = "student"
            studentGroups = studentRecipientProg.match(row.text)
            data["person_id"] = context_card_id.replace("S", "")
            data["student_id"] = context_card_id.replace("S", "")
            data["name"] = unicode(studentGroups.group(
                "name")) if not studentGroups is None else ""
            data["student_class_id"] = studentGroups.group(
                "student_class_id") if not studentGroups is None else ""

        elif "T" in context_card_id:
            userType = "teacher"
            teacherGroups = teacherRecipientProg.match(row.text)
            data["person_id"] = context_card_id.replace("T", "")
            data["teacher_id"] = context_card_id.replace("T", "")
            data["abbrevation"] = unicode(teacherGroups.group(
                "abbrevation")) if not teacherGroups is None else ""
            data["name"] = unicode(teacherGroups.group(
                "name")) if not teacherGroups is None else ""

        data["type"] = userType

        recipients.append(data)

        # Remove the span so only team/group names remain in the cell text.
        row.decompose()

    recipientRows = originalElements[11].text.split(", ")

    # What is left after decomposing the user spans are team ("Holdet") and
    # group ("Gruppen") recipients.
    for row in recipientRows:
        text = row.replace("\n", "").replace("\r", "").replace("\t", "")

        if "Holdet" in text:
            text = text.replace("Holdet ", "")

            recipients.append({"type": "team", "name": unicode(text)})
        elif "Gruppen" in text:
            text = text.replace("Gruppen ", "")
            recipients.append({"type": "group", "name": unicode(text)})

    messages = []

    answerProg = re.compile(
        r"javascript:__doPostBack\('__Page','ANSWERMESSAGE_(?P<message_id>.*)'\);"
    )
    dateTimeProg = re.compile(
        r"(?P<day>.*)\/(?P<month>.*)-(?P<year>.*) (?P<hour>.*):(?P<minute>.*)")
    # Maps colspan (nesting depth) -> last message id seen at that depth,
    # used to link replies to their parent message.
    messageLevels = {}

    for row in soup.find("table",
                         attrs={
                             "id": "s_m_Content_Content_ThreadTable"
                         }).findAll("tr"):
        if not row.find("table") is None:
            # The colspan attribute encodes how deeply nested this reply is.
            level = row.findAll(has_colspan)[0]["colspan"]
            data = {}
            messageDetailElements = row.find("table").findAll("td")

            # Subject
            data["subject"] = unicode(messageDetailElements[0].find("h4").text)
            messageDetailElements[0].find("h4").decompose()

            # Sender
            messageSender = context_card.user(
                {
                    "context_card_id":
                    messageDetailElements[0].find("span")["lectiocontextcard"],
                    "school_id":
                    config["school_id"]
                }, session)

            # NOTE(review): these two fields are copied from the thread's
            # original sender (originalElements[8]), not from this message's
            # own sender span -- confirm whether that is intended.
            messageSender["user"]["user_context_card_id"] = originalElements[
                8].find("span")["lectiocontextcard"]
            messageSender["user"]["person_id"] = originalElements[8].find(
                "span")["lectiocontextcard"].replace("U", "")
            data["sender"] = messageSender["user"]

            messageDetailElements[0].find("span").decompose()

            # Time
            timeText = messageDetailElements[0].text.replace(
                "Af , ", "").strip().replace("\n", "").replace("\t", "")
            dateGroups = dateTimeProg.match(timeText)
            data["date"] = datetime.strptime(
                "%s/%s-%s %s:%s" %
                (functions.zeroPadding(dateGroups.group("day")),
                 functions.zeroPadding(dateGroups.group("month")),
                 dateGroups.group("year"), dateGroups.group("hour"),
                 dateGroups.group("minute")),
                "%d/%m-%Y %H:%M") if not dateGroups is None else ""

            # Message id
            answerGroups = answerProg.match(
                messageDetailElements[1].find("button")["onclick"])
            message_id = answerGroups.group(
                "message_id") if not answerGroups is None else ""
            data["message_id"] = message_id

            row.find("table").decompose()

            # Get message text
            data["message"] = unicode(row.text.strip())

            # Get parent
            # NOTE(review): "parrent_id" is misspelled but is the established
            # API key; renaming it would break consumers.
            if str(int(level) + 1) in messageLevels:
                data["parrent_id"] = messageLevels[str(int(level) + 1)]

            messageLevels[level] = message_id

            messages.append(data)

    messageInfo = {
        "original_subject": originalSubject,
        "flagged": flagged,
        "original_sender": originalSenderUser["user"],
        "recipients": recipients,
        "messages": messages
    }

    return {
        "status": "ok",
        "message": messageInfo,
    }
Exemplo n.º 41
0
def outgoing_censor ( config ):
	"""Scrape an outgoing censor's exam assignment page from Lectio.

	config keys used: "school_id", "outgoing_censor_id".  The page is
	requested anonymously (no session cookies).

	Returns a dict with the censor, institution, period and XPRS test
	details, plus -- when a teaching description
	(undervisningsbeskrivelse) is present on the page -- the parsed course
	"phases" and team information.  Returns
	{"status": False, "error": "Data not found"} when the detail island is
	missing from the response.
	"""
	url = "https://www.lectio.dk/lectio/%s/proevehold.aspx?type=udgcensur&outboundCensorID=%s" % ( str(config["school_id"]), str(config["outgoing_censor_id"]) )

	cookies = {}

	# Insert User-agent headers and the cookie information
	headers = {
		"User-Agent" : "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1665.2 Safari/537.36",
		"Content-Type" : "application/x-www-form-urlencoded",
		"Host" : "www.lectio.dk",
		"Origin" : "https://www.lectio.dk",
		"Cookie" : functions.implode(cookies, "{{index}}={{value}}", "; ")
	}

	response = proxy.session.get(url, headers=headers)

	html = response.text

	soup = Soup(html)

	if soup.find("div", attrs={"id" : "m_Content_LectioDetailIslandProevehold_pa"}) is None:
		return {
			"status" : False,
			"error" : "Data not found"
		}

	# Regexes for "Name (ABBR)" teacher strings, "start - end" periods and
	# "<id> <institution name>" institution strings.
	teacherProg = re.compile(r"(?P<name>.*) \((?P<abbrevation>.*)\)")
	teacherGroups = teacherProg.match(soup.find("td", attrs={"id" : "m_Content_outboundcensor_teachername"}).text)
	periodProg = re.compile("(?P<start>.*) - (?P<end>.*)")
	periodGroups = periodProg.match(soup.find(attrs={"id" : "m_Content_outboundcensor_periode"}).text)
	institutionProg = re.compile(r"(?P<institution_id>\d*) (?P<institution>.*)")

	phases = []
	information = {}
	description = False

	phaseIds = {}
	phaseNames = []

	# A teaching description is only present for some assignments; without it
	# phases/information stay empty and description stays False.
	if not soup.find(attrs={"id" : "m_Content_holdUvbCtrl_UvbHoldRepeater_ctl00_uvbCtrl_uvbundervisningsbeskrivelseTypeH"}) is None:
		tables = soup.find(attrs={"id" : "m_Content_holdUvbCtrl_UvbHoldRepeater_ctl00_uvbCtrl_uvbcontainer"}).findAll("table", attrs={"class" : "list"})

		# The second table is an index of phase anchors ("#ph<id>").
		for x in tables[1].findAll("a"):
			phaseIds[x.text] = x["href"].replace("#ph", "")
			phaseNames.append(x.text)

		description = True

		informationElements = tables[0].findAll("td")
		subjectProg = re.compile(r"(?P<subject>.*) (?P<level>.*)$")
		teachers = []
		teamProg = re.compile(r"(?P<team>.*) \((?P<teams>.*)\)")
		teamGroups = teamProg.match(informationElements[9].text.replace("\n", ""))
		teams = []

		if not teamGroups is None:
			teams = []
			for x in teamGroups.group("teams").replace("\n", "").split(", "):
				teams.append({"name" : x})

		for row in informationElements[7].findAll("span"):
			if len(row.text) > 0:
				teachers.append({"name" : row.text})

		subjectGroups = subjectProg.match(informationElements[5].text.replace("\n", ""))
		terms = []

		termProg = re.compile(r"(?P<value>.*)\/(?P<end>.*)")

		# Terms appear as "yyyy/yy" values joined by " - ".
		for x in informationElements[1].text.replace("\n", "").split(" - "):
			termGroups = termProg.match(x)

			terms.append({
				"value" : termGroups.group("value") if not termGroups is None else "",
				"years_string" : x
			})

		information = {
			"teachers" : teachers,
			"terms" : terms,
			"teams" : teams,
			"team_name" : teamGroups.group("team") if not teamGroups is None else "",
			"subject" : {
				"name" : subjectGroups.group("subject").encode("utf8") if not subjectGroups is None else informationElements[5].text.encode("utf8"),
				"level" : subjectGroups.group("level") if not subjectGroups is None else ""
			},
			"institution" : informationElements[3].text.replace("\n", "").encode("utf8"),
		}

		# Drop the information and index tables; the rest are phase tables.
		tables.pop(0)
		tables.pop(0)

		phases = []
		coversProg = re.compile(ur"Anvendt modullængden (?P<length>.*) (?P<type>.*)\. fra skoleåret (?P<term>.*)")

		index = 0

		# Each remaining table describes one phase (course segment).
		for table in tables:
			if not table is None:
				index = index + 1
				rows = table.findAll("tr", recursive=False)
				elements = []

				for row in rows:
					elements = elements + row.findAll("td", recursive=False)

				reachSpans = elements[5].findAll("span")
				title = reachSpans[2]["title"] if "title" in reachSpans[2] else reachSpans[2].text
				coversGroups = coversProg.match(title)
				focusPoints = []
				focusRows = []
				if not elements[7].find("ul") is None:
					focusRows = elements[7].find("ul").findAll("li", recursive=False)
				descriptionText = elements[1].find("span").text

				# Focus points: each top-level <li> header may carry a nested
				# <ul> with its sub-elements.
				if len(focusRows) > 0:
					for row in focusRows:
						header = unicode(row.text)
						focusPointElements = []
						if row.find_next().name == "ul":
							for focusElement in row.find_next().findAll("li"):
								focusPointElements.append(unicode(focusElement.text))

						focusPoints.append({
							"header" : header,
							"elements" : focusPointElements
						})

				work_methods = []

				for row in elements[9].findAll("li"):
					work_methods.append(unicode(row.text.replace("\t", "").replace("\n", "").replace("\r", "")))

				# The material cell (elements[3]) is consumed destructively:
				# readings span, then links, then the written-work table, then
				# whatever text remains counts as documents. Order matters.
				readings = []
				if not elements[3].find("span").find("i") is None:
					elements[3].find("span").find("i").decompose()
					for row in elements[3].find("span").findAll("br"):
						text = unicode(row.find_next(text=True).string).encode("utf8")
						readings.append({"text" : text})

				elements[3].find("span").decompose()
				links = []

				for link in elements[3].findAll("a"):
					links.append({
						"href" : link["href"],
						"text" : unicode(link.find_next(text=True).find_next(text=True)[3:].replace("\t", "").replace("\r\n", ""))
					})
					link.find_next(text=True).find_next(text=True).extract()
					link.decompose()

				written = []

				if not elements[3].find("table") is None:
					writtenRows = elements[3].findAll("tr")
					writtenRows.pop(0)

					for row in writtenRows:
						writtenRowElements = row.findAll("td")
						written.append({
							"title" : writtenRowElements[0].text.replace("\r\n", "").replace("\t", ""),
							"date" : datetime.strptime(writtenRowElements[1].text.replace("\r\n", "").replace("\t", "").strip(), "%d-%m-%Y")
						})

					elements[3].find("table").decompose()

				for x in elements[3].findAll("i"):
					x.decompose()

				documents = []

				for row in elements[3].findAll(text=True):
					if len(row) > 1:
						documents.append({
							"name" : row.strip().replace("\r\n", "").replace("\t", "")
						})

				# "Ikke angivet" means "not specified" -> "unknown".
				phases.append({
					"reach" : {
						"covers" : {
							"length" : "unknown" if reachSpans[1].text == "Ikke angivet" else reachSpans[1].text.replace(" moduler", ""),
							"type" : "modules"
						},
						"details" : {
							"length" : coversGroups.group("length") if not coversGroups is None else "45",
							"type" : coversGroups.group("type") if not coversGroups is None else "min",
							"term" : "20" + coversGroups.group("term") if not coversGroups is None else soup.find("select", attrs={"id" : "m_ChooseTerm_term"}).select('option[selected="selected"]')[0].text
						}
					},
					"estimate" : {
						"type" : "modules",
						"length" : "unknown" if reachSpans[0].text == "Ikke angivet" else reachSpans[0].text.replace(",", ".").replace(" moduler", "").strip(),
					},
					"methods" : work_methods,
					"name" : phaseNames[index - 1].encode("utf8"),
					"phase_id" : phaseIds[phaseNames[index - 1]],
					"focus_points" : focusPoints,
					"readings" : readings,
					"links" : links,
					"documents" : documents,
					"written" : written,
					"description" : descriptionText,
					"title" : elements[1].find(text=True).string.replace("\r\n", "").replace("\t", "").encode("utf8")
				})

	institutionGroups = institutionProg.match(soup.find("td", attrs={"id" : "m_Content_outboundcensor_institution"}).text)

	testTeamName = soup.find("td", attrs={"id" : "m_Content_outboundcensor_proeveholdname"}).text
	test_type_code = "other"
	gym_type = "AGYM"
	test_type_team_name = ""

	# XPRS test string: "<code> <type> <subject name>".
	xprsProg = re.compile(r"(?P<code>.*) (?P<type>.*) (?P<subject_name>.*)")
	xprsGroups = xprsProg.match(soup.find("td", attrs={"id" : "m_Content_outboundcensor_xprsproeve"}).text)
	xprs_type = xprsGroups.group("type") if not xprsGroups is None else ""

	# Try the alternative "<team> <code> (<gym type>)" form first, then the
	# plain "<team> <code>" form.
	testTypeCodeProg = re.compile(r"(?P<team_name>.*) (?P<code>[\w\S]*)$")
	testTypeCodeGroups = testTypeCodeProg.match(testTeamName)
	testTypeAltCodePRog = re.compile(r"(?P<team_name>.*) (?P<code>[\w\S]*) \((?P<gym_type>[\w\S]*)\)$")
	testTypeCodeAltGroups = testTypeAltCodePRog.match(testTeamName)

	if not testTypeCodeAltGroups is None:
		test_type_team_name = testTypeCodeAltGroups.group("team_name")
		gym_type = testTypeCodeAltGroups.group("gym_type")
		test_type_code = testTypeCodeAltGroups.group("code")
	elif not testTypeCodeGroups is None:
		test_type_team_name = testTypeCodeGroups.group("team_name")
		test_type_code = testTypeCodeGroups.group("code")

	# The level letter (A-F) is embedded in the XPRS code.
	xprs_code = xprsGroups.group("code") if not xprsGroups is None else ""
	xprs_level = "A" if "A" in xprs_code else "B" if "B" in xprs_code else "C" if "C" in xprs_code else "D" if "D" in xprs_code else "E" if "E" in xprs_code else "F" if "F" in xprs_code else "-"

	# group(...) lookups fall back to the raw cell text or "" when a regex
	# did not match. SKR/MDT/SAM = written/oral/combined assessment.
	return {
		"status" : "ok",
		"censor" : {
			"name" : teacherGroups.group("name") if not teacherGroups is None else unicode(soup.find("td", attrs={"id" : "m_Content_outboundcensor_teachername"}).text),
			"abbrevation" : teacherGroups.group("abbrevation") if not teacherGroups is None else ""
		},
		"test_team" : testTeamName,
		"institution" : {
			"name" : institutionGroups.group("institution") if not institutionGroups is None else soup.find("td", attrs={"id" : "m_Content_outboundcensor_institution"}).text,
			"institution_id" : institutionGroups.group("institution_id") if not institutionGroups is None else ""
		},
		"period" : {
			"start" : periodGroups.group("start") if not periodGroups is None else "",
			"end" : periodGroups.group("end") if not periodGroups is None else ""
		},
		"xprs" : {
			"xprs_test" : soup.find("td", attrs={"id" : "m_Content_outboundcensor_xprsproeve"}).text,
			"code_full" : xprs_code,
			"code" : xprs_code.replace(xprs_level, ""),
			"level" : xprs_level,
			"gym_type" : gym_type,
			"test_type_code" : test_type_code,
			"xprs_type" : xprs_type,
			"subject" : xprsGroups.group("subject_name") if not xprsGroups is None else "",
			"type" : "written" if xprs_type == "SKR" else "combined" if xprs_type == "SAM" else "oral" if xprs_type == "MDT" else xprs_type,
			"test_type_long_code" :  "Skriftlig eksamen" if xprs_type == "SKR" else "Mundtlig eksamen" if xprs_type == "MDT" else "Samlet vurdering" if xprs_type == "SAM" else xprs_type
		},
		"test_type_team_name" : test_type_team_name,
		"number_of_students" : soup.find("td", attrs={"id" : "m_Content_outboundcensor_elevtal"}).text,
		"note" : soup.find("td", attrs={"id" : "m_Content_outboundcensor_bemaerkning"}).text,
		"phases" : phases,
		"information" : information,
		"description" : description
	}
Exemplo n.º 42
0
def xprs_subjects ( start, end, increase , school_id, checkLevels = False, levels = None ):
	"""Scrape Lectio XPRS subject context cards for a range of card ids.

	Candidate ids are generated as start, start+increase, ... covering
	end-start+1 steps.  With checkLevels=True each generated id is
	additionally suffixed with every level code in ``levels`` (defaults to
	"01".."06").  Each existing card is parsed into a subject dict; ids
	whose request fails, or whose card page has no title span, are printed
	and skipped.

	Returns the list of subject dicts.
	"""
	# Avoid the shared-mutable-default-argument pitfall.
	if levels is None:
		levels = ["01", "02", "03", "04", "05", "06"]

	subjects = []
	cards = []

	# Generate the candidate context-card ids.
	for code in range(0, end-start+1):
		codeKey = start + (code*increase)
		if checkLevels == False:
			cards.append(codeKey)
		else:
			for row in levels:
				cards.append(str(codeKey)+row)

	# Compile once, outside the loop; matches "<code> <name>".
	codeProg = re.compile(r"(?P<code>[.^\S]*) (?P<name>.*)")

	for code in cards:

		url = "https://www.lectio.dk/lectio/%s/contextcard/contextcard.aspx?lectiocontextcard=XF%s" % ( str(school_id), str(code) )

		cookies = {}

		# Insert User-agent headers and the cookie information
		headers = {
			"User-Agent" : "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1665.2 Safari/537.36",
			"Content-Type" : "application/x-www-form-urlencoded",
			"Host" : "www.lectio.dk",
			"Origin" : "https://www.lectio.dk",
			"Cookie" : functions.implode(cookies, "{{index}}={{value}}", "; ")
		}

		try:
			response = proxy.session.get(url, headers=headers)
		except Exception:
			# Best effort: report the failing card id and move on.
			print(code)
			continue

		soup = Soup(response.text)

		if not soup.find("span", attrs={"id" : "ctl00_Content_cctitle"}) is None:

			tables = soup.findAll("table")

			codeGroups = codeProg.match(tables[1].findAll("td")[1].text)

			# Derive the level letter from the subject code.
			# (Fixes the original "Unkmown" typo.)
			level = "Unknown"

			if not codeGroups is None:
				level = "A" if "A" in codeGroups.group("code") else "B" if "B" in codeGroups.group("code") else "C"

			subjects.append({
				# NOTE(review): "XPRS-f*g - " looks like a profanity-filter
				# mangling of "XPRS-fag - " (Danish for "subject") -- confirm
				# against the live page title before changing it.
				"name" : unicode(soup.find(attrs={"id" : "ctl00_Content_cctitle"}).text.replace("XPRS-f*g - ", "")),
				"code" : codeGroups.group("code").replace("A", "").replace("B", "").replace("C", "") if not codeGroups is None else "",
				"subject_sub_type" : "none" if tables[1].findAll("td")[3].text == "Ingen underfag" else "differs" if tables[1].findAll("td")[3].text == "Variable underfag" else tables[1].findAll("td")[3].text,
				"context_card_id" : "XF" + str(code),
				"level" : level,
				"code_full" : codeGroups.group("code") if not codeGroups is None else "",
				"xprs_subject_id" : str(code),
				"notices" : tables[1].findAll("td")[5].text.split("\n"),
				"code_full_name" : tables[1].findAll("td")[1].text
			})

		else:
			# Card page exists but carries no title span -- log and skip.
			print(code)

	# Bug fix: the original built `subjects` but never returned it.
	return subjects
Exemplo n.º 43
0
def outgoing_censor(config):
    """Fetch and parse a Lectio "outgoing censor" exam-team page.

    config must contain "school_id" and "outgoing_censor_id".

    Returns a dict with censor, institution, period and XPRS exam info and,
    when a teaching description ("undervisningsbeskrivelse") is present on
    the page, the parsed course "information" and "phases".  Returns
    {"status": False, "error": "Data not found"} when the detail island is
    missing from the page.
    """
    url = "https://www.lectio.dk/lectio/%s/proevehold.aspx?type=udgcensur&outboundCensorID=%s" % (
        str(config["school_id"]), str(config["outgoing_censor_id"]))

    # No session cookies are sent for this page.
    cookies = {}

    # Insert User-agent headers and the cookie information
    headers = {
        "User-Agent":
        "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1665.2 Safari/537.36",
        "Content-Type": "application/x-www-form-urlencoded",
        "Host": "www.lectio.dk",
        "Origin": "https://www.lectio.dk",
        "Cookie": functions.implode(cookies, "{{index}}={{value}}", "; ")
    }

    response = proxy.session.get(url, headers=headers)

    html = response.text

    soup = Soup(html)

    # The detail island is the page's main data container; if it is missing
    # there is nothing to parse.
    if soup.find("div",
                 attrs={"id": "m_Content_LectioDetailIslandProevehold_pa"
                        }) is None:
        return {"status": False, "error": "Data not found"}

    # Label formats: "Name (ABBR)" for the teacher, "start - end" for the
    # period and "<digits> <name>" for the institution cell.
    teacherProg = re.compile(r"(?P<name>.*) \((?P<abbrevation>.*)\)")
    teacherGroups = teacherProg.match(
        soup.find("td", attrs={
            "id": "m_Content_outboundcensor_teachername"
        }).text)
    periodProg = re.compile("(?P<start>.*) - (?P<end>.*)")
    periodGroups = periodProg.match(
        soup.find(attrs={
            "id": "m_Content_outboundcensor_periode"
        }).text)
    institutionProg = re.compile(
        r"(?P<institution_id>\d*) (?P<institution>.*)")

    phases = []
    information = {}
    # description stays False unless a teaching description block is found.
    description = False

    phaseIds = {}
    phaseNames = []

    # Only parse course information/phases when the teaching description
    # ("undervisningsbeskrivelse") container exists on the page.
    if not soup.find(
            attrs=
        {
            "id":
            "m_Content_holdUvbCtrl_UvbHoldRepeater_ctl00_uvbCtrl_uvbundervisningsbeskrivelseTypeH"
        }) is None:
        tables = soup.find(
            attrs={
                "id":
                "m_Content_holdUvbCtrl_UvbHoldRepeater_ctl00_uvbCtrl_uvbcontainer"
            }).findAll("table", attrs={"class": "list"})

        # tables[1] is the phase index: anchors whose href fragment "#ph<id>"
        # carries the phase id, keyed here by the phase display name.
        for x in tables[1].findAll("a"):
            phaseIds[x.text] = x["href"].replace("#ph", "")
            phaseNames.append(x.text)

        description = True

        # tables[0] holds the general course information cells.
        informationElements = tables[0].findAll("td")
        subjectProg = re.compile(r"(?P<subject>.*) (?P<level>.*)$")
        teachers = []
        teamProg = re.compile(r"(?P<team>.*) \((?P<teams>.*)\)")
        teamGroups = teamProg.match(informationElements[9].text.replace(
            "\n", ""))
        teams = []

        if not teamGroups is None:
            teams = []
            for x in teamGroups.group("teams").replace("\n", "").split(", "):
                teams.append({"name": x})

        # Cell 7 lists the teachers, one span per teacher.
        for row in informationElements[7].findAll("span"):
            if len(row.text) > 0:
                teachers.append({"name": row.text})

        subjectGroups = subjectProg.match(informationElements[5].text.replace(
            "\n", ""))
        terms = []

        # Terms are rendered as "value/end", e.g. "2013/2014", joined by " - ".
        termProg = re.compile(r"(?P<value>.*)\/(?P<end>.*)")

        for x in informationElements[1].text.replace("\n", "").split(" - "):
            termGroups = termProg.match(x)

            terms.append({
                "value":
                termGroups.group("value") if not termGroups is None else "",
                "years_string":
                x
            })

        information = {
            "teachers":
            teachers,
            "terms":
            terms,
            "teams":
            teams,
            "team_name":
            teamGroups.group("team") if not teamGroups is None else "",
            "subject": {
                "name":
                subjectGroups.group("subject").encode("utf8")
                if not subjectGroups is None else
                informationElements[5].text.encode("utf8"),
                "level":
                subjectGroups.group("level")
                if not subjectGroups is None else ""
            },
            "institution":
            informationElements[3].text.replace("\n", "").encode("utf8"),
        }

        # Drop the information table and the phase index; the remaining
        # tables each describe one course phase.
        tables.pop(0)
        tables.pop(0)

        phases = []
        # Danish tooltip: "Anvendt modullængden <length> <type>. fra
        # skoleåret <term>" -- module length applied since a school year.
        coversProg = re.compile(
            ur"Anvendt modullængden (?P<length>.*) (?P<type>.*)\. fra skoleåret (?P<term>.*)"
        )

        index = 0

        for table in tables:
            if not table is None:
                index = index + 1
                rows = table.findAll("tr", recursive=False)
                elements = []

                # Flatten the phase table into a single cell list; the
                # numeric indices below address that flat layout.
                for row in rows:
                    elements = elements + row.findAll("td", recursive=False)

                # Cell 5 holds the time-reach spans; span 2 may carry the
                # module-length details in its "title" tooltip.
                reachSpans = elements[5].findAll("span")
                title = reachSpans[2]["title"] if "title" in reachSpans[
                    2] else reachSpans[2].text
                coversGroups = coversProg.match(title)
                focusPoints = []
                focusRows = []
                if not elements[7].find("ul") is None:
                    focusRows = elements[7].find("ul").findAll("li",
                                                               recursive=False)
                descriptionText = elements[1].find("span").text

                # Focus points are a two-level list: each top-level <li> is a
                # header whose following <ul> (if any) holds its elements.
                if len(focusRows) > 0:
                    for row in focusRows:
                        header = unicode(row.text)
                        focusPointElements = []
                        if row.find_next().name == "ul":
                            for focusElement in row.find_next().findAll("li"):
                                focusPointElements.append(
                                    unicode(focusElement.text))

                        focusPoints.append({
                            "header": header,
                            "elements": focusPointElements
                        })

                work_methods = []

                for row in elements[9].findAll("li"):
                    work_methods.append(
                        unicode(
                            row.text.replace("\t",
                                             "").replace("\n",
                                                         "").replace("\r",
                                                                     "")))

                # Cell 3 is parsed destructively: readings, links and the
                # written-work table are removed (decompose/extract) in turn,
                # so that only document names remain as loose text at the end.
                readings = []
                if not elements[3].find("span").find("i") is None:
                    elements[3].find("span").find("i").decompose()
                    for row in elements[3].find("span").findAll("br"):
                        text = unicode(
                            row.find_next(text=True).string).encode("utf8")
                        readings.append({"text": text})

                elements[3].find("span").decompose()
                links = []

                # Each anchor's label is the second following text node;
                # "[3:]" skips a leading separator -- NOTE(review): assumed
                # from the markup, confirm against a live page.
                for link in elements[3].findAll("a"):
                    links.append({
                        "href":
                        link["href"],
                        "text":
                        unicode(
                            link.find_next(text=True).find_next(
                                text=True)[3:].replace("\t",
                                                       "").replace("\r\n", ""))
                    })
                    link.find_next(text=True).find_next(text=True).extract()
                    link.decompose()

                written = []

                if not elements[3].find("table") is None:
                    writtenRows = elements[3].findAll("tr")
                    writtenRows.pop(0)

                    for row in writtenRows:
                        writtenRowElements = row.findAll("td")
                        written.append({
                            "title":
                            writtenRowElements[0].text.replace("\r\n",
                                                               "").replace(
                                                                   "\t", ""),
                            "date":
                            datetime.strptime(
                                writtenRowElements[1].text.replace(
                                    "\r\n", "").replace("\t", "").strip(),
                                "%d-%m-%Y")
                        })

                    elements[3].find("table").decompose()

                for x in elements[3].findAll("i"):
                    x.decompose()

                documents = []

                # Whatever text survived the removals above is treated as
                # document names.
                for row in elements[3].findAll(text=True):
                    if len(row) > 1:
                        documents.append({
                            "name":
                            row.strip().replace("\r\n", "").replace("\t", "")
                        })

                phases.append({
                    "reach": {
                        "covers": {
                            "length":
                            "unknown" if reachSpans[1].text == "Ikke angivet"
                            else reachSpans[1].text.replace(" moduler", ""),
                            "type":
                            "modules"
                        },
                        "details": {
                            "length":
                            coversGroups.group("length")
                            if not coversGroups is None else "45",
                            "type":
                            coversGroups.group("type")
                            if not coversGroups is None else "min",
                            "term":
                            "20" + coversGroups.group("term")
                            if not coversGroups is None else soup.
                            find("select", attrs={
                                "id": "m_ChooseTerm_term"
                            }).select('option[selected="selected"]')[0].text
                        }
                    },
                    "estimate": {
                        "type":
                        "modules",
                        "length":
                        "unknown" if reachSpans[0].text == "Ikke angivet" else
                        reachSpans[0].text.replace(",", ".").replace(
                            " moduler", "").strip(),
                    },
                    "methods":
                    work_methods,
                    "name":
                    phaseNames[index - 1].encode("utf8"),
                    "phase_id":
                    phaseIds[phaseNames[index - 1]],
                    "focus_points":
                    focusPoints,
                    "readings":
                    readings,
                    "links":
                    links,
                    "documents":
                    documents,
                    "written":
                    written,
                    "description":
                    descriptionText,
                    "title":
                    elements[1].find(text=True).string.replace(
                        "\r\n", "").replace("\t", "").encode("utf8")
                })

    institutionGroups = institutionProg.match(
        soup.find("td", attrs={
            "id": "m_Content_outboundcensor_institution"
        }).text)

    testTeamName = soup.find("td",
                             attrs={
                                 "id":
                                 "m_Content_outboundcensor_proeveholdname"
                             }).text
    test_type_code = "other"
    gym_type = "AGYM"
    test_type_team_name = ""

    # XPRS exam label: "<code> <type> <subject name>".
    xprsProg = re.compile(r"(?P<code>.*) (?P<type>.*) (?P<subject_name>.*)")
    xprsGroups = xprsProg.match(
        soup.find("td", attrs={
            "id": "m_Content_outboundcensor_xprsproeve"
        }).text)
    xprs_type = xprsGroups.group("type") if not xprsGroups is None else ""

    # The team name may end with "CODE" or "CODE (GYMTYPE)"; try the more
    # specific pattern first.
    testTypeCodeProg = re.compile(r"(?P<team_name>.*) (?P<code>[\w\S]*)$")
    testTypeCodeGroups = testTypeCodeProg.match(testTeamName)
    testTypeAltCodePRog = re.compile(
        r"(?P<team_name>.*) (?P<code>[\w\S]*) \((?P<gym_type>[\w\S]*)\)$")
    testTypeCodeAltGroups = testTypeAltCodePRog.match(testTeamName)

    if not testTypeCodeAltGroups is None:
        test_type_team_name = testTypeCodeAltGroups.group("team_name")
        gym_type = testTypeCodeAltGroups.group("gym_type")
        test_type_code = testTypeCodeAltGroups.group("code")
    elif not testTypeCodeGroups is None:
        test_type_team_name = testTypeCodeGroups.group("team_name")
        test_type_code = testTypeCodeGroups.group("code")

    # The subject level (A..F) is inferred from the first level letter that
    # occurs anywhere in the XPRS code.
    xprs_code = xprsGroups.group("code") if not xprsGroups is None else ""
    xprs_level = "A" if "A" in xprs_code else "B" if "B" in xprs_code else "C" if "C" in xprs_code else "D" if "D" in xprs_code else "E" if "E" in xprs_code else "F" if "F" in xprs_code else "-"

    return {
        "status":
        "ok",
        "censor": {
            "name":
            teacherGroups.group("name")
            if not teacherGroups is None else unicode(
                soup.find("td",
                          attrs={
                              "id": "m_Content_outboundcensor_teachername"
                          }).text),
            "abbrevation":
            teacherGroups.group("abbrevation")
            if not teacherGroups is None else ""
        },
        "test_team":
        testTeamName,
        "institution": {
            "name":
            institutionGroups.group("institution")
            if not institutionGroups is None else soup.find(
                "td", attrs={
                    "id": "m_Content_outboundcensor_institution"
                }).text,
            "institution_id":
            institutionGroups.group("institution_id")
            if not institutionGroups is None else ""
        },
        "period": {
            "start":
            periodGroups.group("start") if not periodGroups is None else "",
            "end":
            periodGroups.group("end") if not periodGroups is None else ""
        },
        "xprs": {
            "xprs_test":
            soup.find("td",
                      attrs={
                          "id": "m_Content_outboundcensor_xprsproeve"
                      }).text,
            "code_full":
            xprs_code,
            "code":
            xprs_code.replace(xprs_level, ""),
            "level":
            xprs_level,
            "gym_type":
            gym_type,
            "test_type_code":
            test_type_code,
            "xprs_type":
            xprs_type,
            "subject":
            xprsGroups.group("subject_name") if not xprsGroups is None else "",
            "type":
            "written" if xprs_type == "SKR" else "combined" if xprs_type
            == "SAM" else "oral" if xprs_type == "MDT" else xprs_type,
            "test_type_long_code":
            "Skriftlig eksamen"
            if xprs_type == "SKR" else "Mundtlig eksamen" if xprs_type == "MDT"
            else "Samlet vurdering" if xprs_type == "SAM" else xprs_type
        },
        "test_type_team_name":
        test_type_team_name,
        "number_of_students":
        soup.find("td", attrs={
            "id": "m_Content_outboundcensor_elevtal"
        }).text,
        "note":
        soup.find("td", attrs={
            "id": "m_Content_outboundcensor_bemaerkning"
        }).text,
        "phases":
        phases,
        "information":
        information,
        "description":
        description
    }
Exemplo n.º 44
0
def load_list(config, year, type):
    """Fetch one of Lectio's autocomplete/dropdown cache lists and parse it.

    Parameters:
        config -- dict with "school_id" and "branch_id" keys.
        year   -- term/year identifier used as the cache "subcache" key.
        type   -- Lectio cache type string, e.g. "bchold", "bcteacher",
                  "bcstudent" or "bcgroup" (other values are passed
                  through unchanged).

    Returns {"status": "ok"} plus the parsed "persons", "teams",
    "team_elements" and "groups" lists.
    """
    # The cross-term "bc*" caches are keyed by the year prefixed with "y".
    # (The original condition tested "bchold" twice; collapsed to one
    # membership test.)
    if type in ("bchold", "bcteacher", "bcstudent", "bcgroup"):
        year = "y" + str(year)

    # The response body is a JavaScript assignment; this prefix (and the
    # trailing ";") are stripped before parsing the remainder as JSON.
    remove = "var _localcache_autocomplete_%s_%s_%s = " % (
        str(type), str(config["branch_id"]), str(year))

    url = "https://www.lectio.dk/lectio/%s/cache/DropDown.aspx?type=%s&afdeling=%s&subcache=%s" % (
        str(config["school_id"]), str(type), str(
            config["branch_id"]), str(year))

    cookies = {}

    # Insert User-agent headers and the cookie information
    headers = {
        "User-Agent":
        "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1665.2 Safari/537.36",
        "Content-Type": "application/x-www-form-urlencoded",
        "Host": "www.lectio.dk",
        "Origin": "https://www.lectio.dk",
        "Cookie": functions.implode(cookies, "{{index}}={{value}}", "; ")
    }

    response = proxy.session.get(url, headers=headers)

    rows = json.loads(
        response.text.replace(remove, "").strip().replace(";", ""))

    teams = []
    team_elements = []
    groups = []
    persons = []

    # Row label formats: teams are "Name (value/end_year)", teachers are
    # "Name (ABBR)" and students are "Name (class)".
    teamNameProg = re.compile(
        r"(?P<name>.*) \((?P<value>.*)\/(?P<end_year>.*)\)")
    teacherNameProg = re.compile(r"(?P<name>.*) \((?P<abbrevation>.*)\)")
    studentNameProg = re.compile(r"(?P<name>.*) \((?P<class_code>.*)\)")

    # Each row is [label, context_card_id, state, ...]; state "i" marks an
    # inactive entry. The id's letter (H/G/T/S) encodes the entity type.
    for row in rows:
        context_card_id = row[1]

        if "H" in context_card_id:
            # Team ("hold") entry.
            teamNameGroups = teamNameProg.match(row[0])

            teams.append({
                "context_cards": [context_card_id],
                "team_id": context_card_id.replace("H", "")
            })

            team_elements.append({
                "team_id": context_card_id.replace("H", ""),
                "term": {
                    "value": teamNameGroups.group("value")
                    if not teamNameGroups is None else ""
                },
                "name": teamNameGroups.group("name").encode("utf8")
                if not teamNameGroups is None else "",
                "active": row[2] != "i"
            })
        elif "G" in context_card_id:
            # Group entry; also registered under a team-style "H" card id.
            teams.append({
                "context_cards": [
                    context_card_id,
                    context_card_id.replace("G", "H")
                ],
                "team_id": context_card_id.replace("G", "")
            })

            data = {
                "name": row[0].encode("utf8"),
                "term": {
                    "value": str(year).replace("y", "")
                },
                "active": row[2] != "i"
            }

            # An optional fifth column distinguishes Lectio's built-in
            # groups from user-created ones.
            if len(row) > 4:
                data["group_type"] = "built_in" if row[
                    4] == "groupbuiltin" else "own_group"

            groups.append(data)
        elif "T" in context_card_id:
            # Teacher entry.
            teacherGroups = teacherNameProg.match(row[0])

            persons.append({
                "name": teacherGroups.group("name")
                if not teacherGroups is None else "",
                "abbrevation": teacherGroups.group("abbrevation")
                if not teacherGroups is None else "",
                "teacher_id": context_card_id.replace("T", ""),
                "context_cards": [context_card_id],
                "active": row[2] != "i",
                "type": "teacher"
            })
        elif "S" in context_card_id:
            # Student entry.
            studentGroups = studentNameProg.match(row[0])

            persons.append({
                "type": "student",
                "student_id": context_card_id.replace("S", ""),
                "active": row[2] != "i",
                "name": studentGroups.group("name").encode("utf8")
                if not studentGroups is None else "",
                "class_text": studentGroups.group("class_code")
                if not studentGroups is None else "",
                "context_cards": [context_card_id]
            })

    return {
        "status": "ok",
        "persons": persons,
        "teams": teams,
        "team_elements": team_elements,
        "groups": groups
    }


# https://www.lectio.dk/lectio/517/cache/DropDown.aspx?type=favorites&afdeling=4733693427&dt=%2bsvQVm%2fjhkz0rp3DufIc9g%3d%3d4789793695
Exemplo n.º 45
0
def leave ( config, session = False ):
	"""Fetch and parse a student's absence ("fravaer") table from Lectio.

	config must contain "school_id" and "student_id". When session is
	False a session is created via authenticate.authenticate().

	Returns {"status": "ok", "leave": [...], "term": {...}} on success,
	the standard authenticate error dict when login fails, or a
	"Data not found" dict when the absence table is missing.
	"""
	url = "https://www.lectio.dk/lectio/%s/subnav/fravaerelev.aspx?elevid=%s" % ( str(config["school_id"]), str(config["student_id"]) )

	if session is False:
		session = authenticate.authenticate(config)

	# Fail early when authentication did not succeed, matching the other
	# endpoints in this file; without this check the session[...] lookups
	# below would raise a TypeError on the False value.
	if session == False:
		return {"status" : "error", "type" : "authenticate"}

	# Insert the session information from the auth function
	cookies = {
		"lecmobile" : "0",
		"ASP.NET_SessionId" : session["ASP.NET_SessionId"],
		"LastLoginUserName" : session["LastLoginUserName"],
		"lectiogsc" : session["lectiogsc"],
		"LectioTicket" : session["LectioTicket"]
	}

	# Insert User-agent headers and the cookie information
	headers = {
		"User-Agent" : "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1665.2 Safari/537.36",
		"Content-Type" : "application/x-www-form-urlencoded",
		"Host" : "www.lectio.dk",
		"Origin" : "https://www.lectio.dk",
		"Cookie" : functions.implode(cookies, "{{index}}={{value}}", "; ")
	}

	response = proxy.session.get(url, headers=headers)

	html = response.text

	soup = Soup(html)

	if soup.find("table", attrs={"id" : "s_m_Content_Content_SFTabStudentAbsenceDataTable"}) is None:
		return {
			"status" : False,
			"error" : "Data not found"
		}

	rows = soup.find("table", attrs={"id" : "s_m_Content_Content_SFTabStudentAbsenceDataTable"}).findAll("tr")

	# Drop the four header rows at the top and the totals row at the bottom.
	rows.pop(0)
	rows.pop(0)
	rows.pop(0)
	rows.pop(0)
	rows.pop(len(rows)-1)

	leaveRows = []
	teamIdProg = re.compile(r"\/lectio\/(?P<school_id>.*)\/fravaer_elevhold.aspx\?holdelementid=(?P<team_element_id>.*)")

	# Absence cells hold "absent/total" pairs; commas are decimal marks.
	leaveProg = re.compile(r"(?P<number>.*)\/(?P<modules>.*)")

	# The chosen period is the same for every row, so parse it once instead
	# of re-running find/strptime per row (was done twice per row before).
	startDate = datetime.strptime(soup.find("input", attrs={"id" : "s_m_Content_Content_SFTabPeriodChooserCtrl_start__date_tb"})["value"], "%d/%m-%Y")
	endDate = datetime.strptime(soup.find("input", attrs={"id" : "s_m_Content_Content_SFTabPeriodChooserCtrl_end__date_tb"})["value"], "%d/%m-%Y")

	for row in rows:
		elements = row.findAll("td")
		teamIdGroups = teamIdProg.match(elements[0].find("a")["href"])

		# Physical absence: period, teacher-calculated and whole-year columns.
		periodeGroups = leaveProg.match(elements[2].text.replace(",", "."))
		calculatedGroups = leaveProg.match(elements[4].text.replace(",", "."))
		yearGroups = leaveProg.match(elements[6].text.replace(",", "."))

		# Written (assignment) absence columns.
		writtenPeriodeGroups = leaveProg.match(elements[8].text.replace(",", "."))
		writtenCalculatedGroups = leaveProg.match(elements[10].text.replace(",", "."))
		writtenYearGroups = leaveProg.match(elements[12].text.replace(",", "."))

		data = {
			"team" : {
				"name": unicode(elements[0].find("a").text),
				"team_id" : teamIdGroups.group("team_element_id") if not teamIdGroups is None else ""
			},
			"leave" : {
				"period" : {
					"end_date" : endDate,
					"start_date" : startDate,
					"percent" : elements[1].text.replace(",", ".").replace("%", ""),
					"modules" : periodeGroups.group("modules") if not periodeGroups is None else "",
					"leave" : periodeGroups.group("number") if not periodeGroups is None else ""
				},
				"calculated" : {
					"percent" : elements[3].text.replace(",", ".").replace("%", ""),
					"modules" : calculatedGroups.group("modules") if not calculatedGroups is None else "",
					"leave" : calculatedGroups.group("number") if not calculatedGroups is None else ""
				},
				"year" : {
					"percent" : elements[5].text.replace(",", ".").replace("%", ""),
					"modules" : yearGroups.group("modules") if not yearGroups is None else "",
					"leave" : yearGroups.group("number") if not yearGroups is None else ""
				}
			},
			"written" : {
				"period" : {
					"end_date" : endDate,
					"start_date" : startDate,
					"percent" : elements[7].text.replace(",", ".").replace("%", ""),
					"hours" : writtenPeriodeGroups.group("modules") if not writtenPeriodeGroups is None else "",
					"leave" : writtenPeriodeGroups.group("number") if not writtenPeriodeGroups is None else ""
				},
				"calculated" : {
					"percent" : elements[9].text.replace(",", ".").replace("%", ""),
					"hours" : writtenCalculatedGroups.group("modules") if not writtenCalculatedGroups is None else "",
					"leave" : writtenCalculatedGroups.group("number") if not writtenCalculatedGroups is None else ""
				},
				"year" : {
					"percent" : elements[11].text.replace(",", ".").replace("%", ""),
					"hours" : writtenYearGroups.group("modules") if not writtenYearGroups is None else "",
					"leave" : writtenYearGroups.group("number") if not writtenYearGroups is None else ""
				}
			}
		}

		leaveRows.append(data)

	return {
		"status" : "ok",
		"leave" : leaveRows,
		"term" : {
			"value" : soup.find("select", attrs={"id" : "s_m_ChooseTerm_term"}).select('option[selected="selected"]')[0]["value"],
			"years_string" : soup.find("select", attrs={"id" : "s_m_ChooseTerm_term"}).select('option[selected="selected"]')[0].text,
			"start_date" : startDate
		},
	}
Exemplo n.º 46
0
def document ( config, session = False ):
	"""Fetch and parse a Lectio document-edit page ("dokumentrediger").

	config must contain "school_id" and "document_id". When session is
	False a session is created via authenticate.authenticate().

	Returns {"status": "ok", "document": {...}} on success, the standard
	authenticate error dict on login failure, or a "Data not found" dict
	when the document container is missing from the page.
	"""
	url = "https://www.lectio.dk/lectio/%s/dokumentrediger.aspx?dokumentid=%s" % ( str(config["school_id"]), str(config["document_id"]) )

	if session is False:
		session = authenticate.authenticate(config)

	if session == False:
		return {"status" : "error", "type" : "authenticate"}

	# Insert the session information from the auth function
	cookies = {
		"lecmobile" : "0",
		"ASP.NET_SessionId" : session["ASP.NET_SessionId"],
		"LastLoginUserName" : session["LastLoginUserName"],
		"lectiogsc" : session["lectiogsc"],
		"LectioTicket" : session["LectioTicket"]
	}

	# Insert User-agent headers and the cookie information
	headers = {
		"User-Agent" : "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1665.2 Safari/537.36",
		"Content-Type" : "application/x-www-form-urlencoded",
		"Host" : "www.lectio.dk",
		"Origin" : "https://www.lectio.dk",
		"Cookie" : functions.implode(cookies, "{{index}}={{value}}", "; ")
	}

	response = proxy.session.get(url, headers=headers)

	html = response.text

	soup = Soup(html)

	if soup.find("div", attrs={"id" : "m_Content_Dokument_pa"}) is None:
		return {
			"status" : False,
			"error" : "Data not found"
		}

	# Some document pages render one cell less; offset shifts the fixed
	# cell indices below accordingly.
	offset = 0

	elements = soup.find("div", attrs={"id" : "m_Content_Dokument_pa"}).findAll("td")

	if len(elements) < 7:
		offset = 1

	# Resolve the creator via their context card (reuses the session).
	creator = context_card.user({
		"context_card_id" : elements[3-offset].find("span")["lectiocontextcard"],
		"school_id" : config["school_id"]
	}, session)["user"]

	# The "changed by" cell holds a context-card span followed by a
	# "date af name" text; the span is removed so only the date remains.
	changer = elements[4-offset].find("span")["lectiocontextcard"]
	elements[4-offset].find("span").decompose()
	dateText = elements[4-offset].text.replace(" af ", "").strip()
	dateTimeProg = re.compile(r"(?P<day>.*)/(?P<month>.*)-(?P<year>.*) (?P<hour>.*):(?P<minute>.*)")
	dateGroups = dateTimeProg.match(dateText)
	date = datetime.strptime("%s/%s-%s %s:%s" % (functions.zeroPadding(dateGroups.group("day")), functions.zeroPadding(dateGroups.group("month")), dateGroups.group("year"), dateGroups.group("hour"), dateGroups.group("minute")), "%d/%m-%Y %H:%M") if not dateGroups is None else ""

	# Affiliations table: one row per team/teacher/student the document is
	# shared with (header row dropped).
	connectionRows = soup.find("table", attrs={"id" : "m_Content_AffiliationsGV"}).findAll("tr")
	connectionRows.pop(0)

	connections = []

	for row in connectionRows:
		rowElements = row.findAll("td")

		# Entity type is inferred from the context-card id prefix letter.
		data = {
			"context_card_id" : rowElements[0]["lectiocontextcard"],
			"type" : "team" if "H" in rowElements[0]["lectiocontextcard"] else "teacher" if "T" in rowElements[0]["lectiocontextcard"] else "student",
			"name" : unicode(rowElements[0].find("span").text),
			"can_edit" : True if "checked" in rowElements[1].find("input").attrs else False
		}

		# The folder dropdown is only present for some affiliations.
		if rowElements[2].find("select"):
			folder_id = rowElements[2].find("select").select('option[selected="selected"]')[0]["value"]
			data["folder_id"] = folder_id

		connections.append(data)

	document = {
		"name" : unicode(elements[0].find("a").text).replace("\t", "").replace("\n", "").replace("\r", "").strip(),
		"extension" : os.path.splitext(elements[0].find("a").text.replace("\t", "").replace("\n", "").replace("\r", "").strip())[1].replace(".", ""),
		"size" : elements[2-offset].text.replace(",", ".").replace("\t", "").replace("\n", "").replace("\r", "").strip(),
		"document_id" : str(config["document_id"]),
		"creator" : creator,
		"changer" : {
			"context_card_id" : changer,
			"type" : "teacher" if "T" in changer else "student",
			"date" : date
		},
		"comment" : soup.find("textarea", attrs={"id" : "m_Content_EditDocComments_tb"}).text.replace("\r\n",""),
		"public" : True if "checked" in soup.find("input", attrs={"id" : "m_Content_EditDocIsPublic"}).attrs else False,
		"connections" : connections,
		"term" : {
			"value" : soup.find("select", attrs={"id" : "m_ChooseTerm_term"}).select('option[selected="selected"]')[0]["value"],
			"years_string" : soup.find("select", attrs={"id" : "m_ChooseTerm_term"}).select('option[selected="selected"]')[0].text
		}
	}

	return {
		"status" : "ok",
		"document" : document
	}
def timetable( config, url, week, year, session = False ):
	cookies = {}
	# Sorting settings
	settings = {

	}

	# Insert User-agent headers and the cookie information
	headers = {
		"User-Agent" : "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1665.2 Safari/537.36",
		"Content-Type" : "application/x-www-form-urlencoded",
		"Host" : "www.lectio.dk",
		"Origin" : "https://www.lectio.dk",
		"Cookie" : functions.implode(cookies, "{{index}}={{value}}", "; ")
	}

	response = proxy.session.get(url, headers=headers)

	html = response.text

	soup = Soup(html)

	if soup.find("table", attrs={"id" : "s_m_Content_Content_SkemaNyMedNavigation_skema_skematabel"}) is None:
		return {
			"status" : False,
			"error" : "Data not found"
		}

	# Fetch all rows in the table
	rows = soup.find("table", attrs={"id" : "s_m_Content_Content_SkemaNyMedNavigation_skema_skematabel"}).findAll("tr")

	# Fetch module info, to make it possible to draw a complete timetable
	moduleInfo = []
	moduleInfoProg = re.compile(r"(?P<module_number>.*)\. (?P<start_time>.*) - (?P<end_time>.*)")

	for row in soup.findAll("div", attrs={"class" : "s2module-info"}):
		moduleInfoGroups = moduleInfoProg.match(row.text.strip().replace("modul", ""))
		if not moduleInfoGroups is None:
			start = moduleInfoGroups.group("start_time")
			if len(start) < 5:
				start = "0" + start

			end = moduleInfoGroups.group("end_time")
			if len(end) < 5:
				end = "0" + end
			moduleInfo.append({
				"module" : moduleInfoGroups.group("module_number"),
				"start" : start,
				"end" : end
			})

	# Fetch the general information celss
	generalInformationDays = rows[2].findAll("td")
	generalInformation = []

	holidayElements = []

	# Loop through all the cells, and look for information
	index = 0
	for tdRow in generalInformationDays:
		index = index+1
		if index > 1:
			row = tdRow.findAll("a")

			dayOfWeek = index-1

			if dayOfWeek == 7:
				dayOfWeek = 0

			# Loop over the link elements, in the cell
			if not row == None and len(row) > 0:
				for element in row:

					# The time module uses "0" as the first week of the year
					if int(week) == 1:
						timeWeek = 0
					else:
						# Subtract one, because 0 is the first week
						timeWeek = int(week)-1

					date = time.strptime("%s %s %s" % (str(dayOfWeek),str(timeWeek), str(year)),"%w %W %Y")
					content = element.find("div", attrs={"class" : "s2skemabrikcontent"}).findAll("span")[1]
					div = element.find("div", attrs={"class" : "s2skemabrikcontent"})

					href = None
					# If the a tag has a href, fetch it
					try:
						href = element["href"]
					except BaseException:
						pass

					if href == None:
						generalInformation.append({
							"message" : unicode(content.text),
							"date" : datetime.fromtimestamp(mktime(date)),
							"school_id" : str(config["school_id"]),
							"branch_id" : str(config["branch_id"]),
							"term" : soup.find("select", attrs={"id" : "s_m_ChooseTerm_term"}).select('option[selected="selected"]')[0]["value"],
							"week" : week,
							"year" : year
						})
					else:
						# Compile the regular expression
						prog = re.compile(r"\/lectio\/(?P<school_id>[0-9]*)\/aktivitet\/aktivitetinfo.aspx\?id=(?P<activity_id>[0-9]*)&(?P<prev_url>.*)")
						activityGroups = prog.match(element["href"])
						generalInformation.append({
							"message" : unicode(content.text),
							"activity_id" : activityGroups.group("activity_id"),
							"status" : "changed" if "s2changed" in div["class"] else "cancelled" if "s2cancelled" in div["class"] else "normal",
							"date" : datetime.fromtimestamp(mktime(date)),
							"school_id" : str(config["school_id"]),
							"branch_id" : str(config["branch_id"]),
							"term" : soup.find("select", attrs={"id" : "s_m_ChooseTerm_term"}).select('option[selected="selected"]')[0]["value"],
							"week" : week,
							"year" : year
						})

	# Find all the day elements
	timeElements = []


	headers = []

	headerRows = rows[1].findAll("td")
	headerRows.pop(0)
	headerProg = re.compile(ur"(?P<day_name>.*) \((?P<day>.*)\/(?P<month>.*)\)")

	for row in headerRows:
		headerGroups = headerProg.match(row.text)
		headerYear = year

		if not headerGroups is None:
			if int(week) == 1 and int(headerGroups.group("month")) == 12:
				headerYear = str(int(year) - 1)

			headers.append({
				"day" : headerGroups.group("day_name"),
				"date" : datetime.strptime("%s-%s-%s %s" % (functions.zeroPadding(headerGroups.group("day")), functions.zeroPadding(headerGroups.group("month")), headerYear, "12:00"), "%d-%m-%Y %H:%M")
			})

	dayElements = rows[3].findAll("td")
	dayElements.pop(0)

	# Loop over the days
	index = 0
	dayOfWeek = 1
	for dayElement in dayElements:
		# Increment the day
		index = index+1

		dayOfWeek = index

		if dayOfWeek == 7:
			dayOfWeek = 0

		# The time module uses "0" as the first week of the year
		if int(week) == 1:
			timeWeek = 0
		else:
			# Subtract one, because 0 is the first week
			timeWeek = int(week)-1

		# Find all the "a" tags, representing timetable elements
		timetableElements = dayElement.findAll("a")

		moduleIndex = 1

		for checkElement in dayElement.findAll(attrs={"class" : "s2module-bg"}):
			if "s2time-off" in checkElement["class"]:
				# Get time from module info elements
				holidayElements.append({
					"start" : datetime.strptime("%s-%s-%s %s" % (headers[index-1]["date"].strftime("%d"), headers[index-1]["date"].strftime("%m"), headers[index-1]["date"].strftime("%Y"), moduleInfo[moduleIndex-1]["start"]), "%d-%m-%Y %H:%M"),
					"end" : datetime.strptime("%s-%s-%s %s" % (headers[index-1]["date"].strftime("%d"), headers[index-1]["date"].strftime("%m"), headers[index-1]["date"].strftime("%Y"), moduleInfo[moduleIndex-1]["end"]), "%d-%m-%Y %H:%M")
				})
			moduleIndex = moduleIndex + 1

		# Loop over the timetable elements
		for timetableElement in timetableElements:

			#The type of the event, "private" or "school"
			type = None

			# Locate the different types of information in the url, and find the different RegEx groups
			expressions = [
				{"type" : "private", "expression" : r"\/lectio\/(?P<school_id>[0-9]*)\/privat_aftale.aspx\?aftaleid=(?P<activity_id>[0-9]*)"},
				{"type" : "school",  "expression" : r"\/lectio\/(?P<school_id>[0-9]*)\/aktivitet\/aktivitetinfo.aspx\?id=(?P<activity_id>[0-9]*)&(?P<prev_url>.*)"},
				{"type" : "outgoing_censor", "expression" : r"\/lectio\/(?P<school_id>.*)\/proevehold.aspx\?type=udgcensur&outboundCensorID=(?P<outbound_censor_id>.*)&prevurl=(?P<prev_url>.*)"},
				{"type" : "exam", "expression" : r"\/lectio\/(?P<school_id>.*)\/proevehold.aspx\?type=proevehold&ProeveholdId=(?P<test_team_id>.*)&prevurl=(?P<prev_url>.*)"}
			]

			# Loop over the expressions
			groups = []
			type = "other"
			for expressionObject in expressions:
				prog = re.compile(expressionObject["expression"])
				if prog.match(timetableElement["href"]):
					groups = prog.match(timetableElement["href"])
					type = expressionObject["type"]

			# Locate the status div
			div = timetableElement.find("div", attrs={"class" : "s2skemabrikcontent"})

			# A list of the teachers
			teachers = []

			# A list of the assigned teams
			teams = []

			# Find all the info span elements
			infoSpanObjects = timetableElement.findAll("span")

			# Loop over the Info spans
			for span in infoSpanObjects:
				id = None

				# Test if property exists
				try:
					id = span["lectiocontextcard"]
				except BaseException:
					pass

				if not id == None:
					 # Team
					if span["lectiocontextcard"][0] == "H":
						# Append the team
						teams.append({
							"context_card_id" : span["lectiocontextcard"],
							"title" : unicode(span.text),
							"team_id" : span["lectiocontextcard"].replace("HE", "")
						})
					# Teacher
					elif span["lectiocontextcard"][0] == "T":
						teachers.append({
							"abbrevation" : unicode(span.text),
							"context_card_id" : span["lectiocontextcard"],
							"teacher_id" : span["lectiocontextcard"].replace("T", "")
						})

			# Get the titletext where to extract start and end times from
			title = timetableElement["title"]

			# Match the title, to extract the start and end time
			timeProg = re.compile(r"(?P<start_hour>[0-9]*):(?P<start_minute>[0-9]*) til (?P<end_hour>[0-9]*):(?P<end_minute>[0-9]*)")
			timeGroups = timeProg.search(unicode(title).encode("utf8"), re.MULTILINE)

			# Get the "main sections" separated by a double return \n\n
			mainSections = title.split("\n\n")

			# Grab the top section and split it by a single return \n
			topSection = mainSections[0].split("\n")

			# Initialize variables, assume that nothing is cancelled or changed
			isChangedOrCancelled = 0
			isCancelled = False
			isChanged = False

			# If the first item in the top section doesn't contain 'til',
			# it must be either cancelled or changed

			if not "til" in topSection[0]:
				isChangedOrCancelled = 1

				# If it says 'Aflyst!'
				if "Aflyst!" in topSection[0]:
					# It must be cancelled
					isCancelled = True
				else:
					# Otherwise it must be changed
					isChanged = True

			if not timeGroups is None:
				startTime = datetime.fromtimestamp(mktime(time.strptime("%s %s %s %s %s" % (timeGroups.group("start_hour"),timeGroups.group("start_minute"), dayOfWeek , timeWeek, year),"%H %M %w %W %Y")))
				endTime = datetime.fromtimestamp(mktime(time.strptime("%s %s %s %s %s" % (timeGroups.group("end_hour"),timeGroups.group("end_minute"), dayOfWeek , timeWeek, year),"%H %M %w %W %Y")))
			else:
				# Grab the date sections, fx: "15/5-2013 15:30 til 17:00"
				dateSections = topSection[0+isChangedOrCancelled].split(" ")

				# Grab the date, being the first (0) section
				if len(dateSections) == 4:
					startDateSection = dateSections[0]
					endDateSection = dateSections[0]

					startTimeSection = dateSections[1]
					endTimeSection = dateSections[3]
				else:
					startDateSection = dateSections[0]
					endDateSection = dateSections[3]

					startTimeSection = dateSections[1]
					endTimeSection = dateSections[4]

				currentTimezone = timezone("Europe/Copenhagen")

				alternativeDayProg = re.compile(r"(?P<day>[0-9]*)/(?P<month>[0-9]*)-(?P<year>[0-9]*)")
				alternativeStartDayGroups = alternativeDayProg.match(startDateSection.strip())
				alternativeEndDayGroups = alternativeDayProg.match(endDateSection.strip())

				startTime = datetime.strptime("%s/%s-%s %s" % (functions.zeroPadding(alternativeStartDayGroups.group("day")), functions.zeroPadding(alternativeStartDayGroups.group("month")), alternativeStartDayGroups.group("year"), startTimeSection.strip()), "%d/%m-%Y %H:%M")
				endTime = datetime.strptime("%s/%s-%s %s" % (functions.zeroPadding(alternativeEndDayGroups.group("day")), functions.zeroPadding(alternativeEndDayGroups.group("month")), alternativeEndDayGroups.group("year"), endTimeSection.strip()), "%d/%m-%Y %H:%M")

			roomText = ""
			try:
				if not "rer:" in topSection[3 + isChangedOrCancelled]:
					room = topSection[3 + isChangedOrCancelled].strip("Lokale: ").encode('utf-8').replace("r: ","")
			except IndexError:
				pass

			if sameDay(startTime, dayOfWeek, timeWeek, year):
				if type == "private":
					timeElements.append({
						"text" : unicode(timetableElement.text),
						"activity_id" : groups.group("activity_id"),
						"startTime" : startTime,
						"endTime" : endTime,
						"type" : type,
						"school_id" : groups.group("school_id")
					})
				elif type == "outgoing_censor":
					timeElements.append({
						"text" : unicode(timetableElement.text),
						"outbound_censor_id" : groups.group("outbound_censor_id"),
						"startTime" : startTime,
						"endTime" : endTime,
						"type" : type,
						"school_id" : groups.group("school_id")
					})
				elif type == "exam":
					timeElements.append({
						"text" : unicode(timetableElement.text),
						"test_team_id" : groups.group("test_team_id"),
						"startTime" : startTime,
						"endTime" : endTime,
						"type" : type,
						"school_id" : groups.group("school_id")
					})
				elif type == "school":
					# Add to the list
					timeElements.append({
						"text" : unicode(timetableElement.text),
						"activity_id" : groups.group("activity_id"),
						"status" : "changed" if "s2changed" in div["class"] else "cancelled" if "s2cancelled" in div["class"] else "normal",
						"teachers" : teachers,
						"teams" : teams,
						"startTime" : startTime,
						"endTime" : endTime,
						"type" : type,
						"location_text" : unicode(div.text),
						"room_text" : unicode(roomText),
						"school_id" : groups.group("school_id")
					})

	return {
		"status" : "ok",
		"timetable" : timeElements,
		"information" : generalInformation,
		"module_info" : moduleInfo,
		"headers" : headers,
		"term" : {
			"value" : soup.find("select", attrs={"id" : "s_m_ChooseTerm_term"}).select('option[selected="selected"]')[0]["value"],
			"years_string" : soup.find("select", attrs={"id" : "s_m_ChooseTerm_term"}).select('option[selected="selected"]')[0].text
		}
	}
Exemplo n.º 48
0
def templates ( config, session = False ):
	"""Fetch the list of survey templates visible to the student.

	Scrapes the Lectio "skabeloner" page and returns a dict with status
	"ok" and a "templates" list (own-school templates first, then shared
	ones), an authenticate-error dict when login fails, or a status-False
	dict when the expected page content is missing.
	"""
	url = "https://www.lectio.dk/lectio/%s/spoergeskema/skabeloner.aspx?elevid=%s" % ( str(config["school_id"]), str(config["student_id"]) )

	if session is False:
		session = authenticate.authenticate(config)

	if session == False:
		return {"status" : "error", "type" : "authenticate"}

	# Carry over the session information from the auth function
	cookies = {
		"lecmobile" : "0",
		"ASP.NET_SessionId" : session["ASP.NET_SessionId"],
		"LastLoginUserName" : session["LastLoginUserName"],
		"lectiogsc" : session["lectiogsc"],
		"LectioTicket" : session["LectioTicket"]
	}

	# Browser-like headers plus the serialized cookie string
	headers = {
		"User-Agent" : "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1665.2 Safari/537.36",
		"Content-Type" : "application/x-www-form-urlencoded",
		"Host" : "www.lectio.dk",
		"Origin" : "https://www.lectio.dk",
		"Cookie" : functions.implode(cookies, "{{index}}={{value}}", "; ")
	}

	page = proxy.session.get(url, headers=headers)
	soup = Soup(page.text)

	ownIsland = soup.find("div", attrs={"id" : "s_m_Content_Content_createQueryIsland_pa"})

	if ownIsland is None:
		return {
			"status" : False,
			"error" : "Data not found"
		}

	templates = []

	# Extracts the survey id from a template link's href
	linkPattern = re.compile(r"\/lectio\/(?P<school_id>.*)\/spoergeskema_besvar.aspx\?mode=display&id=(?P<survey_id>.*)&prevurl=(?P<prev_url>.*)")

	# Own-school templates (skip the table header row)
	if not ownIsland.find(attrs={"class" : "noRecord"}):
		for row in ownIsland.findAll("tr")[1:]:
			link = row.find("a")
			linkGroups = linkPattern.match(link["href"])
			templates.append({
				"school_id" : str(config["school_id"]),
				"branch_id" : str(config["school_id"]),
				"title" : link.text.encode("utf8"),
				"survey_id" : linkGroups.group("survey_id") if linkGroups is not None else "",
				"template" : True
			})

	# Templates shared by other schools
	sharedIsland = soup.find("div", attrs={"id" : "s_m_Content_Content_LectioDetailIsland1_pa"})

	if not sharedIsland.find(attrs={"class" : "noRecord"}):
		for row in sharedIsland.findAll("tr")[1:]:
			link = row.find("a")
			linkGroups = linkPattern.match(link["href"])
			cells = row.findAll("td")
			templates.append({
				"title" : link.text.encode("utf8"),
				"survey_id" : linkGroups.group("survey_id") if linkGroups is not None else "",
				"school_name" : cells[1].text.encode("utf8"),
				"owner_name" : cells[2].text.encode("utf8"),
				"template" : True
			})

	return {
		"status" : "ok",
		"templates" : templates
	}
Exemplo n.º 49
0
def assignments(config, session=False):
    """Scrape the student's assignment list from Lectio.

    Performs two ASP.NET postbacks (toggling the "current exercises" and
    "this term only" filters) before parsing the resulting exercise table.
    Returns a dict with "status": "ok", the parsed "list" and the selected
    "term"; an authenticate-error dict when login fails; or a status-False
    dict when the exercise table is missing.
    """
    if session == False:
        session = authenticate.authenticate(config)

    if session == False:
        return {"status": "error", "type": "authenticate"}
    else:
        url = urls.assigment_list.replace("{{SCHOOL_ID}}",
                                          str(config["school_id"])).replace(
                                              "{{STUDENT_ID}}",
                                              str(config["student_id"]))

        # Insert the session information from the auth function
        cookies = {
            "lecmobile": "0",
            "ASP.NET_SessionId": session["ASP.NET_SessionId"],
            "LastLoginUserName": session["LastLoginUserName"],
            "lectiogsc": session["lectiogsc"],
            "LectioTicket": session["LectioTicket"]
        }

        # Insert User-agent headers and the cookie information
        headers = {
            "User-Agent":
            "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1665.2 Safari/537.36",
            "Content-Type": "application/x-www-form-urlencoded",
            "Host": "www.lectio.dk",
            "Origin": "https://www.lectio.dk",
            "Cookie": functions.implode(cookies, "{{index}}={{value}}", "; ")
        }

        # Initial GET to obtain the ASP.NET view state / event validation
        response = proxy.session.get(url, headers=headers)

        html = response.text

        soup = Soup(html)

        firstViewState = urllib.urlencode(
            {"__VIEWSTATEX": soup.find(id="__VIEWSTATEX")["value"]})

        firstEventValidationText = soup.find(id="aspnetForm").find(
            id="__EVENTVALIDATION")["value"]

        firstEventValidation = urllib.urlencode(
            {"__EVENTVALIDATION": firstEventValidationText})

        # First postback: toggle the "current exercises" filter checkbox
        firstResponse = proxy.session.post(
            url,
            data=
            '__EVENTTARGET=s%24m%24Content%24Content%24CurrentExerciseFilterCB&__EVENTARGUMENT=&__LASTFOCUS='
            + firstEventValidation + "&" + firstViewState +
            "&time=0&__EVENTARGUMENT=&__VIEWSTATE=",
            headers=headers)

        html = firstResponse.text

        soup = Soup(html)

        viewState = urllib.urlencode(
            {"__VIEWSTATEX": soup.find(id="__VIEWSTATEX")["value"]})

        eventValidationText = soup.find(id="aspnetForm").find(
            id="__EVENTVALIDATION")["value"]

        eventValidation = urllib.urlencode(
            {"__EVENTVALIDATION": eventValidationText})

        # Second postback: toggle the "show this term only" checkbox
        response = proxy.session.post(
            url,
            data=
            '__EVENTTARGET=s%24m%24Content%24Content%24ShowThisTermOnlyCB&__EVENTARGUMENT=&__LASTFOCUS='
            + eventValidation + "&" + viewState +
            "&time=0&__EVENTARGUMENT=&__VIEWSTATE=",
            headers=headers)

        html = response.text

        soup = Soup(html)

        if soup.find("table", attrs={"id": "s_m_Content_Content_ExerciseGV"
                                     }) is None:
            return {"status": False, "error": "Data not found"}

        # Extract table cells
        tableRows = soup.find("table",
                              attrs={
                                  "id": "s_m_Content_Content_ExerciseGV"
                              }).findAll("tr")

        # Remove the header cell
        del tableRows[0]

        assignmentsList = []

        for row in tableRows:
            cells = row.findAll("td")

            # Deadline cell, e.g. "5/9-2013 14:30" -> zero-padded "05/09-2013 14:30"
            s = re.search('([0-9]*)\/([0-9]*)-([0-9]*) ([0-9]*):([0-9]*)',
                          cells[3].text)
            date = functions.zeroPadding(
                s.group(1)) + "/" + functions.zeroPadding(
                    s.group(2)) + "-" + s.group(3) + " " + s.group(
                        4) + ":" + s.group(5)
            # Each field is wrapped in try/except: Lectio omits cells for
            # some assignment states, and a missing field should not abort
            # the whole row.
            item = {}
            try:
                item["week"] = cells[0].find("span").text
            except BaseException:
                item["week"] = ""
            try:
                item["group"] = cells[1].find("span").text.encode("utf8")
            except BaseException:
                item["group"] = ""
            try:
                item["title"] = cells[2].find("a").text.encode("utf8")
            except BaseException:
                item["title"] = ""
            try:
                item["context_card_id"] = cells[1].find(
                    "span")["lectiocontextcard"]
                item["team_element_id"] = cells[1].find(
                    "span")["lectiocontextcard"].replace("HE", "")
            except BaseException:
                item["context_card_id"] = ""
                item["team_element_id"] = ""
            try:
                prog = re.compile(
                    r"\/lectio\/(?P<school_id>.*)\/ElevAflevering.aspx\?elevid=(?P<student_id>.*)&exerciseid=(?P<exercise_id>.*)&(?P<the_other>.*)"
                )
                urlGroups = prog.match(cells[2].find("a")["href"])
                item["exercise_id"] = urlGroups.group("exercise_id")
            except BaseException:
                item["exercise_id"] = ""
            try:
                item["link"] = cells[2].find("a")["href"]
            except BaseException:
                item["link"] = ""
            try:
                # Bug fix: the format previously used %H:%S, which parsed
                # the minutes field as seconds (14:30 became 14:00:30).
                item["date"] = datetime.strptime(date, "%d/%m-%Y %H:%M")
            except BaseException:
                item["date"] = datetime.strptime("1/1-1977 00:01",
                                                 "%d/%m-%Y %H:%M")
            try:
                item["hours"] = float(cells[4].find("span").text.replace(
                    ",", ".").strip())
            except BaseException:
                item["hours"] = ""
            try:
                status = unicode(cells[5].find("span").text)
                item[
                    "status"] = "handed" if status == "Afleveret" else "missing" if status == "Mangler" else "waiting"
            except BaseException:
                item["status"] = ""
            try:
                item["leave"] = int(cells[6].text.replace(",", ".").replace(
                    "%", "").strip())
            except BaseException:
                item["leave"] = ""
            try:
                waiting_for = unicode(cells[7].find("span").text)
                item[
                    "waiting_for"] = "student" if waiting_for == "Elev" else "teacher"
            except BaseException:
                item["waiting_for"] = ""
            try:
                item["note"] = cells[8].text.encode("utf8")
            except BaseException:
                item["note"] = ""
            try:
                item["grade"] = cells[9].text.encode("utf8")
            except BaseException:
                item["grade"] = ""
            try:
                item["student_note"] = cells[10].text.encode("utf8")
            except BaseException:
                item["student_note"] = ""

            assignmentsList.append(item)

        return {
            "list": assignmentsList,
            "status": "ok",
            "term": {
                "value":
                soup.find("select", attrs={
                    "id": "s_m_ChooseTerm_term"
                }).select('option[selected="selected"]')[0]["value"],
                "years_string":
                soup.find("select", attrs={
                    "id": "s_m_ChooseTerm_term"
                }).select('option[selected="selected"]')[0].text
            }
        }
Exemplo n.º 50
0
def documents ( config, session = False ):
	"""Scrape the contents of one document folder from Lectio.

	Fetches DokumentOversigt.aspx for config["school_id"] /
	config["student_id"] / config["folder_id"] and returns a dict with
	status "ok" and a "documents" list, an authenticate-error dict when
	login fails, or a status-False dict when the document table is missing.
	"""
	if session is False:
		session = authenticate.authenticate(config)

	if session == False:
		return {"status" : "error", "type" : "authenticate"}

	# Session cookies carried over from the auth function
	cookies = {
		"lecmobile" : "0",
		"ASP.NET_SessionId" : session["ASP.NET_SessionId"],
		"LastLoginUserName" : session["LastLoginUserName"],
		"lectiogsc" : session["lectiogsc"],
		"LectioTicket" : session["LectioTicket"]
	}

	# Insert User-agent headers and the cookie information
	headers = {
		"User-Agent" : "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1665.2 Safari/537.36",
		"Content-Type" : "application/x-www-form-urlencoded",
		"Host" : "www.lectio.dk",
		"Origin" : "https://www.lectio.dk",
		"Cookie" : functions.implode(cookies, "{{index}}={{value}}", "; ")
	}

	url = "https://www.lectio.dk/lectio/%s/DokumentOversigt.aspx?elevid=%s&folderid=%s" %( str(config["school_id"]), str(config["student_id"]), str(config["folder_id"]) )

	response = proxy.session.get(url, headers=headers)

	html = response.text

	soup = Soup(html)

	if soup.find("table", attrs={"id" : "s_m_Content_Content_DocumentGridView_ctl00"}) is None:
		return {
			"status" : False,
			"error" : "Data not found"
		}

	# Table rows minus the header row
	rows = soup.find("table", attrs={"id" : "s_m_Content_Content_DocumentGridView_ctl00"}).findAll("tr")
	rows.pop(0)

	# Lectio shortens the "changed" timestamp depending on its age; these
	# patterns cover the four display formats.
	shortDayTimeProg = re.compile(r"(?P<day_name>.*) (?P<hour>.*):(?P<minute>.*)")
	timeProg = re.compile(r"(?P<hour>.*):(?P<minute>.*)") # Current day, month, year
	dayProg = re.compile(r"(?P<day_name>.*) (?P<day>.*)/(?P<month>.*)") # Current year
	dateProg = re.compile(r"(?P<day>.*)/(?P<month>.*)-(?P<year>.*)")

	# Danish weekday abbreviation -> strptime %a token
	dayConversion = {
		u"Ma" : "Mon",
		u"Ti" : "Tue",
		u"On" : "Wed",
		u"To" : "Thu",
		u"Fr" : "Fri",
		u"Lø" : "Sat",
		u"Sø" : "Son"
	}

	documents = []
	documentIdProg = re.compile(r"\/lectio\/(?P<school_id>.*)\/dokumenthent.aspx\?documentid=(?P<document_id>.*)")

	for row in rows:
		elements = row.findAll("td")
		idGroups = documentIdProg.match(elements[1].find("a")["href"])

		# Resolve the "changed by" user via their context card
		changer = context_card.user({
			"context_card_id" : elements[3]["lectiocontextcard"],
			"school_id" : config["school_id"]
		}, session)

		# NOTE(review): ``today`` is not defined in this function —
		# presumably a module-level value; verify it exists and is current.
		# NOTE(review): ``date`` stays unbound (NameError below) if none of
		# the four patterns match the timestamp cell — TODO confirm a
		# fallback is unnecessary.
		if shortDayTimeProg.match(elements[4].text):
			timeGroups = shortDayTimeProg.match(elements[4].text)
			date = datetime.strptime("%s/%s-%s %s:%s" % (dayConversion[unicode(timeGroups.group("day_name").capitalize())], today.strftime("%W"), today.strftime("%Y"), timeGroups.group("hour"), timeGroups.group("minute")), "%a/%W-%Y %H:%M")
		elif timeProg.match(elements[4].text):
			timeGroups = timeProg.match(elements[4].text)
			date = datetime.strptime("%s/%s-%s %s:%s" % (today.strftime("%d"), today.strftime("%m"), today.strftime("%Y"), timeGroups.group("hour"), timeGroups.group("minute")), "%d/%m-%Y %H:%M")
		elif dayProg.match(elements[4].text):
			dayGroups = dayProg.match(elements[4].text)
			date = datetime.strptime("%s/%s-%s %s:%s" % (dayGroups.group("day"), dayGroups.group("month"), today.strftime("%Y"), "12", "00"), "%d/%m-%Y %H:%M")
		elif dateProg.match(elements[4].text):
			dateGroups = dateProg.match(elements[4].text)
			date = datetime.strptime("%s/%s-%s %s:%s" % (dateGroups.group("day"), dateGroups.group("month"), dateGroups.group("year"), "12", "00"), "%d/%m-%Y %H:%M")

		data = {
			"folder_id" : str(config["folder_id"]),
			"name" : unicode(elements[1].find("span")["title"].replace("Fulde filnavn: ", "")),
			"extension" : os.path.splitext(elements[1].find("span")["title"].replace("Fulde filnavn: ", ""))[1].replace(".", ""),
			"comment" : unicode(elements[2].find("span").text),
			"document_id" : idGroups.group("document_id") if not idGroups is None else "",
			"size" : elements[5].text.replace(",", "."),
			"date" : date,
			"user" : changer["user"]
		}

		documents.append(data)

	return {
		"status" : "ok",
		"documents" : documents
	}
Exemplo n.º 51
0
def description ( config, session = False ):
	"""Scrape a team's teaching description ("undervisningsbeskrivelse").

	Fetches hold_undervisningsbeskrivelse.aspx for
	config["school_id"] / config["team_element_id"] and returns a dict with
	status "ok", general team "information", the course "phases" and the
	selected "term"; an authenticate-error dict when login fails; or a
	status-False dict when the description container is missing.
	"""
	url = "https://www.lectio.dk/lectio/%s/studieplan/hold_undervisningsbeskrivelse.aspx?holdelementid=%s" % ( str(config["school_id"]), str(config["team_element_id"]) )

	if session is False:
		session = authenticate.authenticate(config)

	if session == False:
		return {"status" : "error", "type" : "authenticate"}

	# Session cookies carried over from the auth function
	cookies = {
		"lecmobile" : "0",
		"ASP.NET_SessionId" : session["ASP.NET_SessionId"],
		"LastLoginUserName" : session["LastLoginUserName"],
		"lectiogsc" : session["lectiogsc"],
		"LectioTicket" : session["LectioTicket"]
	}

	# Insert User-agent headers and the cookie information
	headers = {
		"User-Agent" : "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1665.2 Safari/537.36",
		"Content-Type" : "application/x-www-form-urlencoded",
		"Host" : "www.lectio.dk",
		"Origin" : "https://www.lectio.dk",
		"Cookie" : functions.implode(cookies, "{{index}}={{value}}", "; ")
	}

	response = proxy.session.get(url, headers=headers)

	html = response.text

	soup = Soup(html)

	if soup.find("div", attrs={"id" : "s_m_Content_Content_holduvb_UvbHoldRepeater_ctl00_uvbCtrl_uvbcontainer"}) is None:
		return {
			"status" : False,
			"error" : "Data not found"
		}

	# tables[0] holds the general team information; the remaining tables
	# (after the two pop(0) calls below) each describe one course phase.
	tables = soup.find("div", attrs={"id" : "s_m_Content_Content_holduvb_UvbHoldRepeater_ctl00_uvbCtrl_uvbcontainer"}).findAll("table", attrs={"class" : "list"})

	informationElements = tables[0].findAll("td")
	subjectProg = re.compile(r"(?P<subject>.*) (?P<level>.*)$")
	teachers = []
	teamProg = re.compile(r"(?P<team>.*) \((?P<teams>.*)\)")
	teamGroups = teamProg.match(informationElements[9].text.replace("\n", ""))
	teams = []

	if not teamGroups is None:
		teams = teamGroups.group("teams").replace("\n", "").split(", ")

	# Teacher initials are listed as spans in cell 7
	for row in informationElements[7].findAll("span"):
		teachers.append(unicode(row.text))

	subjectGroups = subjectProg.match(informationElements[5].text.replace("\n", ""))
	terms = []

	termProg = re.compile(r"(?P<value>.*)\/(?P<end>.*)")

	# Terms appear as "start/end" pairs joined by " - "
	for x in informationElements[1].text.replace("\n", "").split(" - "):
		termGroups = termProg.match(x)

		terms.append({
			"value" : termGroups.group("value") if not termGroups is None else "",
			"years_string" : x
		})

	information = {
		"teachers" : teachers,
		"terms" : terms,
		"teams" : teams,
		"team_name" : teamGroups.group("team") if not teamGroups is None else "",
		"subject" : {
			"name" : subjectGroups.group("subject").encode("utf8") if not subjectGroups is None else informationElements[5].text.encode("utf8"),
			"level" : subjectGroups.group("level") if not subjectGroups is None else ""
		},
		"institution" : informationElements[3].text.replace("\n", "").encode("utf8"),
	}

	# Drop the information table and the following header table, leaving
	# only the per-phase tables.
	tables.pop(0)
	tables.pop(0)

	phases = []
	phaseIdProg = re.compile(r"\/lectio\/(?P<school_id>.*)\/studieplan\/forloeb_vis.aspx\?phaseid=(?P<phase_id>.*)&prevurl=(?P<prev_url>.*)")
	coversProg = re.compile(ur"Anvendt modullængden (?P<length>.*) (?P<type>.*)\. fra skoleåret (?P<term>.*)")

	for table in tables:
		if not table is None:
			rows = table.findAll("tr", recursive=False)
			elements = []

			# Flatten the phase table into a single cell list
			for row in rows:
				elements = elements + row.findAll("td", recursive=False)

			# Only rows with a phase link are real phases
			if not elements[1].find("a") is None:
				phaseIdGroups = phaseIdProg.match(elements[1].find("a")["href"])
				reachSpans = elements[5].findAll("span")
				title = reachSpans[2]["title"] if "title" in reachSpans[2] else reachSpans[2].text
				coversGroups = coversProg.match(title)
				focusPoints = []
				focusRows = []
				if not elements[7].find("ul") is None:
					focusRows = elements[7].find("ul").findAll("li", recursive=False)
				descriptionText = elements[1].find("span").text

				# Focus points: each top-level <li> is a header whose
				# following <ul> (if any) lists its sub-points
				if len(focusRows) > 0:
					for row in focusRows:
						header = unicode(row.text)
						focusPointElements = []
						if row.find_next().name == "ul":
							for focusElement in row.find_next().findAll("li"):
								focusPointElements.append(unicode(focusElement.text))

						focusPoints.append({
							"header" : header,
							"elements" : focusPointElements
						})

				work_methods = []

				for row in elements[9].findAll("li"):
					work_methods.append(unicode(row.text.replace("\t", "").replace("\n", "").replace("\r", "")))

				# elements[3] is parsed destructively: each handled part is
				# decompose()d/extract()ed so the leftover text nodes can be
				# collected as plain documents at the end. Order matters.
				readings = []
				if not elements[3].find("span").find("i") is None:
					elements[3].find("span").find("i").decompose()
					for row in elements[3].find("span").findAll("br"):
						text = unicode(row.find_next(text=True).string).encode("utf8")
						readings.append({"text" : text})

				elements[3].find("span").decompose()
				links = []

				for link in elements[3].findAll("a"):
					links.append({
						"href" : link["href"],
						# Skip the 3-char separator preceding the link text
						"text" : unicode(link.find_next(text=True).find_next(text=True)[3:].replace("\t", "").replace("\r\n", ""))
					})
					link.find_next(text=True).find_next(text=True).extract()
					link.decompose()

				written = []

				# Written assignments table: title + due date per row
				if not elements[3].find("table") is None:
					writtenRows = elements[3].findAll("tr")
					writtenRows.pop(0)

					for row in writtenRows:
						writtenRowElements = row.findAll("td")
						written.append({
							"title" : writtenRowElements[0].text.replace("\r\n", "").replace("\t", ""),
							"date" : datetime.strptime(writtenRowElements[1].text.replace("\r\n", "").replace("\t", "").strip(), "%d-%m-%Y")
						})

					elements[3].find("table").decompose()

				for x in elements[3].findAll("i"):
					x.decompose()

				documents = []

				# Whatever text remains in the cell is document names
				for row in elements[3].findAll(text=True):
					if len(row) > 1:
						documents.append({
							"name" : row.strip().replace("\r\n", "").replace("\t", "")
						})

				phases.append({
					"reach" : {
						"covers" : {
							"length" : "unknown" if reachSpans[1].text == "Ikke angivet" else reachSpans[1].text.replace(" moduler", ""),
							"type" : "modules"
						},
						"details" : {
							"length" : coversGroups.group("length") if not coversGroups is None else "45",
							"type" : coversGroups.group("type") if not coversGroups is None else "min",
							"term" : "20" + coversGroups.group("term") if not coversGroups is None else soup.find("select", attrs={"id" : "s_m_ChooseTerm_term"}).select('option[selected="selected"]')[0].text
						}
					},
					"estimate" : {
						"type" : "modules",
						"length" : "unknown" if reachSpans[0].text == "Ikke angivet" else reachSpans[0].text.replace(",", ".").replace(" moduler", "").strip(),
					},
					"methods" : work_methods,
					"name" : elements[1].find("a").text.encode("utf8"),
					"phase_id" : phaseIdGroups.group("phase_id") if not phaseIdGroups is None else "",
					"focus_points" : focusPoints,
					"readings" : readings,
					"links" : links,
					"documents" : documents,
					"written" : written,
					"description" : descriptionText,
					"title" : elements[1].find("a").text.encode("utf8")
				})

	return {
		"status" : "ok",
		"information" : information,
		"phases" : phases,
		"term" : {
			"value" : soup.find("select", attrs={"id" : "s_m_ChooseTerm_term"}).select('option[selected="selected"]')[0]["value"],
			"years_string" : soup.find("select", attrs={"id" : "s_m_ChooseTerm_term"}).select('option[selected="selected"]')[0].text
		}
	}
Exemplo n.º 52
0
def class_members ( config, session = False ):
	# Fetch the member list (teachers and students) of a class from Lectio.
	#
	# session semantics:
	#   False        -> anonymous request (public report URL, no cookies)
	#   True         -> authenticate first, then use the session cookies
	#   dict         -> reuse an already-authenticated session
	#
	# Returns {"status": "ok", "teachers": [...], "students": [...]} on success,
	# or {"status": False, "error": "Data not found"} when the member table is absent.
	if session == False:
		url = "https://www.lectio.dk/lectio/%s/subnav/members.aspx?klasseid=%s&showteachers=1&showstudents=1&reporttype=std" % ( str(config["school_id"]), str(config["class_id"]) )
		cookies = {}
	else:
		if session == True:
			session = authenticate.authenticate(config)

		url = "https://www.lectio.dk/lectio/%s/subnav/members.aspx?klasseid=%s&showteachers=1&showstudents=1" % ( str(config["school_id"]), str(config["class_id"]) )
		# Carry over the cookies produced by the auth function
		cookies = {
			"lecmobile": "0",
			"ASP.NET_SessionId": session["ASP.NET_SessionId"],
			"LastLoginUserName": session["LastLoginUserName"],
			"lectiogsc": session["lectiogsc"],
			"LectioTicket": session["LectioTicket"]
		}

	# Browser-like headers plus the serialized cookie string
	request_headers = {
		"User-Agent": "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1665.2 Safari/537.36",
		"Content-Type": "application/x-www-form-urlencoded",
		"Host": "www.lectio.dk",
		"Origin": "https://www.lectio.dk",
		"Cookie": functions.implode(cookies, "{{index}}={{value}}", "; ")
	}

	response = proxy.session.get(url, headers=request_headers)
	soup = Soup(response.text)

	member_table = soup.find("table", attrs={"id": "s_m_Content_Content_laerereleverpanel_alm_gv"})

	if member_table is None:
		return {
			"status": False,
			"error": "Data not found"
		}

	all_rows = member_table.findAll("tr")
	header_cells = all_rows[0].findAll("th")

	teachers = []
	students = []
	# A 7-column header means the table has a leading picture column
	picture_offset = 1 if len(header_cells) == 7 else 0
	picture_pattern = re.compile(r"\/lectio\/(?P<school_id>.*)\/GetImage.aspx\?pictureid=(?P<picture_id>.*)")

	for row in all_rows[1:]:
		cells = row.findAll("td")
		is_teacher = unicode(cells[0 + picture_offset].text) == u"Lærer"
		person_type = "teacher" if is_teacher else "student"
		context_card = cells[1 + picture_offset]["lectiocontextcard"]

		person = {
			"type": person_type,
			"first_name": unicode(cells[2 + picture_offset].find("a").text).encode("utf8"),
			"person_text_id": cells[1 + picture_offset].find("span").text.encode("utf8"),
			"last_name": cells[3 + picture_offset].find("span").text.strip().encode("utf8"),
			"full_name": unicode(unicode(cells[2 + picture_offset].find("a").text) + " " + unicode(cells[3 + picture_offset].find("span").text)).encode("utf8"),
			# The context card id is the numeric person id prefixed with T (teacher) or S (student)
			"person_id": context_card.replace("T", "") if is_teacher else context_card.replace("S", ""),
			"context_card_id": context_card
		}

		if picture_offset == 1:
			picture_match = picture_pattern.match(cells[0].find("img")["src"])
			person["picture_id"] = "" if picture_match is None else picture_match.group("picture_id")

		if is_teacher:
			person["teams"] = cells[5 + picture_offset].text.split(", ")
			teachers.append(person)
		else:
			study_span = cells[4 + picture_offset].find("span")
			person["field_of_study"] = {
				"name": unicode(study_span.text).encode("utf8"),
				"context_card_id": study_span["lectiocontextcard"],
				"field_of_study_id": study_span["lectiocontextcard"].replace("SR", "")
			}
			students.append(person)

	return {
		"status": "ok",
		"teachers": teachers,
		"students": students
	}
Exemplo n.º 53
0
def team_books ( config, session = False ):
	# Fetch the e-book and physical-book reservations registered for a team.
	#
	# config must provide "school_id" and "team_id"; session is either an
	# existing session dict or False, in which case we authenticate here.
	#
	# Returns {"status": "ok", "books": [...]} on success, an authenticate
	# error dict on login failure, or {"status": False, "error": ...} when
	# the page does not contain the expected book data.
	url = "https://www.lectio.dk/lectio/%s/BD/HoldReservations.aspx?HoldID=%s" % ( str(config["school_id"]), str(config["team_id"]) )

	if session is False:
		session = authenticate.authenticate(config)

	if session == False:
		return {"status" : "error", "type" : "authenticate"}
	# Insert the session information from the auth function
	cookies = {
		"lecmobile" : "0",
		"ASP.NET_SessionId" : session["ASP.NET_SessionId"],
		"LastLoginUserName" : session["LastLoginUserName"],
		"lectiogsc" : session["lectiogsc"],
		"LectioTicket" : session["LectioTicket"]
	}

	# Insert User-agent headers and the cookie information
	headers = {
		"User-Agent" : "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1665.2 Safari/537.36",
		"Content-Type" : "application/x-www-form-urlencoded",
		"Host" : "www.lectio.dk",
		"Origin" : "https://www.lectio.dk",
		"Cookie" : functions.implode(cookies, "{{index}}={{value}}", "; ")
	}

	response = proxy.session.get(url, headers=headers)

	html = response.text

	soup = Soup(html)

	ebook_island = soup.find("div", attrs={"id" : "m_Content_ebooks_island_pa"})

	if ebook_island is None:
		return {
			"status" : False,
			"error" : "Data not found"
		}

	books = []

	# E-books live in a table inside the island div; skip the header row.
	# Guard the table lookup: a team with no e-books may omit the table entirely.
	ebook_table = ebook_island.find("table")
	if ebook_table is not None:
		for row in ebook_table.findAll("tr")[1:]:
			elements = row.findAll("td")
			books.append({
				"team_id" : str(config["team_id"]),
				"type" : "ebook",
				"title" : unicode(elements[0].text.replace("\r\n", "").replace("\t", "")),
				"read" : unicode(elements[1].text.replace("\r\n", "").replace("\t", ""))
			})

	# Physical book reservations; this table was previously dereferenced
	# without a None check and crashed with AttributeError when missing.
	reservation_table = soup.find(attrs={"id" : "m_Content_reservationsStudentGV"})
	if reservation_table is not None:
		for row in reservation_table.findAll("tr")[1:]:
			elements = row.findAll("td")

			books.append({
				"type" : "book",
				"team_id" : str(config["team_id"]),
				"title" : unicode(elements[0].text.replace("\r\n", "").replace("\t", ""))
			})

	return {
		"status" : "ok",
		'books' : books
	}
Exemplo n.º 54
0
def xprs_subjects(start,
                  end,
                  increase,
                  school_id,
                  checkLevels=False,
                  levels=["01", "02", "03", "04", "05", "06"]):

    subjects = []

    cards = []

    for code in range(0, end - start + 1):

        if checkLevels == False:
            cards.append(start + (code * increase))
        else:
            codeKey = start + (code * increase)
            for row in levels:
                cards.append(str(codeKey) + row)

    for code in cards:

        url = "https://www.lectio.dk/lectio/%s/contextcard/contextcard.aspx?lectiocontextcard=XF%s" % (
            str(school_id), str(code))

        cookies = {}

        # Insert User-agent headers and the cookie information
        headers = {
            "User-Agent":
            "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1665.2 Safari/537.36",
            "Content-Type": "application/x-www-form-urlencoded",
            "Host": "www.lectio.dk",
            "Origin": "https://www.lectio.dk",
            "Cookie": functions.implode(cookies, "{{index}}={{value}}", "; ")
        }
        error = False
        try:
            response = proxy.session.get(url, headers=headers)
        except Exception, e:
            print code
            error = True

        if error == False:
            html = response.text

            soup = Soup(html)

            codeProg = re.compile(r"(?P<code>[.^\S]*) (?P<name>.*)")

            if not soup.find("span", attrs={"id": "ctl00_Content_cctitle"
                                            }) is None:

                notices = []

                tables = soup.findAll("table")

                codeGroups = codeProg.match(tables[1].findAll("td")[1].text)

                level = "Unkmown"

                if not codeGroups is None:
                    level = "A" if "A" in codeGroups.group(
                        "code") else "B" if "B" in codeGroups.group(
                            "code") else "C"

                subjects.append({
                    "name":
                    unicode(
                        soup.find(attrs={
                            "id": "ctl00_Content_cctitle"
                        }).text.replace("XPRS-f*g - ", "")),
                    "code":
                    codeGroups.group("code").replace("A", "").replace(
                        "B", "").replace("C", "")
                    if not codeGroups is None else "",
                    "subject_sub_type":
                    "none" if tables[1].findAll("td")[3].text
                    == "Ingen underfag" else "differs"
                    if tables[1].findAll("td")[3].text == "Variable underfag"
                    else tables[1].findAll("td")[3].text,
                    "context_card_id":
                    "XF" + str(code),
                    "level":
                    level,
                    "code_full":
                    codeGroups.group("code") if not codeGroups is None else "",
                    "xprs_subject_id":
                    str(code),
                    "notices":
                    tables[1].findAll("td")[5].text.split("\n"),
                    "code_full_name":
                    tables[1].findAll("td")[1].text
                })

            else:
                print code