def makeOverdueFineNotifications(id):
    """Insert "overdue" notifications for every overdue fine of a user.

    Checks both parking and other (campus) violations; each one whose
    status is "Overdue" gets a matching row in userNotifications.

    :param id: User_ID whose violations are checked.
    """
    data = db.Database(filename="IFB299.db")
    # Parking violations -> "Parking Fine Overdue" notifications.
    res = data.retrieveMulti("parkingViolations", "User_ID", id)
    for fine in res:
        if getStatus(fine, "Parking") == "Overdue":
            data.insertUserNotification(
                dict(User_ID=id, Notification_Type="Parking Fine Overdue"))
    # Other (campus) violations -> "Campus Fine Overdue" notifications.
    res = data.retrieveMulti("otherViolations", "User_ID", id)
    for fine in res:
        if getStatus(fine, "Other") == "Overdue":
            data.insertUserNotification(
                dict(User_ID=id, Notification_Type="Campus Fine Overdue"))
    # Close the connection — the original leaked it, unlike the other
    # DB helpers in this file which all call close().
    data.close()
def updateViolation(violation):
    """Persist every field of *violation* to the matching violations table.

    :param violation: dict containing at least 'Violation_Type' and
        'Citation_Number'; every other key/value pair is written back to the
        row identified by the citation number.
    """
    data = db.Database(filename="IFB299.db")
    # Pick the table-specific update method. The original also retrieved the
    # old row into an unused variable in both branches; those dead reads
    # are removed.
    if violation['Violation_Type'] == "Parking":
        update = data.updateParkingViolation
    else:
        update = data.updateOtherViolation
    # 'Violation_Type' is only a discriminator, not a stored column.
    for key in violation:
        if key != "Violation_Type":
            update(key, violation[key], violation['Citation_Number'])
    data.close()
def export_list(db_file, filemanager, list_uri, out_dir):
    """Copy every locally-available track of a playlist into its own folder.

    :param db_file: path of the sqlite database file.
    :param filemanager: object resolving track uris to local file paths.
    :param list_uri: uri of the playlist to export.
    :param out_dir: directory under which the playlist directory is created.
    """
    database = db.Database(db_file)
    list_name, _ = database.get_playlist_info(list_uri)
    list_dir_name = valid_filename(list_name)
    list_dir = os.path.join(out_dir, list_dir_name)
    # exist_ok: re-exporting the same playlist must not abort with an error.
    os.makedirs(list_dir, exist_ok=True)
    t_uris = database.get_tracks(list_uri)
    logger.info("Copying %s tracks from list '%s' to directory '%s'",
                len(t_uris), list_name, list_dir)
    for t_uri in t_uris:
        if filemanager.track_exists(t_uri):
            t_path = filemanager.get_track_path(t_uri)
            shutil.copy(t_path, list_dir)
        else:
            # Track not synced locally; report it instead of failing.
            logger.info("Track missing: %s", t_uri)
    database.close()
    logger.info("Finished.")
def alumnos():
    """Flask view for the students page.

    GET renders the current students and careers; POST additionally inserts
    a new student when all required form fields are non-empty.
    """
    _db = db.Database()

    def db_query():
        if request.method == "POST":
            matricula = request.form["matricula"]
            curp = request.form["curp"]
            nombre = request.form["nombre"]
            sexo = request.form["sexo"]
            bdate = request.form["bdate"]
            telefono = request.form["telefono"]
            celular = request.form["celular"]
            direccion = (request.form["calle"] + " " + request.form["colonia"]
                         + " " + request.form["cp"] + " "
                         + request.form["municipio"] + ", "
                         + request.form["entidad"])
            departamento = request.form["departamento"]
            carrera = request.form["carrera"]
            # telefono/celular are optional; everything else is required.
            if (matricula != "" and curp != "" and nombre != "" and sexo != ""
                    and bdate != "" and direccion != "" and departamento != ""
                    and carrera != ""):
                _db.insert_alumno(
                    matricula,
                    curp,
                    nombre,
                    sexo,
                    bdate,
                    telefono,
                    celular,
                    direccion,
                    departamento,
                    carrera,
                )
        # Always return the listings. The original left them unassigned on a
        # POST that failed validation and crashed with UnboundLocalError.
        return _db.list_alumno(), _db.list_carreras()

    res, carr = db_query()
    return render_template("alumnos.html", result=res, carreras=carr)
def event_thread(hermes):
    # type: (Hermes) -> None
    """Poll the database every second and announce each due event once."""
    handle = db.Database()
    while True:
        time.sleep(1)
        for (uuid, name) in handle.get_due_events():
            # Unnamed events get a generic reminder text.
            if name is None:
                text = "This is a reminder to do your stuff."
            else:
                text = "This is a reminder to {}".format(name)
            hermes.publish_start_session_notification("default", text, None)
            # Consumed: remove the event so it fires only once.
            handle.delete_event(uuid)
def main():
    """Scrape the Dalhousie timetable for every subject and store the courses.

    Builds one schedule URL per subject code, parses it and saves the parsed
    course data into the database. Historical subject lists for older terms
    (kept as large commented-out arrays in the original) were removed; they
    remain available in version-control history.
    """
    # 2020 Fall subject codes.
    subjects = [
        "ACSC", "ANAT", "ARBC", "ARCH", "ASSC", "BIOC", "BIOE", "BIOL",
        "BMNG", "BUSI", "CANA", "CHEE", "CHEM", "CHIN", "CIVL", "CLAS",
        "COMM", "CH_E", "CPST", "CSCI", "CTMP", "CRWR", "DEHY", "DENT",
        "DMUT", "EMSP", "ERTH", "ECON", "ECED", "ECMM", "ENGI", "ENGM",
        "ENGL", "ENSL", "ENVE", "ENVS", "ENVI", "EURO", "FILM", "FIGA",
        "FIGS", "FOSC", "FREN", "GWST", "GEOG", "GERM", "HESA", "HINF",
        "HLTH", "HPRO", "HSCE", "HAHP", "HSTC", "HIST", "INDG", "IENG",
        "INFO", "INTE", "INTD", "IPHE", "ITAL", "JOUR", "KINE", "KING",
        "LAWS", "LEIS", "MRIT", "MGMT", "MARA", "MARI", "MATL", "MATH",
        "MECH", "MEDP", "MEDR", "MICI", "MINE", "MUSC", "NESC", "NUMT",
        "NURS", "OCCU", "OCEA", "ORAL", "PHDP", "PATH", "PERF", "PERI",
        "PHAC", "PHAR", "PHIL", "PHYC", "PHYL", "PHYT", "PLAN", "POLI",
        "PGPH", "PEAS", "PROS", "PSYR", "PSYO", "PUAD", "RADT", "REGN",
        "RELS", "RSPT", "RUSN", "SCIE", "SLWK", "SOSA", "SPAN", "STAT",
        "SUST", "THEA", "TYPR", "VISC"
    ]
    # Term code(s) passed as the s_term query parameter.
    terms = {
        "2018winter": "201920",
        "2018fall": "201910",
        # "%2C" is a URL-encoded comma. The original literal "%\2C" contained
        # an accidental octal escape (\2 == chr(2)), corrupting the value.
        "2018fall/winter": "201910%2C201920",
        "2019Summer": "201930",
        "2019fall/winter": "202010,202020"
    }
    district = "100"
    database = db.Database()
    term = terms["2019fall/winter"]
    for subject in subjects:
        url = ("https://dalonline.dal.ca/PROD/fysktime.P_DisplaySchedule"
               "?s_term=" + term + "&s_subj=" + subject
               + "&s_district=" + district)
        # data[courseindex].title or .classes[classindex] for full info.
        data = parseUrl(url)
        database.saveCourses(data)
def parse(self, url):
    """Parse a HSQLDB JDBC URL of the form ``jdbc:HypersonicSQL:<DB>``.

    :return: one-element tuple containing the discovered db.DatabaseServer.
    """
    url = self.trimUrlPrefix(url).strip()
    isInMemoryDatabase = url == '.'
    isClientServerMode = re.match('http://', url, re.I)
    # All servers except one in client-server mode run on localhost.
    # (The original used the fragile `cond and a or b` idiom; replaced with
    # real conditional expressions — behavior unchanged.)
    address = None if isClientServerMode else 'localhost'
    # A server in "in-memory" mode has no named database.
    databases = () if isInMemoryDatabase else (db.Database(url.strip()),)
    return (db.DatabaseServer(address=address,
                              databases=databases,
                              vendor=self.getPlatform().vendor),)
def getFine(id):
    """Makes dictionary for fine reminder emails.

    :param id: Fine_Number of the fine payment row.
    :return: the fine row augmented with Location, Name, Description,
        Date, Time and Amount fields.
    """
    data = db.Database(filename="IFB299.db")
    fine = data.retrieve("finePayments", "Fine_Number", id)
    # Pull details from whichever violations table the citation belongs to.
    if fine['Citation_Type'] == "Parking":
        violation = getParkingViolation(id)
    else:
        violation = getOtherViolation(id)
    fine['Location'] = violation['Place_in_campus']
    fine['Name'] = violation['User_ID']
    fine['Description'] = violation['Description']
    # Round-trip through strptime validates the stored d/m/Y date string.
    fine['Date'] = dt.datetime.strptime(violation['Date'],
                                        '%d/%m/%Y').strftime('%d/%m/%Y')
    fine['Time'] = violation['Time']
    fine['Amount'] = getFineAmount(id)
    # The original placed close() after the return, so it never ran and the
    # connection leaked; close before returning.
    data.close()
    return fine
def change_board_data(url):
    """Persist a board's JSON payload; requires a logged-in caller."""
    username = get_username(request.headers)
    if username is None:
        return jsonify({'success': False, 'message': 'Please log in'})
    # The board JSON must be present in the request body.
    if 'json' not in request.json:
        return jsonify({
            'success': False,
            'message': 'Please provide all informations'
        })
    updated = db.Database().change_board_data(url, request.json['json'])
    if updated:
        return jsonify({
            'success': True,
            'message': 'Successfully changed board informations'
        })
    return jsonify({
        'success': False,
        'message': "Could not change board informations"
    })
def getOverDuePayment(id):
    """Returns the amount of money to add for an overdue fine."""
    data = db.Database(filename="IFB299.db")
    fine = data.retrieve("finePayments", "Fine_Number", id)
    # The issue date lives in whichever violations table the fine refers to.
    if fine['Citation_Type'] == "Parking":
        table = "parkingViolations"
    else:
        table = "otherViolations"
    row = data.retrieve(table, "Citation_Number", fine['Citation_Number'])
    issueDate = dt.datetime.strptime(row['Date'], '%d/%m/%Y')
    overdue = dt.datetime.now().date() - issueDate.date()
    data.close()
    # Add $5 for every full week overdue.
    return 5 * int(overdue.days / 7)
def add_team():
    """Create a team; the logged-in user is always its first member."""
    username = get_username(request.headers)
    if username is None:
        return jsonify({'success': False, 'message': 'Please log in'})
    required = ('team_name', 'team_members')
    if not all(field in request.json for field in required):
        return jsonify({
            'success': False,
            'message': 'Please provide all informations'
        })
    members = [username] + request.json['team_members']
    created = db.Database().add_team(request.json['team_name'], members)
    if not created:
        return jsonify({
            'success': False,
            'message': 'Verify team members spelling, or user already in team'
        })
    return jsonify({'success': True, 'message': 'Successfully added new team'})
def main(args):
    """Import nmap XML scan logs into the database.

    Loads host-rename rules from the JSON config, wipes and re-initialises
    the database, then records one event per XML file plus every "up" host
    and its addresses.
    """
    with open(args.config) as infile:
        config = json.load(infile)
    # Hostnames that should be combined/renamed on import.
    combines = build_combines_list(config.get('hosts_to_combine', {}))
    files = iglob('{logsdir}/*.xml'.format(logsdir=args.logsdir))
    # NOTE(review): clear_db() wipes all existing data on every run —
    # confirm a full rebuild is intended.
    database = db.Database(args.database).clear_db().initialise_db()
    for fname in files:
        logger.info('Extracting from %s', fname)
        with open(fname) as infile:
            root = ET.fromstring(infile.read())
        # nmap stores the scan start time (epoch seconds) on the root node.
        timestamp = int(root.attrib['start'])
        event_id = database.add_event(timestamp)
        hosts = []  # NOTE(review): never used — candidate for removal.
        for host in root.findall('host'):
            status = host.find('status')
            # Only record hosts that were up at scan time.
            if not status.attrib['state'] == 'up':
                continue
            hostname_node = host.find('hostnames').find('hostname')
            if hostname_node is None:
                continue
            hostname = check_for_renames(hostname_node.attrib['name'],
                                         combines=combines)
            host_id = database.add_host(
                hostname=hostname,
                event_id=event_id,
            )
            # A host may carry several addresses (ipv4, ipv6, mac).
            address_nodes = host.findall('address')
            for node in address_nodes:
                addrtype = node.attrib['addrtype']
                addr = node.attrib['addr']
                database.add_address(address=addr,
                                     type=addrtype,
                                     host_id=host_id)
def libre():
    """Flask view for free hours; POST may insert a new (nomina, hora) row."""
    _db = db.Database()

    def db_query():
        if request.method == "POST":
            nomina = request.form["nomina"]
            hora = request.form["hora"]
            # Both fields are required for an insert.
            if nomina != "" and hora != "":
                _db.insert_libre(nomina, hora)
        # Always return the listing. The original left `horas` unassigned on
        # a POST that failed validation and crashed with UnboundLocalError.
        return _db.list_libre()

    res = db_query()
    return render_template("libre.html", result=res)
def db_query():
    """List 'pertenece' rows; POST may first insert a new enrolment."""
    _db = db.Database()
    if request.method == "POST":
        matricula = request.form["matricula"]
        curso = request.form["curso"]
        grupo = request.form["grupo"]
        semestre = request.form["semestre"]
        year = request.form["year"]
        # All fields are required for an insert.
        if (matricula != "" and curso != "" and grupo != ""
                and semestre != "" and year != ""):
            _db.insert_pertenece(matricula, curso, grupo, semestre, year)
    # Always return the listing. The original left `lista` unassigned on a
    # POST that failed validation (UnboundLocalError) and returned None for
    # any method other than GET/POST.
    return _db.list_pertenece()
def calculate_student_stats(self, response_matrix=None, return_values=False, store=True): response_matrix = response_matrix or self.get_response_matrix() # Assign to a symbol. robjects.globalenv['response_matrix'] = response_matrix # Use mirt. importr('mirt') # robjects.r('tmp <- tempfile(pattern=paste("foo", Sys.getpid(), sep=""))') # Create the model. twopl_mod = "ability = 1 - " + str(len(response_matrix)) # Fit the model. robjects.r('twopl_fit <- mirt(data = response_matrix, model = "' + twopl_mod + '", itemtype = "2PL", SE = TRUE)') # Fit the student model. robjects.r( 'student_fit <- fscores(twopl_fit, method = "EAP", full.scores = TRUE, full.scores.SE = TRUE)' ) locations = robjects.r('student_fit[,1]') # Create a list to store the locations for saving into the database. locations_to_save = [] # Store inside the questions. for i in range(len(self.students)): student = self.students[i] # Cache the discrimination. # R vectors are 1-indexed. student.location = locations.rx(i + 1)[0] locations_to_save.append(student.location) if store: # Set up a database object. database_manager = db.Database() database_manager.insert_or_update_from('student_locations', session['id'], locations_to_save)
def _converttoDbInstance(self, item):
    r'''Convert an AWS SDK DBInstance into an aws_rds.Instance.

    @types: com.amazonaws.services.rds.model.DBInstance -> aws_rds.Instance

    getDBParameterGroups() provides the list of DB Parameter Groups applied
    to this DB Instance (com.amazonaws.services.rds.model.DBParameterGroupStatus:
    getDBParameterGroupName(), getParameterApplyStatus()).
    getDBSecurityGroups() provides the list of DB Security Group memberships
    (com.amazonaws.services.rds.model.DBSecurityGroupMembership:
    getDBSecurityGroupName(), getStatus()).
    '''
    dbName = item.getDBName()
    # Resolve vendor/platform from the engine signature (e.g. "mysql").
    platform = db_platform.findPlatformBySignature(item.getEngine())
    vendor = platform and platform.vendor
    databases = dbName and [db.Database(dbName)]
    endpoint = self._convertToTcpEndpoint(item.getEndpoint())
    server = db.DatabaseServer(endpoint.getAddress(),
                               endpoint.getPort(),
                               databases=databases,
                               vendor=vendor,
                               version=item.getEngineVersion(),
                               platform=platform)
    sizeInGb = item.getAllocatedStorage()
    # BUG FIX: the original tested `str(sizeInGb).isnumeric` without calling
    # it — a bound method is always truthy, so the guard never rejected
    # non-numeric sizes. Call the method so they yield None instead.
    sizeInMb = (sizeInGb * 1024
                if sizeInGb and str(sizeInGb).isnumeric()
                else None)
    return aws_rds.Instance(
        item.getDBInstanceIdentifier(),
        server,
        type=item.getDBInstanceClass(),
        status=item.getDBInstanceStatus(),
        licenseModel=item.getLicenseModel(),
        sizeInMb=sizeInMb,
        availabilityZoneName=item.getAvailabilityZone(),
        creationTime=item.getInstanceCreateTime(),
        engineName=item.getEngine(),
        parameterGroups=map(self._convertToParameterGroupStatus,
                            item.getDBParameterGroups()),
        securityGroups=map(self._convertToSecurityGroupMembership,
                           item.getDBSecurityGroups()))
def update_username():
    """Rename the logged-in user after a chain of sanity checks."""
    username = get_username(request.headers)
    if username is None:
        return jsonify({'success': False, 'message': 'Please log in'})
    needed = ('old_username', 'new_username')
    if not all(key in request.json for key in needed):
        return jsonify({
            'success': False,
            'message': 'Please provide all informations'
        })
    old_name = request.json['old_username']
    new_name = request.json['new_username']
    # Callers may only rename themselves.
    if username != old_name:
        return jsonify({'success': False, 'message': 'Incorrect username'})
    if old_name == new_name:
        return jsonify({'success': False, 'message': 'Usernames are the same'})
    if not db.Database().update_username(old_name, new_name):
        return jsonify({'success': False,
                        'message': "Could not update username"})
    return jsonify({
        'success': True,
        'message': 'Successfully updated username'
    })
def db_query():
    """Insert a department on POST; list/search departments on GET."""
    _db = db.Database()
    if request.method == "GET":
        # Optional filter values; empty string means "no filter".
        dep_num = request.values.get('DepsID', '')
        dep_name = request.values.get('depName', '')
        return _db.list_deps(dep_num, dep_name)
    if request.method == "POST":
        _db.insert_departamento(request.form["DepID"],
                                request.form["nombre"],
                                request.form["numOficina"],
                                request.form["telefono"])
        return _db.list_dep()
def onClick_Load(self):
    """Load stored news items from the database into a selectable listbox."""
    logging.info("Load News ...")
    d = db.Database()
    # Kept on the instance so onClick_ListBox can look items up later.
    self.res = d.read()
    if not self.res:
        # Nothing stored (or the read failed): show an alert popup.
        notification_manager = notification.Notification_Manager(
            background="white")
        notification_manager.alert("ERROR !!")
    else:
        listbox = Listbox(self, width=40, height=29)
        listbox.place(x=20, y=70)
        listbox.bind('<<ListboxSelect>>', self.onClick_ListBox)
        scrollbar = ttk.Scrollbar(self, orient=VERTICAL,
                                  command=listbox.yview)
        scrollbar.pack(side="right", fill="y")
        listbox.config(yscrollcommand=scrollbar.set)
        # Each row is rendered as "<id>|<title>".
        for new in self.res:
            listbox.insert(END, str(new.get_id()) + '|' + new.get_title())
        # Right-hand labels that the selection handler fills in.
        self.title = Label(self, text="Select From List ...")
        self.title.place(x=380, y=70)
        self.body = Label(self, text="...")
        self.body.place(x=380, y=110)
def sync(db_file, username):
    """Mirror the user's Spotify playlists into the local database."""
    logger.info("Starting sync")
    database = db.Database(db_file)
    si = spotify.SpotifyInterface(username)
    p_uris = database.get_playlists()
    logger.info("Found %s playlists for sync", len(p_uris))
    for p_uri in p_uris:
        name, snapshot_id, new_tracks = si.get_playlist_name_and_tracks(p_uri)
        old_name, old_snapshot_id = database.get_playlist_info(p_uri)
        # Matching snapshot ids mean Spotify has no updates for this list.
        if snapshot_id == old_snapshot_id:
            logger.info("Playlist '%s' unchanged", name)
            continue
        # Refresh metadata, then replace the track list wholesale.
        database.set_playlist_info(p_uri, name, snapshot_id)
        database.remove_playlist_tracks(p_uri)
        for position, track in enumerate(new_tracks, start=1):
            database.add_track(p_uri, track, position)
        logger.info("Playlist: '%s' updated", name)
    database.mark_removed_tracks()
    database.close()
    logger.info("Sync finished")
def main():
    """Entry point: connect the selected binder, start the sync worker and
    serve the broker REST API (blocking)."""
    global binders, binder, worker
    global database, client, binder_name
    host, port, tenant, user, password, binder_name, binder_creds = parseargs()
    # Bail out early when the requested binder implementation is unknown.
    if not binder_name in binders:
        print "Binder "+binder_name+" not found"
        sys.exit(1)
    binder = binders[binder_name](logger)
    binder.connect(binder_creds)
    database = db.Database('cfy.db')
    client = CloudifyClient(host=host, port=port, trust_all=True,
                            username=user, password=password, tenant=tenant)
    # Background worker that keeps Cloudify state in sync with the DB.
    worker = Syncworker(database, client, logger)
    worker.start()
    # Allow a clean Ctrl-C shutdown via the custom handler.
    signal.signal(signal.SIGINT, signal_handler)
    app.run(host='0.0.0.0', port=BROKER_PORT, threaded=True)
def topics():
    """Render the paginated topics page, ordered by ?order= (default time)."""
    page, per_page, offset = get_page_args(page_parameter='page',
                                           per_page_parameter='per_page')
    order = request.args.get('order') or "time"
    database = db.Database()
    subjects = database.get_topics(order, offset=offset, per_page=per_page)
    pagination = Pagination(
        css_framework='bootstrap4',
        link_size='sm',
        show_single_page=False,
        page=page,
        per_page=per_page,
        total=database.get_num_topics(),
        record_name='Topics',
        format_total=True,
        format_number=True,
    )
    return render_template('topics.html',
                           subjects=subjects,
                           pagination=pagination)
def generate_playlist_files(db_file, tracks_dir, playlist_dir):
    """Write one .m3u file per playlist and delete stale .m3u files.

    Each entry points at ``../tracks/<id>.mp3`` relative to *playlist_dir*.
    (The original docstring described a nonexistent ``use_uris`` parameter.)

    :param db_file: path of the sqlite database.
    :param tracks_dir: NOTE(review): unused in this function — presumably
        kept for interface symmetry with related helpers; confirm before
        removing.
    :param playlist_dir: directory the .m3u files are written into.
    """
    database = db.Database(db_file)
    p_uris = database.get_playlists()
    logger.info("%s playlists", len(p_uris))
    # Track the filenames we write so stale files can be removed afterwards.
    created_files = []
    for p_uri in p_uris:
        t_uris = database.get_tracks(p_uri)  # does not contain ignored tracks
        name, _ = database.get_playlist_info(p_uri)
        filename = valid_filename(name) + ".m3u"
        logger.info("writing playlist %s (%s)", p_uri, filename)
        filename = os.path.join(playlist_dir, filename)
        with open(filename, "w") as f:
            for t_uri in t_uris:
                f.write("../tracks/%s.mp3\n" % (t_id(t_uri)))
        created_files.append(filename)
    # Remove playlist files left over from renamed/deleted playlists.
    for f in os.listdir(playlist_dir):
        f = os.path.join(playlist_dir, f)
        if f.endswith(".m3u") and os.path.isfile(f) and f not in created_files:
            logger.info("removing old file %s", f)
            os.remove(f)
    database.close()
def proxy_list_org():  # http://proxy-list.org
    """Endlessly scrape proxy-list.org pages 1-10 and store found proxies."""
    print("[!] Starting proxy-list.org thread...")
    BASE_URL = "https://proxy-list.org/english/index.php?p="
    # The site renders proxies as base64("ip:port"); split on the colon.
    Re_Pattern_IP = re.compile("(.*):")
    Re_Pattern_PORT = re.compile(":(.*)")
    while True:
        # print("[!] Scraping proxy-list.org...")
        for startingURL_Param in range(1, 11):
            while True:
                try:
                    # If there's an error during the request, keep retrying
                    # until it succeeds.
                    while True:
                        try:
                            HTML_ProxyPage = requests.get(
                                BASE_URL + str(startingURL_Param)).content
                            break
                        except Exception as e:
                            print("An Error occurred: " + str(e))
                    soup = bs(HTML_ProxyPage, "html.parser")
                    for Raw_ProxyInfo in soup.find_all("ul", {"class": None}):
                        # Strip the surrounding JS Proxy('...') call, then
                        # base64-decode to get "ip:port".
                        ip_port = str(
                            base64.b64decode(
                                Raw_ProxyInfo.find("li", {
                                    "class": "proxy"
                                }).text.replace("Proxy('", "").replace("')", "")),
                            "utf-8")
                        IP = re.findall(Re_Pattern_IP, ip_port)[0]
                        PORT = re.findall(Re_Pattern_PORT, ip_port)[0]
                        PROTOCOL = Raw_ProxyInfo.find("li", {
                            "class": "https"
                        }).text
                        # "-" means the protocol column is empty -> skip row.
                        if PROTOCOL != "-":
                            db.Database().add(IP, PORT, PROTOCOL.lower())
                    break
                except Exception as e:
                    # NOTE(review): broad catch retries the page forever on
                    # persistent parse errors — consider a retry limit.
                    print("An error occurred with proxy_list_org: " + str(e))
        sleep(IntervalDelay)
def db_query():
    """List groups and schedule hours; POST may first insert a new group."""
    _db = db.Database()
    if request.method == "POST":
        numero = request.form["numero"]
        semestre = request.form["semestre"]
        year = request.form["year"]
        curso = request.form["curso"]
        profesor = request.form["profesor"]
        hora = request.form["hora"]
        # All fields are required for an insert.
        if (numero != "" and semestre != "" and year != "" and curso != ""
                and profesor != "" and hora != ""):
            _db.insert_grupo(numero, semestre, year, curso, profesor, hora)
    # Always return both listings. The original left them unassigned on a
    # POST that failed validation and crashed with UnboundLocalError.
    return _db.list_grupo(), _db.list_horario()
def threadParse(self):
    """Parse the 22 Reuters SGML files, index them into the database and
    drive the progress popup; fires a desktop notification when done."""
    reader = parser.Parser()
    d = db.Database()
    logging.info("Check Database ... ")
    print "Check Database ... ",
    if d.start():
        print "OK !"
    else:
        print "ERROR !"
    for i in range(0, 22):
        # Files are named reut2-000.sgm .. reut2-021.sgm.
        index = 'reut2-' + '{:03}'.format(i) + '.sgm'
        logging.info("Open File : " + index + " ... ")
        print "Open File : " + index + " ... ",
        try:
            # recover=True lets lxml tolerate the malformed Reuters SGML.
            doc = etree.parse(index, etree.XMLParser(encoding='UTF-8',
                                                     ns_clean=True,
                                                     recover=True))
            print "OK !"
            reader.parse(doc)
        except:
            # NOTE(review): bare except hides the real failure; consider
            # catching (IOError, etree.XMLSyntaxError) and logging it.
            print "ERROR !"
        print
        self.popup.update()
        # 22 files * 4.54 ~= 100% progress.
        self.progress += 4.54
        self.progress_var.set(self.progress)
    self.popup.destroy()
    # Terminal bell to signal completion.
    print('\a')
    print
    print "Parsed !!"
    logging.info("Parsed !!")
    tkn.notify(
        kws={
            "title": "IR System",
            "msg": "Parsing Done",
            "expire_time": 2000,
            "spacing": 20,
            "justify": "left",
            "text_padding": 50,
            "alpha": 0.8
        }
    )
def calculate_student_stats(self, response_matrix=None, return_values=False, store=True): response_matrix = response_matrix or self.get_response_matrix() # Assign to a symbol. robjects.globalenv['response_matrix'] = response_matrix # Use ltm. importr('ltm') robjects.r( 'item_scores <- ltm(response_matrix ~ z1, na.action = NULL)') robjects.r( 'locations <- factor.scores(item_scores, resp.patterns = response_matrix)' ) # Store a row of location coefficients. locations = robjects.r('locations$score.dat["z1"]') # Create a list to store the locations for saving into the database. locations_to_save = [] # Store inside the questions. for i in range(len(self.students)): student = self.students[i] # Cache the discrimination. # R vectors are 1-indexed. student.location = locations.rx(i + 1, 'z1')[0] locations_to_save.append(student.location) if store: # Set up a database object. database_manager = db.Database() database_manager.insert_or_update_from('student_locations', session['id'], locations_to_save)
def getUserNotifications(email):
    """Return the user's pending notification messages and clear them."""
    data = db.Database(filename="IFB299.db")
    User_ID = data.retrieve("users", "Email", email)['User_ID']
    # Generate any overdue-fine notifications before reading them back.
    makeOverdueFineNotifications(User_ID)
    # Map stored notification types to user-facing text. The spelling
    # "Recieved" matches the value stored in the database — do not fix.
    messages = {
        "New Parking Violation": "You have a new parking fine.",
        "New Campus Violation": "You have a new campus fine.",
        "Fine Payment Recieved": "Your fine payment has been recieved",
        "Parking Fine Overdue": "You have an overdue Parking fine.",
        "Campus Fine Overdue": "You have an overdue campus fine.",
    }
    notifications = []
    for alert in data.retrieveMulti("userNotifications", "User_ID", User_ID):
        text = messages.get(alert['Notification_Type'])
        if text is not None:
            notifications.append(text)
    # Notifications are one-shot: delete them once collected.
    data.delete("userNotifications", "User_ID", User_ID)
    data.close()
    return notifications
def _parsePartialUrl(self, url):
    r'''Parse "<endpoint>[;name=value;...]" into a db.DatabaseServer.

    @types: str -> db.DatabaseServer
    @raise MalformedUrl: url cannot be parsed
    '''
    # Split off the optional ";name=value;..." property list.
    tokens = url.split(';', 1)
    if len(tokens) == 2:
        endpointStr, propertiesSet = tokens
    else:
        endpointStr, propertiesSet = tokens[0], None
    if endpointStr:
        server = ParserHelper().parseHostBasedUrl(endpointStr)
    else:
        server = db.DatabaseServer()
    server.vendor = self.getPlatform().vendor
    if propertiesSet:
        # Keep only "name=value" entries. BUG FIX: the original tested
        # `p.find('=') != 1`, which kept strings with no '=' at all
        # (find returns -1) — crashing the split/unpack below — and
        # wrongly dropped valid pairs whose '=' sits at index 1.
        # `!= -1` is the intended "contains '='" check.
        propertiesDefinitions = [p for p in propertiesSet.split(';')
                                 if p.find('=') != -1]
        for definition in propertiesDefinitions:
            name, value = definition.split('=', 1)
            name = name.lower()
            if name == 'servicename' and value:
                server.instance = value
            elif name == 'databasename' and value:
                server.addDatabases(db.Database(value))
    return server
def db_query():
    """Insert a department on POST; search departments by name on GET."""
    _db = db.Database()
    if request.method == "POST":
        _db.insert_department(request.form["dId"],
                              request.form["dName"],
                              request.form["dNumber"],
                              request.form["dPhoneNumber"],
                              request.form["dOffice"])
        print('Department inserted', file=sys.stdout)
        deps = _db.list_departments()
        print('Listing all the departments', file=sys.stdout)
        return deps
    if request.method == "GET":
        # Empty string means "no name filter".
        department_name = request.values.get('dName', '')
        deps = _db.list_department(department_name)
        print('Listing department given info ' + department_name,
              file=sys.stdout)
        return deps