def test_stream_zipped_bundle(self, app, fake_auth): with mock_legacy_note_attachment(app): stream = get_zip_stream_for_sid('9000000000')['stream'] body = b'' for chunk in stream: body += chunk zipfile = ZipFile(io.BytesIO(body), 'r') contents = {} for name in zipfile.namelist(): contents[name] = zipfile.read(name) assert len(contents) == 2 assert contents[ 'dog_eaten_homework.pdf'] == b'When in the course of human events, it becomes necessarf arf woof woof woof' today = localize_datetime(utc_now()).strftime('%Y%m%d') csv_rows = contents[ f"advising_notes_wolfgang_pauli-o'rourke_{today}.csv"].decode( 'utf-8').strip().split('\r\n') assert len(csv_rows) == 4 assert csv_rows[ 0] == 'date_created,student_sid,student_name,author_uid,author_csid,author_name,subject,topics,attachments,\ body,late_change_request_action,late_change_request_status,late_change_request_term,late_change_request_course' assert csv_rows[1] ==\ "2017-11-02,9000000000,Wolfgang Pauli-O'Rourke,,700600500,,,,dog_eaten_homework.pdf,I am confounded by this confounding student,,,," assert csv_rows[ 2] == "2017-11-02,9000000000,Wolfgang Pauli-O'Rourke,,600500400,,,Ne Scéaw,,Is this student even on campus?,,,," assert csv_rows[ 3] == "2020-12-05,9000000000,Wolfgang Pauli-O'Rourke,,,,,,,,Late Grading Basis Change,In Error,Fall 2020,24460 PSYCH 110 - \
def _add_attachments_to_notes(attachments, template_attachment_ids, author_uid, note_ids):
    """Attach files to every note in a freshly created batch.

    :param attachments: dicts with 'name' and 'byte_stream' for files to upload to S3
    :param template_attachment_ids: ids of existing NoteTemplateAttachment rows whose
        S3 paths should be reused (no re-upload)
    :param author_uid: UID recorded as 'uploaded_by_uid' on every attachment row
    :param note_ids: ids of the notes that receive each attachment
    """
    # One shared timestamp for all rows created by this call.
    now = utc_now().strftime('%Y-%m-%d %H:%M:%S')

    def _add_attachment(_s3_path):
        # Bulk-insert one note_attachments row per note. Chunked so each JSON
        # payload handed to json_populate_recordset stays bounded in size.
        count_per_chunk = 10000
        for chunk in range(0, len(note_ids), count_per_chunk):
            query = """
                INSERT INTO note_attachments (created_at, note_id, path_to_attachment, uploaded_by_uid)
                SELECT created_at, note_id, path_to_attachment, uploaded_by_uid
                FROM json_populate_recordset(null::note_attachments, :json_dumps);
            """
            note_ids_subset = note_ids[chunk:chunk + count_per_chunk]
            data = [
                {
                    'created_at': now,
                    'note_id': note_id,
                    'path_to_attachment': _s3_path,
                    'uploaded_by_uid': author_uid,
                } for note_id in note_ids_subset
            ]
            db.session.execute(query, {'json_dumps': json.dumps(data)})

    # New files: upload to S3 once, then point every note at the resulting path.
    for byte_stream_bundle in attachments:
        s3_path = put_attachment_to_s3(name=byte_stream_bundle['name'], byte_stream=byte_stream_bundle['byte_stream'])
        _add_attachment(s3_path)
    # Template attachments are already in S3; reuse their stored paths.
    if template_attachment_ids:
        for template_attachment in NoteTemplateAttachment.get_attachments(template_attachment_ids):
            _add_attachment(template_attachment.path_to_attachment)
def download_notes_and_attachments(sid):
    """Stream a zip bundle of a student's advising notes (CSV) plus attachments.

    :param sid: student SID
    :return: Flask Response streaming a zip, or 404 if student/notes not found
    """
    students = data_loch.get_basic_student_data([sid])
    student = students[0] if students else None
    notes = get_advising_notes(sid) if student else None
    if not student or not notes:
        return Response('Not found', status=404)
    filename = '_'.join([
        'advising_notes',
        student.get('first_name', '').lower(),
        student.get('last_name', '').lower(),
        localize_datetime(utc_now()).strftime('%Y%m%d'),
    ])

    def generator():
        # stream_with_context keeps the request context alive while chunks flow.
        for chunk in get_zip_stream(filename=filename, notes=notes, student=student):
            yield chunk

    response = Response(stream_with_context(generator()), mimetype='application/zip')
    # Bug fix: the computed filename was previously dropped in favor of a literal
    # '(unknown)' placeholder; use it so the browser saves a meaningful name.
    # Percent-encode so non-ASCII student names survive the header.
    encoding_safe_filename = urllib.parse.quote(f'{filename}.zip'.encode('utf8'))
    response.headers['Content-Disposition'] = f'attachment; filename={encoding_safe_filename}'
    return response
def delete_and_block(cls, uid):
    """Soft-delete the user with the given UID and block any future login.

    :return: the updated user record
    """
    timestamp = utc_now()
    user = cls.query.filter_by(uid=uid).first()
    user.deleted_at = timestamp
    user.is_blocked = True
    std_commit()
    return user
def _assert_zip_download(self, app, client):
    """Hit the zip-download endpoint and verify status, headers and payload."""
    today = localize_datetime(utc_now()).strftime('%Y%m%d')
    with mock_legacy_note_attachment(app):
        resp = client.get('/api/notes/download_for_sid/9000000000')
        expected_disposition = f"attachment; filename=advising_notes_wolfgang_pauli-o'rourke_{today}.zip"
        assert resp.status_code == 200
        assert resp.headers['Content-Type'] == 'application/zip'
        assert resp.headers['Content-Disposition'] == expected_disposition
        assert resp.data
def _create_notes(
    author_id,
    author_uid,
    author_name,
    author_role,
    author_dept_codes,
    body,
    is_private,
    sids,
    subject,
):
    """Bulk-create one note per SID and mark each as read by its author.

    :return: dict mapping each SID to the id of the note created for it
    """
    ids_by_sid = {}
    # Single timestamp shared by every row in this batch.
    now = utc_now().strftime('%Y-%m-%dT%H:%M:%S+00')
    # The syntax of the following is what Postgres expects in json_populate_recordset(...)
    joined_author_dept_codes = '{' + ','.join(author_dept_codes) + '}'
    # Chunked so each JSON payload handed to Postgres stays bounded in size.
    count_per_chunk = 10000
    for chunk in range(0, len(sids), count_per_chunk):
        sids_subset = sids[chunk:chunk + count_per_chunk]
        query = """
            INSERT INTO notes (author_dept_codes, author_name, author_role, author_uid, body, is_private, sid, subject, created_at, updated_at)
            SELECT author_dept_codes, author_name, author_role, author_uid, body, is_private, sid, subject, created_at, updated_at
            FROM json_populate_recordset(null::notes, :json_dumps)
            RETURNING id, sid;
        """
        data = [
            {
                'author_uid': author_uid,
                'author_name': author_name,
                'author_role': author_role,
                'author_dept_codes': joined_author_dept_codes,
                'sid': sid,
                'subject': subject,
                'body': body,
                'is_private': is_private,
                'created_at': now,
                'updated_at': now,
            } for sid in sids_subset
        ]
        # Collect generated note ids keyed by SID from the RETURNING clause.
        results_of_chunk_query = {}
        for row in db.session.execute(query, {'json_dumps': json.dumps(data)}):
            sid = row['sid']
            results_of_chunk_query[sid] = row['id']
        # Yes, the note author has read the note.
        notes_read_query = """
            INSERT INTO notes_read (note_id, viewer_id, created_at)
            SELECT note_id, viewer_id, created_at
            FROM json_populate_recordset(null::notes_read, :json_dumps)
        """
        notes_read_data = [
            {
                'note_id': note_id,
                'viewer_id': author_id,
                'created_at': now,
            } for note_id in results_of_chunk_query.values()
        ]
        db.session.execute(notes_read_query, {'json_dumps': json.dumps(notes_read_data)})
        ids_by_sid.update(results_of_chunk_query)
    return ids_by_sid
def _delete_attachments(cls, note_template, delete_attachment_ids):
    """Soft-delete template attachments whose ids are in delete_attachment_ids.

    Bumps the template's updated_at only if at least one attachment was removed.
    """
    now = utc_now()
    doomed = [a for a in note_template.attachments if a.id in delete_attachment_ids]
    for attachment in doomed:
        attachment.deleted_at = now
    if doomed:
        note_template.updated_at = now
def _add_attachment(cls, note, attachment):
    """Persist a new attachment on the note and refresh its updated_at stamp."""
    created = NoteAttachment.create_attachment(
        note=note,
        name=attachment['name'],
        byte_stream=attachment['byte_stream'],
        uploaded_by=note.author_uid,
    )
    note.attachments.append(created)
    note.updated_at = utc_now()
def delete(cls, appointment_id):
    """Soft-delete an appointment and its topics, then refresh the search index.

    No-op if no appointment matches the given id.
    """
    appointment = cls.find_by_id(appointment_id)
    if not appointment:
        return
    deleted_at = utc_now()
    appointment.deleted_at = deleted_at
    for topic in appointment.topics:
        topic.deleted_at = deleted_at
    std_commit()
    cls.refresh_search_index()
def _add_attachment(cls, note_template, attachment, uploaded_by_uid):
    """Persist a new attachment on the note template and bump its updated_at."""
    created = NoteTemplateAttachment.create(
        note_template_id=note_template.id,
        name=attachment['name'],
        byte_stream=attachment['byte_stream'],
        uploaded_by=uploaded_by_uid,
    )
    note_template.attachments.append(created)
    note_template.updated_at = utc_now()
def delete(cls, note_template_id):
    """Delete a note template: soft-delete attachments, hard-delete topics.

    No-op if no template matches the given id.
    """
    note_template = cls.find_by_id(note_template_id)
    if not note_template:
        return
    deleted_at = utc_now()
    note_template.deleted_at = deleted_at
    for attachment in note_template.attachments:
        attachment.deleted_at = deleted_at
    for topic in note_template.topics:
        db.session.delete(topic)
    std_commit()
def _create_users():
    """Seed mock test users into the json_cache (CalNet) and authorized_users table.

    Idempotent: existing BOAC users are left untouched, so this script can be
    run repeatedly. Finishes by exercising AuthorizedUser.delete on two
    designated test UIDs.
    """
    for test_user in _test_users:
        # This script can be run more than once. Do not create user if s/he exists in BOAC db.
        uid = test_user['uid']
        # Mock CSIDs and names are random unless we need them to correspond to test data elsewhere.
        csid = test_user['csid'] or datetime.now().strftime('%H%M%S%f')
        first_name = test_user.get('firstName', ''.join(random.choices(string.ascii_uppercase, k=6)))
        last_name = test_user.get('lastName', ''.join(random.choices(string.ascii_uppercase, k=6)))
        # Put mock CalNet data in our json_cache for all users EXCEPT the test "no_calnet_record" user.
        if uid != no_calnet_record_for_uid:
            calnet_feed = {
                'uid': uid,
                'csid': csid,
                'firstName': first_name,
                'lastName': last_name,
                'name': f'{first_name} {last_name}',
            }
            if 'calnetDeptCodes' in test_user:
                calnet_feed['departments'] = []
                for dept_code in test_user['calnetDeptCodes']:
                    calnet_feed['departments'].append({
                        'code': dept_code,
                        'name': BERKELEY_DEPT_CODE_TO_NAME.get(dept_code),
                    })
            if 'title' in test_user:
                calnet_feed['title'] = test_user['title']
            insert_in_json_cache(f'calnet_user_for_uid_{uid}', calnet_feed)
        # Add user to authorized_users table if not already present.
        user = AuthorizedUser.find_by_uid(uid=uid)
        if not user:
            user = AuthorizedUser(
                uid=uid,
                created_by='2040',
                is_admin=test_user['isAdmin'],
                in_demo_mode=test_user['inDemoMode'],
                can_access_advising_data=test_user['canAccessAdvisingData'],
                can_access_canvas_data=test_user['canAccessCanvasData'],
                degree_progress_permission=test_user.get('degreeProgressPermission'),
                search_history=test_user.get('searchHistory', []),
            )
            if test_user.get('deleted'):
                user.deleted_at = utc_now()
            db.session.add(user)
    # Exercise the soft-delete path for the designated "delete me" test users.
    AuthorizedUser.delete(delete_this_admin_uid)
    AuthorizedUser.delete(delete_this_uid)
    std_commit(allow_test_environment=True)
def delete(cls, note_id):
    """Soft-delete a note along with its attachments and topics; refresh search.

    No-op if no note matches the given id.
    """
    note = cls.find_by_id(note_id)
    if not note:
        return
    deleted_at = utc_now()
    note.deleted_at = deleted_at
    for attachment in note.attachments:
        attachment.deleted_at = deleted_at
    for topic in note.topics:
        topic.deleted_at = deleted_at
    std_commit()
    cls.refresh_search_index()
def _update_note_topics(cls, note, topics):
    """Reconcile the note's topics with the given list.

    Topics no longer present are soft-deleted; new ones are created. The note's
    updated_at is bumped only if something actually changed.
    """
    now = utc_now()
    desired = set(topics)
    current = {note_topic.topic for note_topic in NoteTopic.find_by_note_id(note.id)}
    changed = False
    for topic in current - desired:
        doomed = next((t for t in note.topics if t.topic == topic), None)
        if doomed:
            doomed.deleted_at = now
            changed = True
    for topic in desired - current:
        note.topics.append(NoteTopic.create(note, topic, note.author_uid))
        changed = True
    if changed:
        note.updated_at = now
def _update_appointment_topics(appointment, topics, updated_by):
    """Reconcile the appointment's topics with the given list.

    Incoming topic names are normalized (whitespace-vacuumed and titleized)
    before comparison. Removed topics are soft-deleted; new ones are created.
    updated_at/updated_by are set only if something actually changed.
    """
    now = utc_now()
    desired = {titleize(vacuum_whitespace(topic)) for topic in topics}
    current = {at.topic for at in AppointmentTopic.find_by_appointment_id(appointment.id)}
    changed = False
    for topic in current - desired:
        doomed = next((t for t in appointment.topics if t.topic == topic), None)
        if doomed:
            doomed.deleted_at = now
            changed = True
    for topic in desired - current:
        appointment.topics.append(AppointmentTopic.create(appointment, topic))
        changed = True
    if changed:
        appointment.updated_at = now
        appointment.updated_by = updated_by
def update(
    self,
    updated_by,
    details=None,
    scheduled_time=None,
    student_contact_info=None,
    student_contact_type=None,
    topics=(),
):
    """Update appointment fields and topics, then commit and refresh.

    updated_at/updated_by are stamped only when the details text changes.
    """
    details_changed = details != self.details
    if details_changed:
        self.updated_at = utc_now()
        self.updated_by = updated_by
    self.details = details
    self.scheduled_time = scheduled_time
    self.student_contact_info = student_contact_info
    self.student_contact_type = student_contact_type
    _update_appointment_topics(self, topics, updated_by)
    std_commit()
    db.session.refresh(self)
    self.refresh_search_index()
def get_today_scheduled_appointments(dept_code):
    """Return today's scheduled appointments and openings for a department.

    :raises ResourceNotFoundError: unknown department code
    :raises ForbiddenRequestError: caller lacks scheduler privilege for the dept
    """
    dept_code = dept_code.upper()
    if dept_code not in BERKELEY_DEPT_CODE_TO_NAME:
        raise ResourceNotFoundError(f'Unrecognized department code: {dept_code}')
    is_authorized = current_user.is_admin or dept_code in _dept_codes_with_scheduler_privilege()
    if not is_authorized:
        raise ForbiddenRequestError(f'You are unauthorized to manage {dept_code} appointments.')
    local_today = localize_datetime(utc_now())
    advisor_uid = request.args.get('advisorUid')
    scheduled_for_today = Appointment.get_scheduled(dept_code, local_today, advisor_uid)
    appointments = [a.to_api_json(current_user.get_id()) for a in scheduled_for_today]
    openings = AppointmentAvailability.get_openings(dept_code, local_today, appointments)
    _put_student_profile_per_appointment(appointments)
    return tolerant_jsonify({
        'appointments': appointments,
        'openings': openings,
    })
def _update_note_template_topics(cls, note_template, topics):
    """Reconcile the template's topics with the given list.

    Incoming names are normalized (whitespace-vacuumed and titleized). Removed
    topics are hard-deleted via NoteTemplateTopic.delete; new ones are created.
    updated_at is bumped only if something actually changed.
    """
    now = utc_now()
    desired = {titleize(vacuum_whitespace(topic)) for topic in topics}
    current = {t.topic for t in NoteTemplateTopic.find_by_note_template_id(note_template.id)}
    changed = False
    for topic in current - desired:
        doomed = next((t for t in note_template.topics if t.topic == topic), None)
        if doomed:
            NoteTemplateTopic.delete(doomed.id)
            changed = True
    for topic in desired - current:
        note_template.topics.append(NoteTemplateTopic.create(note_template, topic))
        changed = True
    if changed:
        note_template.updated_at = now
def _to_api_json(alert):
    """Serialize an alert row to its API dict representation.

    The first four characters of alert.key are tested against the SIS term-id
    pattern to derive a human-readable term name.
    """
    # Pattern matches a 4-char SIS term id, e.g. '2218'.
    term_id_match = re.search(r'^2[012]\d[0258]', alert.key[0:4])
    active_until = alert.deleted_at or utc_now()
    active_hours = round((active_until - alert.created_at).total_seconds() / 3600)
    return {
        'sid': alert.sid,
        'term': term_name_for_sis_id(term_id_match.string) if term_id_match else None,
        'key': alert.key,
        'type': alert.alert_type,
        'is_active': not alert.deleted_at,
        'active_duration_hours': active_hours,
        'created_at': alert.created_at,
        'deleted_at': alert.deleted_at,
    }
def delete(cls, topic_id):
    """Soft-delete the topic, if it exists and is not already deleted."""
    topic = cls.query.filter_by(id=topic_id, deleted_at=None).first()
    if topic:
        topic.deleted_at = utc_now()
        std_commit()
def delete(cls, uid):
    """Soft-delete the user with the given UID.

    :return: the updated user record
    """
    timestamp = utc_now()
    user = cls.query.filter_by(uid=uid).first()
    user.deleted_at = timestamp
    std_commit()
    return user
def delete(cls, template_id):
    """Soft-delete the template with the given id."""
    template = cls.query.filter_by(id=template_id).first()
    template.deleted_at = utc_now()
    std_commit()
def get_zip_stream_for_sid(sid):
    """Build a streaming zip of a student's advising notes plus attachments.

    The zip contains one CSV of all notes (legacy and BOA) and every attachment
    file, with duplicate attachment filenames de-duplicated via ' (n)' suffixes.

    :param sid: student SID
    :return: dict with 'filename' and 'stream' keys, or None if the student has no notes
    """
    z = zipstream.ZipFile(mode='w', compression=zipstream.ZIP_DEFLATED)
    notes = get_advising_notes(sid)
    if not notes:
        return None
    filename = 'advising_notes'
    student_data = data_loch.get_basic_student_data([sid])
    if student_data:
        student_row = student_data[0]
        student_name = join_if_present(' ', [student_row.get('first_name'), student_row.get('last_name')])
        filename = '_'.join([
            filename,
            student_row.get('first_name', '').lower(),
            student_row.get('last_name', '').lower(),
        ])
    else:
        student_name = ''
    filename = '_'.join([filename, localize_datetime(utc_now()).strftime('%Y%m%d')])
    # Legacy notes may carry an author SID but no name; look those names up in CalNet.
    supplemental_calnet_advisor_feeds = get_calnet_users_for_csids(
        app,
        list(set([note['author']['sid'] for note in notes if note['author']['sid'] and not note['author']['name']])),
    )
    app_timezone = pytz.timezone(app.config['TIMEZONE'])

    def iter_csv():
        def csv_line(_list):
            # Bug fix: close() used to sit after the return statement and never
            # ran; close the buffer before returning the encoded row.
            csv_output = io.StringIO()
            csv.writer(csv_output).writerow(_list)
            encoded = csv_output.getvalue().encode('utf-8')
            csv_output.close()
            return encoded

        yield csv_line([
            'date_created',
            'student_sid',
            'student_name',
            'author_uid',
            'author_csid',
            'author_name',
            'subject',
            'topics',
            'attachments',
            'body',
            'late_change_request_action',
            'late_change_request_status',
            'late_change_request_term',
            'late_change_request_course',
        ])
        for note in notes:
            calnet_author = supplemental_calnet_advisor_feeds.get(note['author']['sid'])
            if calnet_author:
                calnet_author_name =\
                    calnet_author.get('name') or join_if_present(' ', [calnet_author.get('firstName'), calnet_author.get('lastName')])
                calnet_author_uid = calnet_author.get('uid')
            else:
                calnet_author_name = None
                calnet_author_uid = None
            # strptime expects a timestamp without timezone; ancient date-only legacy notes get a bogus time appended.
            timestamp_created = f"{note['createdAt']}T12:00:00" if len(note['createdAt']) == 10 else note['createdAt'][:19]
            datetime_created = pytz.utc.localize(datetime.strptime(timestamp_created, '%Y-%m-%dT%H:%M:%S'))
            date_local = datetime_created.astimezone(app_timezone).strftime('%Y-%m-%d')
            e_form = note.get('eForm') or {}
            yield csv_line([
                date_local,
                sid,
                student_name,
                (note['author']['uid'] or calnet_author_uid),
                note['author']['sid'],
                (note['author']['name'] or calnet_author_name),
                note['subject'],
                '; '.join([t for t in note['topics'] or []]),
                '; '.join([a['displayName'] for a in note['attachments'] or []]),
                note['body'],
                e_form.get('action'),
                e_form.get('status'),
                term_name_for_sis_id(e_form.get('term')),
                f"{e_form['sectionId']} {e_form['courseName']} - {e_form['courseTitle']} {e_form['section']}" if e_form.get('sectionId') else None,
            ])

    # Bug fix: the CSV, zip, and dedupe-set entries all used a literal '(unknown)'
    # placeholder instead of the computed filename.
    z.write_iter(f'{filename}.csv', iter_csv())
    all_attachment_filenames = set()
    all_attachment_filenames.add(f'{filename}.csv')
    for note in notes:
        for attachment in note['attachments'] or []:
            # Legacy attachment ids are non-numeric strings; BOA ids are ints.
            is_legacy_attachment = not is_int(attachment['id'])
            id_ = attachment['id'] if is_legacy_attachment else int(attachment['id'])
            stream_data = get_legacy_attachment_stream(id_) if is_legacy_attachment else get_boa_attachment_stream(id_)
            if stream_data:
                attachment_filename = stream_data['filename']
                basename, extension = path.splitext(attachment_filename)
                # De-duplicate filenames within the zip: 'a.pdf', 'a (1).pdf', ...
                suffix = 1
                while attachment_filename in all_attachment_filenames:
                    attachment_filename = f'{basename} ({suffix}){extension}'
                    suffix += 1
                all_attachment_filenames.add(attachment_filename)
                z.write_iter(attachment_filename, stream_data['stream'])
    return {
        'filename': f'{filename}.zip',
        'stream': z,
    }