def organization_presenters_create(auth_user, organization):
    with WriteSession() as session:
        with session.no_autoflush:
            org_presenter = Presenter.from_request(request.json)
            # Reject a presenter whose slug already exists on this organization.
            if session.query(Presenter).filter(
                    Presenter.organization == organization,
                    Presenter.slug == org_presenter.slug).first():
                abort(400, {
                    'code': 400,
                    'message': 'That presenter already exists on this organization',
                    'data': {'presenter': org_presenter.to_json()},
                })
            # Reject a presenter whose linked user already has a presenter
            # on this organization.
            if org_presenter.user and session.query(Presenter).filter(
                    Presenter.organization == organization,
                    Presenter.user == org_presenter.user).first():
                abort(400, {
                    'code': 400,
                    'message': 'A presenter with that user already exists on this organization',
                    'data': {'presenter': org_presenter.to_json()},
                })
            org_presenter.organization = organization
            session.add(org_presenter)
            session.commit()
            return organization.to_json(
                with_relationships={'presenters': {'user': None}})['presenters']
# Standard-library imports used by refresh_data(); db, the model classes,
# get_timeslots and CONVENTION_INFO_FNAME are assumed to come from elsewhere
# in this module.
import csv
import datetime
import os


def refresh_data(sched_info_fname, convention_info_fname=None):
    # Keep track of the number of errors and warnings.
    num_errors = 0
    num_warnings = 0

    # Delete records from tables of interest.
    events = Event.query.all()
    for event in events:
        event.rooms = []
        event.presenters = []
        event.timeslots = []
    db.session.commit()
    DataLoadError.query.delete()
    Convention.query.delete()
    Timeslot.query.delete()
    Track.query.delete()
    Event.query.delete()
    Presenter.query.delete()
    Room.query.delete()
    RoomGroup.query.delete()

    # Define the convention.
    if not convention_info_fname:
        script_dir = os.path.dirname(__file__)
        convention_info_fname = os.path.join(script_dir, CONVENTION_INFO_FNAME)
    convention = Convention()
    with open(convention_info_fname, 'rb') as csvfile:
        csvreader = csv.reader(csvfile, delimiter=',', quotechar='"')
        first_row = True
        for row in csvreader:
            if first_row:
                first_row = False
            else:
                convention.name = row[0]
                convention.description = row[1]
                convention.date_format = row[5]
                convention.datetime_format = row[6]
                convention.start_dt = datetime.datetime.strptime(
                    row[2], convention.datetime_format)
                convention.end_dt = datetime.datetime.strptime(
                    row[3], convention.datetime_format)
                convention.timeslot_duration = datetime.timedelta(
                    0, int(row[4]) * 60)  # Minutes converted to seconds.
                convention.url = row[7]
                convention.active = True
                # There is only one row of convention data.
                break
    db.session.add(convention)

    # Commit the data to the database.
    db.session.commit()

    # Create timeslots.
    timeslot_count = int(
        (convention.end_dt - convention.start_dt).total_seconds() /
        convention.timeslot_duration.total_seconds())
    for n in range(timeslot_count):
        timeslot = Timeslot(n)
        timeslot.active = True
        db.session.add(timeslot)

    # Commit the data to the database.
    db.session.commit()

    # Add tracks.
    # The track name and the email address for each CREM track.
    track_infos = (
        ('Literature', '*****@*****.**'),
        ('Tech', '*****@*****.**'),
        ('After Dark', '*****@*****.**'),
        ('Action Adventure', '*****@*****.**'),
        ('Costuming', '*****@*****.**'),
        ('Comics', '*****@*****.**'),
        ('Gaming', '*****@*****.**'),
        ('DIY', '*****@*****.**'),
        ('Food', '*****@*****.**'),
        ('Science', '*****@*****.**'),
        ('Media', '*****@*****.**'),
        ('Mayhem', '*****@*****.**'),
        ('Anime', '*****@*****.**'),
        ('Penguicon', '*****@*****.**'),
        ('Life', '*****@*****.**'),
    )
    # Create tracks and save database objects in dictionary for later reference.
    tracks = {}
    for track_info in track_infos:
        track = Track(track_info[0], track_info[1])
        tracks[track_info[0]] = track
        db.session.add(track)

    # Commit the data to the database.
    db.session.commit()

    # Add room groups.
    room_group_names = (
        'Algonquin',
        'Charlevoix',
        'Lobby',
    )
    for room_group_name in room_group_names:
        room_group = RoomGroup(room_group_name)
        db.session.add(room_group)

    # Commit the data to the database.
    db.session.commit()

    # Add rooms.
    # For each room, the name, square feet, capacity and room group it belongs to.
    room_infos = (
        ('Algonquin A', 1207, 100, 'Algonquin'),
        ('Algonquin B', 1207, 100, 'Algonquin'),
        ('Algonquin C', 1207, 100, 'Algonquin'),
        ('Algonquin D', 1207, 100, 'Algonquin'),
        ('Algonquin Foyer', 3000, 450, None),
        ('Charlevoix A', 756, 64, 'Charlevoix'),
        ('Charlevoix B', 756, 64, 'Charlevoix'),
        ('Charlevoix C', 756, 64, 'Charlevoix'),
        ('Portage Auditorium', 1439, 68, None),
        ('Windover', 1475, 40, None),
        ('TC Linguinis', 1930, 40, None),
        ('Baldwin Board Room', 431, 12, None),
        ('Board of Directors', 511, 15, None),
        ('Board of Governors', 391, 5, None),
        ('Board of Regents', 439, 15, None),
        ('Board of Trustees', 534, 40, None),
        ('Hamlin', 360, 25, None),
        ('Montcalm', 665, 50, None),
        ('Nicolet', 667, 50, None),
        ('Game Table A', 20, 10, 'Lobby'),
        ('Game Table B', 20, 10, 'Lobby'),
        ('Game Table C', 20, 10, 'Lobby'),
        ('Game Table D', 20, 10, 'Lobby'),
    )
    # Create rooms and save database objects in dictionary for later reference.
    rooms = {}
    for room_info in room_infos:
        room = Room()
        room.room_name = room_info[0]
        room.room_sq_ft = room_info[1]
        room.room_capacity = room_info[2]
        if room_info[3]:
            room.room_group = db.session.query(RoomGroup).\
                filter(RoomGroup.room_group_name == room_info[3]).first()
        rooms[room.room_name] = room
        db.session.add(room)

    # Commit the data to the database.
    db.session.commit()

    # Keep track of presenters.
    presenters = {}

    # Read events from file.
    with open(sched_info_fname, 'rb') as csvfile:
        csvreader = csv.reader(csvfile, delimiter=',', quotechar='"')
        first_row = True
        for row in csvreader:
            if first_row:
                first_row = False
                continue
            if len(row) < 11:
                load_error = DataLoadError()
                load_error.error_level = 'Error'
                load_error.destination_table = 'event'
                load_error.line_num = csvreader.line_num
                load_error.error_msg = 'Not enough columns in row %d' % csvreader.line_num
                load_error.error_dt = datetime.datetime.now()
                db.session.add(load_error)
                num_errors += 1
                continue

            trackname = row[5].split(',')[0].strip()
            if trackname not in tracks:
                # There is no corresponding track, so add it and record a
                # warning, then continue loading the event with the new track.
                email = '-'.join(trackname.lower().split()) + '*****@*****.**'
                track = Track(trackname, email)
                tracks[trackname] = track
                db.session.add(track)
                load_error = DataLoadError()
                load_error.error_level = 'Warning'
                load_error.destination_table = 'event'
                load_error.line_num = csvreader.line_num
                load_error.error_msg = '%s is not a defined track; adding it' % trackname
                load_error.error_dt = datetime.datetime.now()
                db.session.add(load_error)
                num_warnings += 1

            event = Event()
            event.title = row[6]
            event.description = row[8]
            event.track = tracks[trackname]

            # Add timeslots and duration.
            try:
                timeslots = get_timeslots(row[0], row[1], row[9], convention, Timeslot)
                event.timeslots = timeslots
                event.duration = len(timeslots)
            except Exception as e:
                load_error = DataLoadError()
                load_error.error_level = 'Error'
                load_error.destination_table = 'event'
                load_error.line_num = csvreader.line_num
                load_error.error_msg = str(e)
                load_error.error_dt = datetime.datetime.now()
                db.session.add(load_error)
                num_errors += 1
                continue

            event.facilityRequest = row[10]
            event.convention = convention

            # Add room to the event.
            if row[4].strip():
                if row[4] not in rooms:
                    # This is not a predefined room, so add it.
                    load_error = DataLoadError()
                    load_error.error_level = 'Warning'
                    load_error.destination_table = 'event'
                    load_error.line_num = csvreader.line_num
                    load_error.error_msg = '%s is not a pre-defined room; adding this room' % row[4]
                    load_error.error_dt = datetime.datetime.now()
                    num_warnings += 1
                    db.session.add(load_error)

                    room = Room()
                    room.room_name = row[4]
                    room.room_sq_ft = 0
                    room.room_capacity = 0
                    rooms[row[4]] = room
                    db.session.add(room)
                else:
                    room = rooms[row[4]]
                event.rooms.append(room)

            # Add presenters.
            if row[7].strip():
                presenter_names = row[7].split(',')
                presenter_names = [s.strip() for s in presenter_names]
                for presenter_name in presenter_names:
                    if presenter_name in presenters:
                        presenter = presenters[presenter_name]
                    else:
                        last_name = presenter_name.split(' ')[-1].strip()
                        first_name = ' '.join(presenter_name.split(' ')[0:-1]).strip()
                        presenter = Presenter(first_name, last_name)
                        presenters[presenter_name] = presenter
                        db.session.add(presenter)
                    event.presenters.append(presenter)

            db.session.add(event)

    # Commit the data to the database.
    db.session.commit()

    # Return the number of errors and warnings.
    return num_errors, num_warnings
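
# A minimal usage sketch (an assumption, not part of the original module):
# invoking refresh_data() as a command-line script with the schedule CSV path
# and an optional convention CSV path, then reporting the error and warning
# counts it returns. It assumes the Flask app and db session are already
# configured when this module is imported.
if __name__ == '__main__':
    import sys
    if len(sys.argv) < 2:
        print('usage: python refresh_data.py <schedule_csv> [<convention_csv>]')
        sys.exit(1)
    convention_csv = sys.argv[2] if len(sys.argv) > 2 else None
    errors, warnings = refresh_data(sys.argv[1], convention_csv)
    print('Data load finished with %d error(s) and %d warning(s).' % (errors, warnings))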
# Test presenter data: first name, last name, email address and phone number.
presenter_infos = (
    ('Sarah', 'Elkins', '*****@*****.**', '301-613-4393'),
    ('Jen', 'Talley', '*****@*****.**', '7347319771'),
    ('cassinator', '', '*****@*****.**', ''),
    ('mwlauthor', '', '', ''),
    ('Leah', 'Rapp', '*****@*****.**', ''),
    ('Bob', 'Trembley', '*****@*****.**', ''),
    ('Kent', 'Newland', '*****@*****.**', ''),
    ('Angela', 'Rush', '*****@*****.**', '248-505-1551'),
    ('Brittany', 'Burke', '*****@*****.**', '248-259-5122'),
    ('Stu', 'Chisholm', '*****@*****.**', '(586) 773-6182'),
    ('Joshua', 'DeBonis', '*****@*****.**', '203.470.7264'),
    ('Nikita', 'Mikros', '*****@*****.**', ''),
)
for presenter_info in presenter_infos:
    presenter = Presenter(presenter_info[0], presenter_info[1])
    presenter.email = presenter_info[2]
    presenter.phone = presenter_info[3]
    db.session.add(presenter)

# Commit the test data to the database.
db.session.commit()

# Assign a random set of presenters to each event.
events = Event.query.all()
presenters = Presenter.query.all()
for event in events:
    # Randomly select from 0 to 4 presenters for this event.
    random_presenters = random.sample(presenters, random.randrange(5))
    event.presenters = random_presenters

# Commit the presenter assignments to the database.
db.session.commit()
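
# A small self-contained illustration (an assumption, not part of the original
# fixture code) of the sampling pattern used above: random.randrange(5) picks a
# count from 0 to 4, and random.sample() then draws that many distinct items
# without replacement.
import random

people = ['Ada', 'Grace', 'Alan', 'Edsger', 'Barbara', 'Donald']
chosen = random.sample(people, random.randrange(5))
print('Assigned %d presenter(s): %s' % (len(chosen), chosen))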