def _get_required_field(req, superkey, key): try: field = req.pop(key) except KeyError: raise openreview.OpenReviewException( f"Bad request: required field missing in {superkey}: {key}") return field
def search_profiles(confirmedEmails=None, ids=None, term=None):
    """Mock of the OpenReview profile search, backed by local JSON fixtures.

    NOTE(review): relies on an enclosing-scope ``version`` variable
    (presumably set by the test fixture) to pick the fixture file; ``term``
    is accepted for signature compatibility but unused.

    :return: a dict keyed by email when ``confirmedEmails`` is given,
        a list of profiles when ``ids`` is given, otherwise an empty dict.
    """
    if version == 1:
        with open('tests/data/expertiseServiceData.json') as json_file:
            data = json.load(json_file)
    elif version == 2:
        with open('tests/data/api2Data.json') as json_file:
            data = json.load(json_file)
    else:
        raise openreview.OpenReviewException('Version number not supported')

    profiles_dict_emails = {}
    profiles_dict_tilde = {}
    for profile_json in data['profiles']:
        profile = openreview.Profile.from_json(profile_json)
        # Index by first listed email (when any are present) and by tilde id
        if profile.content.get('emails') and len(
                profile.content.get('emails')):
            profiles_dict_emails[profile.content['emails'][0]] = profile
        profiles_dict_tilde[profile.id] = profile

    # Fix: previously return_value was never initialized, so calling this
    # with neither confirmedEmails nor ids raised UnboundLocalError.
    return_value = {}
    if confirmedEmails:
        return_value = {}
        for email in confirmedEmails:
            if profiles_dict_emails.get(email, False):
                return_value[email] = profiles_dict_emails[email]
    if ids:
        return_value = [profiles_dict_tilde[tilde_id] for tilde_id in ids]
    return return_value
def __init__(self, request):
    """Parse and validate an expertise API request payload.

    Pops the required fields (name, entityA, entityB) and the optional
    model object out of *request*; any field left over afterwards is
    rejected as unexpected.
    """
    self.entityA = {}
    self.entityB = {}
    self.model = {}
    root_key = 'request'

    # Required fields, consumed in order from the payload
    self.name = _get_required_field(request, root_key, 'name')
    self._load_entity('entityA',
                      _get_required_field(request, root_key, 'entityA'),
                      self.entityA)
    self._load_entity('entityB',
                      _get_required_field(request, root_key, 'entityB'),
                      self.entityB)

    # Optional model object
    self.model = request.pop('model', {})

    # Anything remaining in the payload is an error
    leftover = list(request.keys())
    if leftover:
        raise openreview.OpenReviewException(
            f"Bad request: unexpected fields in {root_key}: {leftover}")
def _load_entity(self, entity_id, source_entity, target_entity):
    '''Load information from an entity into the config'''

    # Helper: pop a required key from this entity, reporting entity_id
    # (e.g. 'entityA') in the error message.
    def _get_from_entity(key):
        return _get_required_field(source_entity, entity_id, key)

    # NOTE: 'type' shadows the builtin; kept as-is to preserve the code.
    type = _get_from_entity('type')
    target_entity['type'] = type
    # Handle type group
    if type == 'Group':
        if 'memberOf' in source_entity.keys():
            target_entity['memberOf'] = _get_from_entity('memberOf')
            # Check for optional expertise field
            if 'expertise' in source_entity.keys():
                target_entity['expertise'] = source_entity.pop('expertise')
        else:
            # A Group entity must name the group its members belong to
            raise openreview.OpenReviewException(
                f"Bad request: no valid {type} properties in {entity_id}")
    # Handle type note
    elif type == 'Note':
        # Exactly one of 'invitation' / 'id' identifies the note
        if 'invitation' in source_entity.keys(
        ) and 'id' in source_entity.keys():
            raise openreview.OpenReviewException(
                f"Bad request: only provide a single id or single invitation in {entity_id}"
            )
        if 'invitation' in source_entity.keys():
            target_entity['invitation'] = _get_from_entity('invitation')
        elif 'id' in source_entity.keys():
            target_entity['id'] = _get_from_entity('id')
        else:
            raise openreview.OpenReviewException(
                f"Bad request: no valid {type} properties in {entity_id}")
    else:
        raise openreview.OpenReviewException(
            f"Bad request: invalid type in {entity_id}")
    # Check for extra entity fields — every valid key was popped above, so
    # anything remaining is unexpected.
    if len(source_entity.keys()) > 0:
        raise openreview.OpenReviewException(
            f"Bad request: unexpected fields in {entity_id}: {list(source_entity.keys())}"
        )
def delete_job():
    """
    Retrieves the config of a job to be deleted, and removes the job by deleting the job directory.

    :param token: Authorization from a logged in user, which defines the set of accessible data
    :type token: str

    :param job_id: The ID of a submitted job
    :type job_id: str
    """
    openreview_client = get_client()
    user_id = get_user_id(openreview_client)
    if not user_id:
        flask.current_app.logger.error('No Authorization token in headers')
        return flask.jsonify(
            format_error(
                403, 'Forbidden: No Authorization token in headers')), 403

    try:
        # job_id is a required query parameter
        job_id = flask.request.args.get('job_id', None)
        if not job_id:
            raise openreview.OpenReviewException(
                'Bad request: job_id is required')

        service = ExpertiseService(openreview_client,
                                   flask.current_app.config,
                                   flask.current_app.logger)
        result = service.del_expertise_job(user_id, job_id)
        flask.current_app.logger.debug('GET returns ' + str(result))
        return flask.jsonify(result), 200
    except openreview.OpenReviewException as error_handle:
        message = str(error_handle)
        flask.current_app.logger.error(message)
        # Map known error phrases to HTTP status codes, first match wins
        status = 500
        for needle, code in (('not found', 404), ('forbidden', 403),
                             ('bad request', 400)):
            if needle in message.lower():
                status = code
                break
        return flask.jsonify(format_error(status, message)), status
    # pylint:disable=broad-except
    except Exception as error_handle:
        flask.current_app.logger.error(str(error_handle))
        return flask.jsonify(
            format_error(
                500,
                'Internal server error: {}'.format(error_handle))), 500
def get_group(group_id):
    """Mock: load the Group *group_id* from the fixture for the current API version.

    NOTE(review): relies on an enclosing-scope ``version`` variable to pick
    the fixture file.
    """
    if version == 1:
        fixture = 'tests/data/expertiseServiceData.json'
    elif version == 2:
        fixture = 'tests/data/api2Data.json'
    else:
        raise openreview.OpenReviewException('Version number not supported')
    with open(fixture) as json_file:
        data = json.load(json_file)
    return openreview.Group.from_json(data['groups'][group_id])
def create_profile(client, email, first, last, middle=None, allow_duplicates=False):
    """Return the profile for *email*, creating it when it does not exist.

    A new profile is only created when the generated tilde username is the
    first of its kind (suffix '1') or *allow_duplicates* is set; otherwise a
    duplicate-name error is raised.

    :raises openreview.OpenReviewException: when a profile with the same
        name already exists and *allow_duplicates* is False.
    """
    profile = get_profile(client, email)
    if profile:
        return profile

    response = client.get_tildeusername(first, last, middle)
    # Fix: the username must stay a str — the old `.encode('utf-8')` made it
    # bytes, so `endswith(last + '1')` raised TypeError under Python 3 and a
    # bytes id was posted for the tilde group.
    tilde_id = response['username']
    if tilde_id.endswith(last + '1') or allow_duplicates:
        tilde_group = openreview.Group(id=tilde_id,
                                       signatures=[super_user_id],
                                       signatories=[tilde_id],
                                       readers=[tilde_id],
                                       writers=[super_user_id],
                                       members=[email])
        email_group = openreview.Group(id=email,
                                       signatures=[super_user_id],
                                       signatories=[email],
                                       readers=[email],
                                       writers=[super_user_id],
                                       members=[tilde_id])
        profile_content = {
            'emails': [email],
            'preferred_email': email,
            'names': [{
                'first': first,
                'middle': middle,
                'last': last,
                'username': tilde_id
            }]
        }
        client.post_group(tilde_group)
        client.post_group(email_group)
        return client.post_profile(tilde_id, profile_content)

    raise openreview.OpenReviewException(
        'There is already a profile with this first: {0}, middle: {1}, last name: {2}'
        .format(first, middle, last))
def get_note(id):
    """Mock: return the Note with the given id from the fixture data.

    Raises a NotFoundError-shaped OpenReviewException when no fixture note
    matches. NOTE(review): relies on an enclosing-scope ``version`` variable.
    """
    if version == 1:
        fixture = 'tests/data/fakeData.json'
    elif version == 2:
        fixture = 'tests/data/api2Data.json'
    else:
        raise openreview.OpenReviewException('Version number not supported')
    with open(fixture) as json_file:
        data = json.load(json_file)

    # Notes are grouped by invitation; scan every group for the id
    for invitation_notes in data['notes'].values():
        for note in invitation_notes:
            if note['id'] == id:
                return openreview.Note.from_json(note)

    raise openreview.OpenReviewException({
        'name': 'NotFoundError',
        'message': f"The Note {id} was not found",
        'status': 404,
        'details': {
            'path': 'id',
            'value': id
        }
    })
def get_notes(id=None, paperhash=None, forum=None, original=None,
              invitation=None, replyto=None, tauthor=None, signature=None,
              writer=None, trash=None, number=None, content=None, limit=None,
              offset=None, mintcdate=None, details=None, sort=None):
    """Mock note search over the JSON fixture data.

    Only the page at offset 0 returns data — any other offset (including the
    default None) yields an empty list, which terminates callers' pagination
    loops. NOTE(review): relies on an enclosing-scope ``version`` variable;
    assumes ``content`` is a dict when offset == 0 and no invitation is given.
    """
    if offset != 0:
        return []
    if version == 1:
        fixture = 'tests/data/expertiseServiceData.json'
    elif version == 2:
        fixture = 'tests/data/api2Data.json'
    else:
        raise openreview.OpenReviewException('Version number not supported')
    with open(fixture) as json_file:
        data = json.load(json_file)

    if invitation:
        return [
            openreview.Note.from_json(note)
            for note in data['notes'][invitation]
        ]

    # Author search: return the matching profile's publications
    if 'authorids' in content:
        authorid = content['authorids']
        for profile in data['profiles']:
            if authorid == profile['id']:
                return [
                    openreview.Note.from_json(note)
                    for note in profile['publications']
                ]
    return []
def deploy(self, assingment_title): ''' WARNING: This function untested ''' # pylint: disable=too-many-locals # Get the configuration note to check the group to assign client = self.conference.client notes = client.get_notes(invitation=self.match_group.id + '/-/Assignment_Configuration', content={'title': assingment_title}) if notes: configuration_note = notes[0] match_group = configuration_note.content['match_group'] is_area_chair = self.conference.get_area_chairs_id() == match_group submissions = openreview.tools.iterget_notes( client, invitation=self.conference.get_blind_submission_id()) assignment_edges = openreview.tools.iterget_edges( client, invitation=self.conference.get_paper_assignment_id( self.match_group.id), label=assingment_title) paper_by_forum = {n.forum: n for n in submissions} for edge in assignment_edges: paper_number = paper_by_forum.get(edge.head).number user = edge.tail new_assigned_group = self.conference.set_assignment( user, paper_number, is_area_chair) print(new_assigned_group) else: raise openreview.OpenReviewException( 'Configuration not found for ' + assingment_title)
def run(self):
    """Deploy the computed matching: look up the venue's request form,
    rebuild the conference object, and post the assignments.

    Status transitions: DEPLOYING -> DEPLOYED on success, or
    DEPLOYMENT_ERROR (with the error message) on any failure.
    """
    try:
        self.config_note_interface.set_status(MatcherStatus.DEPLOYING)
        # Find the venue's request form note by its venue_id
        notes = self.config_note_interface.client.get_notes(
            invitation='OpenReview.net/Support/-/Request_Form',
            content={'venue_id': self.config_note_interface.venue_id})
        if not notes:
            raise openreview.OpenReviewException('Venue request not found')
        conference = openreview.helpers.get_conference(
            self.config_note_interface.client, notes[0].id)
        ## impersonate user to get all the permissions to deploy the groups
        conference.client.impersonate(self.config_note_interface.venue_id)
        conference.set_assignments(
            assignment_title=self.config_note_interface.label,
            committee_id=self.config_note_interface.match_group,
            overwrite=True,
            enable_reviewer_reassignment=True)
        self.config_note_interface.set_status(MatcherStatus.DEPLOYED)
    except Exception as e:
        # Broad catch is deliberate: any failure must be recorded on the
        # config note so the UI can surface it.
        self.logger.error(str(e))
        self.config_note_interface.set_status(MatcherStatus.DEPLOYMENT_ERROR,
                                              str(e))
def build_conflicts(match_group, submissions):
    """Compute and bulk-post Conflict edges between every member of
    *match_group* and every submission.

    A conflict exists when a reviewer and any author share a domain, an
    email, a relation (in either direction), or a publication. Returns the
    list of edges that were posted.
    NOTE(review): relies on module-level ``conference``, ``client``,
    ``_create_edge_invitation``, ``_get_edge_readers`` and
    ``get_profile_info`` — confirm against the enclosing module.
    """
    edges = []
    invitation = _create_edge_invitation(
        conference.get_invitation_id('Conflict', prefix=match_group.id),
        match_group)
    user_profiles = openreview.matching._get_profiles(client,
                                                      match_group.members)
    # Pre-extract each reviewer's conflict-relevant info once, outside the
    # per-submission loop.
    user_profiles_info = [
        get_profile_info(profile) for profile in user_profiles
    ]
    for submission in submissions:
        # Union of conflict-relevant info across all authors of this paper
        author_domains = set()
        author_emails = set()
        author_relations = set()
        author_publications = set()
        author_profiles = openreview.matching._get_profiles(
            client, submission.details['original']['content']['authorids'])
        for author in author_profiles:
            author_info = get_profile_info(author)
            author_domains.update(author_info['domains'])
            author_emails.update(author_info['emails'])
            author_relations.update(author_info['relations'])
            author_publications.update(author_info['publications'])
        for user_info in user_profiles_info:
            conflicts = set()
            # Shared institutions
            conflicts.update(author_domains.intersection(user_info['domains']))
            # Relations are cross-checked both ways: an author lists the
            # reviewer, or the reviewer lists an author.
            conflicts.update(author_relations.intersection(
                user_info['emails']))
            conflicts.update(author_emails.intersection(
                user_info['relations']))
            conflicts.update(author_emails.intersection(user_info['emails']))
            conflicts.update(
                author_publications.intersection(user_info['publications']))
            if conflicts:
                edges.append(
                    openreview.Edge(
                        invitation=invitation.id,
                        head=submission.id,
                        tail=user_info['id'],
                        weight=-1,
                        label='Conflict',
                        readers=_get_edge_readers(tail=user_info['id']),
                        writers=[conference.id],
                        signatures=[conference.id]))
    openreview.tools.post_bulk_edges(client, edges=edges)
    # Perform sanity check
    edges_posted = client.get_edges_count(invitation=invitation.id)
    if edges_posted < len(edges):
        raise openreview.OpenReviewException(
            'Failed during bulk post of Conflict edges! Scores found: {0}, Edges posted: {1}'
            .format(len(edges), edges_posted))
    return edges
def results():
    """
    Get the results of a single submitted job with the associated job_id
    If provided with a delete_on_get field, delete the job from the server after retrieving results

    :param token: Authorization from a logged in user, which defines the set of accessible data
    :type token: str

    :param job_id: The ID of a submitted job
    :type job_id: str

    :param delete_on_get: Decide whether to keep the data on the server after getting the results
    :type delete_on_get: bool
    """
    openreview_client = get_client()
    user_id = get_user_id(openreview_client)
    if not user_id:
        flask.current_app.logger.error('No Authorization token in headers')
        return flask.jsonify(
            format_error(
                403, 'Forbidden: No Authorization token in headers')), 403

    try:
        # job_id is a required query parameter
        job_id = flask.request.args.get('job_id', None)
        if not job_id:
            raise openreview.OpenReviewException(
                'Bad request: job_id is required')
        # deleteOnGet defaults to false; any value other than 'true'
        # (case-insensitive) keeps the job data on the server
        delete_on_get = flask.request.args.get('deleteOnGet',
                                               'False').lower() == 'true'

        service = ExpertiseService(openreview_client,
                                   flask.current_app.config,
                                   flask.current_app.logger)
        result = service.get_expertise_results(user_id, job_id, delete_on_get)
        flask.current_app.logger.debug('GET returns code 200')
        return flask.jsonify(result), 200
    except openreview.OpenReviewException as error_handle:
        message = str(error_handle)
        flask.current_app.logger.error(message)
        # Map known error phrases to HTTP status codes, first match wins
        status = 500
        for needle, code in (('not found', 404), ('forbidden', 403),
                             ('bad request', 400)):
            if needle in message.lower():
                status = code
                break
        return flask.jsonify(format_error(status, message)), status
    # pylint:disable=broad-except
    except Exception as error_handle:
        flask.current_app.logger.error(str(error_handle))
        return flask.jsonify(
            format_error(
                500,
                'Internal server error: {}'.format(error_handle))), 500
def from_request(api_request: APIRequest,
                 starting_config=None,
                 openreview_client=None,
                 server_config=None,
                 working_dir=None):
    """
    Sets default fields from the starting_config and attempts to override from api_request fields

    :param api_request: the validated API request payload
    :param starting_config: default configuration dict (fix: was a mutable
        default argument ``{}`` shared across calls; now a None sentinel)
    :param openreview_client: client used to resolve the requesting user
    :param server_config: server settings dict (baseurls, model dirs);
        same mutable-default fix as starting_config
    :param working_dir: root directory under which the job directory is made
    :returns: a populated JobConfig
    :raises openreview.OpenReviewException: on conflicting or invalid fields
    """
    starting_config = starting_config if starting_config is not None else {}
    server_config = server_config if server_config is not None else {}

    def _camel_to_snake(camel_str):
        # e.g. 'useTitle' -> 'use_title'
        camel_str = re.sub('(.)([A-Z][a-z]+)', r'\1_\2', camel_str)
        return re.sub('([a-z0-9])([A-Z])', r'\1_\2', camel_str).lower()

    descriptions = JobDescription.VALS.value
    config = JobConfig()

    # Set metadata fields from request
    config.name = api_request.name
    config.user_id = get_user_id(openreview_client)
    config.job_id = shortuuid.ShortUUID().random(length=5)
    config.baseurl = server_config['OPENREVIEW_BASEURL']
    config.baseurl_v2 = server_config['OPENREVIEW_BASEURL_V2']
    root_dir = os.path.join(working_dir, config.job_id)
    config.dataset = starting_config.get('dataset', {})
    config.dataset['directory'] = root_dir
    config.job_dir = root_dir
    config.cdate = int(time.time() * 1000)
    config.mdate = config.cdate
    config.status = JobStatus.INITIALIZED.value
    config.description = descriptions[JobStatus.INITIALIZED]

    # Handle Group cases
    config.match_group = starting_config.get('match_group', None)
    config.alternate_match_group = starting_config.get(
        'alternate_match_group', None)
    if api_request.entityA['type'] == 'Group':
        config.match_group = [api_request.entityA['memberOf']]
    if api_request.entityB['type'] == 'Group':
        config.alternate_match_group = [api_request.entityB['memberOf']]

    # Handle Note cases — entityA takes precedence, mirroring the original
    # if/elif; the two branches were duplicated and are merged here.
    config.paper_invitation = None
    config.paper_id = None
    config.exclusion_inv = None
    if api_request.entityA['type'] == 'Note':
        note_entity = api_request.entityA
    elif api_request.entityB['type'] == 'Note':
        note_entity = api_request.entityB
    else:
        note_entity = None
    if note_entity is not None:
        # Fix: local was named 'id', shadowing the builtin
        invitation = note_entity.get('invitation', None)
        note_id = note_entity.get('id', None)
        excl_inv = note_entity.get('expertise', None)
        if invitation:
            config.paper_invitation = invitation
        if note_id:
            config.paper_id = note_id
        if excl_inv:
            config.exclusion_inv = excl_inv.get('exclusion', {}).get(
                'invitation', None)

    # Validate that other paper fields are none if an alternate match group is present
    if config.alternate_match_group is not None and (
            config.paper_id is not None
            or config.paper_invitation is not None):
        raise openreview.OpenReviewException(
            'Bad request: Cannot provide paper id/invitation and alternate match group'
        )

    # Load optional model params from default config
    path_fields = [
        'work_dir', 'scores_path', 'publications_path', 'submissions_path'
    ]
    allowed_model_params = [
        'name', 'sparseValue', 'useTitle', 'useAbstract', 'scoreComputation',
        'skipSpecter'
    ]
    config.model = starting_config.get('model', None)
    model_params = starting_config.get('model_params', {})
    config.model_params = {
        'use_title': model_params.get('use_title', None),
        'use_abstract': model_params.get('use_abstract', None),
        'average_score': model_params.get('average_score', None),
        'max_score': model_params.get('max_score', None),
        'skip_specter': model_params.get('skip_specter', None),
        'batch_size': model_params.get('batch_size', 1),
        'use_cuda': model_params.get('use_cuda', False),
    }

    # Attempt to load any API request model params
    api_model = api_request.model
    if api_model:
        for param in api_model.keys():
            # Special case: scoreComputation toggles max/average scoring
            if param == 'scoreComputation':
                compute_with = api_model.get('scoreComputation', None)
                if compute_with == 'max':
                    config.model_params['max_score'] = True
                    config.model_params['average_score'] = False
                elif compute_with == 'avg':
                    config.model_params['max_score'] = False
                    config.model_params['average_score'] = True
                else:
                    raise openreview.OpenReviewException(
                        "Bad request: invalid value in field 'scoreComputation' in 'model' object"
                    )
                continue
            # General case: whitelist, then camelCase -> snake_case
            if param not in allowed_model_params:
                raise openreview.OpenReviewException(
                    f"Bad request: unexpected fields in model: {[param]}")
            config.model_params[_camel_to_snake(param)] = api_model[param]

    # Set server-side path fields
    for field in path_fields:
        config.model_params[field] = root_dir
    # Fix: config.model may be None when no model is configured — guard the
    # substring checks so they don't raise TypeError.
    if config.model and 'specter' in config.model:
        config.model_params['specter_dir'] = server_config['SPECTER_DIR']
    if config.model and 'mfr' in config.model:
        config.model_params['mfr_feature_vocab_file'] = server_config[
            'MFR_VOCAB_DIR']
        config.model_params['mfr_checkpoint_dir'] = server_config[
            'MFR_CHECKPOINT_DIR']
    return config
def get_conference_builder(client, request_form_id, support_user='******'):
    """Build a ConferenceBuilder configured from a venue request-form note.

    Reads the request-form note, validates it, parses its date fields
    (accepting 'YYYY/MM/DD HH:MM' or 'YYYY/MM/DD'), and translates the
    form's policy answers into builder settings.

    :raises openreview.OpenReviewException: if the note is not a request
        form or neither venue_id nor conference_id is set.
    """
    note = client.get_note(request_form_id)
    # Case-insensitive equality check on the request-form invitation
    if not note.invitation.lower(
    ) == 'OpenReview.net/Support/-/Request_Form'.lower():
        raise openreview.OpenReviewException('Invalid request form invitation')

    if not note.content.get('venue_id') and not note.content.get(
            'conference_id'):
        raise openreview.OpenReviewException('venue_id is not set')

    builder = openreview.conference.ConferenceBuilder(client, support_user)
    builder.set_request_form_id(request_form_id)

    # Conference start date ('Venue Start Date' with legacy fallback)
    conference_start_date_str = 'TBD'
    conference_start_date = None
    start_date = note.content.get(
        'Venue Start Date', note.content.get('Conference Start Date',
                                             '')).strip()
    if start_date:
        try:
            conference_start_date = datetime.datetime.strptime(
                start_date, '%Y/%m/%d %H:%M')
        except ValueError:
            # Fall back to date-only format
            conference_start_date = datetime.datetime.strptime(
                start_date, '%Y/%m/%d')
        conference_start_date_str = conference_start_date.strftime('%b %d %Y')

    # Submission start date (optional)
    submission_start_date_str = ''
    submission_start_date = note.content.get('Submission Start Date',
                                             '').strip()
    if submission_start_date:
        try:
            submission_start_date = datetime.datetime.strptime(
                submission_start_date, '%Y/%m/%d %H:%M')
        except ValueError:
            submission_start_date = datetime.datetime.strptime(
                submission_start_date, '%Y/%m/%d')
        submission_start_date_str = submission_start_date.strftime(
            '%b %d %Y %I:%M%p')
    else:
        submission_start_date = None

    # Deadlines: 'Submission Deadline' is the final (second) deadline; when
    # an abstract registration deadline exists it becomes the first due
    # date, otherwise the single deadline is used as the only due date.
    submission_due_date_str = 'TBD'
    abstract_due_date_str = ''
    submission_second_due_date = note.content.get('Submission Deadline',
                                                  '').strip()
    if submission_second_due_date:
        try:
            submission_second_due_date = datetime.datetime.strptime(
                submission_second_due_date, '%Y/%m/%d %H:%M')
        except ValueError:
            submission_second_due_date = datetime.datetime.strptime(
                submission_second_due_date, '%Y/%m/%d')
        submission_due_date = note.content.get(
            'abstract_registration_deadline', '').strip()
        if submission_due_date:
            try:
                submission_due_date = datetime.datetime.strptime(
                    submission_due_date, '%Y/%m/%d %H:%M')
            except ValueError:
                submission_due_date = datetime.datetime.strptime(
                    submission_due_date, '%Y/%m/%d')
            abstract_due_date_str = submission_due_date.strftime(
                '%b %d %Y %I:%M%p')
            submission_due_date_str = submission_second_due_date.strftime(
                '%b %d %Y %I:%M%p')
        else:
            # No abstract deadline: single-deadline venue
            submission_due_date = submission_second_due_date
            submission_due_date_str = submission_due_date.strftime(
                '%b %d %Y %I:%M%p')
            submission_second_due_date = None
    else:
        submission_second_due_date = submission_due_date = None

    # venue_id takes precedence over the legacy conference_id field
    builder.set_conference_id(
        note.content.get('venue_id') if note.content.
        get('venue_id', None) else note.content.get('conference_id'))
    builder.set_conference_name(
        note.content.get('Official Venue Name',
                         note.content.get('Official Conference Name')))
    builder.set_conference_short_name(
        note.content.get('Abbreviated Venue Name',
                         note.content.get('Abbreviated Conference Name')))
    if conference_start_date:
        builder.set_conference_year(conference_start_date.year)

    homepage_header = {
        'title':
        note.content['title'],
        'subtitle':
        note.content.get('Abbreviated Venue Name',
                         note.content.get('Abbreviated Conference Name')),
        'deadline':
        'Submission Start: ' + submission_start_date_str + ' UTC-0, End: ' +
        submission_due_date_str + ' UTC-0',
        'date':
        conference_start_date_str,
        'website':
        note.content['Official Website URL'],
        'location':
        note.content.get('Location'),
        'contact':
        note.content.get('contact_email')
    }
    # With an abstract deadline, show all three dates in the banner
    if abstract_due_date_str:
        homepage_header[
            'deadline'] = 'Submission Start: ' + submission_start_date_str + ' UTC-0, Abstract Registration: ' + abstract_due_date_str + ' UTC-0, End: ' + submission_due_date_str + ' UTC-0'

    # Form-supplied overrides win over the computed header values
    override_header = note.content.get('homepage_override', '')
    if override_header:
        for key in override_header.keys():
            homepage_header[key] = override_header[key]
    builder.set_homepage_header(homepage_header)

    if note.content.get('Area Chairs (Metareviewers)', '') in [
            'Yes, our venue has Area Chairs',
            'Yes, our conference has Area Chairs'
    ]:
        builder.has_area_chairs(True)

    if note.content.get(
            'senior_area_chairs') == 'Yes, our venue has Senior Area Chairs':
        builder.has_senior_area_chairs(True)

    double_blind = (note.content.get('Author and Reviewer Anonymity',
                                     '') == 'Double-blind')

    public = (note.content.get('Open Reviewing Policy', '') in [
        'Submissions and reviews should both be public.',
        'Submissions should be public, but reviews should be private.'
    ])

    # Additional options may arrive as a JSON string or an already-parsed dict
    submission_additional_options = note.content.get(
        'Additional Submission Options', {})
    if isinstance(submission_additional_options, str):
        submission_additional_options = json.loads(
            submission_additional_options.strip())

    submission_remove_options = note.content.get('remove_submission_options',
                                                 [])
    withdrawn_submission_public = 'Yes' in note.content.get(
        'withdrawn_submissions_visibility', '')
    email_pcs_on_withdraw = 'Yes' in note.content.get(
        'email_pcs_for_withdrawn_submissions', '')
    desk_rejected_submission_public = 'Yes' in note.content.get(
        'desk_rejected_submissions_visibility', '')
    # Authors can not be anonymized only if venue is double-blind
    withdrawn_submission_reveal_authors = 'Yes' in note.content.get(
        'withdrawn_submissions_author_anonymity', '')
    desk_rejected_submission_reveal_authors = 'Yes' in note.content.get(
        'desk_rejected_submissions_author_anonymity', '')

    # Create review invitation during submission process function only when the venue is public, single blind and the review stage is setup.
    submission_release = (note.content.get(
        'submissions_visibility', ''
    ) == 'Yes, submissions should be immediately revealed to the public.')
    create_groups = (not double_blind) and public and submission_release
    create_review_invitation = create_groups and (
        note.content.get('Open Reviewing Policy', '') ==
        'Submissions and reviews should both be public.') and note.content.get(
            'make_reviews_public', None)

    author_names_revealed = 'Reveal author identities of all submissions to the public' in note.content.get(
        'reveal_authors', ''
    ) or 'Reveal author identities of only accepted submissions to the public' in note.content.get(
        'reveal_authors', '')

    papers_released = 'Release all submissions to the public' in note.content.get(
        'release_submissions', ''
    ) or 'Release only accepted submission to the public' in note.content.get(
        'release_submissions', '')

    builder.set_submission_stage(
        double_blind=double_blind,
        public=public,
        start_date=submission_start_date,
        due_date=submission_due_date,
        second_due_date=submission_second_due_date,
        additional_fields=submission_additional_options,
        remove_fields=submission_remove_options,
        email_pcs=False,  ## Need to add this setting to the form
        create_groups=create_groups,
        create_review_invitation=create_review_invitation,
        withdrawn_submission_public=withdrawn_submission_public,
        withdrawn_submission_reveal_authors=withdrawn_submission_reveal_authors,
        email_pcs_on_withdraw=email_pcs_on_withdraw,
        desk_rejected_submission_public=desk_rejected_submission_public,
        desk_rejected_submission_reveal_authors=
        desk_rejected_submission_reveal_authors,
        author_names_revealed=author_names_revealed,
        papers_released=papers_released)

    paper_matching_options = note.content.get('Paper Matching', [])
    if 'OpenReview Affinity' in paper_matching_options:
        builder.set_expertise_selection_stage(due_date=submission_due_date)
    if not paper_matching_options or 'Organizers will assign papers manually' in paper_matching_options:
        builder.enable_reviewer_reassignment(enable=True)

    ## Contact Emails is deprecated
    program_chair_ids = note.content.get(
        'Contact Emails', []) + note.content.get('program_chair_emails', [])
    builder.set_conference_program_chairs_ids(program_chair_ids)
    # Venues created before the reviewer_identity field existed use the
    # legacy anonymous-id scheme.
    builder.use_legacy_anonids(note.content.get('reviewer_identity') is None)

    # Map form answer strings to IdentityReaders enum values
    readers_map = {
        'Program Chairs':
        openreview.Conference.IdentityReaders.PROGRAM_CHAIRS,
        'All Senior Area Chairs':
        openreview.Conference.IdentityReaders.SENIOR_AREA_CHAIRS,
        'Assigned Senior Area Chair':
        openreview.Conference.IdentityReaders.SENIOR_AREA_CHAIRS_ASSIGNED,
        'All Area Chairs':
        openreview.Conference.IdentityReaders.AREA_CHAIRS,
        'Assigned Area Chair':
        openreview.Conference.IdentityReaders.AREA_CHAIRS_ASSIGNED,
        'All Reviewers':
        openreview.Conference.IdentityReaders.REVIEWERS,
        'Assigned Reviewers':
        openreview.Conference.IdentityReaders.REVIEWERS_ASSIGNED
    }
    builder.set_reviewer_identity_readers(
        [readers_map[r] for r in note.content.get('reviewer_identity', [])])
    builder.set_area_chair_identity_readers(
        [readers_map[r] for r in note.content.get('area_chair_identity', [])])
    builder.set_senior_area_chair_identity_readers([
        readers_map[r]
        for r in note.content.get('senior_area_chair_identity', [])
    ])

    return builder
def get_submissions(self):
    """Collect submission records from every configured source and reduce
    them to ``{paper_id: {'id', 'content': {'title', 'abstract'}}}``.

    Sources, in priority order: an alternate match group; otherwise paper
    invitations (queried against both API versions) plus an optional single
    paper id; finally an optional CSV file of (id, title, abstract) rows.
    """
    invitation_ids = self.convert_to_list(
        self.config.get('paper_invitation', []))
    paper_id = self.config.get('paper_id')
    submission_groups = self.convert_to_list(
        self.config.get('alternate_match_group', []))
    submissions = []
    # Fetch papers from alternate match group
    # If no alternate match group provided, aggregate papers from all other sources
    if submission_groups:
        aggregate_papers = self.get_papers_from_group(submission_groups)
        submissions.extend(aggregate_papers)
    else:
        for invitation_id in invitation_ids:
            # Assume invitation is valid for both APIs, but only 1
            # will have the associated notes
            submissions_v1 = list(
                openreview.tools.iterget_notes(self.openreview_client,
                                               invitation=invitation_id))
            submissions.extend(submissions_v1)
            submissions.extend(
                list(
                    openreview.tools.iterget_notes(self.openreview_client_v2,
                                                   invitation=invitation_id)))
        if paper_id:
            # If note not found, keep executing and raise an overall exception later
            # Otherwise if the exception is anything else, raise it again
            note_v1, note_v2 = None, None
            try:
                note_v1 = self.openreview_client.get_note(paper_id)
                submissions.append(note_v1)
            except openreview.OpenReviewException as e:
                err_name = e.args[0].get('name').lower()
                if err_name != 'notfounderror':
                    raise e
            try:
                note_v2 = self.openreview_client_v2.get_note(paper_id)
                submissions.append(note_v2)
            except openreview.OpenReviewException as e:
                err_name = e.args[0].get('name').lower()
                if err_name != 'notfounderror':
                    raise e
            # The id must resolve in at least one of the two APIs
            if not note_v1 and not note_v2:
                raise openreview.OpenReviewException(
                    f"Note {paper_id} not found")
    print('finding records of {} submissions'.format(len(submissions)))
    reduced_submissions = {}
    for paper in tqdm(submissions, total=len(submissions)):
        paper_id = paper.id
        # Get title + abstract depending on API version
        # (API v2 wraps field values in {'value': ...} dicts)
        paper_title = paper.content.get('title')
        if isinstance(paper_title, dict):
            paper_title = paper_title.get('value')
        paper_abstr = paper.content.get('abstract')
        if isinstance(paper_abstr, dict):
            paper_abstr = paper_abstr.get('value')
        reduced_submissions[paper_id] = {
            'id': paper_id,
            'content': {
                'title': paper_title,
                'abstract': paper_abstr
            }
        }
    # Optional CSV source; rows are (id, title, abstract) and overwrite any
    # earlier record with the same id.
    csv_submissions = self.config.get('csv_submissions')
    if csv_submissions:
        print('adding records from csv file ')
        with open(self.root.joinpath(csv_submissions)) as csv_file:
            csv_reader = csv.reader(csv_file, delimiter=',')
            for submission in tqdm(csv_reader):
                paper_id = submission[0]
                title = submission[1]
                abstract = submission[2]
                reduced_submissions[paper_id] = {
                    'id': paper_id,
                    'content': {
                        'title': title,
                        'abstract': abstract
                    }
                }
    return reduced_submissions
def get_conference(client, request_form_id):
    """Build and return a fully-configured Conference from a venue
    request-form note.

    :param client: an authenticated openreview client
    :param request_form_id: id of the venue request-form note
    :raises openreview.OpenReviewException: if the note is not a request
        form or neither venue_id nor conference_id is set.
    """
    note = client.get_note(request_form_id)
    # Fix: the original used `not in`, which performs a *substring* test on
    # the invitation string (so e.g. invitation 'Support' would pass); an
    # equality comparison is the intended validation.
    if note.invitation != 'OpenReview.net/Support/-/Request_Form':
        raise openreview.OpenReviewException('Invalid request form invitation')

    if not note.content.get('venue_id') and not note.content.get(
            'conference_id'):
        raise openreview.OpenReviewException('venue_id is not set')

    builder = openreview.conference.ConferenceBuilder(client)
    builder.set_request_form_id(request_form_id)

    # Conference start date: accept 'YYYY/MM/DD HH:MM' or 'YYYY/MM/DD'.
    # Fix: conference_start_date was unbound when no start date was given,
    # raising NameError at set_conference_year below; initialize and guard,
    # consistent with get_conference_builder.
    conference_start_date_str = 'TBD'
    conference_start_date = None
    start_date = note.content.get(
        'Venue Start Date', note.content.get('Conference Start Date',
                                             '')).strip()
    if start_date:
        try:
            conference_start_date = datetime.datetime.strptime(
                start_date, '%Y/%m/%d %H:%M')
        except ValueError:
            conference_start_date = datetime.datetime.strptime(
                start_date, '%Y/%m/%d')
        conference_start_date_str = conference_start_date.strftime('%b %d %Y')

    # Submission start date (optional)
    submission_start_date_str = ''
    submission_start_date = note.content.get('Submission Start Date',
                                             '').strip()
    if submission_start_date:
        try:
            submission_start_date = datetime.datetime.strptime(
                submission_start_date, '%Y/%m/%d %H:%M')
        except ValueError:
            submission_start_date = datetime.datetime.strptime(
                submission_start_date, '%Y/%m/%d')
        submission_start_date_str = submission_start_date.strftime(
            '%b %d %Y %I:%M%p')
    else:
        submission_start_date = None

    # Submission deadline (optional)
    submission_due_date_str = 'TBD'
    submission_due_date = note.content.get('Submission Deadline', '').strip()
    if submission_due_date:
        try:
            submission_due_date = datetime.datetime.strptime(
                submission_due_date, '%Y/%m/%d %H:%M')
        except ValueError:
            submission_due_date = datetime.datetime.strptime(
                submission_due_date, '%Y/%m/%d')
        submission_due_date_str = submission_due_date.strftime(
            '%b %d %Y %I:%M%p')
    else:
        submission_due_date = None

    # venue_id takes precedence over the legacy conference_id field
    builder.set_conference_id(
        note.content.get('venue_id') if note.content.
        get('venue_id', None) else note.content.get('conference_id'))
    builder.set_conference_name(
        note.content.get('Official Venue Name',
                         note.content.get('Official Conference Name')))
    builder.set_conference_short_name(
        note.content.get('Abbreviated Venue Name',
                         note.content.get('Abbreviated Conference Name')))
    if conference_start_date:
        builder.set_conference_year(conference_start_date.year)
    builder.set_homepage_header({
        'title':
        note.content['title'],
        'subtitle':
        note.content.get('Abbreviated Venue Name',
                         note.content.get('Abbreviated Conference Name')),
        'deadline':
        'Submission Start: ' + submission_start_date_str + ' GMT, End: ' +
        submission_due_date_str + ' GMT',
        'date':
        conference_start_date_str,
        'website':
        note.content['Official Website URL'],
        'location':
        note.content.get('Location')
    })

    if note.content.get('Area Chairs (Metareviewers)', '') in [
            'Yes, our venue has Area Chairs',
            'Yes, our conference has Area Chairs'
    ]:
        builder.has_area_chairs(True)

    double_blind = (note.content.get('Author and Reviewer Anonymity',
                                     None) == 'Double-blind')

    public = (note.content.get('Open Reviewing Policy', '') in [
        'Submissions and reviews should both be public.',
        'Submissions should be public, but reviews should be private.'
    ])

    builder.set_override_homepage(True)

    # Additional options may arrive as a JSON string or an already-parsed dict
    submission_additional_options = note.content.get(
        'Additional Submission Options', {})
    if isinstance(submission_additional_options, str):
        submission_additional_options = json.loads(
            submission_additional_options.strip())

    builder.set_submission_stage(
        double_blind=double_blind,
        public=public,
        start_date=submission_start_date,
        due_date=submission_due_date,
        additional_fields=submission_additional_options,
        allow_withdraw=True,
        reveal_authors_on_withdraw=True,
        allow_desk_reject=True,
        reveal_authors_on_desk_reject=True)

    paper_matching_options = note.content.get('Paper Matching', [])
    if 'OpenReview Affinity' in paper_matching_options:
        builder.set_expertise_selection_stage(due_date=submission_due_date)

    conference = builder.get_result()
    conference.set_program_chairs(emails=note.content['Contact Emails'])
    return conference
def get_expertise_results(self, user_id, job_id, delete_on_get=False):
    """
    Gets the scores of a given job.
    If delete_on_get is set, delete the directory after the scores are fetched.

    :param user_id: The ID of the user accessing the data
    :type user_id: str

    :param job_id: ID of the specific job to fetch
    :type job_id: str

    :param delete_on_get: A flag indicating whether or not to clean up the directory after it is fetched
    :type delete_on_get: bool

    :returns: A dictionary that contains the calculated scores and metadata

    :raises openreview.OpenReviewException: if the job directory does not exist,
        the user lacks permission to read it, or the job has not completed
    """
    result = {'results': []}
    search_dir = os.path.join(self.working_dir, job_id)
    self.logger.info(f"Checking if {job_id} belongs to {user_id}")

    # Check for directory existence
    if not os.path.isdir(search_dir):
        raise openreview.OpenReviewException('Job not found')

    # Validate profile ID: only the job owner or a superuser may read scores
    with open(os.path.join(search_dir, 'config.json'), 'r') as f:
        config = JobConfig.from_json(json.load(f))
    if user_id != config.user_id and user_id.lower() not in SUPERUSER_IDS:
        # Fix: use the fully-qualified exception class, consistent with every
        # other raise in this module (a bare OpenReviewException name is not
        # guaranteed to be in scope here).
        raise openreview.OpenReviewException(
            "Forbidden: Insufficient permissions to access job")

    # Fetch status
    status = config.status
    description = config.description
    self.logger.info(
        f"Able to access job at {job_id} - checking if scores are found")

    # Assemble scores: only completed jobs have score files on disk
    if status != JobStatus.COMPLETED:
        ## TODO: change it to Job not found
        raise openreview.OpenReviewException(
            f"Scores not found - status: {status} | description: {description}"
        )

    # Search for scores files (only non-sparse scores)
    file_dir, metadata_dir = self._get_score_and_metadata_dir(search_dir)
    self.logger.info(f"Retrieving scores from {search_dir}")

    # Check for output format: group-group matching rows use different keys
    # than the default submission-user matching
    group_group_matching = config.alternate_match_group is not None
    if not group_group_matching:
        with open(file_dir, 'r') as csv_file:
            # For single paper retrieval, filter out scores against the
            # dummy submission
            result['results'] = [{
                'submission': row[0],
                'user': row[1],
                'score': float(row[2])
            } for row in reader(csv_file) if row[0] != 'dummy']
    else:
        # If submission group, group under different keys
        with open(file_dir, 'r') as csv_file:
            result['results'] = [{
                'match_member': row[0],
                'submission_member': row[1],
                'score': float(row[2])
            } for row in reader(csv_file)]

    # Gather metadata
    with open(metadata_dir, 'r') as metadata:
        result['metadata'] = json.load(metadata)

    # Clear directory
    if delete_on_get:
        self.logger.info(f'Deleting {search_dir}')
        shutil.rmtree(search_dir)
    return result
def create_profile(client,
                   email,
                   first,
                   last,
                   middle=None,
                   allow_duplicates=False):
    '''
    Given email, first name, last name, and middle name (optional),
    creates and returns a user profile.

    If a profile with the same name exists, and allow_duplicates is False,
    an exception is raised.

    If a profile with the same name exists and allow_duplicates is True,
    a profile is created with the next largest number (e.g. if
    ~Michael_Spector1 exists, ~Michael_Spector2 will be created)

    :param client: OpenReview client used to look up names and post the new
        groups/profile; the posted groups are signed by the module-level
        ``super_user_id``, so the client needs matching permissions
    :param email: confirmed email address for the new profile
    :param first: first name
    :param last: last name
    :param middle: optional middle name (stored as-is, may be None)
    :param allow_duplicates: when True, create the profile even if another
        profile with the same name already exists

    :returns: the newly created openreview.Profile

    :raises openreview.OpenReviewException: if a profile already exists for
        the email, or a same-name profile exists and allow_duplicates is False
    '''
    # Guard clause: an email can only ever back one profile.
    if get_profile(client, email):
        raise openreview.OpenReviewException(
            'There is already a profile with this email address: {}'.format(
                email))

    # validate the name with just first and last names,
    # and also with first, middle, and last.
    # this is so that we catch more potential collisions;
    # let the caller decide what to do with false positives.
    username_response_FL_only = client.get_tildeusername(first, last, None)
    username_response_full = client.get_tildeusername(first, last, middle)

    # the username in each response will end with 1
    # if profiles don't exist for those names.
    username_FL_unclaimed = username_response_FL_only['username'].endswith('1')
    username_full_unclaimed = username_response_full['username'].endswith('1')

    # Idiom: derive the flag directly instead of an if/else that assigns
    # True/False branch-by-branch.
    profile_exists = not (username_FL_unclaimed and username_full_unclaimed)

    tilde_id = username_response_full['username']
    if profile_exists and not allow_duplicates:
        raise openreview.OpenReviewException(
            'Failed to create new profile {tilde_id}: There is already a profile with the name: \"{first} {middle} {last}\"'
            .format(first=first, middle=middle, last=last, tilde_id=tilde_id))

    # The tilde group and email group cross-reference each other; both are
    # writable only by the super user.
    tilde_group = openreview.Group(id=tilde_id,
                                   signatures=[super_user_id],
                                   signatories=[tilde_id],
                                   readers=[tilde_id],
                                   writers=[super_user_id],
                                   members=[email])
    email_group = openreview.Group(id=email,
                                   signatures=[super_user_id],
                                   signatories=[email],
                                   readers=[email],
                                   writers=[super_user_id],
                                   members=[tilde_id])
    profile_content = {
        'emails': [email],
        'preferredEmail': email,
        'names': [{
            'first': first,
            'middle': middle,
            'last': last,
            'username': tilde_id
        }]
    }
    client.post_group(tilde_group)
    client.post_group(email_group)
    return client.post_profile(
        openreview.Profile(id=tilde_id, content=profile_content))