def test_associate_device_with_invalid_token(candidate_first):
    """
    Associating a valid device id with a valid candidate while presenting a
    bogus authentication token must be rejected with Unauthorized (401).

    :param candidate_first: candidate dict object
    """
    url = CandidateApiUrl.DEVICES % candidate_first['id']
    payload = {'one_signal_device_id': PUSH_DEVICE_ID}
    response = send_request('post', url, 'invalid_token', payload)
    logger.info(response.content)
    assert response.status_code == requests.codes.UNAUTHORIZED
def test_associate_device_to_non_existing_candidate(token_first):
    """
    Associating a valid device id with a candidate id that does not exist
    must yield ResourceNotFound (404).

    :param token_first: authentication token
    """
    missing_candidate_id = sys.maxint
    url = CandidateApiUrl.DEVICES % missing_candidate_id
    payload = {'one_signal_device_id': PUSH_DEVICE_ID}
    response = send_request('post', url, token_first, payload)
    logger.info(response.content)
    assert response.status_code == requests.codes.NOT_FOUND
def test_delete_candidate_device_with_invalid_candidate_id(
        token_first, candidate_device_first):
    """
    Removing a push device for a candidate id that does not exist must
    yield ResourceNotFound (404).

    :param token_first: authentication token
    :param candidate_device_first: candidate push device
    """
    missing_candidate_id = sys.maxint
    url = CandidateApiUrl.DEVICES % missing_candidate_id
    payload = {'one_signal_device_id': PUSH_DEVICE_ID}
    response = send_request('delete', url, token_first, payload)
    logger.info(response.content)
    assert response.status_code == requests.codes.NOT_FOUND
def test_associate_device_with_invalid_one_signal_device_id(
        token_first, candidate_first):
    """
    Associating an invalid OneSignal device id with a valid candidate must
    yield ResourceNotFound (404).

    :param token_first: authentication token
    :param candidate_first: candidate dict object
    """
    url = CandidateApiUrl.DEVICES % candidate_first['id']
    payload = {'one_signal_device_id': 'Invalid Id'}
    response = send_request('post', url, token_first, payload)
    logger.info(response.content)
    assert response.status_code == requests.codes.NOT_FOUND
def test_delete_candidate_device_in_diff_domain(token_second, candidate_first,
                                                candidate_device_first):
    """
    Removing a candidate's push device with a token of a user from a
    different domain (not the candidate's owner) must yield Forbidden (403).

    :param token_second: authentication token (different domain)
    :param candidate_first: candidate dict object
    :param candidate_device_first: candidate push device
    """
    url = CandidateApiUrl.DEVICES % candidate_first['id']
    payload = {'one_signal_device_id': PUSH_DEVICE_ID}
    response = send_request('delete', url, token_second, payload)
    logger.info(response.content)
    assert response.status_code == requests.codes.FORBIDDEN
def test_delete_candidate_device(token_first, candidate_first,
                                 candidate_device_first):
    """
    A DELETE to /v1/candidates/:id/devices removes the candidate's
    association with the push device; expected status is 200.

    :param token_first: authentication token
    :param candidate_first: candidate dict object
    :param candidate_device_first: candidate push device
    """
    url = CandidateApiUrl.DEVICES % candidate_first['id']
    payload = {'one_signal_device_id': PUSH_DEVICE_ID}
    response = send_request('delete', url, token_first, payload)
    logger.info(response.content)
    assert response.status_code == requests.codes.OK
def test_associate_device_using_diff_user_token_diff_domain(
        token_second, candidate_first):
    """
    Associating a device with a candidate using a token of a user who is
    neither the owner nor in the owner's domain must yield Forbidden (403).

    :param token_second: authentication token (different domain)
    :param candidate_first: candidate dict object
    """
    url = CandidateApiUrl.DEVICES % candidate_first['id']
    payload = {'one_signal_device_id': PUSH_DEVICE_ID}
    response = send_request('post', url, token_second, payload)
    logger.info(response.content)
    assert response.status_code == requests.codes.FORBIDDEN
def test_delete_candidate_device_with_invalid_one_signal_id(
        token_first, candidate_first, candidate_device_first):
    """
    Removing a push device using a OneSignal device id that does not exist
    must yield ResourceNotFound (404).

    :param token_first: authentication token
    :param candidate_first: candidate dict object
    :param candidate_device_first: candidate push device
    """
    url = CandidateApiUrl.DEVICES % candidate_first['id']
    payload = {'one_signal_device_id': 'Invalid Id'}
    response = send_request('delete', url, token_first, payload)
    logger.info(response.content)
    assert response.status_code == requests.codes.NOT_FOUND
def test_delete_candidate_device_in_same_domain(token_same_domain,
                                                candidate_first,
                                                candidate_device_first):
    """
    Removing a candidate's push device with a token of a same-domain user
    who is not the owner must succeed with 200.

    :param token_same_domain: authentication token (same domain, not owner)
    :param candidate_first: candidate dict object
    :param candidate_device_first: candidate push device
    """
    url = CandidateApiUrl.DEVICES % candidate_first['id']
    payload = {'one_signal_device_id': PUSH_DEVICE_ID}
    response = send_request('delete', url, token_same_domain, payload)
    logger.info(response.content)
    assert response.status_code == requests.codes.OK
def openweb_crawl(url):
    """
    Send a request to openweb to crawl a social network link that does not
    currently exist there. Best-effort: any network/HTTP failure is logged
    and swallowed.

    :param url: social network profile url (string)
    :return: False when `url` is not a string, otherwise None
    """
    if not isinstance(url, basestring):
        return False
    try:
        crawl_request = requests.get(
            "http://api.thesocialcv.com/v3/profile/crawl.json",
            params=dict(apiKey=SOCIALCV_API_KEY, profileUrl=url),
            timeout=10)
        if crawl_request.status_code == 200:
            logger.info("Sending crawl request to openweb for %s" % url)
    except Exception as e:
        # BUGFIX: arguments must be passed separately, not as one tuple —
        # the original `(e, url)` supplied a single argument to two %s
        # placeholders and broke log-record formatting.
        logger.exception("Error: %s crawling link: %s", e, url)
def upload_candidate_documents_to_us_west(candidate_ids, domain_id=None,
                                          max_number_of_candidate=10):
    """
    Upload all the candidate documents to cloud search, in batches of
    `max_number_of_candidate` ids.

    :param candidate_ids: id (or list of ids) of candidates whose documents
        are to be uploaded
    :param domain_id: Domain Id
    :param max_number_of_candidate: batch size, default is 10
    :return: None
    """
    # Accept a single id for convenience.
    if isinstance(candidate_ids, (int, long)):
        candidate_ids = [candidate_ids]
    for i in xrange(0, len(candidate_ids), max_number_of_candidate):
        # Hoisted: the original recomputed this slice six times per iteration.
        batch = candidate_ids[i:i + max_number_of_candidate]
        try:
            logger.info(
                "Uploading {} candidate documents {}. Generating action dicts..."
                .format(len(batch), batch))
            # If _build_candidate_documents takes more than 120 seconds,
            # Timeout will raise an exception (caught below).
            with Timeout(seconds=120):
                action_dicts = _build_candidate_documents(batch, domain_id)
            adds, deletes = _send_batch_request(action_dicts)
            if deletes:
                logger.error(
                    "Shouldn't have gotten any deletes in a batch add operation.Got %s "
                    "deletes.candidate_ids: %s", deletes, batch)
            if adds:
                logger.info(
                    "{} Candidate documents {} have been uploaded".format(
                        len(batch), batch))
        except TimeoutException:
            logger.exception(
                "Time Limit Exceeded for Candidate Upload for following Candidates: {}"
                .format(batch))
def test_associate_device_with_valid_data(token_first, candidate_first):
    """
    Associating a valid device id with a valid candidate must create a
    CandidateDevice row (201); a subsequent GET on the same endpoint must
    then list exactly one device.

    :param token_first: authentication token
    :param candidate_first: candidate dict object
    """
    url = CandidateApiUrl.DEVICES % candidate_first['id']
    payload = {'one_signal_device_id': PUSH_DEVICE_ID}

    post_response = send_request('post', url, token_first, payload)
    logger.info(post_response.content)
    assert post_response.status_code == requests.codes.CREATED

    get_response = send_request('get', url, token_first)
    logger.info(get_response.content)
    assert get_response.status_code == requests.codes.OK
    body = get_response.json()
    assert 'devices' in body
    assert len(body['devices']) == 1
def test_associate_device_using_diff_user_token_same_domain(
        token_same_domain, candidate_first):
    """
    Associating a device with a candidate using a token of a same-domain
    user who is not the owner must succeed (201); a subsequent GET must
    then list exactly one device.

    :param token_same_domain: authentication token (same domain, not owner)
    :param candidate_first: candidate dict object
    """
    url = CandidateApiUrl.DEVICES % candidate_first['id']
    payload = {'one_signal_device_id': PUSH_DEVICE_ID}

    post_response = send_request('post', url, token_same_domain, payload)
    logger.info(post_response.content)
    assert post_response.status_code == requests.codes.CREATED

    get_response = send_request('get', url, token_same_domain)
    logger.info(get_response.content)
    assert get_response.status_code == requests.codes.OK
    body = get_response.json()
    assert 'devices' in body
    assert len(body['devices']) == 1
def test_associate_device_to_two_candidate_in_same_domain(
        token_first, candidate_first, candidate_same_domain):
    """
    Associate the same device id with a same-domain candidate twice; both
    POSTs are expected to return 201 in dev/jenkins environments.

    :param token_first: authentication token
    :param candidate_first: candidate dict object
    :param candidate_same_domain: second candidate in the same domain
    """
    url = CandidateApiUrl.DEVICES % candidate_same_domain['id']
    payload = {'one_signal_device_id': PUSH_DEVICE_ID}

    first_response = send_request('post', url, token_first, payload)
    logger.info(first_response.content)
    assert first_response.status_code == requests.codes.CREATED

    second_response = send_request('post', url, token_first, payload)
    logger.info(second_response.content)
    # api raises invalid usage in production if we want to associate same device id to multiple candidates
    # but in dev or jenkins, this restriction is not applicable.
    # assert second_response.status_code == 400
    assert second_response.status_code == requests.codes.CREATED
def convert_dice_candidate_dict_to_gt_candidate_dict(dice_candidate_dict,
                                                     authed_user):
    """
    ONLY converts the dict object. Won't put in `id` fields or do anything
    to the DB.

    :param dice_candidate_dict: Dice/OpenWeb candidate dict
    :param authed_user: authenticated user; used to pick the domain's talent pools
    :return: getTalent-style Candidate dict
    """
    social_profile_dict = dice_candidate_dict
    dice_profile_dict = dice_candidate_dict

    talent_pool_ids = [
        tid.id
        for tid in TalentPool.query.filter_by(domain_id=authed_user.domain_id)
    ]
    dice_profile_contact_dict = dice_profile_dict.get('contact') or {}
    emails = [
        social_profile_dict.get('email')
        or dice_profile_contact_dict.get('email')
    ]
    emails = filter(None, emails)

    # Get candidate's name, phone number, and address
    # socialProfile name takes precedence over diceProfile name
    formatted_name = social_profile_dict.get(
        'name') or dice_profile_contact_dict.get('formattedName')
    phones = []
    social_profile_location_dict = social_profile_dict.get('location') or {}
    if dice_profile_contact_dict.get('phoneNumber'):
        # BUGFIX: original used "%s%s".format(...) which mixes %-style with
        # str.format and always yields the literal string "%s%s".
        phone_number = "{}{}".format(
            dice_profile_contact_dict.get('areaCode', ""),
            dice_profile_contact_dict.get('phoneNumber'))
        phone_number = format_phone_number(
            phone_number,
            country_code=social_profile_location_dict.get('country', ''))
        if phone_number:
            phones.append(phone_number)

    latitude = social_profile_location_dict.get('latitude')
    longitude = social_profile_location_dict.get('longitude')
    dice_profile_location_dict = dice_profile_contact_dict.get(
        'location') or {}
    country_code_or_name = social_profile_location_dict.get('country')
    city = social_profile_location_dict.get(
        'town') or dice_profile_location_dict.get('municipality')
    # 'text' looks like "City, State"; take the part after the comma if present.
    state = social_profile_location_dict.get('text')
    if state:
        try:
            state = state.split(',')[1]
        except IndexError:
            state = state
    zip_code = dice_profile_location_dict.get('postalCode')

    # Get candidate's social network profiles
    social_networks = []
    web_profiles_dict = social_profile_dict.get('webProfiles') or {}
    if web_profiles_dict:
        social_networks_names = []
        for social_network_name, info_dict in web_profiles_dict.items():
            social_networks_names.append(social_network_name)
        all_social_networks = db.session.query(
            SocialNetwork.id, SocialNetwork.name).filter(
                SocialNetwork.name.in_(social_networks_names)).all()
        for social_network_name, info_dict in web_profiles_dict.items():
            social_network_row = filter(
                lambda sn_row: sn_row.name.lower() == social_network_name.
                lower(), all_social_networks)
            # If no matching social_network found, we create a new social network
            if len(social_network_row):
                social_network_id = social_network_row[0][0]
            else:
                logger.warn(
                    "Unknown social network from import_dice_candidates: %s. info_dict=%s. ",
                    social_network_name, info_dict)
                from urlparse import urlparse
                parsed_obj = urlparse(url=info_dict.get('url'))
                social_network_homepage = "%s://%s" % (parsed_obj.scheme,
                                                       parsed_obj.netloc)
                new_social_network = SocialNetwork(
                    name=social_network_name, url=social_network_homepage)
                db.session.add(new_social_network)
                db.session.commit()
                social_network_id = new_social_network.id
                logger.info("Auto-created social_network, id=%s, homepage=%s",
                            social_network_id, social_network_homepage)
            social_networks.append({
                'profile_url': info_dict.get('url'),
                'name': social_network_name
            })

    def _to_int_or_none(value):
        # Defensive int conversion: the original int(...) crashed with
        # TypeError/ValueError on missing or non-numeric year/month values.
        try:
            return int(value)
        except (TypeError, ValueError):
            return None

    # Get CandidateExperience objects from OpenWeb and Dice dicts
    work_experiences = []
    social_profile_dict_experience = social_profile_dict.get(
        'experience') or {}
    history_dicts = social_profile_dict_experience.get('history') or []
    start_year = None
    start_month = 1
    end_year = None
    end_month = 1
    for i, history_dict in enumerate(history_dicts):
        # Parse startedAt and endedAt
        start_date_obj = parse_openweb_date(history_dict.get('startedAt'))
        if start_date_obj:
            start_year = start_date_obj.year or None
            start_month = start_date_obj.month or 1
        end_date_obj = parse_openweb_date(history_dict.get('endedAt'))
        if end_date_obj:
            end_year = end_date_obj.year or None
            end_month = end_date_obj.month or 1
        # Parse out candidate_experience_bullets.
        candidate_experience_bullets = []
        if history_dict.get('description'):
            candidate_experience_bullets.append(
                dict(description=history_dict.get('description')))
        work_experiences.append(
            dict(
                organization=history_dict.get('company'),
                position=history_dict.get('jobTitle'),
                start_year=start_year,
                end_year=end_year,
                start_month=start_month,
                end_month=end_month,
                is_current=i == 0,  # Assume the very first element is the current one
                bullets=candidate_experience_bullets))
        start_year, start_month, end_year, end_month = None, None, None, None

    employment_history_list = dice_profile_dict.get(
        'employmentHistoryList') or []
    for i, employment_dict in enumerate(employment_history_list):
        # Parse out candidate_experience_bullets.
        candidate_experience_bullets = []
        if employment_dict.get('description'):
            # using TalentCore._split_description
            candidate_experience_bullets.append(
                dict(text=employment_dict['description']))
        start_year = _to_int_or_none(employment_dict.get('startYear')) or None
        start_month = _to_int_or_none(employment_dict.get('startMonth', 1)) or 1
        end_year = _to_int_or_none(employment_dict.get('endYear')) or None
        end_month = _to_int_or_none(employment_dict.get('endMonth', 1)) or 1
        # BUGFIX: original read history_dict (leftover loop variable from the
        # OpenWeb loop above) instead of employment_dict.
        work_experiences.append(
            dict(organization=employment_dict.get('company'),
                 position=employment_dict.get('jobTitle'),
                 start_year=start_year,
                 end_year=end_year,
                 start_month=start_month,
                 end_month=end_month,
                 bullets=candidate_experience_bullets))

    # Skills
    skills = []  # Array of CandidateSkill objects
    social_profile_dict_skills = social_profile_dict.get('skills') or {}
    for skill_name, months_experience in social_profile_dict_skills.iteritems():
        skills.append(
            dict(name=skill_name,
                 months_used=months_experience
                 if months_experience > 0 else None))
    if dice_profile_dict and dice_profile_dict.get('skillList'):
        from datetime import date
        for skill_dict in dice_profile_dict['skillList']:
            # Try to parse out 'years'. Sometimes it can be -1
            try:
                years = int(skill_dict['years'])
                if years < 0:
                    years = None
            except (KeyError, ValueError):
                years = None
            # Try to convert 'lastUsed' to date object (it's a year)
            try:
                year = int(skill_dict['lastUsed'])
                # Guard against bogus far-future years.
                last_used = date(year=year, month=1, day=1) if (
                    year <= (date.today().year + 1)) else None
            except (KeyError, ValueError):
                last_used = None
            # Add skill.
            # BUGFIX: original called last_used.isoformat() unconditionally,
            # raising AttributeError whenever last_used was None.
            skills.append(
                dict(name=skill_dict.get('name'),
                     months_used=years * 12 if years else None,
                     last_used=last_used.isoformat() if last_used else None))

    # Interests (an array of strings), a text comment
    candidate_text_comment = None
    interests_array = social_profile_dict.get('interests')
    if interests_array:
        candidate_text_comment = "Interests: %s" % (', '.join(
            [interest.strip() for interest in interests_array]))
    text_comments = [{
        'comment': candidate_text_comment,
        'created_at_datetime': datetime.datetime.utcnow().isoformat(),
    }]

    # Preferred Locations
    preferred_locations = dice_profile_dict.get('preferredLocationList', [])
    preferred_locations = [
        {
            'address_line_1': loc.get('addrOne'),
            'address_line_2': loc.get('addrTwo'),
            'zip_code': loc.get("postalCode"),
            # I know, municipality is different than city, but for API
            # consistency's sake we're putting city
            'city': loc.get('municipality'),
            'region': loc.get('region'),
            'country': loc.get('country')
        } for loc in preferred_locations
    ]

    # Work preferences: only populated when the Dice profile marks
    # workDetails as completed ('true').
    dice_work_preference = dice_profile_dict.get('completedStatus',
                                                 dict()).get('workDetails')
    work_preferences = dict()
    if str(dice_work_preference).lower() == 'true':
        work_preferences["authorization"] = str(
            dice_profile_dict.get('desiredEmployment',
                                  dict()).get('workAuthorization'))
        work_preferences["employment_type"] = str(
            dice_profile_dict.get('desiredEmployment', dict()).get('type'))
        work_preferences["security_clearance"] = str(
            dice_profile_dict.get('desiredEmployment',
                                  dict()).get('securityClearance')).lower() == 'true'
        work_preferences["willing_to_relocate"] = str(
            dice_profile_dict.get('willingToRelocate')).lower() == 'true'
        # e.g. "25% travel" -> 25; None when the field is empty/absent.
        work_preferences["travel_percentage"] = int(
            dice_profile_dict['willingToTravel'].split()[0]
        ) if (dice_profile_dict.get('willingToTravel') or '').split() else None
        work_preferences["telecommute"] = str(
            dice_profile_dict.get('willingToTelecommute')).lower() == 'true'
        work_preferences["third_party"] = str(
            dice_profile_dict.get('thirdParty')).lower() == 'true'

    # Education: 'education' is a list of universities
    universities_list = social_profile_dict.get('education') or []
    dice_profile_dict_education_list = dice_profile_dict.get(
        'educationList') or []
    universities_list.extend([
        education_dict.get('institution')
        for education_dict in dice_profile_dict_education_list
    ])
    # Remove empty university names
    universities_list = filter(None, universities_list)
    educations = [{
        'school_name': university_name,
        'city': None,
        'state': None,
        'country': None
    } for university_name in universities_list]

    image_url = social_profile_dict.get('imageUrl', '')

    # Addresses
    addresses = [{
        'address_line_1': None,
        'address_line_2': None,
        'city': city,
        'state': state,
        'zip_code': zip_code,
        'country': country_code_or_name,
        'is_default': True,
    }]

    gt_candidate_dict = {
        'full_name': formatted_name,
        'emails': [{
            'address': email
        } for email in emails],
        'phones': [{
            'value': phone
        } for phone in phones],
        'addresses': addresses,
        'preferred_locations': preferred_locations,
        'work_preference': work_preferences,
        'work_experiences': work_experiences,
        'educations': educations,
        'social_networks': social_networks,
        # 'military_services': candidate_military_services,
        'skills': skills,
        # 'text_comments': text_comments,
        'openweb_id': social_profile_dict.get('id'),
        'dice_profile_id': dice_profile_dict.get('id'),
        'talent_pool_ids': {
            "add": talent_pool_ids
        },
        'image_url': image_url
    }
    return gt_candidate_dict
""" Run Celery Worker For Celery to run from command line, script runs as separate process with celery command Usage: open terminal cd to talent-flask-services directory Run the following command to start celery worker: celery -A candidate_service.candidate_app.celery_app worker --concurrency=4 --loglevel=info """ # Service Specific from candidate_service.common.talent_celery import CELERY_WORKER_ARGS from candidate_service.candidate_app import logger, celery_app as celery logger.info("Celery worker has been started successfully") celery.start(argv=CELERY_WORKER_ARGS + ['celery_candidate_documents_scheduler'] + ['-n', 'celery_candidate_documents_scheduler'])
    def get(self, **kwargs):
        """
        Function will return Pipelines for which given candidate is part of.

        :rtype: dict[list[dict]]

        Usage:
            >>> requests.get('host/v1/candidates/:candidate_id/pipelines')
            <Response [200]>
        """
        # Authenticated user & candidate ID
        authed_user, candidate_id = request.user, kwargs['candidate_id']

        # Ensure candidate exists and belongs to user's domain
        get_candidate_if_validated(user=authed_user, candidate_id=candidate_id)

        # Maximum number of Talent Pipeline objects used for searching.
        # This is to prevent client from waiting too long for a response
        max_requests = request.args.get('max_requests', 30)

        # `is_hidden` query arg filters hidden pipelines; must be 0 or 1.
        is_hidden = request.args.get('is_hidden', 0)
        if not is_number(is_hidden) or int(is_hidden) not in (0, 1):
            raise InvalidUsage('`is_hidden` can be either 0 or 1')

        # Candidate's talent pool ID
        candidate_talent_pool_ids = [
            tp.talent_pool_id for tp in TalentPoolCandidate.query.filter_by(
                candidate_id=candidate_id).all()
        ]

        # Pipelines the candidate was explicitly added to (bypass search).
        added_pipelines = TalentPipelineIncludedCandidates.query.filter_by(
            candidate_id=candidate_id).all()
        added_pipelines = map(lambda x: x.talent_pipeline, added_pipelines)
        logger.info('added_pipelines are:{}. candidate_id:{}'.format(
            len(added_pipelines), candidate_id))

        # Pipelines the candidate was explicitly excluded from.
        removed_pipeline_ids = map(
            lambda x: x[0],
            TalentPipelineExcludedCandidates.query.with_entities(
                TalentPipelineExcludedCandidates.talent_pipeline_id).filter_by(
                    candidate_id=candidate_id).all())

        # Get User-domain's 30 most recent talent pipelines in order of added time
        talent_pipelines = TalentPipeline.query.join(User).filter(
            TalentPipeline.is_hidden == is_hidden,
            TalentPipeline.talent_pool_id.in_(candidate_talent_pool_ids),
            TalentPipeline.id.notin_(removed_pipeline_ids)).order_by(
                TalentPipeline.added_time.desc()).limit(max_requests).all()

        logger.info(
            'Going for CS for {} talent_pipelines for candidate_id:{}'.format(
                len(talent_pipelines), candidate_id))

        # Use Search API to retrieve candidate's domain-pipeline inclusion.
        # One search future is fired per pipeline that has search_params;
        # the pipeline and its params are attached to the future so they can
        # be recovered when results come back.
        found_talent_pipelines = []
        futures = []
        for talent_pipeline in talent_pipelines:
            search_params = talent_pipeline.search_params
            if search_params:
                search_future = search_candidates_from_params(
                    search_params=format_search_params(
                        talent_pipeline.search_params),
                    access_token=request.oauth_token,
                    url_args='?id={}&talent_pool_id={}'.format(
                        candidate_id, talent_pipeline.talent_pool_id),
                    facets='none')
                search_future.talent_pipeline = talent_pipeline
                search_future.search_params = search_params
                futures.append(search_future)

        # Wait for all the futures to complete
        completed_futures = wait(futures)
        for completed_future in completed_futures[0]:
            # NOTE(review): reaches into the future's private `_result`
            # attribute — presumably a requests.Response; confirm against
            # search_candidates_from_params.
            if completed_future._result.ok:
                search_response = completed_future._result.json()
                # A non-empty 'candidates' list means this candidate matched
                # the pipeline's search params, i.e. the pipeline includes him.
                if search_response.get('candidates'):
                    found_talent_pipelines.append(
                        completed_future.talent_pipeline)
                    logger.info(
                        "\ncandidate_id: {}\ntalent_pipeline_id: {}\nsearch_params: {}\nsearch_response: {}"
                        .format(candidate_id,
                                completed_future.talent_pipeline.id,
                                completed_future.search_params,
                                search_response))
            else:
                logger.error(
                    "Couldn't get candidates from Search API because %s" %
                    completed_future._result.text)

        result = []
        # Merge search hits with explicitly-added pipelines, de-duplicate,
        # and sort newest-first by added_time.
        found_talent_pipelines += added_pipelines
        found_talent_pipelines = list(set(found_talent_pipelines))
        found_talent_pipelines = sorted(
            found_talent_pipelines,
            key=lambda talent_pipeline: talent_pipeline.added_time,
            reverse=True)
        logger.info("\nFound {} talent_pipelines:{}".format(
            len(found_talent_pipelines), found_talent_pipelines))

        if found_talent_pipelines:
            # Engagement scores keyed by pipeline id (int), looked up below.
            pipeline_engagements = top_most_engaged_pipelines_of_candidate(
                candidate_id)
            for talent_pipeline in found_talent_pipelines:
                result.append({
                    "id": talent_pipeline.id,
                    "candidate_id": candidate_id,
                    "name": talent_pipeline.name,
                    "description": talent_pipeline.description,
                    "open_positions": talent_pipeline.positions,
                    "pipeline_engagement":
                    pipeline_engagements.get(int(talent_pipeline.id), None),
                    "datetime_needed": str(talent_pipeline.date_needed),
                    'is_hidden': talent_pipeline.is_hidden,
                    "user_id": talent_pipeline.user_id,
                    "added_datetime": str(talent_pipeline.added_time)
                })

        return {'candidate_pipelines': result}