def get_vendors_based_on_group_id(request, endpoint, program_id, group_id):
    '''
    Returns the vendors of the vendor group identified by group_id.
    '''
    headers = {'Authorization': request.headers["Authorization"]}
    content_type = request.headers.get("Content-Type")
    user_agent = request.headers.get("User-Agent")
    if content_type:
        headers["Content-Type"] = content_type
    if user_agent:
        headers["User-Agent"] = user_agent
    url_dict = {
        "configurator_base_url": endpoint,
        "program_id": program_id,
        "group_id": group_id
    }
    url = settings.GET_VENDORS_BASED_ON_GROUP_ID.format(**url_dict)
    cache_key = 'svms:job-manager:programs:{}:vendor-groups:{}'.format(
        program_id, group_id)
    cache_data = RedisCacheHandler.get(cache_key)
    if cache_data:
        return cache_data['vendor_groups']['vendors']
    try:
        response = requests.get(url,
                                headers=headers,
                                timeout=(settings.CONNECTIVITY,
                                         settings.RESPONSE_TIMEOUT))
        response.raise_for_status()
    except requests.exceptions.HTTPError as error:
        raise Exception(
            'error: could not get content from url {} because of {}'.format(
                url, response.status_code))
    except requests.exceptions.ConnectTimeout:
        raise Exception(
            'error: requests.exceptions.ConnectTimeout while {}'.format(
                "getting vendors based on group id"))
    RedisCacheHandler.set(cache_key, response.json())
    return response.json()['vendor_groups']['vendors']
def job_response(request, program_id, queryset, serializer_obj, program_obj,
                 cache_status, set_cache, uid=None):
    logger.info('Handling request: %s', request)
    if cache_status:
        all_job_data = queryset
    else:
        data = retrieve_jobs(program_id, serializer_obj, request, uid)
        # url_list and check_duplicate_url are not defined in this function;
        # they appear to be module-level lists populated by retrieve_jobs()
        # and are cleared once the concurrent fan-out completes.
        parallel_request_data = ConfiguratorService().concurrent_response(
            request, url_list, program_id, check_duplicate_url)
        url_list.clear()
        check_duplicate_url.clear()
        all_job_data = update_response_data(data, parallel_request_data, uid)
    if uid and set_cache:
        cache_key = settings.REDIS_JOB_KEY.format(program_id=program_id,
                                                  job_uid=uid)
        logger.info('setting cache data of Job id: %s', uid)
        RedisCacheHandler.set(cache_key, all_job_data)
    if uid:
        context = {
            "program_id": program_id,
            "program_name": program_obj,
            "data": all_job_data
        }
    elif all_job_data:
        context = {
            "total_count": len(queryset),
            "items_per_page": len(all_job_data),
            "program_id": program_id,
            "program_name": program_obj,
            "data": all_job_data
        }
    else:
        context = {
            "total_count": len(queryset),
            "items_per_page": len(all_job_data),
            "data": all_job_data
        }
    return context, True
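# NOTE: ConfiguratorService().concurrent_response() used in job_response() above
# is not part of this excerpt. The helper below is only a minimal sketch of what
# such a fan-out typically looks like (a ThreadPoolExecutor over plain requests
# calls); the function name, worker count and timeout are assumptions, not the
# project's real implementation.
from concurrent.futures import ThreadPoolExecutor

import requests


def fetch_urls_concurrently(request, urls, timeout=(5, 30)):
    headers = {'Authorization': request.headers['Authorization']}

    def fetch(url):
        response = requests.get(url, headers=headers, timeout=timeout)
        response.raise_for_status()
        return url, response.json()

    with ThreadPoolExecutor(max_workers=10) as executor:
        # Maps each URL to its parsed JSON body.
        return dict(executor.map(fetch, urls))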
def get_vendor_details(request, endpoint, vendor_id):
    '''
    Returns vendor (organization) details for the given vendor_id.
    '''
    url_dict = {"configurator_base_url": endpoint, "vendor_id": vendor_id}
    url = settings.GET_VENDOR_DETAIL.format(**url_dict)
    headers = {'Authorization': request.headers["Authorization"]}
    content_type = request.headers.get("Content-Type")
    user_agent = request.headers.get("User-Agent")
    if content_type:
        headers["Content-Type"] = content_type
    if user_agent:
        headers["User-Agent"] = user_agent
    cache_key = 'svms:job-manager:organizations:{}'.format(vendor_id)
    cache_data = RedisCacheHandler.get(cache_key)
    if cache_data:
        return cache_data
    try:
        response = requests.get(url,
                                headers=headers,
                                timeout=(settings.CONNECTIVITY,
                                         settings.RESPONSE_TIMEOUT))
        response.raise_for_status()
    except requests.exceptions.HTTPError as error:
        raise Exception(
            'error: could not get vendor details from url {} because of {}'
            .format(url, response.status_code))
    except requests.exceptions.ConnectTimeout:
        raise Exception(
            'error: requests.exceptions.ConnectTimeout while {}'.format(
                "getting vendor details"))
    RedisCacheHandler.set(cache_key, response.json())
    return response.json()
def delete(self, request, uid, program_id=None):
    """
    Delete job data based on the given uid.
    :param request:
    :type request:
    :param uid:
    :type uid:
    :return:
    :rtype:
    """
    logger.info("JOBViewSet >> delete >> request: {}".format(uid))
    try:
        check_template_in_job = Job.objects.filter(template__uid=uid,
                                                   is_delete=False,
                                                   program_id=program_id)
        if check_template_in_job:
            logger.info("Template is used in a job, so it can't be deleted")
            return Response(
                {"message": "Template is used in a job, so it can't be deleted"},
                status=status.HTTP_200_OK)
        else:
            saved_job = Job.objects.filter(
                uid=uid, program_id=program_id,
                is_delete=False).update(is_delete=True)
            custom_job = JobCustom.objects.filter(job__uid=uid).update(
                is_delete=True)
            logger.info(
                "JOBViewSet >> delete >> response: {}".format(custom_job))
            if saved_job:
                # Purge the cached copy of this job.
                cache_key = settings.REDIS_JOB_KEY.format(
                    program_id=program_id, job_uid=uid)
                RedisCacheHandler.purge(cache_key)
                return Response({"job": {"uid": uid}},
                                status=status.HTTP_200_OK)
            else:
                return Response(
                    {
                        'error': {
                            "ref": "JOB_ID_NOT_FOUND",
                            "message": "Job id not found"
                        }
                    },
                    status=status.HTTP_400_BAD_REQUEST)
    except Exception as e:
        logger.error(e)
        return Response(
            {
                'error': {
                    "ref": "JOB_ID_NOT_FOUND",
                    "message": "Job id not found"
                }
            },
            status=status.HTTP_400_BAD_REQUEST)
def get_redis_data(key):
    status = False
    cache_data = RedisCacheHandler.get(key)
    if cache_data:
        status = True
        logger.info('getting cache data of key: {}: data: {}'.format(
            key, cache_data))
    else:
        logger.error('unable to get cache data of key: {}: data: {}'.format(
            key, cache_data))
    return status, cache_data
def authenticate_credentials(self, token, ua, request):
    data = (None, token)
    try:
        # Validate the JWT token, using the cached result when available and
        # falling back to the remote profile-manager service otherwise.
        key = settings.TOKEN_KEY.format(
            hashlib.md5(token.encode('utf-8')).hexdigest())
        user_info = RedisCacheHandler.get(key)
        logger.info('Received response from cache key: {}: data: {}'.format(
            key, user_info))
        if not user_info:
            logger.info(
                'Unable to get cache data of key: {}: data: {}'.format(
                    key, user_info))
            url = settings.AUTH_VALIDATE.format(
                profile_base_url=settings.PROFILE_BASE_URL)
            logger.info('Validate API Request {}'.format(url))
            user_info_response = requests.get(
                url,
                headers={
                    'Authorization': 'Bearer {}'.format(token),
                    'user-agent': ua
                })
            logger.info('Validate API Response {}'.format(
                user_info_response.status_code))
            if user_info_response.status_code != 200:
                raise exceptions.AuthenticationFailed()
            logger.info('Validate API data {}'.format(
                user_info_response.text))
            user_data = user_info_response.text
            user_info = json.loads(user_data)
            user_info = user_info['user']
        request.session['created_by'] = user_info['id']
        request.session['modified_by'] = user_info['id']
        keys_to_remove = [
            '_id'
            # 'provider',
            # 'educational_qualifications',
            # 'social_profiles',
            # 'email_addresses',
            # 'contact_numbers',
            # 'addresses'
        ]
        for key in keys_to_remove:
            user_info.pop(key)
        user_info = namedtuple("user",
                               user_info.keys())(*user_info.values())
        if user_info is not None:
            data = (user_info, token)
            logger.info("Authenticated!!")
        return data
    except Exception as e:
        logger.error("Authentication Failed: {}".format(e))
        raise exceptions.AuthenticationFailed()
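# NOTE: authenticate_credentials() above is written as a method, but the
# authentication class that calls it is not shown in this excerpt. The class
# below is a hypothetical sketch of how it is typically wired into Django REST
# Framework, assuming authenticate_credentials() lives on this class; the class
# name and Bearer-header parsing are assumptions, not the project's real code.
from rest_framework import authentication, exceptions


class JWTAuthentication(authentication.BaseAuthentication):
    # authenticate_credentials(self, token, ua, request) above would be
    # defined on this class.
    def authenticate(self, request):
        auth_header = request.headers.get('Authorization', '')
        if not auth_header.startswith('Bearer '):
            raise exceptions.AuthenticationFailed('Missing Bearer token')
        token = auth_header.split(' ', 1)[1]
        ua = request.headers.get('User-Agent', '')
        # Delegates cache lookup and remote validation to the method above.
        return self.authenticate_credentials(token, ua, request)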
def job_returned_queryset(view_name, request, program_id, uid=None):
    cache_flag = False
    logger.info('Handling request: %s', request)
    # Program name
    program_obj = get_program_name(request, program_id)
    logger.debug('Program Object: %s..', program_obj)
    status = True
    message = None
    if program_obj is None:
        # Invalid program_id
        queryset = []
    else:
        # Get vendors
        vendor = organization_id(request, program_id,
                                 request.session['created_by'])
        logger.info("Vendor for {} is {}".format(view_name, vendor))
        queryset = decide_on_queryset(view_name, program_id, vendor)
    # To get a single job
    if uid:
        # Get cache details
        cache_key = settings.REDIS_JOB_KEY.format(program_id=program_id,
                                                  job_uid=uid)
        cache_data = RedisCacheHandler.get(cache_key)
        if cache_data:
            cache_flag = True
            logger.info('Retrieving Cache Object: %s..', cache_data)
            queryset = cache_data
        else:
            queryset = queryset.filter(uid=uid)
    elif request.GET:
        try:
            query = request.GET.get("q")
            if query:
                q_object = Q()
                q_object.add((Q(category__category_name__icontains=query) |
                              Q(template_name__icontains=query) |
                              Q(title__title__icontains=query) |
                              Q(description__icontains=query)), Q.OR)
                queryset = queryset.filter(q_object).distinct()
            and_condition = get_filter_data(request)
            queryset = queryset.filter(and_condition)
            if "order_by" in request.GET:
                order_by = request.GET["order_by"]
                if order_by == "desc":
                    queryset = queryset.order_by('-title')
                try:
                    field_name = request.GET['key']
                    if field_name:
                        if order_by == "desc":
                            queryset = queryset.order_by('-' + field_name)
                        else:
                            queryset = queryset.order_by(field_name)
                except Exception as e:
                    logger.error("order by issue -- {}".format(e))
        except Exception as error:
            logger.error("error -- {}".format(error))
            status = False
            message = {
                "error": {
                    "ref": "ERROR_OCCURRED",
                    "message": error
                }
            }
    return queryset, program_obj, cache_flag, message, status
def put(self, request, uid, program_id=None):
    """
    Update job data based on the given uid.
    :param request:
    :type request:
    :param uid:
    :type uid:
    :return:
    :rtype:
    """
    # Checking for a valid program id
    program_obj = get_program_name(request, program_id)
    logger.debug('Program Object: %s..', program_obj)
    if program_obj:
        modified_by = request.session['modified_by']
        job = request.data
        if not job:
            return Response(
                {
                    'error': {
                        "ref": "ERROR_OCCURRED",
                        "message": "Kindly provide the data"
                    }
                },
                status.HTTP_400_BAD_REQUEST)
        logger.info("JOBViewSet >> put >> request: {}".format(request.data))
        job.update({"modified_by": modified_by})
        settings.LOGGER.info("JOBViewSet >> put >> request: {}".format(
            request.data))
        saved_job = get_object_or_404(Job.objects.all(),
                                      uid=uid,
                                      is_delete=False)
        cache_key = settings.REDIS_JOB_KEY.format(program_id=program_id,
                                                  job_uid=uid)
        RedisCacheHandler.purge(cache_key)
        job = retrieve_custom_columns(job)
        # After editing, the job goes back to pending approval
        job["status"] = "pending_approval"
        job_serializer = JobSerializer(instance=saved_job,
                                       data=job,
                                       partial=True)
        try:
            if job_serializer.is_valid(raise_exception=True):
                job_saved_data = job_serializer.save()
                check_job_obj = FoundationQualificationData.objects.filter(
                    job=job_saved_data)
                if 'foundational_data' in job:
                    for foundational in job['foundational_data']:
                        qid = foundational["foundational_data_type_id"]
                        for foundational_dict in foundational['values']:
                            if not check_job_obj:
                                insert_foundational = FoundationQualificationData.objects.create(
                                    job=job_saved_data,
                                    program_id=program_id,
                                    entity_id=qid,
                                    entity_name="foundational",
                                    entity_key=foundational_dict['id'])
                                if insert_foundational:
                                    logger.info("Foundational data Created")
                            else:
                                update_foundational = FoundationQualificationData.objects.filter(
                                    job=job_saved_data).update(
                                        program_id=program_id,
                                        entity_id=qid,
                                        entity_name="foundational",
                                        entity_key=foundational_dict['id'])
                                if update_foundational:
                                    logger.info("Foundational data Updated")
                if 'qualifications' in job:
                    for qualification in job['qualifications']:
                        qtype = qualification["qualification_type"]
                        qid = qualification["qualification_type_id"]
                        for qualification_dict in qualification['values']:
                            if not check_job_obj:
                                create_qualification = FoundationQualificationData.objects.create(
                                    job=job_saved_data,
                                    program_id=program_id,
                                    entity_type=qtype,
                                    entity_id=qid,
                                    entity_name="qualification",
                                    entity_key=qualification_dict['id'],
                                    entity_value=qualification_dict['level'],
                                    entity_is_active=qualification_dict['is_active'])
                                if create_qualification:
                                    logger.info("Qualification Created")
                            else:
                                update_qualification = FoundationQualificationData.objects.filter(
                                    job=job_saved_data).update(
                                        program_id=program_id,
                                        entity_type=qtype,
                                        entity_id=qid,
                                        entity_name="qualification",
                                        entity_key=qualification_dict['id'],
                                        entity_value=qualification_dict['level'],
                                        entity_is_active=qualification_dict['is_active'])
                                if update_qualification:
                                    logger.info("Qualification updated")
                try:
                    custom_saved_job = get_object_or_404(
                        JobCustom.objects.all(),
                        job__id=job_saved_data.id,
                        is_delete=False)
                    if custom_saved_job:
                        serializer = CustomSerializer(
                            instance=custom_saved_job,
                            data=job,
                            partial=True)
                        if serializer.is_valid(raise_exception=True):
                            job_saved = serializer.save()
                except Exception as e:
                    logger.error('Error Occurred: %s', e)
                # Get the cache key, then purge it
                cache_key = settings.REDIS_JOB_KEY.format(
                    program_id=program_id, job_uid=uid)
                logger.info('getting cache data of Job id: %s', uid)
                cache_data = RedisCacheHandler.get(cache_key)
                if cache_data:
                    RedisCacheHandler.purge(cache_key)
                data = []
                queryset_job = Job.objects.filter(id=job_saved_data.id)
                queryset, program_obj, cache_flag, error_message, error_status = job_returned_queryset(
                    'job', request, program_id, job_saved_data.uid)
                if error_status:
                    data, job_status = job_response(
                        request, program_id, queryset_job, queryset_job,
                        program_obj, False, False, job_saved_data.uid)
                    logger.info(
                        "JOBViewSet >> put >> Sending task to approval: {}"
                        .format(data))
                    # Approval workflow for PUT request
                    stomp_connectivity(data["data"], program_id)
                    logger.info("JOBViewSet >> put >> response: {}".format(
                        job_saved_data.id))
                    logger.info("Response: Job updated successfully")
                    return Response({"data": data}, status=status.HTTP_200_OK)
                else:
                    return Response(error_message,
                                    status=status.HTTP_400_BAD_REQUEST)
        except Exception as e:
            logger.error('Error Occurred: %s', e)
            return Response(
                {
                    'error': {
                        "ref": "ERROR_OCCURRED",
                        "message": "Job not updated, {}".format(e)
                    }
                },
                status=status.HTTP_400_BAD_REQUEST)
    else:
        logger.error("Invalid Program Id - {}".format(program_id))
        return Response(
            {
                'error': {
                    "ref": "INVALID PROGRAM_ID",
                    "message": "Invalid Program Id - {}".format(program_id)
                }
            },
            status=status.HTTP_400_BAD_REQUEST)
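# NOTE: stomp_connectivity() is called from put() above but is not defined in
# this excerpt. The sketch below shows one plausible shape for it using the
# stomp.py client; the broker host/port, credentials and queue name are
# assumptions and would come from settings in the real service.
import json

import stomp


def stomp_connectivity(job_data, program_id):
    conn = stomp.Connection(host_and_ports=[('localhost', 61613)])  # assumed broker
    conn.connect('guest', 'guest', wait=True)                       # assumed credentials
    conn.send(destination='/queue/job-approval',                    # assumed queue name
              body=json.dumps({'program_id': program_id, 'job': job_data}))
    conn.disconnect()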
def set_redis_data(data, key, program_id):
    cache_key = 'program:{}:key:{}'.format(program_id, key)
    logger.info('setting cache data of key: {}'.format(key))
    RedisCacheHandler.set(cache_key, data)
    return True
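# NOTE: RedisCacheHandler is used throughout this module but its implementation
# is not shown here. The class below is a minimal sketch of the interface the
# callers rely on (get/set/purge with JSON serialization over redis-py); the
# connection settings are assumptions, and the real handler may add TTLs, key
# namespacing or connection pooling.
import json

import redis


class RedisCacheHandler:
    _client = redis.Redis(host='localhost', port=6379, db=0)  # assumed connection

    @classmethod
    def get(cls, key):
        # Returns the deserialized value, or None on a cache miss.
        raw = cls._client.get(key)
        return json.loads(raw) if raw else None

    @classmethod
    def set(cls, key, value):
        cls._client.set(key, json.dumps(value))

    @classmethod
    def purge(cls, key):
        # Deletes the key; returns the number of keys removed.
        return cls._client.delete(key)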