def get_job_by_id(id: str) -> dict:
    """Fetch a single job document from the MongoDB ``jobs`` collection.

    Args:
        id: The job's ObjectId as a 24-character hex string.

    Returns:
        The job document as a plain dict (BSON types serialized via
        ``json_util``).

    Raises:
        HTTPException: 400 if ``id`` is not valid ObjectId formatting;
            200 if no matching job exists (NOTE(review): 200 for "not
            found" looks wrong — 404 would be conventional — but callers
            may depend on it, so it is left unchanged).
    """
    # Validate the id before paying for a client connection.
    # bson's InvalidId subclasses ValueError; a non-str/bytes id raises TypeError.
    try:
        id_obj = objectid.ObjectId(id)
    except (TypeError, ValueError):
        raise HTTPException(description='That is invalid formatting for an ObjectID', response=400)
    client = pymongo.MongoClient(f"mongodb+srv://{username}:{password}@ekko-test-qbczn.mongodb.net/jobs?retryWrites=true&w=majority")
    try:
        db = client.jobs
        job = db.jobs.find_one({"_id": id_obj})
    finally:
        # Close the connection even if the query raises (the original
        # leaked the client on that path).
        client.close()
    if job is None:
        raise HTTPException(description=f'No Job With ID {id} Was Found', response=200)
    # Round-trip through json_util so ObjectId/date fields become JSON-safe.
    return json.loads(json_util.dumps(job))
def get_place_id(place: str) -> str:
	"""Resolve a free-text place name to a Glassdoor location id.

	Posts to Glassdoor's location-autocomplete endpoint and returns the
	``locationId`` of the first suggestion.

	Args:
		place: City / region text to look up (e.g. ``"Austin"``).

	Returns:
		The Glassdoor ``locationId`` for the best match.

	Raises:
		HTTPException: 500 when the request fails, the response is not
			JSON, or no suggestion is returned.
	"""
	# Browser-like headers (rotating user-agent) so the endpoint treats
	# this like an ordinary page request.
	location_headers = {
		'accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.01',
		'accept-encoding': 'gzip, deflate, sdch, br',
		'accept-language': 'en-GB,en-US;q=0.8,en;q=0.6',
		'referer': 'https://www.glassdoor.com/',
		'upgrade-insecure-requests': '1',
		'user-agent': random.choice(user_agent_list),
		'Cache-Control': 'no-cache',
		'Connection': 'keep-alive'
	}

	data = {
		"term": place,
		"maxLocationsToReturn": 10
	}

	location_url = "https://www.glassdoor.co.in/findPopularLocationAjax.htm?"

	try:
		# timeout added so a stalled connection cannot hang the caller forever.
		location_response = requests.post(
			location_url, headers=location_headers, data=data, timeout=10).json()
		place_id = location_response[0]['locationId']
	except (requests.RequestException, ValueError, LookupError):
		# RequestException: network/timeout; ValueError: non-JSON body;
		# LookupError: empty suggestion list or missing 'locationId' key.
		raise HTTPException(description=f'Error Loading Glassdoor Location Data For: {place}', response=500)

	return place_id
def build_location_query(args: dict) -> dict:
    """Build a MongoDB ``$geoWithin`` filter from parsed request args.

    Args:
        args: Must contain ``'coordinates'`` (a [lng, lat] pair) and
            ``'distance'`` (miles); the distance is converted to radians
            by dividing by Earth's radius in miles (3963.2).

    Returns:
        A ``$geoWithin``/``$centerSphere`` query fragment.

    Raises:
        HTTPException: 500 when the expected keys are missing or
            ``distance`` is not numeric.
    """
    try:
        return {
            '$geoWithin': {
                '$centerSphere': [
                    args['coordinates'],
                    # miles -> radians (Earth radius ~= 3963.2 mi)
                    args['distance'] / 3963.2
                ]
            }
        }
    except (KeyError, TypeError):
        # KeyError: missing 'coordinates'/'distance'; TypeError: non-numeric
        # distance. Narrowed from the original bare except.
        raise HTTPException(description="Issue With Distance Query", response=500)
def scrape_for_jobs(name: str, zipcode: str) -> list:
    """Scrape LinkedIn and Glassdoor for jobs, best-effort.

    Either source may fail independently; results from whichever sources
    succeed are combined. Only when BOTH sources fail is an error raised.

    Args:
        name: Job title / search term.
        zipcode: Location to search around.

    Returns:
        Combined list of scraped job records (possibly from one source only).

    Raises:
        HTTPException: 500 when both scrapers fail.
    """
    print(f'Starting Scrape For: {name} in {zipcode}')
    jobs = []
    linkedin_failed = False
    try:
        jobs.extend(linkedin.scrape(name, zipcode))
    except Exception as exc:  # narrowed from bare except; keep best-effort
        linkedin_failed = True
        print(f'LinkedIn scrape failed: {exc}')
    try:
        jobs.extend(glassdoor.scrape(name, zipcode))
    except Exception as exc:  # narrowed from bare except
        print(f'Glassdoor scrape failed: {exc}')
        if linkedin_failed:
            # Both sources failed — nothing to return, surface the error.
            raise HTTPException(description="Scraping for new data failed",
                                response=500)
    return jobs
# 예제 #5 (Example #5) — scraping artifact, kept as a comment
# 0
 def __init__(self, message, status_code=None, payload=None):
     """Create an API error carrying a message, an optional HTTP status
     code, and an optional extra payload for the response body."""
     HTTPException.__init__(self)
     # Store everything the error handler needs to build a response.
     self.message, self.status_code, self.payload = (
         message, status_code, payload)
def check_for_arg_issues(args: dict):
    """Validate parsed query args, raising on inconsistent combinations.

    Raises:
        HTTPException: 400 when a distance is queried without any location
            (no zipcode and wildcard city/state); 403 when zero results
            are requested.
    """
    no_zip = args['zipcode'] == 0
    wildcard_location = args['city'] == '*' and args['state'] == '*'
    wants_distance = args['distance'] > 0
    if no_zip and wildcard_location and wants_distance:
        raise HTTPException(description='Queried distance without specifying either zipcode or city/state pair', response=400)
    if args['max_returns'] == 0:
        raise HTTPException(description='Queried For No Returns', response=403)
# 예제 #7 (Example #7) — scraping artifact, kept as a comment
# 0
 def __init__(self, description='API Error'):
     """Create an API error, preferring a class-level ``description``
     attribute (when a subclass defines one) over the argument."""
     HTTPException.__init__(self)
     # getattr with a default is equivalent to the original
     # hasattr-then-ternary dance: keep the existing description if the
     # class (or a prior assignment) supplies one, else use the argument.
     self.description = getattr(self, 'description', description)