def operation():
    # NOTE(review): this function references `item`, `response_data`, `self`,
    # `create_id`, and `log`, none of which exist in this scope — it looks
    # like a fragment lifted out of a method (compare item_scraped below,
    # which uses `logger` and takes `self`/`item` as parameters). Confirm the
    # intended enclosing context before relying on it.
    # Look up the job by the item's id, minting a new id when the item has none.
    job = Job.get_by_id(item.get('id') or create_id(item))
    job.item = item
    # Copy every key/value pair of the response payload onto the job record.
    for attr, value in response_data.items():
        setattr(job, attr, value)
    job.save()
    log.debug(f"Updated job '{job.id}' with monitoring data")
    self.stats.inc_value('monitoring/job_saved')
def item_scraped(self, item, response, spider):
    """Persist monitoring details of a scraped item onto its Job record.

    Runs inside a single database context: loads the Job matching the
    item, attaches the item payload plus response metadata, and saves.
    Increments the ``monitoring/job_saved`` stat on success.
    """
    with db:
        record = Job.get_by_id(item_to_job_id(item))
        record.item = item
        record.response_url = response.url
        record.response_backup_path = get_response_backup_path(response.url)
        record.save()
        logger.debug(f"Updated job '{record.id}' with monitoring data")
        self.stats.inc_value('monitoring/job_saved')
def job(job_id):
    """Render the detail page for a single job.

    Aborts with 404 when no job exists for ``job_id``. The template is
    rendered while the database context is still open so any lazy
    attribute access during rendering keeps working.
    """
    with db:
        record = Job.get_by_id(job_id) or abort(404)
        preview = thumbnail(job_title=record.title,
                            job_company=record.company_name,
                            job_location=record.location)
        return render_template('job.html',
                               job=record,
                               jobs_count=Job.count(),
                               companies_count=Job.companies_count(),
                               thumbnail=preview)
def test_employment_types_sorts_from_the_most_to_the_least_serious(db_connection):
    """Employment types are persisted in seriousness order, not input order."""
    sorted_value = [
        'full-time',
        'part-time',
        'contract',
        'paid internship',
        'unpaid internship',
        'internship',
        'volunteering',
    ]
    # The return value of create_job() isn't needed — the assertion re-reads
    # the record from the database to check what was actually persisted.
    # (The original bound it to an unused local `job`.)
    create_job('1', employment_types=shuffled(sorted_value))

    assert Job.get_by_id('1').employment_types == sorted_value
def test_employment_types_sorts_extra_types_last_alphabetically(db_connection):
    """Unknown employment types sort after the known ones, alphabetically."""
    # Return value unused (the original kept a dead `job` local); the
    # assertion reloads the record from the database instead.
    create_job('1', employment_types=[
        'ahoj',
        'full-time',
        'bob',
        'part-time',
        'foo',
    ])

    assert Job.get_by_id('1').employment_types == [
        'full-time',
        'part-time',
        'ahoj',
        'bob',
        'foo',
    ]
def test_employment_types_are_unique_sorted_lists(db_connection):
    """Duplicate employment types are dropped and the rest stored sorted."""
    # Return value unused (the original kept a dead `job` local); the
    # assertion reloads the record from the database instead.
    create_job('1', employment_types=['part-time', 'full-time', 'part-time'])

    assert Job.get_by_id('1').employment_types == ['full-time', 'part-time']