def test_export_formats(self):
    """Jobs must declare at least one known export format."""
    # An empty format list fails validation.
    self.fixture['export_formats'] = []
    job = Job(**self.fixture)
    with self.assertRaises(ValidationError) as e:
        job.full_clean()
    # assertIn gives a clearer failure message than assertTrue(x in y).
    self.assertIn('export_formats', e.exception.message_dict)
    # An unrecognized format name also fails validation.
    self.fixture['export_formats'] = ['not_a_format']
    job = Job(**self.fixture)
    with self.assertRaises(ValidationError) as e:
        job.full_clean()
    self.assertIn('export_formats', e.exception.message_dict)
def test_validates_feature_selection(self):
    """feature_selection must be non-empty YAML that parses to a dict."""
    # An empty string is rejected outright.
    self.fixture['feature_selection'] = ""
    job = Job(**self.fixture)
    with self.assertRaises(ValidationError) as e:
        job.full_clean()
    # assertIn gives a clearer failure message than assertTrue(x in y).
    self.assertIn('feature_selection', e.exception.message_dict)
    # Valid YAML that parses to a list (not a dict) is also rejected.
    self.fixture['feature_selection'] = """
- a list
- not a dict
"""
    job = Job(**self.fixture)
    with self.assertRaises(ValidationError) as e:
        job.full_clean()
    self.assertIn('feature_selection', e.exception.message_dict)
    self.assertEqual(e.exception.message_dict['feature_selection'],
                     [u'YAML must be dict, not list'])
def test_success(db):
    """Happy path: register a DOI, process the job callback, then the email."""
    article = Node.objects.create(json={})
    doi = Doi.objects.create(node=article)

    # Registering the DOI creates a job with the expected parameter names.
    job = doi.register()
    assert list(job.params.keys()) == ["node", "doi", "url", "batch"]

    # Simulate the job-completion callback carrying deposit details.
    completed_job = Job(
        result=dict(
            deposited="2020-11-20T22:03:57.603438Z",
            deposit_request=dict(),
            deposit_response=dict(),
            deposit_success=True,
        )
    )
    doi.register_callback(completed_job)
    assert isinstance(doi.deposited, datetime)
    assert isinstance(doi.deposit_request, dict)
    assert isinstance(doi.deposit_response, dict)
    assert doi.deposit_success is True

    # Simulate receiving the registrar's response email, then reload the row.
    receive_registration_email(None, success_email)
    doi = Doi.objects.get(id=doi.id)
    assert doi.registered is not None
    assert doi.registration_success
    assert doi.registration_response == success_email["text"]
def save_job(job_info, skills, employer_info, city):
    """Persist a scraped job posting with its employer, city and skills.

    Failures are logged to stdout and deliberately do not abort the run,
    so one bad record cannot stop a whole scraping session.
    """
    if employer_info:
        employer, _ = Employer.objects.get_or_create(**employer_info)
    else:
        employer, _ = Employer.objects.get_or_create(name=job_info['company'])
    # Replace the raw strings with model instances before building the Job.
    job_info['company'] = employer
    job_info['city'], _ = City.objects.get_or_create(name=city)
    job_db = Job(**job_info)
    try:
        job_db.save()
        print(f"Job {job_info['hh_id']} is added")
    except Exception as e:
        print(f"Error while job {job_info['hh_id']} adding: {e}")
    try:
        employer.vacancies.add(job_db)
        print(f"Employer {employer}: job {job_info['hh_id']} is added")
    except Exception as e:
        print(
            f"Error while adding job {job_info['hh_id']} to employer {employer}: {e}"
        )
    # Guard kept so skills=None is tolerated, not just an empty list.
    if skills:
        for skill_name in skills:  # descriptive name instead of `i`
            skill, _ = Skill.objects.get_or_create(name=skill_name)
            try:
                job_db.skills.add(skill)
                print(f"Skill {skill} is added")
            except Exception as e:
                print(f"Error while skill {skill} adding: {e}")
def create(self, validated_data):  # noqa
    """Build a hidden Job plus its HDXExportRegion from validated input."""
    def pick(source, keys):
        # Keep only the wanted keys that are actually present.
        return {key: source[key] for key in keys if key in source}

    job_kwargs = pick(validated_data, [
        'the_geom', 'export_formats', 'feature_selection', 'buffer_aoi'
    ])
    job_kwargs['user'] = self.context['request'].user
    job_kwargs['name'] = validated_data.get('dataset_prefix')
    job_kwargs['description'] = validated_data.get('name')

    region_kwargs = pick(validated_data, [
        'extra_notes', 'is_private', 'locations', 'license',
        'schedule_period', 'schedule_hour', 'subnational', 'planet_file'
    ])

    job = Job(**job_kwargs)
    job.hidden = True
    job.unlimited_extent = True
    validate_model(job)
    # Save job and region together so a region failure rolls back the job.
    with transaction.atomic():
        job.save()
        region_kwargs['job'] = job
        region = HDXExportRegion(**region_kwargs)
        validate_model(region)
        region.save()
    return region
def create(self, validated_data):  # noqa
    """Build a hidden Job plus its PartnerExportRegion from validated input."""
    def pick(source, keys):
        # Keep only the wanted keys that are actually present.
        return {key: source[key] for key in keys if key in source}

    job_kwargs = pick(validated_data, [
        'the_geom', 'export_formats', 'feature_selection',
    ])
    job_kwargs['user'] = self.context['request'].user
    job_kwargs['name'] = validated_data.get('name')
    job_kwargs['event'] = validated_data.get('event') or ""
    job_kwargs['description'] = validated_data.get('description') or ""

    region_kwargs = pick(validated_data, [
        'schedule_period', 'schedule_hour', 'group', 'planet_file'
    ])

    job = Job(**job_kwargs)
    job.hidden = True
    job.unlimited_extent = True
    validate_model(job)
    # Check on creation that the requesting user is a member of the group.
    requesting_user = self.context['request'].user
    if not requesting_user.groups.filter(name=region_kwargs['group'].name).exists():
        raise serializers.ValidationError(
            {'group': 'You are not a member of this group.'})
    # Save job and region together so a region failure rolls back the job.
    with transaction.atomic():
        job.save()
        region_kwargs['job'] = job
        region = PartnerExportRegion(**region_kwargs)
        validate_model(region)
        region.save()
    return region
def fetchFromFeed():
    """Poll the configured feed and store any jobs newer than the most
    recently imported feed job.
    """
    mlogger.debug(
        "Fetching new jobs from feedurl.................................")
    # The unused local `feedurl` was removed; the URL is read from settings
    # directly at the single point of use.
    parsed = feedparser.parse(settings.FEED_URL)
    # Newest previously-imported feed job, if any (queryset of length <= 1).
    latestjob = Job.objects.filter(
        viafeed=True).order_by('-viafeeddatetime')[:1]
    for entry in parsed['entries']:
        # Post each job into the admin.
        title = entry['title']
        description = entry['description']
        link = entry['link']
        description = description + "<br/> <br/> More information : <br/> " + link
        date = datetime.datetime(*entry['date_parsed'][:6])
        # Save only entries strictly newer than the last import; on the very
        # first run (no prior feed jobs) everything is saved.
        if len(latestjob) == 0 or (date > latestjob[0].viafeeddatetime):
            mlogger.debug("saving a job")
            j = Job()
            j.title = title
            j.description = description
            j.viafeed = True
            j.viafeeddatetime = date
            j.save()
def test_job_creation(self):
    """A valid fixture cleans, saves, and gets auto-populated fields."""
    new_job = Job(**self.fixture)
    new_job.full_clean()
    new_job.save()
    # uid and both timestamps are filled in automatically on save.
    for attr in ('uid', 'created_at', 'updated_at'):
        self.assertIsNotNone(getattr(new_job, attr))
def handle(self, *args, **options):
    """Bulk-create fake Company/Job pairs for development fixtures."""
    start_time = time.time()
    records_created = 0
    # Loop-invariant queries hoisted out of the loop: the template logo and
    # the category pool do not change while records are generated, and the
    # original re-queried the database on every iteration.
    shared_logo = Company.objects.get(pk=1).logo
    categories = list(Category.objects.all())
    for i in range(options['count']):
        name = fake.company()
        c = Company(name=name,
                    location=f'{fake.city()}, {fake.state_abbr()}',
                    url=name.lower().replace(' ', '-') + '.com',
                    logo=shared_logo)
        c.save()
        records_created += 1
        j = Job(
            title=fake.job() + ' Intern',
            company=c,
            hours_per_week=fake.pyint(min_value=10, max_value=40, step=5),
            application_link='scoretwo.com',
            category=random.choice(categories),
            description=fake.text(max_nb_chars=1500, ext_word_list=None),
            qualifications=fake.text(max_nb_chars=1500, ext_word_list=None),
        )
        j.save()
        records_created += 1
        # \r keeps the progress line updating in place.
        sys.stdout.write(
            f"\rCreating Company/Job {i} of {options['count']}")
        sys.stdout.flush()
    end_time = time.time()
    print(
        f'\rDone. Created {records_created} records in {round(end_time-start_time, 2)} seconds.'
    )
def obj_create(self, bundle, **kwargs):
    """Create a new job, or deactivate an existing one.

    If the payload carries a truthy ``deactivate`` flag the referenced job
    is soft-deleted (flagged, not removed); otherwise a new Job is created
    for the given company. The resulting object is attached to the bundle.

    Fixes: Python-2-only syntax (``print x``, ``except Exception, e``)
    modernized; numbered debug prints removed; ``raise e`` replaced with a
    bare ``raise`` so the original traceback is preserved.
    """
    try:
        if bundle.data.get('deactivate'):
            # Soft-delete: flag the existing job rather than deleting the row.
            existing_job = Job.objects.get(id=bundle.data['job_id'])
            existing_job.deactivate = True
            existing_job.save()
            bundle.obj = existing_job
        else:
            company = Company.objects.get(id=bundle.data['company_id'])
            new_job = Job(name=bundle.data['name'],
                          job_type=bundle.data['job_type'],
                          location=bundle.data['location'],
                          description=bundle.data['description'],
                          company=company)
            new_job.save()
            bundle.obj = new_job
    except Exception as e:
        print(e)
        raise
def handle(self, *args, **options):
    """Replace all stored jobs with fresh listings from page 1 of the feed."""
    listings = self.get_for_page(1)
    # Per-object delete (rather than queryset .delete()) preserves any
    # per-instance delete overrides/signals the model may define.
    for job in Job.objects.all():
        job.delete()
    for listing in listings:
        # Narrowed from bare `except:` — only a missing or oddly-shaped
        # field should fall back to '', not every conceivable error.
        try:
            location = listing['company']['location']['city']
        except (KeyError, TypeError):
            location = ''
        try:
            company = str(listing['company']['name'])
        except (KeyError, TypeError):
            company = ''
        newjob = Job(title=listing['title'],
                     company=company,
                     location=location,
                     url=listing['url'],
                     data=json.dumps(listing),
                     created=listing['post_date'])
        newjob.save()
def test_work(self):
    """Posting to the work endpoint without authorization is forbidden."""
    pending_job = Job(job_name='some-job')
    pending_job.put()
    payload = {'job_key': str(pending_job.key()), 'params': '{}'}
    response = self.client.post(reverse('jobs.work'), payload)
    self.assertEqual(response.status_code, 403)
def test_job_process_should_return_Statistical_Matching_Adapter(self):
    """Job.process() must hand back a StatisticalMatchingAdapter."""
    matching_job = Job(user=self.user,
                       covariate_variables='forest_cov',
                       outcome_variables='forest_cov')
    data = Data(AbstractFeature())
    adapter = matching_job.process(data)
    self.assertIsInstance(adapter, StatisticalMatchingAdapter)
def test_region_validates_job_name(self):
    """A mixed-case job name is rejected as an HDX dataset prefix."""
    self.job_fixture['name'] = 'InvalidPrefixWithCaps'
    another_job = Job(**self.job_fixture)
    region = HDXExportRegion(job=another_job)
    with self.assertRaises(ValidationError) as e:
        region.full_clean()
    # assertIn gives a clearer failure message than assertTrue(x in y).
    self.assertIn('dataset_prefix', e.exception.message_dict)
def test_new_match_should_set_header_in_psm(self):
    """The adapter's column names must mirror the job's covariates."""
    matching_job = Job(user=self.user,
                       covariate_variables='ttime_min,slope',
                       outcome_variables='forest_cov')
    data = Data(AbstractFeature())
    adapter = matching_job.process(data)
    expected_names = matching_job.covariate_variables.split(',')
    self.assertEqual(adapter.names, expected_names)
def test_get_nonexistant_job_product(self):
    """Fetching the product of a deleted job must return 404."""
    # Create a job, remember its key, then delete it.
    doomed = Job(job_name="counter")
    doomed.save()
    doomed_key = doomed.key()
    doomed.delete()
    response = self.client.get(reverse('jobs.product', args=(doomed_key, )))
    self.assertEqual(response.status_code, 404)
def test_get_matched_should_return_ids(self):
    """Every match produced must come from the known control set."""
    matching_job = Job(user=self.user,
                       covariate_variables='forest_cov',
                       outcome_variables='forest_cov')
    data = Data(AbstractFeature())
    adapter = matching_job.process(data)
    # Per-item membership check (same pass/fail as all(...), clearer failures).
    for match in adapter.matches:
        self.assertIn(match, self.control_set)
def mutate_and_get_payload(root, info, **_input):  # pylint: disable=no-self-argument
    """GraphQL mutation: persist a new Job and wrap it in the payload."""
    field_names = ('name', 'per_meter', 'value_per_meter', 'job_group')
    # Every constructor kwarg is pulled straight from the mutation input.
    new_job = Job(**{field: _input.get(field) for field in field_names})
    new_job.save()
    return CreateJob(job=new_job)
def create_job(self):
    """Create and save a fully-populated sample Job fixture.

    Builds the Job against a department from ``create_department()`` and
    fills every model field with realistic NYC-jobs-posting data. The long
    text fields contain stray ``\\ `` sequences from the source data; they
    are preserved byte-for-byte so fixtures stay stable. Returns None; the
    saved Job is the side effect.
    """
    department = self.create_department()
    job = Job(
        id="87990",
        department=department,
        business_title="Account Manager",
        # NOTE(review): leading apostrophe and truncation look like source-data
        # artifacts — preserved intentionally.
        civil_service_title="'CONTRACT REVIEWER (OFFICE OF L",
        title_code_no="40563",
        level="1",
        job_category="",
        ft_pt_indicator="",
        salary_start=Decimal("42405.0000000000"),
        salary_end=Decimal("65485.0000000000"),
        salary_frequency="Annual",
        work_location="110 William St. N Y",
        division="Strategy & Analytics",
        job_description="Spend time working on accounts. Use a calculator.",
        min_qualifications="1.\tA baccalaureate degree \ from an accredited college and\ two years of experience in community \ work or community centered activities in\ an area related to the duties described \ above; or 2.\tHigh school graduation\ or equivalent",
        preferred_skills="Excellent interpersonal and \ organizational skills",
        additional_info="",
        to_apply="For DoITT Employees Only Please go to \ Employee Self Service (ESS), \ click on Recruiting Activities > Careers, and \ search for Job ID #184328 -or- \ If you do not have access to a computer, \ please mail resume indicating Job ID # to: \ Department of Information Technology and \ Telecommunications (DoITT) Recruitment Office\ - 255 Greenwich Street - 9th Floor - New York,\ NY 10007 SUBMISSION OF A RESUME \ IS NOT A GUARANTEE THAT YOU WILL RECEIVE \ AN INTERVIEW APPOINTMENTS ARE SUBJECT TO \ OVERSIGHT APPROVAL",
        hours_info="Day - Due to the necessary \ technical management duties of this position \ in a 24/7 operation, candidate may be \ required to be on call and/or work various \ shifts such as weekends and/or nights/evenings.",
        secondary_work_location="Brooklyn, NY",
        recruitment_contact="",
        residency_requirement="New York City Residency \ is not required for this position",
        posting_date=datetime.date(2015, 2, 19),
        post_until=None,
        posting_updated=datetime.date(2015, 2, 19),
        process_date=datetime.date(2019, 10, 15),
    )
    job.save()
def setUp(self):
    """Create and persist a single job posting used by the tests."""
    self.client = Client()
    self.job = Job()
    # Populate every required field before saving.
    field_values = {
        'position': "test",
        'company_name': "company test",
        'website': "pythonjobs.ie",
        'category': "full",
        'description': "Testing",
        'email': "*****@*****.**",
        'location': "Testing",
    }
    for field, value in field_values.items():
        setattr(self.job, field, value)
    self.job.save()
def setUp(self):
    """Build a request factory, a user, and one saved matching job."""
    self.factory = RequestFactory()
    self.user = User.objects.create(username='******')
    # Collect the job configuration first, then construct and persist it.
    job_kwargs = dict(
        user=self.user,
        caliper_distance=0,
        common_support='true',
        covariate_variables='forest_cov,dis_to_cit',
        matching_estimator='NN',
        matching_method='PSM',
        outcome_variables='forest_cov',
        standard_error_type='SIMPLE',
    )
    self.job = Job(**job_kwargs)
    self.job.save()
def submit(request):
    """Store a submitted report (coordinates plus three images) as a Job."""
    # Guard clause: anything other than POST is rejected immediately.
    if request.method != "POST":
        return HttpResponse("Failed")
    job = Job()
    # Copy each expected form field straight onto the model instance; a
    # missing field raises KeyError exactly as the original code did.
    for field in ("latitude", "longitude", "image1", "image2", "image3"):
        setattr(job, field, request.POST[field])
    job.save()
    return HttpResponse("Successfully Submitted")
def create_file_job(name, sf_id, *args, **kwargs):
    """MD5, backup, etc"""
    # The stored-file id is always the first job argument.
    job_args = [sf_id, *args]
    new_job = Job(funcname=name,
                  jobtype=jobmap[name]['type'],
                  timestamp=datetime.now(),
                  state=Jobstates.PENDING,
                  args=json.dumps(job_args),
                  kwargs=json.dumps(kwargs))
    new_job.save()
    # Link the job to the stored file it operates on.
    FileJob.objects.create(storedfile_id=sf_id, job_id=new_job.id)
    return new_job
def test_job_reaper_kills_old_jobs(self):
    """Starting a job should reap a stale job of the same name."""
    # Seed a job that started three days ago (well past any reap cutoff).
    stale = Job(job_name='counter')
    stale.started = datetime.datetime.now() - timedelta(days=3)
    stale.save()
    stale_key = stale.key()
    response = self.client.post(reverse('jobs.start'),
                                {'job_name': 'counter'})
    json_response = simplejson.loads(response.content)
    self.assert_json_success(json_response)
    # The reaper should have removed the stale record.
    self.assertEqual(Job.get(stale_key), None)
def store_ds_job(name, prejob_args, **kwargs):
    """Create a PENDING dataset job plus one FileJob link per stored file."""
    # Resolve the stored files this job will operate on.
    stored_files = jobmap[name]['getfns'](*prejob_args)
    sf_ids = [stored.id for stored in stored_files]
    new_job = Job(funcname=name,
                  jobtype=jobmap[name]['type'],
                  timestamp=datetime.now(),
                  state=Jobstates.PENDING,
                  args=json.dumps(prejob_args + sf_ids),
                  kwargs=json.dumps(kwargs))
    new_job.save()
    FileJob.objects.bulk_create(
        [FileJob(storedfile_id=sf_id, job_id=new_job.id) for sf_id in sf_ids])
    return new_job
def add_job(command, enabled, interval):
    """Return the Job for *command*, creating it if it does not exist yet.

    Bug fix: the original returned from inside ``finally``, which silently
    swallowed any in-flight exception (e.g. a missing Command) and could
    raise NameError when ``j`` was never bound. The return now follows the
    try block so real errors propagate to the caller.
    """
    logger.debug('command:' + str(command))
    logger.debug('enabled:' + str(enabled))
    logger.debug('interval:' + str(interval))
    try:
        j = Job.objects.get(command=Command.objects.get(name=command))
    except ObjectDoesNotExist:
        j = Job(command=Command.objects.get(name=command),
                enabled=enabled,
                interval=interval)
        j.save()
        logger.info("New Job Added: " + command)
    return j
def test_only_admin_can_access(self):
    """A DJ (non-admin) must receive 403 from every jobs endpoint."""
    self.client.logout()
    assert self.client.login(email="*****@*****.**", roles=[roles.DJ])
    # start endpoint
    start_resp = self.client.post(
        reverse('jobs.start'),
        data={'job_name': 'build-trafficlog-report'})
    eq_(start_resp.status_code, 403)
    # work endpoint, exercised against a real pending job
    job = Job(job_name='build-trafficlog-report')
    job.put()
    work_resp = self.client.post(reverse('jobs.work'), data={
        'job_key': job.key(),
        'params': '{}'
    })
    eq_(work_resp.status_code, 403)
    # product endpoint
    product_resp = self.client.post(
        reverse('jobs.product', args=[str(job.key())]))
    eq_(product_resp.status_code, 403)
def create_job_instance(row):
    """Persist one Job built from a row whose keys match the model fields."""
    field_names = (
        'description', 'expiration_date', 'employment_type', 'education',
        'headline', 'post_date', 'slug', 'location', 'salary',
        'salary_frequency', 'benefits', 'link', 'job_title',
    )
    # Every constructor kwarg name equals its row key, so a comprehension
    # builds the same call the original spelled out field by field.
    job = Job(**{name: row[name] for name in field_names})
    job.save()
def start_job(request): init_jobs() # TODO(kumar) check for already running jobs reap_dead_jobs() job_name = request.POST['job_name'] job = Job(job_name=job_name) job.put() worker = get_worker(job_name) if worker['pre_request']: early_response = worker['pre_request'](request) if early_response is not None: return early_response @as_json def data(request): return {'job_key': str(job.key()), 'success': True} return data(request)
def create_job(kwargs):
    """Create a new Job together with its first version and optional files.

    Expects in *kwargs*: 'name' (non-empty str) and 'author' (a User);
    optionally 'parent', 'identifier', 'filedata', 'absolute_url'.
    Raises BridgeException for invalid input, a duplicate name/identifier,
    or a file-save failure (in which case the new job is deleted again).
    Returns the saved Job.
    """
    if 'name' not in kwargs or len(kwargs['name']) == 0:
        logger.error('The job name was not got')
        raise BridgeException()
    # EAFP duplicate-name check: absence (ObjectDoesNotExist) is the OK path.
    try:
        Job.objects.get(name=kwargs['name'])
    except ObjectDoesNotExist:
        pass
    else:
        raise BridgeException(_('The job name is already used'))
    if 'author' not in kwargs or not isinstance(kwargs['author'], User):
        logger.error('The job author was not got')
        raise BridgeException()
    newjob = Job(name=kwargs['name'], change_date=now(),
                 change_author=kwargs['author'], parent=kwargs.get('parent'))
    if 'identifier' in kwargs and kwargs['identifier'] is not None:
        if Job.objects.filter(identifier=kwargs['identifier']).count() > 0:
            # This exception will be occurred only on jobs population (if for preset jobs identifier would be set)
            # or jobs uploading
            raise BridgeException(_('The job with specified identifier already exists'))
        newjob.identifier = kwargs['identifier']
    else:
        # No identifier supplied: derive one from the current timestamp.
        time_encoded = now().strftime("%Y%m%d%H%M%S%f%z").encode('utf-8')
        newjob.identifier = hashlib.md5(time_encoded).hexdigest()
    newjob.save()
    new_version = create_version(newjob, kwargs)
    if 'filedata' in kwargs:
        try:
            SaveFileData(kwargs['filedata'], new_version)
        except Exception as e:
            logger.exception(e)
            # Roll back the half-created job before signalling failure.
            newjob.delete()
            raise BridgeException()
    if 'absolute_url' in kwargs:
        # newjob_url = reverse('jobs:job', args=[newjob.pk])
        # Notify(newjob, 0, {'absurl': kwargs['absolute_url'] + newjob_url})
        pass
    else:
        # Notify(newjob, 0)
        pass
    return newjob