def run(job, *args, **kwargs):
    """
    For each server attached to `job`, find its snapshots and spawn a child
    "delete_snapshots" Job to remove them.

    Returns a CloudBolt-style status tuple (status, output, errors):
    ('', '', '') on success, ('FAILURE', '', <error message>) on any exception.
    """
    try:
        for server in job.server_set.all():
            job.set_progress("Checking server {} for snapshots...".format(server))
            snapshots = ServerSnapshot.objects.filter(server_id=server.id)
            if not snapshots:
                job.set_progress('No snapshots exist for server {}'.format(server))
                continue

            logger.info("Found snapshot, noting for deletion...")
            job_params = DeleteSnapshotsParameters.objects.create()
            job_params.snapshots.add(*snapshots)

            logger.info("Creating job...")
            child_job = Job(
                type="delete_snapshots",
                job_parameters=job_params,
                owner=job.owner,
                parent_job=job,
            )
            child_job.save()
            child_job.server_set.add(server)

            # Evaluate the snapshot count once instead of calling len() twice.
            count = len(snapshots)
            msg = (' Job #{} has been created to delete {} snapshot{} from server '
                   '{}.').format(
                child_job.pk, count,
                's' if count > 1 else '', server)
            job.set_progress(msg)
        job.set_progress('Finished looking for snapshots on servers. Check child' +
                         ' job(s) for progress updates.')
    except Exception as err:
        # Fix: return the error as a string; the original returned the raw
        # exception object in a tuple of otherwise-string fields.
        return ('FAILURE', '', str(err))

    return ('', '', '')
Example #2
0
 def test_do_work(self):
     """Start the 'counter' job and work it three times, checking progress."""
     response = self.client.post(reverse('jobs.start'), {
         'job_name': 'counter'
     })
     json_response = simplejson.loads(response.content)
     self.assert_json_success(json_response)
     job_key = json_response['job_key']

     # Work the counter three times; only the third pass reports finished.
     # (The original duplicated this sequence verbatim three times.)
     for expected_count, expected_finished in ((1, False), (2, False), (3, True)):
         response = self.client.post(reverse('jobs.work'), {
             'job_key': job_key
         })
         json_response = simplejson.loads(response.content)
         self.assert_json_success(json_response)
         job = Job.all().filter('job_name =', 'counter')[0]
         self.assertEqual(simplejson.loads(job.result),
                          {'count': expected_count})
         self.assertEqual(json_response['finished'], expected_finished)
Example #3
0
    def handle(self, *args, **options):
        """Seed the DB with `count` fake Companies, each paired with one Job."""
        start_time = time.time()
        r = 0
        # Hoist loop-invariant DB queries: the shared logo and the category
        # pool were previously re-fetched on every iteration.
        shared_logo = Company.objects.get(pk=1).logo
        categories = list(Category.objects.all())
        for i in range(options['count']):
            name = fake.company()
            c = Company(name=name,
                        location=f'{fake.city()}, {fake.state_abbr()}',
                        url=name.lower().replace(' ', '-') + '.com',
                        logo=shared_logo)
            c.save()
            r += 1

            j = Job(
                title=fake.job() + ' Intern',
                company=c,
                hours_per_week=fake.pyint(min_value=10, max_value=40, step=5),
                application_link='scoretwo.com',
                category=random.choice(categories),
                description=fake.text(max_nb_chars=1500, ext_word_list=None),
                qualifications=fake.text(max_nb_chars=1500,
                                         ext_word_list=None),
            )
            j.save()
            r += 1

            # \r keeps the progress on one terminal line.
            sys.stdout.write(
                f"\rCreating Company/Job {i} of {options['count']}")
            sys.stdout.flush()

        end_time = time.time()
        print(
            f'\rDone. Created {r} records in {round(end_time-start_time, 2)} seconds.'
        )
Example #4
0
    def test_do_work(self):
        """Start the 'counter' job, work it three times, and verify progress."""
        response = self.client.post(reverse('jobs.start'),
                                    {'job_name': 'counter'})
        json_response = simplejson.loads(response.content)
        self.assert_json_success(json_response)
        job_key = json_response['job_key']

        # Work the counter three times; only the last pass reports finished.
        # (The original repeated this block verbatim three times.)
        for expected_count, expected_finished in ((1, False), (2, False),
                                                  (3, True)):
            response = self.client.post(reverse('jobs.work'),
                                        {'job_key': job_key})
            json_response = simplejson.loads(response.content)
            self.assert_json_success(json_response)
            job = Job.all().filter('job_name =', 'counter')[0]
            self.assertEqual(simplejson.loads(job.result),
                             {'count': expected_count})
            self.assertEqual(json_response['finished'], expected_finished)
Example #5
0
    def handle(self, *args, **options):
        """Replace all stored Jobs with fresh listings from page 1 of the feed."""
        listings = self.get_for_page(1)

        # NOTE: per-instance delete is kept (rather than a bulk queryset
        # delete) in case Job.delete() is overridden or signals are attached.
        jobs = Job.objects.all()
        for job in jobs:
            job.delete()

        for listing in listings:
            # Listings may lack company details; fall back to empty strings.
            # Fix: catch the specific lookup failures instead of a bare
            # `except:` that also swallowed KeyboardInterrupt/SystemExit.
            try:
                location = listing['company']['location']['city']
            except (KeyError, TypeError):
                location = ''

            try:
                company = str(listing['company']['name'])
            except (KeyError, TypeError):
                company = ''

            newjob = Job(title=listing['title'],
                         company=company,
                         location=location,
                         url=listing['url'],
                         data=json.dumps(listing),
                         created=listing['post_date'])
            newjob.save()
Example #6
0
 def obj_create(self, bundle, **kwargs):
     """
     Create or delete a new job
     """
     # NOTE(review): Python 2 syntax (`print 1`, `except Exception, e`) —
     # this handler will not parse under Python 3. The numbered prints
     # look like leftover debug tracing.
     print 1
     try:
         # A truthy "deactivate" flag in the payload means soft-disable an
         # existing job (identified by job_id) instead of creating one.
         if 'deactivate' in bundle.data.keys(
         ) and bundle.data['deactivate']:
             print 2
             existing_job = Job.objects.get(id=bundle.data['job_id'])
             existing_job.deactivate = True
             existing_job.save()
             bundle.obj = existing_job
         else:
             # Normal path: build a new Job under the given company.
             print 3
             company = Company.objects.get(id=bundle.data['company_id'])
             new_job = Job(name=bundle.data['name'],
                           job_type=bundle.data['job_type'],
                           location=bundle.data['location'],
                           description=bundle.data['description'],
                           company=company)
             new_job.save()
             bundle.obj = new_job
     except Exception, e:
         # Logged to stdout, then re-raised for the API layer to handle.
         print e
         raise e
Example #7
0
def save_job(job_info, skills, employer_info, city):
    """
    Persist one scraped job: create/look up its Employer and City, save the
    Job row, then link the employer and any skills. Progress and errors are
    reported on stdout (best-effort importer style).
    """
    if employer_info:
        employer, _ = Employer.objects.get_or_create(**employer_info)
    else:
        employer, _ = Employer.objects.get_or_create(name=job_info['company'])
    job_info['company'] = employer

    job_info['city'], _ = City.objects.get_or_create(name=city)

    job_db = Job(**job_info)
    try:
        job_db.save()
        print(f"Job {job_info['hh_id']} is added")
    except Exception as e:
        print(f"Error while job {job_info['hh_id']} adding: {e}")
        # Fix: the job was never persisted, so linking M2M relations below
        # would fail anyway — bail out instead of continuing.
        return

    try:
        employer.vacancies.add(job_db)
        print(f"Employer {employer}: job {job_info['hh_id']} is added")
    except Exception as e:
        print(
            f"Error while adding job {job_info['hh_id']} to employer {employer}: {e}"
        )

    # `skills` may be None or empty; `or ()` keeps the loop a no-op then.
    for skill_name in skills or ():
        skill, _ = Skill.objects.get_or_create(name=skill_name)
        try:
            job_db.skills.add(skill)
            print(f"Skill {skill} is added")
        except Exception as e:
            print(f"Error while skill {skill} adding: {e}")
Example #8
0
    def create(project: Project, user: User) -> "Snapshot":
        """
        Snapshot the project.

        Builds a series of sub-jobs (cleanup, pull, optional reflow, pin,
        optional index.html conversion, archive), wraps them in a parent
        "series" Job, dispatches it, and attaches that job to the snapshot.

        Returns the Snapshot — the previous `-> Job` annotation was wrong:
        the function returns `snapshot`, not `job`.
        """
        snapshot = Snapshot.objects.create(project=project, creator=user)

        subjobs = []

        # Clean the project's working directory
        subjobs.append(project.cleanup(user))

        # Pull the project's sources
        subjobs.append(project.pull(user))

        # "Reflow" the project (regenerate derived files)
        reflow = project.reflow(user)
        if reflow:
            subjobs.append(reflow)

        # Pin the container image
        subjobs.append(
            project.pin(user, **Job.create_callback(snapshot, "pin_callback")))

        # Create an index.html if a "main" file is defined
        main = project.get_main()
        if main:
            options = {}

            theme = project.get_theme()
            if theme:
                options["theme"] = theme

            subjobs.append(main.convert(user, "index.html", options=options))

        # This is currently required to populate field `zip_name` below
        snapshot.save()

        # Archive the working directory to the snapshot directory
        subjobs.append(
            project.archive(
                user,
                snapshot=snapshot.id,
                path=f"{project.id}/{snapshot.id}/{snapshot.zip_name}",
                **Job.create_callback(snapshot, "archive_callback"),
            ))

        # Parent job runs the sub-jobs in sequence.
        job = Job.objects.create(
            method=JobMethod.series.name,
            description="Snapshot project '{0}'".format(project.name),
            project=project,
            creator=user,
        )
        job.children.set(subjobs)
        job.dispatch()

        snapshot.job = job
        snapshot.save()

        return snapshot
Example #9
0
 def test_work(self):
     """POSTing to jobs.work without proper credentials is forbidden (403)."""
     pending = Job(job_name='some-job')
     pending.put()
     payload = {'job_key': str(pending.key()), 'params': '{}'}
     response = self.client.post(reverse('jobs.work'), payload)
     self.assertEqual(response.status_code, 403)
 def test_new_match_should_set_header_in_psm(self):
     """The adapter's variable names mirror the job's covariate list."""
     psm_job = Job(user=self.user,
                   covariate_variables='ttime_min,slope',
                   outcome_variables='forest_cov')
     wrapped = Data(AbstractFeature())
     adapter = psm_job.process(wrapped)
     self.assertEqual(adapter.names,
                      psm_job.covariate_variables.split(','))
 def test_job_process_should_return_Statistical_Matching_Adapter(self):
     """job.process wraps the data in a StatisticalMatchingAdapter."""
     psm_job = Job(user=self.user,
                   covariate_variables='forest_cov',
                   outcome_variables='forest_cov')
     adapter = psm_job.process(Data(AbstractFeature()))
     self.assertIsInstance(adapter, StatisticalMatchingAdapter)
Example #12
0
 def post(self, request):
     """Create a Job for the requesting user's company from a JSON body."""
     json_data = json.loads(request.body)
     company = Company.objects.get(user=request.user)
     # SECURITY(review): Job(**json_data) mass-assigns every key from the
     # untrusted request body onto the model; whitelist the expected fields
     # before constructing the Job.
     job = Job(**json_data)
     # Company/user are set after construction so they cannot be spoofed
     # by matching keys in the payload.
     job.company = company
     job.user = request.user
     job.save()
     return JsonResponse({"status": "ok", "id": job.id, 'msg': ''})
Example #13
0
 def test_work(self):
     """jobs.work rejects this client with HTTP 403."""
     unauthorized_job = Job(job_name='some-job')
     unauthorized_job.put()
     response = self.client.post(
         reverse('jobs.work'),
         {'job_key': str(unauthorized_job.key()), 'params': '{}'})
     self.assertEqual(403, response.status_code)
 def test_get_matched_should_return_ids(self):
     """Every match produced by the adapter comes from the control set."""
     job = Job(user=self.user,
               covariate_variables='forest_cov',
               outcome_variables='forest_cov')
     abstractfeature = AbstractFeature()
     data = Data(abstractfeature)
     sma = job.process(data)
     # Fix: feed all() a generator instead of materialising a list (C419);
     # short-circuits on the first non-member.
     self.assertTrue(
         all(match in self.control_set for match in sma.matches))
Example #15
0
    def mutate_and_get_payload(root, info, **_input):  # pylint: disable=no-self-argument
        """Create a Job from the mutation input and wrap it in the payload."""
        fields = ('name', 'per_meter', 'value_per_meter', 'job_group')
        job = Job(**{field: _input.get(field) for field in fields})
        job.save()

        return CreateJob(job=job)
Example #16
0
def create_file_job(name, sf_id, *args, **kwargs):
    """MD5, backup, etc"""
    # The stored file id is always the first positional job argument.
    job = Job(
        funcname=name,
        jobtype=jobmap[name]['type'],
        timestamp=timezone.now(),
        state=Jobstates.PENDING,
        args=json.dumps([sf_id, *args]),
        kwargs=json.dumps(kwargs),
    )
    job.save()
    FileJob.objects.create(storedfile_id=sf_id, job_id=job.id)
    return job
Example #17
0
 def create_job(self):
     """Build and save a fully-populated Job fixture under a new department.

     NOTE(review): the backslash-continued string literals below embed the
     source indentation inside the field values (e.g. min_qualifications);
     presumably intentional for this fixture — confirm before reformatting.
     """
     department = self.create_department()
     job = Job(
         id="87990",
         department=department,
         business_title="Account Manager",
         civil_service_title="'CONTRACT REVIEWER (OFFICE OF L",
         title_code_no="40563",
         level="1",
         job_category="",
         ft_pt_indicator="",
         salary_start=Decimal("42405.0000000000"),
         salary_end=Decimal("65485.0000000000"),
         salary_frequency="Annual",
         work_location="110 William St. N Y",
         division="Strategy & Analytics",
         job_description="Spend time working on accounts. Use a calculator.",
         min_qualifications="1.\tA baccalaureate degree \
         from an accredited college and\
          two years of experience in community \
          work or community centered activities in\
           an area related to the duties described \
           above; or  2.\tHigh school graduation\
            or equivalent",
         preferred_skills="Excellent interpersonal and \
         organizational skills",
         additional_info="",
         to_apply="For DoITT Employees Only  Please go to \
         Employee Self Service (ESS), \
         click on Recruiting Activities > Careers, and \
         search for Job ID #184328  -or-   \
         If you do not have access to a computer, \
         please mail resume indicating Job ID # to:  \
         Department of Information Technology and \
         Telecommunications (DoITT)  Recruitment Office\
          - 255 Greenwich Street - 9th Floor - New York,\
           NY 10007    SUBMISSION OF A RESUME \
          IS NOT A GUARANTEE THAT YOU WILL RECEIVE \
          AN INTERVIEW  APPOINTMENTS ARE SUBJECT TO \
          OVERSIGHT APPROVAL",
         hours_info="Day - Due to the necessary \
         technical management duties of this position \
         in a 24/7 operation, candidate may be \
         required to be on call and/or work various \
         shifts such as weekends and/or nights/evenings.",
         secondary_work_location="Brooklyn, NY",
         recruitment_contact="",
         residency_requirement="New York City Residency \
         is not required for this position",
         posting_date=datetime.date(2015, 2, 19),
         post_until=None,
         posting_updated=datetime.date(2015, 2, 19),
         process_date=datetime.date(2019, 10, 15),
     )
     job.save()
Example #18
0
 def test_only_admin_can_access(self):
     """A logged-in non-admin (DJ role) gets 403 from every jobs endpoint."""
     self.client.logout()
     assert self.client.login(email="*****@*****.**", roles=[roles.DJ])
     start_resp = self.client.post(reverse("jobs.start"), data={"job_name": "build-trafficlog-report"})
     eq_(start_resp.status_code, 403)
     report_job = Job(job_name="build-trafficlog-report")
     report_job.put()
     work_resp = self.client.post(reverse("jobs.work"), data={"job_key": report_job.key(), "params": "{}"})
     eq_(work_resp.status_code, 403)
     product_resp = self.client.post(reverse("jobs.product", args=[str(report_job.key())]))
     eq_(product_resp.status_code, 403)
Example #19
0
def store_ds_job(name, prejob_args, **kwargs):
    """Create a PENDING dataset Job plus one FileJob row per stored file."""
    prejob_results = jobmap[name]['getfns'](*prejob_args)
    sf_ids = [stored.id for stored in prejob_results]
    job = Job(funcname=name, jobtype=jobmap[name]['type'],
              timestamp=timezone.now(),
              state=Jobstates.PENDING, args=json.dumps(prejob_args + sf_ids),
              kwargs=json.dumps(kwargs))
    job.save()
    links = [FileJob(storedfile_id=sf_id, job_id=job.id) for sf_id in sf_ids]
    FileJob.objects.bulk_create(links)
    return job
Example #20
0
 def setUp(self):
     """Create a test client and one saved Job fixture."""
     self.client = Client()
     self.job = Job()
     fixture = {
         "position": "test",
         "company_name": "company test",
         "website": "pythonjobs.ie",
         "category": "full",
         "description": "Testing",
         "email": "*****@*****.**",
         "location": "Testing",
     }
     for field, value in fixture.items():
         setattr(self.job, field, value)
     self.job.save()
 def setUp(self):
     """Build a request factory, a user, and a saved PSM Job fixture."""
     self.factory = RequestFactory()
     self.user = User.objects.create(username='******')
     job_config = dict(user=self.user,
                       caliper_distance=0,
                       common_support='true',
                       covariate_variables='forest_cov,dis_to_cit',
                       matching_estimator='NN',
                       matching_method='PSM',
                       outcome_variables='forest_cov',
                       standard_error_type='SIMPLE')
     self.job = Job(**job_config)
     self.job.save()
Example #22
0
def create(request):
    """Render the new-job form; on a valid POST, store the job and redirect."""
    if request.method == 'POST':
        form = JobForm(request.POST)
        if form.is_valid():
            job = Job(**form.cleaned_data)
            # Admin-submitted jobs go live immediately.
            if users.is_current_user_admin():
                job.publish()
            job.put()
            return HttpResponseRedirect('/jobs/my')
        # Invalid POST falls through and re-renders with the bound form.
    else:
        form = JobForm()
    context = {'form': form, 'title': 'Add new job'}
    return _custom_render_to_response('job_form.html', context)
Example #23
0
def create_file_job(name, sf_id, *args, **kwargs):
    """MD5, backup, etc"""
    jobargs = [sf_id, *args]
    jobtype = jobmap[name]['type']
    pending = Job(funcname=name,
                  jobtype=jobtype,
                  timestamp=datetime.now(),
                  state=Jobstates.PENDING,
                  args=json.dumps(jobargs),
                  kwargs=json.dumps(kwargs))
    pending.save()
    FileJob.objects.create(storedfile_id=sf_id, job_id=pending.id)
    return pending
Example #24
0
def fetchFromFeed():
    """Import jobs from the configured feed, saving only entries newer than
    the most recently imported feed job."""
    feedurl = settings.FEED_URL
    mlogger.debug(
        "Fetching new jobs from feedurl.................................")
    # Fix: use the local `feedurl` — it was previously assigned but unused
    # while settings.FEED_URL was read a second time.
    d = feedparser.parse(feedurl)
    # Most recent feed-imported job, if any (sliced queryset of length <= 1).
    latestjob = Job.objects.filter(
        viafeed=True).order_by('-viafeeddatetime')[:1]

    for entry in d['entries']:
        # Post each job into the admin.
        title = entry['title']
        link = entry['link']
        description = entry['description'] + "<br/> <br/> More information : <br/> " + link
        date = datetime.datetime(*entry['date_parsed'][:6])

        # Only save entries newer than the last imported one.
        if not latestjob or date > latestjob[0].viafeeddatetime:
            mlogger.debug("saving a job")
            j = Job()
            j.title = title
            j.description = description
            j.viafeed = True
            j.viafeeddatetime = date
            j.save()
Example #25
0
def add_job(command, enabled, interval):
    """
    Return the Job for `command`, creating it (with the given enabled flag
    and interval) if it does not exist yet.
    """
    logger.debug('command:' + str(command))
    logger.debug('enabled:' + str(enabled))
    logger.debug('interval:' + str(interval))
    # Fix: resolve the Command once; the original looked it up both in the
    # try and again inside the except handler.
    cmd = Command.objects.get(name=command)
    try:
        j = Job.objects.get(command=cmd)
    except ObjectDoesNotExist:
        j = Job(command=cmd, enabled=enabled, interval=interval)
        j.save()
        logger.info("New Job Added: " + command)
    # Fix: the original `finally: return j` silently swallowed any exception
    # raised inside the except block; a plain return preserves errors.
    return j
Example #26
0
def store_ds_job(name, prejob_args, **kwargs):
    """Register a PENDING dataset job and link every stored file to it."""
    fetched = jobmap[name]['getfns'](*prejob_args)
    sf_ids = [item.id for item in fetched]
    ds_job = Job(funcname=name,
                 jobtype=jobmap[name]['type'],
                 timestamp=datetime.now(),
                 state=Jobstates.PENDING,
                 args=json.dumps(prejob_args + sf_ids),
                 kwargs=json.dumps(kwargs))
    ds_job.save()
    file_links = [FileJob(storedfile_id=sf_id, job_id=ds_job.id)
                  for sf_id in sf_ids]
    FileJob.objects.bulk_create(file_links)
    return ds_job
Example #27
0
def cancel_job(job: Job) -> Job:
    """
    Cancel a job.

    This uses Celery's terminate options which will kill the worker child process.
    This is not normally recommended but in this case is OK because there is only
    one task per process.
    See `worker/worker.py` for the reasoning for using `SIGUSR1`.
    See https://docs.celeryproject.org/en/stable/userguide/workers.html#revoke-revoking-tasks
    """
    # Guard clause: nothing to do for a job that has already ended.
    if JobStatus.has_ended(job.status):
        return job
    celery.control.revoke(str(job.id), terminate=True, signal="SIGUSR1")
    job.status = JobStatus.CANCELLED.value
    job.save()
    return job
Example #28
0
 def test_only_admin_can_access(self):
     """Every jobs endpoint returns 403 for a logged-in non-admin (DJ)."""
     self.client.logout()
     assert self.client.login(email="*****@*****.**",
                              roles=[roles.DJ])
     start = self.client.post(reverse('jobs.start'),
                              data={'job_name': 'build-trafficlog-report'})
     eq_(start.status_code, 403)
     report_job = Job(job_name='build-trafficlog-report')
     report_job.put()
     work = self.client.post(reverse('jobs.work'),
                             data={'job_key': report_job.key(),
                                   'params': '{}'})
     eq_(work.status_code, 403)
     product = self.client.post(
         reverse('jobs.product', args=[str(report_job.key())]))
     eq_(product.status_code, 403)
Example #29
0
def submit(request):
	"""Save a Job from POSTed coordinates and images; anything but POST fails."""
	if request.method != "POST":
		return HttpResponse("Failed")
	j = Job()
	# Copy each expected form field straight onto the model.
	for field in ("latitude", "longitude", "image1", "image2", "image3"):
		setattr(j, field, request.POST[field])
	j.save()
	return HttpResponse("Successfully Submitted")
Example #30
0
    def extract(self,
                review,
                user: Optional[User] = None,
                filters: Optional[Dict] = None) -> Job:
        """
        Extract a review from a project source.

        Creates a job, and adds it to the source's `jobs` list.
        Note: the jobs callback is `Review.extract_callback`.
        """
        source = self.to_address()
        source["type"] = source.type_name

        description = "Extract review from {0}".format(self.address)

        extraction_job = Job.objects.create(
            project=self.project,
            creator=user or self.creator,
            method=JobMethod.extract.value,
            params=dict(source=source, filters=filters),
            description=description,
            secrets=self.get_secrets(user),
            **Job.create_callback(review, "extract_callback"),
        )
        self.jobs.add(extraction_job)
        return extraction_job
Example #31
0
 def setUp(self):
     """Build a minimal race fixture: a running race, checkpoints, one job
     that is ready immediately, one racer entered, and one picked-up run."""
     self.central = pytz.timezone('US/Central')
     self.now = datetime.datetime.now(tz=self.central)
     self.race = Race(race_name='Test Race', race_type=Race.RACE_TYPE_FINALS, race_start_time=self.now)
     self.race.save()
     # RaceControl points at the race currently being run.
     self.race_control = RaceControl(current_race=self.race)
     self.race_control.save()

     self.pick_checkpoint = Checkpoint(checkpoint_number=1, checkpoint_name="Test Checkpoint 1")
     self.pick_checkpoint.save()
     self.drop_checkpoint = Checkpoint(checkpoint_number=2, checkpoint_name="Test Checkpoint 2")
     self.drop_checkpoint.save()
     # A third checkpoint not involved in the job, for negative cases.
     self.other_checkpoint = Checkpoint(checkpoint_number=3, checkpoint_name="Test Checkpoint 3")
     self.other_checkpoint.save()

     # minutes_ready_after_start=0 makes the job available immediately.
     self.ready_now_job = Job(job_id=1, race=self.race, pick_checkpoint=self.pick_checkpoint, drop_checkpoint=self.drop_checkpoint, minutes_ready_after_start=0)
     self.ready_now_job.save()

     self.racer = Racer(racer_number=320, first_name='Doug', last_name='Suriano', category=Racer.RACER_CATEGORY_MESSENGER)
     self.racer.save()

     self.raceentry = RaceEntry(racer=self.racer, race=self.race)
     self.raceentry.save()

     # A run already in PICKED state, timestamped in UTC.
     self.run = Run(pk=1, job=self.ready_now_job, race_entry=self.raceentry, status=Run.RUN_STATUS_PICKED, utc_time_picked=datetime.datetime.now(tz=pytz.utc))
     self.run.save()
Example #32
0
def do_job_work(request):
    """Run one unit of work for the job named in POST['job_key'] and report
    whether the job has finished."""
    init_jobs()
    try:
        job = Job.get(request.POST['job_key'])
        params = request.POST.get('params', '{}')
        worker = get_worker(job.job_name)
        pre_request = worker['pre_request']
        if pre_request:
            early_response = pre_request(request)
            if early_response is not None:
                return early_response
        # Hand the worker its previously-stored result, if any.
        previous = simplejson.loads(job.result) if job.result else None
        finished, result = worker['callback'](previous,
                                              simplejson.loads(params))
        job.result = simplejson.dumps(result)
        job.save()
    except:
        # Log the traceback, then let the exception propagate.
        traceback.print_exc()
        raise

    @as_json
    def data(request):
        return {'finished': finished, 'success': True}

    return data(request)
Example #33
0
def _get_job_or_404(job_id):
    """Fetch a Job by id; 404 unless it exists and the caller may manage it."""
    job = Job.get_by_id(int(job_id))
    # Only the job owner or an admin can edit, check or delete the job.
    current_user = users.get_current_user()
    if not (job and job.owned_by(current_user, users.is_current_user_admin())):
        raise Http404
    return job
Example #34
0
    def pull(self, user: Optional[User] = None) -> Job:
        """
        Pull the source to the filesystem.

        Creates a job, and adds it to the source's `jobs` list.
        """
        source = self.to_address()
        source["type"] = source.type_name

        # Upload sources are "collected" rather than "pulled".
        verb = "Collect" if self.type_class == "UploadSource" else "Pull"
        description = "{0} {1}".format(verb, self.address)

        pull_job = Job.objects.create(
            project=self.project,
            creator=user or self.creator,
            method=JobMethod.pull.value,
            params=dict(source=source, path=self.path),
            description=description,
            secrets=self.get_secrets(user),
            **Job.create_callback(self, "pull_callback"),
        )
        self.jobs.add(pull_job)
        return pull_job
Example #35
0
def do_job_work(request):
    """Execute one work step for the posted job key; respond with progress."""
    init_jobs()
    try:
        job = Job.get(request.POST['job_key'])
        raw_params = request.POST.get('params', '{}')
        worker = get_worker(job.job_name)
        if worker['pre_request']:
            short_circuit = worker['pre_request'](request)
            if short_circuit is not None:
                return short_circuit
        if job.result:
            prior = simplejson.loads(job.result)
        else:
            prior = None
        finished, outcome = worker['callback'](prior,
                                               simplejson.loads(raw_params))
        job.result = simplejson.dumps(outcome)
        job.save()
    except:
        # Print the traceback for the logs, then re-raise.
        traceback.print_exc()
        raise

    @as_json
    def data(request):
        return {'finished': finished, 'success': True}

    return data(request)
Example #36
0
def test_success(db):
    """End-to-end DOI registration: create, register, callback, email receipt.

    The steps below are order-dependent: register() populates job params,
    register_callback() records the deposit, and the email closes the loop.
    """
    article = Node.objects.create(json={})
    doi = Doi.objects.create(node=article)

    # Simulate creating a job to register the DOI
    job = doi.register()
    assert list(job.params.keys()) == ["node", "doi", "url", "batch"]

    # Simulate callback on job completion
    doi.register_callback(
        Job(
            result=dict(
                deposited="2020-11-20T22:03:57.603438Z",
                deposit_request=dict(),
                deposit_response=dict(),
                deposit_success=True,
            )
        )
    )
    # The callback parses the ISO timestamp and copies the deposit fields.
    assert isinstance(doi.deposited, datetime)
    assert isinstance(doi.deposit_request, dict)
    assert isinstance(doi.deposit_response, dict)
    assert doi.deposit_success is True

    # Simulate receiving response email
    receive_registration_email(None, success_email)
    # Re-fetch: the email handler updates the row, not this instance.
    doi = Doi.objects.get(id=doi.id)
    assert doi.registered is not None
    assert doi.registration_success
    assert doi.registration_response == success_email["text"]
Example #37
0
    def convert(self,
                user: User,
                output: str,
                options: Dict = {},
                snapshot: bool = False) -> Job:
        """
        Convert a file to another format.

        Creates a `convert` job which returns a list of files produced (may be
        more than one e.g a file with a media folder). Each of the files will have this
        file as an upstream dependency.

        For certain target formats (e.g. `gdoc`), a source is also created (e.g. `GoogleDocsSource`)
        in the job callback. The source will have this new file as a downstream dependant,
        and this file will have the new file as an upstream dependency.

        Do not call back if this conversion is for a snapshot (do
        not want a file entry not for those at present).
        """
        # Fix: copy before mutating. The original wrote options["from"] into
        # the shared `{}` default (and into callers' dicts), leaking state
        # between calls — the classic mutable-default-argument bug.
        options = dict(options)
        if self.mimetype:
            options["from"] = self.mimetype

        return Job.objects.create(
            project=self.project,
            creator=user,
            description="Convert '{0}' to '{1}'".format(self.path, output),
            method=JobMethod.convert.name,
            params=dict(input=self.path, output=output, options=options),
            secrets=GoogleSourceMixin().get_secrets(user)
            if output.endswith(".gdoc") else None,
            **(Job.create_callback(self, "convert_callback")
               if not snapshot else {}))
Example #38
0
def create_job_instance(row):
    """Build and save a Job from one imported row (dict-like)."""
    fields = (
        'description', 'expiration_date', 'employment_type', 'education',
        'headline', 'post_date', 'slug', 'location', 'salary',
        'salary_frequency', 'benefits', 'link', 'job_title',
    )
    job = Job(**{field: row[field] for field in fields})
    job.save()
Example #39
0
    def test_region_validates_job_name(self):
        """A dataset prefix containing capitals fails region validation."""
        self.job_fixture['name'] = 'InvalidPrefixWithCaps'
        region = HDXExportRegion(job=Job(**self.job_fixture))

        with self.assertRaises(ValidationError) as e:
            region.full_clean()
        self.assertIn('dataset_prefix', e.exception.message_dict)
Example #40
0
def show(request, job_id):
    """Render a job detail page; unpublished jobs only for owner or admin."""
    job = Job.get_by_id(int(job_id))
    if not job:
        raise Http404
    # Only the job owner or an admin can view unpublished jobs.
    if not job.is_published and not job.owned_by(
            users.get_current_user(), users.is_current_user_admin()):
        raise Http404
    return _custom_render_to_response('show_job.html', {'job': job, 'title': job.title})
    def create(self, validated_data):  # noqa
        """Create a hidden export Job and its HDXExportRegion atomically."""
        def slice_dict(in_dict, wanted_keys):
            # Keep only the wanted keys that are actually present.
            return dict((k, in_dict[k]) for k in wanted_keys if k in in_dict)

        job_dict = slice_dict(validated_data, [
            'the_geom', 'export_formats', 'feature_selection', 'buffer_aoi'
        ])
        job_dict['user'] = self.context['request'].user
        # The dataset prefix becomes the job name; the display name its
        # description.
        job_dict['name'] = validated_data.get('dataset_prefix')
        job_dict['description'] = validated_data.get('name')

        region_dict = slice_dict(validated_data, [
            'extra_notes', 'is_private', 'locations', 'license',
            'schedule_period', 'schedule_hour', 'subnational', 'planet_file'
        ])
        job = Job(**job_dict)
        job.hidden = True
        job.unlimited_extent = True
        job.per_theme = True
        validate_model(job)
        # Save the job and its region together or not at all.
        with transaction.atomic():
            job.save()
            region_dict['job'] = job
            region = HDXExportRegion(**region_dict)
            validate_model(region)
            region.save()
        return region
Example #42
0
 def obj_update(self, bundle, **kwargs):
     """
     Update to deactive mostly
     """
     # NOTE(review): Python 2 syntax (`except Exception, e`, `print e`) —
     # this handler will not parse under Python 3.
     try:
         # A truthy "deactivate" flag soft-disables the existing job;
         # otherwise a brand-new Job is created under the given company.
         if 'deactivate' in bundle.data.keys() and bundle.data['deactivate']:
             existing_job = Job.objects.get(id=bundle.data['job_id'])
             existing_job.deactivate = True
             existing_job.save()
             bundle.obj = existing_job
         else:
             company = Company.objects.get(id=bundle.data['company_id'])
             new_job = Job(name=bundle.data['name'], job_type=bundle.data['job_type'], location=bundle.data['location'], description=bundle.data['description'], company=company)
             new_job.save()
             bundle.obj = new_job
     except Exception, e:
         # Logged to stdout, then re-raised for the API layer to handle.
         print e
         raise e
Example #43
0
def start_job(request):
    """Create a Job entity for the requested worker and return its key as JSON."""
    init_jobs()
    # TODO(kumar) check for already running jobs
    reap_dead_jobs()

    name = request.POST['job_name']
    new_job = Job(job_name=name)
    new_job.put()

    worker = get_worker(name)
    pre_request = worker['pre_request']
    if pre_request:
        # The worker may short-circuit the request (e.g. auth failure).
        early_response = pre_request(request)
        if early_response is not None:
            return early_response

    @as_json
    def data(request):
        return {'job_key': str(new_job.key()), 'success': True}

    return data(request)
Example #44
0
    def test_start(self):
        """Starting the counter worker returns its datastore key, no result yet."""
        payload = {'job_name': 'counter'}
        response = self.client.post(reverse('jobs.start'), payload)
        json_response = simplejson.loads(response.content)
        self.assert_json_success(json_response)

        stored = Job.all().filter('job_name =', 'counter')[0]
        self.assertEqual(json_response['job_key'], str(stored.key()))
        self.assertEqual(stored.result, None)
Example #45
0
 def test_start(self):
     """A started counter job is persisted and its key echoed back."""
     response = self.client.post(
         reverse('jobs.start'), {'job_name': 'counter'})
     body = simplejson.loads(response.content)
     self.assert_json_success(body)

     persisted = Job.all().filter('job_name =', 'counter')[0]
     self.assertEqual(body['job_key'], str(persisted.key()))
     self.assertEqual(persisted.result, None)
Example #46
0
def start_job(request):
    """Kick off a background job of the requested type and return its key."""
    init_jobs()
    # TODO(kumar) check for already running jobs
    reap_dead_jobs()

    requested = request.POST['job_name']
    job = Job(job_name=requested)
    job.put()

    worker = get_worker(requested)
    hook = worker['pre_request']
    if hook:
        # Give the worker a chance to reject or redirect the request.
        short_circuit = hook(request)
        if short_circuit is not None:
            return short_circuit

    @as_json
    def data(request):
        return {'job_key': str(job.key()), 'success': True}

    return data(request)
Example #47
0
 def obj_create(self, bundle, **kwargs):
     """
     Create or delete a new job

     A truthy 'deactivate' flag in the payload deactivates the existing
     Job identified by 'job_id'; otherwise a new Job is created from the
     payload for the given company.
     """
     # NOTE(review): the numbered prints were debug tracing in the original;
     # kept (as py3-compatible calls) to preserve output, but consider
     # replacing them with logging.
     print(1)
     try:
         if bundle.data.get('deactivate'):
             print(2)
             existing_job = Job.objects.get(id=bundle.data['job_id'])
             existing_job.deactivate = True
             existing_job.save()
             bundle.obj = existing_job
         else:
             print(3)
             company = Company.objects.get(id=bundle.data['company_id'])
             new_job = Job(
                 name=bundle.data['name'],
                 job_type=bundle.data['job_type'],
                 location=bundle.data['location'],
                 description=bundle.data['description'],
                 company=company,
             )
             new_job.save()
             bundle.obj = new_job
     except Exception as e:  # was py2-only "except Exception, e" (SyntaxError on py3)
         print(e)
         raise  # bare raise keeps the original traceback
Example #48
0
def rss(request):
    """Render the 20 most recently published jobs as an RSS feed."""
    jobs = Job.all().filter('status =', 'published').order('-published_at').fetch(20)
    # Feed pub_date is the newest published_at, or None when there are no jobs.
    timestamps = [entry.published_at for entry in jobs]
    pub_date = max(timestamps) if timestamps else None
    context = {'jobs': jobs,
               'host': request.get_host(),
               'pub_date': pub_date}
    return render_to_response('rss.xml', context,
                              mimetype='application/rss+xml')
Example #49
0
 def test_get_nonexistant_job_product(self):
     """Requesting the product of a deleted job should 404."""
     # Create a job, remember its key, then delete it.
     stale = Job(job_name="counter")
     stale.save()
     stale_key = stale.key()
     stale.delete()

     response = self.client.get(reverse('jobs.product', args=(stale_key,)))
     self.assertEqual(response.status_code, 404)
Example #50
0
    def test_saving_and_retrieving_jobs(self):
        """Jobs persist and come back in creation order with their names intact."""
        for job_name in ('first_job_ever', 'job_number_two'):
            job = Job()
            job.name = job_name
            job.save()

        saved_jobs = Job.objects.all()
        self.assertEqual(saved_jobs.count(), 2)
        self.assertEqual(saved_jobs[0].name, 'first_job_ever')
        self.assertEqual(saved_jobs[1].name, 'job_number_two')
Example #51
0
 def setUp(self):
     """Build a race fixture: one race, three checkpoints, three jobs, two racers."""
     # All race times are timezone-aware in US/Central.
     self.central = pytz.timezone('US/Central')
     self.now = datetime.datetime.now(tz=self.central)
     self.race = Race(race_name='Test Race', race_type=Race.RACE_TYPE_FINALS, race_start_time=self.now)
     self.race.save()
     self.race_control = RaceControl(current_race=self.race)
     self.race_control.save()

     # Pickup, dropoff, and an unrelated third checkpoint.
     self.pick_checkpoint = Checkpoint(checkpoint_number=1, checkpoint_name="Test Checkpoint 1")
     self.pick_checkpoint.save()
     self.drop_checkpoint = Checkpoint(checkpoint_number=2, checkpoint_name="Test Checkpoint 2")
     self.drop_checkpoint.save()
     self.other_checkpoint = Checkpoint(checkpoint_number=3, checkpoint_name="Test Checkpoint 3")
     self.other_checkpoint.save()

     # Job that is ready as soon as the race starts.
     self.ready_now_job = Job(job_id=1, race=self.race, pick_checkpoint=self.pick_checkpoint, drop_checkpoint=self.drop_checkpoint, minutes_ready_after_start=0)
     self.ready_now_job.save()

     # Job that only becomes ready an hour after the start.
     self.test_minutes_offset = 60
     self.not_ready_job = Job(job_id=2, race=self.race, pick_checkpoint=self.pick_checkpoint, drop_checkpoint=self.drop_checkpoint, minutes_ready_after_start=self.test_minutes_offset)
     self.not_ready_job.save()

     # Job that was due one minute after start, so it is already past due.
     self.dead_job = Job(job_id=3, race=self.race, pick_checkpoint=self.pick_checkpoint, drop_checkpoint=self.drop_checkpoint, minutes_ready_after_start=0, minutes_due_after_start=1)
     self.dead_job.save()

     # A normal racer with a plain race entry.
     self.racer = Racer(racer_number=320, first_name='Doug', last_name='Suriano', category=Racer.RACER_CATEGORY_MESSENGER)
     self.racer.save()

     # A disqualified racer plus the entry recording the DQ status.
     self.dq_racer = Racer(racer_number=666, first_name='Doug', last_name='Suriano', category=Racer.RACER_CATEGORY_MESSENGER)
     self.dq_racer.save()

     self.dq_entry = RaceEntry(racer=self.dq_racer, race=self.race, entry_status=RaceEntry.ENTRY_STATUS_DQD)
     self.dq_entry.save()

     self.raceentry = RaceEntry(racer=self.racer, race=self.race)
     self.raceentry.save()
def delete_server_and_send_email(server, job):
    """
    If the server is old enough, delete it and send an email to inform the owner
    """
    decom_jobs = create_decom_job_for_servers([server], parent_job=job)

    # Notify the owner before blocking on the decommission jobs.
    notification = (
        'This is an email notifying you that your server "{}" '
        'has been deleted. Please contact your CloudBolt administrator '
        'for more information.'.format(server.hostname)
    )
    email_owner(notification, server)

    return Job.wait_for_jobs(decom_jobs)
Example #53
0
def get_job_product(request, job_key):
    """Look up a finished job and hand its JSON result to the producer callback."""
    init_jobs()
    job = Job.get(job_key)
    if job is None:
        raise Http404(
            "The requested job product does not exist.  It may have expired, "
            "in which case you will have to run the job again.")
    # TODO(kumar) make sure job is finished
    producer = get_producer(job.job_name)
    hook = producer['pre_request']
    if hook:
        # The producer may short-circuit the request.
        early = hook(request)
        if early is not None:
            return early
    return producer['callback'](simplejson.loads(job.result))
Example #54
0
def check_queued(request):
    """Verify the oldest queued job's code appears on its owner's profile.

    Publishes the job if the code is found; otherwise marks it failed.
    """
    queued = Job.all().filter('status =', 'queued').order('queued_at').fetch(1)
    if not queued:
        return HttpResponse('No queued jobs.')

    job = queued[0]  # fetch() returns a list of items
    result = urlfetch.fetch(job.owner_profile_url)
    # Publish only when the profile loaded and contains the job's code.
    if result.status_code == 200 and job.code in result.content:
        job.publish()
        job.put()
        return HttpResponse('Published')

    job.fail()
    job.put()
    return HttpResponse('Failed')
Example #55
0
 def test_job_reaper_kills_old_jobs(self):
     """Starting a new job should reap jobs older than the cutoff."""
     # Plant a job that started three days ago.
     stale = Job(job_name='counter')
     stale.started = datetime.datetime.now() - timedelta(days=3)
     stale.save()
     stale_key = stale.key()

     response = self.client.post(
         reverse('jobs.start'), {'job_name': 'counter'})
     body = simplejson.loads(response.content)
     self.assert_json_success(body)

     # The reaper run by jobs.start should have deleted the stale job.
     self.assertEqual(Job.get(stale_key), None)
 def setUp(self,):
     """Create a user, a bounding-box export job with all formats, and its tags."""
     self.path = os.path.dirname(os.path.realpath(__file__))
     self.formats = ExportFormat.objects.all() #pre-loaded by 'insert_export_formats' migration
     Group.objects.create(name='TestDefaultExportExtentGroup')
     # NOTE(review): credentials appear redacted ('******') in this fixture.
     self.user = User.objects.create(username='******', email='*****@*****.**', password='******')
     # Area of interest: a lon/lat bounding box in WGS84 (SRID 4326).
     bbox = Polygon.from_bbox((-7.96, 22.6, -8.14, 27.12))
     the_geom = GEOSGeometry(bbox, srid=4326)
     self.job = Job(name='TestJob',
                              description='Test description', event='Nepal activation',
                              user=self.user, the_geom=the_geom)
     self.job.save()
     self.uid = self.job.uid
     # add the formats to the job
     self.job.formats = self.formats
     self.job.save()
     # Key/value OSM tags attached to the job for feature filtering.
     self.tags = [('building','yes'), ('place','city'), ('highway','service'), ('aeroway','helipad')]
     for tag in self.tags:
         tag = Tag.objects.create(
             key = tag[0],
             value = tag[1],
             job = self.job
         )
Example #57
0
class JobModelTest(TestCase):
    """Unit tests for the Job model: creation, initial state, and reports."""

    def setUp(self):
        self.job = Job()
        self.job.audit = AuditFactory()
        self.job.user = UserFactory()

    def test_jobs_can_be_created(self):
        # Neither validation nor persistence should raise.
        self.job.full_clean()
        self.job.save()

    def test_job_initial_state(self):
        "Job initial state should be RECEIVED"
        self.assertEqual(self.job.state, Job.RECEIVED_STATE)

    def test_job_creation_timestamp(self):
        # created_at must fall inside the creation window.
        before = timezone.now()
        job = JobFactory()
        after = timezone.now()

        self.assertLessEqual(before, job.created_at)
        self.assertLessEqual(job.created_at, after)

    def test_job_report_url(self):
        "Fresh and unfinished jobs have no report file"
        job = JobFactory()
        with self.assertRaises(ValueError):
            job.report_file.url

    @patch('django.core.files.storage.default_storage._wrapped')
    def test_finished_job_report_url(self, storage_mock):
        # Fake the storage backend so no real file I/O happens.
        fake_report = MagicMock(spec=File, name='FileMock')
        fake_report.name = 'test_file.txt'

        storage_mock.url = MagicMock('url')
        storage_mock.url.return_value = fake_report.name
        storage_mock.save.return_value = fake_report

        self.job.add_report(fake_report)

        self.assertEqual(self.job.report_file, fake_report)
        self.assertEqual(self.job.state, Job.SUCCESS_STATE)
Example #58
0
def create(request, create_form=CreateJobForm,
           template_name='jobs/create_form.html', success_url=None,
           extra_context=None):
    """Display and process the job-creation form.

    On a valid POST, saves a Job plus one JobParameter per "name: value"
    line of the names_values field, then redirects to success_url
    (default '/jobs'). Otherwise renders template_name with the form.

    Fixes: template_name / success_url / extra_context were accepted but
    ignored by the original (template and redirect were hardcoded); the
    defaults preserve the old behavior. Dead commented-out profile code
    removed.
    """
    form = create_form()

    if request.method == 'POST':
        form = create_form(request.POST, request.FILES)

        if form.is_valid():
            job = Job()
            job.user = request.user
            job.name = form.cleaned_data['name']
            job.location = form.cleaned_data['location']
            job.intern_project = form.cleaned_data['intern_project']
            job.save()

            # One "name: value" pair per line; normalize whitespace and
            # split on the FIRST colon only, so values may contain colons.
            names_values = form.cleaned_data['names_values'].split('\n')
            for name_value in names_values:
                result = ' '.join(name_value.split()).split(":", 1)
                # Lines without a colon (or blank lines) are skipped.
                if len(result) == 2:
                    parameter = JobParameter()
                    parameter.name = result[0].strip()
                    parameter.value = result[1].strip()
                    parameter.container = job
                    parameter.save()

            return redirect(success_url or '/jobs')

    context = {'form': form}
    if extra_context:
        context.update(extra_context)
    return render_to_response(template_name, context,
                              context_instance=RequestContext(request))