Example #1
    def test_run_result(self):

        from job_dictator import JobDictator
        from job import Job
        from worker import Worker

        dictator = JobDictator()
        dictator.client = mock.MagicMock()
        dictator.client.keys.return_value = ['job-', 'jm-']
        job = Job('run_succeeded', 'something')
        worker = Worker('job-', None)
        dictator.client.get.side_effect = [pickle.dumps(job), pickle.dumps(worker)]
        self.request_mock.get = mock.MagicMock()
        dictator.settings = mock.MagicMock()
        dictator.headers = mock.MagicMock()
        returner = mock.MagicMock()
        returner.content = 'status:ok'
        self.request_mock.get.return_value = returner
        dictator.pull = mock.MagicMock()

        dictator.aladeen()

        assert dictator.client.keys.call_count == 2
        assert dictator.client.get.call_count == 2
        assert dictator.client.set.call_count == 1
        assert dictator.client.publish.call_count == 1
        assert dictator.pull.call_count == 1
Example #2
    def call_method(self, app, message):
        """Call method from websocket"""
        params = message.get('params') or []
        service, method_name = message['method'].rsplit('.', 1)
        methodobj = getattr(self.get_service(service), method_name)

        if not app.authenticated and not hasattr(methodobj, '_no_auth_required'):
            app.send_error(message, 'Not authenticated')
            return

        args = []
        if hasattr(methodobj, '_pass_app'):
            args.append(app)

        # If the method is marked as a @job we need to create a new
        # entry to keep track of its state.
        job_options = getattr(methodobj, '_job', None)
        if job_options:
            # Create a job instance with required args
            job = Job(self, message['method'], methodobj, args, job_options)
            # Add the job to the queue.
            # At this point an `id` is assigned to the job.
            self.__jobs.add(job)
        else:
            job = None

        args.extend(params)
        if job:
            return job.id
        else:
            return methodobj(*args)
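
A note on the markers this dispatcher checks: call_method only inspects attributes on the resolved method (_no_auth_required, _pass_app, _job), which are presumably attached by decorators elsewhere in the codebase. A minimal sketch of what such marker decorators could look like (the decorator names are assumptions; only the attribute names come from the code above):

def no_auth_required(fn):
    # Allow the method to be called before the session is authenticated.
    fn._no_auth_required = True
    return fn

def pass_app(fn):
    # Ask call_method to prepend the app instance to the call args.
    fn._pass_app = True
    return fn

def job(**options):
    # Attach job options so call_method wraps the call in a Job.
    def decorator(fn):
        fn._job = options
        return fn
    return decorator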
Example #3
 def test_dequeue_three_internal(self):
     """
     Dequeuing from a three-element queue removes each dequeued value from
     the internal list, highest-priority first.
     """
     pq = NaivePriorityQueue()
     lower_priority = Job(1, 'top')
     middle_priority = Job(3, 'of')
     higher_priority = Job(5, 'this')
     pq.enqueue(higher_priority)
     pq.enqueue(lower_priority)
     pq.enqueue(middle_priority)
     _ = pq.dequeue()
     self.assertEqual(lower_priority, pq.data[0])
     _ = pq.dequeue()
     self.assertEqual(lower_priority, pq.data[0])
Example #4
 def test_attributes(self):
     """
     A Job is instantiated with a priority and a message.
     """
     j = Job(23, "Fake message")
     self.assertEqual(23, j.priority)
     self.assertEqual("Fake message", j.message)
Example #5
    def cut_it(self, target_ip, target_mac):
        #global LOCK_CUT
        #LOCK_CUT = True
        self.lock_cut = True
        conf.verb = 0
        cut_thread = Job(target=self.cut_target,
                         args=(GATEWAY, GATEWAY_MAC, target_ip, target_mac))
        cut_thread.setDaemon(True)
        cut_thread.start()
        try:
            print "[*] Starting attack  {} mac:{}".format(
                target_ip, target_mac)
            time.sleep(TIME)  #断网十分钟

            #global LOCK_CUT
            #LOCK_CUT = False
            self.lock_cut = False
            cut_thread.stop()

            # restore the network configuration
            restore_target(GATEWAY, GATEWAY_MAC, target_ip, target_mac)
            return True
        except Exception as e:
            # restore the network configuration
            restore_target(GATEWAY, GATEWAY_MAC, target_ip, target_mac)
            return False
Example #6
    def test_parse_metadata_csv(self):
        """It should parse the metadata.csv into a dict."""
        # Create metadata.csv
        data = [
            ['Filename', 'dc.title', 'dc.date', 'Other metadata'],
            ['objects/foo.jpg', 'Foo', '2000', 'Taken on a sunny day'],
            ['objects/bar/', 'Bar', '2000', 'All taken on a rainy day'],
        ]
        with open('metadata.csv', 'wb') as f:
            writer = csv.writer(f)
            for row in data:
                writer.writerow(row)

        # Run test
        dc = archivematicaCreateMETSMetadataCSV.parseMetadataCSV(Job("stub", "stub", []), 'metadata.csv')
        # Verify
        assert dc
        assert 'objects/foo.jpg' in dc
        assert 'dc.title' in dc['objects/foo.jpg']
        assert dc['objects/foo.jpg']['dc.title'] == ['Foo']
        assert 'dc.date' in dc['objects/foo.jpg']
        assert dc['objects/foo.jpg']['dc.date'] == ['2000']
        assert 'Other metadata' in dc['objects/foo.jpg']
        assert dc['objects/foo.jpg']['Other metadata'] == ['Taken on a sunny day']
        assert list(dc['objects/foo.jpg'].keys()) == ['dc.title', 'dc.date', 'Other metadata']

        assert 'objects/bar' in dc
        assert 'dc.title' in dc['objects/bar']
        assert dc['objects/bar']['dc.title'] == ['Bar']
        assert 'dc.date' in dc['objects/bar']
        assert dc['objects/bar']['dc.date'] == ['2000']
        assert 'Other metadata' in dc['objects/bar']
        assert dc['objects/bar']['Other metadata'] == ['All taken on a rainy day']
        assert list(dc['objects/bar'].keys()) == ['dc.title', 'dc.date', 'Other metadata']
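
Read together, the assertions pin down the return shape of parseMetadataCSV: a mapping keyed by file or directory path (trailing slash stripped), each value an ordered mapping from CSV column header to a list of cell values. Reconstructed from the asserts above (the name expected is ours):

expected = {
    'objects/foo.jpg': {
        'dc.title': ['Foo'],
        'dc.date': ['2000'],
        'Other metadata': ['Taken on a sunny day'],
    },
    'objects/bar': {
        'dc.title': ['Bar'],
        'dc.date': ['2000'],
        'Other metadata': ['All taken on a rainy day'],
    },
}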
Example #7
def create_jobs(jobs_types, number_jobs, param):
    """
    This function takes as input the total number of jobs and the a list of dicts with the jobs types probabilities
    and characteristics
    Returns a list with the jobs
    """
    job_list = []

    # Store in a list the probabilities of each job type
    prob_seq = [job_type['probability'] for job_type in jobs_types]

    for i in range(number_jobs):
        job_type = random.choices(jobs_types, prob_seq)[0]
        if job_type['distr'] == 'n':
            x = np.random.normal(loc=param.mean, scale=param.std)
            x = np.round(x)
            while (x % 2 != 0 or x < 2):
                x = np.random.normal(loc=param.mean, scale=param.std)
                x = np.round(x)
            job_type['file_size'] = x
        if job_type['distr_mem']:
            job_type['memory'] = np.random.choice(job_type['distr_mem'])
        job_list.append(
            Job(number_jobs - i, job_type['cpu'], job_type['memory'],
                job_type['file_size'], job_type['transmit']))

    return job_list
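
For reference, a hypothetical call that matches the shapes create_jobs reads from its arguments (key names are taken from the function body above; the concrete values and the use of SimpleNamespace for param are invented):

from types import SimpleNamespace

jobs_types = [
    # 'n' means file_size is drawn from a normal distribution
    {'probability': 0.7, 'distr': 'n', 'distr_mem': [2, 4, 8],
     'cpu': 1, 'memory': 4, 'file_size': 0, 'transmit': 10},
    # any other 'distr' keeps the fixed file_size below
    {'probability': 0.3, 'distr': 'fixed', 'distr_mem': None,
     'cpu': 2, 'memory': 8, 'file_size': 6, 'transmit': 20},
]
param = SimpleNamespace(mean=8, std=2)  # parameters of the normal draw
job_list = create_jobs(jobs_types, 100, param)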
Example #8
    def test_no_wake_up_call_for_delayed(self):
        from machine_midwife import MachineMidwife
        Apprentice = MachineMidwife.Apprentice
        from job import Job
        from worker import Worker

        apprentice = Apprentice()
        apprentice.settings = mock.MagicMock()
        apprentice.settings.max_instances = 1
        apprentice.client = mock.MagicMock()
        apprentice.client.exists.return_value = True
        job = Job('delayed', 'batch-')
        apprentice.client.keys.side_effect = [['jm-1', 'jm-2'], ['job-']]
        w1 = Worker(None, None)
        w1.instance = 'a'
        w2 = Worker(None, None)
        w2.instance = 'b'
        apprentice.client.get.side_effect = [
            pickle.dumps(w1),
            pickle.dumps(w2),
            pickle.dumps(job)
        ]
        apprentice.client.publish = mock.MagicMock()

        apprentice.rise_and_shine()

        assert apprentice.client.keys.call_count == 2
        assert apprentice.client.get.call_count == 3
        assert apprentice.client.publish.call_count == 0
Example #9
    def test_stale_request(self):
        from machine_midwife import MachineMidwife
        Apprentice = MachineMidwife.Apprentice
        from job import Job
        from worker import Worker

        apprentice = Apprentice()
        apprentice.settings = mock.MagicMock()
        apprentice.settings.aws_req_max_wait = 1
        apprentice.client = mock.MagicMock()
        apprentice.client.exists.return_value = True
        job = Job('requested', 'batch-')
        worker = Worker(None, None)
        worker.reservation = 'some'
        worker.request_time = datetime.now() - timedelta(minutes=5)

        apprentice.client.keys.return_value = ['jm-']
        apprentice.client.get.side_effect = [
            pickle.dumps(worker), pickle.dumps(job)
        ]
        apprentice.client.set = mock.MagicMock()
        apprentice.client.publish = mock.MagicMock()
        apprentice.client.delete = mock.MagicMock()

        apprentice.check_newborn()

        assert apprentice.client.keys.call_count == 1
        assert apprentice.client.get.call_count == 2
        assert apprentice.client.set.call_count == 1
        assert apprentice.client.publish.call_count == 1
        assert apprentice.client.delete.call_count == 1
        assert pickle.loads(
            apprentice.client.set.call_args_list[0][0][1]).state == 'received'
        assert apprentice.client.delete.call_args_list[0][0][0] == 'jm-'
Example #10
 def test_run_too_soon(self):
     job = Job('test')
     job.status = Status.WAIT
     job.status = Status.RUN
     # Status.WAIT should be removed, so the history length should be
     # 2 rather than 3.
     assert len(job.status_history) == 2, 'history length should be 2'
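
The assertion implies that assigning Status.RUN straight after Status.WAIT replaces the WAIT entry rather than appending a third one. One way a status setter could produce that history (a sketch inferred from this test alone, including the assumed initial entry; not the project's actual code):

from enum import Enum, auto

class Status(Enum):
    NEW = auto()   # assumed initial state
    WAIT = auto()
    RUN = auto()

class Job:
    def __init__(self, name):
        self.name = name
        self.status_history = [Status.NEW]

    @property
    def status(self):
        return self.status_history[-1]

    @status.setter
    def status(self, new_status):
        # A RUN arriving right after WAIT supersedes it: overwrite the
        # WAIT entry instead of appending, keeping the history short.
        if new_status is Status.RUN and self.status_history[-1] is Status.WAIT:
            self.status_history[-1] = new_status
        else:
            self.status_history.append(new_status)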
Example #11
    def test_delayed_machine_state_flow_for_requested_with_recycle(self):
        from machine_midwife import MachineMidwife
        from job import Job
        from worker import Worker

        midwife = MachineMidwife()
        midwife.apprentice = mock.MagicMock()
        midwife.settings = mock.MagicMock()
        midwife.client = mock.MagicMock()
        midwife.job_pub_sub = mock.MagicMock()
        midwife.job_pub_sub.listen.return_value = [{'data': 'test'}]
        midwife.client.exists.return_value = True
        job = Job('delayed', 'batch-')
        worker = Worker(None, 'batch-')
        worker.reservation = 'reservation'
        worker.request_time = datetime.now()
        midwife.client.keys.return_value = ['jm-']
        midwife.client.get.side_effect = [
            pickle.dumps(job), pickle.dumps(worker)
        ]
        midwife.client.set = mock.MagicMock()
        midwife.client.publish = mock.MagicMock()

        midwife.run()

        assert midwife.client.exists.call_count == 2
        assert len(midwife.client.set.call_args_list) == 2
        assert pickle.loads(
            midwife.client.set.call_args_list[0][0][1]).job_id == 'test'
        assert pickle.loads(
            midwife.client.set.call_args_list[1][0][1]).state == 'booted'
Example #12
def run_task(data, filer_version):
    task_name = data['executors'][0]['metadata']['labels']['taskmaster-name']
    pvc = None

    if data['volumes'] or data['inputs'] or data['outputs']:

        filer = Filer(task_name + '-filer', data, filer_version,
                      args.pull_policy_always)
        if os.environ.get('TESK_FTP_USERNAME') is not None:
            filer.set_ftp(os.environ['TESK_FTP_USERNAME'],
                          os.environ['TESK_FTP_PASSWORD'])

        pvc = init_pvc(data, filer)

    for executor in data['executors']:
        run_executor(executor, args.namespace, pvc)

    logging.debug("Finished running executors")

    # upload files and delete pvc
    if data['volumes'] or data['inputs'] or data['outputs']:
        filerjob = Job(filer.get_spec('outputs', args.debug),
                       task_name + '-outputs-filer', args.namespace)

        global created_jobs
        created_jobs.append(filerjob)

        # filerjob.run_to_completion(poll_interval)
        status = filerjob.run_to_completion(poll_interval, check_cancelled)
        if status != 'Complete':
            exit_cancelled('Got status ' + status)
        else:
            pvc.delete()
Example #13
def init_pvc(data, filer):
    task_name = data['executors'][0]['metadata']['labels']['taskmaster-name']
    pvc_name = task_name + '-pvc'
    pvc_size = data['resources']['disk_gb']
    pvc = PVC(pvc_name, pvc_size, args.namespace)

    mounts = generate_mounts(data, pvc)
    logging.debug(mounts)
    logging.debug(type(mounts))
    pvc.set_volume_mounts(mounts)
    filer.add_volume_mount(pvc)

    pvc.create()
    # keep a global reference for cleanup purposes
    global created_pvc
    created_pvc = pvc

    filerjob = Job(filer.get_spec('inputs', args.debug),
                   task_name + '-inputs-filer', args.namespace)

    global created_jobs
    created_jobs.append(filerjob)
    # filerjob.run_to_completion(poll_interval)
    status = filerjob.run_to_completion(poll_interval, check_cancelled)
    if status != 'Complete':
        exit_cancelled('Got status ' + status)

    return pvc
Example #14
    def test_run_timeout(self):

        from job_dictator import JobDictator
        from job import Job
        from worker import Worker

        dictator = JobDictator()
        dictator.client = mock.MagicMock()
        dictator.client.keys.return_value = ['job-', 'jm-']
        job = Job('running', 'something')
        job.run_started_on = datetime.now() - timedelta(minutes=10)
        worker = Worker('job-', None)
        dictator.client.get.side_effect = [pickle.dumps(job), pickle.dumps(worker)]
        self.request_mock.get = mock.MagicMock()
        dictator.settings = mock.MagicMock()
        dictator.settings.job_timeout = 1
        dictator.headers = mock.MagicMock()
        returner = mock.MagicMock()
        returner.content = 'status:ok'
        self.request_mock.get.return_value = returner
        dictator.pull = mock.MagicMock()

        dictator.aladeen()

        assert dictator.client.keys.call_count == 2
        assert dictator.client.get.call_count == 2
        assert dictator.client.set.call_count == 1
        assert dictator.client.publish.call_count == 1
        assert dictator.pull.call_count == 0
        assert pickle.loads(dictator.client.set.call_args_list[0][0][1]).state == 'broken'
Example #15
 def test_dmdsec_from_csv_parsed_metadata_other_only(self):
     """It should only create an Other dmdSec from parsed metadata."""
     data = collections.OrderedDict([
         ("Title", ["Yamani Weapons"]),
         ("Contributor", [u"雪 ユキ".encode('utf8')]),
         ("Long Description", ['This is about how glaives are used in the Yamani Islands'])
     ])
     # Test
     ret = create_mets_v2.createDmdSecsFromCSVParsedMetadata(Job("stub", "stub", []), data)
     # Verify
     assert ret
     assert len(ret) == 1
     dmdsec = ret[0]
     assert dmdsec.tag == '{http://www.loc.gov/METS/}dmdSec'
     assert 'ID' in dmdsec.attrib
     mdwrap = dmdsec[0]
     assert mdwrap.tag == '{http://www.loc.gov/METS/}mdWrap'
     assert 'MDTYPE' in mdwrap.attrib
     assert mdwrap.attrib['MDTYPE'] == 'OTHER'
     assert 'OTHERMDTYPE' in mdwrap.attrib
     assert mdwrap.attrib['OTHERMDTYPE'] == 'CUSTOM'
     xmldata = mdwrap[0]
     assert xmldata.tag == '{http://www.loc.gov/METS/}xmlData'
     # Elements are direct children of xmlData
     assert len(xmldata) == 3
     assert xmldata[0].tag == 'title'
     assert xmldata[0].text == 'Yamani Weapons'
     assert xmldata[1].tag == 'contributor'
     assert xmldata[1].text == u'雪 ユキ'
     assert xmldata[2].tag == 'long_description'
     assert xmldata[2].text == 'This is about how glaives are used in the Yamani Islands'
Example #16
    def test_normal_machine_recycle(self):
        from consuela import Consuela
        from job import Job
        from worker import Worker

        cleaner = Consuela()
        cleaner.job_pub_sub = mock.MagicMock()
        cleaner.job_pub_sub.listen.return_value = [{'data': 'test'}]
        worker = Worker(None, None)
        worker.instance = 'some'
        cleaner.get_worker = mock.MagicMock()
        cleaner.get_worker.return_value = 'id', worker
        cleaner.client = mock.MagicMock()
        cleaner.client.exists.return_value = True
        cleaner.client.get.return_value = pickle.dumps(
            Job('finished', 'something'))
        cleaner.settings = mock.MagicMock()
        cleaner.settings.recycle_workers = True
        cleaner.recycle_worker = mock.MagicMock()
        cleaner.recycle_worker.return_value = True

        cleaner.run()

        assert cleaner.client.exists.call_count == 1
        assert cleaner.client.get.call_count == 1
        assert pickle.loads(
            cleaner.client.set.call_args_list[0][0][1]).job_id is None
Example #17
 def test_dmdsec_from_csv_parsed_metadata_no_data(self):
     """It should not create dmdSecs with no parsed metadata."""
     data = {}
     # Test
     ret = create_mets_v2.createDmdSecsFromCSVParsedMetadata(Job("stub", "stub", []), data)
     # Verify
     assert ret == []
Example #18
    def test_normal_machine_state_flow_for_received(self):
        self.aws_mock.start_machine = mock.MagicMock()
        self.aws_mock.start_machine.return_value = 'jm-', 'res0'

        from machine_midwife import MachineMidwife
        from job import Job
        from worker import Worker

        midwife = MachineMidwife()
        midwife.apprentice = mock.MagicMock()
        midwife.settings = mock.MagicMock()
        midwife.client = mock.MagicMock()
        midwife.job_pub_sub = mock.MagicMock()
        midwife.job_pub_sub.listen.return_value = [{'data': 'test'}]
        midwife.client.exists.return_value = True
        midwife.client.keys.return_value = ['job-', 'jm-']
        job = Job('received', 'something')
        worker = Worker('job-', None)
        midwife.client.get.side_effect = [
            pickle.dumps(job), pickle.dumps(worker)
        ]
        midwife.client.set = mock.MagicMock()
        midwife.client.publish = mock.MagicMock()

        midwife.run()

        assert midwife.client.exists.call_count == 2
        assert len(midwife.client.set.call_args_list) == 2
        assert midwife.client.set.call_args_list[0][0][0] == 'jm-'
        assert midwife.client.set.call_args_list[1][0][0] == 'test'
        assert self.aws_mock.start_machine.call_count == 1
        assert pickle.loads(
            midwife.client.set.call_args_list[1][0][1]).state == 'requested'
Example #19
 def test_create_rights_granted(self):
     # Setup
     elem = etree.Element("{info:lc/xmlns/premis-v2}rightsStatement", nsmap={'premis': NSMAP['premis']})
     statement = RightsStatement.objects.get(id=1)
     # Test
     archivematicaCreateMETSRights.getrightsGranted(Job("stub", "stub", []), statement, elem)
     # Verify
     assert len(elem) == 1
     rightsgranted = elem[0]
     assert rightsgranted.tag == '{info:lc/xmlns/premis-v2}rightsGranted'
     assert len(rightsgranted.attrib) == 0
     assert len(rightsgranted) == 4
     assert rightsgranted[0].tag == '{info:lc/xmlns/premis-v2}act'
     assert rightsgranted[0].text == 'Disseminate'
     assert len(rightsgranted[0].attrib) == 0
     assert len(rightsgranted[0]) == 0
     assert rightsgranted[1].tag == '{info:lc/xmlns/premis-v2}restriction'
     assert rightsgranted[1].text == 'Allow'
     assert len(rightsgranted[1].attrib) == 0
     assert len(rightsgranted[1]) == 0
     assert rightsgranted[2].tag == '{info:lc/xmlns/premis-v2}termOfGrant'
     assert len(rightsgranted[2].attrib) == 0
     assert len(rightsgranted[2]) == 2
     assert rightsgranted[2][0].tag == '{info:lc/xmlns/premis-v2}startDate'
     assert rightsgranted[2][0].text == '2000'
     assert rightsgranted[2][1].tag == '{info:lc/xmlns/premis-v2}endDate'
     assert rightsgranted[2][1].text == 'OPEN'
     assert rightsgranted[3].tag == '{info:lc/xmlns/premis-v2}rightsGrantedNote'
     assert rightsgranted[3].text == 'Attribution required'
     assert len(rightsgranted[3].attrib) == 0
     assert len(rightsgranted[3]) == 0
Example #20
    def test_failed_job_machine_removal(self):
        from consuela import Consuela
        from job import Job
        from worker import Worker

        with mock.patch('consuela.terminate_worker') as worker_mock:

            cleaner = Consuela()
            cleaner.job_pub_sub = mock.MagicMock()
            cleaner.job_pub_sub.listen.return_value = [{'data': 'test'}]
            worker = Worker(None, None)
            worker.instance = 'some'
            cleaner.get_worker = mock.MagicMock()
            cleaner.get_worker.return_value = 'id', worker
            cleaner.client = mock.MagicMock()
            cleaner.client.exists.return_value = True
            cleaner.client.get.return_value = pickle.dumps(
                Job('failed', 'something'))
            cleaner.settings = mock.MagicMock()
            cleaner.settings.recycle_workers = True
            cleaner.recycle_worker = mock.MagicMock()
            cleaner.recycle_worker.return_value = False

            cleaner.run()

            assert cleaner.client.exists.call_count == 1
            assert worker_mock.call_count == 0
Example #21
 def test_default_attributes(self):
     """
     A default Job has a priority and message that are None.
     """
     j = Job()
     self.assertEqual(None, j.priority)
     self.assertEqual(None, j.message)
Example #22
    def run_test_job(self, hosts, simulation=False, logdir='trace'):
        """

        :param hosts: list
        :param logdir: str
        :param simulation: bool
        :return:
        """
        monitor = self._get_monitor(hosts, list(), logdir, simulation)

        sensors = self._get_primary_sensors()
        for sensor in sensors:
            sensor_period = sensor.get_period()
            date = sensor_period.get_start_date() - datetime.timedelta(days=1)
            data_version = sensor.get_data_version()
            while date < sensor_period.get_end_date():
                chunk = self._get_next_period(date)
                start_string = self._get_year_day_of_year(chunk.get_start_date())
                end_string = self._get_year_day_of_year(chunk.get_end_date())
                sensor_name = sensor.get_name()
                job_name = 'dummy_job-' + sensor_name + '-' + start_string + '-' + end_string
                post_condition = 'stored-' + sensor_name + '-' + start_string + '-' + end_string

                job = Job(job_name, 'dummy_job_start.sh', [job_name], [post_condition],
                          [sensor_name, start_string, end_string, data_version, self._get_config_dir()])
                monitor.execute(job)

                date = chunk.get_end_date()

        monitor.wait_for_completion()
Example #23
    def _generate(self):
        tic = time.time()

        job_id = 1
        random.seed(params.RANDOM_SEED)  # make each run repeatable
        numpy.random.seed(params.RANDOM_SEED)
        accum_t = 0
        cwd = os.getcwd() + '/'

        for i in xrange(params.TOT_NUM_JOBS):
            if params.JOB_DISTRIBUTION == "uniform":
                # uniform randomly choose one
                index = random.randint(0, len(jobrepo.job_repos) - 1)
                (type, model) = jobrepo.job_repos[index]
                job = Job(job_id, type, model, index, cwd, self.logger)
                jobrepo.set_config(job)

            # randomize job arrival time
            if params.JOB_ARRIVAL == "uniform":
                t = random.randint(1, params.T)  # clock start from 1
                job.arrival_slot = t
            if job.arrival_slot in self.job_dict:
                self.job_dict[job.arrival_slot].append(job)
            else:
                self.job_dict[job.arrival_slot] = [job]

            job_id += 1

        toc = time.time()
        self.logger.debug(self.name + ":: " + "has generated " +
                          str(job_id - 1) + " jobs")
        self.logger.debug(self.name + ":: " + "time to generate jobs: " +
                          '%.3f' % (toc - tic) + " seconds.")
Example #24
    def run_matchup(self, hosts, num_parallel_tasks, simulation=False, logdir='trace'):
        """

        :param hosts: list
        :param num_parallel_tasks: int
        :param simulation: bool
        :param logdir: str
        :return:
        """
        monitor = self._get_monitor(hosts, [('matchup_start.sh', num_parallel_tasks)], logdir, simulation)

        sensors = self._get_sensor_pairs()
        for sensor_pair in sensors:
            name = sensor_pair.get_name()
            """:type : str"""
            sensor_period = sensor_pair.get_period()
            date = sensor_period.get_start_date() - datetime.timedelta(days=1)
            while date < sensor_period.get_end_date():
                chunk = self._get_next_period(date)
                start_string = self._get_year_day_of_year(chunk.get_start_date())
                end_string = self._get_year_day_of_year(chunk.get_end_date())

                job_name = 'matchup-' + name + '-' + start_string + '-' + end_string + '-' + self.usecase_config
                primary_name = sensor_pair.get_primary_name()
                pre_condition = 'ingest-' + primary_name + '-' + start_string + '-' + end_string
                post_condition = 'matchup-' + name + '-' + start_string + '-' + end_string + '-' + self.usecase_config

                job = Job(job_name, 'matchup_start.sh', [pre_condition], [post_condition],
                          [start_string, end_string, self._get_config_dir(), self.usecase_config])
                monitor.execute(job)

                date = chunk.get_end_date()

        monitor.wait_for_completion()
Example #25
    def scrapeJobPage(self, url) -> Job:
        try:
            html = get_soup(url)
            table = html.find('table')
            rows = table.findAll('tr')
            company_name = rows[0].findAll('td')[1].text
            company_website = rows[1].findAll('td')[1].text
            job_role = rows[2].findAll('td')[1].text
            id = rows[3].findAll('td')[1].text
            salary = None
            for i in range(4, 8):
                r = rows[i].findAll('td')
                if 'salary' in r[0].text.lower():
                    salary = r[1].text
                    break

            spans = html.find('div',
                              attrs={
                                  'class': 'td-post-content tagdiv-type'
                              }).findAll('span')
            link = None
            for span in spans:
                if span.find('a'):
                    link = span.find('a')['href']
        except Exception:
            return None

        return Job(company_name,
                   company_website,
                   job_role,
                   job_link=link,
                   salary=salary)
Example #26
    def run_post_processing(self, hosts, num_parallel_tasks, simulation=False, logdir='trace'):
        """

        :param hosts: list
        :param num_parallel_tasks: int
        :param simulation: bool
        :param logdir: str
        :return:
        """
        monitor = self._get_monitor(hosts, [('post_processing_start.sh', num_parallel_tasks)], logdir, simulation)
        production_period = self.get_production_period()
        date = production_period.get_start_date()
        while date < production_period.get_end_date():
            chunk = self._get_next_period(date)

            start_string = self._get_year_day_of_year(chunk.get_start_date())
            end_string = self._get_year_day_of_year(chunk.get_end_date())

            job_name = 'post-processing-' + start_string + '-' + end_string + '-' + self.usecase_config
            pre_condition = 'mmd-' + start_string + '-' + end_string
            post_condition = 'post-processing-' + start_string + '-' + end_string + '-' + self.usecase_config

            job = Job(job_name, 'post_processing_start.sh', [pre_condition], [post_condition],
                      [self.input_dir, start_string, end_string, self.usecase_config, self._get_config_dir()])
            monitor.execute(job)

            date = chunk.get_end_date()

        monitor.wait_for_completion()
Example #27
def run(url):
    soup = get_javascript_soup(url)
    jobs_list = soup.find(
        'table', {'class': 'srJobList'}).tbody.find_all('tr')[1:]
    job_class= Job(organization, "")
    job_class.organization_id= organization_id
    insert_count= 0
    for job_entry in jobs_list:
        job_class.title = job_entry.find(
            'td', {'class': 'srJobListJobTitle'}).text.strip()
        onClickLink = job_entry['onclick']
        job_class.info_link = onClickLink[13:len(onClickLink) - 3]
        job_class.full_or_part = job_entry.find(
            'td', {'class': 'srJobListTypeOfEmployment'}).text
        job_class.location = job_entry.find(
            'td', {'class': 'srJobListLocation'}).text
        location_parts = job_class.location.split(',')
        if len(location_parts) > 1 and len(
                location_parts[-1]) and location_parts[-1].strip().lower() != 'ca':
            # skip job if state is not CA
            print('Skip location: %s' % job_class.location)
            continue
        job_class.zip_code = city_to_zip(location_parts[0])
        insert_count += job_insert(job_class)
    return insert_count
Example #28
 def create_job(self, filename, **kwargs):
     """Create a job and try to set the source. Returns bool success."""
     job = Job(**kwargs)
     # Get the default material
     job.material = self.get_material()
     try:
         job.set_source(filename)
         self.job = job
         self.session.add(self.job)
         msg = 'Loaded %s' % os.path.basename(job.name or 'File')
         self.get_window('inkcut').set_title("*%s - Inkcut" % job.name)
         self.flash(msg)
         self.on_plot_feed_distance_changed(
             self.get_widget('plot-properties', 'plot-feed'))
         self._update_ui()
         return True
     except Exception, err:
         # log the failure and show it to the user
         log.debug(traceback.format_exc())
         msg = Gtk.MessageDialog(type=Gtk.MessageType.ERROR,
                                 buttons=Gtk.ButtonsType.OK,
                                 message_format="Issue loading file")
         msg.format_secondary_text(str(err))
         msg.run()
         msg.destroy()
         return False
Example #29
 def test_not_empty(self):
     """
     A queue with one enqueued value is not empty.
     """
     pq = NaivePriorityQueue()
     pq.enqueue(Job(1, 'People'))
     self.assertFalse(pq.is_empty())
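
Examples #3 and #29 exercise a NaivePriorityQueue whose backing list is exposed as .data. A sketch that satisfies both tests, with "naive" taken to mean a linear scan on dequeue (the real implementation may differ):

class NaivePriorityQueue:
    """Unordered list of jobs; dequeue scans for the highest priority."""

    def __init__(self):
        self.data = []

    def is_empty(self):
        return not self.data

    def enqueue(self, job):
        self.data.append(job)

    def dequeue(self):
        # Linear scan: remove and return the highest-priority job.
        best = max(self.data, key=lambda job: job.priority)
        self.data.remove(best)
        return best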
Example #30
def test():
    print "Start test"
    global dbName
    dbName = "test.db"

    clear()
    jobs = loadJobs()
    assert len(jobs) == 0

    newJob = Job("test")
    addNewJob(newJob)
    jobs = loadJobs()
    assert len(jobs) == 1
    assert jobs.index(newJob) >= 0

    newJob = jobs[newJob.id]
    newJob.recipeRef = "new test"
    saveJobs(jobs)
    jobs = loadJobs()
    assert newJob.recipeRef == jobs[newJob.id].recipeRef

    clear()
    jobs = loadJobs()
    assert len(jobs) == 0

    print "End test"