Code example #1
def test_input_file(tmpdir):
    source_dir = tmpdir.join('-1')
    os.makedirs('%s' % source_dir)

    shutil.copyfile('%s' % files_dir.join('pg5903.epub'),
                    '%s' % source_dir.join('pg5903.epub'))

    input_dir = tmpdir.join('0', 'files')
    os.makedirs('%s' % input_dir)

    os.symlink('%s' % source_dir.join('pg5903.epub'),
               '%s' % input_dir.join('pg5903_0.epub'))

    output_dir = tmpdir.join('1')
    os.makedirs('%s' % output_dir)

    fallback_dir = tmpdir.join('fallback')
    os.makedirs('%s' % fallback_dir)

    t = task.Task(input_dir='%s' % input_dir,
                  output_dir='%s' % output_dir,
                  fallback_path='%s' % fallback_dir)

    assert source_dir.join('pg5903_0.epub').check() is True
Code example #2
def test_input_file(tmpdir):
    input_dir = tmpdir.join('0')
    os.makedirs('%s' % input_dir)

    file_one = input_dir.join('foo.txt')
    file_one.write('baz\n')

    file_two = input_dir.join('bar.txt')
    file_two.write('bar\n\n\n\n\nfoo')

    output_dir = tmpdir.join('1')
    os.makedirs('%s' % output_dir)

    t = task.Task(input_dir='%s' % input_dir, output_dir='%s' % output_dir)

    output_file_one_lines = codecs.open('%s' % output_dir.join('foo.txt'),
                                        encoding='utf-8',
                                        mode='r').readlines()
    assert len(output_file_one_lines) == 1

    output_file_two_lines = codecs.open('%s' % output_dir.join('bar.txt'),
                                        encoding='utf-8',
                                        mode='r').readlines()
    assert len(output_file_two_lines) == 3
Code example #3
def addTask():
    print("Button was pressed!")
    runtime = int(runEntry.get())
    deadline = int(deadlineEntry.get())
    initRuntime = initEntry.get()
    initRunAmt  = initAmtEntry.get()
    if len(initRunAmt) == 0:
        initRunAmt = 0
    if len(initRuntime) == 0:
        initRuntime = 0
    initRunAmt = float(initRunAmt)
    initRuntime = int(initRuntime)
    if initRunAmt > 1:
        initRunAmt = 1
    if initRuntime * initRunAmt > runtime:
        initRuntime = runtime
    print("{}{}{}{}{}{}{}{}".format("Runtime: ",runtime," Deadline:",deadline," Boot Runtime: ",initRuntime," Boot Minimum Amount: ",initRunAmt))
    runEntry.delete(0, 'end')
    deadlineEntry.delete(0, 'end')
    initEntry.delete(0, 'end')
    initAmtEntry.delete(0, 'end')
    tempTask = task.Task(runtime, deadline, initRuntime, initRunAmt)
    taskList.append(tempTask)
    generateScrollbox()
Code example #4
File: chart.py  Project: sachinvenkat/ganttchart
    def draw(self, render):
        i = 0
        y = 20
        today = date.today()
        # Pools
        for pool in sorted(render.owners_by_pools.iterkeys()):
            render.border(pool, y - (2 if i else 0))
            # Users
            for n in sorted(render.owners_by_pools[pool]):
                owner_tasks = render.tasks_by_owners[n]
                tasks_num = len(owner_tasks)
                if i % 2:
                    render.opaque_rectangle(
                        8, y - 1,
                        11 + render.left_offset + render.active_width,
                        render.task_height + self.vertical_offset *
                        (tasks_num - 1) - 1, "#0040FF", 32)
                render.text(10, y - 2, n)
                # Tasks dates
                md, d, t = date.min, None, None
                for d in sorted(owner_tasks.iterkeys()):
                    # Tasks
                    for t in owner_tasks[d]:
                        render.draw_task(t, y)
                        y += self.vertical_offset
                        if t.till_date > md:
                            md = t.till_date
                if md < today:
                    t1 = task.Task("", self.get_category("Bench"), t.pool,
                                   t.owner,
                                   render.de_weekend(md + timedelta(days=1)),
                                   render.max_date)
                    render.draw_task(t1, y - self.vertical_offset)

                i += 1
                y += render.task_height - self.vertical_offset
Code example #5
File: g.py  Project: wassname/singularity
def load_tasks():
    global tasks
    tasks = collections.OrderedDict()  # Keep order (important)

    task_list = generic_load("tasks.dat")
    for task_dict in task_list:

        # Certain keys are absolutely required for each entry.  Make sure
        # they're there.
        check_required_fields(task_dict, ("id", "type", "value"), "Task")

        task_id = task_dict["id"]
        task_type = task_dict["type"]

        # Only jobs are possible for now
        if task_type != "jobs":
            sys.stderr.write("Only jobs task are supported\n")
            sys.exit(1)

        # Make sure prerequisites, if any, are lists.
        task_pre = task_dict.get("pre", [])
        if type(task_pre) != list:
            task_pre = [task_pre]

        tasks[task_id] = task.Task(task_id, task_type, int(task_dict["value"]),
                                   task_pre)

    if (all(
            len(t.prerequisites) > 0 for t in (tasks[k] for k in tasks)
            if t.type == "jobs")):
        sys.stderr.write(
            "A minimum of one job task without prerequisites is needed for the game\n"
        )
        sys.exit(1)

    load_task_defs()
Code example #6
File: run.py  Project: gao462/InfinitySGD
    mcsim.PROCESS = 'poisson'
else:
    raise RuntimeError('there is no default sampling process.')

# task environment
if device is not None:
    device = "cuda:{}".format(device)
else:
    device = 'cpu'

# create task
t_tit = 'u' if useful_trans else 'n'
title = os.path.join('logs', "{}_{}{}_{}".format(dataset, t_tit, num_trans,
                                                 struct))
if dataset in ('benchmark', 'mm1k', 'mmmmr', 'mm1k-small', 'mm1k-large'):
    task = task.Task(data_cls=data.MMmKData, root=title)
elif dataset in ('mmul', 'mmul-small', 'mmul-large'):
    task = task.Task(data_cls=data.MMMulKData, root=title)
elif dataset == 'emu':
    task = task.Task(data_cls=data.EmuData, root=title)
# // elif dataset == 'lbwb':
# //     task = task.Task(data_cls=data.WebBrowseData, root=title)
else:
    raise NotImplementedError
prefix = "{}_{}_g{}_a{}_l{}".format(title, config, expog, expoa, expol)


# define model
class PriorModel(torch.nn.Module):
    r"""Prior Struture Model"""
    def __init__(self, data_prior, tensor=torch.Tensor):
Code example #7
    def test_get(self, mock1, mock2, mock3, mock4, mock5):
        self.assertEqual(self.task.get(), task.Task(True).get())
Code example #8
def submit_task(request):
    print request.POST  # debug print
    tas_obj = task.Task(request)  # pass the request to the Task class; next week: launching concurrent tasks with Django
    res = tas_obj.handle()  # Task.handle() processes the multi-command data and returns it as a dict
    # the dict returned above must be JSON-serialized before sending it to the frontend
    return HttpResponse(json.dumps(res))  # JSON-serialize the result
Code example #9
    def testNoRunBranchMismatch(self):
        """Test running a recurring task with no matching builds."""
        t = task.Task(self._TASK_NAME, self._SUITE, task.BARE_BRANCHES)
        self.mox.ReplayAll()
        self.assertTrue(t.Run(self.sched, self._MAP, self._BOARD))
Code example #10
import json,task

with open('test.json') as json_file:
    data = json.load(json_file)
    print(data)

    tasks = []
    for uselesskey,json_dict in data.iteritems():
        tasks.append(task.Task(json_dict))

    for mTask in tasks:
        print mTask
Code example #11
File: ruleParser.py  Project: metalsky/mvst
def getRules():

    rule_list = []
    task_ptr = None
    # There are two rules files:
    # 1) user rules file - any rules specific to that job
    # 2) common rules file - rules common to all jobs
    for file_name in [config.getUserRulesFile(), config.getCommonRulesFile()]:
        if not os.path.isfile(file_name):
            continue

        file = open(file_name)
        lines = file.readlines()
        file.close()

        #Get the names of all the rules and import them into a list.
        for x in range(len(lines)):
            line = lines[x]
            name, tmp, tmp1, tmp2, fatal = parser.parseLine(line)

            if name is None:
                continue

            # We don't need to check the lib paths because that was
            # already done in the parsing stage

            u_py_file, u_task_ptr = parser.findTask(name,
                                                    config.getUserLibPath())
            c_py_file, c_task_ptr = parser.findTask(name,
                                                    config.getCommonLibPath())

            error.sysDebug("GetRules: user: "******", " +
                           str(u_task_ptr))
            error.sysDebug("GetRules: common: " + str(c_py_file) + ", " +
                           str(c_task_ptr))

            #did we find a py file?
            if not u_py_file and not c_py_file:
                error.error("GetRules: rule: " + name +
                            " Does not exist. See Debug output for traceback")
                continue

            if u_py_file == c_py_file and u_task_ptr and c_task_ptr:
                error.warning("Overriding common task with user task: " + \
                 name + " from: " + u_py_file)
                py_file = u_py_file
                task_ptr = u_task_ptr
            elif u_py_file:
                py_file = u_py_file
                task_ptr = u_task_ptr
            else:
                py_file = c_py_file
                task_ptr = c_task_ptr

            if not task_ptr:
                error.error("Can't find rule: " + str(name))
            else:
                #Setup rule task
                task_item = task.Task(name, py_file, file_name, x + 1, None,
                                      task_ptr, fatal)
                rule_list.append(task_item)
                error.sysDebug("getRules: adding rule:" + name)

    return rule_list
Code example #12
            print('offspring generated')
            self.population.pop.extend(Offspring)
            print('offspring merged into the population')
            self.population.pop = self.Selection()
            print('next generation selected')
            self.population.update_rank()
            print('generation evaluated')
            self.iteration += 1
        best = self.population.getBestIndiOfTask(self.listTask[0])
        for i in best.chromosome:
            print(i.number)
        # at this point we just need to extract the result


#-----------------------------------------------
task1 = task.Task(6)
task2 = task.Task(10)
listTask = [task1, task2]
# place the vertices (cities)
set_of_city = []
f = open(
    "C:\\Users\\Laptop NamPhong\\Desktop\\MFO_TSP\\data\\tsp\\berlin52.tsp")
for i in range(6):
    a = f.readline()
for i in range(10):
    b = f.readline()
    b = list(b.split("\n"))
    c = [int(float(i)) for i in list(b[0].split(" "))]
    city = gen.Gen(c[0], c[1], c[2])
    set_of_city.append(city)
mfea = MultiTasking(listTask, 100, 0.3, set_of_city)
Code example #13
File: test_task.py  Project: jordan-schneider/task
    def test_integration(self):
        tasks: task.TaskDict = dict()
        active_task: Optional[task.Task] = None

        tags = ["chores"]
        names = ["Wash dishes", "Take out garbage", "Write task tracker"]

        with Replace("task.datetime", test_datetime(NOW, delta=0)):
            dateparser.conf.settings.RELATIVE_BASE = NOW
            tasks = task.add(
                tasks,
                name=names[0],
                due_input="5pm",
                estimate_input="ten minutes",
                tags=tags,
            )

        expected = [
            task.Task(name=names[0],
                      due=FIVE_PM,
                      estimate=TEN_MINUTES,
                      tags=tags)
        ]
        self.assertEqual(list(tasks.values()), expected)

        noon_tomorrow = NOW.replace(hour=12) + datetime.timedelta(days=1)

        with Replace("task.datetime", test_datetime(NOW + ONE_SECOND,
                                                    delta=0)):
            dateparser.conf.settings.RELATIVE_BASE = NOW + ONE_SECOND
            tasks = task.add(
                tasks,
                name=names[1],
                due_input="noon tomorrow",
                estimate_input="90 seconds",
                tags=tags,
            )

        expected.append(
            task.Task(name=names[1],
                      due=noon_tomorrow,
                      estimate=90 * ONE_SECOND,
                      tags=tags))
        self.assertEqual(list(tasks.values()), expected)

        with Replace("task.datetime",
                     test_datetime(NOW + 2 * ONE_SECOND, delta=0)):
            dateparser.conf.settings.RELATIVE_BASE = NOW + 2 * ONE_SECOND
            tasks, active_task = task.start(
                tasks=tasks,
                active_task=active_task,
                name=names[0],
            )

        expected[0].spans = [task.Span(start=NOW + 2 * ONE_SECOND)]
        self.assertEqual(list(tasks.values()), expected)
        self.assertEqual(active_task, tasks[names[0]])

        with Replace("task.datetime",
                     test_datetime(NOW + 3 * ONE_SECOND, delta=0)):
            dateparser.conf.settings.RELATIVE_BASE = NOW + 3 * ONE_SECOND
            tasks, active_task = task.start(
                tasks=tasks,
                active_task=active_task,
                name=names[1],
            )

        expected[0].spans[0].stop = NOW + 3 * ONE_SECOND
        expected[1].spans = [task.Span(start=NOW + 3 * ONE_SECOND)]
        self.assertEqual(list(tasks.values()), expected)
        self.assertEqual(active_task, tasks[names[1]])

        with Replace("task.datetime",
                     test_datetime(NOW + 4 * ONE_SECOND, delta=0)):
            dateparser.conf.settings.RELATIVE_BASE = NOW + 4 * ONE_SECOND
            tasks = task.add(
                tasks=tasks,
                name=names[2],
            )

        expected.append(task.Task(name=names[2], tags=list()))
        self.assertEqual(list(tasks.values()), expected)
        self.assertEqual(active_task, tasks[names[1]])

        with Replace("task.datetime",
                     test_datetime(NOW + 5 * ONE_SECOND, delta=0)):
            dateparser.conf.settings.RELATIVE_BASE = NOW + 5 * ONE_SECOND
            tasks, active_task = task.close(
                tasks=tasks,
                active_task=active_task,
                name=names[0],
            )

        expected[0].open = False
        self.assertEqual(list(tasks.values()), expected)
        self.assertEqual(active_task, tasks[names[1]])

        with Replace("task.datetime",
                     test_datetime(NOW + 6 * ONE_SECOND, delta=0)):
            dateparser.conf.settings.RELATIVE_BASE = NOW + 6 * ONE_SECOND
            tasks, active_task = task.close(
                tasks=tasks,
                active_task=active_task,
                name=names[1],
            )

        expected[1].open = False
        expected[1].spans[0].stop = NOW + 6 * ONE_SECOND
        self.assertEqual(list(tasks.values()), expected)
        self.assertEqual(active_task, None)

        with Replace("task.datetime",
                     test_datetime(NOW + 7 * ONE_SECOND, delta=0)):
            dateparser.conf.settings.RELATIVE_BASE = NOW + 7 * ONE_SECOND
            tasks, active_task = task.start(tasks=tasks,
                                            active_task=active_task,
                                            name=names[2])

        expected[2].spans = [task.Span(start=NOW + 7 * ONE_SECOND)]
        self.assertEqual(list(tasks.values()), expected)
        self.assertEqual(active_task, tasks[names[2]])

        with Replace("task.datetime",
                     test_datetime(NOW + 8 * ONE_SECOND, delta=0)):
            dateparser.conf.settings.RELATIVE_BASE = NOW + 8 * ONE_SECOND
            active_task = task.stop(active_task=active_task)

        expected[2].spans[0].stop = NOW + 8 * ONE_SECOND
        self.assertEqual(list(tasks.values()), expected)
        self.assertEqual(active_task, None)

        with Replace("task.datetime",
                     test_datetime(NOW + 9 * ONE_SECOND, delta=0)):
            dateparser.conf.settings.RELATIVE_BASE = NOW + 9 * ONE_SECOND
            tasks, active_task = task.start(tasks=tasks,
                                            active_task=active_task,
                                            name=names[2])

        expected[2].spans.append(task.Span(start=NOW + 9 * ONE_SECOND))
        self.assertEqual(list(tasks.values()), expected)
        self.assertEqual(active_task, tasks[names[2]])
Code example #14
def worker_thread(url, queue_name, results_queue):
    """Worker thread, for each instance
    """
    global log
    empty_queue = False
    while not empty_queue:
        connection = amqpstorm.UriConnection(url)
        channel = connection.channel(rpc_timeout=120)
        channel.queue.declare(queue_name, durable=True)
        channel.basic.qos(1)  # Fetch one message at a time
        log.info('Waiting for tasks')
        while True:
            message = channel.basic.get(queue=queue_name, no_ack=False)
            # If the queue is empty, message is None
            if message is None:
                log.info('Nothing else to do.')
                connection.close()
                empty_queue = True
                break

            work = task.Task(message.body)
            log.info('Got a task %s', work.get_id())

            ret_code = 0
            if work.get_id() in IDS_DONE:
                log.warning('Task ID already done. Skipping')
                work = JOBS_DONE[work.get_id()]
            else:
                ret_code = work.run()
                JOBS_DONE[work.get_id()] = work

            try:
                if ret_code != 0:
                    log.warning('Unexpected exit code: %d', ret_code)
                    stdout = work.get_stdout()
                    stderr = work.get_stderr()
                    if stdout is not None:
                        log.error('STDOUT: %s', stdout)
                    if stderr is not None:
                        log.error('STDERR: %s', stderr)
                    message.nack()
                    continue

                log.debug('Task execution finished')

                message.ack()
            except amqpstorm.AMQPConnectionError as conn_error:
                log.error('Connection to server died before publish')
                IDS_DONE.append(work.get_id())
                break
            except Exception as ex:
                log.exception(ex)
                break

            with amqpstorm.UriConnection(url) as res_conn:
                with res_conn.channel() as res_channel:
                    res_channel.queue.declare(results_queue, durable=True)
                    for result in work.result():
                        props = {
                            'delivery_mode': 2
                        }
                        res = amqpstorm.Message.create(res_channel,
                                                       result,
                                                       props)
                        res.publish(results_queue, exchange='')
            log.debug('Task and result processing completed')

        connection.close()

    log.info('Thread exiting. (empty queue? %s)',
             repr(empty_queue))
Code example #15
    def add_job(self, job_id):
        logger.debug("Got Job '%s' -> schedule it" % job_id)

        database.setup_db()

        with database.get_session() as session:
            try:
                job_data = session.query(Job).filter(
                    Job.job_id == job_id).one()
                cloud_data = job_data.clouddef

                if job_data.source_datadef_id is None or job_data.destination_datadef_id is None:
                    raise ValueError("Missing dataprovider information")

                if job_data.run_layer_id is None:
                    raise ValueError("Missing run layer")

                if job_data.script_id is None:
                    raise ValueError("Missing script")

                source_dp_data = job_data.source_datadef_relation
                destination_dp_data = job_data.destination_datadef_relation

                source_dp, source_dp_kwargs = BaseDataProvider.dataprovider_by_model(
                    source_dp_data)
                destination_dp, destination_dp_kwargs = BaseDataProvider.dataprovider_by_model(
                    destination_dp_data)

                # We only need to instantiate the source data provider, as only the settings of the destination data provider are relevant here.
                # A destination data provider instance is needed on the corresponding task worker
                source_dp = source_dp(**source_dp_kwargs)
                source_files = source_dp.get_files(source_dp_data.source)

                # Check for a path filter and return only files with a path matching the filter
                if source_dp_data.source_path_filter is not None and len(
                        source_dp_data.source_path_filter) > 0:
                    source_filter = re.compile(
                        source_dp_data.source_path_filter)
                    source_files = [
                        x for x in source_files if source_filter.search(x.path)
                    ]

                # Check for a filename filter and return only files matching the filter
                if source_dp_data.source_filename_filter is not None and len(
                        source_dp_data.source_filename_filter) > 0:
                    source_filter = re.compile(
                        source_dp_data.source_filename_filter)
                    source_files = [
                        x for x in source_files
                        if source_filter.search(x.filename)
                    ]

                running_worker = celerycontrol.get_running_worker()

                for f in source_files:
                    task_name = (job_data.name
                                 or job_data.job_id) + " - " + f.filename

                    logger.debug(
                        "Sourcefile (path='%s', filename='%s') received -> generating task '%s'"
                        % (f.path, f.filename, task_name))

                    task_destination_path = source_path_re.sub(
                        f.path, destination_dp_data.destination)
                    task_destination_path = source_file_re.sub(
                        f.filename, task_destination_path)

                    logger.debug(
                        "Destination path created. Org='%s', created='%s'" %
                        (destination_dp_data.destination,
                         task_destination_path))

                    task_source_dp = task.TaskDataProvider(
                        source_dp_data.dataprovider_relation.name, f.fullpath,
                        None, source_dp_kwargs)
                    logger.debug(
                        "Created source data provider. Type='%s', source='%s'"
                        % (source_dp_data.dataprovider_relation.name,
                           f.fullpath))

                    task_destination_dp = task.TaskDataProvider(
                        destination_dp_data.dataprovider_relation.name, None,
                        task_destination_path, destination_dp_kwargs)
                    logger.debug(
                        "Created destination data provider. Type='%s', destination='%s'"
                        % (destination_dp_data.dataprovider_relation.name,
                           task_destination_path))

                    virtual_device_layer_content = '\n'.join([
                        l.content for l in _get_layer_tree(
                            cloud_data.virtual_device_layer_relation.layer_id)
                    ])
                    test_layer_content = '\n'.join([
                        l.content for l in _get_layer_tree(
                            cloud_data.test_layer_relation.layer_id)
                    ])
                    run_layer_content = '\n'.join([
                        l.content for l in _get_layer_tree(
                            cloud_data.run_layer_relation.layer_id)
                    ])

                    task_virtual_device_layer = task.TaskLayer(
                        virtual_device_layer_content,
                        cloud_data.virtual_device_layer_relation.tag)
                    task_test_layer = task.TaskLayer(
                        test_layer_content, cloud_data.test_layer_relation.tag)
                    task_run_layer = task.TaskLayer(run_layer_content,
                                                    job_data.run_relation.tag)
                    task_script = task.TaskScript(
                        job_data.script_relation.content,
                        job_data.script_relation.name)

                    task_filename = f.filename

                    logger.debug("Adding task to internal database")
                    database_task = database.model.task.Task(
                        job_id=job_data.job_id,
                        filename=task_filename,
                        completed=False)

                    session.add(database_task)
                    session.commit()

                    logger.debug("Task added to database with id='%s'" %
                                 database_task.task_id)

                    new_task = task.Task(database_task.task_id, task_name,
                                         task_source_dp, task_destination_dp,
                                         task_virtual_device_layer,
                                         task_test_layer, task_run_layer,
                                         task_script, task_filename,
                                         job_data.task_timeout)

                    celery_queue = None

                    if config.master_scheduling_algo == "history":
                        # Look for a worker which previously executed a task with the same filename
                        task_hist = session.query(Task).join(Job).filter(
                            Job.clouddef_id == cloud_data.clouddef_id,
                            Task.filename == task_filename).all()

                        for row in task_hist:
                            if row.worker_hostname is not None and row.worker_hostname in running_worker:
                                # We found a worker which is running, executed the same file in the past and probably still has the file in its local cache
                                celery_queue = "worker-" + row.worker_hostname
                                break

                    # Fallback in case no worker was found or another schedule algo was specified
                    if celery_queue is None:
                        celery_queue = "cloud-" + str(cloud_data.clouddef_id)

                    try:
                        celery_task = celerycontrol.add_task_uds.apply_async(
                            (new_task, ),
                            queue=celery_queue)  # @UndefinedVariable
                    except Exception as exc:
                        logger.error(traceback.format_exc())
                        database_task.exception = str(exc)
                        database_task.traceback = traceback.format_exc()
                        database_task.completed = True

                    logger.debug(
                        "Task submitted to celery queue '%s' with id='%s'" %
                        (celery_queue, celery_task.id))

                    # Add the backend id (in this case the celery id) to the database
                    database_task.backend_id = celery_task.id
                    session.commit()
            except (MultipleResultsFound, NoResultFound):
                raise ValueError("Received unknown or invalid job id")
Code example #16
    def __init__(self):
        self.tasks = []
        for i in range(100):
            self.tasks.append(task.Task('task ' + str(i)))
Code example #17
File: main.py  Project: petersohn/photo-organizer
    def __init__(self, paths: List[str]) -> None:
        super(MainWindow, self).__init__()
        self.setWindowTitle('Photo Organizer')
        self.resize(config.config['width'], config.config['height'])
        if config.config['maximized']:
            self.setWindowState(C.Qt.WindowMaximized)
        self.picture_size = config.config['picture_size']

        self.loaded_files: Set[str] = set()

        self.mime_db = C.QMimeDatabase()
        self.current_index = 0

        self.from_model = G.QStandardItemModel()
        self.from_list = W.QListView()
        self.from_list.setViewMode(W.QListView.IconMode)
        self.from_list.setMovement(W.QListView.Static)
        self.from_list.setResizeMode(W.QListView.Adjust)
        self.from_list.setSelectionMode(W.QAbstractItemView.ExtendedSelection)
        self.from_list.setModel(self.from_model)
        sm = self.from_list.selectionModel()
        sm.selectionChanged.connect(  # type: ignore
            lambda s, d: self.check_from_selection())

        self.to_model = G.QStandardItemModel()
        self.to_list = W.QListView()
        self.to_list.setViewMode(W.QListView.IconMode)
        self.to_list.setMovement(W.QListView.Static)
        self.to_list.setResizeMode(W.QListView.Adjust)
        self.to_list.setSelectionMode(W.QAbstractItemView.ExtendedSelection)
        self.to_list.setModel(self.to_model)
        self.to_list.selectionModel().selectionChanged.connect(  # type: ignore
            lambda s, d: self.check_to_selection())

        move_layout = W.QVBoxLayout()

        self.add_button = W.QToolButton()
        self.add_button.setText('Add')
        self.add_button.setIcon(config.get_icon('arrow-right-bold'))
        self.add_button.clicked.connect(lambda _: self.add_items())
        self.add_button.setEnabled(False)
        self.add_button.setShortcut(C.Qt.ALT + C.Qt.Key_Right)
        helper.set_tooltip(self.add_button)
        self.remove_button = W.QToolButton()
        self.remove_button.setText('Remove')
        self.remove_button.setIcon(config.get_icon('arrow-left-bold'))
        self.remove_button.clicked.connect(lambda _: self.remove_items())
        self.remove_button.setEnabled(False)
        self.remove_button.setShortcut(C.Qt.ALT + C.Qt.Key_Left)
        helper.set_tooltip(self.remove_button)
        move_layout.addWidget(self.add_button)
        move_layout.addWidget(self.remove_button)

        arrange_layout = W.QVBoxLayout()
        self.up_button = W.QToolButton()
        self.up_button.setText('Up')
        self.up_button.setIcon(config.get_icon('arrow-up-bold'))
        self.up_button.clicked.connect(lambda _: self.move_up())
        self.up_button.setEnabled(False)
        self.up_button.setShortcut(C.Qt.ALT + C.Qt.Key_Up)
        self.down_button = W.QToolButton()
        self.down_button.setText('Down')
        self.down_button.setIcon(config.get_icon('arrow-down-bold'))
        self.down_button.clicked.connect(lambda _: self.move_down())
        self.down_button.setEnabled(False)
        self.down_button.setShortcut(C.Qt.ALT + C.Qt.Key_Down)
        helper.set_tooltip(self.down_button)
        arrange_layout.addWidget(self.up_button)
        arrange_layout.addWidget(self.down_button)

        splitter = W.QSplitter()

        from_layout = W.QHBoxLayout()
        from_layout.addWidget(self.from_list)
        from_layout.addLayout(move_layout)
        from_widget = W.QWidget()
        from_widget.setLayout(from_layout)
        splitter.addWidget(from_widget)

        to_layout = W.QHBoxLayout()
        to_layout.addWidget(self.to_list)
        to_layout.addLayout(arrange_layout)
        to_widget = W.QWidget()
        to_widget.setLayout(to_layout)
        splitter.addWidget(to_widget)

        self.setCentralWidget(splitter)

        toolbar = W.QToolBar()
        clear_action = toolbar.addAction(config.get_icon('file-outline'),
                                         'Clear', self.clear)
        clear_action.setShortcut(C.Qt.ALT + C.Qt.Key_C)
        helper.set_tooltip(clear_action)
        add_action = toolbar.addAction(config.get_icon('folder'), 'Add folder',
                                       lambda: self.add_dir(recursive=False))
        add_action.setShortcut(C.Qt.ALT + C.Qt.Key_F)
        helper.set_tooltip(add_action)
        toolbar.addAction(config.get_icon('file-tree'), 'Add tree',
                          lambda: self.add_dir(recursive=True))
        toolbar.addSeparator()
        zoom_in_action = toolbar.addAction(
            config.get_icon('magnify-plus'), 'Zoom in', lambda: self.
            resize_pictures(self.picture_size + picture_size_step))
        zoom_in_action.setShortcut(C.Qt.CTRL + C.Qt.Key_Plus)
        helper.set_tooltip(zoom_in_action)
        zoom_out_action = toolbar.addAction(
            config.get_icon('magnify-minus'), 'Zoom out', lambda: self.
            resize_pictures(self.picture_size - picture_size_step))
        zoom_out_action.setShortcut(C.Qt.CTRL + C.Qt.Key_Minus)
        helper.set_tooltip(zoom_out_action)
        toolbar.addSeparator()
        self.apply_action = toolbar.addAction(config.get_icon('floppy'),
                                              'Apply', self.apply)
        self.apply_action.setEnabled(False)
        self.addToolBar(toolbar)
        self.apply_action.setShortcut(C.Qt.ALT + C.Qt.Key_A)
        helper.set_tooltip(self.apply_action)

        self.load_pictures_task = task.Task(self.load_pictures)
        C.QCoreApplication.postEvent(self, InitEvent(paths))
Code example #18
File: server.py  Project: maskiran/pylib
    def wait_for_task(self, task_mor):
        """Wait for the given task to complete"""
        tk_obj = task.Task(self, task_mor)
        tk_obj.wait()
        return tk_obj
Code example #19
def main():
    """  main """

    try:

        def stop():
            """ stop """
            for pr in procList:
                pr.flag = 'stop'
                logger.logs(LOG, "stop proc task id:" + str(id(pr.task)))
                print("stop proc task id:" + str(id(pr.task)))

        def out():
            pass

        """
		settings.json parse

		"""
        conf = open('settings.json', 'r')
        param = conf.read()
        js_param = json.loads(param)

        LOG = js_param['log']
        SETT = js_param['settings']

        logger.logs(LOG, "start programm")

        global procList
        """
		tkinter

		"""
        root = Tk()
        menubar = Menu(root)
        filemenu = Menu(menubar, tearoff=0)
        filemenu.add_command(label="Выход", command=stop)
        menubar.add_cascade(label="Файл", menu=filemenu)

        editmenu = Menu(menubar, tearoff=0)
        editmenu.add_separator()
        menubar.add_cascade(label="Отправка", menu=editmenu)

        servicemenu = Menu(menubar, tearoff=0)
        servicemenu.add_command(label="Проверить ключи", command=donothing)
        servicemenu.add_command(label="asrkeyw", command=donothing)
        servicemenu.add_command(label="Остановить обработку", command=stop)
        menubar.add_cascade(label="Сервис", menu=servicemenu)

        helpmenu = Menu(menubar, tearoff=0)
        helpmenu.add_command(label="О программе", command=donothing)
        menubar.add_cascade(label="Справка", menu=helpmenu)

        for element in SETT:

            sheduleTask = task.Task(element['name'], element['path_in'],
                                    element['maska'], element['archive'],
                                    element['target'], element['period'],
                                    element['command_in'],
                                    element['command_out'],
                                    element['path_out'])

            if not os.path.exists(sheduleTask.path_in):
                os.makedirs(sheduleTask.path_in)

            if not os.path.exists(os.path.join(sheduleTask.archive, 'in')):
                os.makedirs(os.path.join(sheduleTask.archive, 'in'))

            if not os.path.exists(os.path.join(sheduleTask.archive, 'out')):
                os.makedirs(os.path.join(sheduleTask.archive, 'out'))

            if not os.path.exists(sheduleTask.target):
                os.makedirs(sheduleTask.target)

            if not os.path.exists(sheduleTask.path_out):
                os.makedirs(sheduleTask.path_out)

            editmenu.add_command(label=sheduleTask.name,
                                 command=donothing)  #add menu
            editmenu.add_separator()

            proc = ProcessObr(sheduleTask, LOG)
            procList.append(proc)

            thread = threading.Thread(target=proc.inner)
            thread.start()

        root.title("Обработка отчетности")
        root.config(menu=menubar)
        root.geometry('1121x443')
        root.resizable(False, False)
        root.mainloop()

    except Exception as e:
        logger.logs(LOG, "error: {0}".format(e))
Code example #20
def submit_task(request):
    print request.POST
    task_obj = task.Task(request)
    res = task_obj.handle()
    return HttpResponse(json.dumps(res))
Code example #21
    def sound(self,
              path: str,
              duration: float = None,
              repeat: bool = False) -> task.Task:
        """
        returns a Task object, that plays a sound file

        Attributes:
        path: name of the sound file (without extension ".rsf")

        Keyword Attributes:
        duration: duration of the sound file (in sec.)
        repeat: flag, if repeatedly playing
        """
        if repeat:
            ops = b''.join([
                ev3.opSound,
                ev3.REPEAT,
                ev3.LCX(self._volume),  # VOLUME
                ev3.LCS(path)  # NAME
            ])
        else:
            ops = b''.join([
                ev3.opSound,
                ev3.PLAY,
                ev3.LCX(self._volume),  # VOLUME
                ev3.LCS(path)  # NAME
            ])
        # pylint: disable=redefined-variable-type
        if not repeat and not duration:
            return task.Task(self.send_direct_cmd, args=(ops, ))
        elif not repeat and duration:
            t_inner = task.Task(self.send_direct_cmd,
                                args=(ops, ),
                                duration=duration,
                                action_stop=self.stop)
            return task.Task(t_inner.start, join=True)
        elif repeat and not duration:
            t_inner = task.Task(self.send_direct_cmd,
                                args=(ops, ),
                                action_stop=self.stop,
                                action_cont=self.send_direct_cmd,
                                args_cont=(ops, ),
                                duration=999999999)
            return task.Task(t_inner.start, join=True)
        elif repeat and duration:

            class _Task(task.Task):
                # pylint: disable=protected-access
                def _final(self, **kwargs):
                    super()._final(**kwargs)
                    if self._root._time_action:
                        self._root._time_rest = (self._root._time_action -
                                                 time.time())
                        self._root._time_action -= self._root._time_rest

                # pylint: enable=protected-access
                def _cont2(self, **kwargs):
                    self._time_action += self._time_rest
                    super()._cont2(**kwargs)

            t_inner = task.concat(
                _Task(self.send_direct_cmd,
                      args=(ops, ),
                      duration=duration,
                      action_stop=self.stop,
                      action_cont=self.send_direct_cmd,
                      args_cont=(ops, )), _Task(self.stop))
            # pylint: enable=redefined-variable-type
            return task.Task(t_inner.start, join=True)
Code example #22
    def reply_async(self, handler, *args, **kwargs):
        self.ws = None
        t = task.Task(handler, self, *args, **kwargs)
        t.start()
        return t
Code example #23
File: jobSitter.py  Project: cpausmit/MitProd
# Here is where the real action starts -------------------------------------------------------------
# --------------------------------------------------------------------------------------------------

# Find the list of crab tasks to babysit
crabTasks = []
datasetList = []
cmd = 'find ./ -maxdepth 1 -name crab_0_\* |grep -v cfg | sort' + backward
print '\n=============================================================================='
print ' Summary of crab task list: \n'
for line in os.popen(cmd).readlines():  # run command
    line = line[:-1]  # strip '\n'
    ## print ' LINE: ' + line
    f = line.split('/')  # splitting every blank
    tag = f.pop()

    crabTask = task.Task(tag)

    #print 'Pattern: ' + pattern + '  tag: ' + crabTask.mitDataset
    if apattern != '' and re.search(apattern, crabTask.mitDataset):
        print '\n Skipping: ' + crabTask.mitDataset + '\n\n'
    if re.search(pattern, crabTask.mitDataset):
        crabTasks.append(crabTask)
        crabTask.show()

    if one == 1:
        break

# Process the crab tasks determined to be relevant in the last query
print '\n=============================================================================='
print ' Process crab task list\n'
i = 0
Code example #24
    def setUp(self):
        super(TaskTest, self).setUp()
        self.task = task.Task(self._TASK_NAME, self._SUITE,
                              [self._BRANCH_SPEC], None, None, self._BOARD,
                              self._PRIORITY, self._TIMEOUT)
Code example #25
def get_task_result(request):
    # 获取任务执行结果,返回给前端
    task_obj = task.Task(request)
    res = task_obj.get_task_result()
    return HttpResponse(json.dumps(res, default=utils.jsonDateFormat))
Code example #26
def add_task(name, description):
    task.Task(name, description)
    save()
Code example #27
class Apps:
	task = task.Task(logoffpath="/logon/logoff")
Code example #28
File: views.py  Project: yanjunjie/myops
def submit_task(request):
    print request.POST

    tas_obj = task.Task(request)
    res = tas_obj.handle()
    return HttpResponse("dddd")
Code example #29
    cmd = "Crab file not found: %s" % crabFile
    raise RuntimeError, cmd
cmsswFile = os.environ['MIT_PROD_DIR'] + '/' + mitCfg + '/' + version + '/' + cmssw + '.py'
if not os.path.exists(cmsswFile):
    cmd = "Cmssw file not found: %s" % cmsswFile
    cmd = " XXXX ERROR no valid configuration found XXXX"
    raise RuntimeError, cmd
cmsswPy  = cmssw + '_' + crabId + '.py'

# Prepare the ce/se translator
trans = translator.Translator(os.environ['MIT_PROD_DIR']+'/'+mitCfg+'/'+version+'/ceTable',
                              os.environ['MIT_PROD_DIR']+'/'+mitCfg+'/'+version+'/seTable',
                              os.environ['MIT_PROD_DIR']+'/'+mitCfg+'/'+version+'/preferredSites')

# Create the corresponding crab task
crabTask             = task.Task(crabId,cmsDataset,mitDataset,mitCfg,version,cmssw)
if crabTask.mitDataset == 'undefined' or crabTask.cmsDataset == 'undefined':
    print " ERROR - dataset not defined."
    sys.exit(1)
else:
    mitDataset = crabTask.mitDataset
    cmsDataset = crabTask.cmsDataset
    #dbs        = crabTask.dbs

# Deal with obvious problems
if cmsDataset == None or mitDataset == None:
    cmd = "--cmsDataset & --mitDataset  " + \
          "Have to be defined now (do you have the right database (Productions.<cmssw>)?"
    raise RuntimeError, cmd

getFiles(mitCfg,version)
Code example #30
    def setUp(self, mock1, mock2, mock3, mock4, mock5):
        self.task = task.Task(True)