Example no. 1
import logging
from copy import deepcopy
from datetime import datetime

import psutil

# Project-local helpers used below (plots, Work, is_windows, start_process,
# get_log_file_name, get_target_directories) are assumed to be imported from
# the surrounding plot manager package.


def start_work(job, chia_location, log_directory):
    logging.info(f'Starting new plot for job: {job.name}')
    # Run the plotter at reduced priority so it does not starve the system:
    # nice 15 on unix, BELOW_NORMAL on Windows.
    nice_val = 15
    if is_windows():
        nice_val = psutil.BELOW_NORMAL_PRIORITY_CLASS

    now = datetime.now()
    log_file_path = get_log_file_name(log_directory, job, now)
    logging.info(f'Job log file path: {log_file_path}')
    destination_directory, temporary2_directory = get_target_directories(job)
    logging.info(f'Job destination directory: {destination_directory}')

    work = deepcopy(Work())
    work.job = job
    work.log_file = log_file_path
    work.datetime_start = now
    work.work_id = job.current_work_id

    job.current_work_id += 1

    if job.temporary2_destination_sync:
        logging.info(f'Job temporary2 and destination sync')
        temporary2_directory = destination_directory
    logging.info(f'Job temporary2 directory: {temporary2_directory}')

    plot_command = plots.create(
        chia_location=chia_location,
        farmer_public_key=job.farmer_public_key,
        pool_public_key=job.pool_public_key,
        size=job.size,
        memory_buffer=job.memory_buffer,
        temporary_directory=job.temporary_directory,
        temporary2_directory=temporary2_directory,
        destination_directory=destination_directory,
        threads=job.threads,
        buckets=job.buckets,
        bitfield=job.bitfield,
        exclude_final_directory=job.exclude_final_directory,
    )
    logging.info(f'Starting with plot command: {plot_command}')

    log_file = open(log_file_path, 'a')
    logging.info(f'Starting process')
    process = start_process(args=plot_command, log_file=log_file)
    pid = process.pid
    logging.info(f'Started process: {pid}')

    logging.info(f'Setting priority level: {nice_val}')
    psutil.Process(pid).nice(nice_val)
    logging.info(f'Set priority level')

    work.pid = pid
    job.total_running += 1
    job.running_work = job.running_work + [pid]
    logging.info(f'Job total running: {job.total_running}')
    logging.info(f'Job running: {job.running_work}')

    return job, work
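
A minimal usage sketch for the function above, assuming a jobs list of already-loaded Job objects and hypothetical paths for the chia binary and the log directory; only the start_work signature and the returned Work.pid come from the excerpt.

running_work = {}
for job in jobs:  # jobs: assumed list of loaded Job objects
    job, work = start_work(
        job=job,
        chia_location='/usr/local/bin/chia',   # hypothetical chia binary path
        log_directory='/home/user/plot_logs',  # hypothetical log directory
    )
    running_work[work.pid] = work  # track the new plot process by pid
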
Example no. 2
# Uses the same standard-library and psutil imports as Example no. 1; the
# helper functions referenced below are assumed to come from the surrounding
# plot manager package.
def get_running_plots(jobs, running_work, instrumentation_settings):
    chia_processes = []
    logging.info(f'Getting running plots')
    chia_executable_name = get_chia_executable_name()
    for process in psutil.process_iter():
        try:
            if (chia_executable_name not in process.name()
                    and 'python' not in process.name().lower()):
                continue
        except (psutil.AccessDenied, psutil.NoSuchProcess):
            continue
        try:
            if ('plots' not in process.cmdline()
                    or 'create' not in process.cmdline()):
                continue
        except (psutil.ZombieProcess, psutil.NoSuchProcess):
            continue
        if process.parent():
            try:
                parent_commands = process.parent().cmdline()
                if 'plots' in parent_commands and 'create' in parent_commands:
                    continue
            except (psutil.AccessDenied, psutil.ZombieProcess):
                pass
        logging.info(f'Found chia plotting process: {process.pid}')
        datetime_start = datetime.fromtimestamp(process.create_time())
        chia_processes.append([datetime_start, process])
    chia_processes.sort(key=lambda x: (x[0]))

    for datetime_start, process in chia_processes:
        logging.info(f'Finding log file for process: {process.pid}')
        log_file_path = None
        commands = []
        try:
            commands = process.cmdline()
            for file in process.open_files():
                if '.mui' == file.path[-4:]:
                    continue
                if file.path[-4:] not in ['.log', '.txt']:
                    continue
                if file.path[-9:] == 'debug.log':
                    continue
                log_file_path = file.path
                logging.info(f'Found log file: {log_file_path}')
                break
        except (psutil.AccessDenied, RuntimeError):
            logging.info(f'Failed to find log file: {process.pid}')
        except psutil.NoSuchProcess:
            continue

        assumed_job = None
        logging.info(f'Finding associated job')

        temporary_directory, temporary2_directory, destination_directory = get_plot_directories(
            commands=commands)
        for job in jobs:
            if (isinstance(job.temporary_directory, list)
                    and temporary_directory not in job.temporary_directory):
                continue
            if (not isinstance(job.temporary_directory, list)
                    and temporary_directory != job.temporary_directory):
                continue
            logging.info(f'Found job: {job.name}')
            assumed_job = job
            break

        plot_id = None
        if log_file_path:
            plot_id = get_plot_id(file_path=log_file_path)

        temp_file_size = get_temp_size(
            plot_id=plot_id,
            temporary_directory=temporary_directory,
            temporary2_directory=temporary2_directory)

        temporary_drive, temporary2_drive, destination_drive = get_plot_drives(
            commands=commands)
        k_size = get_plot_k_size(commands=commands)
        work = deepcopy(Work())
        work.job = assumed_job
        work.log_file = log_file_path
        work.datetime_start = datetime_start
        work.pid = process.pid
        work.plot_id = plot_id
        work.work_id = '?'
        if assumed_job:
            work.work_id = assumed_job.current_work_id
            assumed_job.current_work_id += 1
            assumed_job.total_running += 1
            set_plots_running(
                total_running_plots=assumed_job.total_running,
                job_name=assumed_job.name,
                instrumentation_settings=instrumentation_settings)
            assumed_job.running_work = assumed_job.running_work + [process.pid]
        work.temporary_drive = temporary_drive
        work.temporary2_drive = temporary2_drive
        work.destination_drive = destination_drive
        work.temp_file_size = temp_file_size
        work.k_size = k_size

        running_work[work.pid] = work
    logging.info(f'Finished finding running plots')

    return jobs, running_work
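
A hedged sketch of re-attaching to plots that were already running when the manager starts; load_jobs and the empty instrumentation_settings dict are assumptions, while the call signature and the (jobs, running_work) return values come from the function above.

jobs = load_jobs()  # hypothetical loader returning configured Job objects
jobs, running_work = get_running_plots(
    jobs=jobs,
    running_work={},
    instrumentation_settings={},  # assumed placeholder settings
)
for pid, work in running_work.items():
    logging.info(f'Re-attached to plot {pid} (k={work.k_size}) on {work.temporary_drive}')
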
def start_work(job, chia_location, log_directory, drives_free_space, backend):
    logging.info(f'Starting new plot for job: {job.name}')
    nice_val = job.unix_process_priority
    if is_windows():
        nice_val = job.windows_process_priority

    now = datetime.now()
    log_file_path = get_log_file_name(log_directory, job, now)
    logging.info(f'Job log file path: {log_file_path}')
    destination_directory, temporary_directory, temporary2_directory, job = \
        get_target_directories(job, drives_free_space=drives_free_space)
    if not destination_directory:
        return job, None

    logging.info(f'Job temporary directory: {temporary_directory}')
    logging.info(f'Job destination directory: {destination_directory}')

    work = deepcopy(Work())
    work.job = job
    work.log_file = log_file_path
    work.datetime_start = now
    work.work_id = job.current_work_id
    work.k_size = job.size
    work.destination_drive = destination_directory

    job.current_work_id += 1

    if job.temporary2_destination_sync:
        logging.info(f'Job temporary2 and destination sync')
        temporary2_directory = destination_directory
    logging.info(f'Job temporary2 directory: {temporary2_directory}')

    plot_command = plots.create(
        chia_location=chia_location,
        farmer_public_key=job.farmer_public_key,
        pool_public_key=job.pool_public_key,
        pool_contract_address=job.pool_contract_address,
        size=job.size,
        memory_buffer=job.memory_buffer,
        temporary_directory=temporary_directory,
        temporary2_directory=temporary2_directory,
        destination_directory=destination_directory,
        threads=job.threads,
        threadX_p2=job.threadX_p2,
        buckets=job.buckets,
        buckets_p3=job.buckets_p3,
        bitfield=job.bitfield,
        exclude_final_directory=job.exclude_final_directory,
        backend=backend,
    )
    logging.info(f'Starting with plot command: {plot_command}')

    log_file = open(log_file_path, 'a')
    logging.info(f'Starting process')
    process = start_process(args=plot_command, log_file=log_file)
    pid = process.pid
    logging.info(f'Started process: {pid}')

    logging.info(f'Setting priority level: {nice_val}')
    psutil.Process(pid).nice(nice_val)
    logging.info(f'Set priority level')
    if job.enable_cpu_affinity:
        logging.info(f'Setting process cpu affinity: {job.cpu_affinity}')
        psutil.Process(pid).cpu_affinity(job.cpu_affinity)
        logging.info(f'Set process cpu affinity')

    work.pid = pid
    job.total_running += 1
    job.total_kicked_off += 1
    job.running_work = job.running_work + [pid]
    logging.info(f'Job total running: {job.total_running}')
    logging.info(f'Job running: {job.running_work}')

    return job, work
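
A usage sketch for the newer signature, with hypothetical values for drives_free_space and backend; the only behavior taken from the code above is that start_work returns (job, None) when no destination directory is available, so the caller skips tracking in that case.

job, work = start_work(
    job=job,
    chia_location='/usr/local/bin/chia',   # hypothetical chia binary path
    log_directory='/home/user/plot_logs',  # hypothetical log directory
    drives_free_space={'/mnt/dest1': 120 * 2**30},  # assumed mapping of drive to free bytes
    backend='chia',                        # assumed backend identifier
)
if work is not None:
    running_work[work.pid] = work
else:
    logging.info(f'No destination directory available for job: {job.name}')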