def _init(self, quantum, timer, monitor):
    """Set up scheduler state: timing parameters, ready structures, monitor, logger.

    NOTE(review): named `_init` rather than `__init__`, unlike the TCB/JCB
    classes in this project — presumably invoked explicitly by the owner;
    confirm against callers.
    """
    # --- timing configuration ---
    self._quantum = quantum   # minimum atomic runtime of a job
    self._timer = timer       # max time slice per job before a timer interrupt
    self._timestamp = 0       # schedule clock starts at zero

    # --- bookkeeping ---
    # ReadyQueue of (priority, Queue<JCB>) tuples
    self._ready_jobs_queue = ReadyQueue()
    # tasks that still have uncompleted jobs
    # (original note: might want an ordered set here)
    self._ready_tasks_set = set()

    # --- result persistence ---
    self._monitor = monitor   # monitor used to save/log results

    # --- logging ---
    self._logger = loggingconfig.getLogger('SCH')
def __init__(self, task):
    """Wrap *task* in a TCB and initialize its bookkeeping and persisted fields."""
    # The wrapped task; every TCB is parent to a number of child JCBs.
    self.task = task
    self.jcbs = []

    # Runtime bookkeeping (kept on the TCB, not written to the task record).
    self._lifetime = 0
    self._last_running_ts = None
    self._interrupted = True

    # Fields persisted to the database, initialized to their empty values.
    for key in ('activation_ts', 'interrupt_ts'):
        self.task[key] = []
    for key in ('start_date', 'end_date'):
        self.task[key] = None
    self.task['success'] = True

    # Per-task logger, nested under the scheduler's 'SCH' logger.
    self._logger = loggingconfig.getLogger('SCH.TCB_{}'.format(self.task['id']))
def __init__(self, parentTCB):
    """Create a JCB for one job of *parentTCB* and register it with the parent."""
    # Link back to the owning task control block.
    self.parentTCB = parentTCB
    parent_task = parentTCB.task

    # Runtime bookkeeping (kept on the JCB, not written to the job record).
    self._remaining_executiontime = parent_task['executiontime']
    self._slice = 0
    self._last_running_ts = None
    self._deadline = None
    self._interrupted = True

    # Persisted job record; appended to the parent task's job list.
    job = Job()
    job['activation_ts'] = []
    job['interrupt_ts'] = []
    # NOTE(review): presumably a running per-task counter — confirm on TCB
    job['id'] = parentTCB.job_count
    job['success'] = False
    job['release'] = None
    self.job = job
    parent_task['jobs'].append(job)

    # Per-job logger, nested under the parent task's logger.
    self._logger = loggingconfig.getLogger(
        'SCH.TCB_{}.JCB_{}'.format(parent_task['id'], job['id'])
    )
# -*- coding: utf-8 -*- #Baustelle """(Command Line) Interface script for rnnschedana.""" from datetime import datetime import datageneration as datagen from mongointerface import MongoInterface import preparedata as prepdata import loggingconfig import models import samples import utilities import pprint logger = loggingconfig.getLogger('INTERFACE') TF_RECORDS_DIR = '/home/hans/Documents/Uni/TUM/Informatik/WS18/BA/tfrecords' def create_data(experiment_spec, tfrecords_dir=TF_RECORDS_DIR, shard_size=1000, parallel=True, overwrite=True): sw = utilities.StopWatch('total') mongo = MongoInterface() try: doc = mongo.get_experiment_doc_by_name(experiment_spec.name) if not overwrite: raise Exception('An experiment with this name exists already.')
import scipy from taskgen.distributor import Distributor from taskgen.sessions.simso import SimSoSession from taskgen.sessions.file import FileSession from taskgen.monitors.stdout import StdOutMonitor import samples import loggingconfig from mongomonitor import MongoMonitor from mongointerface import MongoInterface from schedulerproxy import PriorityRoundRobin from schedulerproxy import PriorityRoundRobin2 import utilities logger = loggingconfig.getLogger('DATAGEN') BaseSpec = namedtuple( 'BaseSpec', 'name sample_rv_class sample_length size sch_q sch_it description id' ) #do not use like this, use ExperimentSpec class ExperimentSpec(BaseSpec): def __new__(cls, name, sample_rv_class, sample_length, size, sch_q, sch_it,
def __init__(self, sample_coll):
    """Bind the monitor to the MongoDB collection that receives samples."""
    # Collection that sample documents are written to.
    self._sample_coll = sample_coll
    # Logger is created at instance level; original note says a module-level
    # logger did not show up for some (unknown) reason.
    self._logger = loggingconfig.getLogger('MongoMonitor')