def setup_process_for_tr(self, employee):
    """Create and persist the initial workflow Process for an employee's TR instance.

    The starting step depends on which reviewers exist for the employee:
    no direct supervisor and no department head -> step 9; no direct
    supervisor but a department head -> step 5; otherwise -> step 1.

    :param employee: object exposing ``direct_supervisor``,
        ``department_head`` and ``employee_id`` (presumably an Employee
        model -- confirm against the caller).
    :return: a ``Process`` built from the row returned by the INSERT.
    :raises ResourceNotFound: if the INSERT returned no row.
    """
    if employee.direct_supervisor == 0:
        step = 9 if employee.department_head == 0 else 5
    else:
        step = 1
    # NOTE(review): this date (today + 5 days) is stored in the
    # *begin_date* column although the local name says "end" -- confirm
    # the intended column/semantics.
    end_date = date.today() + timedelta(days=5)
    process_item = Process(0, "pending", step, str(end_date), str(False), employee.employee_id)
    # Parameterized query -- values are never interpolated into the SQL text.
    sql = "INSERT INTO process (process_name, step, begin_date, completed, tr_instance_id) " \
          "VALUES (%s,%s,%s,%s,%s) returning *"
    cursor = connection.cursor()
    try:
        cursor.execute(sql, (process_item.process_name, process_item.step,
                             process_item.begin_date, process_item.completed,
                             process_item.tr_instance_id))
        connection.commit()
        record = cursor.fetchone()
    finally:
        # Close the cursor even when execute/commit/fetch raises
        # (previously the cursor leaked on any database error).
        cursor.close()
    if record:
        return Process(record[0], record[1], record[2], record[3], record[4], record[5])
    else:
        raise ResourceNotFound(
            f"The grading data was not formatted for the database.")
def init_processes(processes):
    """Build a list of Process objects from a mapping of process entries.

    :param processes: mapping of process name -> comma-separated string;
        the first two comma-separated fields of each value are used.
    :return: list of ``Process`` instances, one per entry, with ``index``
        assigned in iteration order.
    """
    p_arr = []
    # Iterate key/value pairs directly and split each value only once
    # (the original split the same string twice per entry).
    for index, (name, value) in enumerate(processes.items()):
        parts = value.split(',')
        p_arr.append(Process(index, name, parts[0], parts[1]))
    return p_arr
def onSessionOpen(self): """ When connection is established, we create our model instances and register them for RPC. that's it. """ # set up process model self.processModel = Process() self.processModel.signalNamespace(self, "processModel") # expose model methods for RPC self.registerMethodForRpc(self.uri + '/processModel.rowCount', self.processModel, lambda i: self.processModel.rowCount()) self.registerMethodForRpc(self.uri + '/processModel.columnCount', self.processModel, lambda i: self.processModel.columnCount()) self.registerMethodForRpc(self.uri + '/processModel.headerData', self.processModel, lambda section, i: self.processModel.headerData(i)) self.registerMethodForRpc(self.uri + '/processModel.allData', self.processModel, lambda i: self.processModel.allData())
class ProcessMonitorServerProtocol(WampServerProtocol):
    """
    This is simple process monitor server protocol. As with other
    server classes model is created, when new connection is established,
    and deleted when client closes websocket connection
    """

    # Base URI under which all RPC endpoints of this protocol are exposed.
    uri = "http://system-monitor.com"

    def onSessionOpen(self):
        """
        When connection is established, we create our model instances
        and register them for RPC. that's it.
        """
        # set up process model
        self.processModel = Process()
        self.processModel.signalNamespace(self, "processModel")
        # expose model methods for RPC; each lambda discards the incoming
        # RPC argument(s) and delegates to the model instance.
        self.registerMethodForRpc(self.uri + '/processModel.rowCount', self.processModel, lambda i: self.processModel.rowCount())
        self.registerMethodForRpc(self.uri + '/processModel.columnCount', self.processModel, lambda i: self.processModel.columnCount())
        # NOTE(review): this lambda receives (section, i) but forwards only
        # ``i`` to headerData -- confirm which argument headerData expects.
        self.registerMethodForRpc(self.uri + '/processModel.headerData', self.processModel, lambda section, i: self.processModel.headerData(i))
        self.registerMethodForRpc(self.uri + '/processModel.allData', self.processModel, lambda i: self.processModel.allData())

    def connectionLost(self, reason):
        """
        When connection is gone (i.e. client close window, navigated away
        from the page), stop the model timer, which holds last reference
        to model, and delete the model
        """
        WampServerProtocol.connectionLost(self, reason)
        # Stopping the timer breaks the last reference cycle; dropping our
        # reference then lets the model be garbage collected.
        self.processModel.timer.stop()
        self.processModel = None
def start(self, cmd=None, refresh_rate=1000, autostart=False, daemon=True) -> Process:
    """Build and return a Process for *cmd*, defaulting to the next command.

    :param cmd: command to run; when ``None``, ``self.get_next`` is used.
    :param refresh_rate: refresh interval forwarded to the Process.
    :param autostart: NOTE(review) -- accepted but never used here; confirm
        whether the Process is meant to be started automatically.
    :param daemon: daemon flag forwarded to the Process.
    :return: the newly constructed ``Process``.
    """
    # NOTE(review): ``self.get_next`` is referenced without calling it --
    # confirm it is a property rather than a method.
    command = self.get_next if cmd is None else cmd
    return Process(command, refresh_rate=refresh_rate, daemon=daemon)
from graphviz import Digraph
from model.microservices import Microservices
from model.process import Process
from model.application import Application
from model.infrastructure import Infrastructure
from model.observability import Observability
from model.test import Test

dot = Digraph(comment='Microservices Roadmap', format='png')

# Build every roadmap section into the shared graph, root first.  The
# order matches the original instantiate-and-build sequence; looping
# removes six copies of the same two-line pattern.
for section in (Microservices, Application, Infrastructure, Observability,
                Process, Test):
    section().build(dot)

dot.render('images/roadmap.gv', view=True)
def main(argv):
    """Parse command-line options and run gaze processing on .csv files.

    Every ``.csv`` file in ``--indir`` (or the single ``--file``) is
    parsed, smoothed (Butterworth filter), differentiated
    (Savitzky-Golay filter) and velocity-thresholded; detected fixations
    are written to ``<outdir>/<filename>-fxtn.csv``.

    :param argv: argument list, e.g. ``sys.argv[1:]``.
    :return: ``exit(1)`` on bad or missing arguments, otherwise ``None``.
    """
    args = [
        'width=', 'height=', 'screen=', 'dist=', 'xtiles=', 'ytiles=',
        'indir=', 'outdir=', 'file=', 'hertz=', 'sfdegree=', 'sfcutoff=',
        'dfdegree=', 'dfwidth=', 'vt=', 'baselineT=', 'endT=', 'smooth=',
        'proximity='
    ]
    try:
        opts, args = getopt.getopt(argv, '', args)
    except getopt.GetoptError as e:
        print(e, file=sys.stderr)
        usage()
        return exit(1)

    # initialize vars to None
    width = height = screen = dist = xtiles = ytiles = indir = outdir = \
        file = hertz = sfdegree = sfcutoff = dfdegree = dfwidth = vt = \
        baseline_t = end_t = smooth = proximity = None

    # parse args into vars
    for opt, arg in opts:
        opt = opt.lower()
        # --file is the only case-sensitive argument (it is a filename)
        if opt != '--file':
            arg = arg.lower()
        if opt == '--width':
            width = arg
        elif opt == '--height':
            height = arg
        elif opt == '--screen':
            screen = float(arg)
        elif opt == '--dist':
            dist = float(arg)
        elif opt == '--xtiles':
            xtiles = int(arg)
        elif opt == '--ytiles':
            ytiles = int(arg)
        elif opt == '--indir':
            indir = arg
        elif opt == '--outdir':
            outdir = arg
        elif opt == '--file':
            file = arg
        elif opt == '--hertz':
            hertz = float(arg)
        elif opt == '--sfdegree':
            sfdegree = float(arg)
        elif opt == '--sfcutoff':
            sfcutoff = float(arg)
        elif opt == '--dfdegree':
            dfdegree = float(arg)
        elif opt == '--dfwidth':
            dfwidth = float(arg)
        elif opt == '--vt':
            vt = float(arg)
        elif opt == '--baselinet':
            baseline_t = float(arg)
        elif opt == '--endt':
            end_t = float(arg)
        elif opt == '--smooth':
            if arg == 'true':
                smooth = True
            elif arg.lower() == 'false':
                smooth = False
            else:
                raise Exception("Invalid arg for --smooth.")
        elif opt == '--proximity':
            if arg == 'true':
                proximity = True
            elif arg.lower() == 'false':
                proximity = False
            else:
                raise Exception("Invalid arg for --proximity")

    # (default) differentiation (SG) filter parameters: width, degree, order
    if dfwidth is None and dfdegree is None:
        if smooth:
            dfwidth = 3
            dfdegree = 2
        else:
            dfwidth = 5
            dfdegree = 3
    # first-order derivative (velocity); used unconditionally below
    dfo = 1

    # (default) smoothing (Butterworth) filter parameters: degree, cutoff
    if sfdegree is None:
        sfdegree = 3
    if sfcutoff is None and hertz is not None:
        sfcutoff = 1.0 / hertz  # must be 0 < Wn < 1

    if None in [
        width, height, screen, dist, xtiles, ytiles, indir, outdir, hertz,
        sfdegree, sfcutoff, dfdegree, dfwidth, vt, baseline_t, end_t,
        smooth, proximity
    ]:
        print("Some args are not initialized", file=sys.stderr)
        return exit(1)

    # get .csv input files to process
    if os.path.isdir(indir):
        files = glob.glob('%s/*.csv' % indir)
    else:
        files = []

    # if user specified --file="..." then we use that as the only one to process
    if file is not None:
        # join robustly: the old ``indir + file`` silently produced a bad
        # path (and never overrode) when indir lacked a trailing separator
        file = os.path.join(indir, file)
        if os.path.isfile(file):
            files = [file]
            print("overriding files with: ", files)

    # declare monitor
    monitor = Monitor(int(width), int(height), screen, dist)

    # model loop, we iterate over .csv files
    for file in files:
        # don't process empty files
        if os.path.getsize(file) == 0:
            continue

        path, base = os.path.split(file)
        print("Processing: ", file, "[", base, "]")

        # split filename from extension
        filename, ext = os.path.splitext(base)
        print("path: %s, base: %s, filename: %s, ext: %s" %
              (path, base, filename, ext))

        # last '-'-separated token of the filename identifies the trial type
        typ = filename.rsplit('-')[-1]

        process = Process(width, height, screen, dist, hertz)
        process.parse_file(file)
        process.smooth(sfdegree, sfcutoff, smooth)
        process.differentiate(dfwidth, dfdegree, dfo)
        process.threshold(vt, monitor, typ, proximity)
        process.write_threshold_to_file("%s/%s-fxtn%s" %
                                        (outdir, filename, ".csv"))
        del process