def yourFunction(request):
    """Synthetic CPU stress workload: repeatedly fills three operand arrays
    with random values and does multiply/divide work, reporting SAAF metrics.

    request keys: 'calcs' (array size / inner iterations), 'sleep' (read but
    unused here), 'loops' (outer iterations).
    Returns the Inspector's collected attribute dict.
    """
    # Import the module and collect data
    inspector = Inspector()
    inspector.inspectAll()
    inspector.addTimeStamp("frameworkRuntime")

    calcs = request['calcs']
    sleepTime = request['sleep']  # kept: preserves KeyError contract when 'sleep' is absent
    loops = request['loops']

    operand_a = [0] * calcs
    operand_b = [0] * calcs
    operand_c = [0] * calcs

    for k in range(0, loops):
        for i in range(0, calcs):
            # BUG FIX: randint is inclusive on BOTH ends, so randint(0, calcs)
            # could return `calcs` and raise IndexError on lists of length calcs.
            j = randint(0, calcs - 1)
            operand_a[j] = randint(0, 99999)
            operand_b[j] = randint(0, 99999)
            # BUG FIX: the divisor could previously be 0, raising
            # ZeroDivisionError below; draw it from 1..99999 instead.
            operand_c[j] = randint(1, 99999)
            mult = operand_a[j] * operand_b[j]
            div = mult / operand_c[j]

    inspector.inspectCPUDelta()
    return inspector.finish()
def initUI(self): self.setAcceptDrops(True) # establish layout rootHLayout = QtGui.QHBoxLayout() vLayout1 = QtGui.QVBoxLayout() # functions self.functionList = FunctionScrollWidget() vLayout1.addWidget(self.functionList) # create the inspector self.inspector = Inspector(self.controller) self.inspector.setMinimumHeight(150) vLayout1.addWidget(self.inspector) # add the vLayout to the root rootHLayout.addLayout(vLayout1); # set up the buttons btnAddRow = QtGui.QPushButton("Add Row"); # label that stores the name of the recipe self.lblName = QtGui.QLabel("Untitled", self) self.lblName.setAlignment(QtCore.Qt.AlignmentFlag.AlignHCenter) self.list = RecipeList(); self.list.setBackgroundRole(QtGui.QPalette.ColorRole.Light) self.list.setMinimumWidth(300) self.list.addEmptyTrigger(); #connect the list's function_selected signal to the inspector's setEditor slot #This will make the inspector update its editor every time a function is selected self.list.function_selected.connect(self.inspector.setEditor) #Connect the btnAddRow clicked signal to add a new row in the recipe list btnAddRow.clicked.connect(self.list.addEmptyTrigger) vLayout2 = QtGui.QGridLayout(); vLayout2.addWidget(self.lblName, 0,0,1,1) vLayout2.addWidget(btnAddRow, 1, 0, 1, 1) vLayout2.addWidget(self.list, 2, 0, 10, 1) rootHLayout.addLayout(vLayout2); self.pnlBuyActions = ActionPanel(self); self.pnlBuyActions.setBackgroundRole(QtGui.QPalette.ColorRole.Light); self.pnlBuyActions.setMinimumWidth(300) self.pnlBuyActions.addEmptyTrigger(); #self.pnlBuyActions.setMaximumHeight(100); rootHLayout.addWidget(self.pnlBuyActions) # window code self.setLayout(rootHLayout); self.setGeometry(300,300,600,300); self.setWindowTitle('Click or Move'); self.show()
def handler(event, context):
    """Download a CSV from S3, run the transform step, and upload the result.

    Reports the output key and the processed row count via the Inspector.
    """
    client = boto3.client('s3')

    inspector = Inspector()
    inspector.inspectAll()
    # NOTE: "FramworkRuntime" spelling kept as-is — downstream metric
    # consumers may key on this exact label.
    inspector.addTimeStamp("FramworkRuntime")

    src_bucket = event.get("bucketname")
    src_key = event.get("filename")

    local_in = '/tmp/target.csv'
    local_out = '/tmp/processed.csv'
    upload_key = 'transform.csv'

    client.download_file(src_bucket, src_key, local_in)
    transformed = trans.process(local_in)
    transformed.to_csv(local_out, index=False)
    client.upload_file(local_out, src_bucket, upload_key)

    inspector.inspectCPUDelta()
    inspector.addAttribute("outputFile", upload_key)
    inspector.addAttribute("numLine", transformed.shape[0])
    return inspector.finish()
def inspect(anObject):
    """Delegate to the Inspector module's inspect routine and return its result."""
    import Inspector
    verdict = Inspector.inspect(anObject)
    return verdict
def yourFunction(request, context):
    """Load a CSV object from S3 and batch-write its rows into the database.

    Endpoint and database name default to environment variables
    ('databaseEndpoint' / 'databaseName') and may be overridden per-request
    via 'dbEndpoint' / 'dbName'. Required request keys: 'key', 'bucketname',
    'tablename', 'batchSize'. Returns the Inspector attribute dict.
    """
    dbEndpoint = os.getenv('databaseEndpoint')
    if 'dbEndpoint' in request:
        dbEndpoint = request['dbEndpoint']
    dbName = os.getenv('databaseName')
    if 'dbName' in request:
        dbName = request['dbName']
    key = str(request['key'])
    bucketname = str(request['bucketname'])
    tablename = str(request['tablename'])
    batchSize = request['batchSize']

    # import the module and collect data
    inspector = Inspector()
    inspector.inspectAll()
    inspector.addAttribute("endpoint", dbEndpoint)
    inspector.addAttribute("bucketname", bucketname)
    inspector.addAttribute("key", key)

    s3 = boto3.client('s3')
    csvfile = s3.get_object(Bucket=bucketname, Key=key)
    # IDIOM: decode every line bytes -> str in one pass; replaces the manual
    # index-tracked in-place mutation loop.
    csvcontent = [line.decode("utf-8") for line in csvfile['Body'].read().split(b'\n')]

    content = read_csv(csvcontent, inspector)
    write_output(content, batchSize, tablename, dbEndpoint, dbName, inspector)

    inspector.inspectAllDeltas()
    return inspector.finish()
def handler(event, context):
    """Fetch a CSV from S3, initialize a SQLite database from it, and upload
    the database back to the same bucket as 'target.db'."""
    client = boto3.client('s3')

    inspector = Inspector()
    inspector.inspectAll()
    inspector.addTimeStamp("FrameWorkRuntime")

    source_bucket = event.get("bucketname")
    source_key = event.get("filename")

    csv_path = '/tmp/target.csv'
    # Database file name mirrors the input key's stem.
    sqlite_path = '/tmp/' + source_key.split('.')[0] + '.db'

    client.download_file(source_bucket, source_key, csv_path)
    load.database_init(csv_path, sqlite_path, logger)
    client.upload_file(sqlite_path, source_bucket, 'target.db')

    inspector.inspectCPUDelta()
    inspector.addAttribute("DatabaseName", "target.db")
    return inspector.finish()
def yourFunction(request, context):
    """Hello-world handler: greets request['name'] when present, otherwise
    emits a generic greeting, and returns the Inspector metrics."""
    # Import the module and collect data
    inspector = Inspector()
    inspector.inspectAll()

    # Build the greeting, falling back when no name was supplied.
    if 'name' not in request:
        greeting = "Hello World!"
    else:
        greeting = "Hello " + str(request['name']) + "!"
    inspector.addAttribute("message", greeting)

    inspector.inspectAllDeltas()
    return inspector.finish()
def parse(source):
    """Parse *source* into an AST, run the semantic Inspector over it, and
    return the tree — or None once any semantic check has failed."""
    tree = yacc.parse(source)
    reload(Inspector)
    # Accumulate correctness: once False, it stays False across parses.
    Parser.correct = Inspector.inspect(tree) and Parser.correct
    # printAST(tree)
    if Parser.correct:
        return tree
    return None
def yourFunction(request, context):
    """Fetch a CSV object from S3, decode it, and report the bucket, key, and
    first line of the file through the Inspector.

    Required request keys: 'bucketname', 'key'.
    """
    # Import the module and collect data
    inspector = Inspector()
    inspector.inspectAll()
    inspector.addTimeStamp("frameworkRuntime")

    bucketname = str(request['bucketname'])
    key = str(request['key'])

    s3 = boto3.client('s3')
    csvfile = s3.get_object(Bucket=bucketname, Key=key)
    # IDIOM: decode each line bytes -> str in one pass; replaces the manual
    # index-tracking loop. (Unused csv.DictReader, dead commented-out pandas
    # code, and the unused test_val local were removed.)
    csvcontent = [line.decode("utf-8") for line in csvfile['Body'].read().split(b'\n')]

    # Add custom message and finish the function.
    # ('key' is always present here — request['key'] above would already have
    # raised — but the guard is kept for behavioral parity.)
    if ('key' in request):
        inspector.addAttribute(
            "bucketname", "bucketname " + str(request['bucketname']) + "!")
        inspector.addAttribute("key", str(request['key']))
        inspector.addAttribute("test val", csvcontent[0])

    inspector.inspectCPUDelta()
    return inspector.finish()
def yourFunction(request, context):
    """Read a sales CSV from S3, rewrite it, and store the edited copy back
    into the same bucket as 'edited_<size>_Sales_Records.csv'."""
    object_key = str(request['key'])
    bucket = str(request['bucketname'])

    # Import the module and collect data
    inspector = Inspector()
    inspector.inspectAll()
    inspector.addAttribute("bucketname", bucket)
    inspector.addAttribute("key", object_key)

    client = boto3.client('s3')
    response = client.get_object(Bucket=bucket, Key=object_key)
    lines = response['Body'].read().decode('utf-8').split("\n")

    parsed = read_csv(lines)
    rendered = write_csv(parsed)
    payload = rendered.getvalue()

    # Output key is named after the record-size prefix of the input key.
    size_prefix = object_key.split("_")
    dest_object_name = "edited_{0}_Sales_Records.csv".format(size_prefix[0])
    client.put_object(Bucket=bucket, Key=dest_object_name, Body=payload)

    inspector.inspectAllDeltas()
    return inspector.finish()
def yourFunction(request, context):
    """Run a fixed filter/aggregate query against the database table, stress-test
    the database, and upload the query result to S3 as '<prefix>_results.csv'.

    Endpoint/name default to env vars 'databaseEndpoint'/'databaseName',
    overridable via request['dbEndpoint']/request['dbName']. Required request
    keys: 'key', 'bucketname', 'tablename', 'stressTestLoops'.
    """
    dbEndpoint = os.getenv('databaseEndpoint')
    if 'dbEndpoint' in request:
        dbEndpoint = request['dbEndpoint']
    dbName = os.getenv('databaseName')
    if 'dbName' in request:
        dbName = request['dbName']
    key = str(request['key'])
    bucketname = str(request['bucketname'])
    tablename = str(request['tablename'])
    stressTestLoops = request['stressTestLoops']
    # import the module and collect data
    inspector = Inspector()
    inspector.inspectAll()
    inspector.addAttribute("endpoint", dbEndpoint)
    inspector.addAttribute("bucketname", bucketname)
    inspector.addAttribute("key", key)
    s3 = boto3.client('s3')
    # Hard-coded filter criteria (overwrites anything the caller put in
    # request['filterBy'] / request['aggregateBy']).
    # NOTE(review): "Sales Channel" = "Office Supplies" and "Order Priority" =
    # "Offline" look swapped/miscopied — a sales channel is normally
    # Online/Offline and "Office Supplies" is an item type. Confirm against
    # the dataset before trusting this query's results.
    request['filterBy'] = dict()
    request['filterBy']["Region"] = ["Australia and Oceania"]
    request['filterBy']["Item Type"] = ["Office Supplies"]
    request['filterBy']["Sales Channel"] = ["Office Supplies"]
    request['filterBy']["Order Priority"] = ["Offline"]
    request['filterBy']["Country"] = ["Fiji"]
    request['aggregateBy'] = dict()
    request['aggregateBy']["max"] = ["Units Sold"]
    request['aggregateBy']["min"] = ["Units Sold"]
    request['aggregateBy']["avg"] = [
        "Order Processing Time", "Gross Margin", "Units Sold"
    ]
    request['aggregateBy']["sum"] = [
        "Units Sold", "Total Revenue", "Total Profit"
    ]
    # NOTE(review): helper names are misspelled ("contstruct", "exexute") but
    # must match their definitions elsewhere in this module — do not "fix"
    # here without renaming the definitions too.
    query_string = contstruct_query_string(request['filterBy'],
                                           request['aggregateBy'],
                                           request['tablename'])
    query_result = exexute_query(query_string, dbEndpoint, dbName)
    # Hammer the database for the requested number of loops (side effect only).
    stressTest(stressTestLoops, tablename, dbEndpoint, dbName)
    # Result object is named after the size prefix of the input key.
    key_split = str(request['key']).split('_')[0]
    result_key = "{0}_results.csv".format(key_split)
    csv_content = convert_rs_to_csv(query_result)
    s3.put_object(Bucket=bucketname, Key=result_key, Body=(csv_content))
    inspector.inspectAllDeltas()
    return inspector.finish()
# --- script state and command-line option dispatch ---
printBanner = False
Banner = "CMS Preliminary"
verbose = False
# check options
option,args = parse(__doc__)
# Nothing to do without options or arguments.
if not args and not option:
    exit()
# --batch: run ROOT without opening graphics windows.
if option.batch:
    ROOT.gROOT.SetBatch()
if option.verbose:
    verbose = True
# --list: verbosely list the objects in the given file, then quit.
if option.list:
    ins = Inspector.Inspector()
    ins.Verbose(True)
    ins.createXML(False)
    ins.SetFilename(option.list)
    ins.GetListObjects()
    sys.exit()
# --create: write an XML description of the given file (optionally tagged),
# then quit.
if option.create:
    createXML = Inspector.Inspector()
    createXML.Verbose(False)
    createXML.createXML(True)
    if option.tag:
        createXML.SetTag(option.tag)
    createXML.SetFilename(option.create)
    createXML.GetListObjects()
    sys.exit()
def handler(event, context):
    """Download an image from S3 and run the box detector on it, reporting
    the detected box and the detector's profiling data."""
    client = boto3.client('s3')

    # import the module and collect data
    inspector = Inspector()
    inspector.inspectAll()
    inspector.addTimeStamp("frameworkRuntime")

    # detector
    source_bucket = event.get("bucketname")
    source_key = event.get("filename")

    #image = os.mkdir(os.path.join('/tmp', key))
    local_image = '/tmp/target.jpg'
    print(local_image)
    client.download_file(source_bucket, source_key, local_image)

    box, profile = dc.getBox(local_image)
    inspector.addAttribute("box", str(box))
    inspector.addAttribute("Detector_profile", profile)

    inspector.inspectCPUDelta()
    return inspector.finish()
def yourFunction(request, context):
    """Read a sales CSV from S3, transform it via read_csv/write_csv, and
    upload the result as '<prefix>_newdata.csv' to the same bucket.

    Required request keys: 'bucketname', 'key'.
    """
    # Import the module and collect data
    inspector = Inspector()
    inspector.inspectAll()
    inspector.addTimeStamp("frameworkRuntime")

    bucketname = str(request['bucketname'])
    key = str(request['key'])

    s3 = boto3.client('s3')
    csvfile = s3.get_object(Bucket=bucketname, Key=key)
    csvcontent = csvfile['Body'].read().decode('utf-8').split("\n")

    content = read_csv(csvcontent)
    output = write_csv(content)
    # FIX: renamed local from `bytes`, which shadowed the builtin type.
    # (Also removed the unused `test_val` local.)
    payload = output.getvalue()
    record_size = str(request['key']).split("_")
    dest_object_name = "{0}_newdata.csv".format(record_size[0])
    s3.put_object(Bucket=bucketname, Key=dest_object_name, Body=payload)

    # Add custom message and finish the function
    # ('key' is always present here — the lookup above would have raised —
    # guard kept for behavioral parity.)
    if ('key' in request):
        inspector.addAttribute(
            "bucketname", "bucketname " + str(request['bucketname']) + "!")
        inspector.addAttribute("key", str(request['key']))
        inspector.addAttribute("test val", csvcontent[0])

    inspector.inspectCPUDelta()
    return inspector.finish()
def yourFunction(request, context):
    """Fetch a CSV from S3, run the caller-specified filter/aggregate query,
    convert the result to CSV, and upload it as '<prefix>_results.csv'.

    Required request keys: 'bucketname', 'key', 'filterBy', 'aggregateBy',
    'tablename'.
    """
    # Import the module and collect data
    inspector = Inspector()
    inspector.inspectAll()
    inspector.addTimeStamp("frameworkRuntime")
    bucketname = str(request['bucketname'])
    key = str(request['key'])
    s3 = boto3.client('s3')
    csvfile = s3.get_object(Bucket=bucketname, Key=key)
    csvcontent = csvfile['Body'].read().split(b'\n')
    # Decode every line in place (bytes -> str).
    i = 0
    for line in csvcontent:
        csvcontent[i] = line.decode("utf-8")
        i = i + 1
    csv_data = csv.DictReader(csvcontent)  # NOTE(review): never consumed below
    test_val = ""  # NOTE(review): unused
    query_string = contstruct_query_string(request['filterBy'],
                                           request['aggregateBy'],
                                           request['tablename'])
    query_result = exexute_query(query_string)
    # NOTE(review): json_result is computed but convert_json_csv is handed the
    # raw query_result, not json_result — confirm which input was intended.
    json_result = convert_query_to_json(query_result)
    csv_result = convert_json_csv(query_result)
    #dest_object_name = "newjson.txt"
    #my_bytes = bytes(json_result.encode('UTF-8'))
    #s3.put_object(Bucket=bucketname, Key=dest_object_name,Body=(my_bytes))
    # NOTE(review): bytes(x) with no encoding raises TypeError on Python 3
    # when x is a str — this only works if csv_result.getvalue() already
    # returns bytes (e.g. a BytesIO buffer). Verify.
    my_bytes = bytes(csv_result.getvalue())
    # Result object named after the size prefix of the input key.
    key_split = str(request['key']).split('_')[0]
    result_key = "{0}_results.csv".format(key_split)
    s3.put_object(Bucket=bucketname, Key=result_key, Body=(my_bytes))
    #bytes = csv_result.getvalue()
    # Add custom message and finish the function
    if ('key' in request):
        inspector.addAttribute(
            "bucketname", "bucketname " + str(request['bucketname']) + "!")
        inspector.addAttribute("key", str(request['key']))
        inspector.addAttribute("test val", csvcontent[0])
    inspector.inspectCPUDelta()
    return inspector.finish()
def handler(event, context):
    """Dispatch one of three data services selected by event['service']:
    1 = transform a CSV, 2 = load a CSV into a SQLite db, 3 = query the db.

    Raises NameError for any other service value. Returns the Inspector
    attribute dict.
    """
    s3 = boto3.client('s3')
    inspector = Inspector()
    inspector.inspectAll()
    inspector.addTimeStamp("FrameWorkRuntime")
    service = event.get("service")
    bucket = event.get("bucketname")
    key = event.get("filename")
    if service == 1:
        # Service 1: download CSV, transform, upload processed copy.
        filename = '/tmp/target.csv'
        processed_file = '/tmp/processed.csv'
        upload_key = 'transform.csv'
        s3.download_file(bucket, key, filename)
        processed_data = trans.process(filename)
        processed_data.to_csv(processed_file, index=False)
        s3.upload_file(processed_file, bucket, upload_key)
        inspector.addAttribute("numLine", processed_data.shape[0])
        inspector.addAttribute("outputFile", upload_key)
    elif service == 2:
        # Service 2: download CSV, build SQLite db, upload the db.
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        data_path = '/tmp/target.csv'
        db_path = '/tmp/' + key.split('.')[0] + '.db'
        # BUG FIX: the original assigned the string 'transform.csv' to
        # s3.download_file (clobbering the client method) instead of calling
        # it, so data_path was never downloaded. The standalone service-2
        # handler shows the intended call.
        s3.download_file(bucket, key, data_path)
        load.database_init(data_path, db_path, logger)
        s3.upload_file(db_path, bucket, 'target.db')
        inspector.addAttribute("DatabaseName", "target.db")
    elif service == 3:
        # Service 3: query the db (downloaded once and cached in /tmp).
        where_statement = event.get("where")
        group_statement = event.get("group")
        db_path = '/tmp/target.db'
        if not os.path.isfile(db_path):
            s3.download_file(bucket, key, db_path)
        result, lines = query.search(db_path, where_statement, group_statement)
        inspector.addAttribute("data", result)
        inspector.addAttribute("numLine", lines)
    else:
        raise NameError("There is no such service")
    inspector.inspectCPUDelta()
    return inspector.finish()
def yourFunction(request):
    """Hello-world handler: greets request['name'] and returns Inspector metrics.

    ROBUSTNESS: previously raised KeyError when 'name' was absent; now falls
    back to "Hello World!" for consistency with the sibling hello handler.
    """
    # Import the module and collect data
    inspector = Inspector()
    inspector.inspectAll()
    inspector.addTimeStamp("frameworkRuntime")

    # Add custom message and finish the function
    if 'name' in request:
        inspector.addAttribute("message", "Hello " + str(request['name']) + "!")
    else:
        inspector.addAttribute("message", "Hello World!")

    inspector.inspectCPUDelta()
    return inspector.finish()
def yourFunction(request, context):
    """EasyRL job-orchestration handler: manages a per-job EC2 instance over
    SSH, dispatching on request['task'] ('poll', 'runJob', 'runTest',
    'haltJob', 'exportModel', 'import', 'jobLog', 'terminateInstance',
    'info'). Returns the Inspector attribute dict.

    Required request keys: 'accessKey', 'secretKey', 'sessionToken', 'jobID',
    'task', 'arguments'. Module-level globals used: awsRegion, instanceInfo,
    githubDefaultRepo/Branch, paraMap, paramConditions, envMap, envList,
    agentMap, agentList, findOurInstance, createInstance, terminateInstance.
    """
    # Import the module and collect data
    inspector = Inspector()
    accessKey = request['accessKey']
    secretKey = request['secretKey']
    sessionToken = request['sessionToken']
    jobID = request['jobID']
    task = request['task']
    arguments = request['arguments']
    if ('episodes' in arguments):
        arguments['episodes'] += 1
    # An instanceID suffix makes the job ID unique per instance.
    instanceID = ""
    if ('instanceID' in arguments):
        instanceID = arguments['instanceID']
        if (instanceID is not None):
            jobID += str(instanceID)
    if ('gitHubURL' not in arguments):
        arguments['gitHubURL'] = githubDefaultRepo
        arguments['gitHubBranch'] = githubDefaultBranch
    continuousTraining = False
    if ("continuousTraining" in arguments):
        continuousTraining = arguments["continuousTraining"]
    modelName = "model.bin"
    botoSession = boto3.Session(
        aws_access_key_id=accessKey,
        aws_secret_access_key=secretKey,
        aws_session_token=sessionToken,
        region_name=awsRegion
    )
    inspector.addAttribute("instanceStateText", "Loading...")
    # Best-effort cost/info lookup for the requested instance type.
    if 'instanceType' in arguments:
        try:
            inspector.addAttribute("cost", instanceInfo[arguments['instanceType']]['cost'])
            inspector.addAttribute("info", instanceInfo[arguments['instanceType']])
        except:
            pass
    # ---- task: poll — report instance/job state, creating the instance if absent ----
    if (task == "poll"):
        ec2Client = botoSession.client('ec2')
        ec2Resource = botoSession.resource('ec2')
        s3Resource = botoSession.resource('s3')
        # findOurInstance doubles as a credential check here.
        try:
            ourInstance = findOurInstance(ec2Client, jobID, inspector)
            inspector.addAttribute("validCredentials", 1)
        except:
            inspector.addAttribute("validCredentials", 0)
            return inspector.finish()
        if (ourInstance is None):
            createInstance(ec2Client, ec2Resource, jobID, arguments, inspector)
            inspector.addAttribute("message", "creating instance")
            inspector.addAttribute("instanceState", "booting")
            inspector.addAttribute("instanceStateText", "Booting")
        else:
            # Check if it is ready to SSH...
            try:
                ip = ourInstance['PublicIpAddress']
                inspector.addAttribute("ip", ip)
                ssh = paramiko.SSHClient()
                ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
                ssh.connect(ip, username='******', password='******')
                ssh_stdin, ssh_stdout, ssh_stderr = ssh.exec_command(
                    "echo test")
                stdout = ssh_stdout.readlines()
            except:
                inspector.addAttribute(
                    "error", "Problem creating ssh connection to " + str(ip) + " try again")
                return inspector.finish()
            if (stdout[0] == "test\n"):
                ssh_stdin, ssh_stdout, ssh_stderr = ssh.exec_command(
                    "cat easyRL-v0/lambda/version_check1.txt")
                instanceData = ssh_stdout.readlines()
                # Has the version check? If not update
                if (instanceData == []):
                    # Stale checkout: park the old tree under a random name,
                    # schedule self-termination, and re-clone the repo.
                    ssh_stdin, ssh_stdout, ssh_stderr = ssh.exec_command(
                        "mv easyRL-v0/ OLD" + str(random.randint(1,10000000)) + "/")
                    stdout = ssh_stdout.readlines()
                    if (sessionToken == ""):
                        ssh_stdin, ssh_stdout, ssh_stderr = ssh.exec_command(
                            "sleep " + str(arguments['killTime']) + " && python3.7 easyRL-v0/lambda/killSelf.py " + jobID + " " + accessKey + " " + secretKey + " &")
                    else:
                        ssh_stdin, ssh_stdout, ssh_stderr = ssh.exec_command(
                            "sleep " + str(arguments['killTime']) + " && python3.7 easyRL-v0/lambda/killSelf.py " + jobID + " " + accessKey + " " + secretKey + " " + sessionToken + " &")
                    ssh_stdin, ssh_stdout, ssh_stderr = ssh.exec_command(
                        "git clone --branch " + arguments['gitHubBranch'] + " " + arguments['gitHubURL'])
                    # Draining stdout/stderr forces the clone to complete
                    # before we proceed.
                    stdout = ssh_stdout.readlines()  # DO NOT REMOVE
                    stderr = ssh_stderr.readlines()  # DO NOT REMOVE
                    ssh_stdin, ssh_stdout, ssh_stderr = ssh.exec_command(
                        "echo " + arguments['instanceType'] + str(arguments['killTime']) + " > tag.txt")
                    inspector.addAttribute("instanceState", "updated")
                    inspector.addAttribute("instanceStateText", "Cloned Repository")
                else:
                    # Instance type match the tag? If not reboot...
                    ssh_stdin, ssh_stdout, ssh_stderr = ssh.exec_command(
                        "cat tag.txt")
                    instanceData = ssh_stdout.readlines()
                    tag = arguments['instanceType'] + str(arguments['killTime'])
                    if (instanceData == [] or tag not in instanceData[0]):
                        # Wrong instance type/killTime: recreate from scratch
                        # and best-effort wipe the job's S3 bucket.
                        terminateInstance(
                            ec2Client, ec2Resource, ourInstance, inspector)
                        createInstance(ec2Client, ec2Resource, jobID, arguments, inspector)
                        try:
                            bucket = s3Resource.Bucket('easyrl-' + jobID)
                            bucket.objects.all().delete()
                        except:
                            pass
                        inspector.addAttribute('instanceState', "rebooting")
                        inspector.addAttribute("instanceStateText", "Recreating")
                    else:
                        # Is job running? If it is get progress. Else return idle.
                        ssh_stdin, ssh_stdout, ssh_stderr = ssh.exec_command(
                            "ps -aux | grep EasyRL.py")
                        stdout = ssh_stdout.readlines()
                        results = ""
                        for line in stdout:
                            results += line
                        # "terminal" appears in the EasyRL.py command line
                        # when a job is running.
                        if ("terminal" in results):
                            inspector.addAttribute(
                                'instanceState', "runningJob")
                            inspector.addAttribute("instanceStateText", "Running Task")
                            ssh_stdin, ssh_stdout, ssh_stderr = ssh.exec_command(
                                "cat ./arguments.json")
                            stdout = ssh_stdout.readlines()
                            #inspector.addAttribute("Test", str(stdout))
                            #return inspctor.finish()
                            if (stdout != []):
                                jobArguments = json.loads(stdout[0])
                                inspector.addAttribute(
                                    "jobArguments", jobArguments)
                                # Continuous training with changed arguments:
                                # halt so the new job can be started.
                                if continuousTraining and jobArguments != arguments:
                                    inspector.addAttribute('instanceState', "changingJob")
                                    inspector.addAttribute("instanceStateText", "Changing Task")
                                    task = "haltJob"
                            ssh_stdin, ssh_stdout, ssh_stderr = ssh.exec_command(
                                "cat ./data.json")
                            stdout = ssh_stdout.readlines()
                            if (stdout != []):
                                try:
                                    inspector.addAttribute(
                                        "progress", json.loads(stdout[0]))
                                except:
                                    inspector.addAttribute("progress", "waiting")
                            else:
                                inspector.addAttribute("progress", "waiting")
                        else:
                            inspector.addAttribute('instanceState', "idle")
                            inspector.addAttribute("instanceStateText", "Idle")
                            # Continuous training restarts the job on idle by
                            # falling through to the runJob branch below.
                            if continuousTraining:
                                task = "runJob"
                                inspector.addAttribute('instanceState', "startingJob")
                                inspector.addAttribute("instanceStateText", "Starting Task")
            else:
                inspector.addAttribute('instanceState', "initializing")
                inspector.addAttribute("instanceStateText", "Initializing")
            ssh.close()
    # ---- task: runJob — validate hyperparameters and launch training ----
    if (task == "runJob"):
        ec2Client = botoSession.client('ec2')
        ec2Resource = botoSession.resource('ec2')
        s3Resource = botoSession.resource('s3')
        # Best-effort wipe of the job's S3 bucket before starting.
        try:
            bucket = s3Resource.Bucket('easyrl-' + jobID)
            bucket.objects.all().delete()
        except:
            pass
        ourInstance = findOurInstance(ec2Client, jobID, inspector)
        if (ourInstance is not None):
            ip = ourInstance['PublicIpAddress']
            inspector.addAttribute("ip", str(ip))
            try:
                ssh = paramiko.SSHClient()
                ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
                ssh.connect(ip, username='******', password='******')
                ssh_stdin, ssh_stdout, ssh_stderr = ssh.exec_command(
                    "ps -aux | grep EasyRL.py")
                stdout = ssh_stdout.readlines()
            except:
                inspector.addAttribute(
                    "error", "Problem creating ssh connection to " + str(ip) + " try again")
                return inspector.finish()
            results = ""
            for line in stdout:
                results += line
            if ("terminal" not in results):
                # Error Checking
                if (str(arguments['agent']) in paraMap):
                    missingAttributes = []
                    outOfRange = []
                    valid = True
                    envIndex = str(arguments['environment'])
                    agentIndex = str(arguments['agent'])
                    if envMap[envIndex]['type'] not in agentMap[agentIndex]['supportedEnvs']:
                        inspector.addAttribute("error", "Incompatible agent/environment pair!")
                        return inspector.finish()
                    # Validate every hyperparameter the agent requires.
                    for pp in paraMap[str(arguments['agent'])]:
                        pp = str(pp)
                        if pp not in arguments:
                            missingAttributes.append(pp)
                        else:
                            val = arguments[pp]
                            if (val < paramConditions[pp]['min'] or val > paramConditions[pp]['max']):
                                outOfRange.append(pp)
                    if len(missingAttributes) > 0:
                        inspector.addAttribute("error-Missing", "Missing hyperparameters for agent: " + str(missingAttributes))
                        valid = False
                    if len(outOfRange) > 0:
                        errorMessage = "Attributes with invalid value: "
                        for error in outOfRange:
                            errorMessage += error + " min: " + str(paramConditions[error]['min']) + " max: " + str(paramConditions[error]['max']) + " used: " + str(arguments[error]) + " "
                        inspector.addAttribute("error-Range", errorMessage)
                        valid = False
                    if (valid == False):
                        return inspector.finish()
                else:
                    inspector.addAttribute("error", "Unknown Agent " + str(arguments['agent']))
                    return inspector.finish()
                # Persist the job arguments on the instance for later polling.
                ssh_stdin, ssh_stdout, ssh_stderr = ssh.exec_command(
                    "echo \'" + json.dumps(arguments) + "\' > arguments.json")
                stdout = ssh_stdout.readlines()
                # Build the scripted menu input piped into EasyRL.py
                # ('1' = train, '4' = save model, '5' = exit).
                command = 'printf "'
                command += str(arguments['environment']) + '\n'
                command += str(arguments['agent']) + '\n'
                command += '1\n'
                paramList = paraMap[str(arguments['agent'])]
                for param in paramList:
                    command += str(arguments[param]) + '\n'
                command += '4\n'
                command += modelName + '\n'
                command += '5\n'
                if (sessionToken != ""):
                    command += '" | python3.7 ./easyRL-v0/EasyRL.py --terminal --secretKey ' + secretKey + \
                        ' --accessKey ' + accessKey + ' --sessionToken ' + \
                        sessionToken + ' --jobID ' + jobID
                else:
                    command += '" | python3.7 ./easyRL-v0/EasyRL.py --terminal --secretKey ' + \
                        secretKey + ' --accessKey ' + accessKey + ' --jobID ' + jobID
                command += ' &> lastJobLog.txt & sleep 1'
                #inspector.addAttribute("command", command)
                ssh_stdin, ssh_stdout, ssh_stderr = ssh.exec_command(command)
                stdout = ssh_stdout.readlines()
                #inspector.addAttribute("stdout", stdout)
                ssh.close()
                inspector.addAttribute("message", "Job started")
            else:
                inspector.addAttribute("message", "Job already running")
        else:
            inspector.addAttribute('error', 'Instance not found.')
    # ---- task: runTest — validate and run a test of the saved model ----
    if (task == "runTest"):
        ec2Client = botoSession.client('ec2')
        ec2Resource = botoSession.resource('ec2')
        s3Resource = botoSession.resource('s3')
        try:
            bucket = s3Resource.Bucket('easyrl-' + jobID)
            bucket.objects.all().delete()
        except:
            pass
        ourInstance = findOurInstance(ec2Client, jobID, inspector)
        if (ourInstance is not None):
            ip = ourInstance['PublicIpAddress']
            inspector.addAttribute("ip", str(ip))
            try:
                ssh = paramiko.SSHClient()
                ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
                ssh.connect(ip, username='******', password='******')
                ssh_stdin, ssh_stdout, ssh_stderr = ssh.exec_command(
                    "ps -aux | grep EasyRL.py")
                stdout = ssh_stdout.readlines()
            except:
                inspector.addAttribute(
                    "error", "Problem creating ssh connection to " + str(ip) + " try again")
                return inspector.finish()
            results = ""
            for line in stdout:
                results += line
            if ("terminal" not in results):
                # Error Checking (same validation as runJob)
                if (str(arguments['agent']) in paraMap):
                    missingAttributes = []
                    outOfRange = []
                    valid = True
                    envIndex = str(arguments['environment'])
                    agentIndex = str(arguments['agent'])
                    if envMap[envIndex]['type'] not in agentMap[agentIndex]['supportedEnvs']:
                        inspector.addAttribute("error", "Incompatible agent/environment pair!")
                        return inspector.finish()
                    for pp in paraMap[str(arguments['agent'])]:
                        pp = str(pp)
                        if pp not in arguments:
                            missingAttributes.append(pp)
                        else:
                            val = arguments[pp]
                            if (val < paramConditions[pp]['min'] or val > paramConditions[pp]['max']):
                                outOfRange.append(pp)
                    if len(missingAttributes) > 0:
                        inspector.addAttribute("error-Missing", "Missing hyperparameters for agent: " + str(missingAttributes))
                        valid = False
                    if len(outOfRange) > 0:
                        errorMessage = "Attributes with invalid value: "
                        for error in outOfRange:
                            errorMessage += error + " min: " + str(paramConditions[error]['min']) + " max: " + str(paramConditions[error]['max']) + " used: " + str(arguments[error]) + " "
                        inspector.addAttribute("error-Range", errorMessage)
                        valid = False
                    if (valid == False):
                        return inspector.finish()
                else:
                    inspector.addAttribute("error", "Unknown Agent " + str(arguments['agent']))
                    return inspector.finish()
                ssh_stdin, ssh_stdout, ssh_stderr = ssh.exec_command(
                    "echo \'" + json.dumps(arguments) + "\' > arguments.json")
                stdout = ssh_stdout.readlines()
                # A model file must exist before a test can be run.
                ssh_stdin, ssh_stdout, ssh_stderr = ssh.exec_command(
                    "md5sum " + modelName)
                instanceData = ssh_stdout.readlines()
                # Has the tag? If not update
                if (instanceData != []):
                    # Scripted menu input: '2' = load model, '3' = test.
                    command = 'printf "'
                    command += str(arguments['environment']) + '\n'
                    command += str(arguments['agent']) + '\n'
                    command += '2\n'
                    command += modelName + '\n'
                    command += '3\n'
                    paramList = paraMap[str(arguments['agent'])]
                    for param in paramList:
                        command += str(arguments[param]) + '\n'
                    command += '5\n'
                    if (sessionToken != ""):
                        command += '" | python3.7 ./easyRL-v0/EasyRL.py --terminal --secretKey ' + secretKey + \
                            ' --accessKey ' + accessKey + ' --sessionToken ' + \
                            sessionToken + ' --jobID ' + jobID
                    else:
                        command += '" | python3.7 ./easyRL-v0/EasyRL.py --terminal --secretKey ' + \
                            secretKey + ' --accessKey ' + accessKey + ' --jobID ' + jobID
                    command += ' &> lastJobLog.txt & sleep 1'
                    #inspector.addAttribute("command", command)
                    ssh_stdin, ssh_stdout, ssh_stderr = ssh.exec_command(command)
                    stdout = ssh_stdout.readlines()
                    #inspector.addAttribute("stdout", stdout)
                    ssh.close()
                    inspector.addAttribute("message", "Test started")
                else:
                    ssh.close()
                    inspector.addAttribute("error", "No trained agent found")
            else:
                inspector.addAttribute("message", "Test already running")
        else:
            inspector.addAttribute('error', 'Instance not found.')
    # ---- task: haltJob — kill the running python process on the instance ----
    if (task == "haltJob"):
        ec2Client = botoSession.client('ec2')
        ec2Resource = botoSession.resource('ec2')
        ourInstance = findOurInstance(ec2Client, jobID, inspector)
        if (ourInstance is not None):
            ip = ourInstance['PublicIpAddress']
            #inspector.addAttribute("ip", str(ip))
            ssh = paramiko.SSHClient()
            ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
            ssh.connect(ip, username='******', password='******')
            command = "pkill python3.7"
            #inspector.addAttribute("command", command)
            ssh_stdin, ssh_stdout, ssh_stderr = ssh.exec_command(command)
            stdout = ssh_stdout.readlines()
            #inspector.addAttribute("stdout", stdout)
            ssh.close()
            inspector.addAttribute("message", "Job halted.")
        else:
            inspector.addAttribute("error", "Instance not found.")
    # ---- task: exportModel — upload the trained model to S3 and report URL ----
    if (task == "exportModel"):
        ec2Client = botoSession.client('ec2')
        ec2Resource = botoSession.resource('ec2')
        ourInstance = findOurInstance(ec2Client, jobID, inspector)
        if (ourInstance is not None):
            ip = ourInstance['PublicIpAddress']
            #inspector.addAttribute("ip", str(ip))
            ssh = paramiko.SSHClient()
            ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
            ssh.connect(ip, username='******', password='******')
            # md5sum output doubles as an existence check for the model file.
            ssh_stdin, ssh_stdout, ssh_stderr = ssh.exec_command(
                "md5sum " + modelName)
            instanceData = ssh_stdout.readlines()
            # Has the tag? If not update
            if (instanceData != []):
                if (sessionToken == ""):
                    command = "python3.7 easyRL-v0/lambda/upload.py " + modelName + " " + jobID + " " + accessKey + " " + secretKey
                else:
                    command = "python3.7 easyRL-v0/lambda/upload.py " + modelName + " " + jobID + " " + accessKey + " " + secretKey + " " + sessionToken
                ssh_stdin, ssh_stdout, ssh_stderr = ssh.exec_command(command)
                stdout = ssh_stdout.readlines()
                inspector.addAttribute("url", "https://easyrl-" + str(jobID) + ".s3.amazonaws.com/" + modelName)
            else:
                inspector.addAttribute("error", "Model not trained yet!")
            ssh.close()
        else:
            inspector.addAttribute("error", "Instance not found.")
    # ---- task: import — download a model from S3 onto the instance ----
    if (task == "import"):
        ec2Client = botoSession.client('ec2')
        ec2Resource = botoSession.resource('ec2')
        ourInstance = findOurInstance(ec2Client, jobID, inspector)
        if (ourInstance is not None):
            ip = ourInstance['PublicIpAddress']
            ssh = paramiko.SSHClient()
            ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
            ssh.connect(ip, username='******', password='******')
            if (sessionToken == ""):
                command = "python3.7 easyRL-v0/lambda/download.py " + modelName + " " + jobID + " " + accessKey + " " + secretKey
            else:
                command = "python3.7 easyRL-v0/lambda/download.py " + modelName + " " + jobID + " " + accessKey + " " + secretKey + " " + sessionToken
            ssh_stdin, ssh_stdout, ssh_stderr = ssh.exec_command(command)
            stdout = ssh_stdout.readlines()
            # NOTE(review): any stdout from download.py is reported under the
            # "error" attribute, even on success.
            inspector.addAttribute("error", stdout)
            ssh.close()
        else:
            inspector.addAttribute("error", "Instance not found.")
    # ---- task: jobLog — upload the last job log to S3 and report its URL ----
    if (task == "jobLog"):
        ec2Client = botoSession.client('ec2')
        ec2Resource = botoSession.resource('ec2')
        ourInstance = findOurInstance(ec2Client, jobID, inspector)
        if (ourInstance is not None):
            ip = ourInstance['PublicIpAddress']
            #inspector.addAttribute("ip", str(ip))
            ssh = paramiko.SSHClient()
            ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
            ssh.connect(ip, username='******', password='******')
            if (sessionToken == ""):
                command = "python3.7 easyRL-v0/lambda/upload.py lastJobLog.txt " + jobID + " " + accessKey + " " + secretKey
            else:
                command = "python3.7 easyRL-v0/lambda/upload.py lastJobLog.txt " + jobID + " " + accessKey + " " + secretKey + " " + sessionToken
            ssh_stdin, ssh_stdout, ssh_stderr = ssh.exec_command(command)
            stdout = ssh_stdout.readlines()
            inspector.addAttribute("url", "https://easyrl-" + str(jobID) + ".s3.amazonaws.com/lastJobLog.txt")
            ssh.close()
        else:
            inspector.addAttribute("error", "Instance not found.")
    # ---- task: terminateInstance — wipe the job bucket and kill the instance ----
    if (task == "terminateInstance"):
        ec2Client = botoSession.client('ec2')
        ec2Resource = botoSession.resource('ec2')
        s3Resource = botoSession.resource('s3')
        try:
            bucket = s3Resource.Bucket('easyrl-' + jobID)
            bucket.objects.all().delete()
        except:
            pass
        ourInstance = findOurInstance(ec2Client, jobID, inspector)
        terminateInstance(ec2Client, ec2Resource, ourInstance, inspector)
    # ---- task: info — report static environment/agent/parameter metadata ----
    if (task == "info"):
        inspector.addAttribute("environments", envList)
        inspector.addAttribute("environmentsMap", envMap)
        inspector.addAttribute("parameters", paramConditions)
        combinedAgents = []
        for agent in agentList:
            agent['parameters'] = paraMap[agent['index']]
            combinedAgents.append(agent)
        combinedAgentsMap = {}
        for aa in combinedAgents:
            combinedAgentsMap[aa['index']] = aa
        inspector.addAttribute("agents", combinedAgents)
        inspector.addAttribute("agentsMap", combinedAgentsMap)
    return inspector.finish()
def OnInit(self):
    """wx application bootstrap: creates the Palette, Inspector and Editor
    frames, shows a splash screen while the heavy modules import, wires the
    frames together, and optionally runs the user's startup script.

    NOTE(review): this block is Python 2 (`print` statements,
    `except IOError, err`).  The outer `try:` opened after the splash screen
    is created has no `except`/`finally` visible in this chunk — presumably a
    `finally: abt.Destroy()` (and a trailing `return True`, which wx.App.OnInit
    requires) was lost when the file was assembled; confirm against the
    original source.
    """
    Preferences.initScreenVars()
    # i18n support
    self.locale = wx.Locale(Preferences.i18nLanguage)
    wx.Locale.AddCatalogLookupPathPrefix(os.path.join(Preferences.pyPath, 'locale'))
    if hasattr(sys, 'frozen'):
        # Frozen (py2exe-style) builds must load the wx standard catalog themselves.
        self.locale.AddCatalog('wxstd')
    self.locale.AddCatalog('boa')

    wx.ToolTip.Enable(True)
    # Map the configured debug mode onto wx assertion behaviour.
    if Preferences.debugMode == 'release':
        self.SetAssertMode(wx.PYAPP_ASSERT_SUPPRESS)
    elif Preferences.debugMode == 'development':
        self.SetAssertMode(wx.PYAPP_ASSERT_EXCEPTION)

    # The 'splash' config section caches the module count from the previous
    # run so the splash progress bar can be scaled this time around.
    conf = Utils.createAndReadConfig('Explorer')
    if not conf.has_section('splash'):
        conf.add_section('splash')
        modTot = 1
    else:
        modTot = conf.getint('splash', 'modulecount')
    # NOTE(review): eval() of a config value — the file is local/trusted, but
    # this is still eval of external text.
    fileTot = len(eval(conf.get('editor', 'openfiles'), {}))
    abt = About.createSplash(None, modTot, fileTot)
    try:
        abt.Show()
        abt.Update()
        # Let the splash screen repaint
        wx.Yield()
        # Imported here to initialise core features and plug-ins
        import PaletteMapping
        print 'creating Palette'
        import Palette
        self.main = Palette.BoaFrame(None, -1, self)
        print 'creating Inspector'
        import Inspector
        inspector = Inspector.InspectorFrame(self.main)
        print 'creating Editor'
        import Editor
        editor = Editor.EditorFrame(self.main, -1, inspector, wx.Menu(),
                                    self.main.componentSB, self, self.main)
        self.SetTopWindow(editor)
        inspector.editor = editor

        # Persist the module count for the next start-up's splash scaling.
        conf.set('splash', 'modulecount', str(len(sys.modules)))
        try:
            Utils.writeConfig(conf)
        except IOError, err:
            # Best effort: record the problem for later display instead of
            # aborting start-up over a read-only config file.
            startupErrors.append(_('Error writing config file: %s\nPlease '
                  'ensure that the Explorer.*.cfg file is not read only.')% str(err))

        # `emptyEditor` is presumably a module-level flag — TODO confirm.
        if not emptyEditor:
            editor.restoreEditorState()
        self.main.initPalette(inspector, editor)
##        editor.setupToolBar()
        import Help
        if not Preferences.delayInitHelp:
            print 'initialising help'
            Help.initHelp()

        global constricted
        constricted = constricted or Preferences.suBoaConstricted
        print 'showing main frames <<100/100>>'
        if constricted:
            # "Constricted" mode: only centre the editor/inspector, do not
            # show the palette frame.
            editor.CenterOnScreen()
            inspector.CenterOnScreen()
            inspector.initSashes()
        else:
            self.main.Show()
            inspector.Show()
            # For some reason the splitters have to be visible on GTK before
            # they can be sized.
            inspector.initSashes()
            editor.Show()
            editor.doAfterShownActions()

        # Call startup files after complete editor initialisation
        global startupfile
        if Preferences.suExecPythonStartup and startupEnv:
            startupfile = startupEnv
        if editor.shell:
            editor.shell.execStartupScript(startupfile)
class RecipeWindow(QWidget):
    """Top-level window for composing a trading "recipe": a list of trigger
    rows plus a buy-action panel, saved to / loaded from ``*.algo`` files.
    """

    # Default directory offered by the open/save dialogs (was duplicated
    # inline in saveRecipeAs and openRecipe).
    DEFAULT_DIR = "/home/dylan/mock_algo/"
    # File-name filter for recipe files.
    FILE_FILTER = "*.algo"

    def __init__(self, controller, recipeName=None):
        """:param controller: application controller, handed to the Inspector
            and used to validate the buy-action panel.
        :param recipeName: path of an existing recipe file, or None for a
            new, untitled recipe.
        """
        super(RecipeWindow, self).__init__()
        self.setAcceptDrops(True)
        self.recipeName = recipeName
        self.controller = controller
        self.initUI()

    def initUI(self):
        """Build the three-column layout: function palette + inspector on the
        left, trigger list in the middle, buy-action panel on the right."""
        self.setAcceptDrops(True)

        # establish layout
        rootHLayout = QtGui.QHBoxLayout()
        vLayout1 = QtGui.QVBoxLayout()

        # functions
        self.functionList = FunctionScrollWidget()
        vLayout1.addWidget(self.functionList)

        # create the inspector
        self.inspector = Inspector(self.controller)
        self.inspector.setMinimumHeight(150)
        vLayout1.addWidget(self.inspector)

        # add the vLayout to the root
        rootHLayout.addLayout(vLayout1)

        # set up the buttons
        btnAddRow = QtGui.QPushButton("Add Row")

        # label that stores the name of the recipe
        self.lblName = QtGui.QLabel("Untitled", self)
        self.lblName.setAlignment(QtCore.Qt.AlignmentFlag.AlignHCenter)

        self.list = RecipeList()
        self.list.setBackgroundRole(QtGui.QPalette.ColorRole.Light)
        self.list.setMinimumWidth(300)
        self.list.addEmptyTrigger()

        # Update the inspector's editor every time a function is selected.
        self.list.function_selected.connect(self.inspector.setEditor)
        # Add a new row in the recipe list when the button is clicked.
        btnAddRow.clicked.connect(self.list.addEmptyTrigger)

        vLayout2 = QtGui.QGridLayout()
        vLayout2.addWidget(self.lblName, 0, 0, 1, 1)
        vLayout2.addWidget(btnAddRow, 1, 0, 1, 1)
        vLayout2.addWidget(self.list, 2, 0, 10, 1)
        rootHLayout.addLayout(vLayout2)

        self.pnlBuyActions = ActionPanel(self)
        self.pnlBuyActions.setBackgroundRole(QtGui.QPalette.ColorRole.Light)
        self.pnlBuyActions.setMinimumWidth(300)
        self.pnlBuyActions.addEmptyTrigger()
        rootHLayout.addWidget(self.pnlBuyActions)

        # window code
        self.setLayout(rootHLayout)
        self.setGeometry(300, 300, 600, 300)
        self.setWindowTitle('Click or Move')
        self.show()

    def _showMessage(self, icon, title, text):
        """Show a modal message box (was duplicated inline five times)."""
        msgBox = QtGui.QMessageBox(icon, title, "")
        msgBox.setText(text)
        msgBox.exec_()

    def saveRecipeAs(self):
        """Bring up a file dialog so the user can save the recipe."""
        self.recipeName = QFileDialog.getSaveFileName(
            self, dir=self.DEFAULT_DIR, filter=self.FILE_FILTER)[0]
        if self.recipeName == '':  # dialog cancelled
            self.recipeName = None
            return
        self.lblName.setText(os.path.basename(self.recipeName))
        self.saveRecipeWithName(self.recipeName)

    def saveRecipe(self):
        """Save under the current name (prompts for one if untitled)."""
        self.saveRecipeWithName(self.recipeName)

    def saveRecipeWithName(self, name):
        """Validate the recipe and write it to ``self.recipeName``.

        Falls back to saveRecipeAs() when no name has been chosen yet;
        shows an error dialog and aborts on any validation failure.
        """
        if self.recipeName is None:  # was `== None`
            self.saveRecipeAs()
            return
        if self.list.numTriggers() == 0:
            self._showMessage(
                QtGui.QMessageBox.Icon.Critical, "Error",
                "A recipe must have at least one trigger to be able to save.")
            return
        recipe = self.list.createRecipe()
        if not recipe:
            self._showMessage(
                QtGui.QMessageBox.Icon.Critical, "Error",
                "There was an error saving the recipe! Fix the red triggers and try again.")
            return
        if not self.pnlBuyActions.validate(self.controller):
            self._showMessage(
                QtGui.QMessageBox.Icon.Critical, "Error",
                "There was an error saving the recipe! Fix the buy action and try again.")
            return
        triggerFunc = self.pnlBuyActions.getTrigger().convertToTriggerFunc()
        recipe.trigger = triggerFunc
        recipe.name = name.rsplit('/')[-1]  # display name = last path component
        recipe.to_file(self.recipeName)
        # save successful
        self._showMessage(QtGui.QMessageBox.Icon.Information, "Success!",
                          "Recipe successfully saved!")

    def openRecipe(self):
        """Bring up a file dialog so the user can open a recipe."""
        path = QFileDialog.getOpenFileName(
            self, dir=self.DEFAULT_DIR, filter=self.FILE_FILTER)[0]
        if path == '':  # dialog cancelled
            return
        parser = Parser(None)
        try:
            recipe = parser.parse_recipe(path)
        except Exception:  # was a bare `except:`; still best-effort by design
            self._showMessage(QtGui.QMessageBox.Icon.Critical, "Error",
                              "Unable to open file.")
            return
        self.list.loadRecipe(recipe)
        self.pnlBuyActions.loadRecipe(recipe)
        self.recipeName = path
        self.lblName.setText(os.path.basename(self.recipeName))