def setComponentName(comp, compIdToNameMap, prenamed, nameComponents):
    """Assign a name to `comp`, from a prenamed map or by type lookup/user input."""
    if prenamed:
        # Prenamed mode: every component id must already be in the map.
        if comp.compId not in compIdToNameMap:
            raise ValueError('No name for ' + comp.compId + '!')
        comp.name = compIdToNameMap[comp.compId]
        state.get('componentNames').add(comp.name)
        return

    knownTypes = state.get('componentTypes')
    try:
        # Reuse the name of an equal, previously-seen component type.
        comp.name = knownTypes[knownTypes.index(comp)].name
    except ValueError:
        # First occurrence of this component type: name it interactively or
        # fall back to an auto-generated 'typeN' name.
        if nameComponents:
            generateCompDiagram(comp)
            comp.name = input(
                ' - Found new component, please provide a name for it: ')
            print()
        else:
            counter = state.get('unnamedTypesCounter')
            comp.name = 'type' + str(counter)
            state.set('unnamedTypesCounter', counter + 1)
        knownTypes.append(comp)
def checkAndRecluster():
    """Recompute model clusters in parallel unless predictions are already current."""
    if state.get('labelPredictionsUpdated'):
        return

    remainingModels = getRemainingModelsList()
    modelCount = len(remainingModels)
    readLock, writeLock = Lock(), Lock()
    newClusters = {}

    initProgressBar()
    workers = [
        Thread(target=computeLabelsForNewModels,
               args=(remainingModels, newClusters, modelCount, readLock,
                     writeLock),
               daemon=True) for _ in range(CLASSIFICATION_THREADS)
    ]
    printTitle('Recalibrating algorithm with new hypotheses, please wait.')
    for worker in workers:
        worker.start()
    for worker in workers:
        worker.join()

    updateClusters(newClusters)
    state.set('skippedModels', [])
    state.set('labelPredictionsUpdated', True)
    showExpectedLabelsDistribution()
def initUserLabelCounters():
    """Store a zeroed per-label counter map in the shared state."""
    zeroed = {label: 0 for label in state.get('labels')}
    state.set('userLabelCounters', zeroed)
def popModelsAndClassify(modelsStrings, labelsFile, labelsCounter, lockR, lockW):
    """Worker-thread body: pop batches of model strings, classify them, and
    accumulate results into shared counters/state.

    Runs until `modelsStrings` is exhausted. `lockR` guards the shared input
    list; `lockW` guards the shared counters and state written after each
    classification batch. `labelsFile` is unused in this body -- the
    classification text accumulates in state under 'classifOutput' instead.
    """
    tempDirPath = state.get('tempDirPath')
    # Unique scratch file per worker so threads don't clobber each other.
    tempFilePath = join(tempDirPath, uuid.uuid4().hex + '.las')
    # NOTE(review): this read is dead -- `output` is re-read under lockW below.
    output = state.get('classifOutput')
    totalNumOfModels = state.get('numOfInputModels')
    maxModelsAtOnce = MODELS_PER_PROC
    while True:
        currModels = list()
        lockR.acquire()
        if (not len(modelsStrings)):
            # No work left: release the lock and end this worker.
            lockR.release()
            return
        numOfModels = min(maxModelsAtOnce, len(modelsStrings))
        # print(numOfModels)
        for idx in range(numOfModels):
            currModels.append(modelsStrings.pop())
        # print(len(modelsStrings))
        lockR.release()
        # Classification itself runs without holding any lock.
        modelObjs = list(map(utils.computeModelObjFromModelStr, currModels))
        labelPredsForModels = utils.computeLabelPredsForModels(
            modelObjs, tempFilePath)
        modelLabelsMap = utils.getModelLabelsMap(labelPredsForModels)
        lockW.acquire()
        # Re-read under the write lock so concurrent appends aren't lost.
        output = state.get('classifOutput')
        for label in labelPredsForModels:
            output += label + '.\n'
        state.set('classifOutput', output)
        # Tally: exactly one label vs. multiple labels per model.
        for model in list(modelLabelsMap.keys()):
            if len(modelLabelsMap[model]) == 1:
                labelsCounter[modelLabelsMap[model][0]] += 1
            else:
                labelsCounter[MULTIPLE_LABELS_STRING] += 1
        # Models missing from the map received no label at all.
        labelsCounter[NO_LABEL_STRING] += numOfModels - len(
            list(modelLabelsMap.keys()))
        # Queue a bounded sample of problematic models for manual labelling.
        for model in modelObjs:
            mId = model.modelId
            if (mId not in list(modelLabelsMap.keys())):
                noLabelMustLabels = state.get(
                    'mustLabelModels')[NO_LABEL_STRING]
                if (len(noLabelMustLabels) < MUST_LABEL_SIZE):
                    noLabelMustLabels.append(model)
            elif (len(modelLabelsMap[mId]) > 1):
                multipleLabelsMustLabels = state.get(
                    'mustLabelModels')[MULTIPLE_LABELS_STRING]
                if (len(multipleLabelsMustLabels) < MUST_LABEL_SIZE):
                    multipleLabelsMustLabels.append(model)
        utils.printProgressBar(totalNumOfModels, numOfIterations=numOfModels)
        lockW.release()
def initClusterWeights():
    """Give every current cluster an initial weight of 1."""
    clusters = state.get('clusters')
    state.set('clusterWeights', dict.fromkeys(clusters, 1))
def on_message(msg):
    """Handle an MQTT message by relaying the mapped actuator payload over serial.

    The payload looks like '<device>_<channel>_<value>' (inferred from the
    split/indexing below -- confirm against the rfm_actuators table); the
    parsed parts are mirrored into shared state before transmission.
    """
    payload = rfm_actuators[msg.payload]
    payload_split = payload.split("_")
    state.set(payload_split[0] + "." + payload_split[1], payload_split[2])
    ser = serial.Serial(serialdev, 9600)  # open serial port
    try:
        # Send three times with pauses, presumably to improve delivery odds
        # on a lossy RF link.
        for i in range(3):
            ser.write('{"DEVICE":[{"G":"0","V":0,"D":11,"DA":"' +
                      hextobin(payload) + '"}]}')
            time.sleep(1)
    finally:
        # BUG FIX: always release the port -- the original leaked the serial
        # handle if a write raised.
        ser.close()
def onExit(): nonlocal patternVars relevantPatterns = [] for pattern in list(patternVars.keys()): if patternVars[pattern].get(): relevantPatterns.append(pattern) state.set('relevantPatterns', relevantPatterns) root.destroy()
def runILASPCommands(labelsToUpdateHypotheses):
    """Compute hypotheses for the given labels by running ILASP in parallel.

    One worker thread per label collects results into `outputs`. If no
    hypotheses cover ALL manual classifications, the user may opt into a
    noise-tolerant retry, in which case this function recurses over every
    label with the examples files "noisified".
    """
    utils.printTitle(
        'Please wait while the hypotheses are being computed, this might take a while.'
    )
    backGroundStr = utils.getBackgroundString()
    biasConstantsStr = utils.computeBiasConstants()
    genericBiasStr = utils.getBiasString().replace('$$CONSTANTS$$',
                                                   biasConstantsStr)
    outputs = {}
    lock = Lock()  # guards writes to `outputs` across worker threads
    threads = list()
    for label in labelsToUpdateHypotheses:
        threads.append(
            Thread(target=runILASPCMDInThread,
                   args=(backGroundStr, genericBiasStr, label, outputs, lock),
                   daemon=True))
    for thread in threads:
        thread.start()
    for thread in threads:
        thread.join()
    try:
        utils.updateHypotheses(outputs)
        state.get('hypothesesToUpdate').clear()
    except ExitError as e:
        print(
            '** Error: No hypotheses covering ALL manual classifications were found.\n'
        )
        if state.get('noise'):
            # Technically, shouldn't be able to get here;
            # If we do, raise the error in order to fully exit
            raise e
        print('Would you like to:')
        print(
            "(1) Continue search for hypotheses with BEST coverage of manual classifications?"
        )
        print('(2) Exit?')
        # Keep prompting until the user types a valid 1 or 2.
        while True:
            try:
                ans = int(input('Your answer (1/2): '))
                if (ans < 1 or ans > 2):
                    raise ValueError
                break
            except ValueError:
                continue
        if (ans == 2):
            raise e
        else:
            # Retry in noise-tolerant mode over ALL labels.
            state.set('noise', True)
            utils.noisifyExamplesFiles()
            runILASPCommands(state.get('labels'))
def printProgressBar(total, numOfIterations=1, full=False):
    """Advance the shared iteration counter and redraw a console progress bar.

    `full=True` forces the bar to render as 100% complete regardless of the
    stored counter.
    """
    iteration = state.get('iterationNum') + numOfIterations
    state.set('iterationNum', iteration)
    if full:
        iteration = total
    fraction = iteration / float(total)
    percent = '{0:.2f}'.format(100 * fraction)
    barLength = 50
    filledCount = int(barLength * fraction)
    bar = '█' * filledCount + '-' * (barLength - filledCount)
    # '\r' at both ends keeps redrawing over the same console line.
    print('\r{} |{}| {}%'.format('Progress:', bar, percent), end='\r')
    if iteration == total:
        print('\n')
def classifyAllModels(modelsAbsPath):
    """Label every input model in parallel and write results to the class file.

    Spawns CLASSIFICATION_THREADS workers (popModelsAndClassify) that pop
    models from the shared list, writes the accumulated classification text
    to the output file, then renders a pie chart of the label distribution.
    """
    modelsStrings = utils.getModelsStrings(modelsAbsPath)
    utils.initProgressBar()
    lockR = Lock()
    lockW = Lock()
    classFilePath = state.get('outputFilePath')
    labelsCounter = utils.getBlankLabelsCounter()
    # FIX: 'with' guarantees the file is closed even if a worker raises
    # (the original open()/close() pair leaked the handle on error).
    with open(classFilePath, 'w') as labelsFile:
        threads = [
            Thread(target=popModelsAndClassify,
                   args=(modelsStrings, labelsFile, labelsCounter, lockR,
                         lockW),
                   daemon=True) for _ in range(CLASSIFICATION_THREADS)
        ]
        utils.printTitle(
            'All initial models are about to be labelled, this might take a while.'
        )
        for thread in threads:
            thread.start()
        for thread in threads:
            thread.join()
        labelsFile.write(state.get('classifOutput'))
        state.set('classifOutput', '')
    # (typo fixed: 'succesfully' -> 'successfully')
    print('* All models have been successfully labelled and saved in:\n' +
          classFilePath + '\n')
    # Only chart labels that actually occurred.
    nonZeroLabels = [l for l in labelsCounter if labelsCounter[l] > 0]
    labels = [l + ': ' + str(labelsCounter[l]) for l in nonZeroLabels]
    values = [labelsCounter[l] for l in nonZeroLabels]
    utils.generatePieChart(labels, values, title='Labels distribution')
def parseInputFile():
    """Parse the input models file, cluster the models, and return the
    cluster-to-model mapping."""
    print('* Parsing input file...\n')
    modelStrings = utils.getModelsStrings(state.get('inputFilePath'))
    state.set('numOfInputModels', len(modelStrings))
    # Shuffle (via random.sample) regardless of whether we subsample down to
    # MAX_RELEVANT_MODELS or keep everything: once clustering is done, the
    # models inside each cluster are already randomly ordered, so we can just
    # pop from one end instead of drawing a random index per cluster.
    sampleSize = min(len(modelStrings), MAX_RELEVANT_MODELS)
    modelStrings = random.sample(modelStrings, sampleSize)
    models = computeAllModelObjects(modelStrings)
    state.set('sampleModelIds', utils.getModelIds(models))
    if not state.get('prenamedComponents'):
        names = {comp.name for comp in state.get('componentTypes')}
        state.set('componentNames', names)
    print('* Setting up model selection algorithm...')
    vectors = computeModelsCompositionVectors(models)
    labels = clusterModels(vectors)
    return computeClusterToModelMapping(models, labels)
def updateCache(query):
    """Refresh the query cache with a random sample of models matching `query`."""
    genericQuery = computeGenericQuery(query)
    modelStrings = utils.getModelsStrings(state.get('inputFilePath'))
    utils.initProgressBar()
    readLock, writeLock = Lock(), Lock()
    # Models that satisfy the query constraints AND are not yet labelled.
    validModels = []
    workers = [
        Thread(target=popModelsAndCheckQuery,
               args=(modelStrings, genericQuery, readLock, writeLock,
                     validModels),
               daemon=True) for _ in range(CLASSIFICATION_THREADS)
    ]
    utils.printTitle(
        'Searching for complying models, this might take a while.')
    for worker in workers:
        worker.start()
    for worker in workers:
        worker.join()
    sampleSize = min(len(validModels), QUERY_CACHE_SIZE)
    state.set('queryCache', random.sample(validModels, sampleSize))
    state.set('ranAQuery', True)
    # Remember the query so the editor shows it by default next time.
    state.set('prevQuery', query)
def get_canonical_url(url):
    """Resolve `url` to its canonical location by following HTTP redirects.

    Python 2 Tornado-style coroutine: the result is delivered via
    ``raise gen.Return`` (presumably decorated with ``@gen.engine`` /
    ``@gen.coroutine`` where defined -- the decorator is not visible here).
    Results are memoized in the shared state under an MD5-derived key.
    """
    # Check if the result is cached in the state
    hash_key = md5.new(url).hexdigest()
    result = yield state.get("canonical_url_%s" % hash_key)
    if result is not None:
        raise gen.Return(result)
    # Shell out to curl and keep only the 'Location:' headers from the
    # redirect chain. NOTE(review): `url` is interpolated into a shell
    # string -- unsafe if it can contain quotes/shell metacharacters.
    sub_process = process.Subprocess(
        args="curl -sIL '%s' | grep -i ^location:" % url,
        shell=True,
        stdout=process.Subprocess.STREAM,
        stderr=process.Subprocess.STREAM)
    result, error = yield [
        gen.Task(sub_process.stdout.read_until_close),
        gen.Task(sub_process.stderr.read_until_close)
    ]
    logger.info("Subprocess result: " + str(result))
    logger.info("Subprocess error: " + str(error))
    result = result.splitlines()
    if len(result) >= 1:
        # The last Location header is the final hop of the redirect chain.
        result = result[-1]
        result = re.match("location:\s*(.*)$", result, re.IGNORECASE)
        if result and result.groups()[0]:
            result = result.groups()[0]
        else:
            result = url
    else:
        # Either the url is canonical or we are broken
        result = url
    # Cache the result
    if result is not None:
        state.set("canonical_url_%s" % hash_key, result)
    raise gen.Return(result)
def updateClusters(newClusters):
    """Install `newClusters` and recompute cluster sampling weights.

    Real labels get weight 1; the synthetic NO_LABEL / MULTIPLE_LABELS
    clusters are boosted to the number of real labels (minimum 1).
    """
    state.set('clusters', newClusters)
    realLabelCount = 0
    weights = {}
    for key in newClusters:
        if key not in (NO_LABEL_STRING, MULTIPLE_LABELS_STRING):
            realLabelCount += 1
            weights[key] = 1
    boosted = realLabelCount or 1
    if NO_LABEL_STRING in newClusters:
        weights[NO_LABEL_STRING] = boosted
    if MULTIPLE_LABELS_STRING in newClusters:
        weights[MULTIPLE_LABELS_STRING] = boosted
    state.set('clusterWeights', weights)
def get(self, datachannel_id, action):
    """Apply a lifecycle `action` (start/pause/reset) to a pull channel."""
    if action not in ('start', 'pause', 'reset'):
        self.fail("Unrecognizable state action %s" % action)
    else:
        key_prefix = "pull_themes_graphdb_channel_%s" % datachannel_id
        if action == 'start':
            state.set(key_prefix + ".frozen", False)
        elif action == 'pause':
            state.set(key_prefix + ".frozen", True)
        elif action == 'reset':
            state.set(key_prefix + ".last_update", None)
        self.success("OK")
# Smoke test for the 'state' and 'noddy' extension modules.
import noddy
import state

print("1 testing state")
state.set("foo", "bar")
print("  ", state.get("foo"))
# "bar" was never set as a key; presumably get() returns a default here -- confirm.
print("  ", state.get("bar"))
print("  pass")

print("2 test noddy")
n = noddy.Noddy("Rick", "James", 5)
print("  ", n.first, n.last, n.number)
print("  ", n.name())
print("  pass")

# Dump both modules' public attributes for manual inspection.
print(dir(state))
print(dir(noddy))
reverse = True elif(buttonRot and pot>=-0.2 and reverse): reverse = False sendstr="set:io4_3="+str(int(reverse))+"\n" if(reverse): pot*=-1 buttons=values[2][1:] for i,x in enumerate(buttons): buttons[i]=bool(x) if not DEV: vjoy.setButton(i, buttons[i]) if DEV: print("reverse",reverse) print("pot:",pot) print("enc:",enc) print("io4:",buttons) print("butrot:",buttonRot) print("sending",sendstr) else: reverse=state.set("rev",reverse) vjoy.setAxis(2, pot) vjoy.setAxis(3, enc) vjoy.setButton(4, buttonRot) sock.sendall(sendstr) finally: sock.close()
# Joystick-to-vJoy mapping: CMS hat emulation and zoom toggle constants.
OVERRIDE_DOG_BUTTON_OUT = 7
OVERRIDE_HOLD_SECONDS = 0.1

# sticks
vjoy = joysticks.get('vJoy Device')
combatstick = joysticks.get("CH Combatstick USB")
pedals = joysticks.get('CH Pro Pedals USB')
throttle = joysticks.get('Saitek Pro Flight Quadrant')

# combatstick button long press for CMS down/right toggle and CMS forward
cms = combatstick.getButton(CMS_BUTTON_IN)
# NOTE(review): state.toggle appears to fire on a held press of the given
# duration -- confirm against the state module's API.
if state.toggle("cms-downright", cms, CMS_DOWNRIGHT_HOLD_SECONDS):
    # Long press: alternate between CMS down and CMS right outputs.
    cmsdown = state.get("cms-down", False)
    vjoy.setButton(CMS_DOWN_BUTTON_OUT, not cmsdown)
    vjoy.setButton(CMS_RIGHT_BUTTON_OUT, cmsdown)
    state.set("cms-down", not cmsdown)
    vjoy.setButton(CMS_FORWARD_BUTTON_OUT, False)
    state.set("cms-up", True)  # fake toggle cms forward
else:
    vjoy.setButton(CMS_RIGHT_BUTTON_OUT, False)
    vjoy.setButton(CMS_DOWN_BUTTON_OUT, False)
    if not state.get("cms-downright"):
        # Short press maps to CMS forward via a toggle on release.
        vjoy.setButton(CMS_FORWARD_BUTTON_OUT, state.toggle("cms-up", not cms))

# combatstick button for zoom axis toggle and right pedal for custom zoom
FREETRACK_KEY = "CONTROL SHIFT ALT F"
ZOOMED_OUT = 1.0
ZOOM_IN = 0.50
zoomButton = combatstick.getButton(ZOOM_BUTTON_IN)
zoom = state.get("zoom")
def on_message(me, myself, msg): inbound = json.loads(msg.payload) light = inbound[0] func = inbound[1] try: data = inbound[2] except IndexError: data = "" hub = lights[light]['hub'] group = str(lights[light]['group']) light_type = str(lights[light]['type']) try: attributes = state.get(light).attributes current_status = state.get(light).state except: attributes = {} current_status = "" attributes['brightness'] = 0 if light_type == "white": for x in range(0, 10): logger.debug("Step down") send_command(hub, light, commands["white_brightnessdown"]) if light_type == "rgbw": brightness = brightness_map[0] command = commands["rgbw_brightness"] command[1] = brightness send_command(hub, light, command) print light + ' ' + func + ' ' + data if func == current_status: logger.info("Sorry we are already at status " + str(func) + " for this light") return False command = "" # On or full if func == "on" or func == "full": print light_type + "_" + group + "_" + func command = commands[light_type + "_" + group + "_" + func] if func == "on": state_data = "on" if func == "full": state_data = "on" attributes['brightness'] = 10 # Off if func == "off": send_command(hub, light, commands[light_type + "_" + group + "_on"]) time.sleep(0.2) set_brightness(hub, light, light_type, attributes['brightness'], "0") time.sleep(0.2) command = commands[light_type + "_" + group + "_off"] state_data = "off" attributes['brightness'] = 0 # Brightness if func == "brightness": set_brightness(hub, light, light_type, attributes['brightness'], data) attributes['brightness'] = data state_data = "on" if command is not "": send_command(hub, light, command) if attributes: state.set("limitlessLED", light, state_data, json.dumps(attributes)) else: state.set("limitlessLED", light, state_data)
def computeHypotheses():
    """Interactively collect classifications and compute label hypotheses.

    Simulates a do-until loop: the user classifies models until they stop,
    then hypotheses are computed from the classified examples (via ILASP) and
    printed. The user may then resume manual classification for better
    hypotheses (the function recurses) or accept the current hypotheses for
    automatic classification of all input data.
    """
    # By default, ask to classify a model to have something to work with
    newClassif()
    # This simulates a do-until loop; it asks the user to classify models until
    # they decide to stop, at which point the hypotheses are computed based on the
    # classified examples, and then outputted for the user to analyze.
    # At that point, the user can decide to go back to manual classification to
    # obtain better hypotheses (basically start the function over) or exit the loop
    # and use the hypotheses they have now in order to automatically classify all
    # the initial provided models
    while True:
        # Check if no further models available, compute hypotheses and return if true
        if (utils.clustersAreEmpty(state.get('clusters'))):
            utils.printTitle(
                'No other models to classify available, computing hypotheses.')
            hypothesesToUpdate = list(state.get('hypothesesToUpdate'))
            if (len(hypothesesToUpdate)):
                runILASPCommands(hypothesesToUpdate)
                utils.printHypotheses()
            else:
                utils.printTitle(
                    'All new labels agree with the last hypotheses computed.')
            return
        continueClassif = input(
            '\nWould you like to classify another model? (y/n) ').lower()
        if (continueClassif == 'y'):
            print()
            newClassif()
        elif (continueClassif == 'n'):
            # Recompute hypotheses only for labels whose examples changed.
            hypothesesToUpdate = list(state.get('hypothesesToUpdate'))
            if (len(hypothesesToUpdate)):
                runILASPCommands(hypothesesToUpdate)
                utils.printHypotheses()
                # Predictions are stale now that hypotheses changed.
                state.set('labelPredictionsUpdated', False)
            else:
                utils.printTitle(
                    'All new labels agree with the last hypotheses computed.')
            utils.checkAndRecluster()
            utils.resetMustLabelModels()
            print('Would you like to:')
            print(
                '(1) Continue classification to improve current class hypotheses?'
            )
            print(
                '(2) Use current hypotheses to automatically classify all initial data?'
            )
            # Keep prompting until the user types a valid 1 or 2.
            while True:
                try:
                    ans = int(input('Your answer (1/2): '))
                    if (ans < 1 or ans > 2):
                        raise ValueError
                    break
                except ValueError:
                    continue
            if (ans == 1):
                # Start the whole interactive round over.
                print()
                computeHypotheses()
            return
def setParamsFromArgs(args):
    """Enable state flags for recognized command-line switches."""
    flagByArg = {'-p': 'prenamedComponents', '-n': 'noise'}
    for arg in args:
        if arg in flagByArg:
            state.set(flagByArg[arg], True)
def resetMustLabelModels():
    """Clear the queues of models awaiting mandatory manual labelling."""
    emptyQueues = {NO_LABEL_STRING: [], MULTIPLE_LABELS_STRING: []}
    state.set('mustLabelModels', emptyQueues)
def setDefaultQuery():
    """Load the default query text from disk into state as 'prevQuery'."""
    defaultQueryPath = getDefaultQueryPath()
    # FIX: 'with' closes the file even if read() raises (the original
    # open()/read()/close() sequence leaked the handle on error).
    with open(defaultQueryPath, 'r') as file:
        defaultQueryString = file.read()
    state.set('prevQuery', defaultQueryString)
'''
Diagnoses available simulator input and outputs
'''

import joysticks, state, log, phidgets

# debug joysticks
for j in range(0, joysticks.numJoysticks()):
    joy = joysticks.get(j)
    for b in range(0, joy.numButtons()):
        key = "joysticks.get('%s').button(%d)" % (joy.name, b)
        now = joy.getButton(b)
        # NOTE(review): state.set appears to return the PREVIOUS value
        # (compare the axis path below, which uses state.get) -- confirm.
        was = state.set(key, now)
        if now and not was:
            log.info("joystick %d button pressed - %s = True" % (j, key))
    for a in range(0, joy.numAxis()):
        key = "joysticks.get('%s').axis(%d)" % (joy.name, a)
        now = joy.getAxis(a)
        was = state.get(key, 0)
        # Only log/record movement beyond a 0.1 dead-band to avoid jitter spam.
        # (assumes state.set belongs inside this dead-band check -- confirm)
        if abs(was - now) > 0.1:
            log.info("joystick %d axis moved - %s = %.1f" % (j, key, now))
            state.set(key, now)

# debug phidgets
for p in phidgets.all():
    if not p.isAttached():
        continue
    # dtype presumably used by diagnostics continuing beyond this excerpt.
    dtype = p.getDeviceType()
# event - see http://msdn.microsoft.com/en-us/library/cc526980.aspx#AircraftMiscellaneousSystemsIDs # | GEAR_SET | Sets gear handle position up/down (0,1) | All aircraft # gearup = state.toggle("gearup", saitek.getAxis(1) < 0) if gearup is None: pass # ignore no change in handle elif gearup: log.info("Moving gear handle up!") fsx.set("GEAR HANDLE POSITION", "Bool", 0) # uset settable variable approach else: log.info("Moving gear handle down!") fsx.send("GEAR_SET", 1) # use send (key) event handle = fsx.get("GEAR HANDLE POSITION", "Bool", bool) if state.set("handle", handle) != handle: log.info("Current gear handle IS %s" % ("down" if handle else "up")) # # phidget 1&2 for increasing COM active frequency # # events - see http://msdn.microsoft.com/en-us/library/cc526981.aspx#Frequency # | KEY_COM_RADIO_WHOLE_DEC | COM_RADIO_WHOLE_DEC | Decrements COM by one MHz | All aircraft | # | KEY_COM_RADIO_WHOLE_INC | COM_RADIO_WHOLE_INC | Increments COM by one MHz | All aircraft | # | KEY_COM_RADIO_FRACT_DEC | COM_RADIO_FRACT_DEC | Decrements COM by 25 KHz | All aircraft | # | KEY_COM_RADIO_FRACT_INC | COM_RADIO_FRACT_INC | Increments COM by 25 KHz | All aircraft | # | KEY_COM_RADIO_SWAP | COM_STBY_RADIO_SWAP | Swaps COM 1 freq/standby | All aircraft | # encoder1 = phidgets.get(82141) delta = phidgets.getDelta(encoder1) for i in range(0, abs(delta)):
def initProgressBar():
    """Reset the shared progress-bar iteration counter to zero."""
    state.set('iterationNum', 0)
def preProcessingFunc():
    """Run the pre-processing pipeline: parse CLI args, parse and cluster the
    input models, then collect labels and the default query from the user.

    Raises:
        RuntimeError: if no input file path was supplied on the command line.
    """
    numOfArgs = len(sys.argv)
    mainScriptPath = sys.argv[0]
    # BUG FIX: the old 'try: sys.argv[numOfArgs - 1] except IndexError' guard
    # never fired -- with no extra arguments, sys.argv[numOfArgs - 1] is
    # sys.argv[0], so the script was silently used as its own input file.
    if numOfArgs < 2:
        raise RuntimeError('No file with models provided!')
    inputFilePath = sys.argv[numOfArgs - 1]
    # All arguments between the script name and the input file are flags.
    utils.setParamsFromArgs(sys.argv[1:(numOfArgs - 1)])
    state.set('mainScriptPath', utils.getAbsPath(mainScriptPath))
    state.set('inputFilePath', utils.getAbsPath(inputFilePath))
    state.set('outputFilePath', utils.computeClassFilePath())
    utils.printTitle('Pre-processing of given file is about to begin.')
    tempDirPath = utils.createTempDirectory(mainScriptPath)
    state.set('tempDirPath', tempDirPath)
    if (not state.get('prenamedComponents')):
        # Ask whether components should be named interactively later on.
        nameComponentsInput = None
        while (nameComponentsInput != 'y' and nameComponentsInput != 'n'):
            nameComponentsInput = input(
                'Would you like to ' +
                'name the components for more human-readable class hypotheses? '
                + '(y/n) ').lower()
            print()
        state.set('nameComponents', nameComponentsInput == 'y')
    clustersMap = preProcessing.parseInputFile()
    state.set('clusters', clustersMap)
    utils.initClusterWeights()
    utils.printTitle('Pre-processing of file complete!')
    # Keep prompting until at least one relevant pattern is selected.
    while not len(state.get('relevantPatterns')):
        setRelevantPatterns()
    labels = utils.getAllLabelsFromUser()
    labelExamplesPaths = utils.createLabelExampleFiles(labels)
    state.set('labels', labels)
    state.set('labelExamplesPaths', labelExamplesPaths)
    utils.initUserLabelCounters()
    utils.setDefaultQuery()
    utils.printTitle('Thank you, classification process will now begin.')