def sanitize_files(self, args: object) -> None:
    """Sanitize the files listed on the command line.

    args: parsed command-line arguments
    """
    for name in args.files:
        for filename in glob(name):
            DEBUG(f"Filename: {filename}")
            for reader in self.readers:
                if reader.detect(filename):
                    DEBUG(f"reader: {reader.__class__.__name__}")
                    message = f"{filename} - {reader.description}"
                    self.filenames.append(message)
                    print(message)
                    self.make_target_dir(args.out)
                    self.sanitize_file(filename, args.out, reader)
                    break
            else:
                print(f"No handler found for {filename}.")
def UpdateState(self, S, A, val):
    DEBUG("Updating State")
    if self.IsValidStateAction(S, A):
        DEBUG("State action is valid, calling UpdateStateVal")
        # Action-state values are stored with the action appended to the state.
        super().UpdateStateVal(np.append(S, A), val)
        return True
    WARN(f"Invalid (S, A) pair, got S: {S} and A: {A}")
    return False
def sanitize_line(self, line: str) -> str:
    DEBUG(line.rstrip())
    for cui in self.CUIs:
        DEBUG(f"CUI: {cui.__class__.__name__}")
        line = cui.sanitize(line)
        DEBUG(f"-> {line}")
    return line
def erase_dir(path):
    """Remove all folders and files in directory @path, except entries
    listed in @constants.DO_NOT_DELETE.
    """
    DEBUG("Erasing '%s'", path)
    for node in os.listdir(path):
        if node in constants.DO_NOT_DELETE:
            continue
        node_path = os.path.join(path, node)
        if os.path.isdir(node_path):
            shutil.rmtree(node_path)
        else:
            os.remove(node_path)
    DEBUG("'%s' successfully erased", path)
def ImprovePolicy(self, packet):
    DEBUG("Called improve policy")
    if not self.IsValidPacket(packet):
        DEBUG("Packet is too small")
        return False
    # Grab the elements out of our exp_packet.
    S_list, A_list, R_list = packet.Get()
    # Current value of the state-action pair.
    V = self.GetStateVal(S_list[0], A_list[0])
    # Calculate the new target based on the exp_packet.
    G = self.GetTargetEstimate(packet)
    # Increment toward the new target, scaled by the learning rate.
    new_val = (1 - self.alpha) * V + self.alpha * G
    # Update the value of the state-action pair.
    self.UpdateState(S_list[0], A_list[0], new_val)
    return True
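# Worked example of the incremental update above, as a standalone sketch.
# All numbers are illustrative (none come from the original code): with
# alpha = 0.1, a current estimate V = 2.0 and a target G = 5.0, the value
# moves 10% of the way toward the target.
def _td_update_example():
    alpha, V, G = 0.1, 2.0, 5.0
    new_val = (1 - alpha) * V + alpha * G  # 0.9 * 2.0 + 0.1 * 5.0 = 2.3
    assert abs(new_val - 2.3) < 1e-9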
def sanitize_file(self, filename: str, out: str, reader: Reader) -> None:
    path = Path(filename)
    # Write the sanitized copy under `out` if given, otherwise next to the
    # original. Note: path.with_name() already carries the parent directory,
    # so the parent must not be prepended a second time.
    if out:
        target = path.parent / out / f"SANITIZED_{path.name}"
    else:
        target = path.with_name(f"SANITIZED_{path.name}")
    DEBUG(f"Target: {target}")
    with open(target, "w", encoding="utf-8") as file:
        for i, line in enumerate(reader.lines(filename), start=1):
            DEBUG(f"{i}: {line.prefix}{line.text}")
            sanitized = self.sanitize_line(line.text)
            file.write(f"{line.prefix}{sanitized}")
def _GetActions(self, S, eps=1):
    DEBUG(f"Getting actions for state {S}")
    if np.random.rand() > eps:
        # Occasionally select from all actions at random.
        selection = list(range(self._num_a))
        DEBUG(f"Got selection {selection}")
    else:
        # Get the set of actions tied for the max value in this state.
        vals = self.vals[ToTuple(S)]
        DEBUG(f"Got Vals {vals}")
        max_val = np.max(vals)
        selection = [index for index, val in enumerate(vals) if val == max_val]
    return selection
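# Standalone sketch of the epsilon-greedy selection used by _GetActions.
# `epsilon_greedy_candidates` is a hypothetical helper, not part of the policy
# class: with probability 1 - eps every action is a candidate (explore),
# otherwise only the actions tied for the maximum value are kept (exploit).
import numpy as np

def epsilon_greedy_candidates(vals, eps=0.95):
    if np.random.rand() > eps:
        return list(range(len(vals)))  # explore: any action may be chosen
    best = np.max(vals)
    return [i for i, v in enumerate(vals) if v == best]  # greedy ties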
def _StepAgent(self, agent):
    # Grab the current state.
    s_prev = agent.GetCurrState()
    # Take an action and get the next state and reward.
    a_next = agent.GetAction(s_prev)
    s_next = self._world.GetNextState(s_prev, a_next)
    r_next = self._world.GetReward(s_next)
    # Update the state of the agent and store the (S, A, R) triplet in the history.
    agent.UpdateCurrentState(s_next)
    self._history[agent.GetID()].Push(s_prev, a_next, r_next)
    # If this is a trainable agent, then attempt to train it.
    if agent.IsTrainable():
        DEBUG(f"Training agent {agent.GetID()}")
        s_req, a_req, r_req = agent.PacketSizeReq()
        packet = self._history[agent.GetID()].GetLatestAsPacket(s_req, a_req, r_req)
        if packet is not None:
            if not agent.ImprovePolicy(packet):
                WARN(f"Failed to train agent with ExpPacket {packet}")
        # Delete the packet, which we don't need anymore.
        del packet
def __init__(self, world, **kwargs):
    params = {}
    params["discount_factor"] = kwargs.pop("discount_factor", 1)
    params["exploration_factor"] = kwargs.pop("exploration_factor", 0.95)
    params["is_static"] = kwargs.pop("is_static", False)
    params["init_variance"] = kwargs.pop("init_variance", 0.01)
    params["learn_rate"] = kwargs.pop("learn_rate", 0.001)
    params["value_type"] = Policy.ACTION_STATE_VALUES
    self._req_S = 2
    self._req_A = 2
    self._req_R = 1
    if kwargs:
        raise ValueError(
            f"Got unknown args in kwargs passed to SarsaPolicy: {list(kwargs)}")
    TabularPolicy.__init__(self, world, **params)
    DEBUG("Initialized SarsaPolicy instance")
    DEBUG(f"Shape of VALS {self.vals.shape}")
def QUERY(text):
    """
    Execute a query.
    """
    strippedText = text.strip()
    for line in strippedText.split('\n'):
        DEBUG(" %s" % line)
    results = __global_graph__.query(strippedText)
    # Guard against a missing result set before taking len() of it.
    if results is None:
        results = []
    DEBUG("Results:")
    if len(results) > 0:
        for result in results:
            DEBUG(result)
    else:
        DEBUG(" 0 found")
    return results
def ImprovePolicy(self, exp_packet):
    """
    Calls ImprovePolicy on the Agent's policy.

    Returns:
        (Bool) True if the policy was improved, False otherwise.
    """
    # Only improve the policy if the training flag is set.
    if self._is_training:
        return self._policy.ImprovePolicy(exp_packet)
    DEBUG("is_training is set to FALSE")
    return False
def hazards_func(state):
    # Grid cells that are hazardous.
    haz_states = {(1, 1), (2, 2), (3, 3), (1, 4), (4, 1)}
    DEBUG(f"STATE: -- {state}")
    return tuple(state) in haz_states
def sanitize(self, text: str) -> str:
    start = timer()
    parts = []
    while m := self.find_in_line(text):
        DEBUG(text)
        key = self.key_from_match(m)
        if self.reject(m):
            value = key
        else:
            if key not in self.data:
                self.value.increment()
                self.data[key] = self.replacement(key)
            value = self.data[key]
        # Splice the replacement in and continue scanning the remainder.
        offset = text.find(key)
        parts.append(text[:offset])
        parts.append(value)
        text = text[offset + len(key):]
    # Assumed completion (the source was truncated here): keep any trailing
    # text after the last match and return the sanitized line.
    parts.append(text)
    return "".join(parts)
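# Standalone sketch of the incremental find-and-replace pattern used by
# sanitize() above. `redact_ssns` and its regex are hypothetical stand-ins
# for find_in_line()/replacement(), not the original classes.
import re

def redact_ssns(text: str) -> str:
    parts = []
    while m := re.search(r"\d{3}-\d{2}-\d{4}", text):
        parts.append(text[:m.start()])  # text before the match
        parts.append("[REDACTED]")      # the replacement value
        text = text[m.end():]           # keep scanning the remainder
    parts.append(text)                  # trailing text after the last match
    return "".join(parts)

# redact_ssns("id 123-45-6789 ok") == "id [REDACTED] ok"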
def put(self, request):
    """
    Sample API with a PUT method; some details are still unknown.
    :return: status and message in JSON format (unsuspend)
    """
    api.payload
    validated = self.__validate_json_input(request)
    if validated == 200:
        if 'subscribe_id' in self.__json_content:
            return unsuspend(
                '200', 'IoT.nxt user has been successfully unsuspended')
        # Valid JSON, but the required parameter is missing.
        coll = unsuspend('404', 'json input needs the subscribe_id param'), 404
        DEBUG(coll)
        return coll
    if validated == 400:
        raise BadRequest("Incorrect")
    abort(500, 'Not Allowed')
def convert(inputFilesOrDirs, inputFormat, inputExtensions,
            outputDir, outputFormat, outputExt,
            recursive=True, overwrite=True, loggingFunction=None):
    """
    Conversion function.

    @param inputFilesOrDirs : a list of paths (to a file or to a directory)
    @param inputFormat      : input files format (one of the keys of INPUT_FORMAT_TO_EXTENSIONS)
    @param inputExtensions  : a list of input file extensions (one or more values of INPUT_FORMAT_TO_EXTENSIONS)
    @param outputDir        : the output directory, or None to print to stdout
    @param outputFormat     : output files format (one of the keys of OUTPUT_FORMAT_TO_EXTENSION)
    @param outputExt        : the output files extension (one of the values of OUTPUT_FORMAT_TO_EXTENSION)
    @param recursive        : if inputFilesOrDirs contains directories, descend into them to find all files
    @param overwrite        : True to overwrite any existing file
    @param loggingFunction  : function used for progress messages (defaults to INFO)
    """
    if loggingFunction is None:
        loggingFunction = INFO

    # Process each input file sequentially.
    for inputFileOrDir in inputFilesOrDirs:
        loggingFunction("Processing input file or directory '%s'" % inputFileOrDir)

        # Check that the path exists, and whether it is a directory or a file.
        isdir = False
        if os.path.exists(inputFileOrDir):
            if os.path.isdir(inputFileOrDir):
                DEBUG("'%s' exists and is a directory" % inputFileOrDir)
                inputFileOrDir = os.path.abspath(inputFileOrDir)
                isdir = True
            else:
                DEBUG("'%s' exists and is a file" % inputFileOrDir)
        else:
            raise IOError("Input file '%s' was not found" % inputFileOrDir)

        DEBUG("Input format: %s" % inputFormat)
        DEBUG("Output format: %s" % outputFormat)

        # Find out which extensions we should match.
        if inputExtensions is None:
            inputExtensions = INPUT_FORMAT_TO_EXTENSIONS[inputFormat]
        DEBUG("Input extensions: %s" % inputExtensions)

        # Find out which output extension we should write.
        if outputExt:
            outputExtension = outputExt
        else:
            outputExtension = OUTPUT_FORMAT_TO_EXTENSION[outputFormat]
        DEBUG("Output extension: '%s'" % outputExtension)

        inputFiles = []
        if isdir:
            DEBUG("Now walking the directory (recursive = %s):" % recursive)
            for root, dirnames, filenames in os.walk(inputFileOrDir):
                DEBUG(" * Finding files in '%s'" % root)
                for extension in inputExtensions:
                    for filename in fnmatch.filter(filenames, "*%s" % extension):
                        DEBUG("   -> found '%s'" % filename)
                        inputFiles.append(os.path.join(root, filename))
                if not recursive:
                    break
        else:
            inputFiles.append(inputFileOrDir)

        # Create the graph and parse the input files.
        for inputFile in inputFiles:
            g = ConjunctiveGraph()
            g.parse(inputFile, format=inputFormat)
            DEBUG("the graph was parsed successfully")

            # If no output directory is specified, just print the output to stdout.
            if outputDir is None:
                output = g.serialize(None, format=outputFormat)
                DEBUG("output:")
                print(output)
            # If an output directory was provided but it doesn't exist, exit the function.
            elif not os.path.exists(outputDir):
                raise IOError("Output dir '%s' was not found" % outputDir)
            # If the output directory was given and it exists, figure out the
            # output filename and write the output to disk.
            else:
                head, tail = os.path.split(inputFile)
                DEBUG("head, tail: %s, %s" % (head, tail))

                # Remove the common prefix from the head and the input directory
                # (otherwise the given input path will also be added to the output path).
                commonPrefix = os.path.commonprefix([head, inputFileOrDir])
                DEBUG("inputFileOrDir: %s" % inputFileOrDir)
                DEBUG("common prefix: %s" % commonPrefix)
                headWithoutCommonPrefix = head[len(commonPrefix) + 1:]
                DEBUG("head without common prefix: %s" % headWithoutCommonPrefix)
                outputAbsPath = os.path.join(os.path.abspath(outputDir),
                                             headWithoutCommonPrefix)
                DEBUG("output absolute path: %s" % outputAbsPath)
                outputFileName = os.path.splitext(tail)[0] + outputExtension
                outputAbsFileName = os.path.join(outputAbsPath, outputFileName)
                DEBUG("output filename: '%s'" % outputAbsFileName)

                # For safety, check that we're not overwriting the input file.
                if outputAbsFileName == os.path.abspath(inputFile):
                    raise IOError("Input file '%s' is the same as output file"
                                  % outputAbsFileName)
                DEBUG("this file is different from the input filename")

                # Check if we need to skip this file.
                skipThisFile = os.path.exists(outputAbsFileName) and not overwrite
                if skipThisFile:
                    DEBUG("this file will be skipped")
                else:
                    dirName = os.path.dirname(outputAbsFileName)
                    if not os.path.exists(dirName):
                        DEBUG("Now creating %s since it does not exist yet" % dirName)
                        os.makedirs(dirName)
                    loggingFunction("Writing %s" % outputAbsFileName)
                    g.serialize(outputAbsFileName, auto_compact=True,
                                format=outputFormat)
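# Hypothetical invocation of convert(); the directory names, formats and
# extensions below are illustrative only and do not come from the source.
if __name__ == "__main__":
    convert(inputFilesOrDirs=["./ontologies"],
            inputFormat="xml",          # RDF/XML input
            inputExtensions=[".owl"],
            outputDir="./out",
            outputFormat="turtle",      # serialize as Turtle
            outputExt=".ttl")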
def create_folder(path, erase=False, quiet=False):
    """Create a folder at @path.

    - @erase - erase the existing folder
    - @quiet - don't ask the user about particular actions
    - if @quiet is False, a new folder with the name @path[i] will be created
    - @erase has higher priority than @quiet
    """
    # >:( a lot of returns - not good style
    DEBUG("Creating '%s' folder", path)
    try:
        os.makedirs(path)
    except OSError as ex:
        # We can't handle other errors, except 'Folder already exists' (errno 17).
        if ex.errno != 17:
            CRITICAL("Can't create folder %s", path)
            EXCEPTION("")
            emergency_exit()
    else:
        DEBUG("Folder '%s' created", path)
        return path

    # Looks like the folder already exists;
    # let's try to erase it or create a new one at a different path.
    ERROR("Can't create '%s' folder", path)
    if erase:
        try:
            erase_dir(path)
        except Exception:
            CRITICAL("Folder '%s' can't be erased", path)
        else:
            INFO("Folder erased: '{}'".format(path))
            return path

    # Well, erase == False or the folder can't be erased.
    if not quiet:
        answ = ''
        while not answ:
            answ = input("Type (E) to erase the existing folder, "
                         "type (Q) to exit the script "
                         "or enter a new folder name: ").lower()
            if answ == "e":
                return create_folder(path, erase=True, quiet=quiet)
            elif answ == "q":
                script_exit()
            elif answ:
                return create_folder(answ, erase=False, quiet=quiet)
    else:
        return create_folder(find_unic_path(path), erase=erase, quiet=quiet)
def log(self, msg):
    """
    Simple low-level log function.
    """
    DEBUG("%s : %s" % (self['qname'], msg))
def SSD_builder(args):
    # Get the ResNet symbol.
    resnet = get_resnet_symbol(num_classes=1, num_layers=18,
                               image_shape=args.image_shape)
    last_relu = resnet.get_internals()['relu1_output']
    data = resnet.get_internals()['data']
    label = mx.sym.Variable('label')

    # Extract the layers right before downsampling.
    multiscalelayers_name = args.multiscalelayers_name
    multiscalelayers_layers = get_all_bn_layers(last_relu, multiscalelayers_name)

    # Build the predicted boxes, classes and anchors.
    predicted_boxes = []
    predicted_classes = []
    default_anchors = []

    # Add relu + conv for each multi-scale layer.
    for layer, size, ratio in zip(multiscalelayers_layers,
                                  args.anchor_sizes, args.anchor_ratios):
        DEBUG("Verify the output shape of the layer: {}".format(layer.name))
        DEBUG("Before->" + verify_shape(layer, (1, 3) + tuple(args.data_shape)))
        relu = mx.sym.Activation(data=layer, act_type='relu',
                                 name=layer.name + '_relu')
        boxes = box_predictor(relu, args.num_anchors)
        classes = class_predictor(relu, args.num_anchors, args.num_classes)
        anchors = MultiBoxPrior(layer, sizes=size, ratios=ratio, clip=True)
        DEBUG("After Box->" + verify_shape(boxes, (1, 3) + tuple(args.data_shape)))
        DEBUG("After Class->" + verify_shape(classes, (1, 3) + tuple(args.data_shape)))
        DEBUG("After Anchor->" + verify_shape(anchors, (1, 3) + tuple(args.data_shape)))
        predicted_boxes.append(flatten_prediction(boxes))
        predicted_classes.append(flatten_prediction(classes))
        default_anchors.append(anchors)

    all_anchors = concat_predictions(default_anchors)
    all_classes_pred = mx.sym.reshape(concat_predictions(predicted_classes),
                                      shape=(0, -1, args.num_classes + 1))
    all_boxes_pred = concat_predictions(predicted_boxes)
    DEBUG("All anchors->" + verify_shape(all_anchors, (1, 3) + tuple(args.data_shape)))
    DEBUG("All classes->" + verify_shape(all_classes_pred, (1, 3) + tuple(args.data_shape)))
    DEBUG("All boxes->" + verify_shape(all_boxes_pred, (1, 3) + tuple(args.data_shape)))

    # Set up the ground-truth label.
    box_target, box_mask, cls_target = training_targets(
        all_anchors, all_classes_pred, label)
    DEBUG("box_target->" + verify_shape(box_target, (1, 3) + tuple(args.data_shape), (1, 1, 5)))
    DEBUG("box_mask->" + verify_shape(box_mask, (1, 3) + tuple(args.data_shape), (1, 1, 5)))
    DEBUG("cls_target->" + verify_shape(cls_target, (1, 3) + tuple(args.data_shape), (1, 1, 5)))

    # Combine the classification and box-regression losses.
    cls_loss = FocalLoss()
    box_loss = SmoothL1Loss()
    loss1 = cls_loss.hybrid_forward(all_classes_pred, cls_target)
    loss2 = box_loss.hybrid_forward(all_boxes_pred, box_target, box_mask)
    loss = loss1 + loss2
    DEBUG("Final loss->" + verify_shape(loss, (1, 3) + tuple(args.data_shape), (1, 1, 5)))

    loss_make = mx.sym.MakeLoss(loss)
    output = mx.sym.Group([
        loss_make,
        mx.sym.BlockGrad(all_classes_pred),
        mx.sym.BlockGrad(cls_target),
        mx.sym.BlockGrad(all_boxes_pred),
        mx.sym.BlockGrad(box_target),
        mx.sym.BlockGrad(box_mask),
    ])
    return output
def getInstances(cache, className, filterNotExists=None):
    """
    Get the instances of a class.
    """
    INFO(" Get instances of %s" % className)
    if filterNotExists is None:
        filterNotExistsLine = ""
    else:
        filterNotExistsLine = "FILTER NOT EXISTS { %s }" % filterNotExists

    results = QUERY("""
        SELECT DISTINCT ?instance ?label ?comment ?counter ?rdfClass
        WHERE {
            ?instance rdf:type/rdfs:subClassOf* %s .
            OPTIONAL { ?instance rdfs:label ?label } .
            OPTIONAL { ?instance rdfs:comment ?comment } .
            OPTIONAL { ?instance ontoscript:counter ?counter } .
            OPTIONAL { ?instance a/(rdfs:subClassOf*) ?rdfClass .
                       FILTER (!isBlank(?rdfClass)) } .
            %s
        }
        """ % (className, filterNotExistsLine))

    d = {}
    for uri, label, comment, counter, rdfClass in results:
        qname = URI_TO_QNAME(uri)
        if qname not in d:
            d[qname] = Node(qname=qname, uri=uri.toPython(), cache=cache)
        if label is not None:
            d[qname]["label"] = label.toPython()
        if comment is not None:
            d[qname]["comment"] = comment.toPython()
        if counter is not None:
            d[qname]["counter"] = int(counter.toPython())
        if rdfClass is not None:
            d[qname].registerClass(URI_TO_QNAME(rdfClass.toPython()))

    INFO(" --> " + " ".join(d.keys()))

    for qname, node in d.items():
        node.registerKnownViews()
        if qname not in cache:
            DEBUG("Caching %s" % qname)
            cache[qname] = node

    # Return a list of QNames, sorted by counter.
    resultNodes = sorted(d.values(), key=lambda x: x["counter"])
    return [resultNode['qname'] for resultNode in resultNodes]