def merge_unknown_words(self, w_graph, text):
    """Merge runs of out-of-lexicon edges in the word graph into single edges."""
    unk, tmp = {}, []
    # Collect every edge whose substring is missing from the lexicon,
    # indexed by start offset.
    for s, t, c in w_graph.edges():
        if self.dict['lexicon'].find(text[s:t]) is None:
            tmp.append((s, t))
            unk.setdefault(s, []).append(t)
    tmp.reverse()
    while tmp:
        # Depth-first walk over consecutive unknown edges to build one chain.
        Q = [tmp.pop()]
        path = []
        while Q:
            s, t = Q.pop()
            path.append((s, t))
            if t in unk:
                for x in unk[t]:
                    Q.append((t, x))
                    if (t, x) in tmp:
                        tmp.remove((t, x))
        path.sort()
        # Merge only chains that span several edges made entirely of Thai text.
        if len(path) > 1 and all(Utils.is_thai_word(text[x:y]) for x, y in path):
            w_graph.add_edge(path[0][0], path[-1][1])
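
# A minimal standalone sketch (not part of the original) of the merging idea
# above: group consecutive unknown (start, end) edges into chains and collapse
# every multi-edge chain into a single span. The hard-coded edge list stands in
# for w_graph and the lexicon lookup; for simplicity only the first
# continuation at each step is followed, unlike the full walk above.
def merge_unknown_spans(unknown_edges):
    starts = {}
    for s, t in unknown_edges:
        starts.setdefault(s, []).append(t)
    merged, seen = [], set()
    for s, t in sorted(unknown_edges):
        if (s, t) in seen:
            continue
        chain = [(s, t)]
        # Extend the chain while another unknown edge starts where it ends.
        while chain[-1][1] in starts:
            nxt = chain[-1][1]
            chain.append((nxt, starts[nxt][0]))
            seen.add(chain[-1])
        if len(chain) > 1:
            merged.append((chain[0][0], chain[-1][1]))
    return merged

print(merge_unknown_spans([(0, 3), (3, 5), (5, 9), (12, 14)]))  # -> [(0, 9)]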
Example #2
 def get_paths_for_strat(self, strat):
     """Return lists of all cells connected by the given strategy. There can
        be multiple lists, because one path can be split by a stone."""
     paths = []
     # Every path/edge coordinate built by the given strategy that does not
     # have a stone ("#") on it.
     free_coords = [crd for crd in self.list_paths_for_strat(strat)
                    if "#" not in self[crd[0]][crd[1]]]
     for coords in free_coords:
         # Skip coords that already belong to a path found for this strategy.
         if any(coords in path for path in paths):
             continue
         # Otherwise compute the connected path these coords are part of.
         path = self.get_connected_cells(coords, [])
         path.sort()
         # The membership check above guarantees this path is new.
         assert path not in paths
         paths.append(path)
     return paths
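
# get_connected_cells() is not shown above; a minimal flood-fill sketch (an
# assumption, not the game's actual code) of what it is expected to do, using
# plain 4-neighbour adjacency as a stand-in for the real connectivity rule.
def connected_component(start, cells, acc=None):
    acc = [] if acc is None else acc
    if start in acc or start not in cells:
        return acc
    acc.append(start)
    r, c = start
    for neighbour in ((r + 1, c), (r - 1, c), (r, c + 1), (r, c - 1)):
        connected_component(neighbour, cells, acc)
    return acc

print(sorted(connected_component((0, 0), {(0, 0), (0, 1), (1, 1), (3, 3)})))
# -> [(0, 0), (0, 1), (1, 1)]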
Example #3
 def complete_exe(self, entry):
     """Return a sorted list of executables on $PATH whose names start with `entry`."""
     path = os.environ.get('PATH').split(':')
     # Remove duplicate and non-existent directories.
     path.sort()
     last = path[-1]
     for i in range(len(path) - 2, -1, -1):
         if last == path[i]:
             del path[i]
         elif not os.access(path[i], os.R_OK):
             del path[i]
         else:
             last = path[i]
     # Collect every matching file from the remaining $PATH directories.
     files = []
     for directory in path:
         try:
             pathlist = os.listdir(directory)
         except OSError:
             pathlist = []
         for name in pathlist:
             if name.startswith(entry):
                 files.append(name)
     files.sort()
     return files
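
# A shorter standalone equivalent (names assumed, not part of the original
# class): collect the executables on $PATH whose names start with a prefix.
import os

def complete_exe_sketch(prefix):
    names = set()
    for directory in set(os.environ.get('PATH', '').split(os.pathsep)):
        if not os.access(directory, os.R_OK):
            continue
        try:
            entries = os.listdir(directory)
        except OSError:
            continue
        names.update(n for n in entries if n.startswith(prefix))
    return sorted(names)

print(complete_exe_sketch('pyt'))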
Example #4
import os

def get_class_Ind():
    """Return the sorted list of class directory names under 'hmdb51'."""
    path = os.listdir('hmdb51')
    path.sort()
    return path
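
# Hypothetical follow-up (not part of the original): the sorted folder names
# are usually mapped to stable integer labels. Guarded so it only runs when the
# hmdb51 dataset directory is actually present.
if os.path.isdir('hmdb51'):
    class_to_idx = {name: idx for idx, name in enumerate(get_class_Ind())}
    print(class_to_idx)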
Example #5
import cv2
import os.path
import glob

path = glob.glob("./safebooru/bg/6000/*.jpg")
path.sort()


def detect(filename, cascade_file="./lbpcascade_animeface.xml"):
    """Detect anime faces in the image and draw a red box around each one."""
    if not os.path.isfile(cascade_file):
        raise RuntimeError("%s: not found" % cascade_file)

    cascade = cv2.CascadeClassifier(cascade_file)
    image = cv2.imread(str(filename))

    # Grayscale, histogram-equalized copy for the cascade detector.
    gray = cv2.imread(str(filename), 0)
    gray = cv2.equalizeHist(gray)

    faces = cascade.detectMultiScale(
        gray,
        # detector options
        scaleFactor=1.1,
        minNeighbors=5,
        minSize=(24, 24))
    # Draw a rectangle around each detected face on the colour image.
    for (x, y, w, h) in faces:
        cv2.rectangle(image, (x, y), (x + w, y + h), (0, 0, 255), 2)

    return image, faces
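
# Hypothetical driver (not part of the original snippet): run the detector on
# the first image in the globbed list defined at the top of this example.
# Guarded so it only executes when the cascade file and at least one image exist.
if path and os.path.isfile("./lbpcascade_animeface.xml"):
    annotated, faces = detect(path[0])
    print("found %d face(s) in %s" % (len(faces), path[0]))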


def neg_generate(filename, faces):
Example #6
def prepare_update_config(config_dict, crush, curr_reweight):
    """Build a Config of weight/reweight changes from config_dict, validating
    each requested change against the crush tree and current reweight values."""
    config = Config()

    config.max_nodes_per_round = config_dict.get('max_updated_nodes', 4)
    config.max_weight_change = config_dict.get('step', 0.5)
    config.max_reweight_change = config_dict.get('restep', 0.1)
    config.min_weight_diff = config_dict.get('min_weight_diff', 0.01)
    config.min_reweight_diff = config_dict.get('min_reweight_diff', 0.01)

    new_osd_reweights = {}

    for node in config_dict['osds']:
        node = node.copy()
        if 'weight' not in node and 'reweight' not in node:
            logger.error(
                "Node with osd %r has neither weight no reweight. Fix config and restart",
                node)
            return None

        new_weight = node.pop('weight', None)
        new_reweight = node.pop('reweight', None)

        if new_weight is not None:
            path = list(node.items())
            path.sort(key=lambda x: -default_zone_order.index(x[0]))
            path_s = "/".join("{0}={1}".format(tp, name) for tp, name in path)
            try:
                cnode = crush.find_node(path)
            except IndexError as exc:
                logger.error("Fail to find node %s: %s", path_s, exc)
                return None

            diff = abs(new_weight - cnode.weight)
            if diff < config.min_weight_diff:
                logger.info(
                    "Skip %s as requested weight diff %.2f is less than %.2f",
                    cnode.str_path(), diff, config.min_weight_diff)
            else:
                config.rebalance_nodes.append((cnode, new_weight))
                config.total_weight_change += diff
                logger.info("%s weight = %s => %s", cnode.str_path(),
                            cnode.weight, new_weight)

        if new_reweight is not None:
            osd_name = node['osd']
            if osd_name not in curr_reweight:
                logger.error(
                    "No reweight coeficient available for %s. Can't apply reweight parameter from config",
                    osd_name)
                return None

            if new_osd_reweights.get(node['osd'],
                                     new_reweight) != new_reweight:
                logger.error(
                    "%s has different reweight in different tree parts in config."
                    + "Impossible to apply this configuration", osd_name)
                return None

            diff = abs(new_reweight - curr_reweight[osd_name])
            if diff < config.min_reweight_diff:
                logger.info(
                    "Skip reweighting %s as requested diff %.3f is less than %.3f",
                    osd_name, diff, config.min_reweight_diff)
            else:
                new_osd_reweights[osd_name] = new_reweight
                config.total_reweight_change += diff
                logger.info("%s Reweigh = %s => %s", osd_name,
                            curr_reweight[osd_name], new_reweight)

    config.reweight_nodes = [
        (FakedNode(name=osd_name,
                   weight=curr_reweight[osd_name]), new_reweight)
        for osd_name, new_reweight in new_osd_reweights.items()
    ]
    return config
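
# Hypothetical input shape (key names inferred from the lookups above, values
# invented for illustration): the kind of config_dict this function expects.
example_config_dict = {
    'max_updated_nodes': 4,
    'step': 0.5,
    'restep': 0.1,
    'min_weight_diff': 0.01,
    'osds': [
        {'host': 'ceph-01', 'osd': 'osd.3', 'weight': 1.2},
        {'osd': 'osd.7', 'reweight': 0.9},
    ],
}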