Example 1
 def test_equals(self):
     data = 123
     filter = Filter(equals=123)
     self.assertTrue(filter(data))
     data = '123'
     self.assertTrue(filter(data))
     filter = Filter(neq='abc')
     self.assertTrue(filter(data))
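The pair of assertions with 123 and '123' implies this Filter compares values after type coercion. A minimal callable sketch consistent with the test (an assumption, not the project's actual implementation):

class Filter:
    def __init__(self, equals=None, neq=None):
        self.equals, self.neq = equals, neq

    def __call__(self, data):
        # Compare as strings so 123 and '123' are considered equal
        if self.equals is not None:
            return str(data) == str(self.equals)
        if self.neq is not None:
            return str(data) != str(self.neq)
        return True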
Example 2
def meow(days=None,
         dates=None,
         limit=None,
         short=None,
         fail=True,
         exclude=None,
         job_type=None,
         down_path=config.DOWNLOAD_PATH,
         periodic=False,
         ):
    """
        This function actually runs the whole work,
        you can import it anywhere and run with parameters:

    :param days: how many days history to take, usually 7 (week) is enough
    :param dates: specific dates in format ["%m-%d", ..]: ['04-15', '05-02']
    :param limit: limit overall amount of jobs to analyze
    :param short: analyze only this type of jobs,
                    accepts short name: "ha","upgrades","nonha"
    :param fail: whether analyze and print only failed jobs (true by default)
    :param exclude: exclude specific job type: "gate-tripleo-ci-f22-containers"
    :param job_type: include only this job type (like short, but accepts
                        full name): "gate-tripleo-ci-f22-nonha"
    :param down_path: path on local system to save all jobs files there
    :param periodic: if take periodic (periodic=True) or patches (False)
    :return: parsed jobs data, ready for printing to HTML or console
    """
    if not periodic:
        g = Gerrit(period=days)
        gerrit = g.get_project_patches(config.PROJECTS)
        # Debug mode: dump the Gerrit data for investigation...
        # with open("/tmp/gerrit", "w") as f:
        #     f.write(json.dumps(gerrit))
        # ...or read back a previously dumped copy instead of querying Gerrit:
        # with open("/tmp/gerrit") as f:
        #     gerrit = json.load(f)
        jobs = (job for patch in gerrit for job in Patch(patch).jobs)
    else:
        jobs = (job
                for url in config.PERIODIC_URLS
                for job in Periodic(
                    url, down_path=down_path, limit=limit).jobs)
    f = Filter(
        jobs,
        days=days,
        dates=dates,
        limit=limit,
        short=short,
        fail=fail,
        exclude=exclude,
        job_type=job_type,
        periodic=periodic
    )
    filtered = f.run()
    return [analyze(job, down_path=down_path) for job in filtered]
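A minimal usage sketch of the function above (a hypothetical driver; the call shape follows the docstring):

# Analyze the last week of failed "ha" jobs and print the parsed results.
jobs = meow(days=7, short="ha", fail=True)
for job in jobs:
    print(job)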
Example 3
def split_lines(binary,
                contour,
                min_size,
                thresh=0.5,
                roi=False,
                offset=(0, 0)):
    if not roi:
        roi_slice = utils.get_roi_slice(contour, offset)
        binary = binary[roi_slice]
    # cv2.imshow("bin", binary)
    # ACHTUNG: may not work correctly if `roi` is not the contour's actual ROI
    binary = binary & np.array(utils.contour_mask(contour), dtype=bool)

    means = binary.mean(axis=1)

    # THRESH_INV
    means[means < thresh] = -1
    means[means >= thresh] = 0
    means[means == -1] = 1

    # Find where gaps start and end: after zero-padding, the forward
    # difference is +1 where a gap begins and -1 where it ends
    means = np.pad(means, 1, 'constant')
    means = means - np.roll(means, 1)
    begins = (means == 1).nonzero()[0] - 1  # -1 caused by pad
    ends = (means == -1).nonzero()[0] - 1

    # Get the row args (cut positions) midway through each gap;
    # integer division keeps them usable as slice indices
    args = (begins + ends) // 2
    args = args[args >= min_size]
    args = args[args <= means.shape[0] - min_size]
    if args.shape[0] == 0:
        return [contour]

    lines = []
    prev = 0
    args = np.append(args, means.shape[0] - 2)
    for arg in args:  # TODO: make this code reusable for fragmentation
        line_roi = binary[prev:arg, :]
        curr_off = (contour.rect.left, contour.rect.top + prev)
        conts = [
            Contour(c) for c in cv2.findContours(line_roi,
                                                 cv2.RETR_LIST,
                                                 cv2.CHAIN_APPROX_SIMPLE,
                                                 offset=curr_off)[0]
        ]
        lines.extend(conts)
        prev = arg

    flt = Filter()
    flt.add_filter("min_area", min_size * min_size)
    lines = flt.filter(lines)
    if len(lines) <= 1:
        return [contour]

    return lines
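The pad-and-diff trick above deserves a worked example: taking the forward difference of the zero-padded gap mask puts +1 exactly on the first gap row and -1 one row past the last one.

import numpy as np

means = np.array([0, 1, 1, 0])            # gap mask: rows 1-2 are a gap
padded = np.pad(means, 1, 'constant')     # [0, 0, 1, 1, 0, 0]
diff = padded - np.roll(padded, 1)        # [0, 0, 1, 0, -1, 0]
begins = (diff == 1).nonzero()[0] - 1     # [1]: first gap row
ends = (diff == -1).nonzero()[0] - 1      # [3]: one past the last gap row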
Example 4
def initFilter():
    global deffs, chain, fs
    chain = FilterChain()
    chain._filters.append(
        Filter(FilterType.LShelving, LOW_EQ, 0, 1, enabled=True))
    # chain._filters.append(Filter(FilterType.HShelving, deffs[4], 0, 1, enabled = True))
    # chain._filters.append(Filter(FilterType.Peak, deffs[0], 0, 1, enabled = True))
    chain._filters.append(Filter(FilterType.Peak, HIGH_EQ, 0, 1, enabled=True))
    # chain._filters.append(Filter(FilterType.LPButter, deffs[3], 0, 1, enabled = True))
    # chain._filters.append(Filter(FilterType.HPButter, deffs[3], 0, 1, enabled = True))
    chain.reset()
Example 5
 def initFilter(self):
     self.chain = FilterChain()
     self.chain._filters.append(
         Filter(FilterType.LShelving, self.LOW_EQ, 0, 1, enabled=True))
     # self.chain._filters.append(Filter(FilterType.HShelving, deffs[4], 0, 1, enabled = True))
     # self.chain._filters.append(Filter(FilterType.Peak, deffs[0], 0, 1, enabled = True))
     self.chain._filters.append(
         Filter(FilterType.Peak, self.HIGH_EQ, 0, 1, enabled=True))
     # self.chain._filters.append(Filter(FilterType.LPButter, deffs[3], 0, 1, enabled = True))
     # self.chain._filters.append(Filter(FilterType.HPButter, deffs[3], 0, 1, enabled = True))
     self.chain.reset()
Example 6
 def test_melif(self):
     data, target = self.basehock['X'], self.basehock['Y']
     _filters = [
         Filter('GiniIndex', cutting_rule=GLOB_CR["Best by value"](0.4)),
         # Filter('FitCriterion', cutting_rule=GLOB_CR["Best by value"](0.0)),
         Filter(GLOB_MEASURE["FRatio"](data.shape[1]),
                cutting_rule=GLOB_CR["Best by value"](0.6)),
         Filter('InformationGain',
                cutting_rule=GLOB_CR["Best by value"](-0.4))
     ]
     melif = Melif(_filters, f1_score)
     melif.fit(data, target)
     estimator = SVC()
     melif.run(GLOB_CR['K best'](50), estimator)
Example 7
def main():
    filter = Filter(model_file="model.p", scaler_file="scaler.p")
    clip = VideoFileClip("project_video_short3.mp4")
    cnt = 0
    stop_frame_num = 113
    for img in clip.iter_frames():
        cnt += 1
        if cnt == stop_frame_num:
            if img.shape[2] == 4:
                img = img[:, :, :3]
            ret = filter.pipepine(img)
            plt.figure(figsize=(16, 10))
            plt.imshow(filter.diagScreen)
            plt.subplots_adjust(left=0.03, bottom=0.03, right=1, top=1)
            plt.show()
Example 8
    def updateFilter(self, i, param, val):
        oldf = self.chain._filters[i]
        type = oldf._type
        fc = oldf._fc
        g = oldf._g
        Q = oldf._Q

        if param == Params.TYPE:
            type = val
            Q = 1                      
        elif param == Params.F:
            fc = int(self.nodes[i].ctrls[2].text()) * 2 / fs
        elif param == Params.G:
            g = float(self.nodes[i].ctrls[3].text())
        elif param == Params.Q:
            if type == FilterType.LPButter or type == FilterType.HPButter:
                Q = val
            elif type == FilterType.Peak:
                Q = val / 10
            elif type in (FilterType.LShelving, FilterType.HShelving):
                Q = val / 100

        self.chain.updateFilt(i, Filter(type, fc, g, Q))
        if param == Params.TYPE:            
            self.updateControls(i, type)
            self.adjustSliderRange(i, type) 

        self.updateSliderLabel(i)    
Example 9
def filterQuestions(results):
    global bot
    for x in results["items"]:
        question = Question(x)

        quality = Filter(question)
        quality.calc()
        print(quality.score)

        if quality.score < 40:
            message = ""
            for y in quality.text:
                message += "Words: `%i`, Chars: `%i`, Bold: `%i`, Italic: `%i`, Case=> Upper: `%i`, Lower: `%i`" % (y.get("words"), y.get("chars"), y.get("bold"), y.get("italic"), y.get("case").get("upper"), y.get("case").get("lower"))
            bot.send("[tag:low-quality] [" + question.title + "](" + question.link + ") - " + message)
Example 10
def build_soft_fusion_kernel(loops, loop_chain_index):
    """
    Build the AST and :class:`Kernel` for a sequence of loops suitable for soft fusion.
    """

    kernels = [l.kernel for l in loops]
    asts = [k._ast for k in kernels]
    base_ast, fuse_asts = dcopy(asts[0]), asts[1:]

    base_fundecl = FindInstances(ast.FunDecl).visit(base_ast)[ast.FunDecl][0]
    base_fundecl.body[:] = [ast.Block(base_fundecl.body, open_scope=True)]
    for unique_id, _fuse_ast in enumerate(fuse_asts, 1):
        fuse_ast = dcopy(_fuse_ast)
        fuse_fundecl = FindInstances(
            ast.FunDecl).visit(fuse_ast)[ast.FunDecl][0]
        # 1) Extend function name
        base_fundecl.name = "%s_%s" % (base_fundecl.name, fuse_fundecl.name)
        # 2) Concatenate the arguments in the signature
        base_fundecl.args.extend(fuse_fundecl.args)
        # 3) Uniquify symbol identifiers
        fuse_symbols = SymbolReferences().visit(fuse_ast)
        for decl in fuse_fundecl.args:
            for symbol, _ in fuse_symbols[decl.sym.symbol]:
                symbol.symbol = "%s_%d" % (symbol.symbol, unique_id)
        # 4) Concatenate bodies
        base_fundecl.body.extend(
            [ast.FlatBlock("\n\n// Fused kernel: \n\n")] +
            [ast.Block(fuse_fundecl.body, open_scope=True)])

    # Eliminate redundancies in the /fused/ kernel signature
    Filter().kernel_args(loops, base_fundecl)

    return Kernel(kernels, base_ast, loop_chain_index)
Example 11
    def __compare_measure__(self, measure_name, data):
        data, target = data['X'], data['Y']

        start_time = time.time()
        custom = lambda x, y: np.sum(x + y, axis=1)
        f = Filter(custom, GLOB_CR["K best"](6))
        res = f.run(
            data, target
        )  # Filter(measure_name, GLOB_CR["K best"](6)).run(data, target)
        print("ITMO_FS time --- %s seconds ---" % (time.time() - start_time))

        start_time = time.time()
        res = SelectKBest(GLOB_MEASURE[measure_name],
                          k=6).fit_transform(data, target)
        print("SKLEARN time --- %s seconds ---" % (time.time() - start_time))
        print(data.shape, '--->', res.shape)
Example 12
    def fetch_stocks(self, params):
        """If params == 'all', fetch all stocks via get_all_categories."""
        filter = Filter()
        parser = Parse()
        stocklist = []
        if params == 'all':
            cats = filter.get_all_categories()
            for cat in cats:
                params = [('sc', cat)]
                try:
                    stocklist.extend(self.fetch_stocks(params))
                except Exception as e:
                    print(cat)
                    print(e)
                    print('exited prematurely')
                    exit()
Example 13
def run():

    for aux in range(len(list_departaments)):

        departament = DataSourceDepartament.read(
            "https://matriculaweb.unb.br/graduacao/oferta_dep.aspx?cod=" +
            str(list_departaments_numbers[aux]), list_departaments[aux])
        DataSinkDepartament.write(departament, list_departaments[aux])

    for new_aux in range(len(list_departaments_cods)):

        subjects = DataSourceSubject.read(
            "https://matriculaweb.unb.br/graduacao/oferta_dis.aspx?cod=" +
            str(list_departaments_cods[new_aux]), new_aux)
        DataSinkSubject.write(subjects)

    subjects_darcy = DataSourceDepartament.read(
        "https://matriculaweb.unb.br/graduacao/oferta_dep.aspx?cod=" +
        str(list_departaments_numbers[3]), list_departaments[3])

    subjects_darcy_cods = subjects_darcy.list_cods
    subjects_darcy_initials = subjects_darcy.list_initials
    size_subjects_darcy = 76

    for aux in range(size_subjects_darcy):

        subject = DataSourceSubjectsDarcy.read(
            "https://matriculaweb.unb.br/graduacao/oferta_dis.aspx?cod=" +
            str(subjects_darcy_cods[aux]), subjects_darcy_initials[aux],
            subjects_darcy_cods[aux])
        DataSinkSubject.write(subject)

    for aux in range(len(list_campi_numbers)):

        request = requests.get(
            "https://matriculaweb.unb.br/graduacao/curso_rel.aspx?cod=" +
            str(list_campi_numbers[aux]))
        soup = BeautifulSoup(request.content, "html.parser")

        courses = Courses()
        list_content = []
        for tr in soup.find_all('tr'):
            for td in tr.find_all('td'):
                list_content.append(td.text)

        # Clean the scraped cells once, after collecting the whole table
        list_content = Filter.blank_space(list_content)
        list_content = Filter.upper_words(list_content)
        list_content = Filter.remove_accents(list_content)
        list_cods = Filter.remove_vogals(list_content)
        list_modalities = Filter.get_modalities(list_content)
        list_shift = Filter.get_shift(list_content)
        list_names = Filter.get_names(list_content)

        courses.codes = list_cods
        courses.modalities = list_modalities
        courses.list_shift = list_shift
        courses.names = list_names

        DataSinkCourse.write(courses, list_departaments[aux])
Example 14
 def test_where(self):
     l = ['ab', 'bc', 'ca', 'AB']
     self.assertListEqual(SequenceChain(l).where().reveal(), l)
     self.assertListEqual(
         SequenceChain(l).where(Filter(eq='ab')).reveal(), ['ab'])
     self.assertListEqual(
         SequenceChain(l).where(Filter(contains='a')).reveal(),
         ['ab', 'ca'])
     self.assertListEqual(
         SequenceChain(l).where(Filter(icontains='a')).reveal(),
         ['ab', 'ca', 'AB'])
     self.assertListEqual(
         SequenceChain(l).where(Filter(istartswith='a')).reveal(),
         ['ab', 'AB'])
     self.assertListEqual(
         SequenceChain(l).where(Filter(contains='a',
                                       contains_='b')).reveal(),
         ['ab', 'bc', 'ca'])
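The last assertion reveals the convention this Filter apparently uses for repeated lookups: a trailing underscore (contains_) passes the same lookup twice, and the results are OR-ed. A minimal sketch consistent with all the assertions above (an assumption, not the library's real code):

class Filter:
    LOOKUPS = {
        'eq': lambda x, v: x == v,
        'contains': lambda x, v: v in x,
        'icontains': lambda x, v: v.lower() in x.lower(),
        'istartswith': lambda x, v: x.lower().startswith(v.lower()),
    }

    def __init__(self, **lookups):
        # Strip trailing underscores so `contains_` maps back to `contains`
        self.tests = [(self.LOOKUPS[name.rstrip('_')], value)
                      for name, value in lookups.items()]

    def __call__(self, item):
        # OR semantics: keep the item if any lookup matches
        return any(test(item, value) for test, value in self.tests)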
Example 15
 def mouseMoveEvent(self, e):
     QFrame.mouseMoveEvent(self, e)
     if self.dragged:
         pos, i = e.pos(), self.focused
         fc, g = fromPixelCords(self.width(), self.height(), pos, self.xaxis, self.raxis)
         old = self.parent().chain._filters[i]
         if old._type not in (FilterType.Peak, FilterType.LShelving, FilterType.HShelving):
             g = 0
         self.parent().chain.updateFilt(i, Filter(old._type, fc * 2 / fs, g, Q=old._Q))
         self.updateHandles()
         self.parent().updateChainTF()
         self.update()
Example 16
def main():
    filter = Filter(model_file="model.p", scaler_file="scaler.p")
    #filter.predict_batch(image_path=glob("./labeled_data_smallset/vehicles_smallset/**/*.*"))
    #filter.predict_batch(image_path=filter.test_clf_image_paths)
    frame = None
    cnt = 0
    for path in filter.test_video_images_path:
        cnt += 1
        if frame is not None and cnt == frame:
            image = cv2.cvtColor(cv2.imread(path), cv2.COLOR_BGR2RGB)
            final_image = filter.pipepine(image)
            plt.imshow(final_image)
            plt.show()
            break
        elif frame is None:
            image = cv2.cvtColor(cv2.imread(path), cv2.COLOR_BGR2RGB)
            final_image = filter.pipepine(image)
            plt.imshow(final_image)
            plt.show()
    # image_res, centroids_and_sizes = filter.sliding_box_multi_level(image, level=2)
    """
Example 17
    def updateFilter(self, i, fc, g, Q):
        oldf = self.chain._filters[i]
        type = oldf._type
        # print oldf._type, oldf._fc, oldf._g, oldf._Q

        # fc_val = fc * 2 / fs
        # print fc_val, g, Q

        f = Filter(type, fc, g, Q)
        self.chain.updateFilt(i, f)
        # chain.changeFilt(i, type, fc, g, Q)
        self.chain.reset()
Example 18
def updateFilter(i, fc, g, Q):
    global chain
    global fs
    oldf = chain._filters[i]
    type = oldf._type
    # print oldf._type, oldf._fc, oldf._g, oldf._Q

    # fc_val = fc * 2 / fs
    # print fc_val, g, Q

    f = Filter(type, fc, g, Q)
    chain.updateFilt(i, f)
    # chain.changeFilt(i, type, fc, g, Q)
    chain.reset()
Example 19
    def parse(input):
        """
        Try to parse the input. Depending on its shape, this
        returns either an Event or a Filter.
        :param input: raw user input string
        :return: an Event or a Filter
        """
        input = str(input)
        try:
            message, category, to = re.findall(r'^(.*)\s#(\S*)\s@(\S*)$',
                                               input)[0]
            return Event(message, category, to, datetime.datetime.now())
        except IndexError:
            pass

        try:
            return Filter.create(input)
        except ValueError as e:
            raise ValueError("Input is not valid: \n{}".format(str(e)))
Example 20
def scrape(data):
    us_apps_filter = Filter(
        field='app_store_url',
        op=operator.contains,
        value='/us/'
    )
    filter_chain = FilterChain()
    filter_chain.add_filter(us_apps_filter)

    us_apps = filter_chain.filter(data)

    gathered_data = gather_data(us_apps)

    spanish_and_tagalog_filter = ListContainedinListFilter(
        field='languages',
        op=operator.contains,
        value=[u'Spanish', u'Tagalog']
    )

    filter_chain = FilterChain()

    filter_chain.add_filter(spanish_and_tagalog_filter)

    spanish_and_tagalog_data = filter_chain.filter(gathered_data)

    insta_in_name_filter = CaseInsensitiveStringFilter(
        field='name',
        op=operator.contains,
        value='insta'
    )

    filter_chain = FilterChain()

    filter_chain.add_filter(insta_in_name_filter)

    insta_in_name_data = filter_chain.filter(gathered_data)

    filtered_data = {
        'apps_in_spanish_and_tagalog': [_d.get('app_identifier') for _d in spanish_and_tagalog_data],
        'apps_with_insta_in_name': [_d.get('app_identifier') for _d in insta_in_name_data]
    }
    write_json_to_file(filtered_data, 'filtered_apps.json')
    write_json_to_file(gathered_data, 'apps.json')
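A minimal sketch of the Filter/FilterChain pattern scrape() relies on (assumed shapes, not the project's actual classes): each Filter applies op(field_value, value) to a record, and the chain keeps records that pass every filter.

class Filter(object):
    def __init__(self, field, op, value):
        self.field, self.op, self.value = field, op, value

    def __call__(self, record):
        # e.g. operator.contains(record['app_store_url'], '/us/')
        return self.op(record.get(self.field, ''), self.value)

class FilterChain(object):
    def __init__(self):
        self._filters = []

    def add_filter(self, f):
        self._filters.append(f)

    def filter(self, data):
        # AND semantics across the chained filters
        return [d for d in data if all(f(d) for f in self._filters)]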
Example 21
    def build_dfgp(self, filter_type):
        """Create the DFGP based on internal settings. Creation ordering: build,
        find loops and parallelisms, then filter.
        Args:
            filter_type (str): selects how the DFGP is filtered
                ('FWG', 'WTH', or 'STD').
        """
        # print settings
        print("DFGP - settings > {}:{}:{}".format(
            self.parallelisms_threshold, self.percentile_frequency_threshold,
            filter_type))
        # build
        self.build()
        loops = self.detect_loops_simple()
        loops_extended = self.detect_loops_extended(loops)
        self.detect_parallelisms(loops, loops_extended)

        # filter
        if filter_type == 'FWG':
            Filter.with_guarantees(self, self.percentile_frequency_threshold)
        elif filter_type == 'WTH':
            Filter.with_threshold(self, self.percentile_frequency_threshold)
        elif filter_type == 'STD':
            Filter.standard(self)
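A usage sketch (the constructor and attribute values here are hypothetical; only build_dfgp and the two thresholds come from the snippet):

dfgp = DFGP(log)  # hypothetical constructor
dfgp.percentile_frequency_threshold = 40
dfgp.parallelisms_threshold = 0.1
dfgp.build_dfgp('FWG')  # build, detect loops/parallelisms, filter with guarantees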
Example 22
 def fetch_stocks(self, params):
     filter = Filter()
     parser = Parse()
     
     url = filter.build_query_string(params)
     results = parser.parse(url, [])
Example 23
def test_compound_filter(varset_message, varset_callid_message, agentcalled_message):
    message_filter = Filter(event="varset", variable="call_id")
    assert not message_filter.match(varset_message)
    assert message_filter.match(varset_callid_message)
    assert not message_filter.match(agentcalled_message)
Example 24
def test_basic_filter(varset_message, varset_callid_message, agentcalled_message):
    message_filter = Filter(event="varset")
    assert message_filter.match(varset_message)
    assert message_filter.match(varset_callid_message)
    assert not message_filter.match(agentcalled_message)
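Both tests are consistent with a Filter that stores its constructor keywords and matches only when every stored key/value pair equals the corresponding message field. A minimal sketch assuming dict-like messages (not the library's actual implementation):

class Filter:
    def __init__(self, **criteria):
        self.criteria = criteria

    def match(self, message):
        # Every criterion must match; extra message fields are ignored
        return all(message.get(k) == v for k, v in self.criteria.items())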
Example 25
 def test_filter(self):
     data, target = load_iris(return_X_y=True)
     res = Filter("SpearmanCorr",
                  GLOB_CR["Best by value"](0.9999)).run(data, target)
     print("SpearmanCorr:", data.shape, '--->', res.shape)
Example 26
 def __test_mrmr(cls, data, target):
     n = data.shape[1] // 2
     res = Filter(GLOB_MEASURE["MrmrDiscrete"](n),
                  GLOB_CR["Best by value"](0.0)).run(data, target)
     print("Mrmr:", data.shape, '--->', res.shape)
Example 27
from astropy.table import Table
from spectrum import Spectrum
from filters import Filter
from diagram import ColourColourDiagram
import matplotlib.pyplot as plt

table = Table.read('sdss_data.fits')  # magnitudes from SDSS so that we can classify our spectrum
stars = table[table['class'] == 'STAR']
qsos = table[table['class'] == 'QSO']

# read in the relevant filter curves
u_filter = Filter.from_file('sdss_filters/sdss-u.fits')
g_filter = Filter.from_file('sdss_filters/sdss-g.fits')
r_filter = Filter.from_file('sdss_filters/sdss-r.fits')

# Now define our colours using filters
# Python allows us to decide how objects use the `-` operator. Look at `Filter` to see how it works
ug_colour = u_filter - g_filter
gr_colour = g_filter - r_filter

star_galaxy_diagram = ColourColourDiagram(ug_colour, gr_colour)  # make a colour-colour diagram using the two colours above
star_galaxy_diagram.plot_table(stars, c='b', alpha=0.2, s=1, label='stars')  # plot the known stars using the `plot_table` method
star_galaxy_diagram.plot_table(qsos, c='r', alpha=0.2, s=1, label='QSOs')  # plot the known qsos using the `plot_table` method

spectrum = Spectrum.from_file('spec-0266-51602-0003.fits')  # read the spectrum into the `Spectrum` object
star_galaxy_diagram.plot_spectrum(spectrum, c='k', s=50)  # plot the spectrum, where all the maths is behind the scenes

# So it is now easy to plot any additional spectra we have without changing the code

plt.legend()
plt.show()
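The comment about the `-` operator points at operator overloading. A plausible minimal sketch of what Filter does (an assumption; the real class lives in the project's filters module, and source.magnitude is an assumed method): subtraction returns a Colour object instead of a number, so the diagram can evaluate it later against a table row or a spectrum.

class Colour:
    def __init__(self, band1, band2):
        self.band1, self.band2 = band1, band2

    def value(self, source):
        # A colour index is the magnitude difference between two bands
        return source.magnitude(self.band1) - source.magnitude(self.band2)

class Filter:
    def __sub__(self, other):
        return Colour(self, other)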
Example 28
    def render_POST(self, request):
        """Handle POST requests."""
        global master, log, key, agents, filters, lock
        log.info(request.path)

        reqJSON = json.loads(request.content.getvalue())
        if request.path == "/handshake":  # New agent initializes secret key
            # Retrieve public key
            agent_pub = int(reqJSON['key'])

            # Parse agent address
            agent_addr = request.getClientIP() + ":" + str(reqJSON['port'])

            # Find the agent
            ret = ""
            lock.acquire()
            try:
                for agent in agents:
                    if agent.getName() == reqJSON['name']:  # If found
                        if not agent:  # If it was dead
                            log.info("Recognized new agent: " +
                                     agent.getName() + " is now alive.")

                        # Turn on the agent and generate a secret key
                        agent.setStatus(True)
                        agent.generateKey(agent_pub)
                        log.debug(agent.getKey())
                        ret = agent.getPublicKey()
            finally:
                lock.release()

            # If agent was not recognized
            if not ret:
                temp = Agent(agent_addr, reqJSON['name'], True)
                temp.generateKey(agent_pub)
                ret = temp.getPublicKey()
                log.debug(temp.getKey())
                lock.acquire()
                try:
                    agents.append(temp)
                finally:
                    lock.release()
                log.info("Recognized new agent: " + temp.getName() +
                         " is now alive.")

                dumpContent()

            # Return master's public key
            return str(ret)
        elif request.path == "/newagent" and request.getClientIP(
        ) == "127.0.0.1":
            a = Agent(reqJSON['ip'] + ":" + reqJSON['port'], reqJSON['name'],
                      True)

            shakeHands(a)

            lock.acquire()
            try:
                agents.append(a)
            finally:
                lock.release()

            if a:
                init_one(a, log)

            dumpContent()
            log.info('Agent \'' + reqJSON['name'] +
                     '\' has been added from web manager.')

            request.setHeader("content-type", "application/json")
            return json.JSONEncoder().encode({'success': True})
        elif request.path == "/delagent" and request.getClientIP(
        ) == "127.0.0.1":
            lock.acquire()
            try:
                for x in range(0, len(agents)):
                    if agents[x].getName() == reqJSON['name']:
                        del agents[x]
                        break
            finally:
                lock.release()

            dumpContent()
            log.info('Agent \'' + reqJSON['name'] +
                     '\' has been deleted from web manager.')

            request.setHeader("content-type", "application/json")
            return json.JSONEncoder().encode({'success': True})
        elif request.path == "/newfilter" and request.getClientIP(
        ) == "127.0.0.1":
            lock.acquire()
            try:
                filters[reqJSON['name']] = Filter(reqJSON['filt'])
                for agent in agents:
                    if agent and filters[reqJSON['name']].isUsed(
                            agent.getName()):
                        init_one(agent, log)
            finally:
                lock.release()

            dumpContent()
            log.info('Filter \'' + reqJSON['name'] +
                     '\' has been added from web manager.')

            request.setHeader("content-type", "application/json")
            return json.JSONEncoder().encode({'success': True})
        elif request.path == "/delfilter" and request.getClientIP(
        ) == "127.0.0.1":
            reloads = []
            for agent in agents:
                lock.acquire()
                try:
                    if agent and filters[reqJSON['name']].isUsed(
                            agent.getName()):
                        reloads.append(agent)
                finally:
                    lock.release()  # release exactly once per iteration

            lock.acquire()
            try:
                del filters[reqJSON['name']]
            finally:
                lock.release()

            dumpContent()
            for agent in reloads:
                init_one(agent, log)

            log.info('Filter \'' + reqJSON['name'] +
                     '\' has been deleted from web manager.')

            request.setHeader("content-type", "application/json")
            return json.JSONEncoder().encode({'success': True})
        else:
            request.setResponseCode(501)
            return ""
Example 29
def main():
    filter = Filter(model_file="model.p", scaler_file="scaler.p")
    image = cv2.imread(filter.ex.car_paths[0])
    filter.extract_half_image_hog(image)
Example 30
 def _filter(self, loops):
     return Filter().loop_args(loops).values()
Example 31
 def render_POST(self, request):
     global master, key, filters
     log.debug(request.path)
     
     if request.path == "/handshake": # Generates new key. All other POST bodies are encoded with this secret key.
         key.generateKey(int(request.content.getvalue()))
         log.debug(key.getKey())
         log.debug("Handshake complete. Secret key generated.")
         return str(key.publicKey)
     
     log.debug(util.decode(request.content.getvalue(), key.getKey()))
     reqJSON = json.loads(util.decode(request.content.getvalue(), key.getKey())) # Decode POST body 
     if request.path[:5] == "/init":
         if request.path == "/init":
             log.info("Master is online. Initial retrieval of DNS settings begun.")
             
             # Remove all conf files to create fresh ones
             try:
                 os.remove(ns_file)
             except OSError:
                 log.warning("NS file (" + ns_file + ") has already been deleted.")
             try:
                 os.remove(a_file)
             except OSError:
                 log.warning("A file (" + a_file + ") has already been deleted.")
             try:
                 os.remove(ptr_file)
             except OSError:
                 log.warning("PTR file (" + ptr_file + ") has already been deleted.")
             
             # Create new conf files and write a header to each
             with open(ns_file, 'a') as ns_f:
                 ns_f.write("######### NS Records #########\n\n")
             with open(a_file, 'a') as a_f:
                 a_f.write("######### A Records #########\n\n")
             with open(ptr_file, 'a') as ptr_f:
                 ptr_f.write("######### PTR Records #########\n\n")
             
             filters = {}
             return json.dumps({'success': True})
         # Endpoints to initialize each record type
         if request.path[5:] == '/ns':
             communicator.receiveFromFile(ns_file, reqJSON, filters)
             return json.dumps({'success': True})
         elif request.path[5:] == '/a':
             communicator.receiveFromFile(a_file, reqJSON, filters)
             return json.dumps({'success': True})
         elif request.path[5:] == '/ptr':
             communicator.receiveFromFile(ptr_file, reqJSON, filters)
             return json.dumps({'success': True})
     elif request.path[:4] == '/ns/':
         if request.path[4:] == 'delete':
             ns_records = communicator.fromJSON(reqJSON, log)['ns_records']
             
             communicator.logSet(ns_records, log, "- ")
             communicator.deleteFromFile(ns_records, ns_file, log)
             request.responseHeaders.addRawHeader(b"content-type", b"application/json")
             return json.dumps({'success': True})
         elif request.path[4:] == "add":
             ns_records = communicator.fromJSON(reqJSON, log)['ns_records']
             
             communicator.logSet(ns_records, log, "+ ")
             communicator.addToFile(ns_records, ns_file, filters, log)
             request.responseHeaders.addRawHeader(b"content-type", b"application/json")
             return json.dumps({'success': True})
         else:
             request.setResponseCode(501)
             return ""
     elif request.path[:3] == '/a/':
         if request.path[3:] == 'delete':
             a_records = communicator.fromJSON(reqJSON, log)['a_records']
             
             communicator.logSet(a_records, log, "- ")
             communicator.deleteFromFile(a_records, a_file, log)
             request.responseHeaders.addRawHeader(b"content-type", b"application/json")
             return json.dumps({'success': True})
         elif request.path[3:] == "add":
             a_records = communicator.fromJSON(reqJSON, log)['a_records']
             
             communicator.logSet(a_records, log, "+ ")
             communicator.addToFile(a_records, a_file, filters, log)
             request.responseHeaders.addRawHeader(b"content-type", b"application/json")
             return json.dumps({'success': True})
         else:
             request.setResponseCode(501)
             return ""
     elif request.path[:5] == '/ptr/':
         if request.path[5:] == 'delete':
             ptr_records = communicator.fromJSON(reqJSON, log)['ptr_records']
             
             communicator.logSet(ptr_records, log, "- ")
             communicator.deleteFromFile(ptr_records, ptr_file, log)
             request.responseHeaders.addRawHeader(b"content-type", b"application/json")
             return json.dumps({'success': True})
         elif request.path[5:] == "add":
             ptr_records = communicator.fromJSON(reqJSON, log)['ptr_records']
             
             communicator.logSet(ptr_records, log, "+ ")
             communicator.addToFile(ptr_records, ptr_file, filters, log)
             request.responseHeaders.addRawHeader(b"content-type", b"application/json")
             return json.dumps({'success': True})
         else:
             request.setResponseCode(501)
             return ""
     elif request.path == '/filters':
         log.debug("Filters:")
         filters[reqJSON['code']] = Filter(reqJSON['filter'])
         log.info("Added filter '" + reqJSON['code'] + "'")
         log.debug(json.dumps(reqJSON['filter']))
         request.responseHeaders.addRawHeader(b"content-type", b"application/json")
         return json.dumps({'success': True})
     elif request.path == '/reload':
         log.info("DNS settings up to date. Reloading.")
         util.execute("systemctl restart dnsmasq")
         log.debug("DNSMasq reloaded.")
         return json.dumps({'success': True})
     else:
         request.setResponseCode(501)
         return ""
Example 32
    contents = os.listdir(directory)
    files = []

    for c in contents:
        path = os.path.join(directory, c)
        if (os.path.isfile(path)
                and os.path.splitext(c)[-1] in correct_extensions
                and os.path.splitext(c)[0] not in banned):
            files.append(path)
        elif os.path.isdir(path) and c + "/" not in banned:
            files += get_files_recursive(path, file_filter)
    return files


file_filter = Filter()
"""
Enter directory for searching
"""
while True:
    search_dir = input("Directory: ")
    if os.path.exists(search_dir):
        break
    print("Directory doesn't exist!")
print("\n")
"""
Enter extensions to add
"""
while True:
    ext = input("Enter extension (end): ")
    if ext == "end":
Example 33
import cv2
from tensorflow.keras.models import model_from_json
import numpy as np
from datagenerator import FacialKeyPointsDataset
from filters import Filter

TYPE_OF_DATA_AND_MODEL = 'vector'

flags = {
    "detect_faces": False,
    "draw_keypts": False,
    "filter": None,
    "run": True
}

filters = [
    Filter("images/filter1.png", 17, (25, 29), (2, 16), offset=(-10, 0)),
]

face_cascade = cv2.CascadeClassifier(
    'detector_architectures/haarcascade_frontalface_default.xml')

datasetgen = FacialKeyPointsDataset(
    csv_file='data/training_frames_keypoints.csv',
    root_dir='data/training/',
    normalization=TYPE_OF_DATA_AND_MODEL)

# load json and create model
with open('models/model_{}_batchnorm_194.json'.format(
        TYPE_OF_DATA_AND_MODEL)) as json_file:
    loaded_model_json = json_file.read()
Example 34
def find_text_lines(original,
                    thresh=0.02,
                    min_size=4,
                    max_size=12,
                    spacing=1.8,
                    min_length=1,
                    otsu=False,
                    x_only=False):
    def _threshold_normal(_image, _thresh):
        return np.uint8(
            cv2.threshold(_image,
                          _thresh * utils.dtype_limits(_image)[1], 255,
                          cv2.THRESH_BINARY)[1])

    def _threshold_otsu(_image, *args):  # Careful with that axe, Eugene
        return cv2.threshold(
            np.uint8(_image / utils.dtype_limits(_image)[1] * 255.), 0, 255,
            cv2.THRESH_BINARY + cv2.THRESH_OTSU)[1]

    if otsu:
        get_thresh = _threshold_otsu
    else:
        get_thresh = _threshold_normal

    def _find_text_preprocess(_image):
        # _image = cv2.bilateralFilter(_image, -1, 64, 3)
        # if gauss:
        #     _image = cv2.medianBlur(_image, 3)
        # cv2.imshow('prep', _image)
        if len(_image.shape) > 2:
            _image = cv2.cvtColor(_image, cv2.COLOR_BGR2GRAY)
        # image = cv2.equalizeHist(image)
        return _image

    def _get_diff(_image):
        # _image = cv2.medianBlur(_image, 5)
        # _image = cv2.GaussianBlur(_image, (3, 3), 5)
        _image = utils.differentiate(_image, metric=utils.METRIC_MAX)
        return utils.differentiate(_image, metric=utils.METRIC_MIN)

    if spacing != 0:
        ksize = int(min_size * spacing)
    else:
        ksize = 1

    filters = Filter()
    # filters.add_filter("outer")
    filters.add_filter("min_area", min_size * min_size * min_length * 12)
    # filters.add_filter("min_fill", 0.3)
    filters.add_filter("min_aspect_ratio", 0.1)
    # filters.add_filter("smoothness", 5)

    # if diffs is None:
    #     diffs = utils.differentiate(np.float32(_find_text_preprocess(original)) / 255.,
    #                                 xkernel=5, ykernel=5, metric=utils.METRIC_SPLIT)
    # if x_only:
    #     diff = utils.differentiate(np.float32(_find_text_preprocess(original)) / 255.,
    #  xkernel=5, ykernel=5, metric=utils.METRIC_SPLIT)[0]
    # else:
    #     diff = utils.differentiate(np.float32(_find_text_preprocess(original)) / 255.,
    # xkernel=5, ykernel=5)
    # diff = utils.differentiate(np.float32(_find_text_preprocess(original) / 255.),
    # metric=utils.METRIC_MIN)

    diff = _get_diff(_find_text_preprocess(original))
    ndiff = diff / diff.max()
    binary = get_thresh(ndiff, thresh)
    # cv2.imshow('diff', binary * 255)
    # Connecting
    connected = cv2.morphologyEx(binary,
                                 cv2.MORPH_CLOSE,
                                 cv2.getStructuringElement(
                                     cv2.MORPH_RECT,
                                     utils.fix_kernel_size(ksize, ksize)),
                                 borderType=cv2.BORDER_CONSTANT,
                                 borderValue=0)
    # cv2.imshow('connected', connected)
    # Filtering
    binary = cv2.morphologyEx(
        connected, cv2.MORPH_OPEN,
        cv2.getStructuringElement(
            cv2.MORPH_RECT,
            utils.fix_kernel_size(int(min_size) - 1,
                                  int(min_size) - 1)))
    # cv2.imshow('bin', binary)
    return filters.filter([
        Contour(c, thresh) for c in cv2.findContours(
            binary, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)[0]
    ])
Example 35
    logToConsole = cfg.getboolean(section, 'logToConsole')
    cfgLogLevel = cfg.get(section, 'cfgLogLevel')
    log_max_size = cfg.getint(section, 'log_max_size')
    log_max_backup = cfg.getint(section, 'log_max_backup')

    section = 'Settings'
    serverIP = cfg.get(section, 'serverIP')
    port = cfg.getint(section, 'port')
    shakeInterval = cfg.getint(section, 'handshake')
    checkInterval = cfg.getint(section, 'statusCheck')
    pollInterval = cfg.getint(section, 'poll')
    filters = {}
    json_fl = join(installPath, cfg.get(section, 'content'))
    with open(json_fl) as fl:
        json_from_fl = json.load(fl)
    for filt in json_from_fl['filters']:
        filters[filt] = Filter(json_from_fl['filters'][filt])
    agents = []
    for agent in json_from_fl['agents']:
        agents.append(Agent(agent['addr'], agent['name']))

    section = 'Email Settings'
    email_sender = cfg.get(section, 'from')
    receivers = cfg.get(section, 'receivers').split(',')

    section = 'Database'
    host = cfg.get(section, 'host')
    db_port = cfg.getint(section, 'port')
    user = cfg.get(section, 'user')
    password = cfg.get(section, 'password')
    db = cfg.get(section, 'db')
Example 36
    tr_label_ = rg.transform(tr_label_.values, False)
    # print(rg.n_grid)

    print('train...')
    est = RandomForestClassifier(
        n_jobs=-1,
        n_estimators=100,
        max_features='sqrt',
        bootstrap=True,
        criterion='gini'
    ).fit(tr_feature_.values, tr_label_)

    print('predict...')
    te_pred = est.predict_proba(te_feature_.values)
    fl = Filter()
    top_n = 1
    te_pred_ = te_pred.copy()

    pred_idx = np.argsort(-te_pred, axis=1)
    for i, idx in enumerate(pred_idx[:, top_n:]):
        te_pred_[i, idx] = 0.

    z = np.sum(te_pred_, axis=1, keepdims=True)
    te_pred_ = te_pred_ / z

    te_pred_ = te_pred_.dot(rg.grid_center)
    
    te_pred_ = fl.mean_filter(te_pred_, 11, 10)
    error += [disney.distance(pt1, pt2) for pt1, pt2 in zip(te_pred_, te_label_.values)]
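A tiny worked example of the top-n renormalisation above with top_n=1: everything but the largest probability in each row is zeroed, then the row is rescaled to sum to 1 before being projected onto the grid centers.

import numpy as np

te_pred = np.array([[0.2, 0.5, 0.3]])
pred_idx = np.argsort(-te_pred, axis=1)    # [[1, 2, 0]]
te_pred_ = te_pred.copy()
for i, idx in enumerate(pred_idx[:, 1:]):  # zero all but the top-1 column
    te_pred_[i, idx] = 0.                  # -> [[0. , 0.5, 0. ]]
z = np.sum(te_pred_, axis=1, keepdims=True)
te_pred_ = te_pred_ / z                    # -> [[0., 1., 0.]]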
Example 37
 def filter_videos(self, filters):
     self.filtered_playlists = Filter.filter_all(filters, self.playlists)