Example #1
def get_movie_recommendation():
    data = request.get_json(silent=True, force=True)
    response = "En este momento no puedo entregarte una recomendación, intenta nuevamente"
    try:
        if data['queryResult']['action'] == 'id_usuario':
            ## GET USER
            user = int(data['queryResult']['parameters']['id_usuario'])
            ## GET RECOMMENDATION
            get_movie_user = Filtering()
            response = get_movie_user.get_rec_movie(user)
            response = response.replace("{'", "").replace("'}", "").replace("', '", "  |  ")

        elif data['queryResult']['action'] == 'movies':
            ## GET MOVIE
            movie = data['queryResult']['parameters']['movie']
            ## GET RECOMMENDATION
            get_movie = Recom()
            response = get_movie.obten_recomendacion(movie)
            response = response.replace("['", "").replace("']", "").replace("', '", "  |  ")

    except (TypeError, KeyError, ValueError):
        # Malformed or missing payload: keep the fallback reply defined above.
        pass

    reply = {"fulfillmentText": response}
    return jsonify(reply)
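A handler like this is normally registered as a Flask route that Dialogflow calls as its fulfillment webhook. Below is a minimal, hypothetical wiring in the same module, assuming Flask provides the request and jsonify used above; the /webhook path and port are assumptions, not part of the example.

from flask import Flask, request, jsonify

app = Flask(__name__)

@app.route('/webhook', methods=['POST'])
def webhook():
    # Delegate to the fulfillment handler defined in the example above.
    return get_movie_recommendation()

if __name__ == '__main__':
    app.run(port=5000)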
Example #2
    def compare(self):
        """ Test a suspicious document for near-duplicate plagiarism with regards to
        a source document and return a feature list.
        """
        ps = []
        detections = []
        susp_sent = []

        for i in range(len(self.susp_bow)):
            for j in range(len(self.src_bow)):
                alza_sim = Seeding.alzahrani_similarity(self.susp_bow[i], self.src_bow[j], self.model)
                if alza_sim > self.th1:
                    # Seed pair: suspicious sentence i is similar enough to source sentence j.
                    ps.append((i, j))

        # Extension phase: merge seed pairs into candidate passages, once with the
        # configured gaps and once with the gaps widened by 20 sentences.
        (plags, psr) = Extension.integrate_cases(ps, self.src_gap, self.susp_gap, self.src_size, self.susp_size)
        (plags2, psr2) = Extension.integrate_cases(ps, self.src_gap + 20, self.susp_gap + 20, self.src_size, self.susp_size)

        plags = Extension.similarity3(plags, psr, self.src_bow, self.susp_bow, self.src_gap, self.src_gap_least, self.susp_gap,
                            self.susp_gap_least, self.src_size, self.susp_size, self.th3, self.model)
        plags2 = Extension.similarity3(plags2, psr2, self.src_bow, self.susp_bow, self.src_gap + 20, self.src_gap_least,
                             self.susp_gap + 20, self.susp_gap_least, self.src_size, self.susp_size, self.th3, self.model)

        plags = Filtering.remove_overlap3(self.src_bow, self.susp_bow, plags, self.model)
        plags2 = Filtering.remove_overlap3(self.src_bow, self.susp_bow, plags2, self.model)

        sum_src = 0
        sum_susp = 0
        for plag in plags2:
            arg1 = (self.src_offsets[plag[0][0]][0],
                    self.src_offsets[plag[0][1]][0] + self.src_offsets[plag[0][1]][1])
            arg2 = (self.susp_offsets[plag[1][0]][0],
                    self.susp_offsets[plag[1][1]][0] + self.susp_offsets[plag[1][1]][1])
            sum_src += arg1[1] - arg1[0]
            sum_susp += arg2[1] - arg2[0]

        # If the widened-gap passages cover far more source text than suspicious text,
        # keep them; otherwise use the passages found with the original gaps.
        selected_plags = plags2 if sum_src >= 3 * sum_susp else plags
        for plag in selected_plags:
            arg1 = (self.src_offsets[plag[0][0]][0],
                    self.src_offsets[plag[0][1]][0] + self.src_offsets[plag[0][1]][1])
            arg2 = (self.susp_offsets[plag[1][0]][0],
                    self.susp_offsets[plag[1][1]][0] + self.susp_offsets[plag[1][1]][1])
            if arg1[1] - arg1[0] >= self.min_plaglen and arg2[1] - arg2[0] >= self.min_plaglen:
                detections.append([arg1, arg2])
        return detections
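The tuple arithmetic above converts a passage, stored as (first sentence index, last sentence index) pairs, into character spans, where each offsets entry holds a (character start, length) pair. A small helper with a hypothetical name makes that intent explicit; this is an editor's sketch, not part of the original detector.

def span_from_offsets(offsets, first_sent, last_sent):
    """Return the (start, end) character span covering sentences first_sent..last_sent,
    given offsets as a sequence of (character_start, length) pairs per sentence."""
    start = offsets[first_sent][0]
    end = offsets[last_sent][0] + offsets[last_sent][1]
    return start, end

# With a passage stored as plag = ((src_first, src_last), (susp_first, susp_last)):
#   arg1 = span_from_offsets(self.src_offsets, plag[0][0], plag[0][1])
#   arg2 = span_from_offsets(self.susp_offsets, plag[1][0], plag[1][1])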
Example #3
def _fit(raw_data, debug_mode=False):
    """Fits a single dictionary of raw data.

    This function is meant to be passed as an argument
    to multiprocessing map.

    :param raw_data: a dictionary of raw data parameters
    :returns: a dictionary of fitted parameters

    """

    lags = create_lag_list(raw_data)

    # A number of averages <= 0 would cause an invalid division in the noise
    # calculation, so fall back to a noise power of 1.0 in that case.
    if raw_data['nave'] <= 0:
        noise_pwr = 1.0
    else:
        noise_pwr = NoisePower.acf_cutoff_pwr(raw_data)

    range_list = []
    for idx, range_number in enumerate(raw_data['slist']):
        if raw_data['pwr0'][range_number] != 0:
            new_range = Range(idx, range_number, raw_data, lags)
            range_list.append(new_range)

    Filtering.filter_tx_overlapped_lags(raw_data, lags, range_list)
    Filtering.filter_inf_lags(range_list)
    Filtering.filter_low_pwr_lags(raw_data, range_list)
    Filtering.filter_bad_acfs(raw_data, range_list, noise_pwr)

    ACFFitting.acf_pwr_fitting(range_list)

    ACFFitting.calculate_phase_and_elev_sigmas(range_list, raw_data)

    ACFFitting.acf_phase_unwrap(range_list, raw_data)
    ACFFitting.acf_phase_fitting(range_list)

    Filtering.filter_bad_fits(range_list)

    ACFFitting.xcf_phase_unwrap(range_list)
    ACFFitting.xcf_phase_fitting(range_list)

    if debug_mode:
        debug_output(range_list)

    determined_parameters = Determinations(raw_data, range_list, noise_pwr)
    return determined_parameters.paramater_dict
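As the docstring notes, _fit is intended to be mapped over many raw-data dictionaries with multiprocessing. A minimal sketch of that usage, assuming records is an iterable of such dictionaries (fit_all and records are hypothetical names):

import multiprocessing

def fit_all(records):
    # Fit every raw-data dictionary in parallel, one record per worker task.
    with multiprocessing.Pool() as pool:
        return pool.map(_fit, records)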
Example #4
    def __init__(self, channel_len, sampling_freq, hp_thresh, lp_thresh,
                 notch_thresh):
        Saving.__init__(self)
        Filtering.__init__(self, sampling_freq, hp_thresh, lp_thresh,
                           notch_thresh)
        self.data_orig = []
        self.data_processed = []
        self.buffer_process = []
        self.loc_start = []
        self.loc_start_orig = []

        self.__flag_start_bit = 165
        self.__flag_end_bit = 90
        self.__flag_sync_pulse = [0, 255]
        self.__sample_len = 25
        self.__channel_len = channel_len
        self.__sync_pulse_len = 1
        self.__counter_len = 1
        self.__ring_column_len = self.__channel_len + self.__sync_pulse_len + self.__counter_len

        self.filter_obj = [
            Filtering(sampling_freq, hp_thresh, lp_thresh, notch_thresh)
            for __ in range(self.__channel_len)
        ]
Example #5
    def OnSearch(self, event):
        # '사용자' ("user") is the profile whose top matches are requested.
        matchlist = cf.top_match(cf, loadDict(), '사용자')
        gap = -20

        fr = wx.Frame(parent=None, title="음식 추천 순위", size=(320, 200))  # "Food recommendation ranking"
        fr.pnl = wx.Panel(fr)

        # One hyperlink per recommended item, stacked 20 px apart.
        for m in matchlist:
            gap += 20
            hl.HyperLinkCtrl(fr.pnl, -1, m[1], pos=(30, 20 + gap), URL=findURL(m[1]))

        fr.Centre()
        fr.Show(True)
Example #6
def run_provider(provider, payload, method):
    """ Provider thread entrypoint

    Args:
        provider (str): Provider ID
        payload (dict): Search payload from Elementum
        method   (str): Type of search, can be ``general``, ``movie``, ``episode``, ``season`` or ``anime``
    """
    log.debug("[%s] Processing %s with %s method" %
              (provider, provider, method))

    filterInstance = Filtering()

    if method == 'movie':
        filterInstance.use_movie(provider, payload)
    elif method == 'season':
        filterInstance.use_season(provider, payload)
    elif method == 'episode':
        filterInstance.use_episode(provider, payload)
    elif method == 'anime':
        filterInstance.use_anime(provider, payload)
    else:
        filterInstance.use_general(provider, payload)

    if 'is_api' in definitions[provider]:
        results = process(provider=provider,
                          generator=extract_from_api,
                          filtering=filterInstance,
                          has_special=payload['has_special'],
                          skip_auth=payload['skip_auth'])
    else:
        results = process(provider=provider,
                          generator=extract_torrents,
                          filtering=filterInstance,
                          has_special=payload['has_special'],
                          skip_auth=payload['skip_auth'])

    got_results(provider, results)
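The if/elif chain above can also be expressed as a lookup table. The sketch below shows the same dispatch onto the Filtering setup calls; select_search and filter_instance are hypothetical names, and this is not the add-on's actual code.

def select_search(filter_instance, provider, payload, method):
    # Map each search method onto the corresponding Filtering setup call;
    # anything unrecognised falls back to the general search.
    dispatch = {
        'movie': filter_instance.use_movie,
        'season': filter_instance.use_season,
        'episode': filter_instance.use_episode,
        'anime': filter_instance.use_anime,
    }
    dispatch.get(method, filter_instance.use_general)(provider, payload)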
Example #7
timing = time.monotonic()

def t() -> str:
    # Elapsed whole seconds since the script started.
    return "%ds" % int(time.monotonic() - timing)

if len(sys.argv) != 6:
    sys.stderr.write('Usage: %s <start_ts> <days> <router> <directory> <dbfile>\n' % sys.argv[0])
    sys.exit(1)

if sys.argv[1].lower() == 'now':
    start_ts = dt.now()
else:
    start_ts = dt.strptime(sys.argv[1], "%Y-%m-%d %H:%M")
days = int(sys.argv[2])
router = sys.argv[3]
directory = sys.argv[4]
db_file = sys.argv[5]
pid = os.getpid()

print(pid, t(), router, 'load data from file %s' % repr(db_file), flush = True)
storage = Storage(db_file, accounts, directory, False)

reports = AccountsReport(lnames, accounts, storage)
print(pid, t(), router, 'calculate account usage', flush = True)
account_usage: Dict[str, Dict[p.Account, p.Usage]] = {}
for limit_name in lnames:
    account_usage[limit_name] = reports.account_usage(start_ts, router, limit_name)

print(pid, t(), router, 'configure firewall', flush = True)
filtering = Filtering(lnames, accounts, account_usage)
filtering.filter(directory, storage.rest_adds)

print(pid, t(), router, 'firewall configured.')
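The manual sys.argv handling above could also be written with argparse, which provides usage errors and help text. A sketch under that assumption; it mirrors the usage line above and is not the script's actual code.

import argparse
from datetime import datetime as dt

parser = argparse.ArgumentParser()
parser.add_argument('start_ts', help="'now' or a 'YYYY-MM-DD HH:MM' timestamp")
parser.add_argument('days', type=int)
parser.add_argument('router')
parser.add_argument('directory')
parser.add_argument('dbfile')
args = parser.parse_args()

# Same 'now' shortcut as the original script.
start_ts = dt.now() if args.start_ts.lower() == 'now' else dt.strptime(args.start_ts, "%Y-%m-%d %H:%M")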
Example #8
def run_provider(provider, payload, method):
    """ Provider thread entrypoint

    Args:
        provider (str): Provider ID
        payload (dict): Search payload from Quasar
        method   (str): Type of search, can be ``general``, ``movie``, ``episode``, ``season`` or ``anime``
    """
    log.debug("Processing %s with %s method" % (provider, method))

    filterInstance = Filtering()

    if method == 'movie':
        filterInstance.use_movie(provider, payload)
    elif method == 'season':
        filterInstance.use_season(provider, payload)
    elif method == 'episode':
        filterInstance.use_episode(provider, payload)
    elif method == 'anime':
        filterInstance.use_anime(provider, payload)
    else:
        filterInstance.use_general(provider, payload)

    if 'is_api' in definitions[provider]:
        results = process(provider=provider, generator=extract_from_api, filtering=filterInstance)
    else:
        results = process(provider=provider, generator=extract_torrents, filtering=filterInstance)

    got_results(provider, results)
Example #9
def calculate_info(initial_string, icsd, dim, delta):
    print(icsd, dim)
    initial_string = initial_string.rstrip()
    if dim == 0:
        for i in range(11):
            initial_string += ";-1"
    else:
        delta = up_delta
        # Assuming all cif files are present in the cif_files folder with that particular format.
        with open("cif_files/data_" + icsd + "-ICSD.cif", "r") as g:
            cif_file = g.read()
        cif_file = re.sub(r"[^\x00-\x7F]", "", cif_file)  # remove non-ascii characters
        cif_file = re.sub(r"(\(\d+)(\s|\n)", r"\1\)\2", cif_file)  # close any open brackets (throws an error otherwise)
        structure = CifParser.from_string(
            cif_file,
            occupancy_tolerance=100).get_structures(primitive=False)[0]
        filtering = Filtering(structure)
        filtering.form_neighbor_list()
        G = filtering.define_bonds(delta)
        delta_check = filtering.check_for_delta(G, dim)
        layers_angles_stuff = filtering.calculate_layers_angles_and_stuff(
            G, dim)

        while not delta_check:
            delta = delta - 0.1
            if (delta >= low_delta):
                G = filtering.define_bonds(delta)
                delta_check = filtering.check_for_delta(G, dim)
            else:
                break

        if delta < low_delta:
            layers_angles_stuff = filtering.calculate_layers_angles_and_stuff(
                filtering.define_bonds(up_delta), dim)
            initial_string += ";" + str(up_delta)
        else:
            layers_angles_stuff = filtering.calculate_layers_angles_and_stuff(
                G, dim)
            initial_string += ";" + str(delta)

        for stuff in layers_angles_stuff[:-2]:
            initial_string += ";" + str(stuff)

        # bonded lattice
        bonded_lattice = {}
        for key, value in layers_angles_stuff[-2].items():
            points_distorted = value
            coord_no = str(layers_angles_stuff[3][key])
            if coord_no in points_perfect:
                bonded_lattice[key] = {}
                for i in range(len(points_perfect[coord_no])):
                    bonded_lattice[key][lattices[coord_no][i]] = str(
                        np.round(symmetry_measure(
                            points_distorted,
                            points_perfect[coord_no][i])["symmetry_measure"],
                                 decimals=1))
            else:
                bonded_lattice[key] = -1

        # magnetic metal lattice
        metal_neigh_lattice = {}
        for key, value in layers_angles_stuff[-1].items():
            points_distorted = value
            coord_no = str(layers_angles_stuff[7][key])
            if coord_no in metal_points_perfect:
                metal_neigh_lattice[key] = {}
                metal_neigh_lattice[key][metal_lattices[coord_no]] = str(
                    np.round(symmetry_measure(
                        points_distorted,
                        metal_points_perfect[coord_no])["symmetry_measure"],
                             decimals=1))
            else:
                metal_neigh_lattice[key] = -1

        initial_string += ";" + str(bonded_lattice) + ";" + str(
            metal_neigh_lattice)
    print(initial_string)
    with open("results_" + str(os.getpid()) + ".txt", "a") as f:
        f.write(initial_string + "\n")
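The while loop above lowers delta in 0.1 steps until check_for_delta passes, falling back to up_delta if nothing in the allowed range works. The same search factored into a helper, as an illustration only (find_working_delta is a hypothetical name; filtering, dim, up_delta and low_delta are the objects and limits used in the example):

def find_working_delta(filtering, dim, up_delta, low_delta, step=0.1):
    # Lower delta step by step until check_for_delta passes; if no candidate
    # value in [low_delta, up_delta] works, fall back to the upper bound.
    delta = up_delta
    while delta >= low_delta:
        G = filtering.define_bonds(delta)
        if filtering.check_for_delta(G, dim):
            return delta, G
        delta -= step
    return up_delta, filtering.define_bonds(up_delta)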
Example #10
def clear_text(text):
    filtering = Filtering()
    return filtering.get_feature_vector(filtering.process_text(text))
Example #11
    def test_stopwords(self):
        text = "again MacOS win Neco"
        filtering = Filtering()
        result = filtering.get_feature_vector(filtering.process_text(text))
        self.assertNotIn("again", result)
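This test only asserts that the stopword "again" is absent from the feature vector. For illustration, a stand-alone stopword filter over tokens might look like the sketch below; the stopword set is a tiny stand-in, not the list the Filtering class actually uses.

STOPWORDS = {"again", "the", "a", "an", "and", "or"}

def remove_stopwords(tokens):
    # Drop any token that appears in the stopword set (case-insensitive).
    return [tok for tok in tokens if tok.lower() not in STOPWORDS]

print(remove_stopwords("again MacOS win Neco".split()))  # ['MacOS', 'win', 'Neco']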
Example #12
    def test_process_text(self):
        filtering = Filtering()
        text = "WHAT'A    LOVELY    DAY"
        after_text = "what'a lovely day"
        result = filtering.process_text(text)
        self.assertEqual(after_text, result)
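The expected output implies that process_text lowercases its input and collapses runs of whitespace. A minimal function consistent with this test, shown only as an illustration and not as the Filtering class's actual implementation:

import re

def process_text(text):
    # Lowercase and collapse runs of whitespace into single spaces.
    return re.sub(r"\s+", " ", text.strip()).lower()

assert process_text("WHAT'A    LOVELY    DAY") == "what'a lovely day"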