# Assumed imports for the scraper snippets on this page; parse() and
# scanNetQR() are project-level helpers that must already be in scope.
import logging
import re

import requests
from bs4 import BeautifulSoup


def request_freess_cx(url='https://freess.cx/', headers=None):
    print('req fscx...')
    qr = list()
    servers = list()
    try:
        response = requests.get(url, headers=headers).text
        soup = BeautifulSoup(response, 'html.parser')
        title = soup.find('title').text
        msg = soup.find('section', attrs={'id': 'banner'}).text.strip()

        info = {'message': msg, 'url': url, 'name': str(title)}
        # Collect the QR-code image links from the server grid.
        qr = [url.rstrip('/') + '/' + div.find('a').get('href')
              for div in soup.find_all('div', attrs={'class': '4u 12u(mobile)'})]
        for i, img_url in enumerate(qr):
            print('req img', img_url)
            try:
                servers.append(
                    parse(scanNetQR(img_url), ' '.join([title, str(i)])))
            except Exception as e:
                logging.exception(e, stack_info=True)
                print('IMG_URL FOR freess.cx:', img_url)
    except Exception as e:
        logging.exception(e, stack_info=True)
        return [], {'message': str(e), 'url': '', 'name': ''}
    return servers, info
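A hypothetical call, assuming the project helpers above are in scope; the User-Agent value is illustrative:

servers, info = request_freess_cx(headers={'User-Agent': 'Mozilla/5.0'})
print(info['name'], '-', len(servers), 'servers scraped')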
Example #2
import datetime
import json
import sys

import pymysql
from pymysql import MySQLError

import config  # project settings module; a placeholder sketch follows below


def main():
    try:
        connection = pymysql.connect(host=config.MYSQL_HOST,
                                     user=config.MYSQL_USER,
                                     password=config.MYSQL_PASSWORD,
                                     db=config.MYSQL_DB,
                                     cursorclass=pymysql.cursors.DictCursor)
    except MySQLError as e:
        print(f'MySQL error: {e}')
        sys.exit(1)  # non-zero status: the connection failed

    date = datetime.datetime.utcnow()

    for day in config.DAYS:
        print(f'process {day}')
        with open(f'out-{date.strftime("%Y-%m-%d")}.json', 'wt') as out:
            try:
                data = parse(connection, day)
                out.write(json.dumps(data, indent=4))
            except (CoroparseException, IOError) as e:
                # CoroparseException is project-specific; IOError covers write failures.
                print(f'Error "{type(e)}": {e}')

        date -= datetime.timedelta(days=1)

    connection.close()
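A minimal sketch of the config module that main() assumes; only the attribute names come from the snippet, every value is a placeholder:

# config.py (placeholder values; DAYS entries are hypothetical keys for parse())
MYSQL_HOST = 'localhost'
MYSQL_USER = 'reader'
MYSQL_PASSWORD = 'secret'
MYSQL_DB = 'corona'
DAYS = ['today', 'yesterday']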
Example #3
def ask(threshold, matcher, facts, message):
    """
    Ask a question.

    :type threshold: float
    :type matcher: app.match.Matcher
    :type facts: list of app.types.fact.Fact
    :type message: str
    """
    user_input, valid, validation_message = parse(message)
    if not valid:
        return {'type': 'invalid', 'message': validation_message}
    matches = matcher.input2matches(threshold, user_input)
    n_matches = len(matches)
    if n_matches == 0:
        # TODO: how to give detailed information on why no match was found?
        return {'type': 'no_match'}
    elif n_matches > 1:
        return {'type': 'select_match', 'matches': matches}
    else:
        selected_match = matches[0]
        question = match2question(selected_match)
        answer = reason(facts, question)
        reply = answer2message(answer)
        return {'type': 'answer', 'answer': reply}
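A hypothetical round trip, assuming matcher and facts were built elsewhere:

reply = ask(0.75, matcher, facts, 'is the sky blue')
if reply['type'] == 'answer':
    print(reply['answer'])
elif reply['type'] == 'select_match':
    print('please pick one of', len(reply['matches']), 'matches')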
Example #4
def prompt(prefix=None):
    """
    Prompt the user for input.

    Input is parsed.

    :type prefix: str
    :rtype: (str, bool, str)
    """
    user_input = prompt_without_parse(prefix=prefix)
    return parse(user_input)
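A sketch of a read-eval loop built on prompt(); the prefix and the exit command are illustrative:

while True:
    user_input, valid, message = prompt(prefix='> ')
    if user_input == 'quit':  # hypothetical exit command
        break
    if valid:
        print('parsed:', user_input)
    else:
        print('invalid:', message)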
Example #5
    def test_valid_csv(self):
        # Valid CSV Example
        valid_csv = '1,John Smith,Pittsburgh,PA,15224,42'

        p = parse(valid_csv)

        self.assertEqual(1, p.id)
        self.assertEqual('John Smith', p.name)
        self.assertEqual('Pittsburgh', p.city)
        self.assertEqual('PA', p.state)
        self.assertEqual(15224, p.zip_code)
        self.assertEqual(42, p.age)
def request_url(url, headers=None):
    data = list()
    try:
        response = requests.get(url, headers=headers).text
        data += re.findall(r'ssr?://\w+', response)
        soup = BeautifulSoup(response, 'html.parser')
        title = soup.find('title').text
        data = list(set(data))

        info = {'message': '', 'url': url, 'name': str(title)}
        servers = list()
        for i, server in enumerate(data):
            try:
                servers.append(parse(server, ' '.join([title, str(i)])))
            except Exception as e:
                print(e)
    except Exception as e:
        return [], {'message': str(e), 'url': '', 'name': ''}
    return servers, info
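A hypothetical call; the URL and header values are placeholders:

servers, info = request_url('https://example.com/free-ss.html',
                            headers={'User-Agent': 'Mozilla/5.0'})
print(info['name'], '-', len(servers), 'servers')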
def tell(threshold, matcher, memory, message):
    """
    Tell a fact.

    :type threshold: float
    :type matcher: app.match.Matcher
    :type memory: app.memories.memory.Memory
    :type message: str
    """
    user_input, valid, validation_message = parse(message)
    if not valid:
        return {'type': 'invalid', 'message': validation_message}
    matches = matcher.input2matches(threshold, user_input)
    n_matches = len(matches)
    if n_matches == 0:
        # TODO: how to give detailed information on why no match was found?
        return {'type': 'no_match'}
    else:
        facts = list(map(match2fact, matches))
        memory.store(facts)
        return {'type': 'stored'}
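A hypothetical call mirroring the ask() example above; memory is assumed to expose store():

result = tell(0.75, matcher, memory, 'the sky is blue')
print(result['type'])  # 'stored', 'no_match', or 'invalid'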
def request_url(url, headers=None):
    print('req', url)

    data = set()
    servers = list()
    try:
        response = requests.get(url, headers=headers).text
        # Pull ss:// and ssr:// links from the page, stripping stray whitespace.
        data.update(re.sub(r'\s', '', link)
                    for link in re.findall(r'ssr?://[a-zA-Z0-9=]+', response))
        soup = BeautifulSoup(response, 'html.parser')
        title = soup.find('title').text

        info = {'message': '', 'url': url, 'name': str(title)}
        for i, server in enumerate(data):
            try:
                servers.append(parse(server, ' '.join([title, str(i)])))
            except (IndexError, ValueError) as e:
                logging.exception(e, stack_info=False)
                print('URL:', url, 'SERVER', server)
    except Exception as e:
        logging.exception(e, stack_info=True)
        return [], {'message': str(e), 'url': '', 'name': ''}
    return servers, info
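A quick check of the link pattern this variant extracts; the sample links are made up:

import re

html = 'link ss://YWVzLTI1Ni1jZmI= and ssr://b2Jmcw== here'
print(re.findall(r'ssr?://[a-zA-Z0-9=]+', html))
# ['ss://YWVzLTI1Ni1jZmI=', 'ssr://b2Jmcw==']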
Example #10
    def test_invalid_zip_code(self):
        csv_input = '1,John Smith,Pittsburgh,PA,15224-15232,42'

        p = parse(csv_input)
        self.assertIsNone(p, 'Zip code range should be rejected')
Example #11
    def test_invalid_number_of_cols(self):
        csv_input = '1,extra_col,John Smith,Pittsburgh,PA,15224,42'

        p = parse(csv_input)
        self.assertIsNone(p)
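A minimal sketch of a parse() that would satisfy the three CSV tests above; the Person container and the field order are inferred from the assertions:

from dataclasses import dataclass


@dataclass
class Person:
    # Field names taken from the test assertions; the class itself is assumed.
    id: int
    name: str
    city: str
    state: str
    zip_code: int
    age: int


def parse(line):
    fields = line.split(',')
    if len(fields) != 6:  # wrong number of columns -> reject
        return None
    try:
        return Person(int(fields[0]), fields[1], fields[2], fields[3],
                      int(fields[4]), int(fields[5]))
    except ValueError:  # e.g. '15224-15232' is not a plain integer zip code
        return None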