def main():
    s = initSession()
    r = s.get('http://game.granbluefantasy.jp/{}/bookmaker/content/top'.format(teamraid))
    try:
        res = r.json()

        soup = BeautifulSoup(urllib.parse.unquote(res['data']), 'html.parser')
        data = {
            'north': int(soup.find('div', class_='lis-area area1').div.decode_contents().replace(',', '')),
            'west': int(soup.find('div', class_='lis-area area2').div.decode_contents().replace(',', '')),
            'east': int(soup.find('div', class_='lis-area area3').div.decode_contents().replace(',', '')),
            'south': int(soup.find('div', class_='lis-area area4').div.decode_contents().replace(',', '')),
            'time': int(time.time())
        }

        print('Fetched data successfully:', data)
        result = bookmaker.insert_one(data)
        print('Saved data successfully')
    except pymongo.errors.ConnectionFailure:
        print('Fetched data, but could not save it')
    except Exception:
        print(r.text)
        print('Failed to save data; check the output above')
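These snippets all lean on module-level context that the excerpt omits: initSession, teamraid, db, bookmaker, and cron_dir. A minimal sketch of that context, assuming a cookie-authenticated requests.Session and a local MongoDB; the event slug 'teamraid042', the database name 'gbf', and the cookie-restoring body of initSession are assumptions for illustration, not taken from the source:

import grequests  # import first: it monkey-patches sockets via gevent
import html
import json
import time
import urllib.parse
from pathlib import Path

import pymongo
import requests
from bs4 import BeautifulSoup

teamraid = 'teamraid042'  # assumed event slug; changes every event
cron_dir = Path(__file__).resolve().parent  # assumed location of cookie.json
client = pymongo.MongoClient('mongodb://localhost:27017')  # assumed local MongoDB
db = client['gbf']  # assumed database name
bookmaker = db.get_collection('bookmaker')


def initSession():
    # Assumed helper: restore saved game cookies into a fresh session.
    s = requests.Session()
    with open(str(cron_dir / 'cookie.json'), encoding='utf8') as f:
        for name, value in json.load(f).items():
            s.cookies.set(name, value)
    return s

Example #3 below writes the session's refreshed cookies back to cron_dir / 'cookie.json', which is what this sketch of initSession reads on the next run.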
Example #2
def main():
    s = initSession()
    urls = ['http://game.granbluefantasy.jp/{teamraid}/ranking_guild/detail/{index}/0'.format(teamraid=teamraid, index=index) for index in range(1, 2017)]
    rs = (grequests.get(u, session=s, stream=False) for u in urls)
    results = grequests.map(rs, size=40)
    now = int(time.time())
    c = db.get_collection(teamraid)
    for r in results:
        try:
            res = r.json()
            for item in res['list']:
                item['time'] = now
                item['name'] = html.unescape(item['name'])
            c.insert_many(res['list'])
        except Exception:
            print(r.text)
            raise
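grequests.map(rs, size=40) runs the 2016 guild-detail requests on gevent greenlets, at most 40 in flight at a time, all through the one authenticated session. grequests.map returns None in place of any request that failed at the transport level, so a guard like the following variant of the loop above (the None check is an addition, not in the source) avoids an AttributeError on r.json():

    for r in results:
        if r is None:  # transport-level failure: grequests.map yields None
            continue
        try:
            res = r.json()
            for item in res['list']:
                item['time'] = now
                item['name'] = html.unescape(item['name'])
            c.insert_many(res['list'])
        except Exception:
            print(r.text)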
Example #3
def main():
    s = initSession()
    r = s.get('http://game.granbluefantasy.jp/{}/bookmaker/content/top'.format(teamraid))
    with open(str(cron_dir / 'cookie.json'), 'w+', encoding='utf8') as f:
        json.dump({o.name: o.value for o in s.cookies}, f)
    try:
        res = r.json()

        soup = BeautifulSoup(urllib.parse.unquote(res['data']), 'html.parser')
        data = {
            'north': int(soup.find('div', class_='lis-area area1').div.decode_contents().replace(',', '')),
            'west': int(soup.find('div', class_='lis-area area2').div.decode_contents().replace(',', '')),
            'east': int(soup.find('div', class_='lis-area area3').div.decode_contents().replace(',', '')),
            'south': int(soup.find('div', class_='lis-area area4').div.decode_contents().replace(',', '')),
            'time': int(time.time())
        }

        result = bookmaker.insert_one(data)
        print(data)
        print(result)
    except Exception:
        print(r.text)
Example #4
def main():
    s = initSession()

    urls = [
        'http://game.granbluefantasy.jp/{teamraid}/rest_ranking_user/detail/{index}/0'
        .format(teamraid=teamraid, index=index) for index in range(1, 12001)
    ]
    rs = (grequests.get(u, session=s, stream=False) for u in urls)
    results = grequests.map(rs, size=20)
    now = int(time.time())
    c = db.get_collection('individual')
    for r in results:
        try:
            res = r.json()
            for item in res['list'].values():
                item['time'] = now
            c.insert_many(list(res['list'].values()))
        except Exception:
            print(r.text)
Example #5
def main():
    s = initSession()

    urls = [
        'http://game.granbluefantasy.jp/{teamraid}/rest_ranking_user/detail/{index}/0'
        .format(teamraid=teamraid, index=index) for index in range(1, 12001)
    ]
    rs = (grequests.get(u, session=s, stream=False) for u in urls)
    results = grequests.map(rs, size=5)
    now = int(time.time())
    c = db.get_collection('{}_individual'.format(teamraid))
    for r in results:
        try:
            res = r.json()
            for item in res['list'].values():
                item['time'] = now
                item['point'] = int(item['point'])
                # Append this crawl's snapshot to the player's history, keyed by timestamp
                c.update_one(
                    {'_id': int(item['user_id'])},
                    {'$set': {'history.' + str(now): {'point': item['point'], 'rank': item['rank']}}},
                    upsert=True)
                # Also track selected milestone ranks under their own sentinel documents
                if int(item['rank']) in [1000, 30000, 50000, 70000, 120000]:
                    c.update_one(
                        {'_id': 'rank_{}'.format(item['rank'])},
                        {'$set': {'history.' + str(now): {'point': item['point'], 'rank': item['rank']}}},
                        upsert=True)
        except Exception:
            print(r.text)
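Each upsert in the loop above keys the document by user_id (or by a 'rank_<N>' sentinel for the milestone ranks 1000/30000/50000/70000/120000) and files one snapshot per crawl under 'history.<timestamp>', so a document accumulates a time series across runs. The resulting shape, with purely illustrative values:

{
    '_id': 10000001,  # int(item['user_id'])
    'history': {
        '1540000000': {'point': 123456789, 'rank': '2345'},
        '1540003600': {'point': 130123456, 'rank': '2298'}
    }
}

Note that 'point' is cast to int before the write, while 'rank' is stored as the string the API returns.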
Example #6
def main():
    s = initSession()
    r = s.get('http://game.granbluefantasy.jp/quest/content/scene')
    print(r.text)