Example #1
    def test_get_albums_success(self):
        test_album = Album(title=self.test_album['title'],
                           band_id=self.test_album['band_id'])
        test_album.insert()

        res = self.client().get(
            '/albums',
            headers={"Authorization": "Bearer {}".format(self.manager)})

        self.assertEqual(res.status_code, 200)
        test_album.delete()
Example #2
    def test_delete_album_success(self):
        new_album = Album(title=self.test_album['title'],
                          band_id=self.test_album['band_id'])
        new_album.insert()

        res = self.client().delete(
            '/albums/{}'.format(new_album.id),
            headers={"Authorization": "Bearer {}".format(self.manager)})
        data = json.loads(res.data)
        self.assertEqual(res.status_code, 200)
        self.assertTrue(data['success'])
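Both test examples above rely on fixtures that are not shown: self.client, self.test_album and self.manager (a manager-role JWT). A minimal setUp sketch under those assumptions (create_app, setup_db and MANAGER_TOKEN are placeholders, not taken from the original project) might look like:

import os
import unittest

from app import create_app                  # assumed application factory
from models import setup_db, Album          # assumed model module


class AlbumTestCase(unittest.TestCase):
    def setUp(self):
        self.app = create_app()
        self.client = self.app.test_client   # tests call self.client()
        setup_db(self.app)
        self.test_album = {'title': 'Test Album', 'band_id': 1}
        self.manager = os.environ.get('MANAGER_TOKEN')   # manager-role JWT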
Example #3
    def post_new_album(payload):
        # takes a JSON object with new album to add to database
        body = request.get_json()

        title = body.get('title', None)
        band_id = body.get('band_id', None)

        if title is None:
            abort(400)
        if band_id is None:
            abort(400)

        album = Album(title=title, band_id=band_id)
        album.insert()
        new_album = Album.query.get(album.id).format()

        return jsonify({
            'success': True,
            'album': new_album
        })
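The handler above is only the view body; the route registration and the auth decorator that supplies payload are not shown. A plausible wiring, assuming a Flask app object, a requires_auth decorator and a 'post:albums' permission (all assumptions, not from the original example):

# Hypothetical wiring; app, requires_auth and 'post:albums' are assumptions.
@app.route('/albums', methods=['POST'])
@requires_auth('post:albums')
def post_new_album(payload):
    ...  # body as in the example above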
Example #4
    def post_album(jwt):
        # Process request data
        data = request.get_json()
        title = data.get('title', None)
        year = data.get('year', None)
        artist = data.get('artist', None)

        # return 400 for empty title or year or artist
        if title is None or year is None or artist is None:
            abort(400)

        try:
            album = Album(title=title, year=year, artist=artist)
            album.insert()
            return jsonify({
                'success': True,
                'album': album.format()
            }), 201
        except Exception:
            abort(500)
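Both endpoint examples use abort(400) and abort(500); for the API to return JSON errors shaped like the success payloads, Flask error handlers are typically registered as well. A minimal sketch (assumed, not part of the original examples):

# Hypothetical error handlers so abort(400)/abort(500) return JSON responses.
@app.errorhandler(400)
def bad_request(error):
    return jsonify({'success': False, 'error': 400,
                    'message': 'bad request'}), 400


@app.errorhandler(500)
def server_error(error):
    return jsonify({'success': False, 'error': 500,
                    'message': 'internal server error'}), 500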
Example #5
def get_album_summary(album_id, uid=crawler.uid):
    resp = crawler.get_url(
        config.ALBUM_SUMMARY_URL.format(uid=uid, album_id=album_id))
    first_photo_id = re.findall(r'"photoId":"(\d+)",', resp.text)[0]

    layer = crawler.get_json(
        config.PHOTO_INFO_URL.format(uid=uid, photo_id=first_photo_id))

    cover = layer['album']['fullLargeUrl']
    if not cover or cover == "http://img.xiaonei.com/photos/0/0/large.jpg":
        cover = layer['list'][0]['large']

    album = {
        'id': album_id,
        'uid': uid,
        'name': layer['album']['name'],
        'desc': layer['album']['description'],
        'cover': get_image(cover),
        'count': layer['album']['photoCount'],
        'comment': layer['album']['commentcount'],
        'share': layer['album']['shareCount'],
        'like': get_likes(album_id, 'album')
    }
    Album.insert(**album).on_conflict('replace').execute()
    if album['comment']:
        get_comments(album_id, 'album', owner=uid)
    if album['comment'] or album['share']:
        get_comments(album_id, 'album', global_comment=True, owner=uid)

    print(
        u'    fetch album {album_id} {name} ({desc}), {comment}/{share}/{like}'
        .format(album_id=album_id,
                name=album['name'],
                desc=album['desc'],
                comment=album['comment'],
                share=album['share'],
                like=album['like']))

    photo_list = layer['list']
    photo_count = len(photo_list)
    for idx, p in enumerate(photo_list):
        id = int(p['id'])
        date_str = p['date'] if config.py3 else p['date'].encode('utf8')
        photo = {
            'id': id,
            'uid': uid,
            'album_id': album_id,
            'pos': idx,
            'prev': int(photo_list[idx - 1]['id']),
            'next': int(photo_list[idx - photo_count + 1]['id']),
            't': datetime.strptime(date_str, '%Y年%m月%d日'),
            'title': p['title'],
            'src': get_image(p['large']),
            'comment': p['commentCount'],
            'share': p['shareCount'],
            'like': get_likes(id, 'photo'),
            'view': p['viewCount']
        }
        Photo.insert(**photo).on_conflict('replace').execute()
        if photo['comment']:
            get_comments(id, 'photo', owner=uid)
        if photo['comment'] or photo['share']:
            get_comments(id, 'photo', global_comment=True, owner=uid)

        print(u'      photo {id}: {title}, {comment}/{share}/{like}/{view}'.
              format(id=id,
                     title=p['title'][:24],
                     comment=photo['comment'],
                     share=photo['share'],
                     like=photo['like'],
                     view=photo['view']))

    return album['count']
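The prev/next fields in Example #5 are linked with a small wrap-around trick: photo_list[idx - 1] points at the last photo when idx is 0, and photo_list[idx - photo_count + 1] is equivalent to photo_list[(idx + 1) % photo_count], so the first and last photos reference each other. A tiny illustration:

# Illustration of the circular prev/next indexing used above.
photo_ids = [11, 22, 33]                     # pretend photo ids in album order
count = len(photo_ids)
for idx in range(count):
    prev_id = photo_ids[idx - 1]             # idx 0 wraps to the last photo
    next_id = photo_ids[idx - count + 1]     # last idx wraps to the first photo
    print(idx, prev_id, next_id)
# prints: 0 33 22 / 1 11 33 / 2 22 11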
Example #6
def get_album_summary(album_id, uid=crawler.uid):
    album_data = crawler.get_json(config.ALBUM_SUMMARY_URL,
                                  json_=get_album_payload(uid, album_id),
                                  method="POST")
    photo_list = album_data["data"]

    album = {
        "id": album_id,
        "uid": uid,
        "name": album_data["album"]["name"],
        "desc": "",  # album['album']['description'],
        "cover": get_image(album_data["album"]["thumb_url"]),
        "count": album_data["album"]["size"],
        "comment": 0,  # layer['album']['commentcount'],
        "share": 0,  # layer['album']['shareCount'],
        "like": 0,  # get_likes(album_id, 'album')
    }
    Album.insert(**album).on_conflict("replace").execute()

    try:
        logger.info(
            "    fetch album {album_id} {name} ({desc}), 评{comment}/分{share}/赞{like}"
            .format(
                album_id=album_id,
                name=album["name"],
                desc=album["desc"],
                comment=album["comment"],
                share=album["share"],
                like=album["like"],
            ))
    except UnicodeEncodeError:
        logger.info(
            "    fetch album {album_id}, comment{comment}/share{share}/like{like}"
            .format(
                album_id=album_id,
                comment=album["comment"],
                share=album["share"],
                like=album["like"],
            ))

    while True:
        album_data = crawler.get_json(
            config.ALBUM_SUMMARY_URL,
            json_=get_album_payload(uid, album_id,
                                    after=album_data["tail_id"]),
            method="POST",
        )
        if "count" not in album_data:
            break
        photo_list.extend(album_data["data"])

    # Some URLs are invalid because the domain name is missing; fix those.
    def maybe_fix_url(url):
        if url.startswith("//"):
            return "http://fmn.rrfmn.com/" + url
        return url

    photo_count = len(photo_list)
    for idx, p in enumerate(photo_list):
        pid = int(p["id"])
        photo = {
            "id": pid,
            "uid": uid,
            "album_id": album_id,
            "pos": idx,
            "prev": int(photo_list[idx - 1]["id"]),
            "next": int(photo_list[idx - photo_count + 1]["id"]),
            "t": datetime.fromtimestamp(p["create_time"] // 1000),
            "title": "",  # p['title'],
            "src": get_image(maybe_fix_url(p["large_url"])),
            "comment": 0,  # p['commentCount'],
            "share": 0,  # p['shareCount'],
            "like": 0,  # get_likes(pid, 'photo'),
            "view": 0,  # p['viewCount']
        }
        Photo.insert(**photo).on_conflict("replace").execute()

        try:
            logger.info(
                "      photo {pid}: {title}, 评{comment}/分{share}/赞{like}/看{view}"
                .format(
                    pid=pid,
                    title=photo["title"][:24],
                    comment=photo["comment"],
                    share=photo["share"],
                    like=photo["like"],
                    view=photo["view"],
                ))
        except UnicodeEncodeError:
            logger.info(
                "      photo {pid}, comment{comment}/share{share}/like{like}/view{view}"
                .format(
                    pid=pid,
                    comment=photo["comment"],
                    share=photo["share"],
                    like=photo["like"],
                    view=photo["view"],
                ))

    return photo_count
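A small detail in Example #6: create_time appears to be a millisecond epoch timestamp, hence the integer division by 1000 before it is handed to datetime.fromtimestamp. For instance:

from datetime import datetime

create_time = 1262304000000                      # milliseconds since the epoch
print(datetime.fromtimestamp(create_time // 1000))
# 2010-01-01 00:00:00 if the local timezone is UTC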