def upgrade():
    """Migration step: detach releases from tvshow MetaBlack entries
    whose status is IMPOSSIBLE, so those releases can be re-processed."""
    with db_session() as db:
        # Implicit join: the two filters correlate Release rows to the
        # MetaBlack row their tvshow_metablack_id points at.
        # synchronize_session='fetch' keeps in-session objects consistent.
        db.query(Release).\
            filter(Release.tvshow_metablack_id == MetaBlack.id).\
            filter(MetaBlack.status=='IMPOSSIBLE').\
            update({Release.tvshow_metablack_id: None}, synchronize_session='fetch')
        db.commit()
Beispiel #2
0
def remove_group(group_name):
    """Delete the group named *group_name*.

    Returns True when a row was removed (and commits), False otherwise.
    """
    with db_session() as db:
        # Query.delete() reports how many rows it removed.
        removed = db.query(Group).filter(Group.name == group_name).delete()
        if not removed:
            return False
        db.commit()
        return True
Beispiel #3
0
def convert_users(mysql):
    """Converts Newznab users table into Pynab format. More or less
    of this may be necessary depending on what people want. I'm pretty
    much just after bare API access, so we only really need rsstoken."""

    print('Converting users...')

    with db_session() as db:

        # Row layout: 0=username, 1=email, 2=password, 3=rsstoken,
        # 4=userseed, 5=grabs
        from_query = """
            SELECT username, email, password, rsstoken, userseed, grabs
            FROM users
            ORDER BY id;
        """

        cursor = mysql.cursor()
        cursor.execute(from_query)

        # wipe existing users before importing
        db.query(User).delete()

        for r in cursor.fetchall():
            # only email, rsstoken (stored as api_key) and grabs are kept
            u = User(
                email=r[1],
                api_key=r[3],
                grabs=r[5]
            )
            db.add(u)
        # NOTE(review): no explicit commit here — presumably db_session()
        # commits on exit; confirm against pynab.db
Beispiel #4
0
def convert_imdb(mysql):
    """Converts Newznab imdb table into Pynab format."""

    print('Converting imdb...')

    with db_session() as db:
        # Row layout: 0=imdbID, 1=title, 2=year, 3=language, 4=genre
        from_query = """
            SELECT imdbID, title, year, language, genre
            FROM movieinfo
            WHERE imdbID > 0
            ORDER BY imdbID
        """

        cursor = mysql.cursor()
        cursor.execute(from_query)

        for r in cursor.fetchall():
            if not r[2]:
                # Blank years do not work, skip them
                continue
            # only insert movies we don't already have
            movie = db.query(Movie).filter(Movie.id==str(r[0])).first()
            if not movie:
                # language (r[3]) is intentionally not carried over
                movie = Movie(
                    id=r[0],
                    name=r[1],
                    year=r[2],
                    genre=r[4]
                )
                db.add(movie)
Beispiel #5
0
def convert_groups(mysql):
    """Converts Newznab groups table into Pynab. Only really
    copies backfill records and status."""
    # removed minsize/minfiles, since we're not really using them
    # most of the groups I index don't come up with too many stupid
    # releases, so if anyone has problem groups they can re-add it

    print('Converting groups...')

    with db_session() as db:
        from_query = """
            SELECT name, first_record, last_record, active
            FROM groups;
        """
        cur = mysql.cursor()
        cur.execute(from_query)

        for name, first_record, last_record, active in cur.fetchall():
            # update an existing group in place, or create a new one
            group = db.query(Group).filter(Group.name == name).first()
            if group is None:
                group = Group(name=name)

            group.first = first_record
            group.last = last_record
            group.active = bool(active)

            db.add(group)
Beispiel #6
0
def get_nzb(dataset=None):
    """API endpoint: serve a release's gzip-compressed NZB and count the grab.

    Looks the release up by the ``guid`` (or ``id``) query parameter,
    increments grab counters on both the release and the user, sets the
    DNZB response headers, and returns the decompressed NZB payload.
    Returns an api_error document on auth failure (100), missing id (200)
    or unknown release (300).
    """
    user = auth()
    if not user:
        return api_error(100)

    id = request.query.guid or None
    if not id:
        id = request.query.id or None
    if not id:
        return api_error(200)

    with db_session() as db:
        # .first() rather than .one(): .one() raises NoResultFound for an
        # unknown id, which made the api_error(300) branch unreachable and
        # surfaced as a server error instead of an API error response.
        release = db.query(Release).join(NZB).join(Category).filter(Release.id == id).first()
        if not release:
            return api_error(300)

        release.grabs += 1
        user.grabs += 1
        db.merge(release)
        db.merge(user)
        db.commit()

        data = release.nzb.data
        response.set_header('Content-type', 'application/x-nzb-compressed-gzip')
        response.set_header('X-DNZB-Name', release.search_name)
        response.set_header('X-DNZB-Category', release.category.name)
        response.set_header('Content-Disposition', 'attachment; filename="{0}"'
                            .format(release.search_name.replace(' ', '_') + '.nzb.gz')
        )
        return gzip.decompress(data)
Beispiel #7
0
def remove_group(group_name):
    """Delete the group with the given name.

    Returns True if a matching row was deleted (and commits the
    deletion), False when no group matched.
    """
    with db_session() as db:
        # Query.delete() returns the number of rows removed.
        deleted = db.query(Group).filter(Group.name == group_name).delete()
        if deleted:
            db.commit()
            return True
    return False
Beispiel #8
0
def convert_groups(mysql):
    """Converts Newznab groups table into Pynab. Only really
    copies backfill records and status."""
    # removed minsize/minfiles, since we're not really using them
    # most of the groups I index don't come up with too many stupid
    # releases, so if anyone has problem groups they can re-add it

    print('Converting groups...')

    with db_session() as db:
        # Row layout: 0=name, 1=first_record, 2=last_record, 3=active
        from_query = """
            SELECT name, first_record, last_record, active
            FROM groups;
        """
        cursor = mysql.cursor()
        cursor.execute(from_query)

        for r in cursor.fetchall():
            # upsert: update an existing group or create a new one
            g = db.query(Group).filter(Group.name == r[0]).first()
            if not g:
                g = Group(name=r[0])

            g.first = r[1]
            g.last = r[2]
            g.active = bool(r[3])

            db.add(g)
Beispiel #9
0
def details(dataset=None):
    """API endpoint: render the detail page for a single release.

    Requires a valid API key and an ``id`` query parameter. Returns the
    rendered Mako template, None if rendering fails, or an api_error
    document (100 auth failure, 200 missing id, 300 unknown release).
    """
    if not auth():
        return api_error(100)
    if not request.query.id:
        return api_error(200)

    with db_session() as db:
        release = db.query(Release).filter(
            Release.id == request.query.id).first()
        if not release:
            return api_error(300)

        dataset['releases'] = [release]
        dataset['detail'] = True
        dataset['api_key'] = request.query.apikey

        try:
            tmpl = Template(filename=os.path.join(
                root_dir, 'templates/api/result.mako'))
            return tmpl.render(**dataset)
        except Exception:
            # narrowed from a bare except: don't swallow SystemExit or
            # KeyboardInterrupt while still logging template failures
            log.error('Failed to deliver page: {0}'.format(
                exceptions.text_error_template().render()))
            return None
Beispiel #10
0
def rename_pre_releases():
    """Rename releases to match their associated pre entry.

    Finds releases whose name or search_name disagrees with their linked
    Pre record, copies the pre's names across, re-categorises each one,
    logs the rename, and commits once at the end.
    """
    count = 0

    with db_session() as db:
        # noinspection PyComparisonWithNone
        query = db.query(Release).filter(Release.pre_id != None)
        query = query.outerjoin(
            Pre,
            Pre.id == Release.pre_id).filter((Release.name != Pre.name) | (
                Release.search_name != Pre.searchname))

        for release in query.all():
            # capture the old name before overwriting it, so the log line
            # shows old -> new (previously both sides printed the new name)
            old_search_name = release.search_name

            release.name = release.pre.name
            release.search_name = release.pre.searchname
            release.category_id = pynab.categories.determine_category(
                release.search_name, release.group.name)

            db.merge(release)

            count += 1
            log.info('rename: [{}] -> [{}]'.format(old_search_name,
                                                   release.pre.searchname))

        # commit while the session is still open — previously this ran
        # after the with-block had already closed the session
        db.commit()

    log.info('rename: successfully renamed {} releases'.format(count))
def upgrade():
    """Migration step: null out tvshow metablack references for releases
    blocked by an IMPOSSIBLE MetaBlack entry."""
    with db_session() as db:
        # correlate releases to their IMPOSSIBLE MetaBlack rows, then
        # clear the reference in bulk
        query = db.query(Release)
        query = query.filter(Release.tvshow_metablack_id == MetaBlack.id)
        query = query.filter(MetaBlack.status == 'IMPOSSIBLE')
        query.update({Release.tvshow_metablack_id: None}, synchronize_session='fetch')
        db.commit()
Beispiel #12
0
    def test_load_and_categorise(self):
        """Evaluate the pickled categoriser against every release in the DB,
        printing per-chunk progress and a final accuracy figure."""
        from pynab.db import db_session, Release, Group, windowed_query
        from pickle import load

        # NOTE(review): unpickling executes arbitrary code — only safe for
        # a locally-produced model file
        with open('release_categoriser.pkl', 'rb') as cat_file:
            categoriser = load(cat_file)

        with db_session() as db:
            errors = []
            i = 0
            query = db.query(Release).join(Group)
            count = query.count()
            # windowed_query iterates in chunks of 500 to bound memory use
            for result in windowed_query(query, Release.id, 500):
                features = extract_features(result.name)
                features['group'] = result.group.name
                features['name'] = result.name

                # a guess counts as correct if its first two digits (the
                # parent category) match the stored category id
                guess = categoriser.classify(features)
                if guess[:2] != str(result.category_id)[:2]:
                    errors.append((result.category_id, guess, features))

                i += 1
                if i % 500 == 0:
                    # NOTE(review): first placeholder prints progress as a
                    # percentage but is formatted with a plain {} while the
                    # accuracy uses {:.3f}% — looks swapped; confirm intent
                    print('{} - {:.3f}%'.format((i/count)*100, (1 - (len(errors) / i)) * 100))

        for tag, guess, features in errors:
            print('correct={} guess={} name={}'.format(tag, guess, features['name'].encode('utf-8')))

        print('accuracy={}'.format(1 - (len(errors)/i)))
Beispiel #13
0
def local_postprocess():
    """Purge IMPOSSIBLE movie/tvshow MetaBlack rows, then re-run the
    offline movie and TV id processors so those releases get retried."""
    with db_session() as db:
        # noinspection PyComparisonWithNone,PyComparisonWithNone
        blacklisted = db.query(MetaBlack).filter(MetaBlack.status=='IMPOSSIBLE')
        blacklisted = blacklisted.filter((MetaBlack.movie!=None)|(MetaBlack.tvshow!=None))
        blacklisted.delete(synchronize_session=False)

    pynab.ids.process('movie', online=False)
    pynab.ids.process('tv', online=False)
Beispiel #14
0
def process(limit=None, category=0):
    """Process releases for NFO parts and download them.

    :param limit: process at most this many releases (newest first);
        all matching releases when falsy
    :param category: restrict to this category id when non-zero
    """

    with Server() as server:
        with db_session() as db:
            # releases with no NFO yet and no metablack blocking the lookup
            # noinspection PyComparisonWithNone,PyComparisonWithNone
            query = db.query(Release).join(Group).join(NZB).filter(
                Release.nfo == None).filter(Release.nfo_metablack_id == None)
            if category:
                query = query.filter(Release.category_id == int(category))

            if limit:
                releases = query.order_by(Release.posted.desc()).limit(limit)
            else:
                releases = query.order_by(Release.posted.desc()).all()

            for release in releases:
                found = False
                nzb = pynab.nzbs.get_nzb_details(release.nzb)

                if nzb:
                    # collect candidate nfo segments small enough to fetch
                    nfos = []
                    for nfo in nzb['nfos']:
                        for part in nfo['segments']:
                            if int(part['size']) > NFO_MAX_FILESIZE:
                                continue
                            nfos.append(part)

                    # try each candidate until one downloads successfully
                    for nfo in nfos:
                        try:
                            article = server.get(release.group.name, [
                                nfo['message_id'],
                            ])
                        except Exception as e:
                            # if usenet's not accessible, don't block it forever
                            log.error('nfo: unable to get nfo: {}'.format(e))
                            continue

                        if article:
                            # store the nfo gzip-compressed and attach it
                            data = gzip.compress(article.encode('utf-8'))
                            nfo = NFO(data=data)
                            db.add(nfo)

                            release.nfo = nfo
                            release.nfo_metablack_id = None
                            db.add(release)

                            log.debug('nfo: [{}] - nfo added'.format(
                                release.search_name))
                            found = True
                            break

                    if not found:
                        # blacklist so this release isn't retried forever
                        log.debug(
                            'nfo: [{}] - [{}] - no nfos in release'.format(
                                release.id, release.search_name))
                        mb = MetaBlack(nfo=release, status='IMPOSSIBLE')
                        db.add(mb)
                db.commit()
Beispiel #15
0
def process(limit=None, category=0):
    """Process releases for NFO parts and download them.

    :param limit: process at most this many releases (newest first);
        all matching releases when falsy
    :param category: restrict to this category id when non-zero
    """

    with Server() as server:
        with db_session() as db:
            # releases with no NFO yet and no metablack blocking the lookup
            # noinspection PyComparisonWithNone,PyComparisonWithNone
            query = db.query(Release).join(Group).join(NZB).filter(Release.nfo == None).filter(
                Release.nfo_metablack_id == None)
            if category:
                query = query.filter(Release.category_id == int(category))

            if limit:
                releases = query.order_by(Release.posted.desc()).limit(limit)
            else:
                releases = query.order_by(Release.posted.desc()).all()

            for release in releases:
                found = False
                nzb = pynab.nzbs.get_nzb_details(release.nzb)

                if nzb:
                    # collect candidate nfo segments small enough to fetch
                    nfos = []
                    for nfo in nzb['nfos']:
                        for part in nfo['segments']:
                            if int(part['size']) > NFO_MAX_FILESIZE:
                                continue
                            nfos.append(part)

                    # try each candidate until one downloads successfully
                    for nfo in nfos:
                        try:
                            article = server.get(release.group.name, [nfo['message_id'], ])
                        except Exception as e:
                            # if usenet's not accessible, don't block it forever
                            log.error('nfo: unable to get nfo: {}'.format(e))
                            continue

                        if article:
                            # store the nfo gzip-compressed and attach it
                            data = gzip.compress(article.encode('utf-8'))
                            nfo = NFO(data=data)
                            db.add(nfo)

                            release.nfo = nfo
                            release.nfo_metablack_id = None
                            db.add(release)

                            log.debug('nfo: [{}] - nfo added'.format(
                                release.search_name
                            ))
                            found = True
                            break

                    if not found:
                        # blacklist so this release isn't retried forever
                        log.debug('nfo: [{}] - [{}] - no nfos in release'.format(
                            release.id,
                            release.search_name
                        ))
                        mb = MetaBlack(nfo=release, status='IMPOSSIBLE')
                        db.add(mb)
                db.commit()
Beispiel #16
0
def group_list():
    """Return all Group rows ordered by name.

    Returns a list of Group objects.
    """
    with db_session() as db:
        # Query.all() materialises the result list directly; the manual
        # append loop was redundant.
        return db.query(Group).order_by(Group.name).all()
Beispiel #17
0
def group_list():
    """Return every Group row, ordered alphabetically by name."""
    with db_session() as db:
        ordered = db.query(Group).order_by(Group.name)
        # materialise the query into a plain list for the caller
        return [grp for grp in ordered]
Beispiel #18
0
    def test_search_releases(self):
        """Smoke-test full-text release search: query a phrase and print
        the top hit's search_name."""
        from sqlalchemy_searchable import search
        from pynab.db import Release

        with db_session() as db:
            q = db.query(Release)
            # sqlalchemy_searchable full-text search over the releases query
            q = search(q, 'engaged e06')
            print(q.first().search_name)
Beispiel #19
0
    def test_nzb_parse(self):
        """Smoke-test NZB parsing: load NZB id 1 and pretty-print the
        parsed details."""
        import pynab.nzbs
        from pynab.db import NZB

        with db_session() as db:
            # .one() raises if NZB id 1 is absent — acceptable in a test
            nzb = db.query(NZB).filter(NZB.id==1).one()
            import pprint
            pprint.pprint(pynab.nzbs.get_nzb_details(nzb))
Beispiel #20
0
def info(email):
    """Information about a specific email."""
    with db_session() as db:
        # look the user up by email; None when absent
        match = db.query(User).filter(User.email == email).first()
        if match is None:
            return None
        return [match.email, match.api_key, match.grabs]
Beispiel #21
0
 def get_categories(self):
     """Return a mapping of category id -> category name.

     Loads the mapping from the database on first call and caches it on
     the instance; subsequent calls return the cached dict.
     """
     if self.categories:
         return self.categories
     else:
         self.categories = {}
         with db_session() as db:
             for category in db.query(Category).all():
                 self.categories[int(category.id)] = category.name
         return self.categories
Beispiel #22
0
def local_postprocess():
    """Remove IMPOSSIBLE movie/tvshow MetaBlack entries, then re-run the
    offline movie and TV id processors so those releases get retried."""
    with db_session() as db:
        # bulk delete without syncing the in-memory session — the session
        # closes right after, so stale objects don't matter
        # noinspection PyComparisonWithNone,PyComparisonWithNone
        db.query(MetaBlack).filter(MetaBlack.status == 'IMPOSSIBLE').filter((
            MetaBlack.movie != None) | (MetaBlack.tvshow != None)).delete(
                synchronize_session=False)

    pynab.ids.process('movie', online=False)
    pynab.ids.process('tv', online=False)
Beispiel #23
0
def list():
    """List all users.

    Returns a list of [email, api_key, grabs] triples ordered by email.

    NOTE: this function shadows the builtin ``list``; renaming it would
    break existing callers, so the name is kept.
    """
    with db_session() as db:
        users = db.query(User).order_by(User.email)
        # comprehension instead of a manual append loop
        return [[user.email, user.api_key, user.grabs] for user in users]
Beispiel #24
0
def auth():
    """Resolve the request's ``apikey`` query parameter to a User.

    Returns the matching User, or None when the key is missing or unknown.
    """
    api_key = request.query.apikey or ''

    with db_session() as db:
        # .first() already returns None when nothing matches, so the
        # previous explicit if/else branch was redundant
        return db.query(User).filter(User.api_key == api_key).first()
Beispiel #25
0
 def get_categories(self):
     """Return a mapping of category id -> category name, loaded from the
     database once and cached on the instance thereafter."""
     if self.categories:
         return self.categories
     else:
         self.categories = {}
         with db_session() as db:
             for category in db.query(Category).all():
                 self.categories[int(category.id)] = category.name
         return self.categories
Beispiel #26
0
def disable_group(group_name):
    """Mark the named group inactive.

    Returns True when the group exists (change committed), else False.
    """
    with db_session() as db:
        target = db.query(Group).filter(Group.name == group_name).first()
        if target is None:
            return False
        target.active = False
        db.add(target)
        db.commit()
        return True
Beispiel #27
0
def auth():
    """Resolve the request's ``apikey`` query parameter to a User.

    Returns the matching User object, or None when the key is missing
    or unknown.
    """
    api_key = request.query.apikey or ''

    with db_session() as db:
        user = db.query(User).filter(User.api_key == api_key).first()
        if user:
            return user
        else:
            return None
Beispiel #28
0
def disable_group(group_name):
    """Mark the named group inactive.

    Returns True when the group exists (change is committed),
    False when no group matches.
    """
    with db_session() as db:
        group = db.query(Group).filter(Group.name == group_name).first()
        if group:
            group.active = False
            db.add(group)
            db.commit()
            return True
    return False
Beispiel #29
0
def delete(email):
    """Deletes a user by email."""

    with db_session() as db:
        # Query.delete() reports the number of rows removed
        removed = db.query(User).filter(User.email == email).delete()
        if not removed:
            return False
        db.commit()
        return True
Beispiel #30
0
def reset_group(group_name):
    """Zero the named group's backfill markers.

    Returns True when the group exists (change committed), else False.
    """
    with db_session() as db:
        target = db.query(Group).filter(Group.name == group_name).first()
        if target is None:
            return False
        target.first = 0
        target.last = 0
        db.add(target)
        db.commit()
        return True
Beispiel #31
0
def reset_group(group_name):
    """Reset the named group's backfill markers (first/last) to zero.

    Returns True when the group exists (change is committed),
    False when no group matches.
    """
    with db_session() as db:
        group = db.query(Group).filter(Group.name == group_name).first()
        if group:
            group.first = 0
            group.last = 0
            db.add(group)
            db.commit()
            return True
    return False
Beispiel #32
0
def process(limit=None, category=0):
    """Process releases for SFV parts and download them.

    :param limit: process at most this many releases (newest first);
        all matching releases when falsy
    :param category: restrict to this category id when non-zero

    For each release lacking an SFV, fetch a small-enough SFV segment
    from usenet, gzip it and attach it; if none can be found, blacklist
    the release's SFV lookup with an IMPOSSIBLE MetaBlack entry.
    """

    with Server() as server:
        with db_session() as db:
            # noinspection PyComparisonWithNone,PyComparisonWithNone
            query = db.query(Release).join(Group).join(NZB).filter(Release.sfv == None).filter(
                Release.sfv_metablack_id == None)
            if category:
                query = query.filter(Release.category_id == int(category))
            if limit:
                releases = query.order_by(Release.posted.desc()).limit(limit)
            else:
                releases = query.order_by(Release.posted.desc()).all()

            for release in releases:
                found = False

                nzb = pynab.nzbs.get_nzb_details(release.nzb)
                if nzb:
                    # collect candidate sfv segments, skipping oversized parts
                    sfvs = []
                    for sfv in nzb['sfvs']:
                        for part in sfv['segments']:
                            if int(part['size']) > SFV_MAX_FILESIZE:
                                continue
                            sfvs.append(part)

                    for sfv in sfvs:
                        try:
                            article = server.get(release.group.name, [sfv['message_id'], ])
                        except Exception as e:
                            # narrowed from a bare except: don't swallow
                            # SystemExit/KeyboardInterrupt, and surface the
                            # failure before falling through to the next part
                            log.error('sfv: unable to get sfv: {}'.format(e))
                            article = None

                        if article:
                            data = gzip.compress(article.encode('utf-8'))
                            sfv = SFV(data=data)
                            db.add(sfv)

                            release.sfv = sfv
                            release.sfv_metablack_id = None
                            db.add(release)

                            log.info('sfv: [{}] - sfv added'.format(
                                release.search_name
                            ))
                            found = True
                            break

                    if not found:
                        # blacklist so this release isn't retried forever
                        log.debug('sfv: [{}] - no sfvs in release'.format(
                            release.search_name
                        ))
                        mb = MetaBlack(sfv=release, status='IMPOSSIBLE')
                        db.add(mb)
                db.commit()
Beispiel #33
0
def process(limit=None, category=0):
    """Process releases for SFV parts and download them.

    :param limit: process at most this many releases (newest first);
        all matching releases when falsy
    :param category: restrict to this category id when non-zero
    """

    with Server() as server:
        with db_session() as db:
            # releases with no SFV yet and no metablack blocking the lookup
            # noinspection PyComparisonWithNone,PyComparisonWithNone
            query = db.query(Release).join(Group).join(NZB).filter(
                Release.sfv == None).filter(Release.sfv_metablack_id == None)
            if category:
                query = query.filter(Release.category_id == int(category))
            if limit:
                releases = query.order_by(Release.posted.desc()).limit(limit)
            else:
                releases = query.order_by(Release.posted.desc()).all()

            for release in releases:
                found = False

                nzb = pynab.nzbs.get_nzb_details(release.nzb)
                if nzb:
                    # collect candidate sfv segments, skipping oversized parts
                    sfvs = []
                    for sfv in nzb['sfvs']:
                        for part in sfv['segments']:
                            if int(part['size']) > SFV_MAX_FILESIZE:
                                continue
                            sfvs.append(part)

                    for sfv in sfvs:
                        try:
                            article = server.get(release.group.name, [
                                sfv['message_id'],
                            ])
                        except:
                            # NOTE(review): bare except silently swallows all
                            # errors (including SystemExit) — consider
                            # narrowing to Exception and logging
                            article = None

                        if article:
                            # store the sfv gzip-compressed and attach it
                            data = gzip.compress(article.encode('utf-8'))
                            sfv = SFV(data=data)
                            db.add(sfv)

                            release.sfv = sfv
                            release.sfv_metablack_id = None
                            db.add(release)

                            log.info('sfv: [{}] - sfv added'.format(
                                release.search_name))
                            found = True
                            break

                    if not found:
                        # blacklist so this release isn't retried forever
                        log.debug('sfv: [{}] - no sfvs in release'.format(
                            release.search_name))
                        mb = MetaBlack(sfv=release, status='IMPOSSIBLE')
                        db.add(mb)
                db.commit()
Beispiel #34
0
def process(limit=None, category=0):
    """Processes release rarfiles to check for passwords and filecounts.

    :param limit: process at most this many releases (newest first);
        all matching releases when falsy
    :param category: restrict to this category id when non-zero
    """

    with Server() as server:
        with db_session() as db:
            # releases with no file listing, unknown password status, and
            # no metablack blocking the rar lookup
            # noinspection PyComparisonWithNone
            query = db.query(Release).join(Group).join(NZB).filter(~Release.files.any()). \
                filter(Release.passworded == 'UNKNOWN').filter(Release.rar_metablack_id == None)
            if category:
                query = query.filter(Release.category_id == int(category))

            if limit:
                releases = query.order_by(Release.posted.desc()).limit(limit)
            else:
                releases = query.order_by(Release.posted.desc()).all()

            for release in releases:
                log.debug('rar: processing {}'.format(release.search_name))
                nzb = pynab.nzbs.get_nzb_details(release.nzb)

                if nzb and nzb['rars']:
                    try:
                        passworded, info = check_release_files(server, release.group.name, nzb)
                    except Exception as e:
                        # if usenet isn't accessible, we don't want to blacklist it
                        log.error('rar: file info failed: {}'.format(e))
                        continue

                    if info:
                        log.info('rar: file info add [{}]'.format(
                            release.search_name
                        ))
                        release.passworded = passworded

                        # record each file and accumulate the total size
                        size = 0
                        for file in info:
                            f = File(name=file['name'][:512],
                                     size=file['size'])
                            f.release = release
                            size += file['size']
                            db.add(f)

                        if size != 0:
                            release.size = size

                        release.rar_metablack_id = None
                        db.add(release)
                        db.commit()
                        continue
                # fall-through: no nzb, no rars, or no readable file info —
                # blacklist the rar lookup so it isn't retried forever
                log.debug('rar: [{}] - file info: no readable rars in release'.format(
                    release.search_name
                ))
                mb = MetaBlack(rar=release, status='IMPOSSIBLE')
                db.add(mb)
                db.commit()
Beispiel #35
0
def process(limit=None, category=0):
    """Processes release rarfiles to check for passwords and filecounts.

    :param limit: process at most this many releases (newest first);
        all matching releases when falsy
    :param category: restrict to this category id when non-zero
    """

    with Server() as server:
        with db_session() as db:
            # releases with no file listing, unknown password status, and
            # no metablack blocking the rar lookup
            # noinspection PyComparisonWithNone
            query = db.query(Release).join(Group).join(NZB).filter(~Release.files.any()). \
                filter(Release.passworded == 'UNKNOWN').filter(Release.rar_metablack_id == None)
            if category:
                query = query.filter(Release.category_id == int(category))

            if limit:
                releases = query.order_by(Release.posted.desc()).limit(limit)
            else:
                releases = query.order_by(Release.posted.desc()).all()

            for release in releases:
                log.debug('rar: processing {}'.format(release.search_name))
                nzb = pynab.nzbs.get_nzb_details(release.nzb)

                if nzb and nzb['rars']:
                    try:
                        passworded, info = check_release_files(
                            server, release.group.name, nzb)
                    except Exception as e:
                        # if usenet isn't accessible, we don't want to blacklist it
                        log.error('rar: file info failed: {}'.format(e))
                        continue

                    if info:
                        log.info('rar: file info add [{}]'.format(
                            release.search_name))
                        release.passworded = passworded

                        # record each file and accumulate the total size
                        size = 0
                        for file in info:
                            f = File(name=file['name'][:512],
                                     size=file['size'])
                            f.release = release
                            size += file['size']
                            db.add(f)

                        if size != 0:
                            release.size = size

                        release.rar_metablack_id = None
                        db.add(release)
                        db.commit()
                        continue
                # fall-through: no nzb, no rars, or no readable file info —
                # blacklist the rar lookup so it isn't retried forever
                log.debug('rar: [{}] - file info: no readable rars in release'.
                          format(release.search_name))
                mb = MetaBlack(rar=release, status='IMPOSSIBLE')
                db.add(mb)
                db.commit()
Beispiel #36
0
def process(limit=None):
    """Process releases for requests.

    Matches 'REQ:'-named releases (category 8010) against Pre records by
    request id, associating each release with its pre entry.

    :param limit: process at most this many releases (newest first);
        all matching releases when falsy
    """

    with db_session() as db:
        # request-style releases that have no pre association yet
        query = db.query(Release).join(Group).filter(Release.name.like('REQ:%')).filter(Release.pre_id == None).filter(
            Release.category_id == '8010')

        if limit:
            releases = query.order_by(Release.posted.desc()).limit(limit)
        else:
            releases = query.order_by(Release.posted.desc()).all()

        # create a dict of request id's and releases
        requests = {}

        if releases:
            for release in releases:
                # check if it's aliased
                if release.group.name in GROUP_ALIASES:
                    group_name = GROUP_ALIASES[release.group.name]
                else:
                    group_name = release.group.name

                if group_name not in requests:
                    requests[group_name] = {}

                try:
                    requests[group_name][int(release.name.split(': ')[1])] = release
                except ValueError:
                    # request hash?
                    continue

        else:
            log.info("requests: no release requests to process")

        # per-group
        for group_name, group_requests in requests.items():
            # query for the requestids
            # NOTE(review): 'if requests' is always truthy inside this loop
            # (we're iterating requests.items()) — looks like it was meant
            # to be 'if group_requests'; confirm before changing
            if requests:
                pres = db.query(Pre).filter(Pre.requestgroup==group_name).filter(Pre.requestid.in_(group_requests.keys())).all()
            else:
                log.info("requests: no pre requests found")
                pres = []

            # loop through and associate pres with their requests
            for pre in pres:
                # no longer need to check group
                updated_release = group_requests.get(pre.requestid)
                updated_release.pre_id = pre.id
                db.merge(updated_release)
                log.info("requests: found pre request id {} ({}) for {}".format(pre.requestid, group_name,
                                                                                updated_release.name))

            db.commit()
Beispiel #37
0
def rename_bad_releases(category):
    """Attempt to discover proper names for unrenamed releases in *category*.

    For each non-unwanted release in the category that has files, nfo, sfv
    or pre information available, try to discover its real name/category.
    Matches are renamed or recategorised; hopeless releases are marked
    unwanted; releases that turn out to duplicate an existing one are
    deleted. Logs a summary when done.
    """
    count = 0
    s_count = 0
    for_deletion = []
    with db_session() as db:
        # noinspection PyComparisonWithNone,PyComparisonWithNone,PyComparisonWithNone,PyComparisonWithNone
        query = db.query(Release).filter(Release.category_id==int(category)).filter(
            (Release.files.any())|(Release.nfo_id!=None)|(Release.sfv_id!=None)|(Release.pre_id!=None)
        ).filter((Release.status!=1)|(Release.status==None)).filter(Release.unwanted==False)
        for release in windowed_query(query, Release.id, config.scan.get('binary_process_chunk_size', 1000)):
            count += 1
            name, category_id = pynab.releases.discover_name(release)

            if not name and category_id:
                # don't change the name, but the category might need changing
                release.category_id = category_id

                # we're done with this release
                release.status = 1

                db.merge(release)
            elif name and category_id:
                # only add it if it doesn't exist already
                existing = db.query(Release).filter(Release.name==name,
                                                    Release.group_id==release.group_id,
                                                    Release.posted==release.posted).first()
                if existing:
                    # if it does, delete this one
                    for_deletion.append(release.id)
                    db.expunge(release)
                else:
                    # we found a new name!
                    s_count += 1

                    release.name = name
                    release.search_name = pynab.releases.clean_release_name(name)
                    release.category_id = category_id

                    # we're done with this release
                    release.status = 1

                    db.merge(release)
            else:
                # nein
                release.status = 0
                release.unwanted = True
        db.commit()

        # delete duplicates while the session is still open; previously this
        # ran after the with-block using a closed session and was never
        # committed
        if for_deletion:
            deleted = db.query(Release).filter(Release.id.in_(for_deletion)).delete(synchronize_session=False)
            db.commit()
        else:
            deleted = 0

    log.info('rename: successfully renamed {} of {} releases and deleted {} duplicates'.format(s_count, count, deleted))
Beispiel #38
0
def get_nzb(dataset=None):
    """Serve the NZB file for a release, counting the grab.

    Looks the release up by ``?guid=`` or ``?id=``, bumps both the
    release's and the requesting user's grab counters, and returns the
    stored (gzipped) NZB data. CouchPotato can't handle nzb.gz, so for
    that client the payload is decompressed first.

    Returns api_error(100) without a valid API user, api_error(200)
    without an id, api_error(300) when the release doesn't exist.
    """
    user = auth()
    if not user:
        return api_error(100)

    # accept either ?guid= or ?id=
    id = request.query.guid or None
    if not id:
        id = request.query.id or None
    if not id:
        return api_error(200)

    # couchpotato doesn't support nzb.gzs, so decompress them
    # (default '' avoids a TypeError when no User-Agent header is sent)
    decompress = 'CouchPotato' in request.headers.get('User-Agent', '')

    with db_session() as db:
        release = db.query(Release).join(NZB).join(Category).filter(
            Release.id == id).first()
        if not release:
            return api_error(300)

        release.grabs += 1
        user.grabs += 1
        db.merge(release)
        db.merge(user)
        db.commit()

        data = release.nzb.data
        filename = release.search_name.replace(' ', '_')
        if decompress:
            data = gzip.decompress(data)
            content_type = 'application/x-nzb'
            filename += '.nzb'
        else:
            content_type = 'application/x-nzb-compressed-gzip'
            filename += '.nzb.gz'

        response.set_header('Content-type', content_type)
        response.set_header('X-DNZB-Name', release.search_name)
        response.set_header('X-DNZB-Category', release.category.name)
        response.set_header(
            'Content-Disposition',
            'attachment; filename="{0}"'.format(filename))
        return data
def recategorise():
    """Re-run category detection across every release.

    Commits in 50k-row batches to keep transactions bounded; a final
    commit flushes whatever remains in the last partial batch (previously
    those rows were only persisted if db_session() commits on exit —
    TODO confirm its exit behavior).
    """
    with db_session() as db:
        batched = 0
        for release in db.query(Release).join(Group).all():
            category_id = pynab.categories.determine_category(release.search_name, release.group.name)
            release.category_id = category_id
            db.merge(release)
            batched += 1
            # commit every 50k rows
            if batched == 50000:
                db.commit()
                batched = 0
        # flush the final partial batch
        db.commit()
Beispiel #40
0
def create(email):
    """Creates a user by email with a random API key.

    :param email: email address for the new user
    :return: the generated 32-character lowercase-hex API key
    """
    # uuid4 is built from os.urandom, so its hex form is already a random
    # 32-char key — md5-hashing it (as before) added nothing
    api_key = uuid.uuid4().hex

    with db_session() as db:
        user = User()
        user.email = email
        user.api_key = api_key
        user.grabs = 0

        db.merge(user)
        # commit explicitly, consistent with the other write paths
        db.commit()

    return api_key
Beispiel #41
0
def get_stats():
    """
    Retrieve relevant stats for display.

    Returns a (parts, binaries, releases, others) tuple of row counts.
    """
    with db_session() as db:
        part_count = db.query(Part).count()
        binary_count = db.query(Binary).count()
        release_count = db.query(Release).count()
        # other-misc releases (8010): hashed, yet-to-be renamed or just bad
        misc_count = db.query(Release).filter(Release.category_id == 8010).count()

        return part_count, binary_count, release_count, misc_count
Beispiel #42
0
def get_stats():
    """
    Retrieve relevant stats for display.

    Returns a (parts, binaries, releases, others) tuple of row counts.
    """
    with db_session() as db:
        counts = (
            db.query(Part).count(),
            db.query(Binary).count(),
            db.query(Release).count(),
            # other-misc releases (8010): hashed, yet-to-be renamed or just bad
            db.query(Release).filter(Release.category_id == 8010).count(),
        )
        return counts
Beispiel #43
0
def recategorise():
    """Re-run category detection across every release.

    Commits in 50k-row batches to keep transactions bounded; a final
    commit flushes whatever remains in the last partial batch (previously
    those rows were only persisted if db_session() commits on exit —
    TODO confirm its exit behavior).
    """
    with db_session() as db:
        batched = 0
        for release in db.query(Release).join(Group).all():
            category_id = pynab.categories.determine_category(
                release.search_name, release.group.name)
            release.category_id = category_id
            db.merge(release)
            batched += 1
            # commit every 50k rows
            if batched == 50000:
                db.commit()
                batched = 0
        # flush the final partial batch
        db.commit()
Beispiel #44
0
def process(limit=None):
    """Process releases for requests.

    Scans request-style release names per group, then matches them
    against known pres by request id and links the pre to the release.

    :param limit: unused; kept for interface compatibility
    """
    with db_session() as db:
        # group_name -> {matched request string: release}
        requests = {}
        for group, reg in GROUP_REQUEST_REGEXES.items():
            # noinspection PyComparisonWithNone
            query = db.query(Release).join(Group).filter(Group.name==group).filter(Release.pre_id == None).\
                filter(Release.category_id == '8010').filter("releases.name ~ '{}'".format(reg))

            for release in windowed_query(
                    query, Release.id,
                    config.scan.get('binary_process_chunk_size')):
                # check if it's aliased
                if release.group.name in GROUP_ALIASES:
                    group_name = GROUP_ALIASES[release.group.name]
                else:
                    group_name = release.group.name

                if group_name not in requests:
                    requests[group_name] = {}

                result = regex.search(reg, release.name)
                if result:
                    requests[group_name][result.group(0)] = release

        # this was previously a for/else: with no break in the loop the
        # else always ran, logging "no release requests" unconditionally
        if not requests:
            log.info("requests: no release requests to process")

        # per-group
        for group_name, group_requests in requests.items():
            # query for the requestids
            # (was `if requests:`, which is always true inside this loop)
            if group_requests:
                pres = db.query(Pre).filter(
                    Pre.requestgroup == group_name).filter(
                        Pre.requestid.in_(group_requests.keys())).all()
            else:
                log.info("requests: no pre requests found")
                pres = []

            # loop through and associate pres with their requests
            for pre in pres:
                # no longer need to check group
                updated_release = group_requests.get(str(pre.requestid))
                updated_release.pre_id = pre.id
                db.merge(updated_release)
                log.info(
                    "requests: found pre request id {} ({}) for {}".format(
                        pre.requestid, group_name, updated_release.name))

            db.commit()
Beispiel #45
0
def scan_missing_segments(group_name):
    """Scan for previously missed segments.

    Deletes misses that exhausted their retry budget, re-fetches the
    remaining missing articles for the group, saves any parts recovered
    and re-records whatever is still missing.

    :param group_name: usenet group to rescan
    """

    log.info('missing: checking for missed segments')

    with db_session() as db:
        # recheck for anything to delete
        expired = db.query(Miss).filter(
            Miss.attempts >= config.scan.get('miss_retry_limit')).filter(
                Miss.group_name == group_name).delete()
        db.commit()
        if expired:
            log.info('missing: deleted {} expired misses'.format(expired))

        # get missing articles for this group
        missing_messages = [
            r for r, in db.query(Miss.message).filter(
                Miss.group_name == group_name).all()
        ]

        if missing_messages:
            # mash it into ranges
            missing_ranges = intspan(missing_messages).ranges()

            server = Server()
            server.connect()

            status, parts, messages, missed = server.scan(
                group_name, message_ranges=missing_ranges)

            # if we got some missing parts, save them
            if parts:
                pynab.parts.save_all(parts)

            # even if they got blacklisted, delete the ones we got from the misses
            if messages:
                db.query(Miss).filter(Miss.message.in_(messages)).filter(
                    Miss.group_name == group_name).delete(False)

            db.commit()

            if missed:
                # clear up those we didn't get
                save_missing_segments(group_name, missed)

            if server.connection:
                try:
                    server.connection.quit()
                except Exception:
                    # best-effort disconnect; was a bare except, which also
                    # swallowed SystemExit/KeyboardInterrupt
                    pass
Beispiel #46
0
def fill_sizes():
    """Backfill Release.size (zero or NULL) by reading it from the NZB."""
    with db_session() as db:
        # noinspection PyComparisonWithNone
        needs_size = (Release.size == 0) | (Release.size == None)
        for rel in db.query(Release).filter(needs_size).yield_per(500):
            nzb_size = pynab.nzbs.get_size(rel.nzb)
            if nzb_size == 0:
                continue
            log.debug('fill_size: [{}] - [{}] - added size: {}'.format(
                rel.id, rel.search_name, nzb_size))
            rel.size = nzb_size
            db.add(rel)
        db.commit()
Beispiel #47
0
def stats(dataset=None):
    """Render the stats page.

    Gathers per-category processing totals (TV, movies, NFOs, file info),
    category counts and group counts, then renders templates/api/stats.mako.

    :param dataset: optional dict to merge the stats into
    :return: rendered page, or None when rendering fails
    """
    if not dataset:
        dataset = {}

    with db_session() as db:
        # each *_totals row is (processed, failed, total) for its section
        tv_totals = db.query(func.count(Release.tvshow_id), func.count(Release.tvshow_metablack_id),
                             func.count(Release.id)).join(Category).filter(Category.parent_id == 5000).one()
        movie_totals = db.query(func.count(Release.movie_id), func.count(Release.movie_metablack_id),
                                func.count(Release.id)).join(Category).filter(Category.parent_id == 2000).one()
        nfo_total = db.query(func.count(Release.nfo_id), func.count(Release.nfo_metablack_id)).one()
        file_total = db.query(Release.id).filter((Release.files.any()) | (Release.passworded != 'UNKNOWN')).count()
        file_failed_total = db.query(func.count(Release.rar_metablack_id)).one()
        release_total = db.query(Release.id).count()

        dataset['totals'] = {
            'TV': {
                'processed': tv_totals[0],
                'failed': tv_totals[1],
                'total': tv_totals[2]
            },
            'Movies': {
                'processed': movie_totals[0],
                'failed': movie_totals[1],
                'total': movie_totals[2]
            },
            'NFOs': {
                'processed': nfo_total[0],
                'failed': nfo_total[1],
                'total': release_total
            },
            'File Info': {
                'processed': file_total,
                'failed': file_failed_total[0],
                'total': release_total
            }
        }

        dataset['categories'] = db.query(Category, func.count(Release.id)).join(Release).group_by(Category).order_by(
            desc(func.count(Release.id))).all()

        dataset['groups'] = db.query(Group, func.min(Release.posted), func.count(Release.id)).join(Release).group_by(Group).order_by(desc(func.count(Release.id))).all()

        try:
            tmpl = Template(
                filename=os.path.join(root_dir, 'templates/api/stats.mako'))
            return tmpl.render(**dataset)
        except Exception:
            # was a bare except, which also caught SystemExit/KeyboardInterrupt
            log.error('Failed to deliver page: {0}'.format(exceptions.text_error_template().render()))
            return None
Beispiel #48
0
def fill_sizes():
    """Backfill Release.size (zero or NULL) by reading it from the NZB."""
    with db_session() as db:
        query = db.query(Release).filter(
            (Release.size == 0) | (Release.size == None))
        for release in query.yield_per(500):
            size = pynab.nzbs.get_size(release.nzb)
            if size != 0:
                log.debug('fill_size: [{}] - [{}] - added size: {}'.format(
                    release.id,
                    release.search_name,
                    size
                ))
                release.size = size
                db.add(release)
        db.commit()
Beispiel #49
0
def process(limit=None):
    """Process releases for requests.

    Scans request-style release names per group, matches them against
    known pres by request id, and copies the pre's id/name/searchname
    onto the release.

    :param limit: unused; kept for interface compatibility
    """
    with db_session() as db:
        # group_name -> {matched request string: release}
        requests = {}
        for group, reg in GROUP_REQUEST_REGEXES.items():
            # noinspection PyComparisonWithNone
            query = db.query(Release).join(Group).filter(Group.name==group).filter(Release.pre_id == None).\
                filter(Release.category_id == '8010').filter("releases.name ~ '{}'".format(reg))

            for release in windowed_query(query, Release.id, config.scan.get('binary_process_chunk_size')):
                # check if it's aliased
                if release.group.name in GROUP_ALIASES:
                    group_name = GROUP_ALIASES[release.group.name]
                else:
                    group_name = release.group.name

                if group_name not in requests:
                    requests[group_name] = {}

                result = regex.search(reg, release.name)
                if result:
                    requests[group_name][result.group(0)] = release

        # this was previously a for/else: with no break in the loop the
        # else always ran, logging "no release requests" unconditionally
        if not requests:
            log.info("requests: no release requests to process")

        # per-group
        for group_name, group_requests in requests.items():
            # query for the requestids
            # (was `if requests:`, which is always true inside this loop)
            if group_requests:
                pres = db.query(Pre).filter(Pre.requestgroup==group_name).filter(Pre.requestid.in_(group_requests.keys())).all()
            else:
                log.info("requests: no pre requests found")
                pres = []

            # loop through and associate pres with their requests
            for pre in pres:
                # no longer need to check group
                updated_release = group_requests.get(str(pre.requestid))
                updated_release.pre_id = pre.id
                updated_release.name = pre.name
                updated_release.search_name = pre.searchname
                db.merge(updated_release)
                log.info("requests: found pre request id {} ({}) for {}".format(pre.requestid, group_name,
                                                                                updated_release.name))

            db.commit()
def upgrade():
    """Prepend a missing XML header/head block to NZBs added in a
    specific time window (they were written without one)."""
    with db_session() as db:
        start = dateutil.parser.parse('2014/11/12 16:37 GMT+8')
        end = dateutil.parser.parse('2014/11/13 16:20 GMT+8')
        affected = db.query(Release). \
            filter(Release.added >= start). \
            filter(Release.added <= end). \
            order_by(Release.added). \
            all()
        for release in affected:
            nzb = gzip.decompress(release.nzb.data).decode('utf-8')
            # skip NZBs that already carry an XML declaration
            if '<?xml' in nzb:
                continue
            header = (
                '<?xml version="1.0" encoding="UTF-8"?>\n'
                '<!DOCTYPE nzb PUBLIC "-//newzBin//DTD NZB 1.1//EN" "http://www.newzbin.com/DTD/nzb/nzb-1.1.dtd">\n'
                '<nzb>\n'
                '<head><meta type="category">{}</meta><meta type="name">{}</meta></head>\n'
            ).format(release.category.name, escape(release.search_name))
            release.nzb.data = gzip.compress((header + nzb).encode('utf-8'))
            db.commit()
def upgrade():
    """Prepend a missing XML header/head block to NZBs added in a
    specific time window (they were written without one)."""
    with db_session() as db:
        window_start = dateutil.parser.parse('2014/11/12 16:37 GMT+8')
        window_end = dateutil.parser.parse('2014/11/13 16:20 GMT+8')
        query = db.query(Release). \
            filter(Release.added >= window_start). \
            filter(Release.added <= window_end). \
            order_by(Release.added)
        for rel in query.all():
            body = gzip.decompress(rel.nzb.data).decode('utf-8')
            # skip NZBs that already carry an XML declaration
            if '<?xml' in body:
                continue
            prefix = (
                '<?xml version="1.0" encoding="UTF-8"?>\n'
                '<!DOCTYPE nzb PUBLIC "-//newzBin//DTD NZB 1.1//EN" "http://www.newzbin.com/DTD/nzb/nzb-1.1.dtd">\n'
                '<nzb>\n'
                '<head><meta type="category">{}</meta><meta type="name">{}</meta></head>\n'
            ).format(rel.category.name, escape(rel.search_name))
            rel.nzb.data = gzip.compress((prefix + body).encode('utf-8'))
            db.commit()
Beispiel #52
0
def nzedbirc(unformattedPre):
    """Insert or update a Pre record parsed from a raw nzedb IRC line."""
    formattedPre = parseNzedbirc(unformattedPre)

    with db_session() as db:
        existing = db.query(Pre).filter(
            Pre.name == formattedPre['name']).first()

        if existing:
            # update the existing pre in place
            for key, value in formattedPre.items():
                setattr(existing, key, value)
            pre = existing
        else:
            pre = Pre(**formattedPre)

        try:
            db.add(pre)
            log.info("pre: Inserted/Updated - {}".format(formattedPre["name"]))
        except Exception as e:
            # best-effort: a failed add is only logged, not raised
            log.debug("pre: Error - {}".format(e))
Beispiel #53
0
def save_missing_segments(group_name, missing_segments):
    """Handles any missing segments by mashing them into ranges
    and saving them to the db for later checking.

    :param group_name: usenet group the segments belong to
    :param missing_segments: iterable of missing article numbers
    """

    # guard: min()/max() below raise ValueError on an empty collection
    if not missing_segments:
        return

    with db_session() as db:
        # we don't want to get the whole db's worth of segments
        # just get the ones in the range we need
        first, last = min(missing_segments), max(missing_segments)

        # get previously-missed parts
        previous_misses = [
            r for r, in db.query(Miss.message).filter(
                Miss.message >= first).filter(Miss.message <= last).filter(
                    Miss.group_name == group_name).all()
        ]

        # find any messages we're trying to get again
        repeats = list(set(previous_misses) & set(missing_segments))

        # update the repeats to include the new attempt
        if repeats:
            stmt = Miss.__table__.update().where(
                Miss.__table__.c.message == bindparam('m')).values(
                    attempts=Miss.__table__.c.attempts + 1)

            db.execute(stmt, [{'m': m} for m in repeats if m])

        # subtract the repeats from our new list
        new_misses = list(set(missing_segments) - set(repeats))

        # batch-insert the missing messages
        if new_misses:
            db.execute(Miss.__table__.insert(), [{
                'message': m,
                'group_name': group_name,
                'attempts': 1
            } for m in new_misses])

        # delete anything that's been attempted enough
        expired = db.query(Miss).filter(
            Miss.attempts >= config.scan.get('miss_retry_limit')).filter(
                Miss.group_name == group_name).delete()
        db.commit()
        log.info(
            'missing: saved {} misses and deleted {} expired misses'.format(
                len(new_misses), expired))