Example #1
    def update_last_update(self, dt=None):
        # update both the cached variable and the db
        if isinstance(dt, datetime.datetime):
            now = dt.strftime('%d/%m/%Y %H:%M')
        else:
            now = u.now()

        self._last_update = now
        return self._execute(sql.UPDATE_GENERAL_SETTING, (now, 'last_update'),
                             rowcount=True)
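A minimal usage sketch (the `db` name and the database filename are assumptions; the `Database` class itself is shown in Example #5):

    import datetime

    db = Database('bot.sqlite')  # hypothetical filename
    db.update_last_update()  # store the current time via u.now()
    db.update_last_update(datetime.datetime(2020, 1, 1, 12, 30))  # explicit datetime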
Example #2
    def get_torrents(self, with_current_datetime=True):
        if not self._torrents:
            return []

        if with_current_datetime:
            now = u.now(stringify=False)
            return [(t.id, t.title, t.description, t.magnet, now)
                    for t in self._torrents]
        else:
            return [(t.id, t.title, t.description, t.magnet)
                    for t in self._torrents]
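A sketch of how the 4-tuple variant might be bulk-inserted; the `scraper` name, the sqlite connection, and the table/columns are all assumptions, not part of the source:

    import sqlite3

    rows = scraper.get_torrents(with_current_datetime=False)
    with sqlite3.connect('bot.sqlite') as conn:  # hypothetical db path
        conn.executemany(
            'INSERT OR REPLACE INTO torrents (id, title, description, magnet) '
            'VALUES (?, ?, ?, ?)',  # hypothetical table and columns
            rows)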
Example #3
    def save_json_file(self):
        if self._torrents:  # don't save the file if the list is still empty
            file_name = u.now(stringify='tntvillage_releases_%Y%m%d_%H%M.json')
            file_path = os.path.join(self._json_base_dir, file_name)

            json_data = [t._asdict() for t in self._torrents]
            with open(file_path, 'w+') as f:
                json.dump(json_data, f, indent=4)

            return file_path
        else:
            return False
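A short consumption sketch (the `scraper` instance is an assumption; the logger is the module-level one from Example #5). Since the method returns the written path on success and False on an empty list, the result can be checked directly:

    file_path = scraper.save_json_file()
    if file_path:
        logger.info('torrents dumped to %s', file_path)
    else:
        logger.warning('empty torrent list, nothing was saved')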
Example #4
    def iter_all(self,
                 sleep=1,
                 reset_previous_list=True,
                 limit=0,
                 yield_with_current_datetime=True):
        """Fetch all the torrents from all the pages.

        :param sleep: time to wait after every request
        :param reset_previous_list: empty the internal objects list
        :param limit: only fetch this number of pages
        :param yield_with_current_datetime: returns the torrent tuple with the current datetime too
        :return: False if the first request fails, json path if save_json_file is True, else True
        """
        logger.info('fetching all pages (iterable)')

        fetched = self._fetch_total_pages(skip_request_if_present=True)
        if not fetched:
            raise ValueError('cannot fetch total number of pages')

        time.sleep(sleep)
        logger.info(
            'starting loop to fetch all pages (reset_previous_list=%s, limit=%d)',
            reset_previous_list, limit)
        if reset_previous_list:
            self._torrents = list()

        for i in range(
                1, self._total_pages + 1 if limit in (None, 0) else limit + 1):
            logger.info('fetching page %d (sleep=%s)...', i, sleep)
            result = self._fetch_page_by_index(i)
            time.sleep(sleep)  # wait after every request, as documented
            trs = result.soup.find_all('tr')
            logger.info('fetched %d torrents from page %d', len(trs), i)
            torrents_to_yield = list()
            for tr in trs:
                torrent = self._parse_torrent_data(tr)
                if torrent:
                    self._torrents.append(torrent)
                    torrents_to_yield.append(torrent)

            if torrents_to_yield:
                if yield_with_current_datetime:
                    now = u.now(stringify=False)
                    yield [(t.id, t.title, t.description, t.magnet,
                            t.torrent_download_url, t.leech, t.seed, now)
                           for t in torrents_to_yield]
                else:
                    yield [(t.id, t.title, t.description, t.magnet,
                            t.torrent_download_url, t.leech, t.seed)
                           for t in torrents_to_yield]
            else:
                # if parsing the page produced no results, yield its raw html
                yield result.html
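Because `iter_all` is a generator, a consumer has to distinguish the two things it can yield: a list of tuples when the page parsed, or the raw html when it did not. A hedged consumption sketch (the `scraper` name is an assumption):

    for page_result in scraper.iter_all(sleep=2, limit=5):
        if isinstance(page_result, list):
            # parsed page: (id, title, description, magnet,
            # torrent_download_url, leech, seed, now) tuples
            logger.info('received %d torrent tuples', len(page_result))
        else:
            # unparsed page: raw html, useful for debugging the markup
            logger.warning('page parsing failed, got %d characters of html',
                           len(page_result))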
Example #5
import logging
import sqlite3
import datetime

import simplesqlitewrap as ssw

from bot import u
import bot.sql as sql

logger = logging.getLogger(__name__)

DEFAULT_SETTINGS = [('last_update', u.now()), ('json_file_id', None),
                    ('searches_count', 0)]


class Database(ssw.Database):
    def __init__(self, filename, **kwargs):
        ssw.Database.__init__(
            self,
            filename,
            connection_args={'detect_types': sqlite3.PARSE_DECLTYPES},
            **kwargs)

        self._init_tables()

        self._last_update = self._get_general_setting(
            'last_update')  # cache of the date/time of the last update
        self._json_file_id = self._get_general_setting('json_file_id')
        self._total_entries = 0
        self.update_total_entries()
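The examples call `u.now()` in three ways: with no arguments (a formatted string, matching the '%d/%m/%Y %H:%M' format used in Example #1), with stringify=False (a datetime object, Examples #2 and #4), and with a format string as the stringify value (Example #3). A sketch consistent with those call sites, purely inferred since bot.u itself is not shown:

    import datetime

    def now(stringify='%d/%m/%Y %H:%M'):
        # Inferred helper, NOT the actual bot.u implementation:
        # - stringify=False        -> return the datetime object itself
        # - stringify=<format str> -> return the formatted string
        dt = datetime.datetime.now()
        if stringify is False:
            return dt
        return dt.strftime(stringify)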