def __init__(self, title, page_limit):
    """Initialisations."""
    Config.__init__(self)
    self.proxies = self.get_proxies('sky')
    self.title = title
    self.pages = page_limit
    self.logger = logging.getLogger('log1')
    self.class_name = self.__class__.__name__.lower()
    self.index = 0
    self.page = 0
    self.total_fetch_time = 0
    self.mapper = []
    self.mylist = []
    self.masterlist = []
    self.mylist_crossite = []
    self.masterlist_crossite = []
    self.soup_dict = {}
    self.soup = None
    self.headers = [
        "NAME [" + self.colorify("green", "+UPVOTES") + "/" +
        self.colorify("red", "-DOWNVOTES") + "]",
        "INDEX", "SIZE", "DATE", "SE/LE"
    ]
    ######################################
    self.top = "/top1000/all/ed/%d/?l=en-us" % (self.page)
def __init__(self, title, page_limit):
    """Initialisations."""
    Config.__init__(self)
    self.proxies = self.get_proxies('tpb')
    self.proxy = None
    self.title = title
    self.pages = page_limit
    self.logger = logging.getLogger('log1')
    self.class_name = self.__class__.__name__.lower()
    self.index = 0
    self.page = 0
    self.total_fetch_time = 0
    self.mylist = []
    self.masterlist = []
    self.mylist_crossite = []
    self.masterlist_crossite = []
    self.mapper = []
    self.soup_dict = {}
    self.soup = None
    self.headers = [
        'CATEG', 'NAME', 'INDEX', 'UPLOADER', 'SIZE', 'SE/LE', 'DATE', 'C'
    ]
    ###################################
    self.non_color_name = None
    self.top = "/top/all"
    self.top48 = "/top/48hall"
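# Illustrative sketch, not part of the original source: self.top and self.top48
# above are relative paths that get appended to whichever mirror from
# self.proxies ends up as self.proxy. The mirror URL below is a made-up
# placeholder and `requests` is assumed to be available.
import requests

proxy = "https://example-tpb-mirror.org"        # placeholder mirror
top_url = proxy + "/top/all"                    # same path as self.top
top48_url = proxy + "/top/48hall"               # same path as self.top48
response = requests.get(top_url, timeout=15)    # fetch the top-torrents listing
print(response.status_code, len(response.text))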
def __init__(self):
    """Initialisations."""
    Config.__init__(self)
    self.__version__ = "Torrench (1.0.54)"
    self.logger = logging.getLogger('log1')
    self.args = None
    self.input_title = None
    self.page_limit = 0
def __init__(self, title):
    """Initialisations."""
    Config.__init__(self)
    self.proxies = self.get_proxies('rarbg')
    self.proxy = self.proxies[0]
    self.logger = logging.getLogger('log1')
    self.title = title
    self.index = 0
    self.token = None
    self.mapper = []
    self.total_fetch_time = 0
    self.output_headers = ['CATEG', 'NAME', 'INDEX', 'SIZE', 'S/L', 'DATE']
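# Illustrative sketch, not part of the original source: self.token starts as
# None because the RARBG JSON API issues a short-lived token that has to be
# requested before any search call. The endpoint and parameter names below
# assume the public torrentapi interface; the app_id value is a placeholder.
import requests

API = "https://torrentapi.org/pubapi_v2.php"
resp = requests.get(API,
                    params={"get_token": "get_token", "app_id": "torrench"},
                    timeout=15)
token = resp.json().get("token")    # would be stored on the instance as self.token
print(token)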
def __init__(self, title: str):
    """Class constructor"""
    Config.__init__(self)
    self.title = title
    self.logger = logging.getLogger('log1')
    self.output_headers = ['NAME', 'INDEX', 'SIZE', 'S', 'L']
    self.index = 0
    self.mapper = []
    self.proxy = self.check_proxy('nyaa')
    self.search_parameter = "/?f=0&c=0_0&q={query}&s=seeders&o=desc".format(query=self.title)
    self.soup = self.http_request(self.proxy + self.search_parameter)
    self.OS_WIN = False
    if platform.system() == "Windows":
        self.OS_WIN = True
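# Illustrative sketch, not part of the original source: unlike the other
# constructors, this one issues the HTTP request at construction time, so the
# search URL is simply the selected mirror plus a fixed query string sorted by
# seeders. The mirror below is a made-up placeholder; note that, mirroring the
# constructor, the title is not URL-encoded before being interpolated.
proxy = "https://example-nyaa-mirror.si"
title = "ubuntu 22.04"
search_parameter = "/?f=0&c=0_0&q={query}&s=seeders&o=desc".format(query=title)
print(proxy + search_parameter)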
def __init__(self, title):
    """Initialisations."""
    Config.__init__(self)
    self.proxies = self.get_proxies('xbit')
    self.proxy = self.proxies[0]
    self.title = title
    self.logger = logging.getLogger('log1')
    self.index = 0
    self.total_fetch_time = 0
    self.mylist = []
    self.mapper = []
    self.data = {}
    self.output_headers = ['ID', 'NAME', 'INDEX', 'SIZE', 'DISCOVERED']
def __init__(self, title, page_limit):
    """Initialisations."""
    Config.__init__(self)
    self.proxies = self.get_proxies('idope')
    self.proxy = self.proxies[0]
    self.logger = logging.getLogger('log1')
    self.title = title
    self.pages = page_limit
    self.soup = None
    self.soup_dict = {}
    self.page = 0
    self.total_fetch_time = 0
    self.index = 0
    self.mylist = []
    self.mapper = []
    self.output_headers = ['NAME', 'INDEX', 'SIZE', 'SEEDS', 'AGE']
def __init__(self, title, page_limit):
    """Initialisations."""
    Config.__init__(self)
    self.proxies = self.get_proxies('kat')
    self.title = title
    self.pages = page_limit
    self.logger = logging.getLogger('log1')
    self.page = 0
    self.proxy = None
    self.soup = None
    self.soup_dict = {}
    self.index = 0
    self.total_fetch_time = 0
    self.mylist = []
    self.mapper = []
    self.output_headers = [
        'CATEG', 'NAME', 'INDEX', 'UPLOADER', 'SIZE', 'DATE', 'SE/LE', 'C'
    ]
def __init__(self, title):
    """Initialisations."""
    Config.__init__(self)
    self.proxies = self.get_proxies('xbit')
    self.proxy = self.proxies[0]
    self.title = title
    self.logger = logging.getLogger('log1')
    self.class_name = self.__class__.__name__.lower()
    self.index = 0
    self.total_fetch_time = 0
    self.mapper = []
    self.mylist = []
    self.masterlist = []
    self.mylist_crossite = []
    self.masterlist_crossite = []
    self.data = {}
    self.headers = ['ID', 'NAME', 'INDEX', 'SIZE', 'DISCOVERED']
def __init__(self, title):
    """Initialisations."""
    Config.__init__(self)
    self.proxies = self.get_proxies('rarbg')
    self.proxy = self.proxies[0]
    self.title = title
    self.logger = logging.getLogger('log1')
    self.class_name = self.__class__.__name__.lower()
    self.index = 0
    self.raw = None
    self.token = None
    self.mapper = []
    self.mylist = []
    self.masterlist = []
    self.mylist_crossite = []
    self.masterlist_crossite = []
    self.total_fetch_time = 0
    self.headers = ['CATEG', 'NAME', 'INDEX', 'SIZE', 'S/L', 'DATE']
def __init__(self, title, page_limit):
    """Initialisations."""
    Config.__init__(self)
    self.proxies = self.get_proxies('1337x')
    self.proxy = None
    self.title = title
    self.pages = page_limit
    self.logger = logging.getLogger('log1')
    self.OS_WIN = False
    if platform.system() == "Windows":
        self.OS_WIN = True
    self.index = 0
    self.page = 0
    self.total_fetch_time = 0
    self.mylist = []
    self.mapper = []
    self.soup_dict = {}
    self.output_headers = [
        'CATEG', 'NAME', 'INDEX', 'SE', 'LE', 'TIME', 'SIZE', 'UL', 'C']
def __init__(self, title, page_limit):
    """Initialisations."""
    Config.__init__(self)
    self.proxies = self.get_proxies('sky')
    self.title = title
    self.pages = page_limit
    self.logger = logging.getLogger('log1')
    self.index = 0
    self.page = 0
    self.total_fetch_time = 0
    self.mylist = []
    self.mapper = []
    self.soup_dict = {}
    self.soup = None
    self.output_headers = [
        "NAME [" + self.colorify("green", "+UPVOTES") + "/" +
        self.colorify("red", "-DOWNVOTES") + "]",
        "INDEX", "SIZE", "FILES", "UPLOADED", "SE/LE"
    ]
    ######################################
    self.top = "/top1000/all/ed/%d/?l=en-us" % (self.page)
    self.file_count = 0
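# Illustrative sketch, not part of the original source: self.top is rendered
# with the current page number, so at construction (page == 0) it points at the
# first page of the top-1000 listing. Requesting a later page just means
# re-applying the same format string; the mirror URL is a placeholder.
page = 3
top_path = "/top1000/all/ed/%d/?l=en-us" % (page)
print("https://example-sky-mirror.to" + top_path)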
def __init__(self, isbn):
    """Initialisations."""
    Config.__init__(self)
    self.proxies = self.get_proxies('libgen')
    self.proxy = self.proxies[0]
    self.logger = logging.getLogger('log1')
    self.isbn = isbn
    self.index = 0
    self.mapper = []
    self.mapper2 = []
    self.total_fetch_time = 0
    self.output_headers = [
        'Author(s)', 'Title', 'INDEX', 'Publisher', 'Year', 'Language',
    ]
def __init__(self, title, page_limit):
    """Initialisations."""
    Config.__init__(self)
    self.title = title
    self.pages = page_limit
    self.logger = logging.getLogger('log1')
    self.masterlist = []
    self.mapper = []
    self.total_time = 0
    self.headers = [
        "NAME (UPLOADER)", "INDEX", "SIZE", "SE/LE", "UPLOADED"
    ]
    self.api_sites = ['rarbg', 'xbit']
    self.class_list = []
    self.class_name = None
    self.args = None
    self.valid_args = [
        'sorted', 'no_merge', 'thepiratebay', 'skytorrents', 'x1337',
        'idope', 'kickasstorrent', 'nyaa', 'xbit', 'rarbg', 'limetorrents'
    ]
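# Illustrative sketch, not part of the original source: valid_args mixes two
# output flags ('sorted', 'no_merge') with site names, and api_sites marks the
# modules that use a JSON API rather than HTML scraping. Splitting a parsed
# argument list into "sites to query" might look like this; the `args` list is
# a made-up example.
valid_args = [
    'sorted', 'no_merge', 'thepiratebay', 'skytorrents', 'x1337',
    'idope', 'kickasstorrent', 'nyaa', 'xbit', 'rarbg', 'limetorrents'
]
api_sites = ['rarbg', 'xbit']
flags = {'sorted', 'no_merge'}

args = ['thepiratebay', 'rarbg', 'sorted']          # hypothetical CLI selection
sites = [a for a in args if a in valid_args and a not in flags]
uses_api = [s for s in sites if s in api_sites]     # e.g. ['rarbg']
print(sites, uses_api)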
def __init__(self, title, page_limit):
    """Class constructor"""
    Config.__init__(self)
    self.proxies = self.get_proxies('nyaa')
    self.proxy = self.proxies[0]
    self.title = title
    self.pages = page_limit
    self.logger = logging.getLogger('log1')
    self.class_name = self.__class__.__name__.lower()
    self.index = 0
    self.mapper = []
    self.mylist = []
    self.masterlist = []
    self.mylist_crossite = []
    self.masterlist_crossite = []
    self.page = 0
    self.soup = None
    self.total_fetch_time = 0
    self.soup_dict = {}
    self.headers = ['NAME', 'INDEX', 'SIZE', 'SE/LE', 'COMPLETED']
def __init__(self, title, page_limit):
    """Initialisations."""
    Config.__init__(self)
    self.proxies = self.get_proxies('1337x')
    self.proxy = None
    self.title = title
    self.pages = page_limit
    self.logger = logging.getLogger('log1')
    self.class_name = self.__class__.__name__.lower()
    self.index = 0
    self.page = 0
    self.total_fetch_time = 0
    self.mapper = []
    self.mylist = []
    self.masterlist = []
    self.mylist_crossite = []
    self.masterlist_crossite = []
    self.soup_dict = {}
    self.headers = [
        'CATEG', 'NAME', 'INDEX', 'SE/LE', 'TIME', 'SIZE', 'UL', 'C']
def __init__(self, title, page_limit):
    """Initialisations."""
    Config.__init__(self)
    self.proxies = self.get_proxies('tpb')
    self.top = "/top/all"
    self.top48 = "/top/48hall"
    self.title = title
    self.pages = page_limit
    self.logger = logging.getLogger('log1')
    self.page = 0
    self.proxy = None
    self.soup = None
    self.non_color_name = None
    self.soup_dict = {}
    self.OS_WIN = False
    if platform.system() == "Windows":
        self.OS_WIN = True
    self.index = 0
    self.total_fetch_time = 0
    self.mylist = []
    self.mapper = []
    self.output_headers = [
        'CATEG', 'NAME', 'INDEX', 'UPLOADER', 'SIZE', 'S', 'L', 'DATE', 'C'
    ]
def __init__(self, **kwargs):
    """Initialisations."""
    try:
        Config.__init__(self)

        ## Initialising class variables
        self.class_name = self.__class__.__name__.lower()
        self.logger = Utilities.GetLogger(self.class_name)
        self.title = None
        self.search_type = None
        self.with_magnet = None
        self.pages = None
        self.collection = None
        self.database = None
        self.db_handler = None
        self.with_db = False
        for key, value in kwargs.iteritems():
            if "title" == key:
                self.title = value
            elif "page_limit" == key:
                self.pages = value
            elif "search_type" == key:
                self.search_type = value
            elif "with_magnet" == key:
                self.with_magnet = value
            elif "collection" == key:
                self.collection = value
            elif "database" == key:
                self.database = value
            elif "with_db" == key:
                self.with_db = value

        self.proxies = self.get_proxies('limetorrents')
        self.proxy = None
        self.index = 0
        self.page = 0
        self.total_fetch_time = 0
        self.mylist = []
        self.masterlist = []
        self.mylist_crossite = []
        self.masterlist_crossite = []
        self.mapper = []
        ## self.soup_dict = {}
        self.soup_dict = Queue.Queue()
        self.missed = Queue.Queue()
        self.soup = None
        self.headers = ['NAME', 'INDEX', 'SIZE', 'SE/LE', 'UPLOADED']
        ## self.key1 = 'magnet:?xt=urn:btih:'
        ## self.key2 = '&'
        self.key1 = 'http://itorrents.org/torrent/'
        self.key2 = '.torrent?'
        self.supermasterlist = []
        self.lock = threading.Lock()
        self.crawler_finished = False

        if self.with_db:
            self.logger.debug(
                " + Generating database [%s] in [%s] collections" %
                (self.database, self.collection))
            self.db_handler = MongoAccess(debug=True)
            self.db_handler.connect(self.database, self.collection)
    except Exception as inst:
        Utilities.ParseException(inst, logger=self.logger)
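# Illustrative sketch, not part of the original source: this constructor accepts
# only keyword arguments, and the kwargs.iteritems()/Queue.Queue() calls mark it
# as Python 2 code. The keyword names below are the ones the loop above
# recognises; any other key is silently ignored, and the class name receiving
# them is not shown in this excerpt.
search_options = {
    "title": "ubuntu",
    "page_limit": 2,
    "with_magnet": True,
    "with_db": True,              # when True, MongoAccess connects using database/collection
    "database": "torrench",       # placeholder database name
    "collection": "limetorrents", # placeholder collection name
}
# A caller would then pass these as: SomeLimeTorrentsClass(**search_options)
print(sorted(search_options))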
def __init__(self):
    """Initialisations."""
    Config.__init__(self)
    self.logger = logging.getLogger('log1')