def retrieve_remote_video(self, records):
    """Retrieve each downloaded video from the remote server, then stop seeding it.

    Args:
        records: mapping of torrent key -> remote file name, resolved
            relative to ``self.DEFAULT_REMOTE_VIDEO_DIR``.
    """
    for key, filename in records.items():
        # Encode to bytes because the downstream Retriever expects a
        # byte path (original behavior preserved).
        torrent_path = (self.DEFAULT_REMOTE_VIDEO_DIR + filename).encode('utf-8')
        # NOTE(review): removed long-dead commented-out existence check
        # (`exists(torrent_path, ...)`) — restore a real check here if
        # missing remote files turn out to be common.
        retriever = Retriever(torrent_path)
        retriever.start()
        # Once retrieval has started, this node no longer needs to seed.
        self.stop_seeding(key)
def main():
    """Entry point: fetch the item list, then scrape item history in parallel."""
    args = parser.parse_args()

    # Resolve command-line overrides, falling back to built-in defaults.
    num_threads = int(args.threads) if args.threads else 2
    start_ts = int(args.start) if args.start else 1420070400000
    retry_limit = int(args.retries) if args.retries else 5

    # File-based debug logging for the whole run.
    logger = logging.getLogger(__name__)
    logger.setLevel(logging.DEBUG)
    handler = logging.FileHandler('scraper.log')
    handler.setFormatter(logging.Formatter('%(asctime)s - %(name)s: %(message)s'))
    logger.addHandler(handler)

    logger.debug('Started downloading items.')
    scraper = cfscrape.create_scraper()
    items = cached(scraper, '.data/items.json', 'https://rsbuddy.com/exchange/names.json')
    # Persist the item list into the .data cache folder.
    write_file('.data/items.json', json.dumps(items))
    logger.debug('Finished downloading items.')

    # Shuffle the ids and split them into one bucket per worker thread.
    item_ids = list(items.keys())
    shuffle(item_ids)
    buckets = np.array_split(item_ids, num_threads)

    logger.debug('Started downloading item history.')
    # Launch one Retriever thread per bucket; each gets its own scraper.
    workers = []
    for bucket in buckets:
        worker = Retriever(bucket, start_ts, cfscrape.create_scraper(), retry_limit, logger)
        workers.append(worker)
        worker.start()

    # Block until every worker has drained its bucket.
    for worker in workers:
        worker.join()
    logger.debug('Scraper finished.')
def main():
    """Construct a Retriever and kick off its run."""
    worker = Retriever()
    worker.start()
class Eris:
    """Pyro4-exposed daemon that runs a BtServer and a Retriever over shared Storage.

    Python 2 code: uses ``print >>sys.stderr`` and the ``file()`` builtin.
    A status file (``config.statusFile``) containing ``config.STATUS_LINE``
    marks an already-running instance.
    """

    def __init__(self):
        # Combined stdout/stderr log target used after daemonization.
        self.output = os.path.join(config.logsDir, "output.log")

    def start(self):
        """Daemonize and serve Pyro requests until stop() flips self.running.

        Returns early (in the parent) if an instance is already running,
        if the status file cannot be written, or after forking.
        """
        # If the status file already holds the running marker, another
        # instance is up — bail out.
        try:
            with open(config.statusFile, "r") as f:
                if f.read() == config.STATUS_LINE:
                    return
        # NOTE(review): bare except silently ignores ALL errors here
        # (intended for a missing status file) — consider IOError only.
        except:
            pass
        # Claim the status file; failure to write it is fatal for startup.
        try:
            with open(config.statusFile, "w") as f:
                f.write(config.STATUS_LINE)
        except IOError as e:
            print >>sys.stderr, e
            return
        # daemonize() returns True only in the original parent process.
        if self.daemonize():
            return
        self.startTime = datetime.now()
        self.storage = Storage()
        self.btserver = BtServer(self.storage)
        self.retriever = Retriever(self.storage)
        self.btserver.start()
        self.retriever.start()
        # Register this object with Pyro and serve until self.running is False.
        daemon = Pyro4.Daemon(port=config.pyroPort)
        uri = daemon.register(self, config.PNAME)
        log.info("Eris daemon URI: [{}]".format(uri))
        self.running = True
        daemon.requestLoop(loopCondition=lambda: self.running)
        daemon.unregister(config.PNAME)
        daemon.close()

    def stop(self):
        """Kill the worker threads, truncate the status file, and end the loop."""
        log.info("Closing eris")
        try:
            self.btserver.kill()
            self.retriever.kill()
            # Bounded joins so shutdown cannot hang on a stuck thread.
            self.retriever.join(1.0)
            self.btserver.join(1.0)
            # Truncate the status file to clear the "running" marker.
            with open(config.statusFile, "w"):
                pass
        except:
            log.exception("Something went wrong while shutting down")
        # Always release the Pyro request loop, even after errors above.
        self.running = False

    def status(self):
        """Return (pid, cpu%, mem%, uptime timedelta, storage size)."""
        pid = os.getpid()
        proc = psutil.Process(pid)
        # Old psutil API (get_* accessors) — matches the Python 2 era of this code.
        cpu = proc.get_cpu_percent()
        mem = proc.get_memory_percent()
        uptime = datetime.now() - self.startTime
        du = self.storage.size()
        return (pid, cpu, mem, uptime, du)

    def put(self, packets):
        # Delegate straight to storage.
        self.storage.put(packets)

    def get(self, since=0, to=0, limit=0):
        # storage.get returns a connection/cursor id; fetch all rows for it.
        connId, _ = self.storage.get(since, to, limit)
        return self.storage.fetchall(connId)

    def count(self):
        # Total stored rows.
        return self.storage.rowcount()

    def ping(self):
        # Liveness probe for Pyro clients; echoes the registered name.
        return config.PNAME

    def daemonize(self):
        """Classic UNIX double-fork daemonization.

        Returns True in the original parent (caller should just return),
        False in the final daemon child. Intermediate process exits; both
        fork failures abort via sys.exit(1).
        """
        try:
            pid = os.fork()
            if pid > 0:
                return True
        except OSError as e:
            sys.stderr.write("Fork #1 failed: {} ({})\n".format(e.errno, e.strerror))
            sys.exit(1)
        # Detach from the controlling terminal and any mounted cwd.
        os.chdir("/")
        os.setsid()
        try:
            pid = os.fork()
            if pid > 0:
                sys.exit(0)
        except OSError as e:
            sys.stderr.write("Fork #2 failed: {} ({})\n".format(e.errno, e.strerror))
            sys.exit(1)
        sys.stdout.flush()
        sys.stderr.flush()
        # Truncate the log, then reopen it line-buffered (Py2 file(); bufsize=1)
        # and redirect stdout/stderr into it.
        with open(self.output, "w"):
            pass
        out = file(self.output, "a+", 1)
        os.dup2(out.fileno(), sys.stdout.fileno())
        os.dup2(out.fileno(), sys.stderr.fileno())
        return False

    @staticmethod
    def getProxy():
        """Build a Pyro4 proxy pointing at the locally-running daemon."""
        uri = "PYRO:{}@localhost:{}".format(config.PNAME, config.pyroPort)
        return Pyro4.Proxy(uri)