def close(self):
    """Close the underlying daily log file, logging (but never
    propagating) any error so shutdown cannot be interrupted.
    """
    # self._lock.unlock()
    try:
        DailyLogFile.close(self)
    except Exception:
        # Best-effort: report the failure and the traceback, then continue.
        # print(...) with a single argument is valid on both Python 2 and 3
        # (the original bare `print` statement was Python-2-only).
        print('HourLogFile close File error')
        print(traceback.format_exc())
# --- Broker entry point: read config, parse CLI args, set up logging,
# --- print the startup banner, and run the Twisted HTTP API server. ---
settings = read_settings()
scrapy_settings = read_settings("scrapy-settings.json")

# Default API port comes from settings["api"]["port"], falling back to 8001.
port = settings["api"].get("port", 8001) if "api" in settings else 8001

parser = argparse.ArgumentParser()
parser.add_argument("-p", "--port", default=port, type=int, help="HTTP API port")
parser.add_argument("-l", "--log", type=int, default=1, help="Log-file")
args = parser.parse_args()

api_port = args.port
# Log to a daily-rotated file unless file logging was disabled (-l 0),
# in which case everything goes to stderr.
# (Removed a leftover no-op `log_file = log_file` statement here.)
log_file = "daily-log.log" if args.log else sys.stderr
if log_file is not sys.stderr:
    log_file = DailyLogFile(log_file, "%s/log-broker" % os.getcwd())

# Fill in the startup banner template and print it before logging starts.
MSG = MSG % (
    VERSION,
    datetime.datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%S"),
    api_port,
    log_file,
    os.getpid(),
    scrapy_settings["api"]["host"],
    scrapy_settings["api"]["port"],
)
sys.stdout.write(MSG)

log.startLogging(log_file)

api = BrokerAPI()
site = server.Site(api)
reactor.listenTCP(api_port, site)
reactor.run()  # blocks here until the reactor is stopped
log_file.close()
def close(self):
    """Close the log file by delegating to the base class.

    NOTE(review): the lock release below is commented out — presumably
    disabled deliberately; confirm no caller relies on it.
    """
    # self._lock.unlock()
    DailyLogFile.close(self)
# --- Scrapy-API entry point: print the startup banner, wire up the OAuth
# --- consumer, schedule the periodic maintenance jobs, and run the reactor. ---

# Fill in the startup banner template with version, time, port, pid and
# database connection details, then print it before logging begins.
MSG = MSG % (
    VERSION,
    datetime.datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%S"),
    api_port,
    log_file,
    os.getpid(),
    settings["database"]["username"],
    settings["database"]["host"],
    settings["database"]["port"],
    settings["database"]["name"],
)
sys.stdout.write(MSG)

consumer = make_oauth_consumer(settings)
log.startLogging(log_file)

api = ScrapyAPI(consumer)
site = server.Site(api)
reactor.listenTCP(api_port, site)

# Periodic jobs. LoopingCall(f, arg) calls f(arg) on every tick — same
# effect as wrapping the call in a lambda.
commit_loop = LoopingCall(collect_received, api)
commit_loop.start(settings["database"]["commit_delay"])

restart_loop = LoopingCall(restart_scrapers, api)
restart_loop.start(1800)  # every 30 minutes

failed_loop = LoopingCall(restart_failed_scrapers, api)
failed_loop.start(30)  # every 30 seconds

reactor.run()  # blocks here until the reactor is stopped
log_file.close()