def __init__(self, host=None, port=None, log_level=None, run_as_thread=None, debug=False):
    """Build and start the Bottle WSGI application.

    Args:
        host: Bind address; falls back to ``[Settings] host`` in the config.
        port: Bind port; falls back to ``[Settings] port`` in the config.
        log_level: Intended for ``setup_logging`` (currently disabled below).
        run_as_thread: Run the server in a background thread; falls back to
            ``[Settings] run as thread`` in the config.
        debug: Enable Bottle debug mode and route dump on startup.
    """
    # Guard against a second server instance on this machine.
    self._get_process_lock()
    cfg = load_config()
    # Send stderr to logs if we're not running in debug mode
    # self.logger = setup_logging("log", log_level=log_level, capture_stderr=not debug)
    # NOTE(review): the logging call above is commented out in the original,
    # which leaves the ``log_level`` parameter unused -- confirm intent.
    bottle.TEMPLATES.clear()
    bottle.debug(debug)
    self.app = bottle.Bottle()
    load_wsgi_endpoints(self.app, cfg)
    if debug:
        # Dump the route table so misconfigured endpoints are visible early.
        print("\nLoaded routes:")
        for r in self.app.routes:
            print(r)
        print("")
    # Explicit arguments win; otherwise fall back to the config file.
    self._init_server(
        host=cfg.get("Settings", "host") if host is None else host,
        port=cfg.getint("Settings", "port") if port is None else port,
        run_as_thread=cfg.getboolean("Settings", "run as thread")
        if run_as_thread is None else run_as_thread,
        debug=debug)
# NOTE(review): the next four calls appear to be the tail of a function
# (likely ``sync()``, which is invoked in the ``__main__`` block below)
# whose ``def`` line is outside this chunk -- restore them under that
# header when merging. TODO confirm.
init()
reset()
pull()
go_back()


def deploy():
    """Commit local changes and push them to the GitHub Pages repository."""
    get_in()
    add()
    commit()
    # Pull before push so the push is not rejected on a stale branch.
    pull()
    push()
    go_back()


if __name__ == "__main__":
    # Resolve all paths relative to this script's directory.
    root_path = os.path.dirname(os.path.abspath(__file__))
    init_root_path(root_path)
    config_file_path = root_path + os.sep + "setup.cfg"
    load_config(config_file_path)
    config = get_config_file(config_file_path)
    define("github_pages_repo",
           default=config.get("sect_basic", "github_pages_repo"),
           help="github pages repo url")
    sync()
    generate()
    deploy()
# NOTE(review): the statements before ``def generate`` appear to be the
# tail of a post-rendering function (likely ``generate_hobby_posts``,
# called from ``generate()`` below) whose ``def`` line and ``dest``
# variable are outside this chunk -- restore them under that header
# when merging. TODO confirm.
mkdir(dest)
posts = markdown_parser.get_all_parsed_hobby_posts(brief=False)
params = get_site_info()
for post in posts:
    html = TemplateParser.parse(options.current_template_dir, "post.html",
                                post=post, params=params)
    # ``with`` ensures the file handle is closed even if write() raises
    # (the original leaked the handle by never calling close()).
    with open(dest + os.sep + post["post_name"] + ".html", "wb") as post_file:
        post_file.write(html)


def generate():
    """Render the whole site into the build directory."""
    mkdir(options.build_dir)
    generate_index()
    copy_static_files()
    generate_posts()
    generate_about()
    generate_hobby_index()
    generate_hobby_posts()


if __name__ == '__main__':
    # Resolve all paths relative to this script's directory.
    root_path = os.path.dirname(os.path.abspath(__file__))
    init_root_path(root_path)
    config_file_path = root_path + os.sep + "setup.cfg"
    load_config(config_file_path)
    generate()
def scrape_resetera():
    """Scrape the configured ResetEra forum thread.

    Reads scraper settings from the module-level ``config`` loaded in the
    ``__main__`` block below.
    """
    fs = ForumScraper(config=config['scraper'][FORUM_NAME],
                      forum_name=FORUM_NAME,
                      debug_mode=config['scraper']['debug_mode'])
    fs.scrape_page()


def upload_to_bigquery():
    """Upload one scraped JSON dump to BigQuery.

    # TODO automatize, this is only a test -- the filename and table
    # coordinates are hard-coded to a single thread.
    """
    # uploading to bigquery
    ForumScraper.upload_to_bigquery(
        json_filename=
        "../data/scraped_data/resetera/thread_red-dead-redemption-ii-spoiler-thread.75874.json",
        project_id="spoiler-tagger",
        dataset_id="spoilers",
        target_table_id="posts",
        target_table_location="EU",
    )


if __name__ == '__main__':
    print('=== {}:: starting main ==='.format(
        'Recommendation pipeline for {}'.format(constants.APP_NAME)))
    args = utils.parse_arguments(__file__)
    config = utils.load_config(args.config_path)
    scrape_resetera()
    upload_to_bigquery()