def cmd_text(args):  # pylint: disable=unused-argument
    """Download the model descriptions as text."""
    models_to_dump = (Recipe, Ingredient, GroceryItem, Tag)
    with API_SERVICE.app_context():
        for model_cls in models_to_dump:
            download_model_descriptions(model_cls)
def run_import():
    """Import the JSON data into the real postgres database."""
    with API_SERVICE.app_context():
        # Echo the generated SQL so the import run can be audited.
        API_SERVICE.config["SQLALCHEMY_ECHO"] = True
        data_dir = os.path.join(APP_ROOT, "scraping", "data")
        Importer(data_dir, models.db).run()
def main():
    """Execute the SQL dump at a hard-coded path against the app database.

    NOTE(review): the dump path is machine-specific — consider taking it
    as a CLI argument or environment variable.
    """
    # Use the context manager instead of manual push()/pop() so the app
    # context is torn down even when reading or executing the SQL raises;
    # the original leaked the pushed context on any exception.
    with API_SERVICE.app_context():
        with open("/home/noelb/veggie.sql", "r") as sql_file:
            db.engine.execute(sql_file.read())
def cmd_build(args):  # pylint: disable=unused-argument
    """Build the index_cache_files from the database."""
    with API_SERVICE.app_context():
        for model in (Recipe, Ingredient, GroceryItem, Tag):
            table = model.__tablename__
            print("Building index for {}...\n".format(table))
            target = os.path.join("search_indices", table + "_index.p")
            # Make sure the search_indices directory exists first.
            os.makedirs(os.path.dirname(target), exist_ok=True)
            build_index(model, target)
def database_connect(callback):
    """Set up a database connection, pass the database object to
    *callback*, then tear down the connection.

    The app context is popped in a ``finally`` block so it is not leaked
    when the callback raises (the original skipped ``pop()`` on error).
    """
    ctx = API_SERVICE.app_context()
    ctx.push()
    try:
        callback(db)
    finally:
        ctx.pop()
def cmd_search(args):
    """Perform a test search.

    Expects ``args`` as ``[page, page_size, query_term, ...]``.

    Raises:
        ValueError: if fewer than three arguments are supplied, or the
            page/page_size arguments are not integers.
    """
    # ``assert`` is stripped under ``python -O``; validate explicitly.
    if len(args) < 3:
        raise ValueError("expected: <page> <page_size> <query terms...>")
    page = int(args[0])
    page_size = int(args[1])
    query = " ".join(args[2:])
    with API_SERVICE.app_context():
        results, count = page_search(query, page, page_size)
        print("\n{} results found.\n".format(count))
        for result in results:
            result.contextualize()
        for idx, result in enumerate(results):
            print("{:3d}: {}".format(idx, result))
            for context in result.contexts:
                print("\t" + context)
def get_connection_context():
    """Push and return a fresh application context.

    The caller is responsible for calling ``pop()`` on the returned
    context when done with it.
    """
    app_ctx = API_SERVICE.app_context()
    app_ctx.push()
    return app_ctx