def more_books(ctx, rd):
    """
    Get more results from the specified search-query, which must be specified
    as JSON in the request body.

    Optional: ?num=50&library_id=<default library>
    """
    db, _library_id = get_library_data(ctx, rd.query)[:2]

    # Page size comes from the query string; reject anything non-integral.
    try:
        num = int(rd.query.get("num", DEFAULT_NUMBER_OF_BOOKS))
    except Exception:
        raise HTTPNotFound("Invalid number of books: %r" % rd.query.get("num"))

    # The search parameters arrive as a JSON object in the request body.
    # Keys are read in a fixed order so a KeyError names the first missing one.
    try:
        payload = load_json_file(rd.request_body_file)
        query = payload["query"]
        offset = payload["offset"]
        sorts = payload["sort"]
        orders = payload["sort_order"]
    except KeyError as err:
        raise HTTPBadRequest("Search query missing key: %s" % as_unicode(err))
    except Exception as err:
        raise HTTPBadRequest("Invalid query: %s" % as_unicode(err))

    ans = {}
    with db.safe_read_lock:
        result = search_result(ctx, rd, db, query, num, offset, sorts, orders)
        ans["search_result"] = result
        metadata = {}
        ans["metadata"] = metadata
        # Attach serialized metadata for every book in this result page;
        # books that fail to serialize are simply omitted.
        for book_id in result["book_ids"]:
            serialized = book_as_json(db, book_id)
            if serialized is not None:
                metadata[book_id] = serialized
    return ans
def more_books(ctx, rd):
    '''
    Get more results from the specified search-query, which must be specified
    as JSON in the request body.

    Optional: ?num=50&library_id=<default library>
    '''
    db, library_id = get_library_data(ctx, rd.query)[:2]
    try:
        num = int(rd.query.get('num', DEFAULT_NUMBER_OF_BOOKS))
    except Exception:
        # num must be an integer; anything else is treated as a bad URL.
        raise HTTPNotFound('Invalid number of books: %r' % rd.query.get('num'))
    try:
        search_query = load_json_file(rd.request_body_file)
        # Access keys in this exact order so the KeyError message reports
        # the first missing one.
        query, offset, sorts, orders = (
            search_query['query'],
            search_query['offset'],
            search_query['sort'],
            search_query['sort_order'],
        )
    except KeyError as err:
        raise HTTPBadRequest('Search query missing key: %s' % as_unicode(err))
    except Exception as err:
        raise HTTPBadRequest('Invalid query: %s' % as_unicode(err))
    ans = {}
    with db.safe_read_lock:
        ans['search_result'] = search_result(
            ctx, rd, db, query, num, offset, sorts, orders)
        mdata = {}
        # Collect JSON metadata for each returned book id, skipping any
        # book that cannot be serialized.
        for book_id in ans['search_result']['book_ids']:
            jsonified = book_as_json(db, book_id)
            if jsonified is None:
                continue
            mdata[book_id] = jsonified
        ans['metadata'] = mdata
    return ans
def get_translations():
    """
    Return the translation data for the current UI language, loading it from
    the bundled locales zip on first use.

    The result is cached in the module-global ``_cached_translations``:
    ``False`` means "looked, nothing available for this language", so the zip
    is only ever opened once per process.
    """
    global _cached_translations
    # Fast path: cache already populated (either real data or False).
    if _cached_translations is not None:
        return _cached_translations
    _cached_translations = False
    with zipfile.ZipFile(P('content-server/locales.zip', allow_user_override=False), 'r') as zf:
        available = set(zf.namelist())
        lang = get_lang()
        if lang not in available:
            # Fall back from e.g. 'pt_BR' to 'pt' when no exact match exists.
            base = lang.split('_')[0].lower()
            if base in available:
                lang = base
        if lang in available:
            _cached_translations = load_json_file(zf.open(lang, 'r'))
    return _cached_translations
def set_session_data(ctx, rd):
    '''
    Store session data persistently so that it is propagated automatically to
    new logged in clients
    '''
    # Anonymous requests have nothing to persist.
    if not rd.username:
        return
    try:
        new_data = load_json_file(rd.request_body_file)
        if not isinstance(new_data, dict):
            raise Exception('session data must be a dict')
    except Exception as err:
        raise HTTPBadRequest('Invalid data: %s' % as_unicode(err))
    # Merge into the existing session rather than replacing it wholesale.
    session = ctx.user_manager.get_session_data(rd.username)
    session.update(new_data)
    ctx.user_manager.set_session_data(rd.username, session)
def __init__(self):
    """Load the collection configuration from ``collect.json``.

    Reads the sheet definitions and the optional username filter into
    instance attributes.

    Raises:
        FileNotFoundError: If ``credentials.json`` or ``collect.json`` is
            missing from the working directory.
        SyntaxError: If the configuration has no ``sheets`` entry.
    """
    # Validate required files with explicit raises, not assert: asserts are
    # stripped under ``python -O`` and would silently skip these checks.
    if not path.exists("credentials.json"):
        raise FileNotFoundError(
            "Credentials are missing. Please add or rename your"
            " credentials file to credentials.json"
        )
    if not path.exists("collect.json"):
        raise FileNotFoundError("The collect.json file is missing.")

    # Open the configuration file and parse all options.
    with open("collect.json", encoding="utf-8") as fp:
        options: Dict[str, Any] = load_json_file(fp)

    # A sheets section is mandatory (and must be non-empty).
    if not options.get("sheets"):
        raise SyntaxError("Collection JSON does not have a sheets option.")

    # Store all the sheets information.
    self.sheets: List[Dict[str, Any]] = options["sheets"]
    # Store wanted usernames; an absent/empty filter means "no filtering".
    self.filter = options.get("filter") or []
# coding: utf-8
from io import open
from json import load as load_json_file

# Read the service configuration once at import time.  A context manager
# guarantees the handle is closed even if JSON parsing raises (the original
# code leaked the handle on a parse error), and JSON files are expected to
# be UTF-8, so the encoding is stated explicitly instead of relying on the
# locale default.
with open("config.json", "r", encoding="utf-8") as f:
    cfg = load_json_file(f)

# Bind address is fixed; only port and uid come from the config file.
host = "127.0.0.1"
port = int(cfg['port'])
uid = str(cfg['uid'])
else: analysiser = Analyzer.make_instance(r_dir, meta, log) analysiser.insert_into(conn) # Main function if __name__ == '__main__': from psycopg2 import connect as db_connect from json import load as load_json_file from sys import stderr from os import environ from data.connect import pg_connection from cli.config import setup_logging from cli.populate_args import parser as arg_parser args = arg_parser.parse_args() setup_logging(args) l.debug("reading data from %s", args.data_folder) try: with open(args.config_file, 'r') as f: config_meta = load_json_file(f) with pg_connection(args) as conn: run_analysis(r_dir=args.data_folder, meta=config_meta, log=True, conn=conn) except errors.PopulationError as e: l.exception("%s", e)