def test_delete_doc_find_by_id(self):
    """After deleting a freshly stored doc, find_by_id must return None."""
    stored = self.test_db.update_doc(JsonObject().to_json_map())
    logging.debug(stored)
    stored_id = stored["_id"]
    self.test_db.delete_doc(stored_id)
    self.assertEqual(self.test_db.find_by_id(stored_id), None)
def get_panchangam(city, year, script, computeLagnams=False):
    """Return the panchangam for (city, year), preferring cached JSON under ~/Documents.

    :param city: city object; only ``city.name`` is used in the cache file names.
    :param year: year to compute/load.
    :param script: script passed through to ``Panchangam``.
    :param computeLagnams: when True, compute lagna details and cache to the
        "-detailed" file; the plain cache file is then ignored for loading.
    :return: a ``Panchangam`` (or whatever ``JsonObject.read_from_file`` yields).
    """
    fname_det = os.path.expanduser('~/Documents/%s-%s-detailed.json' % (city.name, year))
    fname = os.path.expanduser('~/Documents/%s-%s.json' % (city.name, year))
    if os.path.isfile(fname) and not computeLagnams:
        sys.stderr.write('Loaded pre-computed panchangam from %s.\n' % fname)
        return JsonObject.read_from_file(filename=fname)
    elif os.path.isfile(fname_det):
        # Load pickle, do not compute!
        # Bug fix: the message previously reported `fname` even though
        # `fname_det` is the file actually loaded below.
        sys.stderr.write('Loaded pre-computed panchangam from %s.\n' % fname_det)
        return JsonObject.read_from_file(filename=fname_det)
    else:
        sys.stderr.write(
            'No precomputed data available. Computing panchangam... ')
        sys.stderr.flush()
        panchangam = Panchangam(city=city, year=year, script=script)
        panchangam.compute_angams(computeLagnams=computeLagnams)
        panchangam.assignLunarMonths()
        sys.stderr.write('done.\n')
        sys.stderr.write('Writing computed panchangam to %s...' % fname)
        try:
            if computeLagnams:
                panchangam.dump_to_file(filename=fname_det)
            else:
                panchangam.dump_to_file(filename=fname)
        except EnvironmentError:
            # Caching is best-effort: log and still return the computed object.
            logging.warning("Not able to save.")
            logging.error(traceback.format_exc())
        return panchangam
def test_json_serialization(caplog):
    """Round-trip a DummyClass through JSON and TOML files and compare fields."""
    caplog.set_level(logging.DEBUG)

    json_path = os.path.join(TEST_DATA_DIR, "test_round_trip_serialization.json.local")
    original = DummyClass.from_details(field1=21, field2={"2.1": DummyClass2(field1=1)})
    original.dump_to_file(filename=json_path)
    restored = JsonObject.read_from_file(
        filename=json_path, name_to_json_class_index_extra=json_class_index)
    assert original.field1 == restored.field1
    assert original.__str__() == restored.__str__()
    # Nested project objects must come back as their declared class, not dicts.
    assert isinstance(restored.field2["2.1"], DummyClass2)

    toml_path = os.path.join(TEST_DATA_DIR, "test_round_trip_serialization.toml.local")
    original = DummyClass.from_details(field1=21, field2={"2.1": DummyClass2(field1=1)})
    original.dump_to_file(filename=toml_path)
    restored = JsonObject.read_from_file(
        filename=toml_path, name_to_json_class_index_extra=json_class_index)
    assert original.field1 == restored.field1
    assert original.__str__() == restored.__str__()
def test_update_doc(self):
    """update_doc persists added keys, and the doc stays retrievable by _id."""
    doc = self.test_db.update_doc(JsonObject().to_json_map())
    logging.debug(doc)
    doc["xyz"] = "xyzvalue"
    doc = self.test_db.update_doc(doc)
    logging.debug(doc)
    self.assertNotEqual(doc, None)
    self.assertEqual("xyz" in doc, True)
    refetched = self.test_db.find_by_id(doc["_id"])
    self.assertNotEqual(refetched, None)
def test_return_none_by_default(caplog):
    """Missing fields raise AttributeError unless _default_to_none=True is passed."""
    caplog.set_level(logging.DEBUG)
    tmp_file_path = os.path.join(TEST_DATA_DIR, "test_none_value.json")
    # With _default_to_none=False, reading an absent attribute must raise.
    with pytest.raises(AttributeError):
        test_obj_2 = JsonObject.read_from_file(
            filename=tmp_file_path,
            name_to_json_class_index_extra=json_class_index,
            _default_to_none=False)
        # The attribute access below is the statement expected to raise;
        # the comparison itself never runs in the passing case.
        assert test_obj_2.field2 == None
    # With _default_to_none=True, the absent field silently reads as None.
    test_obj_2 = JsonObject.read_from_file(
        filename=tmp_file_path,
        name_to_json_class_index_extra=json_class_index,
        _default_to_none=True)
    assert test_obj_2.field2 == None
def from_path(cls, path, db_interface):
    """Fetch the book portion stored under `path`, or None when absent."""
    raw_dict = db_interface.find_one(find_filter={"path": path})
    if raw_dict is None:
        return None
    return JsonObject.make_from_dict(raw_dict)
def from_path(cls, path, my_collection):
    """Fetch the book portion stored under `path` from `my_collection`, or None."""
    raw_dict = my_collection.find_one(find_filter={"path": path})
    if raw_dict is None:
        return None
    return JsonObject.make_from_dict(raw_dict)
def get_panchangam(city, year, script, fmt='hh:mm', compute_lagnams=False,
                   precomputed_json_dir="~/Documents",
                   ayanamsha_id=swe.SIDM_TRUE_CITRA, allow_precomputed=True):
    """Return the annual panchangam for (city, year), using a JSON cache when allowed.

    :param city: city object; ``city.name`` is used in cache file names.
    :param year: year to compute/load.
    :param script: output script passed through to ``periodical.Panchangam``.
    :param fmt: time format string, e.g. 'hh:mm'.
    :param compute_lagnams: when True, compute lagna details and cache to the
        "-detailed" file.
    :param precomputed_json_dir: directory holding/receiving cache files.
    :param ayanamsha_id: swiss-ephemeris ayanamsha identifier.
    :param allow_precomputed: when False, always recompute, ignoring caches.
    """
    fname_det = os.path.expanduser('%s/%s-%s-detailed.json' % (precomputed_json_dir, city.name, year))
    fname = os.path.expanduser('%s/%s-%s.json' % (precomputed_json_dir, city.name, year))
    if os.path.isfile(fname) and not compute_lagnams and allow_precomputed:
        sys.stderr.write('Loaded pre-computed panchangam from %s.\n' % fname)
        return JsonObject.read_from_file(filename=fname)
    elif os.path.isfile(fname_det) and allow_precomputed:
        # Load pickle, do not compute!
        # Bug fix: report the file actually loaded (fname_det), not fname.
        sys.stderr.write('Loaded pre-computed panchangam from %s.\n' % fname_det)
        return JsonObject.read_from_file(filename=fname_det)
    else:
        sys.stderr.write(
            'No precomputed data available. Computing panchangam...\n')
        panchangam = periodical.Panchangam(city=city,
                                           start_date='%d-01-01' % year,
                                           end_date='%d-12-31' % year,
                                           script=script, fmt=fmt,
                                           compute_lagnams=compute_lagnams,
                                           ayanamsha_id=ayanamsha_id)
        panchangam.year = year
        sys.stderr.write('Writing computed panchangam to %s...\n' % fname)
        try:
            if compute_lagnams:
                panchangam.dump_to_file(filename=fname_det)
            else:
                panchangam.dump_to_file(filename=fname)
        except EnvironmentError:
            # Caching is best-effort: log and continue.
            logging.warning("Not able to save.")
            logging.error(traceback.format_exc())
        # Save without festival details
        # Festival data may be updated more frequently and a precomputed panchangam may go out of sync. Hence we keep this method separate.
        panchangam.update_festival_details()
        return panchangam
def is_user_admin():
    """Return True iff the session user exists and has users/admin permission."""
    session_user_dict = session.get('user', None)
    user = JsonObject.make_from_dict(session_user_dict)
    logging.debug(session.get('user', None))
    logging.debug(session)
    logging.debug(user)
    return user is not None and user.check_permission(service="users", action="admin")
def get(self):
    """ Get current user details. PS: Login with <a href="v1/oauth_login/google" target="new">google oauth in a new tab</a>. """
    session_user = JsonObject.make_from_dict(session.get('user', None))
    # No session user means the caller is not logged in.
    if session_user is not None:
        return [session_user.to_json_map()], 200
    return {"message": "No user found, not authorized!"}, 401
def test_serialization_omit_nones(caplog):
    """Fields holding None must be dropped from the serialized JSON map."""
    caplog.set_level(logging.DEBUG)
    source_path = os.path.join(TEST_DATA_DIR, "test_none_value.json")
    obj = JsonObject.read_from_file(
        filename=source_path,
        name_to_json_class_index_extra=json_class_index,
        _default_to_none=True)
    obj.field3 = None
    serialized = obj.to_json_map()
    assert "field3" not in serialized
def legacy_dict_to_HinduCalendarEventOld_list(old_db_file, new_db_file):
    """Convert a legacy event-dict JSON file into a list-of-json-maps file."""
    with open(old_db_file, 'r') as in_file:
        legacy_event_dict = json.load(in_file)
    # Avoid shadowing the builtin `id` that the original loop variable used.
    old_style_events = [
        HinduCalendarEventOld.from_legacy_event(event_id, legacy_event)
        for event_id, legacy_event in legacy_event_dict.items()
    ]
    json_map_list = JsonObject.get_json_map_list(old_style_events)
    with open(new_db_file, 'w') as out_file:
        json.dump(json_map_list, out_file, indent=4, sort_keys=True)
def main(argv):
    """Parse CLI options (-a/--action) and run the matching db action."""
    def usage():
        logging.info("run.py [--action dump]...")
        exit(1)

    params = JsonObject()
    try:
        opts, args = getopt.getopt(argv, "ha:", ["action="])
        for opt, arg in opts:
            if opt == '-h':
                usage()
            elif opt in ("-a", "--action"):
                params.action = arg
    except getopt.GetoptError:
        usage()
    # Dispatch table replaces the original if/elif chain; unknown actions are ignored.
    handlers = {"dump": dump_db, "import": import_db}
    handler = handlers.get(params.action)
    if handler is not None:
        handler()
def test_find_one(self):
    """find_one locates a stored doc by field value, ignoring _id differences."""
    doc = JsonObject()
    doc.xyz = "xyzvalue"
    stored = self.test_db.update_doc(doc.to_json_map())
    logging.debug(stored)
    found = self.test_db.find_one(find_filter={"xyz": "xyzvalue"})
    stored_obj = JsonObject.make_from_dict(stored)
    found_obj = JsonObject.make_from_dict(found)
    self.assertTrue(stored_obj.equals_ignore_id(found_obj))
def test_panchanga_chennai_2019():
    """Generated Chennai-2019 ICS calendar must match the checked-in reference."""
    panchangam = JsonObject.read_from_file(
        filename=os.path.join(TEST_DATA_PATH, 'Chennai-2019.json'))
    panchangam.update_festival_details()
    reference_ics = os.path.join(TEST_DATA_PATH, 'Chennai-2019-devanagari.ics')
    generated_ics = os.path.join(TEST_DATA_PATH, 'Chennai-2019-devanagari.ics.local')
    ics_calendar = compute_calendar(panchangam, all_tags=True)
    write_to_file(ics_calendar, generated_ics)
    with open(reference_ics) as expected, open(generated_ics) as actual:
        assert expected.read() == actual.read()
def test_panchanga_chennai():
    """Freshly computed Chennai-2018 panchangam must match the stored reference."""
    expected = JsonObject.read_from_file(
        filename=os.path.join(TEST_DATA_PATH, 'Chennai-2018.json'))
    city = City('Chennai', "13:05:24", "80:16:12", "Asia/Calcutta")
    computed = jyotisha.panchangam.spatio_temporal.annual.Panchangam(
        city=city, year=2018, script=sanscript.DEVANAGARI,
        ayanamsha_id=swe.SIDM_LAHIRI, compute_lagnams=False)
    if str(computed) != str(expected):
        # Dump the mismatching result for offline diffing before failing.
        computed.dump_to_file(filename=os.path.join(
            TEST_DATA_PATH, 'Chennai-2018-actual.json.local'))
    assert str(computed) == str(expected)
def delete(self, id):
    """Delete a user. PS: Login with <a href="v1/oauth_login/google" target="new">google oauth in a new tab</a>. """
    matching_user = get_db().find_by_id(id=id)
    if matching_user is None:
        return {"message": "User not found!"}, 404
    session_user = JsonObject.make_from_dict(session.get('user', None))
    logging.info(str(request.json))
    # Admins may delete anyone; others only themselves.
    if not is_user_admin():
        if session_user is None or session_user._id != matching_user._id:
            return {"message": "Unauthorized!"}, 401
    matching_user.delete_in_collection(db_interface=get_db())
    return {}, 200
def post(self, id):
    """Modify a user. PS: Login with <a href="v1/oauth_login/google" target="new">google oauth in a new tab</a>. """
    matching_user = get_db().find_by_id(id=id)
    if matching_user is None:
        return {"message": "User not found!"}, 404
    session_user = JsonObject.make_from_dict(session.get('user', None))
    logging.info(str(request.json))
    # Only an admin, or the user themself, may modify this record.
    if not is_user_admin() and (session_user is None or session_user._id != matching_user._id):
        return {"message": "Unauthorized!"}, 401
    # Deserialize the request body into a project object; it must be a User.
    user = common_data_containers.JsonObject.make_from_dict(request.json)
    if not isinstance(user, User):
        return {
            "message": "Input JSON object does not conform to User.schema: " + User.schema
        }, 417
    # Check to see if there are other entries in the database with identical authentication info.
    matching_users = get_db().get_matching_users_by_auth_infos(user=user)
    if len(matching_users) > 1:
        logging.warning(str(matching_users))
        return {
            "message": "Another object with matching info already exists. Please delete it first.",
            "another_matching_user": str(matching_users)
        }, 409
    try:
        user.update_collection(db_interface=get_db())
    except ValidationError as e:
        # Schema validation failed during persistence; report the traceback to the client.
        import traceback
        message = {
            "message": "Some input object does not fit the schema.",
            "exception_dump": (traceback.format_exc())
        }
        return message, 417
    return user.to_json_map(), 200
def get(self, id):
    """Just get the user info. PS: Login with <a href="v1/oauth_login/google" target="new">google oauth in a new tab</a>. :param id: String :return: A User object. """
    matching_user = get_db().find_by_id(id=id)
    if matching_user is None:
        return {"message": "User not found!"}, 404
    session_user = JsonObject.make_from_dict(session.get('user', None))
    # Non-admins may only read their own record.
    if not is_user_admin():
        if session_user is None or session_user._id != matching_user._id:
            return {"message": "User is not an admin!"}, 401
    return matching_user, 200
def test_panchanga_chennai_18():
    """Chennai-2018 computation must match the reference at 4-decimal precision."""
    expected = JsonObject.read_from_file(
        filename=os.path.join(TEST_DATA_PATH, 'Chennai-2018.json'))
    city = City('Chennai', "13:05:24", "80:16:12", "Asia/Calcutta")
    computed = annual.Panchangam(city=city, year=2018, script=sanscript.DEVANAGARI,
                                 ayanamsha_id=swe.SIDM_LAHIRI, compute_lagnams=False)
    if computed.__str__(floating_point_precision=4) != expected.__str__(floating_point_precision=4):
        # On mismatch, dump both sides for offline diffing before failing.
        computed.dump_to_file(
            filename=os.path.join(TEST_DATA_PATH, 'Chennai-2018-actual.json.local'),
            floating_point_precision=4)
        expected.dump_to_file(
            filename=os.path.join(TEST_DATA_PATH, 'Chennai-2018-expected.json.local'),
            floating_point_precision=4)
    assert str(computed) == str(expected)
def test_panchanga_chennai_2018():
    """Generated daily TeX calendar for Chennai 2018 must match the reference.

    Fix: the template file and the output stream were previously opened and
    never closed; both are now managed with context managers.
    """
    panchangam_2018 = JsonObject.read_from_file(
        filename=os.path.join(TEST_DATA_PATH, 'Chennai-2018.json'))
    panchangam_2018.update_festival_details()
    orig_tex_file = os.path.join(TEST_DATA_PATH, 'daily-cal-2018-Chennai-deva.tex')
    template_path = os.path.join(
        CODE_ROOT, 'jyotisha/panchangam/data/templates/daily_cal_template.tex')
    current_tex_output = os.path.join(TEST_DATA_PATH,
                                      'daily-cal-2018-Chennai-deva.tex.local')
    with open(template_path) as daily_template_file, \
            open(current_tex_output, 'w') as output_stream:
        writeDailyTeX(panchangam_2018, daily_template_file,
                      compute_lagnams=False, output_stream=output_stream)
    with open(orig_tex_file) as orig_tex, open(current_tex_output) as current_tex:
        assert orig_tex.read() == current_tex.read()
def json_compare(actual_object, expected_content_path):
    """Assert that actual_object approximately equals the JSON at expected_content_path.

    If the expected file is missing (deliberately deleted as obsolete), the actual
    object is dumped there to seed future runs. On comparison failure, the actual
    object is dumped next to the expected file and the error is re-raised.
    """
    if not os.path.exists(expected_content_path):
        logging.warning("File must have been deliberately deleted as obsolete. So, will dump a new file for future tests.")
        actual_object.dump_to_file(filename=expected_content_path, floating_point_precision=4)
        return
    expected = JsonObject.read_from_file(filename=expected_content_path)
    try:
        # The below would be actually slower (1min+), and leads to bug output dump in case of failure.
        # assert str_actual == str_expected
        # The below is better, but still slower (35s and leads to bug output dump in case of failure.
        # assert actual == expected
        # The below is faster - 20s and produces concise difference.
        collection_helper.assert_approx_equals(x=actual_object, y=expected, floating_point_precision=4)
    except Exception:
        # Bug fix: was a bare `except:`, which also intercepted SystemExit and
        # KeyboardInterrupt; AssertionError (the expected failure) is still caught.
        # firefox does not identify files not ending with .json as json. Hence not naming .json.local.
        actual_content_path = expected_content_path.replace(".json", "_actual.local.json")
        actual_object.dump_to_file(filename=actual_content_path, floating_point_precision=4)
        traceback.print_exc()
        raise
def test_panchanga_orinda():
    """Orinda-2019 computation must match the reference at 4-decimal precision."""
    expected = JsonObject.read_from_file(
        filename=os.path.join(TEST_DATA_PATH, 'Orinda-2019.json'))
    city = City('Orinda', '37:51:38', '-122:10:59', 'America/Los_Angeles')
    computed = annual.Panchangam(city=city, year=2019, script=sanscript.DEVANAGARI,
                                 ayanamsha_id=swe.SIDM_LAHIRI, compute_lagnams=False)
    computed_str = computed.__str__(floating_point_precision=4)
    expected_str = expected.__str__(floating_point_precision=4)
    if computed_str != expected_str:
        # On mismatch, dump both sides for offline diffing before failing.
        computed.dump_to_file(
            filename=os.path.join(TEST_DATA_PATH, 'Orinda-2019-actual.json.local'),
            floating_point_precision=4)
        expected.dump_to_file(
            filename=os.path.join(TEST_DATA_PATH, 'Orinda-2019-expected.json.local'),
            floating_point_precision=4)
    assert computed_str == expected_str
def post(self, id):
    """Modify a user. PS: Login with <a href="v1/oauth_login/google" target="new">google oauth in a new tab</a>. """
    matching_user = get_db().find_by_id(id=id)
    if matching_user is None:
        return {"message": "User not found!"}, 404
    session_user = JsonObject.make_from_dict(session.get('user', None))
    logging.info(str(request.json))
    # Only an admin, or the user themself, may modify this record.
    if not is_user_admin() and (session_user is None or session_user._id != matching_user._id):
        return {"message": "Unauthorized!"}, 401
    # Deserialize the request body into a project object; it must be a User.
    user = common_data_containers.JsonObject.make_from_dict(request.json)
    if not isinstance(user, User):
        return {"message": "Input JSON object does not conform to User.schema: " + User.schema}, 417
    # Check to see if there are other entries in the database with identical authentication info.
    matching_users = get_db().get_matching_users_by_auth_infos(user=user)
    if len(matching_users) > 1:
        logging.warning(str(matching_users))
        return {"message": "Another object with matching info already exists. Please delete it first.",
                "another_matching_user": str(matching_users)
                }, 409
    try:
        user.update_collection(db_interface=get_db())
    except ValidationError as e:
        # Schema validation failed during persistence; report the traceback to the client.
        import traceback
        message = {
            "message": "Some input object does not fit the schema.",
            "exception_dump": (traceback.format_exc())
        }
        return message, 417
    return user.to_json_map(), 200
def test_panchanga_chennai_19():
    """Freshly computed Chennai-2019 panchangam (with festivals) must match reference."""
    expected = JsonObject.read_from_file(
        filename=os.path.join(TEST_DATA_PATH, 'Chennai-2019.json'))
    expected.update_festival_details()
    city = City('Chennai', "13:05:24", "80:16:12", "Asia/Calcutta")
    computed = annual.get_panchangam(city=city, year=2019, script=sanscript.DEVANAGARI,
                                     ayanamsha_id=swe.SIDM_TRUE_CITRA,
                                     compute_lagnams=False, allow_precomputed=False)
    computed_str = computed.__str__(floating_point_precision=4)
    expected_str = expected.__str__(floating_point_precision=4)
    if computed_str != expected_str:
        # On mismatch, dump both sides for offline diffing before failing.
        computed.dump_to_file(
            filename=os.path.join(TEST_DATA_PATH, 'Chennai-2019-actual.json.local'),
            floating_point_precision=4)
        expected.dump_to_file(
            filename=os.path.join(TEST_DATA_PATH, 'Chennai-2019-expected.json.local'),
            floating_point_precision=4)
    assert computed_str == expected_str
from sanskrit_data.schema.common import JsonObject

# Add parent directory to PYTHONPATH, so that vedavaapi_py_api module can be found.
sys.path.append(os.path.dirname(os.path.dirname(__file__)))

from vedavaapi import common, ullekhanam
from sanskrit_data import file_helper
from vedavaapi.common.flask_helper import app

logging.basicConfig(
    level=logging.INFO,
    format="%(levelname)s: %(asctime)s {%(filename)s:%(lineno)d}: %(message)s "
)

# Default run parameters; callers may override fields before serving.
params = JsonObject()
params.set_from_dict({
    'debug': False,
    'port': 9000,
    'reset': False
})


def start_service(name):
    """Import vedavaapi.<name>, instantiate its Vedavaapi<Name> class and register it.

    :param name: short service name, e.g. "ullekhanam"; the service class is
        expected to be named "Vedavaapi" + capitalize(name) inside that module.
    """
    logging.info("Starting vedavaapi.{} service ...".format(name))
    svc_cls_name = "Vedavaapi" + str.capitalize(name)
    _tmp = __import__('vedavaapi.{}'.format(name), globals(), locals(), [svc_cls_name])
    # Fix: use getattr instead of eval('_tmp.' + name) -- same attribute lookup,
    # without evaluating a dynamically built code string.
    svc_cls = getattr(_tmp, svc_cls_name)
    svc_conf = common.server_config[name] if name in common.server_config else {}
    svc_obj = svc_cls(name, svc_conf)
    common.VedavaapiServices.register(name, svc_obj)
def find(self, find_filter):
    """Yield every stored dict whose JsonObject form matches `find_filter`.

    :param find_filter: filter dict understood by ``JsonObject.match_filter``.
    """
    # Fix: the old loop used enumerate() and discarded the index it produced.
    for key in self.db:
        candidate = self.db[key]
        if JsonObject.make_from_dict(candidate).match_filter(find_filter=find_filter):
            yield candidate
def get_user():
    """Return the session's logged-in user as a JsonObject (None-safe)."""
    from flask import session
    from sanskrit_data.schema.common import JsonObject
    session_user_dict = session.get('user', None)
    return JsonObject.make_from_dict(session_user_dict)
def get_user():
    """Return the session's logged-in user as a JsonObject (None-safe)."""
    from flask import session
    session_user_dict = session.get('user', None)
    return JsonObject.make_from_dict(session_user_dict)
from sanskrit_data.schema.common import JsonObject # Add parent directory to PYTHONPATH, so that vedavaapi_py_api module can be found. sys.path.append(os.path.dirname(os.path.dirname(__file__))) print(sys.path) from vedavaapi_py_api import common, textract, ullekhanam from sanskrit_data import file_helper from vedavaapi_py_api.common.flask_helper import app logging.basicConfig( level=logging.DEBUG, format="%(levelname)s: %(asctime)s {%(filename)s:%(lineno)d}: %(message)s " ) params = JsonObject() params.set_from_dict({ 'debug': False, 'port': 9000, }) def setup_app(): common.set_configuration(config_file_name=os.path.join( os.path.dirname(__file__), 'server_config_local.json')) server_config = common.server_config client = None if server_config["db"]["db_type"] == "couchdb": from sanskrit_data.db.implementations import couchdb
def main(argv):
    """CLI entry point: import books from the given paths into a vedavaapi server.

    Options: -h (help), -r/--reset, -u/--auth user:passwd, -i/--repo_id,
    -s/--serverurl. Remaining positional args are book paths to import.
    """
    parms = DotDict({
        'reset': False,
        'dbgFlag': True,
        'server_baseurl': '',
        'auth': DotDict({
            'user': '******',
            'passwd': '@utoDump1'
        }),
        'repo_id': 'vedavaapi_test'
    })
    try:
        opts, args = getopt.getopt(argv, "hru:i:s:", ["url="])
    except getopt.GetoptError:
        # NOTE(review): this logs the exception *class*, not the caught instance,
        # and passes it as a stray positional arg to logging.info.
        logging.info("Error in command line: ", getopt.GetoptError)
        usage()
    for opt, arg in opts:
        if opt == '-h':
            usage()
        elif opt in ("-r", "--reset"):
            parms.reset = True
        elif opt in ("-u", "--auth"):
            # Split "user:passwd" into the auth DotDict.
            parms.auth = DotDict(dict(zip(('user', 'passwd'), arg.split(':'))))
            print(parms.auth)
        elif opt in ("-i", "--repo_id"):
            parms.repo_id = arg
        elif opt in ("-s", "--serverurl"):
            logging.info("server url = ", arg)
            parms.server_baseurl = arg
        else:
            logging.info("Unknown parameter: ", opt)
            usage()
    if not parms.server_baseurl:
        logging.info("Error: Supply server URL via -s.")
        usage()
    if not args:
        logging.info("Error: Missing book path to import...")
        usage()
    vvclient = VedavaapiClient(parms.server_baseurl, parms.repo_id)
    if not vvclient.authenticate(parms.auth):
        sys.exit(1)
    for path in args:
        for book in import_books(path):
            # Collect page image file handles to upload alongside the book JSON.
            # NOTE(review): these handles are never explicitly closed.
            pages = []
            for page in book.children:
                pages.append(
                    ('in_files', open(os.path.join(book.content.path, page.content.path), 'rb')))
            #print_dict(book.content.to_json_map())
            r = vvclient.post(
                "ullekhanam/v1/books",
                parms={'book_json': json.dumps(book.content.to_json_map())},
                files=pages)
            if not r:
                sys.exit(1)
            book_json = json.loads(r.text)
            book = JsonObject.make_from_dict(book_json["content"])
            #print_dict(book_json['content'])
            # Fetch the uploaded book's entity tree one level deep.
            url = "ullekhanam/v1/entities/{}".format(book._id)
            r = vvclient.get(url, {'depth': 1})
            if not r:
                logging.error("Error: invoking {}".format(url))
                continue
            book_info = r.json()
            # Walk the returned pages and dump each page's annotations.
            for p in book_info['children']:
                logging.info("page id " + p['content']['title'])
                page_id = p['content']['_id']
                url = "ullekhanam/v1/pages/{}/annotations".format(page_id)
                r = vvclient.get(url)
                if not r:
                    sys.exit(1)
                print_dict(r.json())
def main(argv):
    """CLI entry point: import books from the given paths into an ullekhanam db.

    Options: -h (help), -r/--reset, -u/--auth user:passwd, -d/--db,
    -s/--serverurl. Remaining positional args are book paths to import.
    """
    Parms = DotDict({
        'reset': False,
        'dbgFlag': True,
        'server_baseurl': '',
        'auth': DotDict({'user': 'vedavaapiAdmin', 'passwd': '@utoDump1'}),
        'dbname': 'ullekhanam_test'
    })
    try:
        opts, args = getopt.getopt(argv, "hru:d:s:", ["url="])
    except getopt.GetoptError:
        logging.info("Error in command line: ", getopt.GetoptError)
        usage()
    for opt, arg in opts:
        if opt == '-h':
            usage()
        elif opt in ("-r", "--reset"):
            Parms.reset = True
        elif opt in ("-u", "--auth"):
            Parms.auth = DotDict(dict(zip(('user', 'passwd'), arg.split(':'))))
            # Bug fix: `print Parms.auth` was Python-2 print-statement syntax,
            # a SyntaxError under Python 3 (the sibling script uses print()).
            print(Parms.auth)
        elif opt in ("-d", "--db"):
            Parms.dbname = arg
        elif opt in ("-s", "--serverurl"):
            logging.info("server url = ", arg)
            Parms.server_baseurl = arg
        else:
            logging.info("Unknown parameter: ", opt)
            usage()
    if not Parms.server_baseurl:
        logging.info("Error: Supply server URL via -s.")
        usage()
    if not args:
        logging.info("Error: Missing book path to import...")
        usage()
    vvclient = VedavaapiClient(Parms.server_baseurl)
    if not vvclient.authenticate(Parms.auth):
        sys.exit(1)
    for path in args:
        for book in import_books(path):
            # Collect page image file handles to upload alongside the book JSON.
            pages = []
            for page in book.children:
                pages.append(('in_files',
                              open(os.path.join(book.content.path, page.content.path), 'rb')))
            #print_dict(book.content.to_json_map())
            r = vvclient.post("ullekhanam/v1/dbs/{}/books".format(Parms.dbname),
                              parms={'book_json': json.dumps(book.content.to_json_map())},
                              files=pages)
            if not r:
                sys.exit(1)
            book_json = json.loads(r.text)
            book = JsonObject.make_from_dict(book_json["content"])
            #print_dict(book_json['content'])
            # Fetch the uploaded book's entity tree one level deep.
            url = "ullekhanam/v1/dbs/{}/entities/{}".format(Parms.dbname, book._id)
            r = vvclient.get(url, {'depth': 1})
            if not r:
                logging.error("Error: invoking {}".format(url))
                continue
            book_info = r.json()
            # Walk the returned pages and dump each page's annotations.
            for p in book_info['children']:
                logging.info("page id " + p['content']['title'])
                page_id = p['content']['_id']
                url = "ullekhanam/v1/dbs/{}/pages/{}/annotations".format(Parms.dbname, page_id)
                r = vvclient.get(url)
                if not r:
                    sys.exit(1)
                print_dict(r.json())