def test_strict_mode():
    p = related.to_model(Person, dict(first_name="Grace", last_name="Hopper"))
    assert p.first_name == "Grace"

    try:
        d = dict(first_name="Grace", last_name="Hopper", extra="Failure!")
        related.to_model(Person, d)
        assert False, "Did not fail."
    except ValueError as e:
        assert e, "Error as expected."
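# A minimal sketch of the model shape that test_strict_mode() above relies on.
# Assumptions: the real Person class is defined elsewhere in the test suite,
# and the related decorators accept strict=True (which makes to_model raise
# ValueError for keys that have no matching field, as the test expects).
import related


@related.immutable(strict=True)
class Person(object):
    first_name = related.StringField()
    last_name = related.StringField()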
def _upgrade_2019_04_11_specification(specification):
    old_specification = DashboardSpecification_20190411(specification).serialize()
    upgraded_specification = related.to_model(
        DashboardSpecification_20190524, old_specification
    )
    upgraded_specification.version = VERSION_2019_05_24
    return related.to_dict(upgraded_specification)
def test_store_data_from_json():
    original_json = open(JSON_FILE).read().strip()
    json_dict = from_json(original_json)
    store_data = to_model(StoreData, json_dict)

    assert store_data.name == "Acme store"
    assert store_data.id == 982
    assert store_data.data_from == datetime(2017, 12, 18, 0, 0)
    assert store_data.data_to == datetime(2017, 12, 19, 23, 59, 59)
    assert len(store_data.days) == 2
    assert isinstance(store_data.days[0].open_at, time)

    assert store_data.days[0].date == datetime(2017, 12, 18).date()
    assert store_data.days[0].open_at == time(8, 0, 0)
    assert store_data.days[0].closed_on == time(19, 0, 0)
    assert store_data.days[0].customers == int(487)
    assert store_data.days[0].day_type == DayType.NORMAL
    assert store_data.days[0].sales == float(27223.65)

    assert store_data.days[1].date == datetime(2017, 12, 19).date()
    assert store_data.days[1].open_at == time(10, 30, 0)
    assert store_data.days[1].closed_on == time(17, 30, 0)
    assert store_data.days[1].customers == int(192)
    assert store_data.days[1].day_type == DayType.HOLIDAY
    assert store_data.days[1].sales is None

    generated_json = to_json(
        store_data, suppress_empty_values=True, suppress_map_key_values=True
    ).replace(" ", "")

    print(original_json)
    print(generated_json)
    assert original_json.replace(" ", "") == generated_json
def get(cls, api, date_from, date_to):
    if not date_from:
        date_from = datetime.date.today()
    if not date_to:
        date_to = date_from

    data = {
        "DataPoczatkowa": date_from.strftime("%Y-%m-%d"),
        "DataKoncowa": date_to.strftime("%Y-%m-%d"),
    }

    j = api.post("Uczen/Frekwencje", json=data)

    attendances = sort_and_filter_dates(
        j.get("Data", {}).get("Frekwencje", []),
        date_from,
        date_to,
        sort_key="Numer",
        date_key="DzienTekst",
    )

    for attendance in attendances:
        attendance["subject"] = api.dict.get_subject_json(attendance["IdPrzedmiot"])
        attendance["category"] = api.dict.get_attendance_category_json(
            attendance["IdKategoria"]
        )
        attendance["time"] = api.dict.get_lesson_time_json(attendance["IdPoraLekcji"])
        yield to_model(cls, attendance)
def lesson_times(self):
    """List of lesson times

    :rtype: List[:class:`~vulcan._lesson.LessonTime`]
    """
    return list(map(lambda j: to_model(LessonTime, j), self.lesson_times_json))
def get(cls, api, date_from, date_to):
    if not date_from:
        date_from = datetime.date.today()
    if not date_to:
        date_to = date_from

    data = {
        "DataPoczatkowa": date_from.strftime("%Y-%m-%d"),
        "DataKoncowa": date_to.strftime("%Y-%m-%d"),
    }

    j = api.post("Uczen/PlanLekcjiZeZmianami", json=data)

    lessons = sort_and_filter_dates(
        j.get("Data", []),
        date_from,
        date_to,
        sort_key="NumerLekcji",
        date_key="DzienTekst",
    )

    for lesson in lessons:
        lesson["time"] = api.dict.get_lesson_time_json(lesson["IdPoraLekcji"])
        lesson["teacher"] = api.dict.get_teacher_json(lesson["IdPracownik"])
        lesson["subject"] = api.dict.get_subject_json(lesson["IdPrzedmiot"])
        yield to_model(cls, lesson)
def _upgrade_2019_09_05_specification(specification):
    from flask_potion import fields
    from web.server.api.aqt.api_models import GranularityResource

    granularity_converter = fields.ToOne(GranularityResource).converter
    for query in list(specification['queries'].values()):
        is_advanced_query_item = query['isAdvancedQueryItem']
        settings = specification['settings'][query['settingId']]
        grouping_settings_map = settings['groupBySettings']['groupings']
        if is_advanced_query_item:
            # find the time granularity grouping (if there is one). There
            # can only be up to one.
            granularity = None
            for aqt_group in query['advancedGroups']:
                if aqt_group['type'] == 'GRANULARITY':
                    granularity = granularity_converter(aqt_group['item'])
                    break

            if granularity:
                for group_settings in list(grouping_settings_map.values()):
                    # check if a group setting has an invalid displayValueFormat
                    if (
                        group_settings['id'] == 'timestamp'
                        and group_settings['displayValueFormat'] == 'DEFAULT'
                    ):
                        group_settings['displayValueFormat'] = granularity.id
                        break

    upgraded_specification = related.to_model(
        DashboardSpecification_20190911, specification
    )
    upgraded_specification.version = VERSION_2019_09_11
    return related.to_dict(upgraded_specification)
def get(cls, api):
    j = api.post("Uczen/Slowniki")
    data = j.get("Data")

    for i, teacher in enumerate(data["Pracownicy"]):
        data["Pracownicy"][i]["Nazwa"] = "{} {}".format(
            teacher["Imie"], teacher["Nazwisko"]
        )

    return to_model(cls, data)
def read_config_file(json_config_file_path):
    metric_configs = list()
    exit_code = 0
    try:
        logging.info(f"Reading configuration file: {json_config_file_path}")
        with open(json_config_file_path) as json_config_file:
            raw_metric_configs = json.loads(json_config_file.read())
        if not isinstance(raw_metric_configs, list):
            raise ValueError()
        for raw_metric_config in raw_metric_configs:
            try:
                metric_config = related.to_model(MetricConfig, raw_metric_config)
                metric_configs.append(metric_config)
            except Exception as e:
                logging.exception(
                    f"Exception {e.__class__.__name__} while parsing a metric config entry: '{e}'! Skipping!"
                )
    except Exception as e:
        logging.exception(
            f"Exception {e.__class__.__name__} while reading config file '{json_config_file_path}'! Skipping!"
        )
        trace = traceback.format_exc()
        logging.debug(f"Traceback: {trace}")
        exit_code = 1
    return (metric_configs, exit_code)
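# Hedged sketch of the input read_config_file() above expects: a JSON file
# whose top-level value is a list, where each entry can be deserialized into
# MetricConfig. The field names below are hypothetical placeholders.
#
# [
#   {"name": "requests_total", "type": "counter"},
#   {"name": "request_latency_seconds", "type": "histogram"}
# ]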
def test_renamed():
    obj = MyModel(
        is_for="Elise",
        criss="A",
        cross="B",
        is_not=True,
        is_list=["a", "b", "c"],
        is_dict={5: MyChild(my_int=5, my_uuid=EXAMPLE_UUID, my_float=3.14)},
        is_type=DataType.OBJECT,
        is_enum=IntEnum.a,
    )

    d = related.to_dict(obj)
    assert related.to_model(MyModel, d) == obj

    d.pop("dict")
    assert d == {
        "for": "Elise",
        "criss": "B",
        "cross": "A",
        "not": True,
        "list": ["a", "b", "c"],
        "type": "object",
        "enum": 1,
    }
def loads(cls, content, file_path=None):
    """Load YAML content into a Config object."""
    vals = related.from_yaml(content, file_path=file_path, object_pairs_hook=dict)

    # environment namespace (RIGOR_)
    env_ns = Namespace(env={
        k[6:]: v for k, v in os.environ.items() if k.startswith("RIGOR_")
    })

    # pop profiles and file_path from root config
    profiles = vals.pop("profiles", {})
    file_path = vals.pop("file_path")

    # iterate and construct profile sub-dictionaries with root info
    for name, profile in profiles.items():
        from_root_profile = copy.deepcopy(vals)
        profile = utils.nested_update(from_root_profile, profile)
        eval_update_ns(profile, env_ns)
        profiles[name] = profile

    # construct root config profile
    vals["name"] = "__root__"
    vals["file_path"] = file_path
    vals["profiles"] = profiles
    eval_update_ns(vals, env_ns)

    return related.to_model(cls, vals)
def gather_schemas(cls, suite):
    from . import utils

    schemas = []
    for name, path in suite.schemas.items():
        json = utils.download_json_with_headers(suite, path)
        schema = related.to_model(cls, json)
        schemas.append(schema)
    return schemas
def grade_categories(self):
    """List of grade categories

    :rtype: List[:class:`~vulcan._grade.GradeCategory`]
    """
    return list(map(lambda j: to_model(GradeCategory, j), self.grade_categories_json))
def _upgrade_2019_08_26_specification(specification):
    specification_keys_to_keep = [
        'options',
        'dateRanges',
        'filters',
        'items',
        'queries',
        'settings',
        'version',
    ]
    query_keys_to_move = [
        'type',
        'customFields',
        'filterModalSelections',
        'frontendSelectionsFilter',
        'isAdvancedQueryItem',
        'settingId',
    ]

    # Any query object that is not linked to a layout item should be removed
    queries = {}
    for item_id in specification['items']:
        item = specification['items'][item_id]
        size = specification['sizes'][item['sizeId']]
        layout_metadata = {
            'upperX': item['upperX'],
            'upperY': item['upperY'],
            'rows': size['rows'],
            'columns': size['columns'],
            'isLocked': item['isLocked'],
        }
        query_id = item['queryId']
        new_item = {
            'id': item_id,
            'name': item['name'],
            'layoutMetadata': layout_metadata,
        }

        query = specification['queries'][query_id]
        for key in query_keys_to_move:
            query[key] = item[key]
        query['itemId'] = item_id

        specification['items'][item_id] = new_item
        queries[query_id] = query

    new_specification = {
        key: specification[key] for key in specification_keys_to_keep
    }
    new_specification['queries'] = queries
    new_specification['text_elements'] = {}

    upgraded_specification = related.to_model(
        DashboardSpecification_20190905, new_specification
    )
    upgraded_specification.version = VERSION_2019_09_05
    return related.to_dict(upgraded_specification)
def __init__(self, certificate):
    self._session = requests.session()

    if isinstance(certificate, Certificate):
        self.cert = certificate
    else:
        self.cert = to_model(Certificate, certificate)

    self.base_url = self.cert.base_url + "mobile-api/Uczen.v3."
    self.full_url = None
    self.dict = None
    self.student = None
def get(cls, api):
    j = api.post("Uczen/Oceny")

    for grade in j.get("Data", []):
        grade["teacher"] = api.dict.get_teacher_json(grade["IdPracownikD"])
        grade["subject"] = api.dict.get_subject_json(grade["IdPrzedmiot"])
        grade["category"] = api.dict.get_grade_category_json(grade["IdKategoria"])
        yield to_model(cls, grade)
def send(cls, api, title, content, teachers):
    recipients = list()
    for teacher_repr in teachers:
        if isinstance(teacher_repr, int) or (
            isinstance(teacher_repr, str) and teacher_repr.isnumeric()
        ):
            teacher = to_model(
                Teacher, api.dict.get_teacher_json(int(teacher_repr))
            )
        elif isinstance(teacher_repr, str):
            teacher = to_model(
                Teacher, api.dict.get_teacher_by_name_json(teacher_repr)
            )
        elif isinstance(teacher_repr, dict):
            teacher = to_model(Teacher, teacher_repr)
        elif isinstance(teacher_repr, Teacher):
            teacher = teacher_repr
        else:
            continue
        recipients.append(
            {
                "LoginId": teacher.login_id,
                "Nazwa": teacher.name_reversed,
            }
        )

    if len(recipients) == 0:
        raise ValueError("There must be at least 1 correct recipient.")

    data = {
        "NadawcaWiadomosci": api.student.account_name,
        "Tytul": title,
        "Tresc": content,
        "Adresaci": recipients,
    }

    log.info("Sending a message...")
    j = api.post("Uczen/DodajWiadomosc", json=data)
    log.info("Message sent successfully!")

    return j.get("Data", {}).get("WiadomoscId")
def get(cls, api):
    j = api.post("Uczen/UwagiUcznia")

    notices = sorted(j.get("Data", []), key=itemgetter("DataWpisu"))

    for notice in notices:
        notice["type"] = api.dict.get_notice_category_json(notice["IdKategoriaUwag"])
        notice["teacher"] = api.dict.get_teacher_json(notice["IdPracownik"])
        yield to_model(cls, notice)
def load(cls: Type[T], data) -> T:
    """Deserialize provided ``data`` into an instance of ``cls``.

    The ``data`` parameter may be:

    - a JSON string
    - a dictionary
    - a handle to a file containing a JSON string

    :param data: the data to deserialize
    """
    if not data:
        return None
    if isinstance(data, dict):
        return to_model(cls, data)
    elif isinstance(data, io.IOBase):
        return to_model(cls, json.load(data))
    elif isinstance(data, str):
        return to_model(cls, json.loads(data))
    else:
        raise ValueError("Unknown data type")
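# Hedged usage sketch for load() above; "Settings" stands in for any related
# model class that exposes this classmethod (the name is hypothetical).
#
# settings = Settings.load({"host": "localhost"})      # dict
# settings = Settings.load('{"host": "localhost"}')    # JSON string
# with open("settings.json") as fh:
#     settings = Settings.load(fh)                     # file handle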
def test_compose_from_yml():
    original_yaml = open(YML_FILE).read().strip()
    yml_dict = from_yaml(original_yaml)
    compose = to_model(Compose, yml_dict)

    assert compose.version == '2'
    assert compose.services['web'].ports == ["5000:5000"]
    assert compose.services['redis'].image == "redis"

    generated_yaml = to_yaml(
        compose, suppress_empty_values=True, suppress_map_key_values=True
    ).strip()

    assert original_yaml == generated_yaml
def add_query_to_custom_dashboard(
    raw_specification,
    view_type,
    query_selections,
    query_result_spec,
    is_advanced_query=False,
):
    # TODO(vedant) - We will need to actually have a merge function in the
    # server-side Dashboard Model. This is in the event that we end up
    # overwriting IDs in the input specification. Since ids are
    # a concatenation of UUID and type, it is EXTREMELY unlikely that we will
    # ever run into this scenario but we will need to eventually take it into
    # account.

    # First, create a brand new dashboard spec using only this query.
    # This will create any top-level dictionaries that we need (e.g. for settings,
    # sizes, items, etc.). From there we just merge these things into the
    # specification to update.
    converted_specification = convert_query_to_dashboard(
        view_type, query_selections, query_result_spec, is_advanced_query
    )

    # the input specification may have been from an older version, so we
    # need to make sure it's upgraded to the latest dashboard spec version
    raw_specification = upgrade_dashboard_specification(raw_specification)
    updated_specification = related.to_model(DashboardSpecification, raw_specification)

    # set the new coordinates for the newest item we just created
    (upper_x, upper_y) = get_coordinates_for_new_chart(updated_specification)
    new_item = list(converted_specification.items.values())[0]
    new_item.layout_metadata.upper_x = upper_x
    new_item.layout_metadata.upper_y = upper_y

    # get all the top-level dictionaries from the specification to update
    date_ranges = updated_specification.date_ranges
    items = updated_specification.items
    queries = updated_specification.queries
    settings = updated_specification.settings
    filters = updated_specification.filters

    # merge in all the top-level dictionaries from the specification we just
    # created using the query the user submitted
    items.update(converted_specification.items)
    queries.update(converted_specification.queries)
    settings.update(converted_specification.settings)
    date_ranges.update(converted_specification.date_ranges)
    filters.update(converted_specification.filters)

    return related.to_dict(updated_specification)
def check_validation(self, validator):
    actual, expect = validator.actual, validator.expect

    if self.is_feature_table(expect):
        expect = utils.parse_into_rows_of_dicts(expect)

    actual = Namespace.render(actual, self.namespace)
    expect = Namespace.render(expect, self.namespace)

    compare = Namespace.render(validator.compare, self.namespace)
    compare = related.to_model(enums.Comparison, compare)
    success = compare.evaluate(actual, expect)

    return ValidationResult(
        actual=actual, expect=expect, success=success, validator=validator
    )
def loads(cls, content, file_path=None):
    try:
        as_dict = related.from_yaml(
            content, file_path=file_path, object_pairs_hook=dict
        )
        scenarios = as_dict.get("scenarios", [])
        dir_path = os.path.dirname(file_path)
        as_dict['scenarios'] = cls.prep_scenarios(scenarios, dir_path)
        return related.to_model(Case, as_dict)
    except Exception as e:
        # raise e
        get_logger().error("Failed to Load Case", file_path=file_path, error=str(e))
        return Case(file_path=file_path, is_valid=False, scenarios=[])
def get(cls, token, symbol, pin, name):
    token = str(token).upper()
    symbol = str(symbol).lower()
    pin = str(pin)

    firebase_token = get_firebase_token()

    data = {
        "PIN": pin,
        "TokenKey": token,
        "AppVersion": APP_VERSION,
        "DeviceId": uuid(),
        "DeviceName": name,
        "DeviceNameUser": "",
        "DeviceDescription": "",
        "DeviceSystemType": "Python",
        "DeviceSystemVersion": platform.python_version(),
        "RemoteMobileTimeKey": now() + 1,
        "TimeKey": now(),
        "RequestId": uuid(),
        "RemoteMobileAppVersion": APP_VERSION,
        "RemoteMobileAppName": APP_NAME,
        "FirebaseTokenKey": firebase_token,
    }

    headers = {
        "RequestMobileType": "RegisterDevice",
        "User-Agent": "MobileUserAgent",
    }

    base_url = get_base_url(token)
    url = "{}/{}/mobile-api/Uczen.v3.UczenStart/Certyfikat".format(base_url, symbol)

    log.info("Registering...")
    r = requests.post(url, json=data, headers=headers)
    j = r.json()
    log.debug(j)
    cert = j["TokenCert"]
    log.info("Registered successfully!")

    return to_model(cls, cert)
def remove_orphans_from_dashboard(raw_specification):
    '''Removes all the orphaned date ranges, queries, filters and size objects
    from a dashboard specification. Orphaned items are items that are not
    directly consumed by a layout item on the dashboard.

    Parameters
    ----------
    raw_specification : dict
        The dashboard specification from which orphaned objects are to be
        removed.

    Returns
    ----------
    DashboardSpecification
        The updated specification with all orphaned items removed.
    '''
    new_specification = related.to_model(DashboardSpecification, raw_specification)

    queries = {}
    for query in new_specification.queries.values():
        if query.layout_item_id in new_specification.items:
            queries[query.id] = query
    query_values = queries.values()

    new_specification.settings = _extract_values_in_use(
        new_specification.settings, query_values, attrgetter('setting_id')
    )
    new_specification.queries = queries

    # First remove all orphaned queries before removing orphaned date ranges.
    # That way any 'newly-orphaned' filters/date ranges will also be removed.
    new_specification.date_ranges = _extract_values_in_use(
        new_specification.date_ranges, query_values, attrgetter('date_range_id')
    )

    # Filters are slightly different from the other items since the filter ID
    # is nested inside the query, and each query can reference multiple filters.
    referenced_filters = set([
        filter_id
        for query in new_specification.queries.values()
        for filter_id in query.magic_filter_ids
    ])
    new_specification.filters = _extract_values_in_use(
        new_specification.filters, referenced_filters, lambda x: x
    )
    return new_specification
def _upgrade_2019_09_11_specification(specification):
    specification_keys_to_keep = [
        'options',
        'dateRanges',
        'filters',
        'items',
        'queries',
        'settings',
        'version',
    ]
    new_specification = {
        key: specification[key] for key in specification_keys_to_keep
    }
    new_specification['textItems'] = {}

    upgraded_specification = related.to_model(
        DashboardSpecification_20190918, new_specification
    )
    upgraded_specification.version = VERSION_2019_09_18
    return related.to_dict(upgraded_specification)
def get(cls, api, date_from=None, date_to=None):
    if not date_from:
        date_from = api.student.period.from_
    if not date_to:
        date_to = api.student.period.to

    data = {
        "DataPoczatkowa": date_from.strftime("%Y-%m-%d"),
        "DataKoncowa": date_to.strftime("%Y-%m-%d"),
    }

    j = api.post("Uczen/WiadomosciOdebrane", json=data)

    messages = j.get("Data", [])

    for message in messages:
        message["sender"] = api.dict.get_teacher_by_login_id_json(message["NadawcaId"])
        yield to_model(cls, message)
def get(cls, api, date_from, date_to):
    if not date_from:
        date_from = datetime.date.today()
    if not date_to:
        date_to = date_from

    data = {
        "DataPoczatkowa": date_from.strftime("%Y-%m-%d"),
        "DataKoncowa": date_to.strftime("%Y-%m-%d"),
    }

    j = api.post("Uczen/Sprawdziany", json=data)

    exams = sort_and_filter_dates(j.get("Data", []), date_from, date_to)

    for exam in exams:
        exam["teacher"] = api.dict.get_teacher_json(exam["IdPracownik"])
        exam["subject"] = api.dict.get_subject_json(exam["IdPrzedmiot"])
        yield to_model(cls, exam)
def _upgrade_2019_08_23_specification(specification):
    from config.ui import FILTER_ORDER, DEFAULT_DATE_PICKER_TYPE, DEFAULT_FILTER_OPTIONS

    old_options = specification['options']
    new_options = {
        'columnCount': old_options['columnCount'],
        'title': old_options['title'],
        'filterPanelSettings': {
            'showDashboardFilterButton': old_options['showDashboardFilterButton'],
            'datePickerType': DEFAULT_DATE_PICKER_TYPE,
            'filterPanelComponents': DEFAULT_FILTER_OPTIONS,
            'enabledFilters': FILTER_ORDER,
        },
    }
    specification['options'] = new_options

    # NOTE(pablo): there was a bug in the 2019-05-24 upgrade where some default
    # grouping items were not created. This fixes it for any dashboards that
    # were already upgraded after 2019-06-10:
    for layout_item in list(specification['items'].values()):
        settings = specification['settings'][layout_item['settingId']]
        groupings = settings['groupBySettings']['groupings']
        has_string_dimension = False
        for grouping_obj in groupings.values():
            if grouping_obj['type'] == 'STRING':
                has_string_dimension = True
                break

        if not has_string_dimension:
            groupings['nation'] = {
                'id': 'nation',
                'type': 'STRING',
                'displayValueFormat': 'DEFAULT',
                'label': None,
            }

    upgraded_specification = related.to_model(
        DashboardSpecification_20190826, specification
    )
    upgraded_specification.version = VERSION_2019_08_26
    return related.to_dict(upgraded_specification)
def from_config(cls, cfg):
    # TODO - create a nicer error message - see above
    # Verbose unrecognized field
    # for k in kwargs.keys():
    #     if k not in cls.REQ_FIELDS + cls.OPT_FIELDS:
    #         raise ValueError("Unrecognized field in info: '{f}'. Available fields are: {l}".
    #                          format(f=k, l=cls.REQ_FIELDS))
    #
    # # Verbose undefined field
    # undefined_set = set(cls.REQ_FIELDS) - kwargs.keys()
    # if undefined_set:
    #     raise ValueError("The following arguments were not specified: {0}. Please specify them.".
    #                      format(undefined_set))
    attrs = fields(cls)
    cls_keys = {a.metadata.get('key') or a.name for a in attrs}
    cfg_keys = set(cfg.keys())

    extra_keys = cfg_keys - cls_keys
    if len(extra_keys) > 0:
        raise ValueError("Unrecognized fields: {0}. Available fields are {1}".format(
            extra_keys, cls_keys))

    return related.to_model(cls, cfg)
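# Hedged usage sketch for from_config() above (class and field names are
# hypothetical): any key in cfg that matches neither a declared attribute name
# nor a field's metadata 'key' alias raises ValueError before related.to_model
# is called.
#
# @related.mutable
# class Info(ConfigMixin):                    # assumed mixin providing from_config
#     name = related.StringField()
#
# Info.from_config({"name": "x"})             # ok
# Info.from_config({"name": "x", "foo": 1})   # ValueError: Unrecognized fields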