def test_extract_index_meta(self):
    self.maxDiff = None
    dt = met.get_datetime_now()
    base_meta = {
        "created_at": dt,
        "model": "docfreq",
        "series": "pga-2018",
        "uuid": "12345678-9abc-def0-1234-56789abcdef0",
        "version": [1, 0, 2],
        "parent": "f64bacd4-67fb-4c64-8382-399a8e7db52a",
        "dependencies": ["1e3da42a-28b6-4b33-94a2-a5671f4102f4"],
        "description": "model_description",
        "license": "MIT",
    }
    extra_meta = {
        "code": "model_code %s",
        "datasets": [["any", "https://any"]],
        "description": "override",
        "references": [["any", "ref"]],
        "tags": ["one", "two"],
        "extra": {"feature": "value"},
    }

    def route(url):
        self.assertEqual("https://xxx", url)
        return b"content"

    met.requests = FakeRequests(route)
    model_meta = met.extract_model_meta(base_meta, extra_meta, "https://xxx")
    self.assertIsInstance(model_meta, dict)
    self.assertGreater(len(met.format_datetime(dt)), 0)
    self.assertDictEqual(model_meta, {
        "default": {
            "default": "12345678-9abc-def0-1234-56789abcdef0",
            "description": "model_description",
            "code": "model_code %s",
        },
        "model": {
            "created_at": met.format_datetime(dt),
            "code": "model_code %s",
            "description": "model_description",
            "dependencies": ["1e3da42a-28b6-4b33-94a2-a5671f4102f4"],
            "license": "MIT",
            "parent": "f64bacd4-67fb-4c64-8382-399a8e7db52a",
            "datasets": [["any", "https://any"]],
            "references": [["any", "ref"]],
            "size": "7 Bytes",
            "series": "pga-2018",
            "url": "https://xxx",
            "tags": ["one", "two"],
            "version": [1, 0, 2],
            "extra": {"feature": "value"},
        },
    })
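
# The test above monkey-patches ``met.requests`` with ``FakeRequests(route)``.
# A minimal sketch of what such a test double could look like, assuming the
# code under test only calls ``requests.get(url)`` and reads ``.content`` from
# the result; the class layout and attribute names here are illustrative
# assumptions, not the project's actual helper.
class FakeRequests:
    """Illustrative stand-in for the ``requests`` module."""

    class _Response:
        def __init__(self, content: bytes) -> None:
            self.content = content  # raw body produced by the route callback

    def __init__(self, route):
        self._route = route  # callback: validates the URL and returns bytes

    def get(self, url, **kwargs):
        # Delegate to the route callback so the test fully controls the payload.
        return self._Response(self._route(url))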
def _write_tree(self, tree: dict, output: Union[str, BinaryIO], file_mode: int = 0o666) -> None:
    """
    Write the model to disk.

    :param tree: The data dict - will be the ASDF tree.
    :param output: The output file path or a file object.
    :param file_mode: The output file's permissions.
    :return: None
    """
    self.meta["created_at"] = get_datetime_now()
    meta = self.meta.copy()
    meta["environment"] = collect_environment()
    final_tree = {}
    final_tree.update(tree)
    final_tree["meta"] = meta
    isfileobj = not isinstance(output, str)
    if not isfileobj:
        self._source = output
        path = output
        output = open(output, "wb")
        os.chmod(path, file_mode)
        pos = 0
    else:
        pos = output.tell()
    try:
        with asdf.AsdfFile(final_tree) as file:
            queue = [("", tree)]
            while queue:
                path, element = queue.pop()
                if isinstance(element, dict):
                    for key, val in element.items():
                        queue.append((path + "/" + key, val))
                elif isinstance(element, (list, tuple)):
                    for child in element:
                        queue.append((path, child))
                elif isinstance(element, numpy.ndarray):
                    path += "/"
                    if path not in self._compression_prefixes:
                        self._log.debug("%s -> %s compression", path, self.ARRAY_COMPRESSION)
                        file.set_array_compression(element, self.ARRAY_COMPRESSION)
                    else:
                        self._log.debug("%s -> compression disabled", path)
            file.write_to(output)
            self._size = output.seek(0, os.SEEK_END) - pos
    finally:
        if not isfileobj:
            output.close()
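
# Standalone sketch of the queue-based traversal used in ``_write_tree()``
# above: walk a nested dict/list structure with a LIFO queue and record the
# slash-separated path of every numpy array, which is the basis for the
# per-prefix compression decisions. The function name and return type are
# illustrative, not part of the original code.
import numpy


def collect_array_paths(tree: dict) -> list:
    paths = []
    queue = [("", tree)]
    while queue:
        path, element = queue.pop()
        if isinstance(element, dict):
            for key, val in element.items():
                queue.append((path + "/" + key, val))
        elif isinstance(element, (list, tuple)):
            for child in element:
                queue.append((path, child))
        elif isinstance(element, numpy.ndarray):
            paths.append(path + "/")
    return paths


# Example: arrays nested under dicts and lists are reported with their paths.
print(collect_array_paths({"a": {"b": numpy.zeros(3)}, "c": [numpy.ones(2)]}))
# e.g. ['/c/', '/a/b/'] (order depends on the pop() order of the queue)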
import datetime
from typing import Tuple

from modelforge.meta import get_datetime_now

logs_are_structured = False


def get_timezone() -> Tuple[datetime.tzinfo, str]:
    """Discover the current time zone and its standard string representation (for source{d})."""
    dt = get_datetime_now().astimezone()
    tzstr = dt.strftime("%z")
    tzstr = tzstr[:-2] + ":" + tzstr[-2:]
    return dt.tzinfo, tzstr


timezone, tzstr = get_timezone()

# Seasonal emoji: Santa in December, a pumpkin during the last week of October.
_now = get_datetime_now()
if _now.month == 12:
    _fest = "🎅"
elif _now.month == 10 and _now.day > (31 - 7):
    _fest = "🎃"
else:
    _fest = ""
del _now


def format_datetime(dt: datetime.datetime) -> str:
    """Represent the date and time in source{d} format."""
    return dt.strftime("%Y-%m-%dT%k:%M:%S.%f000") + tzstr


def reduce_thread_id(thread_id: int) -> str: