def _check_for_update( cls, map_file: Path, entities: dict, metadata_hash: str ) -> Tuple[bool, Sequence[str]]: if not map_file.is_file(): return True, [] files = [] try: map_data = json.loads(map_file.read_text()) files = map_data.get("files", []) for file in files: if not Path(file).is_file(): return True, files new_times = { item.id: cls._get_last_update_time(item).replace(tzinfo=timezone.utc) for item in chain.from_iterable(entities.values()) } old_times = map_data.get("entities", {}) if set(new_times.keys()) != set(old_times.keys()): return True, files for id_, new_timestamp in new_times.items(): if new_timestamp != old_times[id_]: return True, files if metadata_hash != map_data.get("metadata_hash", ""): return True, files except Exception as ex: print("Error reading map file. " + str(ex)) return True, files return False, files
def update_call_data(call, req):
    """ Use request payload/form to fill call data or batched data """
    if req.content_type == "application/json-lines":
        # Batched mode: one JSON object per line of the raw body.
        batch = []
        for index, raw_line in enumerate(req.data.splitlines()):
            try:
                parsed = json.loads(raw_line)
                if not isinstance(parsed, dict):
                    raise BadRequest(
                        f"json lines must contain objects, found: {type(parsed).__name__}"
                    )
                batch.append(parsed)
            except ValueError as e:
                # Delegate error reporting to the request object (raises).
                req.on_json_loading_failed(f"{e} in batch item #{index}")
        call.batched_data = batch
        return

    # Single-call mode: JSON body takes precedence over form/query args.
    json_body = req.get_json(force=True, silent=False) if req.data else None

    # Merge form fields with query-string args into one plain dict.
    merged = req.form.copy()
    merged.update(req.args)
    merged = merged.to_dict()

    # Best-effort coercion of string values: numbers and booleans.
    for key, value in merged.items():
        if value.replace(".", "", 1).isdigit():
            merged[key] = float(value) if "." in value else int(value)
        elif value.lower() == "true":
            merged[key] = True
        elif value.lower() == "false":
            merged[key] = False

    call.data = json_body or merged or {}
def get_user_preferences(call: APICall, company_id):
    """
    Return the calling user's stored preferences as a dict.

    Preferences persisted as a JSON string are decoded; an empty dict is
    returned when the user has no preferences.
    """
    prefs = get_user(
        call, company_id, call.identity.user, only=["preferences"]
    ).get("preferences")
    if isinstance(prefs, str) and prefs:
        prefs = loads(prefs)
    return prefs or {}
def get_tags(
    self, company, include_system: bool = False, filter_: Sequence[str] = None
) -> dict:
    """
    Get tags and optionally system tags for the company.

    Returns a dictionary of tags per tags field name. Cached values are
    fetched from Redis in a single ``mget`` call; any field missing from the
    cache is recalculated from the DB and written back with an expiration.
    """
    wanted_fields = [self._tags_field]
    if include_system:
        wanted_fields.append(self._system_tags_field)

    cache_keys = [
        self._get_tags_cache_key(company, field, filter_) for field in wanted_fields
    ]
    cached_values = self.redis.mget(cache_keys)

    result = {}
    for field, raw, cache_key in zip(wanted_fields, cached_values, cache_keys):
        if raw is not None:
            values = json.loads(raw)
        else:
            # Cache miss — recompute from the DB and repopulate Redis.
            values = list(self._get_tags_from_db(company, field, filter_))
            self.redis.setex(
                cache_key,
                time=self._tags_cache_expiration_seconds,
                value=json.dumps(values),
            )
        result[field] = values
    return result
def _import_events(cls, f: BinaryIO, full_name: str, company_id: str, _):
    """
    Import exported events from *f* into the events store.

    The task id is taken from the trailing ``_<task_id>`` segment of the
    file name (after stripping the events file suffix); events are written
    in chunks of 1000.
    """
    base_name = full_name[0 : -len(cls.events_file_suffix)]
    _, _, task_id = base_name.rpartition("_")
    print(f"Writing events for task {task_id} into database")
    for chunk in chunked_iter(cls.json_lines(f), 1000):
        cls.event_bll.add_events(
            company_id,
            events=[json.loads(line) for line in chunk],
            worker="",
            allow_locked_tasks=True,
        )
def import_from_zip(
    cls,
    filename: str,
    artifacts_path: str,
    company_id: Optional[str] = None,
    user_id: str = "",
    user_name: str = "",
):
    """
    Import pre-populated data from the zip at *filename*.

    Company and user defaults are taken from the archive's metadata file when
    not supplied by the caller; the actual import is delegated to
    ``cls._import``. If *artifacts_path* is an existing directory, a sibling
    artifacts archive (same name, ``cls.artifacts_ext`` suffix) is extracted
    into it.
    """
    metadata = None
    with ZipFile(filename) as zfile:
        try:
            # Best-effort metadata read: a missing/corrupt metadata file
            # simply leaves the caller-supplied (or fallback) values in place.
            with zfile.open(cls.metadata_filename) as f:
                metadata = json.loads(f.read())
                meta_public = metadata.get("public")
                if company_id is None and meta_public is not None:
                    # Public data goes to the empty (shared) company id.
                    company_id = "" if meta_public else get_default_company()
                if not user_id:
                    meta_user_id = metadata.get("user_id", "")
                    meta_user_name = metadata.get("user_name", "")
                    user_id, user_name = meta_user_id, meta_user_name
        except Exception:
            pass
        # Fallback identity when neither caller nor metadata provided one.
        if not user_id:
            user_id, user_name = "__allegroai__", "Allegro.ai"
        # Make sure we won't end up with an invalid company ID
        if company_id is None:
            company_id = ""
        # Always use a public user for pre-populated data
        user_id = _ensure_backend_user(
            user_id=user_id,
            user_name=user_name,
            company_id="",
        )
        cls._import(zfile, company_id, user_id, metadata)
    if artifacts_path and os.path.isdir(artifacts_path):
        artifacts_file = Path(filename).with_suffix(cls.artifacts_ext)
        if artifacts_file.is_file():
            print(f"Unzipping artifacts into {artifacts_path}")
            with ZipFile(artifacts_file) as zfile:
                zfile.extractall(artifacts_path)
def from_json(cls: Type[ModelBase], s):
    """Build a *cls* instance from the JSON object encoded in *s*."""
    attrs = loads(s)
    return cls(**attrs)
def from_json(cls, s):
    """Construct an instance of *cls* from the JSON object encoded in *s*."""
    attrs = loads(s)
    return cls(**attrs)
def get_user_preferences(call):
    """
    Return the calling user's stored preferences as a dict.

    JSON-string-encoded preferences are decoded; an empty dict is returned
    when the user has none.
    """
    prefs = get_user(call, call.identity.user, ["preferences"]).get("preferences")
    if isinstance(prefs, str) and prefs:
        prefs = loads(prefs)
    return prefs or {}