def recompose_base(self, obj: dict) -> Base:
    """Steps through a base object dictionary and recomposes the base object

    Arguments:
        obj {dict} -- the dictionary representation of the object

    Returns:
        Base -- the base object with all its children attached
    """
    # make sure an obj was passed and create dict if string was somehow passed
    if not obj:
        return
    if isinstance(obj, str):
        obj = safe_json_loads(obj)

    if "speckle_type" in obj and obj["speckle_type"] == "reference":
        obj = self.get_child(obj=obj)

    speckle_type = obj.get("speckle_type")

    # if speckle type is not in the object definition, it is treated as a dict
    if not speckle_type:
        return obj

    # get the registered type from base register
    object_type = Base.get_registered_type(speckle_type)

    # initialise the base object using `speckle_type`, falling back to `Base` if needed
    base = object_type() if object_type else Base.of_type(speckle_type=speckle_type)

    # get total children count
    if "__closure" in obj:
        if not self.read_transport:
            raise SpeckleException(
                message="Cannot resolve reference - no read transport is defined"
            )
        closure = obj.pop("__closure")
        base.totalChildrenCount = len(closure)

    for prop, value in obj.items():
        # 1. handle primitives (ints, floats, strings, and bools) or None
        if isinstance(value, PRIMITIVES) or value is None:
            base.__setattr__(prop, value)
            continue

        # 2. handle referenced child objects
        elif "referencedId" in value:
            ref_hash = value["referencedId"]
            ref_obj_str = self.read_transport.get_object(id=ref_hash)
            if not ref_obj_str:
                raise SpeckleException(
                    f"Could not find the referenced child object of id `{ref_hash}` in the given read transport: {self.read_transport.name}"
                )
            ref_obj = safe_json_loads(ref_obj_str, ref_hash)
            base.__setattr__(prop, self.recompose_base(obj=ref_obj))

        # 3. handle all other cases (base objects, lists, and dicts)
        else:
            base.__setattr__(prop, self.handle_value(value))

    return base

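# Input sketch for `recompose_base` (shape inferred from the branches above;
# the speckle_type and ids are illustrative placeholders): a property whose
# value carries a `referencedId` is fetched from the read transport and
# recursively recomposed, e.g.
#
#   {
#       "speckle_type": "Objects.BuiltElements.Wall",
#       "__closure": {"<child id>": 1},
#       "baseLine": {"speckle_type": "reference", "referencedId": "<child id>"},
#       "height": 3.0,
#   }
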
def __init__(
    self,
    stream_id: str,
    client: SpeckleClient = None,
    token: str = None,
    url: str = None,
    **data: Any,
) -> None:
    super().__init__(**data)
    # TODO: replace client with account or some other auth avenue
    if client is None and token is None and url is None:
        raise SpeckleException(
            "You must provide either a client or a token and url to construct a ServerTransport."
        )

    if client:
        if not client.me:
            raise SpeckleException(
                "The provided SpeckleClient was not authenticated."
            )
        token = client.me["token"]
        url = client.url

    self.stream_id = stream_id
    self.url = url
    self._batch_sender = BatchSender(
        self.url, self.stream_id, token, max_batch_size_mb=1
    )

    self.session = requests.Session()
    self.session.headers.update(
        {"Authorization": f"Bearer {token}", "Accept": "text/plain"}
    )

def get_local_accounts(base_path: str = None) -> List[Account]:
    """Gets all the accounts present in this environment

    Arguments:
        base_path {str} -- custom base path if you are not using the system default

    Returns:
        List[Account] -- list of all local accounts or an empty list if no accounts were found
    """
    metrics.track(metrics.ACCOUNT_LIST)

    account_storage = SQLiteTransport(scope="Accounts", base_path=base_path)
    json_path = os.path.join(account_storage._base_path, "Accounts")
    os.makedirs(json_path, exist_ok=True)
    json_acct_files = [
        file for file in os.listdir(json_path) if file.endswith(".json")
    ]

    accounts = []
    res = account_storage.get_all_objects()
    if res:
        accounts.extend(Account.parse_raw(r[1]) for r in res)
    if json_acct_files:
        try:
            accounts.extend(
                Account.parse_file(os.path.join(json_path, json_file))
                for json_file in json_acct_files
            )
        except Exception as ex:
            raise SpeckleException(
                "Invalid json accounts could not be read. Please fix or remove them.",
                ex,
            )

    return accounts

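# Usage sketch (import path assumed from the specklepy 2.x layout; `isDefault`
# is the flag Speckle Manager sets on the default local account):
from specklepy.api.credentials import get_local_accounts

accounts = get_local_accounts()
default_account = next((acc for acc in accounts if acc.isDefault), None)
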
async def check_wsclient_wrapper(self, *args, **kwargs):
    # inner wrapper of a decorator: `function` is the wrapped coroutine taken
    # from the enclosing scope
    if self.client is None:
        raise SpeckleException(
            "You must authenticate before you can subscribe to events"
        )
    else:
        return await function(self, *args, **kwargs)

def __init__(self, host: str = DEFAULT_HOST, use_ssl: bool = USE_SSL) -> None:
    ws_protocol = "ws"
    http_protocol = "http"

    if use_ssl:
        ws_protocol = "wss"
        http_protocol = "https"

    # sanitise host input by removing protocol and trailing slash
    host = re.sub(r"((^\w+:|^)\/\/)|(\/$)", "", host)

    self.url = f"{http_protocol}://{host}"
    self.graphql = self.url + "/graphql"
    self.ws_url = f"{ws_protocol}://{host}/graphql"
    self.me = None

    self.httpclient = Client(
        transport=RequestsHTTPTransport(url=self.graphql, verify=True, retries=3)
    )
    self.wsclient = None

    self._init_resources()

    # Check compatibility with the server
    try:
        serverInfo = self.server.get()
        if not isinstance(serverInfo, ServerInfo):
            raise Exception("Couldn't get ServerInfo")
    except Exception as ex:
        raise SpeckleException(
            f"{self.url} is not a compatible Speckle Server", ex
        )

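# Construction sketch (import path assumed from the specklepy 2.x layout;
# `authenticate(token=...)` is the token-based auth used alongside this
# `self.me`-style client - newer releases use `authenticate_with_token` instead):
from specklepy.api.client import SpeckleClient

client = SpeckleClient(host="speckle.xyz")  # or host="localhost:3000", use_ssl=False
client.authenticate(token="<personal access token>")
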
def send(
    base: Base,
    transports: List[AbstractTransport] = None,
    use_default_cache: bool = True,
):
    """Sends an object via the provided transports. Defaults to the local cache.

    Arguments:
        base {Base} -- the object you want to send
        transports {list} -- where you want to send them
        use_default_cache {bool} -- toggle for the default cache. If set to false, it will only send to the provided transports

    Returns:
        str -- the object id of the sent object
    """
    metrics.track(metrics.SEND)
    if not transports and not use_default_cache:
        raise SpeckleException(
            message="You need to provide at least one transport: cannot send with an empty transport list and no default cache"
        )

    # copy to avoid mutating the caller's list (or a shared mutable default)
    transports = list(transports) if transports else []
    if use_default_cache:
        transports.insert(0, SQLiteTransport())

    serializer = BaseObjectSerializer(write_transports=transports)

    for t in transports:
        t.begin_write()

    obj_hash, _ = serializer.write_json(base=base)

    for t in transports:
        t.end_write()

    return obj_hash

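# Usage sketch for `send` (in an installed specklepy this function is exposed
# as `specklepy.api.operations.send`; import paths are assumed from the 2.x
# layout and the stream id / token are placeholders):
from specklepy.api import operations
from specklepy.objects import Base
from specklepy.transports.server import ServerTransport

transport = ServerTransport(
    stream_id="<stream id>", token="<personal access token>", url="https://speckle.xyz"
)
obj = Base()
obj.height = 3.0  # Base accepts dynamic attributes
obj_id = operations.send(base=obj, transports=[transport])
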
def search(self, search_query: str, limit: int = 25) -> List[User]:
    """Searches for user by name or email. The search query must be at least 3 characters long

    Arguments:
        search_query {str} -- a string to search for
        limit {int} -- the maximum number of results to return

    Returns:
        List[User] -- a list of User objects that match the search query
    """
    if len(search_query) < 3:
        return SpeckleException(
            message="User search query must be at least 3 characters"
        )

    metrics.track(metrics.USER, self.account, {"name": "search"})
    query = gql(
        """
        query UserSearch($search_query: String!, $limit: Int!) {
            userSearch(query: $search_query, limit: $limit) {
                items {
                    id
                    name
                    bio
                    company
                    avatar
                    verified
                }
            }
        }
        """
    )
    params = {"search_query": search_query, "limit": limit}

    return self.make_request(
        query=query, params=params, return_type=["userSearch", "items"]
    )

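# Search sketch: this resource is reached through the client's dynamic resource
# lookup (see `__getattr__` below), so the call typically looks like:
matching_users = client.user.search(search_query="james", limit=10)
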
def make_request(
    self,
    query: gql,
    params: Dict = None,
    return_type: Union[str, List] = None,
    schema=None,
    parse_response: bool = True,
) -> Union[Dict, GraphQLException]:
    """Executes the GraphQL query"""
    try:
        response = self.client.execute(query, variable_values=params)
    except Exception as e:
        if isinstance(e, TransportQueryError):
            return GraphQLException(
                message=f"Failed to execute the GraphQL {self.name} request. Errors: {e.errors}",
                errors=e.errors,
                data=e.data,
            )
        else:
            return SpeckleException(
                message=f"Failed to execute the GraphQL {self.name} request. Inner exception: {e}",
                exception=e,
            )

    response = self._step_into_response(response=response, return_type=return_type)

    if parse_response:
        return self._parse_response(response=response, schema=schema)
    else:
        return response

def get_child(self, obj: Dict):
    ref_hash = obj["referencedId"]
    ref_obj_str = self.read_transport.get_object(id=ref_hash)
    if not ref_obj_str:
        raise SpeckleException(
            f"Could not find the referenced child object of id `{ref_hash}` in the given read transport: {self.read_transport.name}"
        )

    return json.loads(ref_obj_str)

def get_encoding_from_units(unit: str):
    try:
        return UNITS_ENCODINGS[unit]
    except KeyError:
        raise SpeckleException(
            message=f"No encoding exists for unit {unit}. Please enter a valid unit to encode (eg {UNITS_ENCODINGS})."
        )

def get_units_from_encoding(unit: int):
    for name, encoding in UNITS_ENCODINGS.items():
        if unit == encoding:
            return name

    raise SpeckleException(
        message=f"Could not understand what unit {unit} is referring to. Please enter a valid unit encoding (eg {UNITS_ENCODINGS})."
    )

def __getattr__(self, name):
    try:
        attr = getattr(resources, name)
        return attr.Resource(me=self.me, basepath=self.url, client=self.httpclient)
    except AttributeError:
        raise SpeckleException(
            f"Method {name} is not supported by the SpeckleClient class"
        )

def _bg_send_batch(self, session, batch):
    object_ids = [obj[0] for obj in batch]
    try:
        server_has_object = session.post(
            url=f"{self.server_url}/api/diff/{self.stream_id}",
            data={"objects": json.dumps(object_ids)},
        ).json()
    except Exception as ex:
        raise SpeckleException(
            f"Invalid credentials - cannot send objects to server {self.server_url}"
        ) from ex

    new_object_ids = [x for x in object_ids if not server_has_object[x]]
    new_object_ids = set(new_object_ids)
    new_objects = [obj[1] for obj in batch if obj[0] in new_object_ids]

    if not new_objects:
        LOG.info(
            f"Uploading batch of {len(batch)} objects: all objects are already in the server"
        )
        return

    upload_data = "[" + ",".join(new_objects) + "]"
    upload_data_gzip = gzip.compress(upload_data.encode())
    LOG.info(
        "Uploading batch of %s objects (%s new): (size: %s, compressed size: %s)"
        % (len(batch), len(new_objects), len(upload_data), len(upload_data_gzip))
    )

    try:
        r = session.post(
            url=f"{self.server_url}/objects/{self.stream_id}",
            files={"batch-1": ("batch-1", upload_data_gzip, "application/gzip")},
        )
        if r.status_code != 201:
            LOG.warning("Upload server response: %s", r.text)
            raise SpeckleException(
                message=f"Could not save the object to the server - status code {r.status_code}"
            )
    except json.JSONDecodeError as error:
        return SpeckleException(
            f"Failed to send objects to {self.server_url}. Please ensure this stream ({self.stream_id}) exists on this server and that you have permission to send to it.",
            error,
        )

def get_units_from_string(unit: str):
    unit = str.lower(unit)
    for name, alternates in UNITS_STRINGS.items():
        if unit in alternates:
            return name

    raise SpeckleException(
        message=f"Could not understand what unit {unit} is referring to. Please enter a valid unit (eg {UNITS})."
    )

def get_object(self, id: str) -> str:
    # endpoint = f"{self.url}/objects/{self.stream_id}/{id}/single"
    # r = self.session.get(endpoint, stream=True)
    # _, obj = next(r.iter_lines().decode("utf-8")).split("\t")
    # return obj
    raise SpeckleException(
        "Getting a single object using `ServerTransport.get_object()` is not implemented. To get an object from the server, please use the `SpeckleClient.object.get()` route",
        NotImplementedError,
    )

def __init__(self, url: str) -> None:
    metrics.track("streamwrapper")
    self.stream_url = url
    parsed = urlparse(url)
    self.host = parsed.netloc
    self.use_ssl = parsed.scheme == "https"
    segments = parsed.path.strip("/").split("/")

    if not segments or len(segments) > 4 or len(segments) < 2:
        raise SpeckleException(
            f"Cannot parse {url} into a stream wrapper class - invalid URL provided."
        )

    while segments:
        segment = segments.pop(0)

        if segments and segment.lower() == "streams":
            self.stream_id = segments.pop(0)
        elif segments and segment.lower() == "commits":
            self.commit_id = segments.pop(0)
        elif segments and segment.lower() == "branches":
            self.branch_name = unquote(segments.pop(0))
        elif segments and segment.lower() == "objects":
            self.object_id = segments.pop(0)
        elif segment.lower() == "globals":
            self.branch_name = "globals"
            if segments:
                self.commit_id = segments.pop(0)
        else:
            raise SpeckleException(
                f"Cannot parse {url} into a stream wrapper class - invalid URL provided."
            )

    if not self.stream_id:
        raise SpeckleException(
            f"Cannot parse {url} into a stream wrapper class - no stream id found."
        )

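# Parsing sketch (import path taken from the deprecation notice at the end of
# this file; the URL is a made-up example):
from specklepy.api.wrapper import StreamWrapper

wrap = StreamWrapper("https://speckle.xyz/streams/3073b96e86/commits/604bea8cc6")
wrap.host       # "speckle.xyz"
wrap.stream_id  # "3073b96e86"
wrap.commit_id  # "604bea8cc6"
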
@classmethod
def from_objects(cls, objects: List[Base]) -> "ObjectArray":
    data_list = cls()
    if not objects:
        return data_list

    speckle_type = objects[0].speckle_type
    for obj in objects:
        if speckle_type != obj.speckle_type:
            raise SpeckleException(
                "All objects in chunk should have the same speckle_type. "
                f"Found {speckle_type} and {obj.speckle_type}"
            )
        data_list.encode_object(object=obj)

    return data_list

def get_units_from_string(unit: str):
    if not isinstance(unit, str):
        warn(
            f"Invalid units: expected type str but received {type(unit)} ({unit}). Skipping - no units will be set.",
            SpeckleWarning,
        )
        return

    unit = str.lower(unit)
    for name, alternates in UNITS_STRINGS.items():
        if unit in alternates:
            return name

    raise SpeckleException(
        message=f"Could not understand what unit {unit} is referring to. Please enter a valid unit (eg {UNITS})."
    )

def _type_check(self, name: str, value: Any):
    """
    Lightweight type checking of values before setting them

    NOTE: Does not check subscripted types within generics as the performance
    hit of checking each item within a given collection isn't worth it.
    Eg if you have a type Dict[str, float], we will only check if the value
    you're trying to set is a dict.
    """
    types = getattr(self, "_attr_types", {})
    t = types.get(name, None)

    if t is None:
        return value

    if value is None:
        return None

    if t.__module__ == "typing":
        origin = getattr(t, "__origin__")
        t = (
            tuple(getattr(sub_t, "__origin__", sub_t) for sub_t in t.__args__)
            if origin is Union
            else origin
        )

    if not isinstance(t, (type, tuple)):
        warn(
            f"Unrecognised type '{t}' provided for attribute '{name}'. Type will not be validated."
        )
        return value

    if isinstance(value, t):
        return value

    # to be friendly, we'll parse ints and strs into floats, but not the other way around
    # (to avoid unexpected rounding)
    if isinstance(t, tuple):
        t = t[0]
    try:
        if t is float:
            return float(value)
        if t is str and value:
            return str(value)
    except ValueError:
        pass

    raise SpeckleException(
        f"Cannot set '{self.__class__.__name__}.{name}': it expects type '{t.__name__}', but received type '{type(value).__name__}'"
    )

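# Behaviour sketch (hypothetical `Box` subclass; assumes class-level annotations
# are what populates `_attr_types`):
#
#   class Box(Base):
#       height: float = 1.0
#
#   box = Box()
#   box.height = 3        # ints are parsed into floats -> 3.0
#   box.height = "tall"   # raises SpeckleException: expects type 'float'
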
def __dict_helper(self, obj: Any) -> Any:
    if not obj or isinstance(obj, PRIMITIVES):
        return obj
    if isinstance(obj, Base):
        return self.__dict_helper(obj.__dict__)
    if isinstance(obj, (list, set)):
        return [self.__dict_helper(v) for v in obj]
    if not isinstance(obj, dict):
        raise SpeckleException(
            message=f"Could not convert to dict due to unrecognized type: {type(obj)}"
        )

    for k, v in obj.items():
        # only recurse into non-primitive values; `obj` is already known to be a dict here
        if v and not isinstance(v, PRIMITIVES):
            obj[k] = self.__dict_helper(v)
    return obj

def _bg_send_batch(self, session, batch):
    upload_data = "[" + ",".join(batch) + "]"
    upload_data_gzip = gzip.compress(upload_data.encode())
    LOG.info(
        "Uploading batch of %s objects (size: %s, compressed size: %s)"
        % (len(batch), len(upload_data), len(upload_data_gzip))
    )
    r = session.post(
        url=self.endpoint,
        files={"batch-1": ("batch-1", upload_data_gzip, "application/gzip")},
    )
    if r.status_code != 201:
        LOG.warning("Upload server response: %s", r.text)
        raise SpeckleException(
            message=f"Could not save the object to the server - status code {r.status_code}"
        )

def __init__(
    self,
    stream_id: str,
    client: SpeckleClient = None,
    account: Account = None,
    token: str = None,
    url: str = None,
    **data: Any,
) -> None:
    super().__init__(**data)
    if client is None and account is None and token is None and url is None:
        raise SpeckleException(
            "You must provide either a client or a token and url to construct a ServerTransport."
        )

    if account:
        self.account = account
        url = account.serverInfo.url
    elif client:
        url = client.url
        if not client.account.token:
            warn(
                SpeckleWarning(
                    f"Unauthenticated Speckle Client provided to Server Transport for {url}. Receiving from private streams will fail."
                )
            )
        else:
            self.account = client.account
    else:
        self.account = get_account_from_token(token, url)

    self.stream_id = stream_id
    self.url = url
    self._batch_sender = BatchSender(
        self.url, self.stream_id, self.account.token, max_batch_size_mb=1
    )

    self.session = requests.Session()
    self.session.headers.update(
        {"Authorization": f"Bearer {self.account.token}", "Accept": "text/plain"}
    )

def object_class(self) -> Type:
    from . import geometry

    if self == self.Arc:
        return geometry.Arc
    elif self == self.Circle:
        return geometry.Circle
    elif self == self.Curve:
        return geometry.Curve
    elif self == self.Ellipse:
        return geometry.Ellipse
    elif self == self.Line:
        return geometry.Line
    elif self == self.Polyline:
        return geometry.Polyline
    elif self == self.Polycurve:
        return geometry.Polycurve

    raise SpeckleException(
        f"No corresponding object class for CurveTypeEncoding: {self}"
    )

def save_object(self, id: str, serialized_object: str) -> None:
    """Directly saves an object into the database.

    Arguments:
        id {str} -- the object id
        serialized_object {str} -- the full string representation of the object
    """
    self.__check_connection()
    try:
        with closing(self.__connection.cursor()) as c:
            c.execute(
                "INSERT OR IGNORE INTO objects(hash, content) VALUES(?,?)",
                (id, serialized_object),
            )
            self.__connection.commit()
    except Exception as ex:
        raise SpeckleException(
            f"Could not save the object to the local db. Inner exception: {ex}",
            ex,
        )

def update(
    self, name: str = None, company: str = None, bio: str = None, avatar: str = None
):
    """Updates your user profile. All arguments are optional.

    Arguments:
        name {str} -- your name
        company {str} -- the company you may or may not work for
        bio {str} -- tell us about yourself
        avatar {str} -- a nice photo of yourself

    Returns:
        bool -- True if your profile was updated successfully
    """
    metrics.track(metrics.USER, self.account, {"name": "update"})
    query = gql(
        """
        mutation UserUpdate($user: UserUpdateInput!) {
            userUpdate(user: $user)
        }
        """
    )
    params = {"name": name, "company": company, "bio": bio, "avatar": avatar}
    params = {"user": {k: v for k, v in params.items() if v is not None}}

    if not params["user"]:
        return SpeckleException(
            message="You must provide at least one field to update your user profile"
        )

    return self.make_request(
        query=query, params=params, return_type="userUpdate", parse_response=False
    )

def copy_object_and_children(
    self, id: str, target_transport: AbstractTransport
) -> str:
    endpoint = f"{self.url}/objects/{self.stream_id}/{id}/single"
    r = self.session.get(endpoint)
    if r.encoding is None:
        r.encoding = "utf-8"

    if r.status_code != 200:
        raise SpeckleException(
            f"Can't get object {self.stream_id}/{id}: HTTP error {r.status_code} ({r.text[:1000]})"
        )
    root_obj_serialized = r.text
    root_obj = json.loads(root_obj_serialized)
    closures = root_obj.get("__closure", {})

    # Check which children are not already in the target transport
    children_ids = list(closures.keys())
    children_found_map = target_transport.has_objects(children_ids)
    new_children_ids = [id for id in children_found_map if not children_found_map[id]]

    # Get the new children
    endpoint = f"{self.url}/api/getobjects/{self.stream_id}"
    r = self.session.post(
        endpoint, data={"objects": json.dumps(new_children_ids)}, stream=True
    )
    if r.encoding is None:
        r.encoding = "utf-8"
    lines = r.iter_lines(decode_unicode=True)

    # iter through returned objects saving them as we go
    for line in lines:
        if line:
            hash, obj = line.split("\t")
            target_transport.save_object(hash, obj)

    target_transport.save_object(id, root_obj_serialized)
    return root_obj_serialized

def __init__(
    self,
    base_path: str = None,
    app_name: str = None,
    scope: str = None,
    **data: Any,
) -> None:
    super().__init__(**data)
    self.app_name = app_name or "Speckle"
    self.scope = scope or "Objects"
    self._base_path = base_path or self.get_base_path(self.app_name)

    try:
        os.makedirs(self._base_path, exist_ok=True)

        self._root_path = os.path.join(self._base_path, f"{self.scope}.db")
        self.__initialise()
    except Exception as ex:
        raise SpeckleException(
            f"SQLiteTransport could not initialise {self.scope}.db at {self._base_path}. Either provide a different `base_path` or use an alternative transport.",
            ex,
        )

def receive(
    obj_id: str,
    remote_transport: AbstractTransport = None,
    local_transport: AbstractTransport = None,
) -> Base:
    """Receives an object from a transport.

    Arguments:
        obj_id {str} -- the id of the object to receive
        remote_transport {Transport} -- the transport to receive from
        local_transport {Transport} -- the local cache to check for existing objects (defaults to `SQLiteTransport`)

    Returns:
        Base -- the base object
    """
    metrics.track(metrics.RECEIVE)
    if not local_transport:
        local_transport = SQLiteTransport()

    serializer = BaseObjectSerializer(read_transport=local_transport)

    # try local transport first. if the parent is there, we assume all the children
    # are there and continue with deserialisation using the local transport
    obj_string = local_transport.get_object(obj_id)
    if obj_string:
        return serializer.read_json(obj_string=obj_string)

    if not remote_transport:
        raise SpeckleException(
            message="Could not find the specified object using the local transport, and you didn't provide a fallback remote from which to pull it."
        )

    obj_string = remote_transport.copy_object_and_children(
        id=obj_id, target_transport=local_transport
    )

    return serializer.read_json(obj_string=obj_string)

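# Usage sketch for `receive`, mirroring the `send` sketch above and reusing its
# `transport` (the object id placeholder is the hash returned by `send`):
received = operations.receive(obj_id="<object id>", remote_transport=transport)
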
def __init__(self, url: str = None) -> None:
    raise SpeckleException(
        message="The StreamWrapper has moved as of v2.6.0! Please import from specklepy.api.wrapper",
        exception=DeprecationWarning,
    )