def blank_update(guid, rev, hash_type, hash_value, size, urls, authz, cred):
    """Fill in hashes, size, urls, and authz for an existing blank record and echo the updated record as JSON."""
    indexer = helper.build_auth_indexer(cred)
    # Comma-separated CLI values become lists; leave them as None when omitted.
    authz_list = str(authz).split(',') if authz is not None else None
    url_list = str(urls).split(',') if urls is not None else None
    params = {'rev': rev}
    data = {
        'size': size,
        'hashes': {hash_type: hash_value},
        'urls': url_list,
        'authz': authz_list,
    }
    response = indexer.client._put(
        "index/blank/",
        guid,
        headers={"content-type": "application/json"},
        params=params,
        auth=indexer.client.auth,
        data=client.json_dumps(data),
    )
    response.raise_for_status()
    click.echo(json.dumps(response.json()))
def update_blank(self, guid, rev, hashes, size, urls=None, authz=None):
    """
    Update hashes and size (and optionally urls and authz) for a blank index record

    Args:
        guid (string): record id
        rev (string): data revision - simple consistency mechanism
        hashes (dict): {hash type: hash value,}
            eg ``hashes={'md5': 'ab167e49d25b488939b1ede42752458b'}``
        size (int): file size metadata associated with a given uuid
        urls (list, optional): URLs where the file can be downloaded
        authz (list, optional): authorization resources for the record
    """
    params = {"rev": rev}
    json = {"hashes": hashes, "size": size}
    if urls:
        json["urls"] = urls
    if authz:
        json["authz"] = authz

    response = self.client._put(
        "index/blank/",
        guid,
        headers={"content-type": "application/json"},
        params=params,
        auth=self.client.auth,
        data=client.json_dumps(json),
    )
    response.raise_for_status()
    rec = response.json()
    return self.get_record(rec["did"])
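# A minimal usage sketch for update_blank: `index` is assumed to be an
# authenticated instance of the class that defines this method, and the
# guid/rev/hash values below are placeholders, not real records.
updated = index.update_blank(
    guid="dg.1234/11111111-2222-3333-4444-555555555555",
    rev="8d19b9f1",
    hashes={"md5": "ab167e49d25b488939b1ede42752458b"},
    size=1048576,
    urls=["s3://example-bucket/example.bam"],
    authz=["/programs/example"],
)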
def blank_create(uploader, filename, authz, cred):
    """Create a blank index record with the given uploader, file name, and authz, and echo the new record as JSON."""
    indexer = helper.build_auth_indexer(cred)
    # authz is a comma-separated string on the CLI; the API expects a list.
    authz_list = str(authz).split(',')
    data = {
        'uploader': uploader,
        'file_name': filename,
        'authz': authz_list,
    }
    response = indexer.client._post(
        "index/blank",
        headers={"content-type": "application/json"},
        auth=indexer.client.auth,
        data=client.json_dumps(data),
    )
    response.raise_for_status()
    click.echo(json.dumps(response.json()))
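# Typical blank-record flow with the two CLI helpers above (a sketch; the
# credential path, authz resource, and placeholder guid/rev are assumptions):
# first create the placeholder, then, after uploading the file and computing
# its hash and size, fill the record in with blank_update.
blank_create(
    uploader="user@example.org",
    filename="example.bam",
    authz="/programs/example",
    cred="~/.gen3/credentials.json",
)
# blank_create echoes the new record as JSON; its "did" and "rev" feed blank_update.
blank_update(
    guid="<did from blank_create output>",
    rev="<rev from blank_create output>",
    hash_type="md5",
    hash_value="ab167e49d25b488939b1ede42752458b",
    size=1048576,
    urls="s3://example-bucket/example.bam",
    authz="/programs/example",
    cred="~/.gen3/credentials.json",
)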
def create_blank(self, uploader, file_name=None):
    """
    Create a blank record

    Args:
        uploader (string): uploader to attribute the blank record to
        file_name (string, optional): file name to attach to the record

    The request body sent to indexd has the format:
        {
            'uploader': type(string),
            'file_name': type(string) (optional)
        }
    """
    json = {"uploader": uploader, "file_name": file_name}
    response = self.client._post(
        "index/blank",
        headers={"content-type": "application/json"},
        auth=self.client.auth,
        data=client.json_dumps(json),
    )
    response.raise_for_status()
    rec = response.json()
    return self.get_record(rec["did"])
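# Usage sketch for create_blank: `index` is again assumed to be an
# authenticated instance of the class defining this method; the uploader and
# file name are illustrative values only.
blank_record = index.create_blank(
    uploader="user@example.org",
    file_name="example.bam",
)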
def create_new_version(
    self,
    guid,
    hashes,
    size,
    did=None,
    urls=None,
    file_name=None,
    metadata=None,
    acl=None,
    urls_metadata=None,
    version=None,
    authz=None,
):
    """
    Add a new version for the document associated with the provided GUID

    Since data content is immutable, when you want to change the size or
    hash, a new index document with a new UUID needs to be created as its
    new version. That UUID is returned in the ``did`` field of the
    response. The old index document is not deleted.

    Providing ``size`` and at least one hash is necessary and sufficient.
    Note: it is a good idea to add a version number.

    Args:
        guid (string): record id
        hashes (dict): {hash type: hash value,}
            eg ``hashes={'md5': 'ab167e49d25b488939b1ede42752458b'}``
        size (int): file size metadata associated with a given uuid
        did (str): provide a UUID for the new indexd record to be made
        urls (list): list of URLs where you can download the UUID
        file_name (str): name of the file associated with a given UUID
        metadata (dict): additional key value metadata for this entry
        acl (list): access control list
        urls_metadata (dict): metadata attached to each url
        version (str): entry version string
        authz (str): RBAC string
    """
    if urls is None:
        urls = []
    json = {
        "urls": urls,
        "form": "object",
        "hashes": hashes,
        "size": size,
        "file_name": file_name,
        "metadata": metadata,
        "urls_metadata": urls_metadata,
        "acl": acl,
        "authz": authz,
        "version": version,
    }
    if did:
        json["did"] = did

    response = self.client._post(
        "index",
        guid,
        headers={"content-type": "application/json"},
        data=client.json_dumps(json),
        auth=self.client.auth,
    )
    response.raise_for_status()
    rec = response.json()
    if rec and "did" in rec:
        return self.get_record(rec["did"])
    return None
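# Usage sketch for create_new_version: registers a new record that versions an
# existing GUID. The `index` instance, GUID, and field values are placeholders;
# only size and at least one hash are strictly required.
new_version = index.create_new_version(
    guid="dg.1234/11111111-2222-3333-4444-555555555555",
    hashes={"md5": "ab167e49d25b488939b1ede42752458b"},
    size=2097152,
    urls=["s3://example-bucket/example_v2.bam"],
    file_name="example_v2.bam",
    version="2",
)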