def get_modified_ts(self):
    """Return the last-modified timestamp of this resource.

    References carry the timestamp on the tree entry; internal resources
    read it from the backing DataObject, falling back to the tree entry
    when the object cannot be found.
    """
    if self.is_reference:
        return self.entry.modified_ts
    # Lazily resolve the backing data object on first access.
    if not self.obj:
        self.obj = DataObject.find(self.obj_id)
    if self.obj is None:
        return self.entry.modified_ts
    return self.obj.modified_ts
def get_size(self):
    """Return the size of the backing data object in bytes.

    References have no local payload, so their size is reported as 0;
    the same fallback applies when the data object cannot be found.
    """
    if self.is_reference:
        return 0
    if not self.obj:
        # Resolve the backing data object on demand.
        self.obj = DataObject.find(self.obj_id)
    if self.obj is None:
        return 0
    return self.obj.size
def get_metadata(self):
    """Return the resource metadata mapping.

    References store metadata on the tree entry; internal resources read
    it from the backing DataObject, with the tree entry as fallback when
    the object is missing.
    """
    if self.is_reference:
        return self.entry.metadata
    if not self.obj:
        # Resolve the backing data object on demand.
        self.obj = DataObject.find(self.obj_id)
    if self.obj is None:
        return self.entry.metadata
    return self.obj.metadata
def get_checksum(self):
    """Return the stored checksum of the data object, or None.

    References have no checksum; the same applies when the backing data
    object cannot be found.
    """
    if self.is_reference:
        return None
    if not self.obj:
        # Resolve the backing data object on demand.
        self.obj = DataObject.find(self.obj_id)
    if self.obj is None:
        return None
    return self.obj.checksum
def get_create_ts(self):
    """Return the creation timestamp of this resource.

    References carry it on the tree entry; internal resources read it
    from the backing DataObject, falling back to the tree entry when the
    object cannot be found.
    """
    if self.is_reference:
        return self.entry.create_ts
    if not self.obj:
        # Resolve the backing data object on demand.
        self.obj = DataObject.find(self.obj_id)
    if self.obj is None:
        return self.entry.create_ts
    return self.obj.create_ts
def receive_data_chunk(self, raw_data, start):
    """
    Will be called to write 1Mb chunks of data (except for the last chunk).

    The first chunk creates the backing DataObject and records its uuid;
    every later chunk is appended under the running sequence number.  The
    chunk is also folded into the running hash.  Returns None so the
    upload-handler chain stops consuming here.
    """
    # Parenthesized call form works under both Python 2 and Python 3;
    # the original used the Python-2-only print statement.
    print(u"Received {} bytes - {}".format(len(raw_data), self.seq_number))
    if not self.uuid:
        # First chunk: create the object and remember where to append.
        data_object = DataObject.create(raw_data, settings.COMPRESS_UPLOADS)
        self.uuid = data_object.uuid
    else:
        DataObject.append_chunk(self.uuid, raw_data, self.seq_number,
                                settings.COMPRESS_UPLOADS)
    self.seq_number += 1
    self.hasher.update(raw_data)
    return None
def get_acl(self):
    """Return the access-control list for this resource.

    References keep the ACL on the tree entry; internal resources read it
    from the backing DataObject, with the tree entry as fallback when the
    object is missing.
    """
    if self.is_reference:
        return self.entry.acl
    if not self.obj:
        # Resolve the backing data object on demand.
        self.obj = DataObject.find(self.obj_id)
    if self.obj is None:
        return self.entry.acl
    return self.obj.acl
def __init__(self, entry, obj=None):
    """Wrap a tree entry, resolving its backing DataObject when local.

    NOTE(review): the ``obj`` parameter is accepted but never used --
    ``self.obj`` is always derived from the entry's url; confirm whether
    callers rely on passing it.
    """
    self.entry = entry
    self.url = entry.url
    self.path = entry.path()
    self.container = entry.container
    self.name = entry.name
    self.is_reference = is_reference(self.url)
    self.uuid = entry.uuid
    if self.is_reference:
        # External reference: no local data object to resolve.
        self.obj = None
    else:
        self.obj_id = self.url.replace("cassandra://", "")
        self.obj = DataObject.find(self.obj_id)
def get_mimetype(self):
    """Return the mimetype of this resource.

    References carry the mimetype on the tree entry; internal resources
    read it from the backing DataObject, falling back to the tree entry
    when the object cannot be found.
    """
    if self.is_reference:
        return self.entry.mimetype
    if not self.obj:
        # Resolve the backing data object on demand.
        self.obj = DataObject.find(self.obj_id)
    if self.obj is None:
        return self.entry.mimetype
    return self.obj.mimetype
def create(cls, container, name, uuid=None, metadata=None, url=None,
           mimetype=None, username=None, size=None):
    """Create a new resource in the tree_entry table.

    Validates the parent collection and name uniqueness, then stores
    either a reference entry (bookkeeping kept on the tree entry) or a
    pointer to an internal DataObject (mimetype/metadata pushed onto the
    object).  The new resource vertex is wired into the graph, a creation
    notification is emitted, and the resource is indexed.

    Raises:
        NoSuchCollectionError: if the parent collection does not exist.
        ResourceConflictError: if container/name is already in use.
    """
    from indigo.models import Collection
    from indigo.models import Notification
    # Check that the parent collection exists.  One lookup suffices; the
    # previous version repeated the same Collection.find(container) call.
    parent = Collection.find(container)
    if parent is None:
        raise NoSuchCollectionError(container)
    if uuid is None:
        uuid = default_cdmi_id()
    create_ts = datetime.now()
    modified_ts = create_ts
    path = merge(container, name)
    if metadata:
        metadata_cass = meta_cdmi_to_cassandra(metadata)
    # Make sure parent/name are not already in use.
    existing = cls.find(path)
    if existing:
        raise ResourceConflictError(path)
    kwargs = {
        "container": container,
        "name": name,
        "url": url,
        "uuid": uuid,
    }
    if is_reference(url):
        # Reference resources keep their bookkeeping on the tree entry.
        kwargs["create_ts"] = create_ts
        kwargs["modified_ts"] = modified_ts
        kwargs["mimetype"] = mimetype
        if metadata:
            kwargs["metadata"] = metadata_cass
    else:
        # Internal resources store mimetype/metadata on the DataObject.
        obj_id = url.replace("cassandra://", "")
        data_obj = DataObject.find(obj_id)
        # NOTE(review): data_obj is assumed to exist for a cassandra:// url;
        # a missing object raises AttributeError below -- confirm intended.
        if metadata:
            # NOTE(review): when metadata is given, size is not stored on
            # the data object -- confirm this asymmetry is intentional.
            data_obj.update(mimetype=mimetype, metadata=metadata_cass)
        else:
            if mimetype:
                data_obj.update(mimetype=mimetype)
            if size:
                data_obj.update(size=size)
    data_entry = TreeEntry.create(**kwargs)
    new = Resource(data_entry)

    session = get_graph_session()
    # Optional 'owns' edge, only when the creating user is known.
    add_user_edge = ""
    if username:
        user = User.find(username)
        if user:
            add_user_edge = """v_user = {}.next();
v_user.addEdge('owns', v_new);
""".format(gq_get_vertex_user(user))
    session.execute_graph("""v_parent = {}.next();
v_new = {};
v_parent.addEdge('son', v_new);
{}
""".format(gq_get_vertex_collection(parent),
           gq_add_vertex_resource(new),
           add_user_edge))
    if metadata:
        new.update_graph(metadata)

    state = new.mqtt_get_state()
    payload = new.mqtt_payload({}, state)
    Notification.create_resource(username, path, payload)
    # Index the resource
    new.index()
    return new
def delete_blobs(self):
    """Delete all blobs of the corresponding uuid.

    References own no local blobs, so nothing is removed for them.
    """
    if self.is_reference:
        return
    DataObject.delete_id(self.obj_id)
def process_create_entry_work(self, resc_dict, context, is_reference):
    """Ingest one entry: store its data (if local) and create its Resource.

    Mostly the resource will not exist, so the URL is computed first and a
    straight create is attempted; on ResourceConflictError the existing
    record is fetched and its url refreshed if stale.  Finally the search
    index is rebuilt for the resource.
    """
    if is_reference:
        url = "file://{}{}/{}".format(context['local_ip'],
                                      context['path'],
                                      context['entry'])
    else:
        # Open in binary mode: chunks are raw bytes handed to DataObject;
        # text mode ('r') would corrupt data via newline translation or
        # decoding.
        with open(context['fullpath'], 'rb') as src:
            seq_number = 0
            data_uuid = None
            for chk in read_in_chunks(src):
                if seq_number == 0:
                    # First chunk creates the object and fixes its uuid.
                    data_object = DataObject.create(chk,
                                                    resc_dict['compress'])
                    data_uuid = data_object.uuid
                else:
                    DataObject.append_chunk(data_uuid, chk, seq_number,
                                            resc_dict['compress'])
                seq_number += 1
        if data_uuid:
            url = "cassandra://{}".format(data_uuid)
        else:
            # Empty file: nothing was stored, nothing to register.
            return None
    try:
        # OK -- try to insert (create) the record...
        t1 = time.time()
        resource = Resource.create(container=resc_dict['container'],
                                   name=resc_dict['name'],
                                   url=url,
                                   mimetype=resc_dict['mimetype'],
                                   username=context['user'],
                                   size=resc_dict['size'])
        resource.create_acl_list(resc_dict['read_access'],
                                 resc_dict['write_access'])
        msg = 'Resource {} created --> {}'.format(resource.get_name(),
                                                  time.time() - t1)
        logger.info(msg)
    except ResourceConflictError:
        # The record already exists... so retrieve it.
        t1 = time.time()
        resource = Resource.find(
            merge(resc_dict['container'], resc_dict['name']))
        msg = "{} ::: Fetch Object -> {}".format(resource.get_name(),
                                                 time.time() - t1)
        logger.info(msg)
        # If the url is not correct then update it.
        # TODO: if the url is a block set that is stored internally then
        # reduce its count so that it can be GC'd.
        if resource.url != url:
            t2 = time.time()
            # if url.startswith('cassandra://') : tidy up the stored block count...
            resource.update(url=url)
            t3 = time.time()
            msg = "{} ::: update -> {}".format(resource.get_name(), t3 - t2)
            logger.info(msg)
    SearchIndex.reset(resource.uuid)
    SearchIndex.index(resource, ['name', 'metadata'])
def append_data_object(self, uuid, seq_num, raw_data):
    """Append one chunk of raw data to the data object identified by uuid."""
    compress = settings.COMPRESS_UPLOADS
    DataObject.append_chunk(uuid, raw_data, seq_num, compress)
def create_empty_data_object(self):
    """Create a data object with no content and return its uuid."""
    new_obj = DataObject.create(None)
    return new_obj.uuid
def create_data_object(self, raw_data, metadata=None, create_ts=None, acl=None):
    """Create a data object holding raw_data and return its uuid.

    Compression follows the site-wide COMPRESS_UPLOADS setting; optional
    metadata, creation timestamp and ACL are forwarded unchanged.
    """
    new_obj = DataObject.create(
        raw_data,
        settings.COMPRESS_UPLOADS,
        metadata=metadata,
        create_ts=create_ts,
        acl=acl,
    )
    return new_obj.uuid