def getUploadAssetstore(self, event):
    """
    Handle the model.upload.assetstore event.  This event passes a
    dictionary consisting of a model type and resource document.  If the
    base document has an assetstore policy, set an 'assetstore' key on
    this dictionary to the assetstore document that should be used, or
    prevent the default action if no appropriate assetstore is allowed.

    :param event: event record.
    """
    model, resource = self._getBaseResource(event.info['model'],
                                            event.info['resource'])
    if resource is None:
        return
    policy = resource[QUOTA_FIELD]
    assetstore = self._checkAssetstore(
        policy.get('preferredAssetstore', None))
    if assetstore is False:
        assetstore = self._checkAssetstore(
            policy.get('fallbackAssetstore', None))
        if assetstore is not False:
            logger.info('preferredAssetstore not available for %s %s, '
                        'using fallbackAssetstore', model, resource['_id'])
    if assetstore is False:
        raise GirderException('Required assetstore is unavailable')
    if assetstore:
        event.addResponse(assetstore)
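
def _exampleBindQuotaHandler(policy):
    # Hedged usage sketch, not a fixed part of this module: shows how a
    # handler such as getUploadAssetstore is attached to the
    # 'model.upload.assetstore' event named in its docstring.  `policy` is
    # assumed to be the object that owns the method, and 'user_quota' is an
    # illustrative handler name.
    from girderformindlogger import events

    events.bind('model.upload.assetstore', 'user_quota',
                policy.getUploadAssetstore)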
def _submitEmail(msg, recipients):
    """
    Send a constructed email message through the configured SMTP server.

    :param msg: the email message, e.g. an email.mime instance with its
        'From' header already set.
    :param recipients: a list of recipient email addresses.
    """
    from girderformindlogger.models.setting import Setting

    setting = Setting()
    smtp = _SMTPConnection(
        host=setting.get(SettingKey.SMTP_HOST),
        port=setting.get(SettingKey.SMTP_PORT),
        encryption=setting.get(SettingKey.SMTP_ENCRYPTION),
        username=setting.get(SettingKey.SMTP_USERNAME),
        password=setting.get(SettingKey.SMTP_PASSWORD)
    )

    logger.info('Sending email to %s through %s', ', '.join(recipients),
                smtp.host)

    with smtp:
        smtp.send(msg['From'], recipients, msg.as_string())
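
def _exampleSubmitEmail():
    # Hedged sketch: builds the kind of message _submitEmail expects
    # (anything exposing a 'From' header and as_string(), such as an
    # email.mime object) and hands it off.  The addresses and subject are
    # illustrative only.
    from email.mime.text import MIMEText

    msg = MIMEText('The nightly report is attached below.')
    msg['Subject'] = 'Nightly report'
    msg['From'] = 'no-reply@example.com'
    msg['To'] = 'admin@example.com'

    _submitEmail(msg, ['admin@example.com'])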
def recalculateSize(self, item):
    """
    Recalculate the item size based on the files that are in it.  If this
    is different than the recorded size, propagate the changes.

    :param item: The item to recalculate the size of.
    :returns: the recalculated size in bytes
    """
    size = 0
    for file in self.childFiles(item):
        # We could add a recalculateSize to the file model, in which case
        # this would be:
        # size += File().recalculateSize(file)
        size += file.get('size', 0)
    delta = size - item.get('size', 0)
    if delta:
        logger.info('Item %s was wrong size: was %d, is %d',
                    item['_id'], item['size'], size)
        item['size'] = size
        self.update({'_id': item['_id']}, update={'$set': {'size': size}})
        self.propagateSizeChange(item, delta)
    return size
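
def _exampleRecalculateFolder(folder):
    # Hedged sketch: walks every item in a folder and reconciles its
    # recorded size.  Assumes recalculateSize above is a method of the Item
    # model (which is how its self.childFiles/self.update calls read) and
    # that Folder().childItems is available, as in the Girder models this
    # code derives from.
    from girderformindlogger.models.folder import Folder
    from girderformindlogger.models.item import Item

    itemModel = Item()
    return sum(itemModel.recalculateSize(item)
               for item in Folder().childItems(folder))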
def uploadChunk(self, upload, chunk):
    """
    Stores the uploaded chunk in fixed-sized pieces in the chunks
    collection of this assetstore's database.
    """
    # If we know the chunk size is too large or small, fail early.
    self.checkUploadSize(upload, self.getChunkSize(chunk))

    if isinstance(chunk, six.text_type):
        chunk = chunk.encode('utf8')

    if isinstance(chunk, six.binary_type):
        chunk = BytesIO(chunk)

    # Restore the internal state of the streaming SHA-512 checksum
    checksum = _hash_state.restoreHex(upload['sha512state'], 'sha512')

    # TODO: when saving uploads is optional, we can conditionally try to
    # fetch the last chunk. Add these lines before `lastChunk = ...`:
    #   lastChunk = None
    #   if '_id' in upload or upload['received'] != 0:
    lastChunk = self.chunkColl.find_one({
        'uuid': upload['chunkUuid']
    }, projection=['n'], sort=[('n', pymongo.DESCENDING)])

    if lastChunk:
        # This bit of code will only do anything if there is a discrepancy
        # between the received count of the upload record and the length of
        # the file stored as chunks in the database. This code updates the
        # sha512 state with the difference before reading the bytes sent
        # from the user.
        if self.requestOffset(upload) > upload['received']:
            # This isn't right -- the last received amount may not be a
            # complete chunk.
            cursor = self.chunkColl.find({
                'uuid': upload['chunkUuid'],
                'n': {'$gte': upload['received'] // CHUNK_SIZE}
            }, projection=['data']).sort('n', pymongo.ASCENDING)
            for result in cursor:
                checksum.update(result['data'])
    n = lastChunk['n'] + 1 if lastChunk else 0
    size = 0
    startingN = n

    while upload['received'] + size < upload['size']:
        data = chunk.read(CHUNK_SIZE)
        if not data:
            break
        # If a timeout occurs while we are trying to load data, we might
        # have succeeded, in which case we will get a DuplicateKeyError
        # when it automatically retries. Therefore, log this error but
        # don't stop.
        try:
            self.chunkColl.insert_one({
                'n': n,
                'uuid': upload['chunkUuid'],
                'data': bson.binary.Binary(data)
            })
        except pymongo.errors.DuplicateKeyError:
            logger.info('Received a DuplicateKeyError while uploading, '
                        'probably because we reconnected to the database '
                        '(chunk uuid %s part %d)', upload['chunkUuid'], n)
        n += 1
        size += len(data)
        checksum.update(data)
    chunk.close()

    try:
        self.checkUploadSize(upload, size)
    except ValidationException:
        # The user tried to upload too much or too little. Delete
        # everything we added
        self.chunkColl.delete_many({
            'uuid': upload['chunkUuid'],
            'n': {'$gte': startingN}
        })
        raise

    # Persist the internal state of the checksum
    upload['sha512state'] = _hash_state.serializeHex(checksum)
    upload['received'] += size
    return upload
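
def _exampleStreamingChecksum(stream, chunkSize=65536):
    # Hedged, standalone illustration of the streaming-checksum idea in
    # uploadChunk: the digest is updated one fixed-size piece at a time, so
    # the whole file never has to be held in memory or re-read.  Plain
    # hashlib stands in here for the resumable _hash_state helper, which
    # additionally serializes its internal state between requests.
    import hashlib

    checksum = hashlib.sha512()
    while True:
        data = stream.read(chunkSize)
        if not data:
            break
        checksum.update(data)
    return checksum.hexdigest()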
def finalizeUpload(self, upload, assetstore=None):
    """
    This should only be called manually in the case of creating an empty
    file, i.e. one that has no chunks.

    :param upload: The upload document.
    :type upload: dict
    :param assetstore: If known, the containing assetstore for the upload.
    :type assetstore: dict
    :returns: The file object that was created.
    """
    from girderformindlogger.models.assetstore import Assetstore
    from girderformindlogger.models.file import File
    from girderformindlogger.models.item import Item
    from girderformindlogger.utility import assetstore_utilities

    events.trigger('model.upload.finalize', upload)
    if assetstore is None:
        assetstore = Assetstore().load(upload['assetstoreId'])

    if 'fileId' in upload:  # Updating an existing file's contents
        file = File().load(upload['fileId'], force=True)

        # Delete the previous file contents from the containing assetstore
        assetstore_utilities.getAssetstoreAdapter(
            Assetstore().load(file['assetstoreId'])).deleteFile(file)

        item = Item().load(file['itemId'], force=True)
        File().propagateSizeChange(item, upload['size'] - file['size'])

        # Update file info
        file['creatorId'] = upload['userId']
        file['created'] = datetime.datetime.utcnow()
        file['assetstoreId'] = assetstore['_id']
        file['size'] = upload['size']
        # If the file was previously imported, it is no longer.
        if file.get('imported'):
            file['imported'] = False
    else:  # Creating a new file record
        if upload.get('attachParent'):
            item = None
        elif upload['parentType'] == 'folder':
            # Create a new item with the name of the file.
            item = Item().createItem(
                name=upload['name'], creator={'_id': upload['userId']},
                folder={'_id': upload['parentId']})
        elif upload['parentType'] == 'item':
            item = Item().load(id=upload['parentId'], force=True)
        else:
            item = None

        file = File().createFile(
            item=item, name=upload['name'], size=upload['size'],
            creator={'_id': upload['userId']}, assetstore=assetstore,
            mimeType=upload['mimeType'], saveFile=False)
        if upload.get('attachParent'):
            if upload['parentType'] and upload['parentId']:
                file['attachedToType'] = upload['parentType']
                file['attachedToId'] = upload['parentId']

    adapter = assetstore_utilities.getAssetstoreAdapter(assetstore)
    file = adapter.finalizeUpload(upload, file)

    event_document = {'file': file, 'upload': upload}
    events.trigger('model.file.finalizeUpload.before', event_document)
    file = File().save(file)
    events.trigger('model.file.finalizeUpload.after', event_document)

    if '_id' in upload:
        self.remove(upload)

    logger.info('Upload complete. Upload=%s File=%s User=%s',
                upload['_id'], file['_id'], upload['userId'])

    # Add an async event for handlers that wish to process this file.
    eventParams = {
        'file': file,
        'assetstore': assetstore,
        'currentToken': rest.getCurrentToken(),
        'currentUser': rest.getCurrentUser()
    }
    if 'reference' in upload:
        eventParams['reference'] = upload['reference']
    events.daemon.trigger('data.process', eventParams)

    return file
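
def _exampleCreateEmptyFile(uploadModel, user, folder):
    # Hedged sketch of the one case the docstring singles out for a manual
    # call: a zero-byte upload has no chunks to send, so it can be
    # finalized as soon as the upload record exists.  createUpload's
    # signature here follows the Girder upload model this code derives
    # from; the file name and MIME type are illustrative.
    upload = uploadModel.createUpload(
        user=user, name='empty.txt', parentType='folder', parent=folder,
        size=0, mimeType='text/plain')
    return uploadModel.finalizeUpload(upload)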