def _copyFromStaticFileStore(self, urlPrefix, contentServiceType):
    '''Fetch the static file for *urlPrefix* from the Static File Store and
    unpack it into the static contents directory.

    Returns the result of ``_extractStaticFiles`` on success, ``False`` when
    no hash is known for the prefix or both fetch attempts fail.  A second
    fetch is attempted after re-establishing the store connection; if that
    also yields no valid archive, the store entry is deleted because the
    upload is presumed corrupted.
    '''
    hash_value = self._sfsHash.get(urlPrefix)
    if not hash_value:
        # Nothing known about this prefix — nothing to copy.
        return False

    bdata = self._sFileStore.getFile(urlPrefix)
    if bdata is not None and archive.is_data_archive(bdata):
        self._logger.debug(
            'Getting Static File from Static File Store for {0} with hash {1}'
            .format(urlPrefix, hash_value))
        return self._extractStaticFiles(bdata, hash_value, urlPrefix,
                                        contentServiceType)

    # First attempt failed — reconnect and try exactly once more.
    self._sFileStore.reConnection()
    bdata = self._sFileStore.getFile(urlPrefix)
    if bdata is not None and archive.is_data_archive(bdata):
        self._logger.debug(
            'Getting Static File from Static File Store for {0} with hash {1} after retrying'
            .format(urlPrefix, hash_value))
        return self._extractStaticFiles(bdata, hash_value, urlPrefix,
                                        contentServiceType)

    # Delete Static File Store Content; files may be corrupted while
    # uploading to Static File Store.
    self._sFileStore.deleteFile(urlPrefix)
    return False
def _addInStaticFileStore(self, urlPrefix, data):
    '''Validate *data* and store it in the Static File Store under *urlPrefix*.

    The payload must be non-empty, within the configured size limit, and in
    a recognized archive format; on success it is handed to
    ``_putInStaticFileStore`` (which updates the store and triggers the
    ZookeeperWatch node).

    Raises:
        ValueError: if *data* is empty or larger than
            ``gConfig.static_content_max_size`` bytes.
        StaticContentException: if *data* is not an archive.
    '''
    static_content_max_size = int(gConfig.static_content_max_size)
    # BUG FIX: the original guard `0 < len(data) > static_content_max_size`
    # chains to `len(data) > 0 AND len(data) > max`, so EMPTY data slipped
    # through even though the error message promises to reject it.
    if not data or len(data) > static_content_max_size:
        raise ValueError(
            'Data for {0} is empty or exceeds the limit {1}MB'.format(
                urlPrefix, static_content_max_size / 1048576))
    if not archive.is_data_archive(data):
        raise StaticContentException(message='Not an Archive', urls=[urlPrefix])
    self._putInStaticFileStore(urlPrefix, data)
def _addInStaticFileStore(self, urlPrefix, data):
    '''Validate *data* and store it in the Static File Store under *urlPrefix*.

    The payload must be non-empty, within the configured size limit, and in
    a recognized archive format; on success it is handed to
    ``_putInStaticFileStore`` (which updates the store and triggers the
    ZookeeperWatch node).

    Raises:
        ValueError: if *data* is empty or larger than
            ``gConfig.static_content_max_size`` bytes.
        StaticContentException: if *data* is not an archive.
    '''
    static_content_max_size = int(gConfig.static_content_max_size)
    # BUG FIX: the original guard `0 < len(data) > static_content_max_size`
    # chains to `len(data) > 0 AND len(data) > max`, so EMPTY data slipped
    # through even though the error message promises to reject it.
    if not data or len(data) > static_content_max_size:
        raise ValueError(
            'Data for {0} is empty or exceeds the limit {1}MB'.format(
                urlPrefix, static_content_max_size / 1048576))
    if not archive.is_data_archive(data):
        raise StaticContentException(message='Not an Archive', urls=[urlPrefix])
    self._putInStaticFileStore(urlPrefix, data)
def _copyFromStaticFileStore(self, urlPrefix, contentServiceType):
    '''Fetch the static file for *urlPrefix* from the Static File Store and
    unpack it into the static contents directory.

    Two fetch attempts are made — the second after reconnecting to the
    store.  Returns the result of ``_extractStaticFiles`` on the first
    attempt that yields a valid archive; otherwise deletes the (presumed
    corrupt) store entry and returns ``False``.  Returns ``False`` outright
    when no hash is recorded for the prefix.
    '''
    status = False
    hash_value = self._sfsHash.get(urlPrefix)
    if hash_value:
        # (reconnect-first?, debug message) for each of the two attempts.
        attempts = (
            (False,
             'Getting Static File from Static File Store for {0} with hash {1}'),
            (True,
             'Getting Static File from Static File Store for {0} with hash {1} after retrying'),
        )
        for reconnect_first, message in attempts:
            if reconnect_first:
                # try one more time
                self._sFileStore.reConnection()
            bdata = self._sFileStore.getFile(urlPrefix)
            if bdata is not None and archive.is_data_archive(bdata):
                self._logger.debug(message.format(urlPrefix, hash_value))
                return self._extractStaticFiles(bdata, hash_value, urlPrefix,
                                                contentServiceType)
        # Delete Static File Store Content; files may be corrupted while
        # uploading to Static File Store.
        self._sFileStore.deleteFile(urlPrefix)
    return status