# Try to download the file. Increase the number of retries, or the # timeout duration, if the server is particularly slow. # eg: Akamai usually takes 3-15 seconds to make an uploaded file # available over HTTP. for i in xrange(max_tries): try: temp_file = urlopen(file_url) dl_hash = sha1(temp_file.read()).hexdigest() temp_file.close() except HTTPError, http_err: # Don't raise the exception now, wait until all attempts fail time.sleep(3) else: # If the downloaded file matches, success! Otherwise, we can # be pretty sure that it got corrupted during FTP transfer. if orig_hash == dl_hash: return True else: msg = _('The file transferred to your FTP server is '\ 'corrupted. Please try again.') raise FTPUploadError(msg, None, None) # Raise the exception from the last download attempt msg = _('Could not download the file from your FTP server: %s')\ % http_err.message raise FTPUploadError(msg, None, None) FileStorageEngine.register(FTPStorage)
orig_hash = sha1(file.read()).hexdigest() # Try to download the file. Increase the number of retries, or the # timeout duration, if the server is particularly slow. # eg: Akamai usually takes 3-15 seconds to make an uploaded file # available over HTTP. for i in xrange(max_tries): try: temp_file = urlopen(file_url) dl_hash = sha1(temp_file.read()).hexdigest() temp_file.close() except HTTPError, http_err: # Don't raise the exception now, wait until all attempts fail time.sleep(3) else: # If the downloaded file matches, success! Otherwise, we can # be pretty sure that it got corrupted during FTP transfer. if orig_hash == dl_hash: return True else: msg = _('The file transferred to your FTP server is '\ 'corrupted. Please try again.') raise FTPUploadError(msg, None, None) # Raise the exception from the last download attempt msg = _('Could not download the file from your FTP server: %s')\ % http_err.message raise FTPUploadError(msg, None, None) FileStorageEngine.register(FTPStorage)
# --- Fragment: tail of AmazonS3Storage.get_uris (method header and the
# definitions of `uris` and `s3_bucket_name` are outside this chunk). ---
# NOTE(review): `s3_bucket_dir` is assigned but never read here —
# `_get_path` re-reads the same setting itself; looks like dead code.
s3_bucket_dir = self._data['s3_bucket_dir']
s3_bucket_url = 'https://%s.s3.amazonaws.com/' % s3_bucket_name
cf_download_domain = self._data['cf_download_domain']
cf_streaming_domain = self._data['cf_streaming_domain']

file_path = self._get_path(media_file.unique_id)

# Prefer the CloudFront HTTP distribution when configured; otherwise
# fall back to the plain S3 bucket URL.
if cf_download_domain:
    cf_download_url = 'http://%s' % cf_download_domain
    uris.append(StorageURI(media_file, 'http', file_path, cf_download_url))
else:
    uris.append(StorageURI(media_file, 'http', file_path, s3_bucket_url))

# Optional RTMP streaming URI via a CloudFront streaming distribution.
if cf_streaming_domain:
    cf_streaming_url = 'http://%s/cfx/st' % cf_streaming_domain
    uris.append(StorageURI(media_file, 'rtmp', file_path, cf_streaming_url))

return uris

def _get_path(self, unique_id):
    """Return the bucket-relative file path for the given unique ID.

    Joins the configured ``s3_bucket_dir`` prefix (when non-empty) with
    *unique_id*; otherwise returns *unique_id* unchanged.

    This method is exclusive to this engine.
    """
    basepath = self._data['s3_bucket_dir']
    if basepath:
        return os.path.join(basepath, unique_id)
    return unique_id

# Register this engine with the storage-engine registry.
FileStorageEngine.register(AmazonS3Storage)
# Remotely *download* accessible URL url = url_for(controller='/media', action='serve', id=media_file.id, slug=media_file.media.slug, container=media_file.container, qualified=True, download=1) uris.append(StorageURI(media_file, 'download', url, None)) # Internal file URI that will be used by MediaController.serve path = urlunsplit( ('file', '', self._get_path(media_file.unique_id), '', '')) uris.append(StorageURI(media_file, 'file', path, None)) return uris def _get_path(self, unique_id): """Return the local file path for the given unique ID. This method is exclusive to this engine. """ basepath = self._data.get('path', None) if not basepath: basepath = config['media_dir'] return os.path.join(basepath, unique_id) FileStorageEngine.register(LocalFileStorage)
uris.append(StorageURI(media_file, 'http', url, None)) # An optional streaming RTMP URI rtmp_server_uri = self._data.get('rtmp_server_uri', None) if rtmp_server_uri: uris.append(StorageURI(media_file, 'rtmp', media_file.unique_id, rtmp_server_uri)) # Remotely *download* accessible URL url = url_for(controller='/media', action='serve', id=media_file.id, slug=media_file.media.slug, container=media_file.container, qualified=True, download=1) uris.append(StorageURI(media_file, 'download', url, None)) # Internal file URI that will be used by MediaController.serve path = urlunsplit(('file', '', self._get_path(media_file.unique_id), '', '')) uris.append(StorageURI(media_file, 'file', path, None)) return uris def _get_path(self, unique_id): """Return the local file path for the given unique ID. This method is exclusive to this engine. """ basepath = self._data.get('path', None) if not basepath: basepath = config['media_dir'] return os.path.join(basepath, unique_id) FileStorageEngine.register(LocalFileStorage)
# --- Fragment: the `else:` below belongs to an `if` whose header is
# outside this chunk. Presumably: defer the transcode until the request
# commits when inside a request, otherwise commit and transcode now —
# TODO confirm against the missing `if` condition. ---
    request.commit_callbacks.append(autocommitted_transcode)
else:
    DBSession.commit()
    transcode()

def get_uris(self, media_file):
    """Return a list of URIs from which the stored file can be accessed.

    :type media_file: :class:`~mediacore.model.media.MediaFile`
    :param media_file: The associated media file object.
    :rtype: list
    :returns: All :class:`StorageURI` tuples for this file.
    """
    base_urls = list(self.base_urls)

    # Skip s3 http url if cloudfront http url is available.
    # NOTE(review): assumes self.base_urls is an ordered sequence of
    # (scheme, base_url) pairs with S3 at index 0 and CloudFront HTTP at
    # index 1 — confirm against the engine's base_urls definition.
    if base_urls[1][0]:
        base_urls = base_urls[1:]

    uris = []
    file_uri = media_file.unique_id
    for scheme, base_url in base_urls:
        # Entries with an empty scheme are unconfigured — skip them.
        if not scheme:
            continue
        uri = StorageURI(media_file, scheme, file_uri, base_url)
        uris.append(uri)
    return uris

# Register this engine with the storage-engine registry.
FileStorageEngine.register(PandaStorage)