def upload_part(apikey, filename, filepath, filesize, storage, start_response, part):
    with open(filepath, 'rb') as f:
        f.seek(part['seek_point'])
        part_bytes = io.BytesIO(f.read(DEFAULT_PART_SIZE))

    payload_base = {
        'apikey': apikey,
        'uri': start_response['uri'],
        'region': start_response['region'],
        'upload_id': start_response['upload_id'],
        'store': {
            'location': storage
        },
        'part': part['num']
    }

    global CHUNK_SIZE
    chunk_data = part_bytes.read(CHUNK_SIZE)
    offset = 0

    while chunk_data:
        payload = payload_base.copy()
        payload.update({
            'size': len(chunk_data),
            'md5': b64encode(hashlib.md5(chunk_data).digest()).strip().decode('utf-8'),
            'offset': offset,
            'fii': True
        })

        try:
            url = 'https://{}/multipart/upload'.format(start_response['location_url'])
            api_resp = requests.post(url, json=payload).json()
            s3_resp = requests.put(api_resp['url'], headers=api_resp['headers'], data=chunk_data)
            if not s3_resp.ok:
                raise Exception('Incorrect S3 response')
            offset += len(chunk_data)
            chunk_data = part_bytes.read(CHUNK_SIZE)
        except Exception as e:
            log.error('Upload failed: %s', str(e))
            # shrink the shared chunk size and retry the same offset with smaller chunks
            with lock:
                if CHUNK_SIZE >= len(chunk_data):
                    decrease_chunk_size()
            part_bytes.seek(offset)
            chunk_data = part_bytes.read(CHUNK_SIZE)

    # commit the whole part once all of its chunks have been accepted
    payload = payload_base.copy()
    payload.update({'size': filesize})
    url = 'https://{}/multipart/commit'.format(start_response['location_url'])
    requests.post(url, json=payload)
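# Illustrative sketch only: upload_part() relies on module-level names (DEFAULT_PART_SIZE,
# CHUNK_SIZE, lock, decrease_chunk_size) that are not shown in this excerpt. The values
# and the halving strategy below are assumptions about their shape, not the library's
# actual implementation.
import threading

DEFAULT_PART_SIZE = 8 * 1024 ** 2   # assumed part size (8 MB)
CHUNK_SIZE = 8 * 1024 ** 2          # assumed starting chunk size, shared across workers
MINIMUM_CHUNK_SIZE = 32 * 1024      # assumed lower bound for the backoff
lock = threading.Lock()


def decrease_chunk_size():
    # halve the shared chunk size after a failed chunk upload, but never drop below the minimum
    global CHUNK_SIZE
    CHUNK_SIZE = max(CHUNK_SIZE // 2, MINIMUM_CHUNK_SIZE)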
def test_req_wrapper_overwrite_headers(post_mock):
    requests.post(TEST_URL)
    post_args, post_kwargs = post_mock.call_args
    headers_sent = post_kwargs['headers']
    assert post_args[0] == TEST_URL
    assert headers_sent['User-Agent'] == 'filestack-python {}'.format(__version__)
    assert headers_sent['Filestack-Source'] == 'Python-{}'.format(__version__)
    assert re.match(r'\d+-[a-zA-Z0-9]{10}', headers_sent['Filestack-Trace-Id'])
    assert re.match(r'pythonsdk-[a-zA-Z0-9]{10}', headers_sent['Filestack-Trace-Span'])
def upload_external_url(url, apikey, storage, store_params=None, security=None):
    store_params = store_params or {}
    if storage and not store_params.get('location'):
        store_params['location'] = storage

    # remove params that are currently not supported in external url upload
    for item in ('mimetype', 'upload_tags'):
        store_params.pop(item, None)

    payload = {
        'apikey': apikey,
        'sources': [url],
        'tasks': [{
            'name': 'store',
            'params': store_params
        }]
    }

    if security is not None:
        payload['tasks'].append({
            'name': 'security',
            'params': {
                'policy': security.policy_b64,
                'signature': security.signature
            }
        })

    response = requests.post('{}/process'.format(config.CDN_URL), json=payload)
    return response.json()
def store(self, filename=None, location=None, path=None, container=None,
          region=None, access=None, base64decode=None, workflows=None):
    """
    Stores current object as a new :class:`filestack.Filelink`.

    Args:
        filename (str): name for the stored file
        location (str): your storage location, one of: :data:`"s3"`, :data:`"azure"`,
            :data:`"dropbox"`, :data:`"rackspace"`, :data:`"gcs"`
        container (str): the bucket or container (folder) in which to store the file
            (does not apply when storing to Dropbox)
        path (str): the path to store the file within the specified container
        region (str): your storage region (applies to S3 only)
        access (str): :data:`"public"` or :data:`"private"` (applies to S3 only)
        base64decode (bool): indicates if content should be decoded before it is stored
        workflows (list): IDs of `Filestack Workflows
            <https://www.filestack.com/products/workflows>`_ that should be triggered
            after upload

    Returns:
        :class:`filestack.Filelink`: new Filelink object
    """
    if path:
        path = '"{}"'.format(path)
    instance = self._add_transform_task('store', locals())
    response = requests.post(instance.url)
    return filestack.models.Filelink(handle=response.json()['handle'])
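# Hedged usage sketch for store(): assuming `transform` is a transformation object that
# exposes the method above (how such an object is obtained lies outside this excerpt),
# storing it yields a new Filelink whose handle can be read back.
new_filelink = transform.store(filename='processed-image', location='s3', path='images/')
print(new_filelink.handle)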
def upload_external_url(url, apikey, store_params=None, security=None):
    store_task = build_store_task(store_params or {})
    url_elements = [config.CDN_URL, apikey, store_task, url]
    if security is not None:
        url_elements.insert(3, security.as_url_string())
    response = requests.post('/'.join(url_elements))
    return response.json()['handle']
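# Hedged usage sketch: the CDN-based upload_external_url() above returns a handle
# string, which can then be wrapped in a Filelink (the apikey value is a placeholder).
handle = upload_external_url('https://example.com/image.jpg', '<YOUR_APIKEY>',
                             store_params={'location': 's3'})
filelink = filestack.models.Filelink(handle=handle)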
def multipart_request(url, payload, params=None, security=None):
    params = params or {}
    for key in ('path', 'location', 'region', 'container', 'access'):
        if key in params:
            payload['store'][key] = params[key]

    if security:
        payload.update({
            'policy': security.policy_b64,
            'signature': security.signature
        })

    return requests.post(url, json=payload).json()
def upload_chunk(apikey, filename, storage, start_response, chunk):
    payload = {
        'apikey': apikey,
        'part': chunk.num,
        'size': len(chunk.bytes),
        'md5': b64encode(hashlib.md5(chunk.bytes).digest()).strip().decode('utf-8'),
        'uri': start_response['uri'],
        'region': start_response['region'],
        'upload_id': start_response['upload_id'],
        'store': {
            'location': storage,
        }
    }
    fs_resp = requests.post(
        'https://{}/multipart/upload'.format(start_response['location_url']),
        json=payload
    ).json()
    resp = requests.put(fs_resp['url'], headers=fs_resp['headers'], data=chunk.bytes)
    return {'part_number': chunk.num, 'etag': resp.headers['ETag']}
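# Hedged sketch (not part of the SDK) of how upload_chunk() results might be combined:
# each call returns a part number and ETag, and a regular multipart flow typically
# collects these and sends them with the complete request. The exact complete payload
# shape and the helper name below are assumptions.
import functools
from multiprocessing.pool import ThreadPool


def finish_upload(apikey, filename, storage, start_response, chunks):
    upload_func = functools.partial(upload_chunk, apikey, filename, storage, start_response)
    with ThreadPool(4) as pool:
        parts_and_etags = pool.map(upload_func, chunks)

    complete_payload = {
        'apikey': apikey,
        'uri': start_response['uri'],
        'region': start_response['region'],
        'upload_id': start_response['upload_id'],
        'filename': filename,
        'parts': parts_and_etags,
        'store': {'location': storage},
    }
    url = 'https://{}/multipart/complete'.format(start_response['location_url'])
    return requests.post(url, json=complete_payload).json()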
def overwrite(self, *, filepath=None, url=None, file_obj=None, base64decode=False, security=None):
    """
    Overwrites filelink with new content

    Args:
        filepath (str): path to file
        url (str): file URL
        file_obj (io.BytesIO or similar): file-like object
        base64decode (bool): indicates if content should be decoded before it is stored
        security (:class:`filestack.Security`): Security object that will be used
            to overwrite filelink

    Note:
        This method accepts keyword arguments only.
        Out of filepath, url and file_obj only one should be provided.
    """
    sec = security or self.security
    if sec is None:
        raise Exception('Security is required to overwrite filelink')

    req_params = {
        'policy': sec.policy_b64,
        'signature': sec.signature,
        'base64decode': str(base64decode).lower()
    }

    request_url = '{}/file/{}'.format(config.API_URL, self.handle)
    if url:
        requests.post(request_url, params=req_params, data={'url': url})
    elif filepath:
        with open(filepath, 'rb') as f:
            files = {'fileUpload': ('filename', f, 'application/octet-stream')}
            requests.post(request_url, params=req_params, files=files)
    elif file_obj:
        files = {'fileUpload': ('filename', file_obj, 'application/octet-stream')}
        requests.post(request_url, params=req_params, files=files)
    else:
        raise Exception('filepath, file_obj or url argument must be provided')

    return self
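# Hedged usage sketch for overwrite(): a Filelink combined with a Security object can be
# overwritten from a local path, a URL, or a file-like object. The Security construction
# below (policy dict plus app secret) is an assumption about the surrounding SDK API.
security = filestack.Security({'expiry': 1756389600, 'call': ['write']}, '<APP_SECRET>')
filelink = filestack.models.Filelink(handle='<EXISTING_HANDLE>', security=security)
filelink.overwrite(filepath='/path/to/new-content.jpg')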
def upload(apikey, filepath, file_obj, storage, params=None, security=None):
    params = params or {}
    filename = params.get('filename') or os.path.split(filepath)[1]
    mimetype = params.get('mimetype') or mimetypes.guess_type(filepath)[0] or config.DEFAULT_UPLOAD_MIMETYPE
    filesize = os.path.getsize(filepath)

    payload = {
        'apikey': apikey,
        'filename': filename,
        'mimetype': mimetype,
        'size': filesize,
        'fii': True,
        'store': {
            'location': storage
        }
    }

    for key in ('path', 'location', 'region', 'container', 'access'):
        if key in params:
            payload['store'][key] = params[key]

    if security:
        payload.update({
            'policy': security.policy_b64,
            'signature': security.signature
        })

    start_response = requests.post(config.MULTIPART_START_URL, json=payload).json()
    parts = [
        {'seek_point': seek_point, 'num': num + 1}
        for num, seek_point in enumerate(range(0, filesize, DEFAULT_PART_SIZE))
    ]

    fii_upload = functools.partial(
        upload_part, apikey, filename, filepath, filesize, storage, start_response
    )

    with ThreadPool(NUM_THREADS) as pool:
        pool.map(fii_upload, parts)

    payload.update({
        'uri': start_response['uri'],
        'region': start_response['region'],
        'upload_id': start_response['upload_id'],
    })

    if params.get('workflows'):
        payload['store']['workflows'] = params['workflows']

    complete_url = 'https://{}/multipart/complete'.format(start_response['location_url'])
    # the complete endpoint may answer 202 while parts are still being processed,
    # so retry with increasing delays until a 200 is returned
    for wait_time in (0, 1, 2, 3, 5):
        time.sleep(wait_time)
        complete_response = requests.post(complete_url, json=payload, headers=config.HEADERS)
        log.debug('Complete response: %s. Content: %s', complete_response, complete_response.content)
        if complete_response.status_code == 200:
            break
    else:
        log.error(
            'Did not receive a correct complete response: %s. Content %s',
            complete_response, complete_response.content
        )
        raise Exception('Invalid complete response: {}'.format(complete_response.content))

    return complete_response.json()
def upload(apikey, filepath, file_obj, storage, params=None, security=None):
    params = params or {}
    filename = params.get('filename') or os.path.split(filepath)[1]
    mimetype = params.get('mimetype') or mimetypes.guess_type(filepath)[0] or config.DEFAULT_UPLOAD_MIMETYPE
    filesize = os.path.getsize(filepath)

    payload = {
        'apikey': apikey,
        'filename': filename,
        'mimetype': mimetype,
        'size': filesize,
        'fii': True,
        'store': {
            'location': storage
        }
    }

    for key in ('path', 'location', 'region', 'container', 'access'):
        if key in params:
            payload['store'][key] = params[key]

    if security:
        payload.update({
            'policy': security.policy_b64,
            'signature': security.signature
        })

    start_response = requests.post(config.MULTIPART_START_URL, json=payload).json()
    parts = [
        {'seek_point': seek_point, 'num': num + 1}
        for num, seek_point in enumerate(range(0, filesize, DEFAULT_PART_SIZE))
    ]

    fii_upload = functools.partial(
        upload_part, apikey, filename, filepath, filesize, storage, start_response
    )

    with ThreadPool(NUM_THREADS) as pool:
        pool.map(fii_upload, parts)

    payload.update({
        'uri': start_response['uri'],
        'region': start_response['region'],
        'upload_id': start_response['upload_id'],
    })

    if 'workflows' in params:
        payload['store']['workflows'] = params.pop('workflows')

    if 'upload_tags' in params:
        payload['upload_tags'] = params.pop('upload_tags')

    complete_url = 'https://{}/multipart/complete'.format(start_response['location_url'])
    # retry the complete call while parts are still being processed (HTTP 202);
    # mount the retrying adapter for https, since complete_url always uses https
    session = requests.Session()
    retries = Retry(total=7, backoff_factor=0.2, status_forcelist=[202],
                    method_whitelist=frozenset(['POST']))
    session.mount('https://', requests.adapters.HTTPAdapter(max_retries=retries))
    response = session.post(complete_url, json=payload, headers=config.HEADERS)
    if response.status_code != 200:
        log.error(
            'Did not receive a correct complete response: %s. Content %s',
            response, response.content
        )
        raise Exception('Invalid complete response: {}'.format(response.content))

    return response.json()
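# Compatibility sketch (not part of the SDK): urllib3 1.26 deprecated Retry's
# method_whitelist argument in favour of allowed_methods, and urllib3 2.x removes the
# old spelling entirely. If both urllib3 generations need to be supported, a
# version-tolerant construction could look like this:
from urllib3.util.retry import Retry

try:
    retries = Retry(total=7, backoff_factor=0.2, status_forcelist=[202],
                    allowed_methods=frozenset(['POST']))
except TypeError:
    # older urllib3 releases only accept the method_whitelist spelling
    retries = Retry(total=7, backoff_factor=0.2, status_forcelist=[202],
                    method_whitelist=frozenset(['POST']))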
def test_req_wrapper_raise_exception(post_mock):
    post_mock.return_value = DummyHttpResponse(ok=False, content=b'oops!')
    with pytest.raises(Exception, match='oops!'):
        requests.post(TEST_URL)
def test_req_wrapper_use_provided_headers(post_mock):
    custom_headers = {'something': 'used explicitly'}
    requests.post(TEST_URL, headers=custom_headers)
    post_args, post_kwargs = post_mock.call_args
    assert post_args[0] == TEST_URL
    assert post_kwargs['headers'] == custom_headers
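# Hedged sketch of the kind of requests wrapper the req_wrapper tests in this section
# exercise: a thin layer over requests.post that injects the Filestack headers checked
# in test_req_wrapper_overwrite_headers, leaves explicitly provided headers untouched,
# and raises when the response is not ok. Names below are illustrative, not the SDK's
# actual implementation.
import random
import string
import time

import requests as original_requests

from filestack import __version__


def _trace_suffix(length=10):
    return ''.join(random.choice(string.ascii_letters + string.digits) for _ in range(length))


def post(url, *args, **kwargs):
    if 'headers' not in kwargs:
        kwargs['headers'] = {
            'User-Agent': 'filestack-python {}'.format(__version__),
            'Filestack-Source': 'Python-{}'.format(__version__),
            'Filestack-Trace-Id': '{}-{}'.format(int(time.time()), _trace_suffix()),
            'Filestack-Trace-Span': 'pythonsdk-{}'.format(_trace_suffix()),
        }
    response = original_requests.post(url, *args, **kwargs)
    if not response.ok:
        raise Exception(response.text)
    return response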