def upload_assets(user, secret, bucket, _retries=5):
    """Build the static assets and upload every built file to S3.

    Arguments:
        user: AWS access key id; falls back to the ``AWS_ACCESS_KEY_ID``
            environment variable when falsy
        secret: AWS secret key; falls back to ``AWS_SECRET_ACCESS_KEY``
        bucket: target S3 bucket name; falls back to ``S3_BUCKET_NAME``
        _retries: maximum number of upload attempts per file (default 5)
    """
    access_key = user if user else os.environ["AWS_ACCESS_KEY_ID"]
    access_secret = secret if secret else os.environ["AWS_SECRET_ACCESS_KEY"]
    bucket = bucket if bucket else os.environ["S3_BUCKET_NAME"]

    import subprocess

    # build assets and capture the output so the built file paths can be
    # read back from stdout below
    print("Building assets...")
    proc = subprocess.Popen(
        ["python", "manage.py", "assets", "build"],
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT
    )
    proc.wait()

    print("Connecting to S3...")
    conn, bucket = connect_to_s3(access_key, access_secret, bucket)

    print("Uploading files...")
    for path in proc.communicate()[0].split("\n")[:-1]:
        key = path.split("public")[1]
        print("Uploading {}".format(key))
        # reset the retry budget for every file so a partially-exhausted
        # counter from an earlier file cannot shrink this file's budget
        retries = 0
        while retries <= _retries:
            try:
                upload_file(key, bucket, root=current_app.config["APP_DIR"] + "/static")
                break
            except Exception as e:
                # best-effort upload: report, then retry up to _retries times
                print("Error: {} Retrying...".format(e))
                retries += 1
        if retries > _retries:
            print("File {} did not upload".format(key))
def upload_assets(user, secret, bucket, _retries=5):
    """Build the static assets, then push each built file up to S3.

    Arguments:
        user: AWS access key id; the ``AWS_ACCESS_KEY_ID`` environment
            variable is used when this is falsy
        secret: AWS secret key; ``AWS_SECRET_ACCESS_KEY`` is the fallback
        bucket: S3 bucket name; ``S3_BUCKET_NAME`` is the fallback
        _retries: how many upload attempts each file gets (default 5)
    """
    access_key = user if user else os.environ['AWS_ACCESS_KEY_ID']
    access_secret = secret if secret else os.environ['AWS_SECRET_ACCESS_KEY']
    bucket = bucket if bucket else os.environ['S3_BUCKET_NAME']

    import subprocess

    # build assets and capture the output; the build's stdout lists the
    # generated file paths consumed by the upload loop below
    print('Building assets...')
    proc = subprocess.Popen(
        ['python', 'manage.py', 'assets', 'build'],
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT
    )
    proc.wait()

    print('Connecting to S3...')
    conn, bucket = connect_to_s3(access_key, access_secret, bucket)

    print('Uploading files...')
    for path in proc.communicate()[0].split('\n')[:-1]:
        key = path.split('public')[1]
        print('Uploading {}'.format(key))
        # give each file its own retry budget -- carrying a counter across
        # files would silently cut later files' retries short
        retries = 0
        while retries <= _retries:
            try:
                upload_file(key, bucket, root=current_app.config['APP_DIR'] + '/static')
                break
            except Exception as e:
                # report and retry; this is a deliberately broad best-effort
                print('Error: {} Retrying...'.format(e))
                retries += 1
        if retries > _retries:
            print('File {} did not upload'.format(key))
def upload_document(self, _id):
    '''Take the document and filename and either upload it to S3 or the local uploads folder

    Arguments:
        _id: The id of the
            :py:class:`~purchasing.opportunities.models.Opportunity`
            the document will be attached to

    Returns:
        A two-tuple of (the document name, the document filepath/url),
        or (None, None) when no usable document was submitted
    '''
    if self.document.data is None or self.document.data == '':
        return None, None

    filename = secure_filename(self.document.data.filename)

    # secure_filename can strip a hostile name down to nothing
    if filename == '':
        return None, None

    _filename = 'opportunity-{}-{}'.format(_id, filename)

    if current_app.config.get('UPLOAD_S3') is True:
        # upload file to s3
        conn, bucket = connect_to_s3(
            current_app.config['AWS_ACCESS_KEY_ID'],
            current_app.config['AWS_SECRET_ACCESS_KEY'],
            current_app.config['UPLOAD_DESTINATION']
        )
        _document = bucket.new_key(_filename)
        aggressive_headers = _get_aggressive_cache_headers(_document)
        _document.set_contents_from_file(self.document.data, headers=aggressive_headers, replace=True)
        _document.set_acl('public-read')
        return _document.name, _document.generate_url(expires_in=0, query_auth=False)
    else:
        try:
            os.mkdir(current_app.config['UPLOAD_DESTINATION'])
        except OSError:
            # directory already exists (or is otherwise unavailable);
            # the save below will surface any real problem
            pass
        filepath = os.path.join(current_app.config['UPLOAD_DESTINATION'], _filename)
        self.document.data.save(filepath)
        return _filename, filepath
def upload_document(self, _id):
    '''Take the document and filename and either upload it to S3 or the local uploads folder

    Arguments:
        _id: The id of the
            :py:class:`~purchasing.opportunities.models.Opportunity`
            the document will be attached to

    Returns:
        A two-tuple of (the document name, the document filepath/url),
        or (None, None) when nothing usable was submitted
    '''
    if self.document.data is None or self.document.data == '':
        return None, None

    filename = secure_filename(self.document.data.filename)

    # secure_filename may reduce an unsafe name to the empty string
    if filename == '':
        return None, None

    _filename = 'opportunity-{}-{}'.format(_id, filename)

    if current_app.config.get('UPLOAD_S3') is True:
        # upload file to s3
        conn, bucket = connect_to_s3(
            current_app.config['AWS_ACCESS_KEY_ID'],
            current_app.config['AWS_SECRET_ACCESS_KEY'],
            current_app.config['UPLOAD_DESTINATION'])

        _document = bucket.new_key(_filename)
        aggressive_headers = _get_aggressive_cache_headers(_document)
        _document.set_contents_from_file(self.document.data, headers=aggressive_headers, replace=True)
        _document.set_acl('public-read')
        return _document.name, _document.generate_url(expires_in=0, query_auth=False)
    else:
        try:
            os.mkdir(current_app.config['UPLOAD_DESTINATION'])
        except OSError:
            # most likely the directory already exists; a real filesystem
            # problem will surface in the save call below
            pass
        filepath = os.path.join(current_app.config['UPLOAD_DESTINATION'], _filename)
        self.document.data.save(filepath)
        return _filename, filepath
def upload_document(self, _id):
    '''Take the document and filename and either upload it to S3 or the local uploads folder

    Arguments:
        _id: The id of the
            :py:class:`~purchasing.opportunities.models.Opportunity`
            the document will be attached to

    Returns:
        A two-tuple of (the document name, the document filepath/url),
        or (None, None) when no document was submitted
    '''
    if self.document.data is None or self.document.data == '':
        return None, None

    filename = secure_filename(self.document.data.filename)

    # a fully-sanitized hostile filename comes back empty
    if filename == '':
        return None, None

    _filename = 'opportunity-{}-{}'.format(_id, filename)

    if current_app.config.get('UPLOAD_S3') is True:
        # upload file to s3
        conn, bucket = connect_to_s3(
            current_app.config['AWS_ACCESS_KEY_ID'],
            current_app.config['AWS_SECRET_ACCESS_KEY'],
            current_app.config['UPLOAD_DESTINATION']
        )
        _document = bucket.new_key(_filename)
        aggressive_headers = _get_aggressive_cache_headers(_document)
        _document.set_contents_from_file(self.document.data, headers=aggressive_headers, replace=True)
        _document.set_acl('public-read')
        return _document.name, _document.generate_url(expires_in=0, query_auth=False)
    else:
        try:
            os.mkdir(current_app.config['UPLOAD_DESTINATION'])
        except OSError:
            # the uploads directory usually exists already; anything worse
            # will raise again from the save below
            pass
        filepath = os.path.join(current_app.config['UPLOAD_DESTINATION'], _filename)
        self.document.data.save(filepath)
        return _filename, filepath
def upload_costars_contract(_file):
    '''Upload a COSTARS pdf document to S3

    Arguments:
        _file: A werkzeug `FileStorage`_ object

    Returns:
        A two-tuple of (the name of the uploaded file, the path/url to the file)
    '''
    filename = secure_filename(_file.filename)

    if current_app.config['UPLOAD_S3']:
        # ``except Exception: raise`` in the original added nothing --
        # try/finally alone gives the same guarantee that the event
        # listeners are restored and the search view refreshed whether or
        # not the upload succeeds
        try:
            turn_off_sqlalchemy_events()
            conn, bucket = connect_to_s3(
                current_app.config['AWS_ACCESS_KEY_ID'],
                current_app.config['AWS_SECRET_ACCESS_KEY'],
                'costars')
            file_href = upload_file(filename, bucket, input_file=_file, prefix='/', from_file=True)
            return filename, file_href
        finally:
            turn_on_sqlalchemy_events()
            refresh_search_view()
    else:
        try:
            os.mkdir(current_app.config['UPLOAD_DESTINATION'])
        except OSError:
            # the destination directory already exists; a genuine
            # filesystem problem will resurface in the save below
            pass
        filepath = os.path.join(current_app.config['UPLOAD_DESTINATION'], filename)
        _file.save(filepath)
        return filename, filepath
def upload_costars_contract(_file):
    '''Upload a COSTARS pdf document to S3 or the local uploads folder

    Arguments:
        _file: A werkzeug `FileStorage`_ object

    Returns:
        A two-tuple of (the name of the uploaded file, the path/url to the file)
    '''
    filename = secure_filename(_file.filename)

    if current_app.config['UPLOAD_S3']:
        conn, bucket = connect_to_s3(
            current_app.config['AWS_ACCESS_KEY_ID'],
            current_app.config['AWS_SECRET_ACCESS_KEY'],
            'costars'
        )
        file_href = upload_file(filename, bucket, input_file=_file, prefix='/', from_file=True)
        return filename, file_href
    else:
        try:
            os.mkdir(current_app.config['UPLOAD_DESTINATION'])
        except OSError:
            # directory already exists; real failures will raise again
            # from the save call below
            pass
        filepath = os.path.join(current_app.config['UPLOAD_DESTINATION'], filename)
        _file.save(filepath)
        return filename, filepath
def upload_costars_contract(_file):
    '''Upload a COSTARS pdf document to S3

    Arguments:
        _file: A werkzeug `FileStorage`_ object

    Returns:
        A two-tuple of (the name of the uploaded file, the path/url to the file)
    '''
    filename = secure_filename(_file.filename)

    if current_app.config['UPLOAD_S3']:
        # try/finally is sufficient here: the original's bare
        # ``except Exception: raise`` clause was a no-op re-raise, and the
        # finally block already runs on both success and failure, restoring
        # the sqlalchemy event listeners and refreshing the search view
        try:
            turn_off_sqlalchemy_events()
            conn, bucket = connect_to_s3(
                current_app.config['AWS_ACCESS_KEY_ID'],
                current_app.config['AWS_SECRET_ACCESS_KEY'],
                'costars'
            )
            file_href = upload_file(filename, bucket, input_file=_file, prefix='/', from_file=True)
            return filename, file_href
        finally:
            turn_on_sqlalchemy_events()
            refresh_search_view()
    else:
        try:
            os.mkdir(current_app.config['UPLOAD_DESTINATION'])
        except OSError:
            # destination already exists; anything more serious surfaces
            # again in the save below
            pass
        filepath = os.path.join(current_app.config['UPLOAD_DESTINATION'], filename)
        _file.save(filepath)
        return filename, filepath