def create_signature(request, *args, **argv): #pylint: disable=unused-argument
    """Save a signature for a user, given their pin.

    Args:
        request (HTTPRequest): A request to get the user's session.

    Returns:
        (JsonResponse): A JSON response with a success message.
    """
    # Authenticate the user and read the posted signature image data.
    claims = verify_session(request)
    uid = claims['uid']
    payload = loads(request.body.decode('utf-8'))

    # Upload the signature image to storage and resolve its download URL.
    settings_ref = f'admin/auth/{uid}/user_settings'
    ref = f'{settings_ref}/signature.png'
    upload_file(BUCKET_NAME, ref, data_url=payload['data_url'])
    url = get_file_url(ref, bucket_name=BUCKET_NAME)

    # Record the signature metadata on the user's settings document.
    created_at = datetime.now().isoformat()
    update_document(settings_ref, {
        'signature_created_at': created_at,
        'signature_url': url,
        'signature_ref': ref,
    })

    # Keep an audit log of the signature creation.
    create_log(
        f'users/{uid}/logs',
        claims,
        'Created signature.',
        'signature',
        'signature_create',
        [{'created_at': created_at}],
    )
    return JsonResponse({
        'success': True,
        'message': 'Signature saved.',
        'signature_url': url,
    })
def test_storage():
    """Test Firebase Storage by managing a test file."""
    # Get the path to your service account and the bucket name.
    from dotenv import dotenv_values
    env = dotenv_values('./env')
    key_path = env['GOOGLE_APPLICATION_CREDENTIALS']
    bucket_name = env['FIREBASE_STORAGE_BUCKET']

    # Initialize Firebase.
    firebase.initialize_firebase(key_path, bucket_name)

    # Define the fixture file locations.
    remote_folder = 'tests/assets/pdfs'
    remote_file = 'tests/assets/pdfs/pandas_cheat_sheet.pdf'
    local_folder = './assets/pdfs'
    local_file = './assets/pdfs/Pandas_Cheat_Sheet.pdf'
    download_folder = './assets/downloads/pdfs'
    download_file = './assets/downloads/pdfs/Pandas_Cheat_Sheet.pdf'
    base_name = 'pandas_cheat_sheet.pdf'
    copy_name = 'pandas_cheat_sheet_copy.pdf'
    renamed_file = 'tests/assets/pdfs/' + copy_name

    # Upload a single file, with and without an explicit bucket name.
    firebase.upload_file(remote_file, local_file)
    firebase.upload_file(remote_file, local_file, bucket_name)

    # Upload a whole folder, with and without an explicit bucket name.
    firebase.upload_files(remote_folder, local_folder)
    firebase.upload_files(remote_folder, local_folder, bucket_name)

    # List the folder's files, with and without an explicit bucket name.
    assert isinstance(firebase.list_files(remote_folder), list)
    assert isinstance(firebase.list_files(remote_folder, bucket_name), list)

    # Download a single file, with and without an explicit bucket name.
    firebase.download_file(remote_file, download_file)
    firebase.download_file(remote_file, download_file, bucket_name)

    # Download a whole folder, with and without an explicit bucket name.
    firebase.download_files(remote_folder, download_folder)
    firebase.download_files(remote_folder, download_folder, bucket_name)

    # Rename the uploaded file, with and without an explicit bucket name.
    firebase.rename_file(remote_folder, base_name, renamed_file)
    firebase.rename_file(remote_folder, base_name, renamed_file, bucket_name)

    # Delete the renamed copy, with and without an explicit bucket name.
    firebase.delete_file(remote_folder, copy_name)
    firebase.delete_file(remote_folder, copy_name, bucket_name)
def download_dataset(claims, collection, data_points):
    """Download a given dataset.

    Args:
        claims (dict): The user's verified session claims; must contain
            `uid` and `email` (a `name` entry is optional).
        collection (str): The Firestore collection path to download.
        data_points (list): The columns to include, in order.

    Returns:
        (tuple): The path of the temporary CSV file and the suggested
            download filename, or `(None, None)` if the claims are
            missing a required field.
    """
    # Get the user's data, returning if not authenticated.
    try:
        uid = claims['uid']
        user_email = claims['email']
    except KeyError:
        return None, None
    name = claims.get('name', 'Unknown')

    # Get data points in specified order.
    collection_data = get_collection(collection, order_by='state')
    dataframe = DataFrame.from_dict(collection_data, orient='columns')
    data = dataframe[data_points]

    # Convert JSON to CSV. Giving the temp file a `.csv` suffix directly
    # avoids the original bug of writing to `temp.name + '.csv'`, which
    # left an orphaned, empty temp file at `temp.name` on every call.
    with NamedTemporaryFile(delete=False, suffix='.csv') as temp:
        temp_name = temp.name
    data.to_csv(temp_name, index=False)

    # Post a copy of the data to Firebase Storage.
    timestamp = datetime.now().strftime('%Y-%m-%d_%H-%M-%S')
    data_type = collection.split('/')[-1]
    filename = f'{data_type}_{timestamp}.csv'
    ref = 'public/data/downloads/' + filename
    upload_file(ref, temp_name, bucket_name=STORAGE_BUCKET)

    # Create an activity log.
    log_entry = {
        'data_points': len(data),
        'file': ref,
        'email': user_email,
        'name': name,
        'uid': uid,
    }
    create_log(
        ref='logs/website/downloads',
        claims=claims,
        action=f'User ({user_email}) downloaded {data_type} data.',
        log_type='download',
        key=f'download_{data_type}_data',
        changes=log_entry,
    )

    # Return the file that can be downloaded.
    return temp_name, filename
def upload_video(api_key, bucket_name, file_name, destination, video_data):
    """Upload a video to Firebase Storage, get a storage URL reference
    for the video, and finally create a short link for the video.

    Args:
        api_key (str): Firebase project API key.
        bucket_name (str): The name of the storage bucket.
        file_name (str): The path of the file to upload.
        destination (str): The storage location where the video is uploaded.
        video_data (dict): Metadata about the video; mutated in place.

    Returns:
        (dict): The video data updated with the storage ref and URL.
    """
    # Stamp the upload time before pushing the file to storage.
    video_data['uploaded_at'] = datetime.now().isoformat()
    firebase.upload_file(bucket_name, destination, file_name, verbose=True)
    # Resolve the public storage URL and mint a short link for it.
    video_data['storage_url'] = firebase.get_file_url(destination, bucket_name)
    video_data['short_link'] = firebase.create_short_url(
        api_key, video_data['storage_url'])
    # NOTE(review): `destination` (a storage path) is reused here as the
    # Firestore document path — confirm the two namespaces are meant to match.
    firebase.update_document(destination, video_data)
    return video_data
def upload_dataset_files(root: str = '.datasets'):
    """Upload files accompanying each dataset.

    Args:
        root (str): The root folder of the datasets JSON (optional).
            Note: annotated as `str` (not `Optional[str]`) because the
            default is a path string and `None` is never a valid value.
    """
    bucket = os.environ['FIREBASE_STORAGE_BUCKET']
    # Read the dataset index and the per-dataset file metadata.
    # Explicit UTF-8 avoids platform-dependent default encodings.
    with open(f'{root}/{FILENAME}', encoding='utf-8') as datasets:
        data = json.load(datasets)
    with open(f'{root}/{DATAFILES}', encoding='utf-8') as datafiles:
        files = json.load(datafiles)
    for item in data:
        # Upload each dataset's file and record its URL and timestamp.
        datafile = files[item['id']]
        ref = datafile['ref']
        file_name = datafile['file_name']
        upload_file(ref, file_name, bucket_name=bucket)
        file_url = get_file_url(ref, bucket_name=bucket)
        datafile['url'] = file_url
        datafile['updated_at'] = datetime.now().isoformat()
        update_document(datafile['doc'], datafile)
def upload_file_to_storage(ref: str, file_name: str) -> 'tuple[str, str]':
    """Upload a file to Firebase Storage and create a short URL for it.

    (The previous docstring — "Set the `updated_at` field on all documents
    in a collection." — was a copy-paste error from another function.)

    Args:
        ref (str): The storage location for the file.
        file_name (str): The full file name of the file to upload.

    Returns:
        (tuple): A tuple of the URL link to the file and a short URL.
    """
    print('Uploading file to ', ref)
    bucket = config['FIREBASE_STORAGE_BUCKET']
    upload_file(ref, file_name, bucket_name=bucket)
    file_url = get_file_url(ref, bucket_name=bucket)
    print('File uploaded. URL:', file_url)
    api_key = config['FIREBASE_API_KEY']
    project_name = config['FIREBASE_PROJECT_ID']
    # TODO: Allow for specifying suffix options.
    short_url = create_short_url(api_key, file_url, project_name)
    print('Short URL:', short_url)
    # Return annotation fixed: the function returns a (url, short_url)
    # tuple, not a bare `str` as previously declared.
    return file_url, short_url
def test_storage():
    """Test Firebase Storage by managing a test file."""
    # Initialize Firebase with credentials read from the environment file.
    env = environ.Env()
    env.read_env('../.env')
    credentials = env('GOOGLE_APPLICATION_CREDENTIALS')
    os.environ['GOOGLE_APPLICATION_CREDENTIALS'] = credentials
    firebase.initialize_firebase()

    # Define file names.
    bucket_name = 'cannlytics.appspot.com'
    bucket_folder = 'tests/assets/pdfs'
    destination_blob_name = 'tests/assets/pdfs/pandas_cheat_sheet.pdf'
    local_folder = './assets/pdfs'
    source_file_name = './assets/pdfs/Pandas_Cheat_Sheet.pdf'
    download_folder = './assets/downloads/pdfs'
    download_file_name = './assets/downloads/pdfs/Pandas_Cheat_Sheet.pdf'
    file_name = 'pandas_cheat_sheet.pdf'
    file_copy = 'pandas_cheat_sheet_copy.pdf'
    newfile_name = 'tests/assets/pdfs/' + file_copy

    # Upload a file to a Firebase Storage bucket.
    firebase.upload_file(bucket_name, destination_blob_name, source_file_name)

    # Upload all files in a folder to a Firebase Storage bucket.
    firebase.upload_files(bucket_name, bucket_folder, local_folder)

    # List all files in the Firebase Storage bucket folder.
    # (Fixed anti-idiom: `isinstance(...) == True` → plain assert.)
    files = firebase.list_files(bucket_name, bucket_folder)
    assert isinstance(files, list)

    # Download a file from Firebase Storage.
    firebase.download_file(bucket_name, destination_blob_name, download_file_name)

    # Download all files in a given Firebase Storage folder.
    firebase.download_files(bucket_name, bucket_folder, download_folder)

    # Rename a file in the Firebase Storage bucket.
    firebase.rename_file(bucket_name, bucket_folder, file_name, newfile_name)

    # Delete a file from the Firebase Storage bucket.
    firebase.delete_file(bucket_name, bucket_folder, file_copy)
from console import state

if __name__ == '__main__':

    # Initialize Firebase using credentials from the environment file,
    # then look up the project's web API key and storage bucket.
    env = environ.Env()
    env.read_env('../../.env')
    credentials = env('GOOGLE_APPLICATION_CREDENTIALS')
    os.environ['GOOGLE_APPLICATION_CREDENTIALS'] = credentials
    db = firebase.initialize_firebase()
    api_key = firebase.get_document('admin/firebase')['web_api_key']
    bucket_name = env('FIREBASE_STORAGE_BUCKET')

    # Upload worksheets to Firebase Storage and update model data in Firestore.
    for key, data_model in state.material['data_models'].items():
        try:
            # Upload the worksheet and attach its URL, short link, and
            # timestamp to the data model before saving it to Firestore
            # (both the public path and the test organization's path).
            destination = f'public/state/data_models/{key}_worksheet.xlsm'
            file_name = f'../../console/static/console/worksheets/{key}_worksheet.xlsm'
            firebase.upload_file(bucket_name, destination, file_name)
            data_model['worksheet_url'] = firebase.get_file_url(
                destination, bucket_name)
            data_model['worksheet_short_link'] = firebase.create_short_url(
                api_key, data_model['worksheet_url'])
            data_model['worksheet_uploaded_at'] = datetime.now().isoformat()
            firebase.update_document(f'public/state/data_models/{key}', data_model)
            firebase.update_document(
                f'organizations/test-company/data_models/{key}', data_model)
        except FileNotFoundError:
            # NOTE(review): this reads `data_model['key']`, which raises
            # KeyError if the model dict lacks a 'key' field — the loop
            # variable `key` looks like the intended value; confirm.
            print('No worksheet for %s data model.' % data_model['key'])