def create_signature(request, *args, **argv): #pylint: disable=unused-argument
    """Save a signature for a user, given their pin.
    Args:
        request (HTTPRequest): A request to get the user's session.
    Returns:
        (JsonResponse): A JSON response with a success message.
    """
    # Identify the user from their active session.
    claims = verify_session(request)
    uid = claims['uid']
    # Read the signature image (a data URL) from the request body.
    payload = loads(request.body.decode('utf-8'))
    data_url = payload['data_url']
    # Persist the image to Firebase Storage and resolve its public URL.
    ref = f'admin/auth/{uid}/user_settings/signature.png'
    upload_file(BUCKET_NAME, ref, data_url=data_url)
    url = get_file_url(ref, bucket_name=BUCKET_NAME)
    # Record the signature metadata on the user's settings document.
    timestamp = datetime.now().isoformat()
    update_document(f'admin/auth/{uid}/user_settings', {
        'signature_created_at': timestamp,
        'signature_url': url,
        'signature_ref': ref,
    })
    # Keep an audit trail of the action.
    create_log(
        f'users/{uid}/logs',
        claims,
        'Created signature.',
        'signature',
        'signature_create',
        [{'created_at': timestamp}],
    )
    return JsonResponse({
        'success': True,
        'message': 'Signature saved.',
        'signature_url': url,
    })
def upload_video(api_key, bucket_name, file_name, destination, video_data):
    """Upload a video to Firebase Storage, get a storage URL reference
    for the video, and finally create a short link for the video.
    Args:
        api_key (str): Firebase project API key.
        bucket_name (str): The name of the storage bucket.
        file_name (str): The path of the file to upload.
        destination (str): The storage location for the video, also used
            as the Firestore document reference for its metadata.
        video_data (dict): Metadata about the video.
    Returns:
        (dict): The video data updated with the storage ref and URL.
    """
    # Stamp the upload time, then push the file to storage.
    video_data['uploaded_at'] = datetime.now().isoformat()
    firebase.upload_file(bucket_name, destination, file_name, verbose=True)
    # Resolve the storage URL and shorten it for sharing.
    storage_url = firebase.get_file_url(destination, bucket_name)
    video_data['storage_url'] = storage_url
    video_data['short_link'] = firebase.create_short_url(api_key, storage_url)
    # Sync the enriched metadata back to Firestore.
    firebase.update_document(destination, video_data)
    return video_data
def upload_dataset_files(root: Optional[str] = '.datasets'):
    """Upload files accompanying each dataset.
    Args:
        root (str): The root folder of the datasets JSON (optional).
    """
    bucket = os.environ['FIREBASE_STORAGE_BUCKET']
    # Load the dataset definitions and the records of their datafiles.
    with open(f'{root}/{FILENAME}') as datasets:
        data = json.load(datasets)
    with open(f'{root}/{DATAFILES}') as datafiles:
        files = json.load(datafiles)
    # Upload each dataset's file, then record its URL and an updated
    # timestamp on the corresponding Firestore document.
    for item in data:
        datafile = files[item['id']]
        ref = datafile['ref']
        file_name = datafile['file_name']
        upload_file(ref, file_name, bucket_name=bucket)
        datafile['url'] = get_file_url(ref, bucket_name=bucket)
        datafile['updated_at'] = datetime.now().isoformat()
        update_document(datafile['doc'], datafile)
def upload_file_to_storage(ref: str, file_name: str) -> tuple:
    """Upload a file to Firebase Storage and create a short URL for it.
    Args:
        ref (str): The location for the file.
        file_name (str): The full file name of the file to upload.
    Returns:
        (tuple): Returns a tuple of the URL link to the file and a short URL.
    """
    # Upload the file to the project's storage bucket.
    print('Uploading file to ', ref)
    bucket = config['FIREBASE_STORAGE_BUCKET']
    upload_file(ref, file_name, bucket_name=bucket)
    file_url = get_file_url(ref, bucket_name=bucket)
    print('File uploaded. URL:', file_url)
    # Create a short link for sharing the uploaded file.
    api_key = config['FIREBASE_API_KEY']
    project_name = config['FIREBASE_PROJECT_ID']
    # TODO: Allow for specifying suffix options.
    short_url = create_short_url(api_key, file_url, project_name)
    print('Short URL:', short_url)
    return file_url, short_url
def buy_data(request):
    """Facilitate the purchase of a dataset on the data market.
    Args:
        request (HTTPRequest): A request with the buyer's email, payment ID,
            and the dataset to purchase in its JSON body.
    Returns:
        (FileResponse): The purchased datafile, or a JSON error response
            when the email is invalid.
    """
    # Ensure that the user has a valid email.
    data = loads(request.body)
    try:
        user_email = data['email']
        validate_email(user_email)
    except ValidationError:
        response = {'success': False, 'message': 'Invalid email in request body.'}
        return JsonResponse(response)

    # Check if the payment ID is valid.
    # FIXME: Make this required.
    try:
        payment_id = data['payment_id']
        print('Checking payment ID:', payment_id)
        project_id = os.environ['GOOGLE_CLOUD_PROJECT']
        payload = access_secret_version(project_id, 'paypal', 'latest')
        paypal_secrets = loads(payload)
        paypal_client_id = paypal_secrets['client_id']
        paypal_secret = paypal_secrets['secret']
        paypal_access_token = get_paypal_access_token(paypal_client_id, paypal_secret)
        payment = get_paypal_payment(paypal_access_token, payment_id)
        # Use an explicit check instead of `assert`, which is stripped under -O.
        if payment['id'] != payment_id:
            raise ValueError('Payment ID does not match captured payment ID.')
        print('Payment ID matched captured payment ID.')
    except Exception as payment_error: #pylint: disable=broad-except
        # Payment verification is best-effort for now (see FIXME above);
        # log the failure instead of silently swallowing every exception.
        print('Payment verification skipped:', payment_error)

    # Future work: Ensure that the user has a .edu email for student discount?

    # Get the dataset zipped folder.
    dataset = data['dataset']
    file_name = dataset['file_name']
    file_ref = dataset['file_ref']
    download_url = get_file_url(file_ref)
    # Optional: Allow for specifying suffix options.
    short_url = create_short_url(FIREBASE_API_KEY, download_url, FIREBASE_PROJECT_ID)
    data['download_url'] = download_url
    data['short_url'] = short_url

    # Keep track of a user's downloaded data if the user is signed in.
    now = datetime.now()
    iso_time = now.isoformat()
    data['created_at'] = iso_time
    data['updated_at'] = iso_time
    # Default claims so the activity log below never raises a NameError
    # when authentication fails before `claims` is assigned.
    claims = {}
    try:
        claims = authenticate_request(request)
        uid = claims['uid']
        update_document(f'users/{uid}/datasets', {**data, **{'uid': uid}})
    except KeyError:
        pass

    # Optional: Read the email template from storage?
    # Optional: Use HTML template.
    # Optional: Load messages from state?
    # template_url = 'website/emails/newsletter_subscription_thank_you.html'
    # Optional: Actually attach the datafile (too large a file problem?)

    # Email the data to the user.
    message = f'Congratulations on your new dataset,\n\nYou can access your data with the following link:\n\n{short_url}\n\nYou can monitor the market for new datasets.\n\nAlways here to help,\nThe Cannlytics Team' #pylint: disable=line-too-long
    subject = 'Dataset Purchased - Your Dataset is Attached'
    send_mail(
        subject=subject,
        message=message,
        from_email=DEFAULT_FROM_EMAIL,
        recipient_list=[user_email, DEFAULT_FROM_EMAIL],
        fail_silently=False,
        # html_message = render_to_string(template_url, {'context': 'values'})
    )

    # Create an activity log.
    create_log(
        ref='logs/website/payments',
        claims=claims,
        action=f'User ({user_email}) bought a dataset.',
        log_type='market',
        key='buy_data',
        changes=data,
    )

    # Return the file to download.
    # FIXME: `download_url` is a remote storage URL; `open()` expects a local
    # path, so this raises unless the URL resolves to a local file. Consider
    # redirecting to `short_url` or streaming the object from storage instead.
    return FileResponse(open(download_url, 'rb'), filename=file_name)
from console import state

if __name__ == '__main__':

    # Initialize Firebase.
    env = environ.Env()
    env.read_env('../../.env')
    credentials = env('GOOGLE_APPLICATION_CREDENTIALS')
    os.environ['GOOGLE_APPLICATION_CREDENTIALS'] = credentials
    db = firebase.initialize_firebase()
    api_key = firebase.get_document('admin/firebase')['web_api_key']
    bucket_name = env('FIREBASE_STORAGE_BUCKET')

    # Upload worksheets to Firebase Storage and update model data in Firestore.
    for key, data_model in state.material['data_models'].items():
        try:
            destination = f'public/state/data_models/{key}_worksheet.xlsm'
            file_name = f'../../console/static/console/worksheets/{key}_worksheet.xlsm'
            firebase.upload_file(bucket_name, destination, file_name)
            # Record the worksheet's URL, short link, and upload time.
            worksheet_url = firebase.get_file_url(destination, bucket_name)
            data_model['worksheet_url'] = worksheet_url
            data_model['worksheet_short_link'] = firebase.create_short_url(
                api_key, worksheet_url)
            data_model['worksheet_uploaded_at'] = datetime.now().isoformat()
            # Sync the enriched data model to both Firestore locations.
            for doc in (
                f'public/state/data_models/{key}',
                f'organizations/test-company/data_models/{key}',
            ):
                firebase.update_document(doc, data_model)
        except FileNotFoundError:
            print('No worksheet for %s data model.' % data_model['key'])