def upload_licensees(data, state):
    """Upload cannabis licensees to Firestore, keyed by license number.

    Args:
        data (DataFrame): Licensee records, one row per licensee.
        state (str): The state abbreviation to stamp on each record.
    """
    for _, row in data.iterrows():
        row['state'] = state
        license_number = row['license_number']
        doc_ref = f'public/data/licensees/{license_number}'
        firebase.update_document(doc_ref, row.to_dict())
def create_project_secret(project_id, secret_id, secret, ref, field):
    """Create a secret for the project and record its IDs in Firestore.

    Args:
        project_id (str): The Firebase project ID.
        secret_id (str): A unique ID for the secret.
        secret (str): The secret message.
        ref (str): The document to store the secret ID and version ID.
        field (str): The field in the document for the secret IDs.

    Returns:
        (dict): A dictionary with the `project_id`, `secret_id`, and `version_id`.
    """
    # Create the secret container. FIX: avoid a bare `except:`, which also
    # swallows KeyboardInterrupt/SystemExit; the expected failure here is
    # that the secret already exists (AlreadyExists).
    try:
        firebase.create_secret(project_id, secret_id, secret)
    except Exception:
        pass  # Secret may already be created (AlreadyExists).
    # Add the secret's payload as a new version. FIX: do not reuse the
    # `secret` parameter for the returned resource name; parse the version
    # ID from the trailing path segment (`.../versions/<version_id>`).
    version_name = firebase.add_secret_version(project_id, secret_id, secret)
    version_id = version_name.split('/')[-1]
    # Save the project ID, secret ID, and version ID in Firestore.
    data = {
        field: {
            'project_id': project_id,
            'secret_id': secret_id,
            'version_id': version_id,
        }
    }
    firebase.update_document(ref, data)
    return data
def create_signature(request, *args, **argv): #pylint: disable=unused-argument
    """Save a signature for a user, given their pin.

    Args:
        request (HTTPRequest): A request to get the user's session.

    Returns:
        (JsonResponse): A JSON response with a success message.
    """
    claims = verify_session(request)
    uid = claims['uid']
    body = loads(request.body.decode('utf-8'))
    data_url = body['data_url']
    ref = f'admin/auth/{uid}/user_settings/signature.png'
    # Store the signature image, then record its URL and metadata.
    upload_file(BUCKET_NAME, ref, data_url=data_url)
    url = get_file_url(ref, bucket_name=BUCKET_NAME)
    created_at = datetime.now().isoformat()
    entry = {
        'signature_created_at': created_at,
        'signature_url': url,
        'signature_ref': ref,
    }
    update_document(f'admin/auth/{uid}/user_settings', entry)
    create_log(f'users/{uid}/logs', claims, 'Created signature.',
               'signature', 'signature_create', [{'created_at': created_at}])
    return JsonResponse({
        'success': True,
        'message': 'Signature saved.',
        'signature_url': url,
    })
def test_publish_data(dataset):
    """Publish a dataset on the data market."""
    # Initialize the Ocean market.
    ocean, config = initialize_ocean_market()

    # Mint a test OCEAN token.
    os.environ['FACTORY_DEPLOYER_PRIVATE_KEY'] = config['FACTORY_DEPLOYER_PRIVATE_KEY']
    mint_fake_OCEAN(ocean.config)

    # Publish the dataset on the market.
    data_token, asset = market.publish_data(
        ocean,
        config.get(SELLER_KEY),
        files=dataset['files'],
        name=dataset['datatoken_name'],
        symbol=dataset['datatoken_symbol'],
        author=dataset['author'],
        data_license=dataset['license'],
    )

    # Record the datatoken and asset information in Firestore.
    os.environ['GOOGLE_APPLICATION_CREDENTIALS'] = config['GOOGLE_APPLICATION_CREDENTIALS']
    initialize_firebase()
    entry = {**dataset, **asset.as_dictionary()}
    update_document(f'public/market/datasets/{asset.asset_id}', entry)
    return data_token, asset
def create_user_pin(request, *args, **argv): #pylint: disable=unused-argument
    """Using a pin for a given user, create and store a hash of the `pin:uid`.

    Args:
        request (HTTPRequest): A request to get the user's session.

    Returns:
        (JsonResponse): A JSON response with a success message.
    """
    user_claims = verify_session(request)
    uid = user_claims['uid']
    post_data = loads(request.body.decode('utf-8'))
    pin = post_data['pin']
    # HMAC the `pin:uid` pair with the app secret so the raw pin is never stored.
    message = f'{pin}:{uid}'
    app_secret = get_document('admin/api')['app_secret_key']
    code = sha256_hmac(app_secret, message)
    # FIX: the request body was parsed a second time here for no reason;
    # the redundant `loads` call has been removed.
    now = datetime.now()  # Optional: Add expiration to pins.
    user_claims['hmac'] = code
    # Remove any pre-existing pin before saving the new HMAC.
    delete_user_pin(request)
    update_document(f'admin/api/pin_hmacs/{code}', user_claims)
    update_document(f'users/{uid}', {'pin_created_at': now.isoformat()})
    create_log(f'users/{uid}/logs', user_claims, 'Created pin.', 'pin',
               'pin_create', [{'created_at': now}])
    return JsonResponse({'success': True, 'message': 'Pin successfully created.'})
def import_data_model(directory):
    """Import analyses and their analytes to Firestore from .xlsx files.

    Args:
        directory (str): A directory containing `analyses.xlsx` and
            `analytes.xlsx` data files.

    Raises:
        NotImplementedError: The Firestore reference scheme for analyses
            has not been decided yet; the routine is intentionally unfinished.
    """
    analyses = pd.read_excel(directory + 'analyses.xlsx')
    analytes = pd.read_excel(directory + 'analytes.xlsx')
    # Attach the full analyte records to each analysis row.
    for index, analysis in analyses.iterrows():
        analyte_data = []
        analyte_names = analysis.analyte_keys.split(', ')
        for analyte_key in analyte_names:
            analyte_item = analytes.loc[analytes.key == analyte_key]
            analyte_data.append(analyte_item.to_dict(orient='records'))
        analyses.at[index, 'analytes'] = analyte_data
    # FIX: `to_dict(orient='records')` returns a list of dicts, which has no
    # `iterrows`; iterate the list directly and index the dicts by key.
    analyses_data = analyses.to_dict(orient='records')
    for values in analyses_data:
        doc_id = str(values['key'])
        ref = ''  # FIXME: the document reference has never been specified.
        update_document(ref, values)
    raise NotImplementedError
def logout(request, *args, **argv): #pylint: disable=unused-argument
    """Functional view to remove a user session.

    FIXME: Does not appear to delete the user's session!

    Args:
        request (HTTPRequest): A request holding the session cookie.

    Returns:
        (HttpResponse): A 205 response that clears the `__session` cookie.
    """
    def _signed_out_response():
        # Build the cookie-clearing 205 (Reset Content) response used by
        # both the success and the failure path.
        response = HttpResponse(status=205)
        response['Set-Cookie'] = '__session=None; Path=/'
        response['Cache-Control'] = 'public, max-age=300, s-maxage=900'
        return response

    try:
        print('Signing user out.')
        session_cookie = request.COOKIES.get('__session')
        if session_cookie is None:
            session_cookie = request.session['__session']
        claims = verify_session_cookie(session_cookie)
        uid = claims['uid']
        create_log(ref=f'users/{uid}/logs', claims=claims, action='Signed out.',
                   log_type='auth', key='logout')
        update_document(f'users/{uid}', {'signed_in': False})
        print('Updated user as signed-out in Firestore:', uid)
        revoke_refresh_tokens(claims['sub'])
        return _signed_out_response()
    except Exception:
        # Best-effort sign-out: always clear the cookie even when the
        # bookkeeping above fails. FIX: narrowed from a bare `except:` so
        # SystemExit/KeyboardInterrupt still propagate; the duplicated
        # response-building code was also consolidated into one helper.
        return _signed_out_response()
def upload_latest_video(datafile):
    """Upload the latest video data.

    Args:
        datafile (str): The path to a .json file containing the video data.
    """
    # Read the video data from the JSON file.
    with open(datafile) as f:
        videos = json.load(f)

    # Get the current count of videos.
    stats = firebase.get_document('public/videos')
    count = stats['total_videos']

    # Upload the most recent video, incrementing the count only when the
    # video does not already exist in Firestore.
    for video in videos[-1:]:
        ref = f'public/videos/video_data/{video["video_id"]}'
        if not firebase.get_document(ref):
            count += 1
        video['number'] = count
        video['published'] = parser.parse(video['published_at'])
        firebase.update_document(ref, video)

    # Update the video statistics.
    firebase.update_document('public/videos', {'total_videos': len(videos)})
    return videos[-1]
def upload_data(
    file_name: str,
    collection: str,
    id_key: Optional[str] = 'id',
    stats_doc: Optional[str] = '',
):
    """Upload a dataset to Firestore.

    Args:
        file_name (str): The path to a .json file containing the data.
        collection (str): The path of the collection where data will be stored.
        id_key (str): The key of the ID.
        stats_doc (str): An optional document to store statistics about the data.

    Returns:
        data (list): A list of partner data (dict).
    """
    database = initialize_firebase()
    with open(file_name) as datafile:
        data = json.load(datafile)
    print('Uploading dataset...')
    for item in data:
        # Stamp each record so Firestore reflects the upload time.
        item['updated_at'] = datetime.now().isoformat()
        doc_id = item[id_key]
        ref = f'{collection}/{doc_id}'
        update_document(ref, item, database=database)
        print('Updated:', ref)
    # Optionally record the total number of uploaded records.
    if stats_doc:
        update_document(stats_doc, {'total': len(data)}, database=database)
        print('Updated:', stats_doc)
    print('Finished uploading data.')
    return data
def users(request):
    """Get, update, or create user's data.

    Args:
        request (HTTPRequest): A GET to read the user's data, or a POST
            with a JSON body to update it.

    Returns:
        (Response): The user data (GET), a success flag (POST), or a
            500 response when the handler fails.
    """
    try:
        # Authenticate the user.
        claims = authenticate_request(request)
        print('User claims:', claims)
        uid = claims['uid']

        # Get the user's data.
        if request.method == 'GET':
            user_data = get_document(f'users/{uid}')
            response = {'success': True, 'data': user_data}
            return Response(response, content_type='application/json')

        # Edit user data if a 'POST' request.
        post_data = loads(request.body.decode('utf-8'))
        update_document(f'users/{uid}', post_data)
        create_log(
            ref=f'users/{uid}/logs',
            claims=claims,
            action='Updated user data.',
            log_type='users',
            key='user_data',
            changes=[post_data]
        )
        return Response({'success': True}, content_type='application/json')
    except Exception:
        # FIX: narrowed from a bare `except:`, which would also swallow
        # SystemExit/KeyboardInterrupt; any handler error still yields 500.
        return Response(
            {'success': False},
            content_type='application/json',
            status=status.HTTP_500_INTERNAL_SERVER_ERROR
        )
def upload_contributors(org_name):
    """Get GitHub contributors and save them to Firestore.

    Args:
        org_name (str): The name of a GitHub organization.

    Returns:
        (list): A list of contributor display names (str).
    """
    users = []
    seen = set()  # FIX: O(1) de-duplication instead of scanning the list each time.
    client = Github()
    org = client.get_organization(org_name)
    repos = org.get_repos()
    initialize_firebase()
    for repo in repos:
        contributors = repo.get_contributors()
        for user in contributors:
            if user.name in seen:
                continue
            seen.add(user.name)
            users.append(user.name)
            data = {
                'company': user.company,
                'description': user.bio,
                'name': user.name,
                'location': user.location,
                'image': user.avatar_url,
                'url': user.html_url,
                'slug': user.login,
            }
            update_document(f'public/contributors/contributor_data/{user.id}', data)
    return users
def users(request):
    """Get, update, or create user's data.

    GET returns the user's Firestore document; POST updates it, falling
    back to creating the user record when the update fails.
    """
    print('Request to users endpoint!')
    try:
        # Authenticate the user.
        claims = authenticate_request(request)

        # Get user data.
        if request.method == 'GET':
            user_data = get_document(f'users/{claims["uid"]}')
            return Response(user_data, content_type='application/json')

        # Edit user data.
        if request.method == 'POST':

            # Get the user's ID.
            post_data = loads(request.body.decode('utf-8'))
            uid = claims['uid']
            post_data['uid'] = uid

            # Update the user's data, create a log, and return the data.
            try:
                update_document(f'users/{uid}', post_data)
                create_log(ref=f'users/{uid}/logs', claims=claims,
                           action='Updated user data.', log_type='users',
                           key='user_data', changes=[post_data])
                return Response(post_data, content_type='application/json')
            except:
                # Create the user's data, create a log, and return the data.
                # NOTE(review): this fallback calls the very same
                # `update_document(f'users/{uid}', post_data)` that just
                # failed, and the `user` dict (with `photo_url`, etc.) is
                # returned but never written — looks like `user` was meant
                # to be saved instead; confirm against callers.
                user_email = post_data['email']
                user = {
                    'email': user_email,
                    'created_at': utils.get_timestamp(),
                    'uid': post_data['uid'],
                    'photo_url': f'https://robohash.org/${user_email}?set=set5',
                }
                update_document(f'users/{uid}', post_data)
                create_log(f'users/{uid}/logs', claims, 'Created new user.',
                           'users', 'user_data', [post_data])
                return Response(user, content_type='application/json')
    except:
        # Return a server error.
        # NOTE(review): bare `except:` also swallows SystemExit and
        # KeyboardInterrupt — consider narrowing to `Exception`.
        return Response({'success': False}, content_type='application/json',
                        status=status.HTTP_500_INTERNAL_SERVER_ERROR)
def set_updated_at(ref: str):
    """Set the `updated_at` field on all documents in a collection.

    Args:
        ref (str): The original collection.
    """
    print(f'Setting `updated_at` for all documents in {ref}...')
    timestamp = datetime.now().isoformat()
    # Stamp every document in the collection with the same timestamp.
    for doc in get_collection(ref):
        update_document(f"{ref}/{doc['id']}", {'updated_at': timestamp})
    print(f'Finished setting `updated_at` for all documents in {ref}.')
def to_fb(self, ref='', col=''):
    """Upload the model's properties as a dictionary to Firestore.

    Args:
        ref (str): The Firestore document reference.
        col (str): A Firestore collection, with the UID as document ID.
    """
    # Snapshot the instance attributes, dropping non-serializable internals.
    data = vars(self).copy()
    for attr in ('_license', 'client'):
        data.pop(attr, None)
    if col:
        update_document(f'{col}/{self.uid}', data)
    else:
        update_document(ref, data)
def set_updated_at(ref):
    """Set the `updated_at` field on all documents in a collection.

    Args:
        ref (str): The collection whose documents are stamped.
    """
    # Use one timestamp for the whole pass so all documents agree.
    updated_at = datetime.now().isoformat()
    docs = firebase.get_collection(ref)
    for doc in docs:
        entry = {'updated_at': updated_at}
        firebase.update_document(ref + '/' + doc['id'], entry)
def move_collection(ref, dest, delete=False):
    """Move one collection to another collection.

    Args:
        ref (str): The original collection.
        dest (str): The new collection.
        delete (bool): Whether or not to delete the original documents,
            `False` by default.
    """
    for doc in firebase.get_collection(ref):
        doc_id = doc['id']
        # Copy first, then optionally remove the original document.
        firebase.update_document(f'{dest}/{doc_id}', doc)
        if delete:
            firebase.delete_document(f'{ref}/{doc_id}')
def login(request, *args, **argv): #pylint: disable=unused-argument
    """Functional view to create a user session.

    Verifies the bearer ID token, mints a Firebase session cookie, stores
    it both as a response cookie and in the Django session, and marks the
    user as signed-in in Firestore.

    Optional: Ensure that the request succeeds on the client!
    """
    try:
        print('Logging user in...')
        # Require a bearer token in the Authorization header.
        authorization = request.headers.get('Authorization', '')
        token = authorization.split(' ').pop()
        if not token:
            # return HttpResponse(status=401)
            message = 'Authorization token not provided in the request header.'
            return JsonResponse({
                'error': True,
                'message': message
            }, status=401)
        initialize_firebase()
        print('Initialized Firebase.')
        # Set session cookie in a cookie in the response.
        # response = HttpResponse(status=200)
        response = JsonResponse({'success': True}, status=200)
        # Optional: Let user specify cookie duration?
        # expires_in = timedelta(days=5)
        # expires = datetime.now() + expires_in
        session_cookie = create_session_cookie(token)
        response['Set-Cookie'] = f'__session={session_cookie}; Path=/'
        response[
            'Cache-Control'] = 'public, max-age=300, s-maxage=900' # TODO: Set the expiration time
        # Save session cookie in the session.
        # Preferred over cookies (but cookies are still needed for production).
        request.session['__session'] = session_cookie
        # Verify the user, create a log, update the user as signed-in,
        # and return a response with the session cookie.
        claims = verify_token(token)
        uid = claims['uid']
        print('Verified user with Firebase Authentication:', claims['email'])
        create_log(ref=f'users/{uid}/logs', claims=claims, action='Signed in.',
                   log_type='auth', key='login')
        update_document(f'users/{uid}', {'signed_in': True})
        print('Logged user sign-in in Firestore:', uid)
        return response
    except:
        # NOTE(review): bare `except:` also swallows SystemExit and
        # KeyboardInterrupt — consider narrowing to `Exception`.
        # return HttpResponse(status=401)
        message = 'Authorization failed in entirety. Please contact support.'
        return JsonResponse({'error': True, 'message': message}, status=401)
def move_collection(ref: str, dest: str, delete: Optional[bool] = False):
    """Move one collection to another collection.

    Args:
        ref (str): The original collection.
        dest (str): The new collection.
        delete (bool): Whether or not to delete the original documents,
            `False` by default.
    """
    print(f'Moving documents from {ref} to {dest}...')
    for doc in get_collection(ref):
        doc_id = doc['id']
        # Copy the document into the destination, then optionally delete it.
        update_document(f'{dest}/{doc_id}', doc)
        if delete:
            delete_document(f'{ref}/{doc_id}')
    print(f'Moved all documents in {ref} to {dest}.')
def save_analytics(request: Any, context: dict) -> dict:
    """Save page analytics to Firestore.

    Args:
        request (Any): The HTTP request, read for `path` and the `q` query param.
        context (dict): The view context; must contain a `user` entry
            (falsy when anonymous).

    Returns:
        (dict): The values recorded to Firestore.
    """
    now = datetime.now().isoformat()
    date = now[:10]
    values = {
        'date': date,
        'time': now,
        'page': request.path,
        'query': request.GET.get('q'),
    }
    # Optional: Merge more user information and more elegantly.
    user = context['user']
    if user:
        values['email'] = user['email']
        values['uid'] = user['uid']
    ref = f'logs/website/page_visits/{now}'
    update_document(ref, values)
    # FIX: the signature promises a dict but the function returned None;
    # return the saved values so callers can use them.
    return values
def login(request):
    """Functional view to create a user session.

    Verifies the bearer ID token, mints a session cookie for the response
    and the Django session, and marks the user as signed-in in Firestore.

    Returns:
        (JsonResponse): 200 with the session cookie set, or 401 on failure.
    """
    try:
        # Ensure that the user passed an authorization bearer token.
        authorization = request.headers.get('Authorization')
        token = authorization.split(' ').pop()
        if not token:
            message = 'Authorization token not provided in the request header.'
            return JsonResponse({
                'success': False,
                'message': message
            }, status=401)

        # Initialize Firebase and verify the Firebase ID token.
        initialize_firebase()
        claims = verify_token(token)
        uid = claims['uid']

        # Create and set a session cookie in the response.
        cache = f'public, max-age={SESSION_COOKIE_AGE}, s-maxage={SESSION_COOKIE_AGE}'
        session_cookie = create_session_cookie(token)
        response = JsonResponse({'success': True}, status=200)
        response['Cache-Control'] = cache
        response['Set-Cookie'] = f'__session={session_cookie}; Path=/'

        # Also save the session cookie in the session.
        # Note: The session is preferred over cookies,
        # but cookies are currently needed for production.
        request.session['__session'] = session_cookie

        # Log the login and update the user as signed-in.
        update_document(f'users/{uid}', {'signed_in': True})
        create_log(ref=f'users/{uid}/logs', claims=claims, action='Signed in.',
                   log_type='auth', key='login')
        return response
    except Exception:
        # FIX: narrowed from a bare `except:` so SystemExit and
        # KeyboardInterrupt still propagate; any auth error yields 401.
        message = f'Authorization failed in entirety. Please contact {DEFAULT_FROM_EMAIL}'
        return JsonResponse({'success': False, 'message': message}, status=401)
def upload_video(api_key, bucket_name, file_name, destination, video_data):
    """Upload a video to Firebase Storage, get a storage URL reference for
    the video, and finally create a short link for the video.

    Args:
        api_key (str): Firebase project API key.
        bucket_name (str): The name of the storage bucket.
        file_name (str): The path of the file to upload.
        destination (str): The storage path for the upload, also used as the
            Firestore document reference for the video metadata.
        video_data (dict): Metadata about the video.

    Returns:
        (dict): The video data updated with the storage ref and URL.
    """
    video_data['uploaded_at'] = datetime.now().isoformat()
    firebase.upload_file(bucket_name, destination, file_name, verbose=True)
    video_data['storage_url'] = firebase.get_file_url(destination, bucket_name)
    # Shorten the storage URL for sharing.
    video_data['short_link'] = firebase.create_short_url(
        api_key, video_data['storage_url'])
    firebase.update_document(destination, video_data)
    return video_data
def delete_signature(request, *args, **argv): #pylint: disable=unused-argument
    """Delete a user's signature.

    Args:
        request (HTTPRequest): A request to get the user's session.

    Returns:
        (JsonResponse): A JSON response with a success message.
    """
    claims = authenticate_request(request)
    uid = claims['uid']
    # Blank out the signature fields on both the user and settings docs.
    cleared = {
        'signature_created_at': '',
        'signature_url': '',
        'signature_ref': '',
    }
    delete_file(BUCKET_NAME, f'users/{uid}/user_settings/signature.png')
    update_document(f'users/{uid}', cleared)
    update_document(f'users/{uid}/user_settings/signature', cleared)
    create_log(f'users/{uid}/logs', claims, 'Deleted signature.', 'signature',
               'signature_delete', [{'deleted_at': datetime.now().isoformat()}])
    return JsonResponse({'success': True, 'message': 'Signature deleted.'})
def upload_dataset_files(root: Optional[str] = '.datasets'):
    """Upload files accompanying each dataset.

    Args:
        root (str): The root folder of the datasets JSON (optional).
    """
    bucket = os.environ['FIREBASE_STORAGE_BUCKET']
    with open(f'{root}/{FILENAME}') as datasets:
        data = json.load(datasets)
    with open(f'{root}/{DATAFILES}') as datafiles:
        files = json.load(datafiles)
    for item in data:
        # Upload each dataset's file, then record its URL and timestamp.
        datafile = files[item['id']]
        storage_ref = datafile['ref']
        upload_file(storage_ref, datafile['file_name'], bucket_name=bucket)
        datafile['url'] = get_file_url(storage_ref, bucket_name=bucket)
        datafile['updated_at'] = datetime.now().isoformat()
        update_document(datafile['doc'], datafile)
def update_object(request, claims, model_type, model_type_singular, organization_id):
    """Create or update object(s) through the API.
    Parse the data and add the data to Firestore.
    Return the data and success.

    Args:
        request (HTTPRequest): An HTTP request used to retrieve parameters.
        claims (dict): User-specific custom claims.
        model_type (str): The type of data model.
        model_type_singular (str): The singular of the type of data model.
        organization_id (str): An organization ID to narrow matches.

    Returns:
        (list): A list of dictionaries of the data posted.
    """
    updated_at = datetime.now().isoformat()
    data = loads(request.body.decode('utf-8'))
    if isinstance(data, dict):
        doc_id = data[f'{model_type_singular}_id']
        data['updated_at'] = updated_at
        data['updated_by'] = claims['uid']
        update_document(
            f'organizations/{organization_id}/{model_type}/{doc_id}', data)
    elif isinstance(data, list):
        # FIX: an empty list left `doc_id` unbound, raising NameError at
        # `update_totals` below; treat it like unrecognized input.
        if not data:
            return []
        for item in data:
            doc_id = item[f'{model_type_singular}_id']
            item['updated_at'] = updated_at
            item['updated_by'] = claims['uid']
            print('Saving item:\n', item)
            update_document(
                f'organizations/{organization_id}/{model_type}/{doc_id}', item)
    else:
        return []
    # Note: for a list payload, totals are refreshed using the last item's ID.
    update_totals(model_type, organization_id, doc_id)
    if model_type != 'logs':
        changes = [data]
        if isinstance(data, list):
            changes = data
        create_log(f'organizations/{organization_id}/logs', claims,
                   f'{model_type.title()} edited.', model_type, doc_id, changes)
    return data
def delete_license(request, *args, **argv): #pylint: disable=unused-argument
    """Delete a license from an organization's licenses.

    Removes the matching license entry from the organization document and
    redacts the associated API-key secret, then logs the deletion.
    """
    # Authenticate the user.
    _, project_id = google.auth.default()
    user_claims = authenticate_request(request)
    data = loads(request.body.decode('utf-8'))
    deletion_reason = data.get('deletion_reason', 'No deletion reason.')
    license_number = request.query_params.get('license')
    # FIX: `org_id` was read from the `license` query parameter, so the
    # organization lookup below could never use the real org ID.
    org_id = request.query_params.get('org_id')
    if not license_number or not org_id:
        message = 'Parameters `license` and `org_id` are required.'
        return Response({'error': True, 'message': message}, status=403)

    # Delete the license data and redact the secret.
    doc = get_document(f'organizations/{org_id}')
    existing_licenses = doc['licenses']
    licenses = []
    for license_data in existing_licenses:
        # FIX: the loop previously reassigned `license_number` from each
        # entry, so the comparison was always false and every license was
        # redacted; compare against the requested license instead.
        if license_data['license_number'] != license_number:
            licenses.append(license_data)
        else:
            add_secret_version(
                project_id,
                license_data['user_api_key_secret']['secret_id'],
                'redacted'
            )
    doc['licenses'] = licenses
    update_document(f'organizations/{org_id}', doc)

    # Create a log.
    create_log(
        ref=f'organizations/{org_id}/logs',
        claims=user_claims,
        action='License deleted.',
        log_type='traceability',
        key='delete_license',
        changes=[license_number, deletion_reason]
    )
    return JsonResponse({'status': 'success', 'message': 'License deleted.'})
def logout(request):
    """Functional view to remove a user session."""
    claims = authenticate_request(request)
    try:
        uid = claims['uid']
        # Mark the user as signed-out, log it, and revoke their tokens.
        update_document(f'users/{uid}', {'signed_in': False})
        create_log(ref=f'users/{uid}/logs', claims=claims, action='Signed out.',
                   log_type='auth', key='logout')
        revoke_refresh_tokens(claims['sub'])
        success, status_code = True, 200
    except KeyError:
        # No usable claims; still clear the session below.
        success, status_code = False, 205
    response = JsonResponse({'success': success}, status=status_code)
    response['Set-Cookie'] = '__session=None; Path=/'
    request.session['__session'] = ''
    return response
def upload_all_videos(datafile):
    """Upload all video data.

    Args:
        datafile (str): The path to a .json file containing the video data.
    """
    # Read the video data from the JSON file.
    with open(datafile) as f:
        videos = json.load(f)

    # Upload each video's data to Firestore, numbering them from 1.
    for number, video in enumerate(videos, start=1):
        video['number'] = number
        video['published'] = parser.parse(video['published_at'])
        ref = f'public/videos/video_data/{video["video_id"]}'
        firebase.update_document(ref, video)

    # Update the video statistics.
    firebase.update_document('public/videos', {'total_videos': len(videos)})
    return videos
def create_api_key(request, *args, **argv): #pylint: disable=unused-argument
    """Mint an API key for a user, granting programmatic use at the same
    level of permission as the user.

    Args:
        request (HTTPRequest): A request to get the user's session.

    Returns:
        (JsonResponse): A JSON response containing the API key in an
            `api_key` field.
    """
    user_claims = verify_session(request)
    uid = user_claims['uid']
    api_key = token_urlsafe(48)
    # Only an HMAC of the key is stored; the raw key is returned once.
    app_secret = get_document('admin/api')['app_secret_key']
    code = sha256_hmac(app_secret, api_key)
    post_data = loads(request.body.decode('utf-8'))
    now = datetime.now()
    # Parse the expiration, accepting ISO-8601 or MM/DD/YYYY, and cap the
    # key's lifetime at one year. FIX: narrowed from a bare `except:` so
    # SystemExit/KeyboardInterrupt are not swallowed mid-parse.
    expiration_at = post_data['expiration_at']
    try:
        expiration_at = datetime.fromisoformat(expiration_at)
    except Exception:
        expiration_at = datetime.strptime(expiration_at, '%m/%d/%Y')
    if expiration_at - now > timedelta(365):
        expiration_at = now + timedelta(365)
    key_data = {
        'created_at': now.isoformat(),
        'expiration_at': expiration_at.isoformat(),
        'name': post_data['name'],
        'permissions': post_data['permissions'],
        'uid': uid,
        'user_email': user_claims['email'],
        'user_name': user_claims.get('name', 'No Name'),
    }
    update_document(f'admin/api/api_key_hmacs/{code}', key_data)
    update_document(f'users/{uid}/api_key_hmacs/{code}', key_data)
    create_log(f'users/{uid}/logs', user_claims, 'Created API key.', 'api_key',
               'api_key_create', [key_data])
    return JsonResponse({'status': 'success', 'api_key': api_key})
def automatic_collection(org_id=None, env_file='.env', minutes_ago=None):
    """Automatically collect results from scientific instruments.

    Args:
        org_id (str): The organization ID to associate with instrument results.
        env_file (str): The environment variable file, `.env` by default.
            Either a `GOOGLE_APPLICATION_CREDENTIALS` or a `CANNLYTICS_API_KEY`
            is needed to run the routine.
        minutes_ago (int): The number of minutes in the past to restrict
            recently modified files.

    Returns:
        (list): A list of measurements (dict) that were collected.
    """
    # Try to initialize Firebase, otherwise an API key will be used.
    try:
        env = environ.Env()
        env.read_env(env_file)
        credentials = env('GOOGLE_APPLICATION_CREDENTIALS')
        os.environ['GOOGLE_APPLICATION_CREDENTIALS'] = credentials
        initialize_firebase()
    except:
        pass

    # Get the organization ID from the .env file if not specified.
    if not org_id:
        org_id = env('CANNLYTICS_ORGANIZATION_ID')

    # Format the last modified time cut-off as a datetime.
    last_modified_at = None
    if minutes_ago:
        last_modified_at = datetime.now() - timedelta(minutes=minutes_ago)

    # Get the instruments, trying Firestore, then the API.
    try:
        ref = f'organizations/{org_id}/instruments'
        instrument_data = get_collection(ref)
    except:
        api_key = env('CANNLYTICS_API_KEY')
        headers = {
            'Authorization': 'Bearer %s' % api_key,
            'Content-type': 'application/json',
        }
        url = f'{API_BASE}/instruments?organization_id={org_id}'
        response = requests.get(url, headers=headers)
        instrument_data = response.json()['data']

    # Iterate over instruments, collecting measurements.
    measurements = []
    for instrument in instrument_data:

        # Iterate over analyses that the instrument may be running.
        try:
            analyses = instrument.get('analyses', '').split(',')
        except AttributeError:
            continue  # FIXME: Handle missing analyses more elegantly.
        analyses = [x.strip() for x in analyses]
        for n in range(len(analyses)):

            # Optional: Handle multiple data paths more elegantly.
            # Each analysis is paired positionally with a data path.
            analysis = analyses[n]
            try:
                data_paths = instrument['data_path'].split(',')
            except AttributeError:
                continue  # No data path.
            data_paths = [x.strip() for x in data_paths]
            data_path = data_paths[n]
            if not data_path:
                continue

            # Identify the analysis being run and identify the import routine.
            # Optional: Identify more elegantly.
            if 'micro' in analysis or 'MICR' in analysis:
                import_method = globals()['import_micro']
            elif 'metal' in analysis or 'HEAV' in analysis:
                import_method = globals()['import_heavy_metals']
            else:
                import_method = globals()['import_results']

            # Search for recently modified files in the instrument directory
            # and parse any recently modified file.
            for root, _, filenames in os.walk(data_path):
                for filename in filenames:
                    if filename.endswith('.xlsx') or filename.endswith('.xls'):
                        data_file = os.path.join(root, filename)
                        modifed_at = os.stat(data_file).st_mtime
                        # FIXME: Ensure date restriction works.
                        # NOTE(review): `st_mtime` is a float timestamp while
                        # `last_modified_at` is a datetime — this comparison
                        # looks like it raises TypeError; confirm and convert
                        # one side before relying on the cut-off.
                        if last_modified_at:
                            if modifed_at < last_modified_at:
                                continue
                        samples = import_method(data_file)
                        if isinstance(samples, dict):
                            sample_data = {**instrument, **samples}
                            measurements.append(sample_data)
                        else:
                            for sample in samples:
                                sample_data = {**instrument, **sample}
                                measurements.append(sample_data)

    # Upload measurement data to Firestore.
    now = datetime.now()
    updated_at = now.isoformat()
    for measurement in measurements:
        try:
            measurement['sample_id'] = measurement['sample_name']
        except:
            continue  # Already has `sample_id`.

        # TODO: Format a better measurement ID.
        # Derive an ID from the acquisition time (e.g. "12-Jun-21, 15:21:07")
        # plus the sample ID, falling back to the sample ID alone.
        measurement_id = measurement.get('acq_inj_time')
        if not measurement_id:
            measurement_id = measurement['sample_id']
        else:
            try:
                measurement_id = measurement_id.replace(',', '').replace(
                    ' ', '-').replace(':', '-')
            except AttributeError:
                pass
            measurement_id = str(measurement_id) + '_' + str(
                measurement['sample_id'])
        measurement['measurement_id'] = measurement_id
        measurement['updated_at'] = updated_at
        ref = f'organizations/{org_id}/measurements/{measurement_id}'
        try:
            update_document(ref, measurement)
        except:
            # NOTE(review): `headers` is only bound when the earlier
            # Firestore read failed — if Firestore worked then but the
            # write fails here, this fallback raises NameError; confirm.
            url = f'{API_BASE}/measurements/{measurement_id}?organization_id={org_id}'
            response = requests.post(url, json=measurement, headers=headers)
        print('Uploaded measurement:', ref)

        # Upload result data to Firestore.
        for result in measurement['results']:
            analyte = result['analyte']
            result_id = f'{measurement_id}_{analyte}'
            result['sample_id'] = measurement['sample_name']
            result['result_id'] = result_id
            result['measurement_id'] = measurement_id
            result['updated_at'] = updated_at
            ref = f'organizations/{org_id}/results/{result_id}'
            try:
                update_document(ref, result)
            except:
                url = f'{API_BASE}/results/{result_id}?organization_id={org_id}'
                response = requests.post(url, json=result, headers=headers)
            print('Uploaded result:', ref)

    # Return the measurements.
    return measurements
# Identify a cultivator, lab, and retailer among the facilities.
# NOTE(review): `facilities`, `fb`, `track`, `Facility`,
# `clean_nested_dictionary`, and `test_metrc_create_locations` are defined
# elsewhere in this script — not visible in this chunk.
cultivator, lab, retailer = None, None, None
for facility in facilities:
    license_type = facility.license_type
    if cultivator is None and license_type == 'Grower':
        cultivator = facility
    elif lab is None and license_type == 'Testing Laboratory':
        lab = facility
    elif retailer is None and license_type == 'Dispensary':
        retailer = facility

    # Save facility to Firestore.
    license_number = facility.license_number
    ref = f'tests/metrc/organizations/1/facilities/{license_number}'
    data = clean_nested_dictionary(facility.to_dict())
    data['license_number'] = license_number
    fb.update_document(ref, data)

# Get facilities from Firestore.
# NOTE(review): the facilities found in the loop above are overwritten
# here by documents read from redacted references — confirm intended.
ref = 'tests/metrc/organizations/1/facilities'
cultivator = Facility.from_fb(track, f'{ref}/redacted')
retailer = Facility.from_fb(track, f'{ref}/redacted')
processor = Facility.from_fb(track, f'{ref}/redacted')
lab = Facility.from_fb(track, f'{ref}/redacted')
transporter = Facility.from_fb(track, f'{ref}/redacted')

#------------------------------------------------------------------
# Locations ✓
#------------------------------------------------------------------

# Create a new location.
test_metrc_create_locations()