def get_verification_count(project_id):
    """Return the verification count stored in firebase for *project_id* as a float."""
    fb_db = firebase_admin_auth().database()
    # shallow() keeps the payload small: only this single value is fetched,
    # not the whole project subtree
    raw_count = (fb_db.child("projects")
                 .child(project_id)
                 .child("verificationCount")
                 .shallow()
                 .get()
                 .val())
    return float(raw_count)
def upload_project_firebase(project):
    """Upload a project dict to firebase under "projects/<zero-padded id>".

    Args:
        project: dict with at least an 'id' key.

    Returns:
        True on success, False if the upload failed for any reason.
    """
    try:
        firebase = firebase_admin_auth()
        fb_db = firebase.database()
        # project ids are stored as zero-padded 4-character keys, e.g. "0042"
        fb_db.child("projects").child(str(project['id']).zfill(4)).set(project)
        logging.warning('uploaded project in firebase for project %s' %
                        project['id'])
        return True
    except Exception:
        # was a bare `except:` that silently swallowed everything (including
        # KeyboardInterrupt/SystemExit); keep the best-effort return-False
        # contract but record why the upload failed
        logging.exception('uploading project to firebase failed')
        return False
def get_all_projects():
    """Fetch the complete "projects" subtree from firebase and return it."""
    # connect to firebase
    fb_db = firebase_admin_auth().database()
    # pull every project at once
    all_projects = fb_db.child("projects").get().val()
    print('got project information from firebase.')
    logging.warning('got project information from firebase.')
    return all_projects
def delete_groups_projects_firebase():
    """Remove every group and every project node from firebase."""
    fb_db = firebase_admin_auth().database()

    print('Deleting all groups from Firebase...')
    fb_db.child("groups").remove()
    print('Done.')

    print('Deleting all projects from Firebase...')
    fb_db.child("projects").remove()
    print('Done.')
def get_highest_project_id():
    """Return the numerically highest project id in firebase, or -1 if none exist."""
    fb_db = firebase_admin_auth().database()
    # shallow() yields just the keys of the "projects" node (the project ids)
    project_keys = fb_db.child('projects').shallow().get().val()
    if not project_keys:
        # no projects yet -> sentinel so the caller gets -1
        project_keys = [-1]
    highest_project_id = max(int(key) for key in project_keys)
    logging.warning('returned highest project id: %s' % highest_project_id)
    return highest_project_id
def download_group_progress(project_id, verification_count):
    """Download the progress of all groups of a project using worker threads.

    Uses a queue plus 24 threads running `get_group_progress` (defined
    elsewhere in this file) to fetch per-group completed counts in parallel.

    Args:
        project_id: id of the project whose groups are queried.
        verification_count: passed through to each worker job.

    Returns:
        list: one entry per group with its progress information
        (filled in by the workers); empty if the project has no groups.
    """
    # create a list where we store the progress and other information for each group
    group_progress_list = []
    # we will use a queue to limit the number of threads running in parallel
    q = Queue(maxsize=0)
    num_threads = 24
    firebase = firebase_admin_auth()
    fb_db = firebase.database()
    # this tries to set the max pool connections to 100 so the 24 threads
    # don't exhaust the underlying requests session
    adapter = requests.adapters.HTTPAdapter(max_retries=5,
                                            pool_connections=100,
                                            pool_maxsize=100)
    for scheme in ('http://', 'https://'):
        fb_db.requests.mount(scheme, adapter)
    # it is important to use the shallow option, only keys will be loaded and
    # not the complete json
    all_groups = fb_db.child("groups").child(project_id).shallow().get().val()
    print('downloaded all groups of project %s from firebase' % project_id)
    logging.warning('downloaded all groups of project %s from firebase' %
                    project_id)
    if not all_groups:
        # firebase returns None for a missing node; the original code would
        # crash with "TypeError: 'NoneType' object is not iterable" here.
        # Nothing to do -> return the empty list without spawning workers.
        logging.warning('found no groups for project %s in firebase' %
                        project_id)
        return group_progress_list
    for group_id in all_groups:
        q.put([
            fb_db, group_progress_list, project_id, group_id,
            verification_count
        ])
    print('added all groups of project %s to queue' % project_id)
    logging.warning('added all groups of project %s to queue' % project_id)
    for i in range(num_threads):
        worker = threading.Thread(target=get_group_progress, args=(q, ))
        worker.start()
    # block until every queued group has been processed by a worker
    q.join()
    del fb_db
    print('downloaded progress for all groups of project %s from firebase' %
          project_id)
    logging.warning(
        'downloaded progress for all groups of project %s from firebase' %
        project_id)
    return group_progress_list
def delete_firebase_results(all_results):
    """Delete the given results from firebase in one multi-location update.

    Setting a path to None in a firebase multi-location update removes
    that path, so we build a dict mapping every result path to None and
    send it as a single request.
    """
    fb_db = firebase_admin_auth().database()
    data = {}
    for task_id, results in all_results.items():
        for child_id in results:
            path = 'results/{task_id}/{child_id}'.format(task_id=task_id,
                                                         child_id=child_id)
            data[path] = None
    fb_db.update(data)
    print('finished deleting results')
    logging.warning('deleted results in firebase')
    del fb_db
def get_projects_to_import():
    """Return entries of the firebase "imports" table that are not complete yet.

    A project that has already been imported carries a 'complete' key; any
    entry without it is considered new.

    Returns:
        dict: import_key -> project data for every not-yet-imported project.
    """
    new_imports = {}
    firebase = firebase_admin_auth()
    fb_db = firebase.database()
    # iterate over all the keys in the importer, add the ones to the import
    # cache that are not yet complete
    all_imports = fb_db.child("imports").get().val()
    if all_imports:
        for import_key, project in all_imports.items():
            try:
                # check if project was already imported and "complete" is set
                project['complete']
            except (KeyError, TypeError):
                # was a bare `except:`; KeyError (flag missing) and TypeError
                # (entry is not a mapping) are the cases that mean "new import"
                new_imports[import_key] = project
    return new_imports
def set_project_progress_firebase(project_id, progress):
    """Write an integer progress value to a firebase project and verify it.

    Returns True if the read-back value matches the written one, else False.
    """
    # connect to firebase
    fb_db = firebase_admin_auth().database()
    # progress in firebase is stored as integer
    progress = int(progress)
    project_ref = fb_db.child("projects").child(project_id)
    project_ref.update({"progress": progress})
    # read the value back to check that the update actually landed
    new_progress = project_ref.child("progress").shallow().get().val()
    if progress != new_progress:
        print('update progress in firebase for project %s FAILED' % project_id)
        logging.warning('update progress for project %s FAILED' % project_id)
        return False
    print('update progress for project %s successful' % project_id)
    logging.warning(
        'update progress in firebase for project %s successful' % project_id)
    return True
def get_results_from_firebase():
    """Fetch and return the complete "results" subtree from firebase."""
    fb_db = firebase_admin_auth().database()
    return fb_db.child("results").get().val()
def run_transfer_results(): logging.basicConfig(filename='transfer_results.log', level=logging.WARNING, format='%(asctime)s %(levelname)-8s %(message)s', datefmt='%m-%d %H:%M:%S', filemode='a') # first check if we have results stored locally, that have not been inserted in MySQL results_filename = 'results.json' if os.path.isfile(results_filename): # start to import the old results first with open(results_filename) as results_file: results = json.load(results_file) results_txt_filename = results_to_txt(results) logging.warning( "there are results in %s that we didnt't insert. do it now!" % results_filename) save_results_mysql(results_txt_filename) delete_firebase_results(results) os.remove(results_filename) print('removed "results.json" file') logging.warning('removed "results.json" file') firebase = firebase_admin_auth() fb_db = firebase.database() print('opened connection to firebase') # this tries to set the max pool connections to 100 adapter = requests.adapters.HTTPAdapter(max_retries=5, pool_connections=100, pool_maxsize=100) for scheme in ('http://', 'https://'): fb_db.requests.mount(scheme, adapter) # download all results and save as in json file to avoid data loss when script fails all_results = fb_db.child("results").get().val() del fb_db print('downloaded all results from firebase') logging.warning('downloaded all results from firebase') # test if there are any results to transfer if all_results: with open(results_filename, 'w') as fp: json.dump(all_results, fp) logging.warning('wrote results data to %s' % results_filename) print('wrote results data to %s' % results_filename) results_txt_filename = results_to_txt(all_results) save_results_mysql(results_txt_filename) delete_firebase_results(all_results) os.remove(results_filename) print('removed "results.json" file') logging.warning('removed "results.json" file') else: logging.warning('there are no results to transfer in firebase') print('there are no results to transfer in firebase')
def delete_project_firebase(project_id):
    """Delete the firebase node of the given project."""
    fb_db = firebase_admin_auth().database()
    fb_db.child("projects").child(project_id).remove()
    logging.warning('deleted project in firebase for project %s' % project_id)