def create_movie(Movie):
    """Insert a single movie row into the movies table."""
    insert_sql = "INSERT INTO movies VALUES (?,?,?,?,?,?,?)"
    row = (
        Movie.movie_id,
        Movie.movie_title,
        Movie.release_date,
        Movie.rating,
        Movie.boxoffice_id,
        Movie.director_id,
        Movie.studio_id,
    )
    query_database(insert_sql, row)
def signin():
    """Authenticate a user by DNI/password; respond with the user id on success,
    403 on bad credentials, 500 on any unexpected error."""
    try:
        app.logger.info('Hit /signin')
        data = request.get_json()
        app.logger.info(data)
        user = data['user']
        credentials = (user['dni'], user['password'])
        matches = db.query_database(
            "select * from users where dni=? and password=?;", credentials)
        if not matches:
            return make_response("User credentials invalid!", 403)
        first_match = matches[0]
        # NOTE(review): this logs the raw credentials (incl. password) —
        # consider redacting in a follow-up.
        app.logger.info("Found {} matching users for credentials {}".format(
            first_match, credentials))
        return make_response(jsonify({'user_id': first_match[0]}), 200)
    except Exception as e:
        return make_response("Error at signin! {}".format(e), 500)
def new_appointment():
    """Reserve a turn (if still available) and create the matching
    appointment row. Responds 205 when the turn is already taken."""
    app.logger.info('Hit /new_appointment')
    data = request.get_json()
    turn = data['turn']
    created_at = dt.now().strftime('%Y-%m-%d %H:%M')
    app.logger.info(turn)
    app.logger.info('check if turn is reserved')
    turn_key = [turn['turn_id']]
    availability = db.query_database(
        '''select available from turns where id = ?''', turn_key)[0][0]
    app.logger.info(availability)
    if availability == 'false':
        app.logger.info('Turn is reserved')
        return make_response('Error: turn is occupied!', 205)
    # Mark the turn taken, then record the appointment itself
    app.logger.info('Reserving turn')
    db.modify_database(
        '''update turns set available = "false" where id = ? ''', turn_key)
    insert_sql = ''' insert into appointments (user_id, turn_id, created_at, deleted_at) values (?,?,?,null) ; '''
    db.modify_database(insert_sql,
                       (turn['user_id'], turn['turn_id'], created_at))
    app.logger.info('New appointment')
    return make_response('Successfully created appointment!', 200)
def get_appointments(user_id):
    """Return the user's active (non-deleted) appointments as a JSON list of
    {id, date, time, physician, specialty, turn_id} dicts."""
    app.logger.info('Hit /get_appointments/%s', user_id)
    sql = ''' select ap.id, t.time, 'Dr. ' || d.last_name, s.name, t.id from appointments ap join turns t on t.id = ap.turn_id join doctors_specialties ds on ds.id = t.doctor_specialty_id join specialties s on s.id = ds.specialty_id join doctors d on d.id = ds.doctor_id where ap.user_id = ? and ap.deleted_at is null order by ap.created_at desc ; '''
    rows = db.query_database(sql, [user_id])
    appointments = []
    for row in rows:
        # t.time is stored as 'YYYY-MM-DD HH:MM'; split into date and time
        date_part = row[1].split(' ')[0]
        time_part = row[1].split(' ')[1]
        appointments.append({
            'id': row[0],
            'date': date_part,
            'time': time_part,
            'physician': row[2],
            'specialty': row[3],
            'turn_id': row[4],
        })
    return make_response(jsonify(appointments), 200)
def update_user(user_id):
    """Update a user's dni / first_name / last_name.

    Responds 204 when no user matches, 200 on success, 500 on error.
    """
    try:
        app.logger.info('Hit PUT /users/{}'.format(user_id))
        data = request.get_json()
        # Bug fix: (user_id) is not a tuple — a single-element DB-API
        # parameter sequence needs the trailing comma; a bare string would
        # otherwise be treated as a sequence of characters.
        user_matches = db.query_database(
            "select count(*) from users where id=?;", (user_id,))[0][0]
        if (user_matches == 0):
            return make_response(
                "No user was found with id {}".format(user_id), 204)
        user = data['user']
        sql = ''' update users set dni=?, first_name=?, last_name=? where id=?; '''
        db.modify_database(
            sql,
            (user['dni'], user['first_name'], user['last_name'], user_id))
        return make_response("Success!", 200)
    except Exception as e:
        return make_response("Error updating user {}! {}".format(user_id, e),
                             500)
def search_turns():
    """Search available turns by specialty, doctor name or time.

    Security fix: the previous version interpolated the raw 'search' query
    parameter into the SQL with str.format, allowing SQL injection. The
    LIKE patterns are now bound as ? parameters.
    """
    search = request.args.get('search')
    app.logger.info('Hit /search_turns/%s', search)
    # Same visible behavior as '%{key}%'.format(key=search), including
    # search=None rendering as the literal text 'None'.
    pattern = '%{}%'.format(search)
    sql = ''' select t.id, s.name, d.first_name || ' ' || d.last_name as doctor_name, t.time from turns t join doctors_specialties ds on ds.id = t.doctor_specialty_id join doctors d on d.id = ds.doctor_id join specialties s on s.id = ds.specialty_id where t.available = 'true' and (s.name like ? or d.first_name like ? or d.last_name like ? or t.time like ? ) order by t.time desc ; '''
    res = db.query_database(sql, (pattern, pattern, pattern, pattern))
    d = [{
        'id': x[0],
        'practice': x[1],
        'doctor': x[2],
        'time': x[3]
    } for x in res]
    return make_response(jsonify(d), 200)
def get_specialties():
    """Return every specialty as a JSON list of {'id', 'value'} dicts."""
    app.logger.info('Hit /get_specialties')
    rows = db.query_database('select id, name from specialties;')
    specialties = [{'id': row[0], 'value': row[1]} for row in rows]
    app.logger.info(specialties)
    return make_response(jsonify(specialties), 200)
def query_bbox(tablename, bbox, type_filter):
    """Query all geoname points inside a bounding box, filtered by feature class.

    :param tablename: NOTE(review): currently UNUSED — the query is built
        against config.db_table instead; confirm which is intended.
    :param bbox: Four numbers (envelope corners), unpacked into ST_makeEnvelope
        together with config.srid.
    :param type_filter: Iterable of feature_class codes, joined into the SQL
        IN (...) clause. NOTE(review): values are string-interpolated, not
        parameterized — SQL injection risk if ever user-supplied.
    :return: Result rows of (name, longitude, latitude, feature_code)
    """
    # query all points in bbox
    db_query = "select name,longitude,latitude,feature_code from %s where coordinates && ST_makeEnvelope(%f,%f,%f,%f,%d) and feature_class in ('%s');" % (
        config.db_table, *bbox, config.srid, "','".join(type_filter))
    # db_query = "select name,st_astext(coordinates),feature_code from %s where (longitude between %f and %f) and (latitude between %f and %f) and feature_class in ('%s');" % (config.db_table,*bbox,"','".join(type_filter))
    result = query_database(db_query)
    # print(*result,sep="\n")
    print(len(result), "number of hits")
    return result
def doctors():
    """Return every row of the doctors table as JSON."""
    try:
        app.logger.info('Hit /doctors')
        rows = db.query_database("select * from doctors;")
        return make_response(jsonify(doctors=rows), 200)
    except Exception as e:
        app.logger.error("Error fetching doctors! {}".format(e))
        return make_response("Error fetching doctors! {}".format(e), 500)
def users():
    """Return id, dni and name columns for every user as JSON."""
    try:
        app.logger.info('Hit /users')
        rows = db.query_database(
            "select id, dni, first_name, last_name from users;")
        return make_response(jsonify(users=rows), 200)
    except Exception as e:
        return make_response("Error fetching users! {}".format(e), 500)
def unify_narrators(store_name: str) -> list:
    """ Determine if any of the narrators should be 'unified' (are the same person
    but have different names).

    :param store_name: The database/data store name
    :return: List holding strings of the new triples to add
    """
    new_triples = list()
    success, results = query_database('select', query_for_unification,
                                      store_name)
    if success and results:
        for result in results['results']['bindings']:
            narr1 = result['narr1']['value']
            narr2 = result['narr2']['value']
            # unify1/unify2 are the existing UnifyingCollections (if any)
            # that each narrator already belongs to
            if 'unify1' in result.keys():
                unify1 = result['unify1']['value']
            else:
                unify1 = EMPTY_STRING
            if 'unify2' in result.keys():
                unify2 = result['unify2']['value']
            else:
                unify2 = EMPTY_STRING
            if not unify1 and not unify2:
                # Create new UnifyingCollection holding both narrators
                iri_collection = f'{narr1}{narr2.split("urn:ontoinsights:dna:")[-1]}'
                new_triples.append(
                    f'@prefix : <urn:ontoinsights:dna:> . '
                    f'<{iri_collection}> a :UnifyingCollection ; :has_member <{narr1}>, <{narr2}> .')
            elif unify1 and unify2 and unify1 != unify2:
                # Move unify2 members to unify1 via SPARQL UPDATE and delete unify2
                query_database(
                    'update',
                    merge_insert.replace('unify1', unify1).replace(
                        'unify2', unify2), store_name)
                query_database('update',
                               merge_delete.replace('unify2', unify2),
                               store_name)
            elif unify1 and not unify2:
                # Add narr2 to unify1
                new_triples.append(
                    f'@prefix : <urn:ontoinsights:dna:> . <{unify1}> :has_member <{narr2}> .')
            elif unify2 and not unify1:
                # Add narr1 to unify2
                new_triples.append(
                    f'@prefix : <urn:ontoinsights:dna:> . <{unify2}> :has_member <{narr1}> .')
    else:
        # NOTE(review): reconstructed indentation — this else is assumed to
        # pair with the outer 'if success and results' (the message reads as
        # a query failure); confirm against the original source.
        logging.info('Failure querying for unification details')
    return new_triples
def display_similarities(store_name: str):
    """ Display a window to show 'similar' narratives.

    Currently a placeholder window; it only verifies narratives exist.

    :param store_name: The database/data store name holding the narratives
    :return: TBD
    """
    logging.info(f'Displaying similarities in {store_name}')
    # Setup the PySimpleGUI window
    sg.theme('Material2')
    layout = [[sg.Text("Not yet implemented.", font=('Arial', 16))],
              [
                  sg.Text("To exit, press 'End' or close the window.",
                          font=('Arial', 16))
              ]]
    # Create the GUI Window
    try:
        # NOTE(review): 'success' is ignored here; on failure
        # narrative_results may not be a dict and .keys() would raise,
        # falling into the except below — confirm intended.
        success, narrative_results = query_database('select',
                                                    query_narrative_text,
                                                    store_name)
        number_narratives = 0
        if 'results' in narrative_results.keys() and \
                'bindings' in narrative_results['results'].keys():
            number_narratives = len(narrative_results['results']['bindings'])
        if not number_narratives:
            sg.popup_error(
                f'No narrators are defined in {store_name}. '
                f'Similarities graph cannot be displayed.',
                font=('Arial', 14),
                button_color='dark blue',
                icon=encoded_logo)
    except Exception as e:
        capture_error(
            f'Exception getting narratives for similarity analysis from {store_name}: {str(e)}',
            True)
        return
    window_similarities_list = sg.Window('Narrative Similarities',
                                         layout,
                                         icon=encoded_logo).Finalize()
    # Event Loop to process window "events"
    while True:
        event_similarities_list, values = window_similarities_list.read()
        if event_similarities_list in (sg.WIN_CLOSED, 'End'):
            # If user closes window or clicks 'End'
            break
        # TODO
    # Done
    window_similarities_list.close()
    return
def user_by_id(user_id):
    """Return a single user (id, dni, first_name, last_name) as JSON.

    Responds with an empty-tuple payload when no user matches.
    """
    try:
        app.logger.info('Hit GET /users/{}'.format(user_id))
        # Bug fix: (user_id) is not a tuple — DB-API parameter sequences of
        # one element need the trailing comma; a bare string would otherwise
        # be interpreted character-by-character.
        user_matches = db.query_database(
            "select id, dni, first_name, last_name from users where id=?;",
            (user_id,))
        if (len(user_matches) == 0):
            return make_response(jsonify(user=()), 200)
        return make_response(jsonify(user=user_matches[0]), 200)
    except Exception as e:
        return make_response("Error fetching user {}! {}".format(user_id, e),
                             500)
def display_hypotheses(store_name: str):
    """ Display a window to show currently defined hypotheses, and allow selection
    of one for display and possibly edit, or permit definition of a new hypothesis.

    Currently a placeholder window.

    :param store_name: The database/data store name holding the hypotheses
    :return: TBD
    """
    logging.info(f'Displaying hypotheses in {store_name}')
    # Setup the PySimpleGUI window
    sg.theme('Material2')
    layout = [[sg.Text("Not yet implemented.", font=('Arial', 16))],
              [
                  sg.Text("To exit, press 'End' or close the window.",
                          font=('Arial', 16))
              ], [sg.Text()],
              [
                  sg.Button('End',
                            button_color='dark blue',
                            size=(5, 1),
                            font=('Arial', 14))
              ]]
    # Get the data for the window
    try:
        success, hypotheses_results = query_database('select',
                                                     query_hypotheses,
                                                     store_name)
        # number_hypotheses is computed but not yet used — presumably a
        # placeholder for the full implementation (TODO confirm).
        number_hypotheses = 0
        if success and 'results' in hypotheses_results.keys() and \
                'bindings' in hypotheses_results['results'].keys():
            number_hypotheses = len(hypotheses_results['results']['bindings'])
    except Exception as e:
        capture_error(
            f'Exception getting hypotheses details from {store_name}: {str(e)}',
            True)
        return
    window_hypotheses_list = sg.Window('Display Hypotheses',
                                       layout,
                                       icon=encoded_logo).Finalize()
    # Event Loop to process window "events"
    while True:
        event_hypotheses_list, values = window_hypotheses_list.read()
        if event_hypotheses_list in (sg.WIN_CLOSED, 'End'):
            # If user closes window or clicks 'End'
            break
    # Done
    window_hypotheses_list.close()
    return
def get_dates(doctor_specialty_id):
    """Return available turn ids/times for a doctor-specialty pairing as JSON."""
    app.logger.info('Hit /get_dates/%s', doctor_specialty_id)
    sql = ''' select id, time from turns where available = 'true' and doctor_specialty_id = ? ; '''
    # Bug fix: (doctor_specialty_id) is not a tuple — single-element
    # DB-API parameter sequences need the trailing comma.
    res = db.query_database(sql, (doctor_specialty_id,))
    dic = [{'id': x[0], 'value': x[1]} for x in res]
    app.logger.info(dic)
    return make_response(jsonify(dic), 200)
def get_physicians(specialty_id):
    """Return doctors offering a specialty as JSON {'id', 'value'} entries,
    where id is the doctors_specialties row id."""
    app.logger.info('Hit /get_physicians/%s', specialty_id)
    sql = ''' select ds.id, 'Dr. ' || d.last_name as doctor from doctors_specialties ds join doctors d on d.id = ds.doctor_id where ds.specialty_id = ? '''
    # Bug fix: (specialty_id) is not a tuple — single-element DB-API
    # parameter sequences need the trailing comma.
    res = db.query_database(sql, (specialty_id,))
    dic = [{'id': x[0], 'value': x[1]} for x in res]
    app.logger.info(dic)
    return make_response(jsonify(dic), 200)
def initialise_geonames_table(tablename, filepath="geonames_DE/DE.txt"):
    """Create and populate a PostGIS-enabled geonames table from a
    geonames.org dump.

    Drops any existing table of the same name, creates it with the standard
    geonames column layout, bulk-loads the file, then adds and populates a
    POINT geometry column from longitude/latitude.

    :param tablename: Name of the table to (re)create
    :param filepath: Path to the geonames tab-separated dump file
    """
    # Standard geonames.org column layout -> SQL types
    headers = {
        "geonameid": "int",
        "name": "varchar(200)",
        "asciiname": "varchar(200)",
        "alternatenames": "varchar(10000)",
        "latitude": "float",
        "longitude": "float",
        "feature_class": "char(1)",
        "feature_code": "varchar(10)",
        "country_code": "varchar(2)",
        "cc2": "varchar(200)",
        "admin1_code": "varchar(20)",
        "admin2_code": "varchar(80)",
        "admin3_code": "varchar(20)",
        "admin4_code": "varchar(20)",
        "population": "bigint",
        "elevation": "int",
        "dem": "int",
        "timezone": "varchar(40)",
        "modification_date": "date"
    }
    # Bug fix: PostgreSQL syntax is DROP TABLE IF EXISTS <name>; the
    # original "drop table <name> if exists" is a syntax error.
    db_query = "drop table if exists %s;" % (tablename)
    query_database(db_query)
    create_table(tablename, headers.keys(), headers.values())
    create_table_from_csv(tablename, filepath)
    # IF NOT EXISTS keeps this idempotent when postgis is already installed
    query_database("create extension if not exists postgis;")
    db_query = "select AddGeometryColumn('%s', 'coordinates', %d, 'POINT', 2);" % (
        tablename, config.srid)
    query_database(db_query)
    db_query = "UPDATE %s SET coordinates = ST_POINT( longitude, latitude );" % (
        tablename)
    query_database(db_query)
def get_y_x_values(number_narrators: int, variable: str, query: str,
                   store_name: str) -> (tuple, tuple):
    """ Get the count of narrators by gender, birth country and birth year. Also
    provide counts where this info is not known.

    :param number_narrators: Total number of narrators
    :param variable: String indicating the name of the returned SPARQL query
                     variable - either 'gender', 'year' or 'country'
    :param query: String holding the query to run to get the count
    :param store_name: String holding the database/data store name with the
                       narratives and narrator details
    :return: Two tuples - for the y and x axes of a horizontal bar chart.
             The y axis is the list of genders, birth countries or birth years,
             and the x axis is the number of narrators for each
    """
    dictionary = dict()
    # Get count
    success, count_results = query_database('select', query, store_name)
    if success:
        if 'results' in count_results.keys(
        ) and 'bindings' in count_results['results'].keys():
            for binding in count_results['results']['bindings']:
                # Manipulate the key value in case it is an IRI
                update_dictionary_count(
                    dictionary, str(binding[variable]['value']).split(':')[-1])
    # Make sure that all narrators are addressed
    # NOTE(review): reconstructed indentation — the tallying below is assumed
    # to run even when the query fails (yielding an all-'Unknown' result
    # rather than returning None); confirm against the original source.
    total_count = 0
    y_list = []
    for key in dictionary.keys():
        total_count += dictionary[key]
        y_list.append(key)
    if total_count != number_narrators:
        # Narrators without this attribute are grouped under 'Unknown'
        dictionary['Unknown'] = number_narrators - total_count
        y_list.append('Unknown')
    # Sort the results and create the returned tuples
    y_values = sorted(y_list)
    x_values = []
    for y_value in y_values:
        x_values.append(dictionary[y_value])
    return tuple(y_values), tuple(x_values)
def delete_user(user_id):
    """Delete a user by id.

    Responds 204 when the user does not exist, 200 on success, 500 on error.
    """
    try:
        app.logger.info('Hit DELETE /users/{}'.format(user_id))
        # Bug fix (both calls below): (user_id) is not a tuple — DB-API
        # single-element parameter sequences need the trailing comma.
        user_matches = db.query_database(
            "select count(*) from users where id=?;", (user_id,))[0][0]
        if (user_matches == 0):
            return make_response(
                "No user was found with id {}".format(user_id), 204)
        sql = ''' delete from users where id=?; '''
        db.modify_database(sql, (user_id,))
        return make_response("Success!", 200)
    except Exception as e:
        app.logger.error("Error deleting user {}! {}".format(user_id, e))
        return make_response("Error deleting user {}! {}".format(user_id, e),
                             500)
def signup():
    """Create a new user account; 403 if a user with the same dni/password
    already exists.

    Bug fix: 'user' is now initialized before the try block — previously, if
    request.get_json() (or the 'user' key lookup) raised, the except handler
    referenced an undefined name and raised NameError instead of returning
    the intended 500 response.
    """
    user = None
    try:
        app.logger.info('Hit /signup')
        data = request.get_json()
        app.logger.info(data)
        sql = ''' insert into users( dni, first_name, last_name, password) values(?,?,?,?); '''
        user = data['user']
        # NOTE(review): passwords are stored and compared in plaintext here —
        # hashing is strongly recommended as a follow-up.
        credentials = (user['dni'], user['password'])
        count_users = db.query_database(
            "select count(*) from users where dni=? and password=?;",
            credentials)[0][0]
        if (count_users > 0):
            msg = "Attempted to create user {}, but a user with these credentials already exists!".format(
                user)
            app.logger.error(msg)
            return make_response("User credentials invalid!", 403)
        db.modify_database(sql, (user['dni'], user['first_name'],
                                 user['last_name'], user['password']))
        app.logger.info("Insert: created new user {}".format(user))
        return make_response("Successfully created user!", 201)
    except Exception as e:
        app.logger.error("Error creating new user {} : {}".format(user, e))
        return make_response("Error creating user! {}".format(e), 500)
def get_genres_movies_table():
    """Fetch every row of the genres_movies join table.

    Bug fix: the original issued the SELECT but discarded the result, making
    the call useless to callers; the rows are now returned. (Backward
    compatible — callers that ignored the previous None are unaffected.)
    """
    query = "SELECT * FROM genres_movies"
    return query_database(query)
def create_genre(genre):
    """Persist a genre row (id, name) to the genres table."""
    insert_sql = "INSERT INTO genres VALUES (?,?)"
    query_database(insert_sql, (genre.genre_id, genre.genre_name))
def insert_genres_movies(genre_name, movie_title):
    """Link a genre to a movie by looking up both ids via subqueries."""
    link_sql = """INSERT INTO genres_movies (genre_id, movie_id) SELECT(SELECT genre_id FROM genres WHERE genre_name=?), (SELECT movie_id FROM movies WHERE movie_title=?)"""
    query_database(link_sql, (genre_name, movie_title))
# Main-window event loop fragment.
# NOTE(review): this chunk is truncated — the body of the final
# 'Summary Statistics' branch is not visible here; code kept byte-identical.
while True:
    event, values = window.read()
    if event in (sg.WIN_CLOSED, 'End'):
        # If user closes window or clicks 'End'
        break
    # Help for various buttons
    elif event in ('existing_question', 'csv_question',
                   'similarities_question', 'search_question',
                   'stats_question', 'hypothesis_question', 'test_question'):
        display_popup_help(event)
    # New windows to process narratives
    elif event == 'From Existing Store':
        store_name = select_store()
        if store_name:
            # Report how many narratives the selected store holds
            success, count_results = query_database(
                'select', query_number_narratives, store_name)
            if success and 'results' in count_results.keys(
            ) and 'bindings' in count_results['results'].keys():
                count = int(count_results['results']['bindings'][0]['cnt']
                            ['value'])
                window['text-selected'].\
                    update(f'The data store, {store_name}, holds {count} narratives.')
            else:
                capture_error('The query for narrative count failed.', True)
    elif event == 'New, From CSV Metadata':
        store_name, count = ingest_narratives()
        if store_name:
            window['text-selected'].\
                update(f'{count} narratives were added to the data store, {store_name}')
    elif event == 'Summary Statistics':
def create_studio(studio):
    """Persist a studio row (id, name, movie title) to the studios table."""
    insert_sql = "INSERT INTO studios VALUES (?,?,?)"
    query_database(insert_sql,
                   (studio.studio_id, studio.studio_name, studio.movie_title))
def create_director(director):
    """Persist a director row (id, name, movie title) to the directors table."""
    insert_sql = "INSERT INTO directors VALUES (?,?,?)"
    query_database(insert_sql, (director.director_id, director.director_name,
                                director.movie_title))
def display_narratives(store_name):
    """ Display a list of all narratives in the specified store and allow
    selection of one for metadata/timeline display.

    :param store_name The database/data store name
    :return: None (Narrative timeline is displayed)
    """
    logging.info('Narrative selection')
    # Create the GUI Window
    narrative_dict = dict()
    try:
        success, narrative_names = query_database('select', query_narratives,
                                                  store_name)
        if success and 'results' in narrative_names.keys() and \
                'bindings' in narrative_names['results'].keys():
            for binding in narrative_names['results']['bindings']:
                # Map narrative title -> narrator local name (the IRI
                # fragment after the last ':')
                narrative_dict[binding['name']['value']] = binding['narrator'][
                    'value'].split(':')[-1]
        else:
            sg.popup_error(
                f'No narratives are defined in {store_name}. '
                f'Narrative timelines cannot be displayed.',
                font=('Arial', 14),
                button_color='dark blue',
                icon=encoded_logo)
    except Exception as e:
        capture_error(
            f'Exception getting narrative names from {store_name}: {str(e)}',
            True)
        return
    if not len(narrative_dict):
        # NOTE(review): string reconstructed as adjacent literals from a
        # garbled extraction — confirm exact wording against the original.
        sg.popup_ok(
            'No narratives were found in the store. '
            'Please ingest one or more using the "Load Narratives" button.',
            font=('Arial', 14),
            button_color='dark blue',
            icon=encoded_logo)
        return
    else:
        narrative_list = list(narrative_dict.keys())
    # Setup the PySimpleGUI window
    sg.theme('Material2')
    layout = [
        [
            sg.Text("Select a narrative and then press 'OK'.",
                    font=('Arial', 16))
        ],
        [
            sg.Text(
                "To exit without making a selection, press 'End' or close the window.",
                font=('Arial', 16))
        ],
        [
            sg.Listbox(narrative_list,
                       size=(30, 10),
                       key='narrative_list',
                       font=('Arial', 14),
                       background_color='#fafafa',
                       highlight_background_color='light grey',
                       highlight_text_color='black',
                       text_color='black')
        ], [sg.Text()],
        [
            sg.Button('OK',
                      button_color='dark blue',
                      font=('Arial', 14),
                      size=(5, 1)),
            sg.Button('End',
                      button_color='dark blue',
                      font=('Arial', 14),
                      size=(5, 1))
        ]
    ]
    # Create the GUI Window
    window_narrative_list = sg.Window('Select Narrative',
                                      layout,
                                      icon=encoded_logo).Finalize()
    # Event Loop to process window "events"
    while True:
        event_narrative_list, values = window_narrative_list.read()
        if event_narrative_list in (sg.WIN_CLOSED, 'End'):
            # If user closes window or clicks 'End'
            break
        if event_narrative_list == 'OK':
            if len(values['narrative_list']) != 1:
                # Require exactly one selection before proceeding
                sg.popup_error(
                    'Either no narrative was selected, or more than one was selected.',
                    font=('Arial', 14),
                    button_color='dark blue',
                    icon=encoded_logo)
            else:
                narrative_name = values['narrative_list'][0]
                narrative_text = ''
                try:
                    success1, narrative_text_results = query_database(
                        'select',
                        query_narrative_text.replace('narrative_name',
                                                     narrative_name),
                        store_name)
                    if success1 and 'results' in narrative_text_results.keys() and \
                            'bindings' in narrative_text_results['results'].keys():
                        narrative_text = narrative_text_results['results'][
                            'bindings'][0]['text']['value']
                    else:
                        # NOTE(review): string break reconstructed from a
                        # garbled extraction — confirm exact wording.
                        sg.popup_error(
                            f'Error retrieving the text for the narrative, {narrative_name}, '
                            f'from {store_name}. '
                            f'The narrative details cannot be displayed.',
                            font=('Arial', 14),
                            button_color='dark blue',
                            icon=encoded_logo)
                except Exception as e:
                    capture_error(
                        f'Exception getting narrative text for {narrative_name} '
                        f'from {store_name}: {str(e)}', True)
                    return
                if narrative_text:
                    _display_metadata(narrative_name,
                                      narrative_dict[narrative_name],
                                      narrative_text, store_name)
                    _display_timeline(narrative_name, narrative_text)
    # Done
    window_narrative_list.close()
    return
def _display_metadata(narrative_name: str, narrator: str, narrative_text: str,
                      store_name: str):
    """ Display a window with the narrative text and the narrator's metadata
    (names, gender, birth country, birth year).

    :param narrative_name: Title of the narrative
    :param narrator: Local name (IRI fragment) of the narrator
    :param narrative_text: Full text of the narrative
    :param store_name: The database/data store name
    :return: None (the metadata window is displayed)
    """
    logging.info(f'Displaying metadata and text for {narrative_name}')
    narrator_names = []
    # Default all metadata to 'Unknown' so the layout below can never hit a
    # KeyError when a query fails or returns nothing (previously 'gender'
    # could be missing entirely if the third query's guard failed).
    metadata_dict = {'country': 'Unknown', 'year': 'Unknown',
                     'gender': 'Unknown'}
    try:
        success1, metadata1_results = query_database(
            'select', query_metadata1.replace("?narrator", f':{narrator}'),
            store_name)
        if success1 and 'results' in metadata1_results.keys() and \
                'bindings' in metadata1_results['results'].keys():
            for binding in metadata1_results['results']['bindings']:
                narrator_names.append(binding['name']['value'])
        success2, metadata2_results = query_database(
            'select', query_metadata2.replace("?narrator", f':{narrator}'),
            store_name)
        # Bug fix: this guard previously inspected metadata1_results for
        # 'bindings' instead of metadata2_results (copy-paste error).
        if success2 and 'results' in metadata2_results.keys() and \
                'bindings' in metadata2_results['results'].keys():
            for binding in metadata2_results['results']['bindings']:
                # There should only be one result / one set of metadata
                # for the narrator
                if 'country' in binding.keys():
                    metadata_dict['country'] = binding['country']['value']
                else:
                    metadata_dict['country'] = 'Unknown'
                if 'year' in binding.keys():
                    metadata_dict['year'] = binding['year']['value']
                else:
                    metadata_dict['year'] = 'Unknown'
        success3, metadata3_results = query_database(
            'select', query_metadata3.replace("?narrator", f':{narrator}'),
            store_name)
        # Bug fix: same copy-paste error — checked metadata1_results here too.
        if success3 and 'results' in metadata3_results.keys() and \
                'bindings' in metadata3_results['results'].keys():
            if len(metadata3_results['results']['bindings']):
                gender = ''
                for binding in metadata3_results['results']['bindings']:
                    aspect = binding['aspect']['value'].split(':')[-1]
                    if aspect in ('Agender', 'Bigender', 'Female', 'Male'):
                        gender = aspect
                if gender:
                    metadata_dict['gender'] = gender
                else:
                    metadata_dict['gender'] = 'Unknown'
            else:
                # Bug fix: previously reset country/year here instead of
                # gender (copy-paste error).
                metadata_dict['gender'] = 'Unknown'
        # NOTE(review): fires only when ALL three queries fail, as in the
        # original; partial failures are not reported — confirm intended.
        if not (success1 or success2 or success3):
            sg.popup_error(
                f'Limited or no metadata was found for the narrator, {narrator.split(":")[-1]}. '
                f'At a minimum, the narrative text will be displayed.',
                font=('Arial', 14),
                button_color='dark blue',
                icon=encoded_logo)
    except Exception as e:
        capture_error(
            f'Exception getting narrator details from {store_name}: {str(e)}',
            True)
        return
    # Setup the PySimpleGUI window
    sg.theme('Material2')
    layout = [[
        sg.Text("Narrative Title:", font=('Arial', 16)),
        sg.Text(narrative_name, font=('Arial', 16))
    ], [sg.Text()],
        [
            sg.Text("Narrator Names:", font=('Arial', 16)),
            sg.Text(', '.join(narrator_names), font=('Arial', 16))
        ], [sg.Text()],
        [
            sg.Text("Narrator Gender:", font=('Arial', 16)),
            sg.Text(metadata_dict['gender'], font=('Arial', 16))
        ],
        [
            sg.Text("Narrator Birth Country:", font=('Arial', 16)),
            sg.Text(metadata_dict['country'], font=('Arial', 16))
        ],
        [
            sg.Text("Narrator Birth Year:", font=('Arial', 16)),
            sg.Text(metadata_dict['year'], font=('Arial', 16))
        ], [sg.Text()], [sg.Text("Text:", font=('Arial', 16))],
        [
            sg.Multiline(key='narr_text',
                         font=('Arial', 14),
                         size=(75, 30),
                         auto_refresh=True,
                         autoscroll=True,
                         background_color='#fafafa',
                         text_color='black',
                         write_only=True)
        ], [sg.Text()],
        [sg.Text("To exit, close the window.", font=('Arial', 16))]]
    window_metadata_list = sg.Window(f'Metadata for {narrative_name}',
                                     layout,
                                     icon=encoded_logo).Finalize()
    window_metadata_list.FindElement('narr_text').Update(narrative_text)
    window_metadata_list.FindElement('narr_text').Widget.configure()
    # Non-blocking read so the window renders without waiting for input
    window_metadata_list.read(timeout=0)
    return
def get_actors_movies_table():
    """Fetch every row of the actors_movies join table.

    Bug fix: the original issued the SELECT but discarded the result, making
    the call useless to callers; the rows are now returned. (Backward
    compatible — callers that ignored the previous None are unaffected.)
    """
    query = "SELECT * FROM actors_movies"
    return query_database(query)
def create_actor(Actor):
    """Persist an actor row (id, name) to the actors table."""
    insert_sql = "INSERT INTO actors VALUES (?,?)"
    query_database(insert_sql, (Actor.actor_id, Actor.actor_name))
def main():
    # Back up Dataverse datafiles recorded in the database to remote storage.
    # With '--rerun', the last-backup timestamp is ignored and everything is
    # re-processed, skipping files already recorded as 'OK'.
    # NOTE: this is Python 2 code (print statements, 'except ValueError, ve').
    rrmode = False
    if (len(sys.argv) > 1 and sys.argv[1] == '--rerun'):
        rrmode = True
    if rrmode:
        time_stamp = None
    else:
        time_stamp = get_last_timestamp()
    # No timestamp means a first run (or full re-run): process all records
    if time_stamp is None:
        print "No time stamp! first run (or a full re-run)."
        records = query_database()
    else:
        print "last backup: "+time_stamp
        records = query_database(time_stamp)
    # Counters for the run summary
    files_total=0
    files_success=0
    files_failed=0
    files_skipped=0
    for result in records:
        dataset_authority = result[0]
        dataset_identifier = result[1]
        storage_identifier = result[2]
        checksum_type = result[3]
        checksum_value = result[4]
        file_size = result[5]
        create_time = result[6]
        is_tabular_data = result[7]
        if (checksum_value is None):
            checksum_value = "MISSING"
        # Only process rows with a complete authority/identifier/storage triple
        if (storage_identifier is not None and dataset_identifier is not None and dataset_authority is not None):
            files_total += 1
            print dataset_authority + "/" + dataset_identifier + "/" + storage_identifier + ", " + checksum_type + ": " + checksum_value
            file_input=None
            # if this is a re-run, we are only re-trying the files that have failed previously:
            if (rrmode and get_datafile_status(dataset_authority, dataset_identifier, storage_identifier) == 'OK'):
                files_skipped += 1
                continue
            try:
                file_input = open_dataverse_file(dataset_authority, dataset_identifier, storage_identifier, is_tabular_data)
            except:
                # NOTE(review): bare except hides the real failure cause; the
                # file is simply treated as unreadable (FAIL_READ) below.
                print "failed to open file "+storage_identifier
                file_input=None
            if (file_input is not None):
                try:
                    backup_file(file_input, dataset_authority, dataset_identifier, storage_identifier, checksum_type, checksum_value, file_size)
                    print "backed up file "+storage_identifier
                    record_datafile_status(dataset_authority, dataset_identifier, storage_identifier, 'OK', create_time)
                    files_success += 1
                except ValueError, ve:
                    exception_message = str(ve)
                    print "failed to back up file "+storage_identifier+": "+exception_message
                    # Messages starting with "remote" indicate the copy
                    # reached the remote store but verification failed
                    if (re.match("^remote", exception_message) is not None):
                        record_datafile_status(dataset_authority, dataset_identifier, storage_identifier, 'FAIL_VERIFY', create_time)
                    else:
                        record_datafile_status(dataset_authority, dataset_identifier, storage_identifier, 'FAIL_WRITE', create_time)
                    files_failed += 1
                    #TODO: add a separate failure status 'FAIL_VERIFY' - for when it looked like we were able to copy the file
                    # onto the remote storage system, but the checksum verification failed (?)
            else:
                record_datafile_status(dataset_authority, dataset_identifier, storage_identifier, 'FAIL_READ', create_time)
                files_failed += 1