def main(file_paths):

    # unpack file paths:
    videos_table_file_path = file_paths['videos_table']
    manually_added_data_table_file_path = file_paths[
        'manually_added_data_table']
    videos_query = file_paths['populate_videos']
    manually_added_data_table_query = file_paths[
        'populate_manually_added_data']

    # Step 1, initialize script:
    tools.InitializeScript(os.path.basename(__file__), **tools.kwargs)

    # Step 2, read tables:
    videos_table = tools.ReadDataFromFile(videos_table_file_path)
    manually_added_data_table = tools.ReadDataFromFile(
        manually_added_data_table_file_path)

    # Step 3, prep tables for entry:
    videos_table = funcs.PrepVideosTable(videos_table)
    manually_added_data_table = funcs.PrepManuallyAddedDataTable(
        manually_added_data_table)

    # Step 4, populate postgres:
    funcs.PopulateVideosTable(videos_table, videos_query)
    funcs.PopulateManuallyAddedDataTable(manually_added_data_table,
                                         manually_added_data_table_query)
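# A minimal sketch of the file_paths dictionary this entry point expects; the
# keys mirror the lookups above, while the paths themselves are hypothetical
# placeholders, not the project's real locations:
example_file_paths = {
    'videos_table': '/path/to/videos_table.pkl',
    'manually_added_data_table': '/path/to/manually_added_data_table.pkl',
    'populate_videos': '/path/to/populate_videos.sql',
    'populate_manually_added_data': '/path/to/populate_manually_added_data.sql',
}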
def main(file_paths):

    # Unpack files:
    reports_table = file_paths['reports_table']
    descriptors_file = file_paths['descriptors']
    export_file = file_paths['reports_table_export']
    populate_query = file_paths['populate_query']

    # Step 1, initialize script:
    tools.InitializeScript(os.path.basename(__file__))

    # Step 2, read reports files:
    reports = tools.ReadDataFromFile(reports_table)

    # Step 3, parse reports:
    reports = funcs.ParseReports(reports)

    # Step 4, read descriptors:
    descriptors = funcs.ReadDescriptors(descriptors_file)

    # Step 5, get dictionary of normal descriptors:
    normal_features = funcs.GetDictionaryOfNormalDescriptors(descriptors)

    # Step 6, build reports table:
    reports_table = funcs.BuildReportTable(normal_features, reports,
                                           descriptors)

    # Step 7, write reports table to database:
    tools.ExportDataToFile(reports_table, export_file)
    funcs.ExportReportsTable(reports_table, populate_query)

    return reports, descriptors, reports_table
def main(file_paths):

    # Unpack files:
    insert_report_id_column_query = file_paths['insert_report_id_column_query']

    # Step 1, initialize script:
    tools.InitializeScript(os.path.basename(__file__))

    # Step 2, find report ids for each video:
    ids_table = funcs.FindReportIDs()

    # Step 3, add report id column to videos table:
    funcs.AddReportIDsToVidoes(ids_table, insert_report_id_column_query)
def main(file_paths):

    # unpack file paths:
    videos_table_file_path = file_paths['videos_table']

    # Step 1, initialize script:
    tools.InitializeScript(os.path.basename(__file__), **tools.kwargs)

    # Step 2, get root paths:
    root_paths = funcs.GetRootFolders()

    # Step 3, get list of folders:
    videos_table = funcs.GetFilePaths(root_paths)

    # Step 4, rename jpegs with leading zeros:
    funcs.RenameJpegs(videos_table)

    # Step 5, parse data:
    videos_table = funcs.ParseVideosTable(videos_table)

    # Step 6, export data:
    tools.ExportDataToFile(videos_table, videos_table_file_path)
def main(file_paths):

    # unpack file paths:
    videos_table_file_path = file_paths['videos_table']
    manually_added_data_table_file_path = file_paths[
        'manually_added_data_table']

    # Step 1, initialize script:
    tools.InitializeScript(os.path.basename(__file__), **tools.kwargs)

    # Step 2, read videos table:
    videos_table = tools.ReadDataFromFile(videos_table_file_path)

    # Step 3, create manually added data table:
    manually_added_data_table = funcs.BuildManuallyAddedDataTable(videos_table)

    # Step 4, build webms:
    funcs.BuildWebmFiles(manually_added_data_table)

    # Step 5, export data:
    tools.ExportDataToFile(manually_added_data_table,
                           manually_added_data_table_file_path)
import psycopg2
import pandas.io.sql as sqlio
from time import time


def QueryDatabase(database_dictionary, database_query, verbose=False, start=time()):

    ''' Accepts a database connection dictionary and an SQL query string; returns the query result (SELECT queries only) '''

    # initialize variables:
    result = None
    connection = None

    try:
        
        # establish connection:
        connection = psycopg2.connect(
            host=database_dictionary['host'],
            database=database_dictionary['database'],
            user=database_dictionary['user'],
            password=database_dictionary['password'],
        )

        # build cursor:
        cursor = connection.cursor()

        # execute query:
        cursor.execute(database_query)
    
        # retrieve result (SELECT queries only):
        if tools.QueryType(database_query) == 'SELECT':
            result = sqlio.read_sql_query(database_query, connection)

        # commit changes:
        connection.commit()
        
        # close cursor:
        cursor.close()
        
    except Exception as error:
        
        # handle exceptions:
        raise RuntimeError('[ERROR] in [QueryDatabase] with query [%s]: Database error: %s' % (database_query, error)) from error

    finally:
        
        # close connection:
        if connection is not None:
            connection.close()
    
    if verbose:
        print('[@ %7.2f s] [QueryDatabase]: Queried database' %(time()-start))
        
    return result
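# A minimal usage sketch for QueryDatabase, not part of the original pipeline:
# the connection settings and query below are hypothetical placeholders, and the
# call assumes psycopg2 and pandas are installed and a Postgres instance is
# reachable with these credentials.
def _example_query_usage():

    # hypothetical connection dictionary; replace with real credentials:
    example_database_dictionary = {
        'host': 'localhost',
        'database': 'echo_data',
        'user': 'postgres',
        'password': 'postgres',
    }

    # a SELECT query returns a pandas DataFrame; other statements return None:
    example_query = 'SELECT * FROM videos LIMIT 10;'
    result = QueryDatabase(example_database_dictionary, example_query, verbose=True)

    return result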
def main():

    # Directory tree:
    file_paths = {
        'reports_table':
        '/labelling_app/patient_descriptors.csv',
        'descriptors':
        '/internal_drive/Imported Data/Descriptors.xlsx',
        'reports_table_export':
        '/sandbox/dsokol/echo_production_pipeline/Database/Tables/reports_table.pkl',
        'populate_query':
        '/sandbox/dsokol/echo_production_pipeline/Database/EchoData/Queries/DataManagementQueries/populate_reports_table.sql',
        'insert_report_id_column_query':
        '/sandbox/dsokol/echo_production_pipeline/Database/EchoData/Queries/DataManagementQueries/insert_report_id_columns.sql',
    }

    # Step 1, initialize script:
    tools.InitializeScript(os.path.basename(__file__))

    # Step 2, reports reader pipeline:
    #ReportsReaderPipeline.main(file_paths)

    # Step 3, videos table reports column pipeline:
    VideosTableReportsColumnPipeline.main(file_paths)
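# The driver above is presumably run as a script; a standard entry-point guard
# (not shown in the original snippet) would invoke it like this:
if __name__ == '__main__':
    main()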