# Beispiel #1
# 0
    def submit_change(self):
        """Validate the covariate column names entered in the dialog and
        register the resulting covariate table with the facade.

        The follow-up action depends on which button triggered the call:
        ``btnColumns`` shows a confirmation, ``btnPreview`` opens a table
        preview, and ``btnSaveClose`` logs the columns and closes the
        dialog.

        Raises
        ------
        ValueError
            If any entered column name is not present in the loaded data.
        TypeError
            If the covariate columns cannot be converted to records.
        """
        sender = self.sender()
        self.covarlned['columns'] = hlp.string_to_list(self.lnedColumns.text())

        self.covarini = ini.InputHandler(name='covarinfo',
                                         tablename='covartable',
                                         lnedentry=self.covarlned)

        self.facade.input_register(self.covarini)
        self.facade.create_log_record('covartable')
        self._log = self.facade._tablelog['covartable']

        if self.covarlned['columns'][0] == '':
            # Nothing entered: skip column validation entirely.
            print('in pass')
        else:
            try:
                # Indexing only verifies that every entered column exists
                # in the loaded data; the result is intentionally unused.
                self.facade._data[self.covarlned['columns']]
            except Exception as e:
                print(str(e))
                self._log.debug(str(e))
                self.error.showMessage(
                    'Column names not valid: Check spacing ' + 'and headers.')
                # Chain the original lookup error for easier debugging.
                raise ValueError('Column names are incorrect') from e

        try:
            self.covardata = ddf.DictionaryDataframe(
                self.facade._data,
                self.covarlned['columns']).convert_records()
        except Exception as e:
            print(str(e))
            self._log.debug(str(e))
            self.error.showMessage('Could not concatenate columns')
            raise TypeError('Could not concatenate columns') from e

        self.facade.push_tables['covariates'] = self.covardata

        if sender is self.btnColumns:
            self.message.about(self, 'Status', 'Columns recorded')
        elif sender is self.btnPreview:
            self.covarmodel = views.PandasTableModelEdit(self.covardata)
            self.preview.tabviewPreview.setModel(self.covarmodel)
            self.preview.show()
        elif sender is self.btnSaveClose:
            hlp.write_column_to_log(self.covarlned, self._log, 'covartable')
            self.close()
def test_site_in_project_key_number_two(MergeToUpload, site_handle2,
                                        file_handle2, meta_handle2,
                                        project_handle2, taxa_handle2,
                                        time_handle2, count_handle2,
                                        covar_handle2):
    """End-to-end pipeline test: register every input handler with a
    Facade, build each push table (site, project, taxa, time, raw count,
    covariates), write the site and project tables to the database, and
    run the MergeToUpload merge steps.
    """
    facade = face.Facade()

    # Metadata must be registered and verified before data can be loaded.
    facade.input_register(meta_handle2)
    facade.meta_verify()

    facade.input_register(file_handle2)
    facade.load_data()

    # --- study site table -------------------------------------------
    facade.input_register(site_handle2)
    sitedirector = facade.make_table('siteinfo')
    study_site_table = sitedirector._availdf

    siteid = site_handle2.lnedentry['study_site_key']
    sitelevels = facade._data[siteid].drop_duplicates().values.tolist()
    facade.register_site_levels(sitelevels)
    print('test2 sitelevels: ', sitelevels)
    facade._valueregister['siteid'] = siteid

    print('study_site_table (test): ', study_site_table)

    facade.create_log_record('study_site_table')
    lter = meta_handle2.lnedentry['lter']
    # Append a constant LTER foreign-key column alongside the site data.
    ltercol = produce_null_df(1, ['lter_table_fkey'], len(study_site_table),
                              lter)
    study_site_table = concat([study_site_table, ltercol], axis=1)
    study_site_table_og_col = study_site_table.columns.values.tolist()

    # Only the first site row is pushed; its primary key is set to 'NULL'
    # below — presumably so the database assigns one on insert (confirm).
    study_site_table_single = study_site_table.iloc[0, :]

    study_site_table_single_df = DataFrame([study_site_table_single])
    study_site_table_single_df.columns = study_site_table_og_col

    print('study site single: ', study_site_table_single)

    study_site_table_single_df.loc[0, 'study_site_key'] = 'NULL'

    print('study_site_table: ', study_site_table_single_df)

    facade.push_tables['study_site_table'] = study_site_table_single_df

    # --- project table ----------------------------------------------
    facade.input_register(project_handle2)
    maindirector = facade.make_table('maininfo')
    project_table = maindirector._availdf.copy().reset_index(drop=True)
    orm.convert_types(project_table, orm.project_types)

    facade.push_tables['project_table'] = project_table
    facade.create_log_record('project_table')

    # --- taxa table --------------------------------------------------
    facade.input_register(taxa_handle2)
    taxadirector = facade.make_table('taxainfo')

    taxa_table = taxadirector._availdf
    facade.push_tables['taxa_table'] = taxa_table
    print('taxa columns after make taxa table: ', taxa_table.columns)

    facade.create_log_record('taxa_table')

    print('taxa columns before time_table: ', taxa_table.columns)

    # --- time table --------------------------------------------------
    facade.input_register(time_handle2)
    timetable = tparse.TimeParse(facade._data,
                                 time_handle2.lnedentry).formater()
    facade.push_tables['timetable'] = timetable
    facade.create_log_record('timetable')

    print('taxa columns before count_table: ', taxa_table.columns)
    # --- raw (count) table -------------------------------------------
    facade.input_register(count_handle2)
    rawdirector = facade.make_table('rawinfo')
    rawtable = rawdirector._availdf
    print(rawtable)
    facade.push_tables[count_handle2.tablename] = rawtable
    facade.create_log_record(count_handle2.tablename)

    print('taxa columns before covar_table: ', taxa_table.columns)
    # --- covariates ---------------------------------------------------
    facade.input_register(covar_handle2)
    covartable = ddf.DictionaryDataframe(
        facade._data, covar_handle2.lnedentry['columns']).convert_records()
    facade.push_tables['covariates'] = covartable
    facade.create_log_record('covartable')

    facade._valueregister['globalid'] = meta_handle2.lnedentry['globalid']
    facade._valueregister['lter'] = meta_handle2.lnedentry['lter']
    facade._valueregister['siteid'] = siteid

    # Suffix the time columns so they cannot collide with raw-data
    # columns when concatenated side by side.
    timetable_og_cols = timetable.columns.values.tolist()
    timetable.columns = [x + '_derived' for x in timetable_og_cols]
    observationdf = facade._data
    observation_time_df = concat([timetable, observationdf], axis=1)

    print('merge class obs_time columns: ', observation_time_df.columns)
    print('merge class project table: ', project_table)

    # Persist site and project tables before the merge steps that rely
    # on their database keys.
    study_site_table.to_sql('study_site_table',
                            orm.conn,
                            if_exists='append',
                            index=False)
    project_table['lter_project_fkey'] = facade._valueregister['lter']
    project_table.to_sql('project_table',
                         orm.conn,
                         if_exists='append',
                         index=False)

    print('taxa columns before site_in_proj method: ', taxa_table.columns)

    merge_object = MergeToUpload()
    site_in_project_key_df = merge_object.site_in_proj_key_df(
        studysitetabledf=study_site_table,
        projecttabledf=project_table,
        observationtabledf=observation_time_df,
        lterlocation=facade._valueregister['lter'],
        studysitelabel=siteid,
        studysitelevels=sitelevels)

    print('taxa columns before user taxa merge method: ', taxa_table.columns)
    merge_object.merge_for_taxa_table_upload(
        formated_taxa_table=taxa_table,
        siteinprojkeydf=site_in_project_key_df,
        sitelabel=siteid)

    # NOTE(review): keys (x[0]) are taken as raw-data columns and values
    # (x[1]) as push-table columns here; the corner-case sibling test
    # uses the opposite orientation — confirm which mapping is intended.
    taxa_column_in_data = [
        x[0] for x in list(facade._inputs['taxainfo'].lnedentry.items())
    ]

    taxa_column_in_push_table = [
        x[1] for x in list(facade._inputs['taxainfo'].lnedentry.items())
    ]

    merge_object.merge_for_datatype_table_upload(
        raw_dataframe=observation_time_df,
        formated_dataframe=rawtable,
        formated_dataframe_name='{}'.format(
            re.sub('_table', '', facade._inputs['rawinfo'].tablename)),
        covariate_dataframe=covartable,
        siteinprojkeydf=site_in_project_key_df,
        raw_data_taxa_columns=taxa_column_in_data,
        uploaded_taxa_columns=taxa_column_in_push_table)
# Beispiel #3
# 0
def test_site_in_project_key(MergeToUpload, site_handle_corner_case,
                             file_handle_corner_case, meta_handle_corner_case,
                             project_handle_corner_case,
                             taxa_handle_corner_case, time_handle_corner_case,
                             percent_cover_handle_corner_case,
                             covar_handle_corner_case):
    """Corner-case pipeline test: same facade workflow as the other
    site-in-project test, but with percent-cover raw data, remapped
    site codes, and a final update_project_table() step for the
    spatial-replication columns.
    """
    facade = face.Facade()

    facade.input_register(meta_handle_corner_case)
    facade.meta_verify()

    facade.input_register(file_handle_corner_case)
    facade.load_data()

    siteid = site_handle_corner_case.lnedentry['study_site_key']

    # Remap the raw single-letter site codes to full site names before
    # any site-level bookkeeping happens.
    facade._data[siteid].replace(
        {
            'C': 'site_jrn_zone_creosotebush',
            'G': 'site_jrn_zone_grassland'
        },
        inplace=True)

    # --- study site table -------------------------------------------
    facade.input_register(site_handle_corner_case)
    sitedirector = facade.make_table('siteinfo')
    study_site_table = sitedirector._availdf

    print('study_site_table (test): ', study_site_table)

    facade.create_log_record('study_site_table')
    lter = meta_handle_corner_case.lnedentry['lter']
    # Constant LTER foreign-key column appended next to the site data.
    ltercol = produce_null_df(1, ['lter_table_fkey'], len(study_site_table),
                              lter)
    study_site_table = concat([study_site_table, ltercol], axis=1)
    print('study_site_table: ', study_site_table)
    facade.push_tables['study_site_table'] = study_site_table

    sitelevels = facade._data[siteid].drop_duplicates().values.tolist()
    facade.register_site_levels(sitelevels)
    facade._valueregister['siteid'] = siteid

    # --- project table ----------------------------------------------
    facade.input_register(project_handle_corner_case)
    maindirector = facade.make_table('maininfo')
    project_table = maindirector._availdf.copy().reset_index(drop=True)
    orm.convert_types(project_table, orm.project_types)

    facade.push_tables['project_table'] = project_table
    facade.create_log_record('project_table')

    # --- taxa table --------------------------------------------------
    facade.input_register(taxa_handle_corner_case)
    taxadirector = facade.make_table('taxainfo')
    taxa_table = taxadirector._availdf
    facade.push_tables['taxa_table'] = taxa_table
    facade.create_log_record('taxa_table')

    # --- time table --------------------------------------------------
    facade.input_register(time_handle_corner_case)
    timetable = tparse.TimeParse(facade._data,
                                 time_handle_corner_case.lnedentry).formater()
    facade.push_tables['timetable'] = timetable
    facade.create_log_record('timetable')

    # --- raw (percent cover) table -----------------------------------
    facade.input_register(percent_cover_handle_corner_case)
    rawdirector = facade.make_table('rawinfo')
    rawtable = rawdirector._availdf
    print(rawtable)
    facade.push_tables[percent_cover_handle_corner_case.tablename] = rawtable
    facade.create_log_record(percent_cover_handle_corner_case.tablename)

    # --- covariates ---------------------------------------------------
    facade.input_register(covar_handle_corner_case)
    covartable = ddf.DictionaryDataframe(
        facade._data,
        covar_handle_corner_case.lnedentry['columns']).convert_records()
    facade.push_tables['covariates'] = covartable
    facade.create_log_record('covartable')

    facade._valueregister['globalid'] = meta_handle_corner_case.lnedentry[
        'globalid']
    facade._valueregister['lter'] = meta_handle_corner_case.lnedentry['lter']
    facade._valueregister['siteid'] = siteid

    # Suffix derived time columns to avoid clashes with raw-data columns.
    timetable_og_cols = timetable.columns.values.tolist()
    timetable.columns = [x + '_derived' for x in timetable_og_cols]
    observationdf = facade._data
    observation_time_df = concat([timetable, observationdf], axis=1)

    print('merge class obs_time columns: ', observation_time_df.columns)
    print('merge class project table: ', project_table)

    # Study-site upload is disabled here (commented out below);
    # presumably the sites already exist in the database — confirm.
    #    try:
    #        study_site_table.to_sql(
    #            'study_site_table',
    #            orm.conn, if_exists='append', index=False)
    #    except Exception as e:
    #        print('Sites in db: ', str(e))
    project_table['lter_project_fkey'] = facade._valueregister['lter']
    project_table.to_sql('project_table',
                         orm.conn,
                         if_exists='append',
                         index=False)

    merge_object = MergeToUpload()
    site_in_project_key_df = merge_object.site_in_proj_key_df(
        studysitetabledf=study_site_table,
        projecttabledf=project_table,
        observationtabledf=observation_time_df,
        lterlocation=facade._valueregister['lter'],
        studysitelabel=siteid,
        studysitelevels=sitelevels)

    merge_object.merge_for_taxa_table_upload(
        formated_taxa_table=taxa_table,
        siteinprojkeydf=site_in_project_key_df,
        sitelabel=siteid)

    # NOTE(review): push-table columns come from the lnedentry keys
    # (x[0]) and data columns from the values (x[1]) — the opposite
    # orientation from the other site-in-project test; confirm which
    # mapping is correct.
    taxa_column_in_push_table = [
        x[0] for x in list(facade._inputs['taxainfo'].lnedentry.items())
    ]

    taxa_column_in_data = [
        x[1] for x in list(facade._inputs['taxainfo'].lnedentry.items())
    ]

    merge_object.merge_for_datatype_table_upload(
        raw_dataframe=observation_time_df,
        formated_dataframe=rawtable,
        formated_dataframe_name='{}'.format(
            re.sub('_table', '', facade._inputs['rawinfo'].tablename)),
        covariate_dataframe=covartable,
        siteinprojkeydf=site_in_project_key_df,
        raw_data_taxa_columns=taxa_column_in_data,
        uploaded_taxa_columns=taxa_column_in_push_table)

    # Spatial-replication columns from the raw info entries feed the
    # final project-table update.
    obs_columns_in_data = [
        x[1] for x in list(facade._inputs['rawinfo'].lnedentry.items())
    ]
    obs_columns_in_push_table = [
        x[0] for x in list(facade._inputs['rawinfo'].lnedentry.items())
    ]
    merge_object.update_project_table(
        spatial_rep_columns_from_og_df=obs_columns_in_data,
        spatial_rep_columns_from_formated_df=obs_columns_in_push_table)
def test(Facade_push, site_handle_1_count, file_handle_1_count,
         meta_handle_1_count, project_handle_1_count, taxa_handle_1_count,
         time_handle_1_count, count_handle_1_count, covar_handle_1_count):
    """Register every handler with a facade, assemble all push tables
    for a count dataset, and exercise the final push_merged_data() call.
    """
    facade = Facade_push()
    facade.input_register(meta_handle_1_count)
    facade.meta_verify()

    facade.input_register(file_handle_1_count)
    facade.load_data()

    # --- study site table -------------------------------------------
    facade.input_register(site_handle_1_count)
    sitedirector = facade.make_table('siteinfo')
    study_site_table = sitedirector._availdf

    # NOTE(review): this chained assignment also stores the lter value
    # under _valueregister['globalid'], which is overwritten with the
    # real global id further down — confirm the intermediate write is
    # intentional and not a typo for 'lter'.
    study_site_table['lter_table_fkey'] = facade._valueregister[
        'globalid'] = meta_handle_1_count.lnedentry['lter']
    print(study_site_table)

    facade.push_tables['study_site_table'] = study_site_table

    siteid = site_handle_1_count.lnedentry['study_site_key']
    sitelevels = facade._data[siteid].drop_duplicates().values.tolist()
    facade.register_site_levels(sitelevels)

    # --- project table ----------------------------------------------
    facade.input_register(project_handle_1_count)
    maindirector = facade.make_table('maininfo')
    project_table = maindirector._availdf.copy().reset_index(drop=True)
    orm.convert_types(project_table, orm.project_types)
    facade.push_tables['project_table'] = project_table
    print('project table: ', project_table)

    # --- taxa table --------------------------------------------------
    facade.input_register(taxa_handle_1_count)
    taxadirector = facade.make_table('taxainfo')
    facade.push_tables['taxa_table'] = taxadirector._availdf

    # --- time table --------------------------------------------------
    facade.input_register(time_handle_1_count)
    timetable = tparse.TimeParse(facade._data,
                                 time_handle_1_count.lnedentry).formater()
    # Suffix the derived time columns and concatenate them beside the
    # raw observations; the combined frame is pushed as the time table.
    timetable_og_cols = timetable.columns.values.tolist()
    timetable.columns = [x + '_derived' for x in timetable_og_cols]
    observationdf = facade._data
    observation_time_df = concat([timetable, observationdf], axis=1)

    facade.push_tables['timetable'] = observation_time_df

    # --- raw (count) table -------------------------------------------
    facade.input_register(count_handle_1_count)
    rawdirector = facade.make_table('rawinfo')
    facade.push_tables[
        facade._inputs['rawinfo'].tablename] = rawdirector._availdf

    # --- covariates ---------------------------------------------------
    facade.input_register(covar_handle_1_count)
    covartable = ddf.DictionaryDataframe(
        facade._data,
        covar_handle_1_count.lnedentry['columns']).convert_records()
    facade.push_tables['covariates'] = covartable

    facade._valueregister['globalid'] = meta_handle_1_count.lnedentry[
        'globalid']
    facade._valueregister['lter'] = meta_handle_1_count.lnedentry['lter']
    facade._valueregister['siteid'] = facade._inputs['siteinfo'].lnedentry[
        'study_site_key']

    # Upload everything assembled above in one step.
    facade.push_merged_data()
# Beispiel #5
# 0
        # ------------------
        try:
            covarlned = {'columns': covar_dict['columns'][1][0]}
        except Exception as e:
            covarlned = {'columns': ['']}

        if covarlned['columns']:
            pass
        else:
            covarlned['columns'] = ['']

        covarinput = ini.InputHandler(name='covarinfo',
                                      tablename='covartable',
                                      lnedentry=covarlned)
        facade.input_register(covarinput)
        covartable = ddf.DictionaryDataframe(
            facade._data, covarinput.lnedentry['columns']).convert_records()
        facade.push_tables['covariates'] = covartable

        # ------------------
        # Create RAW DATA HANLDE
        # ------------------
        obslned = OrderedDict(
            (('spatial_replication_level_2',
              obs_dict['spt_rep2'][1][0].strip().strip()),
             ('spatial_replication_level_3',
              obs_dict['spt_rep3'][1][0].strip().strip()),
             ('spatial_replication_level_4',
              obs_dict['spt_rep4'][1][0].strip().strip()),
             ('spatial_replication_level_5',
              ''), ('structure_type_1', obs_dict['structure'][1][0].strip()),
             ('structure_type_2', ''), ('structure_type_3', ''),