def update_validated_submissions_table(syn, project_id, valid_df):
    """
    Push the latest version of the combined validated submissions 
    table to Synapse.
    """
    try:
        print("Searching for existing 'ValidatedSubmissions' table...")
        schema_id = [t for t in syn.getChildren(project_id, includeTypes=['table'])
                     if t['name'] == 'ValidatedSubmissions'][0]['id']
        schema = syn.get(schema_id)
        validated_subs_table = syn.tableQuery('select * from {}'.format(schema_id))
        if validated_subs_table.asDataFrame().shape[0] == valid_df.shape[0]:
            print("No new valid submissions since last update.")
            return
        validated_subs_table.schema = schema
        print("Updating 'ValidatedSubmissions' table...")
        update_table = synapseclient.Table(schema, valid_df)
        validated_subs_table = _update_syn_table(validated_subs_table, update_table, 'objectId')
    except IndexError:
        print("Creating 'ValidatedSubmissions' table...")
        project = syn.get(project_id)
        cols = synapseclient.as_table_columns(valid_df)
        schema = synapseclient.Schema(name='ValidatedSubmissions', columns=cols, parent=project)
        validated_subs_table = synapseclient.Table(schema, valid_df)
    print("Storing 'ValidatedSubmissions' table...")
    validated_subs_table = syn.store(validated_subs_table)
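A minimal usage sketch, assuming an authenticated client; the project ID and submissions DataFrame here are hypothetical, and the 'objectId' column is the merge key expected by _update_syn_table:

import pandas as pd
import synapseclient

syn = synapseclient.login()  # credentials from ~/.synapseConfig
# Hypothetical inputs for illustration only
valid_df = pd.DataFrame({"objectId": [9603055, 9603056],
                         "status": ["VALIDATED", "VALIDATED"]})
update_validated_submissions_table(syn, "syn00000000", valid_df)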
Example #2
def createMafDatabase(syn,
                      databaseToSynIdMappingDf,
                      testing=False,
                      staging=False):
    mafDatabaseSynId = process_functions.getDatabaseSynId(
        syn, "vcf2maf", databaseToSynIdMappingDf=databaseToSynIdMappingDf)
    mafDatabaseEnt = syn.get(mafDatabaseSynId)
    mafCols = list(syn.getTableColumns(mafDatabaseSynId))
    schema = synapseclient.Schema(
        name='Narrow MAF %s Database' % time.time(),
        columns=mafCols,
        parent=process_functions.getDatabaseSynId(
            syn, "main", databaseToSynIdMappingDf=databaseToSynIdMappingDf))
    schema.primaryKey = mafDatabaseEnt.primaryKey
    newMafDb = syn.store(schema)
    # Store the new database synid in the mapping
    databaseToSynIdMappingDf.loc[0, 'Id'] = newMafDb.id
    syn.store(
        synapseclient.Table(
            process_functions.getDatabaseSynId(syn, "dbMapping", test=testing),
            databaseToSynIdMappingDf))
    if not staging and not testing:
        #Make sure to store the newly created maf db synid into the staging synapse mapping
        databaseToSynIdMapping = syn.tableQuery(
            "SELECT * FROM syn12094210 where Database = 'vcf2maf'")
        databaseToSynIdMappingDf = databaseToSynIdMapping.asDataFrame()
        databaseToSynIdMappingDf.loc[0, 'Id'] = newMafDb.id
        syn.store(synapseclient.Table("syn12094210", databaseToSynIdMappingDf))
    #Move and archive old mafdatabase
    mafDatabaseEnt.parentId = "syn7208886"
    mafDatabaseEnt.name = "ARCHIVED " + mafDatabaseEnt.name
    syn.store(mafDatabaseEnt)
    mafDatabaseSynId = newMafDb.id
    # Remove "can download" permissions from the project GENIE team
    syn.setPermissions(mafDatabaseSynId, 3326313, [])
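The schema-cloning step above (copying an existing table's column model into a new Schema under another parent) generalizes; a minimal sketch, with a hypothetical helper name and caller-supplied IDs:

import time
import synapseclient

def clone_empty_table(syn, source_table_synid, parent_synid, name=None):
    # Reuse the source table's column model for a fresh, empty table
    cols = list(syn.getTableColumns(source_table_synid))
    schema = synapseclient.Schema(
        name=name or 'Clone %s' % time.time(),
        columns=cols,
        parent=parent_synid)
    return syn.store(schema)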
Example #3
def main():
    credentials = get_credentials()
    syn = sc.login(email=credentials['synapseUsername'],
                   password=credentials['synapsePassword'])
    new_users = get_new_users(syn)
    if isinstance(new_users, tuple): # returned error message
        table_row = create_table_row(new_users[0], new_users[1],
                                     new_users[2], new_users[3])
        syn.store(sc.Table(OUTPUT_TABLE, [table_row]))
        return
    duplicated_numbers = new_users.phone_number.duplicated(keep=False)
    if any(duplicated_numbers):
        duplicates = new_users.loc[duplicated_numbers]
        if len(duplicates.guid) == len(duplicates.guid.unique()):
            table_row = create_table_row("Error: It looks like you accidentally "
                                         "entered an incorrect guid and tried to "
                                         "submit a corrected one immediately "
                                         "afterwards. Please delete the duplicate "
                                         "phone number from the Users table "
                                         "(syn16784393) and this table (syn16786935) "
                                         "and resubmit.", duplicates.phone_number.iloc[0],
                                         "", duplicates.visit_date.iloc[0])
            # table_row is only defined in this branch, so store it here
            syn.store(sc.Table(OUTPUT_TABLE, [table_row]))
        return
    to_append_to_table = []
    for i, user in new_users.iterrows():
        phone_number = int(user.phone_number)
        guid = user.guid
        visit_date = int(user.visit_date)
        print("phone_number: ", phone_number)
        print("guid: ", guid)
        print("visit_date: ", visit_date)
        try:
            if not (len(str(phone_number)) == 10 and str(phone_number).isdigit()):
                table_row = create_table_row("Error: The phone number is improperly "
                                             "formatted. Please enter a valid, 10-digit "
                                             "number",
                                             phone_number, guid, visit_date)
            else:
                bridge = get_bridge_client(credentials['bridgeUsername'],
                                           credentials['bridgePassword'])
                participant_info = get_participant_info(bridge, phone_number)
                status = process_request(bridge, participant_info,
                                         phone_number, guid)
                table_row = create_table_row(status, phone_number,
                                             guid, visit_date)
        except Exception as e:
            table_row = create_table_row("Error: One of the fields is improperly "
                                         "formatted. Console output: {0}".format(e),
                                         -1, guid, visit_date)
        to_append_to_table.append(table_row)
    if len(to_append_to_table):
        syn.store(sc.Table(OUTPUT_TABLE, to_append_to_table))
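create_table_row is defined elsewhere in this module; judging from the call sites above, it plausibly packs the four fields into a row list in the OUTPUT_TABLE column order (a hypothetical reconstruction):

def create_table_row(status, phone_number, guid, visit_date):
    # Hypothetical: one row in OUTPUT_TABLE column order
    return [status, phone_number, guid, visit_date]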
def test_manually_pass_source_tables_dict(syn, tables, new_project,
                                          sample_table):
    source_table = tables["schema"][0]["id"]
    schema = sc.Schema(name=tables["schema"][0]["name"],
                       columns=tables["columns"][0],
                       parent=new_project["id"])
    incomplete_table = deepcopy(sample_table.iloc[:len(sample_table) // 2])
    rest_of_the_table = deepcopy(sample_table.iloc[len(sample_table) // 2:])
    table = syn.store(sc.Table(schema, incomplete_table))
    source_tables = {source_table: rest_of_the_table}
    exported_table = export_tables(syn,
                                   table_mapping={source_table: table.tableId},
                                   source_tables=source_tables,
                                   update=True)
    updated_table = syn.tableQuery("select * from {}".format(table.tableId))
    updated_table = updated_table.asDataFrame().reset_index(drop=True)
    updated_table_no_fh = updated_table.drop("raw_data", axis=1)
    update = exported_table[source_table][1]
    correct_table_no_fh = incomplete_table.append(update,
                                                  ignore_index=True,
                                                  sort=False)
    correct_table_no_fh = correct_table_no_fh.drop(
        "raw_data", axis=1).reset_index(drop=True)
    print("returned results \n", updated_table_no_fh)
    print("correct result \n", correct_table_no_fh)
    pd.testing.assert_frame_equal(updated_table_no_fh, correct_table_no_fh)
def test_rename_column(syn, new_tables):
    source_table = new_tables["schema"][0]
    target_table = new_tables["schema"][1]
    source_cols = new_tables["columns"][0]
    source_table_vals = syn.tableQuery("select * from {}".format(
        source_table["id"])).asDataFrame()
    renamed_col = source_cols[0]
    original_col_name = renamed_col["name"]
    new_col_name = "table_index"
    source_table.removeColumn(renamed_col)
    renamed_col["name"] = new_col_name
    renamed_col.pop("id")
    source_table.addColumn(renamed_col)
    source_table = syn.store(source_table)
    source_table_vals = source_table_vals.rename(
        {original_col_name: new_col_name}, axis=1)
    syn.store(sc.Table(source_table, source_table_vals))
    target_table = synchronize_schemas(
        syn,
        schema_comparison={"renamed": {
            original_col_name: new_col_name
        }},
        source=source_table["id"],
        target=target_table["id"])
    source_cols = [c["name"] for c in syn.getTableColumns(source_table["id"])]
    target_cols = [c["name"] for c in syn.getTableColumns(target_table["id"])]
    assert all([c in target_cols for c in source_cols])
Example #6
File: maf.py Project: kdaily/Genie
    def storeProcessedMaf(
            self, filePath, mafSynId, centerMafSynId, isNarrow=False):
        '''
        Stores the processed maf.
        There is an isNarrow option, but note that the number of rows
        of the maf file DOES NOT change in this function.

        Args:
            filePath: Path to the maf file
            mafSynId: database synid
            centerMafSynId: center flat file folder synid
            isNarrow: Is the file a narrow maf. Defaults to False.
        '''
        logger.info('STORING %s' % filePath)
        database = self.syn.get(mafSynId)
        if isNarrow:
            try:
                update_table = synapseclient.Table(
                    database.id, filePath, separator="\t")
                self.syn.store(update_table)
            except SynapseTimeoutError:
                # This error occurs because of waiting for table to index.
                # Don't worry about this.
                pass
        else:
            self.syn.store(
                synapseclient.File(filePath, parentId=centerMafSynId))
        return filePath
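For the narrow branch above, synapseclient.Table accepts a delimited file path directly, so a tab-separated maf can be appended to an existing table without loading it into memory; a minimal sketch, with a hypothetical table ID and file name:

import synapseclient

syn = synapseclient.login()
# Hypothetical: append rows from a tab-delimited file to an existing table
update_table = synapseclient.Table("syn00000000", "processed.maf", separator="\t")
syn.store(update_table)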
Example #7
def update_data_release_file_table(syn, database_mappingdf):
    release_folder_fileview_synid = database_mappingdf['Id'][
        database_mappingdf['Database'] == 'releaseFolder'].values[0]
    release_folder = syn.tableQuery(
        "select id,name from %s" % release_folder_fileview_synid +
        " where name not like 'Release%' and name <> 'case_lists' " +
        "and name not like '0.%'")
    release_folderdf = release_folder.asDataFrame()

    data_release_table_synid = "syn16804261"
    data_release_table = syn.tableQuery("select * from %s" %
                                        data_release_table_synid)
    data_release_tabledf = data_release_table.asDataFrame()

    not_in_release_tabledf = release_folderdf[
        ~release_folderdf.name.isin(data_release_tabledf.release)]

    for synid, name in \
            zip(not_in_release_tabledf.id, not_in_release_tabledf.name):
        release_files = syn.getChildren(synid)

        append_rows = [[
            release_file['name'], release_file['id'], name,
            string_to_unix_epoch_time_milliseconds(release_file['modifiedOn']),
            synid
        ] for release_file in release_files
                       if release_file['name'] != "case_lists"]

        syn.store(synapseclient.Table(data_release_table_synid, append_rows))
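string_to_unix_epoch_time_milliseconds is an external helper; a plausible sketch, assuming modifiedOn arrives as an ISO 8601 string such as '2019-01-01T00:00:00.000Z':

from datetime import datetime, timezone

def string_to_unix_epoch_time_milliseconds(time_string):
    # Parse the ISO 8601 timestamp and convert to epoch milliseconds
    dt = datetime.strptime(time_string, "%Y-%m-%dT%H:%M:%S.%fZ")
    return int(dt.replace(tzinfo=timezone.utc).timestamp() * 1000)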
def issues_to_table_handler(event, context):

    table_id = event['table_id']

    repo = event['repo']
    issues_url = 'https://api.github.com/repos/%s/issues' % repo

    syn = synapseclient.login(email=SYNAPSE_USERNAME,
                              apiKey=SYNAPSE_API_KEY,
                              silent=True)

    tmpfile = tempfile.NamedTemporaryFile(suffix=".csv")
    csvout = csv.writer(tmpfile)
    csvout.writerow(('id', 'Title', 'Labels', 'Created At', 'Updated At',
                     'URL', 'Milestone'))
    process(csvout, url=issues_url)
    tmpfile.flush()

    logger.debug("Wrote to %s" % tmpfile.name)

    try:
        delete_all_rows(syn, table_id)
    except synapseclient.exceptions.SynapseHTTPError:
        pass

    table = syn.store(synapseclient.Table(syn.get(table_id), tmpfile.name))

    return {'message': "Stored issues to table %s" % (table_id, )}
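delete_all_rows is an external helper; a minimal sketch consistent with the row-deletion pattern used in updateTable further down (query the table, then delete the returned row set):

def delete_all_rows(syn, table_id):
    # Fetch every row and delete the resulting row set
    current_table = syn.tableQuery("SELECT * FROM %s" % table_id)
    return syn.delete(current_table.asRowSet())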
Example #9
    def publish(self, validate=True):
        """ Store `self.view` back to the file view it was derived
        from on Synapse.

        Parameters
        ----------
        validate : bool
            Optional. Whether to warn of possible errors in `self.view`.
            Defaults to True.
        """
        if validate:
            warnings = self._validate()
            if len(warnings):
                for w in warnings:
                    print(w)
                print()
                continueAnyways = self._getUserConfirmation()
                if not continueAnyways:
                    print("Publish canceled.")
                    return
        t = sc.Table(self._entityViewSchema.id, self.view)
        print("Storing to Synapse...")
        t_online = self.syn.store(t)
        print("Fetching new table index...")
        self.view = utils.synread(self.syn, self._entityViewSchema.id)
        self._index = self.view.index
        print("You're good to go :~)")
        return self._entityViewSchema.id
def test_schema_change(syn, tables, new_project, sample_table):
    source_table = tables["schema"][0]["id"]
    target_table_cols = deepcopy(tables["columns"][0])
    added_col = target_table_cols.pop(2)
    renamed_original_name = target_table_cols[2]["name"]
    target_table_cols[2]["name"] = "renamed_col"
    target_table_cols[3]["maximumSize"] = 100
    schema = sc.Schema(name=tables["schema"][0]["name"],
                       columns=target_table_cols,
                       parent=new_project["id"])
    incomplete_table = deepcopy(sample_table.iloc[:len(sample_table) // 2])
    incomplete_table = incomplete_table.drop(added_col["name"], axis=1)
    incomplete_table = incomplete_table.rename(
        {renamed_original_name: "renamed_col"}, axis=1)
    table = syn.store(sc.Table(schema, incomplete_table))
    exported_table = export_tables(syn,
                                   table_mapping={source_table: table.tableId},
                                   update=False)
    updated_table = syn.tableQuery("select * from {}".format(table.tableId))
    updated_table = updated_table.asDataFrame().reset_index(drop=True)
    updated_table_no_fh = updated_table.drop("raw_data", axis=1)
    comparison_table = sample_table.drop("raw_data",
                                         axis=1).reset_index(drop=True)
    updated_table_no_fh = updated_table_no_fh[comparison_table.columns]
    print(updated_table_no_fh)
    print(comparison_table)
    pd.testing.assert_frame_equal(updated_table_no_fh, comparison_table)
Example #11
def updateTable(syn, tableSynId, newTable, releaseVersion):
    """
    Gets the current annotation table, deletes all it's rows, then updates the table with new content generated
    from all the json files on synapseAnnotations. In the process also updates the table annotation to the latest
    releaseversion.

    :param syn:
    :param tableSynId:
    :param newTable:
    :param releaseVersion:
    :return:
    """
    currentTable = syn.tableQuery("SELECT * FROM %s" % tableSynId)

    # If the current table has rows, delete all of them
    if currentTable.asRowSet().rows:
        syn.delete(currentTable.asRowSet())

    # get the table schema and set its release version annotation
    tableSchema = syn.get(tableSynId)
    tableSchema.annotations = {"annotationReleaseVersion": str(releaseVersion)}
    updated_schema_release = syn.store(tableSchema)

    # store the new table on synapse
    table = syn.store(synapseclient.Table(tableSchema, newTable))
Example #12
def store_table_and_reindex(syn, tbl, res):
    """Store a Synapse table and then force it to be indexed.

    """
    new_tbl = syn.store(synapseclient.Table(tbl, res))
    logger.debug("Stored table, forcing reindex.")
    _ = syn.tableQuery(f'SELECT id FROM {tbl} LIMIT 1')
    return(new_tbl)
Example #13
def registerAssignments(syn, count, table_id, primary_col, assignee_col, state_col, force=None, debug=False, display=False):
    table = syn.get(table_id)
    if table.entityType != "org.sagebionetworks.repo.model.table.TableEntity":
        return
    username = syn.getUserProfile()['userName']

    if force is None:
        results = syn.tableQuery('select * from %s where "%s" is NULL limit %s' % (table.id, assignee_col, count))
        df = results.asDataFrame()
        for row in df.index:
            df.loc[row,assignee_col] = username
        syn.store(synapseclient.Table(table, df, etag=results.etag))
    else:
        results = syn.tableQuery('select * from %s where "%s"=\'%s\'' % (table.id, primary_col, force))
        df = results.asDataFrame()
        for row in df.index:
            df.loc[row,assignee_col] = username
        syn.store(synapseclient.Table(table, df, etag=results.etag))
Example #14
def table(syn, parent, obj):
    df = read(obj)
    cols = synapseclient.as_table_columns(df)
    schema = synapseclient.Schema(name=str(uuid.uuid4()),
                                  columns=cols,
                                  parent=parent)
    schema = syn.store(schema)
    table = syn.store(synapseclient.Table(schema, df))
    return schema
def _store_dataframe_to_table(syn,
                              df,
                              df_cols,
                              table_id=None,
                              parent_id=None,
                              table_name=None,
                              **kwargs):
    """Store a pandas DataFrame to Synapse in a safe way by formatting the
    the values so that the store operation is not rejected by Synapse.

    Parameters
    ----------
    syn : synapseclient.Synapse
    df : pandas.DataFrame
    df_cols : list of synapseclient.Column objects
    table_id : str, default None
        Synapse ID of a preexisting Synapse Table to store `df` to.
        Either `table_id` or both `parent_id` and `table_name` must
        be supplied as arguments.
    parent_id : str, default None
        Synapse ID of the project to store `df` to as a table.
        Either `table_id` or both `parent_id` and `table_name` must
        be supplied as arguments.
    table_name : str, default None
        Either `table_id` or both `parent_id` and `table_name` must
        be supplied as arguments.
    **kwargs :
        Keyword arguments to provide to syn.store (useful for provenance)
    """
    if table_id is None and (parent_id is None or table_name is None):
        raise TypeError("Either the table Synapse ID must be set or "
                        "both the parent ID and table name must be set.")
    sanitized_dataframe = _sanitize_dataframe(syn, records=df, cols=df_cols)
    if table_id is None:
        target_table_schema = sc.Schema(name=table_name,
                                        parent=parent_id,
                                        columns=df_cols)
        target_table = sc.Table(schema=target_table_schema,
                                values=sanitized_dataframe,
                                headers=df_cols)
    else:
        target_table = sc.Table(table_id, sanitized_dataframe, headers=df_cols)
    target_table = syn.store(target_table, **kwargs)
    return target_table
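A usage sketch for the helper above, with a hypothetical project and DataFrame; per the docstring, pass either table_id or both parent_id and table_name:

import pandas as pd
import synapseclient as sc

syn = sc.login()
df = pd.DataFrame({"guid": ["a1", "b2"], "score": [1, 2]})
df_cols = sc.as_table_columns(df)
stored = _store_dataframe_to_table(syn, df, df_cols,
                                   parent_id="syn00000000",  # hypothetical
                                   table_name="scores")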
def test_export_multiple_tables_to_preexisting_update(syn, new_project, tables,
                                                      sample_table):
    source_table = tables["schema"][0]["id"]
    source_table_2 = tables["schema"][1]["id"]
    schema = sc.Schema(name=tables["schema"][0]["name"],
                       columns=tables["columns"][0],
                       parent=new_project["id"])
    incomplete_table = deepcopy(sample_table.iloc[:len(sample_table) // 2])
    table = syn.store(sc.Table(schema, incomplete_table))
    schema_2 = sc.Schema(name=tables["schema"][1]["name"],
                         columns=tables["columns"][1],
                         parent=new_project["id"])
    incomplete_table_2 = deepcopy(sample_table.iloc[:len(sample_table) // 3])
    table_2 = syn.store(sc.Table(schema_2, incomplete_table_2))
    exported_table = export_tables(syn,
                                   table_mapping={
                                       source_table: table.tableId,
                                       source_table_2: table_2.tableId
                                   },
                                   update=True)
    updated_table = syn.tableQuery("select * from {}".format(table.tableId))
    updated_table = updated_table.asDataFrame().reset_index(drop=True)
    updated_table_no_fh = updated_table.drop("raw_data", axis=1)
    update = exported_table[source_table][1]
    correct_table_no_fh = incomplete_table.append(update,
                                                  ignore_index=True,
                                                  sort=False)
    correct_table_no_fh = correct_table_no_fh.drop(
        "raw_data", axis=1).reset_index(drop=True)
    updated_table_2 = syn.tableQuery("select * from {}".format(
        table_2.tableId))
    updated_table_2 = updated_table_2.asDataFrame().reset_index(drop=True)
    updated_table_2_no_fh = updated_table_2.drop("raw_data", axis=1)
    update_2 = exported_table[source_table_2][1]
    correct_table_no_fh_2 = incomplete_table_2.append(update_2,
                                                      ignore_index=True,
                                                      sort=False)
    correct_table_no_fh_2 = correct_table_no_fh_2.drop(
        "raw_data", axis=1).reset_index(drop=True)
    print("returned results \n", updated_table_no_fh)
    print("correct result \n", correct_table_no_fh)
    assert (updated_table_no_fh.equals(correct_table_no_fh)
            and updated_table_2_no_fh.equals(correct_table_no_fh_2))
def table(syn, parent, sample_table, schema=None):
    if schema is None:
        t = synapseclient.table.build_table(
                name=str(uuid.uuid4()),
                parent=parent,
                values=sample_table)
    else:
        t = synapseclient.Table(schema=schema, values=sample_table)
    table = syn.store(t)
    return table.schema
Example #18
def append_queue_mapping(syn, main_queueid, internal_queueid):
    """Append to queue mapping if mapping doesn't exist"""
    queue_mapping_table = syn.tableQuery(
        f"select * from syn22077175 where main = '{main_queueid}'")
    queue_mappingdf = queue_mapping_table.asDataFrame()
    if queue_mappingdf.empty:
        table = synapseclient.Table(
            "syn22077175",
            [[str(main_queueid), str(internal_queueid)]])
        syn.store(table)
Example #19
 def createTemporaryGenieId(self, x, tempIdMapping, patientIdCol):
     """
     Create a temporary GENIE id for patients that don't have one.
     """
     uniqId = x['record_id_patient_id'] + x['redcap_data_access_group']
     if sum(tempIdMapping['uniqueId'] == uniqId) == 0:
         tempId = 'GENIE-%s-%s' % (
             x['redcap_data_access_group'],
             ''.join(random.choice(string.ascii_uppercase + string.digits)
                     for _ in range(10)))
         self.syn.store(synapseclient.Table(
             self.syn.get("syn10164044"), [[uniqId, tempId]]))
         return tempId
     else:
         return tempIdMapping[
             tempIdMapping['uniqueId'] == uniqId]['temporaryId'].values[0]
Example #20
def updateEnrollmentTable(user):
    searchFile = user.searchQueries_file_synid if user.searchQueries_file_synid is not None else 'NA'
    locationFile = user.location_file_synid if user.location_file_synid is not None else 'NA'
    new_enrollment_row = [
        user.afsID, user.extID,
        str(user.consentTime.isoformat()), searchFile, locationFile,
        user.status
    ]

    syn.store(
        synapseclient.Table(ENROLLMENT_SYNTABLE_SCHEMA, [new_enrollment_row]))
Example #21
def setStates(syn, state, ids, table_id, primary_col, assignee_col, state_col, debug=False, display=False):
    table = syn.get(table_id)
    if table.entityType != "org.sagebionetworks.repo.model.table.TableEntity":
        return
    username = syn.getUserProfile()['userName']
    query = 'select * from %s where "%s" = \'%s\'' % (table.id, assignee_col, username)
    results = syn.tableQuery(query)
    df = results.asDataFrame()
    for row in df.index:
        if df.loc[row,primary_col] in ids:
            df.loc[row,state_col] = state
    syn.store(synapseclient.Table(table, df, etag=results.etag))
def _update_syn_table(syn_table, update_table, update_key):
    """
    Update a Synapse table object based on a local dataframe, using
    a specified key to match rows.
    """
    syn_df = syn_table.asDataFrame()
    update_df = update_table.asDataFrame().set_index(update_key, drop=False)
    for idx, row in syn_df.iterrows():
        update_idx = syn_df.loc[idx, update_key]
        if not syn_df.loc[idx].equals(update_df.loc[update_idx]):
            syn_df.loc[idx] = update_df.loc[update_idx]
    syn_df = syn_df.append(update_df[~update_df[update_key].isin(syn_df[update_key])])
    return synapseclient.Table(syn_table.schema, syn_df)
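A minimal upsert sketch using the helper above: rows whose key already exists are overwritten in place, and unseen keys are appended (hypothetical table ID and data):

import pandas as pd
import synapseclient

syn = synapseclient.login()
schema = syn.get("syn00000000")  # hypothetical table
current_table = syn.tableQuery("select * from syn00000000")
current_table.schema = schema
new_rows = pd.DataFrame({"objectId": [1, 3], "status": ["SCORED", "INVALID"]})
update_table = synapseclient.Table(schema, new_rows)
syn.store(_update_syn_table(current_table, update_table, "objectId"))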
def update_team_stats_table(syn, project_id, team_stats_df):
    """
    Push the latest version of the team stats table to Synapse.
    """
    try:
        print("Searching for existing 'TeamStats' table...")
        schema_id = [t for t in syn.getChildren(project_id, includeTypes=['table'])
                     if t['name'] == 'TeamStats'][0]['id']
        schema = syn.get(schema_id)
        team_stats_table = syn.tableQuery('select * from {}'.format(schema_id))
        team_stats_table.schema = schema
        print("Updating 'TeamStats' table...")
        update_table = synapseclient.Table(schema, team_stats_df)
        team_stats_table = _update_syn_table(team_stats_table, update_table, 'team')
    except IndexError:
        print("Creating 'TeamStats' table...")
        project = syn.get(project_id)
        cols = synapseclient.as_table_columns(team_stats_df)
        schema = synapseclient.Schema(name='TeamStats', columns=cols, parent=project)
        team_stats_table = synapseclient.Table(schema, team_stats_df)
    print("Storing 'TeamStats' table...")
    team_stats_table = syn.store(team_stats_table)
Example #24
 def storeProcessedMaf(self,
                       filePath,
                       mafSynId,
                       centerMafSynId,
                       isNarrow=False):
     logger.info('STORING %s' % filePath)
     database = self.syn.get(mafSynId)
     if isNarrow:
         self.syn.store(
             synapseclient.Table(database.id, filePath, separator="\t"))
     else:
         self.syn.store(
             synapseclient.File(filePath, parentId=centerMafSynId))
     return filePath
Example #25
 def storeProcessedMaf(self, filePath, mafSynId, centerMafSynId, isNarrow=False):
     logger.info('STORING %s' % filePath)
     database = self.syn.get(mafSynId)
     if isNarrow:
         try:
             update_table = synapseclient.Table(
                 database.id, filePath, separator="\t")
             self.syn.store(update_table)
         except SynapseTimeoutError:
             # This error occurs because of waiting for table to index.
             # Don't worry about this.
             pass
     else:
         self.syn.store(synapseclient.File(filePath, parentId=centerMafSynId))
     return filePath
def test_export_one_table_to_preexisting_no_update(syn, new_project, tables,
                                                   sample_table):
    source_table = tables["schema"][0]["id"]
    schema = sc.Schema(name=tables["schema"][0]["name"],
                       columns=tables["columns"][0],
                       parent=new_project["id"])
    incomplete_table = deepcopy(sample_table.iloc[:len(sample_table) // 2])
    table = syn.store(sc.Table(schema, incomplete_table))
    exported_table = export_tables(syn,
                                   table_mapping={source_table: table.tableId},
                                   update=False)
    updated_table = syn.tableQuery("select * from {}".format(table.tableId))
    updated_table = updated_table.asDataFrame().reset_index(drop=True)
    updated_table_no_fh = updated_table.drop("raw_data", axis=1)
    comparison_table = sample_table.drop("raw_data",
                                         axis=1).reset_index(drop=True)
    print(updated_table_no_fh)
    print(comparison_table)
    pd.testing.assert_frame_equal(updated_table_no_fh, comparison_table)
Example #27
def create_and_archive_maf_database(syn, database_synid_mappingdf):
    '''
    Creates a new MAF database and archives the old database in the staging
    site

    Args:
        syn: Synapse object
        database_synid_mappingdf: Database to synapse id mapping dataframe

    Return:
        Edited database to synapse id mapping dataframe
    '''
    maf_database_synid = process_functions.getDatabaseSynId(
        syn,
        "vcf2maf",
        project_id=None,
        databaseToSynIdMappingDf=database_synid_mappingdf)
    maf_database_ent = syn.get(maf_database_synid)
    maf_columns = list(syn.getTableColumns(maf_database_synid))
    schema = synapseclient.Schema(
        name='Narrow MAF {current_time} Database'.format(
            current_time=time.time()),
        columns=maf_columns,
        parent=process_functions.getDatabaseSynId(
            syn, "main", databaseToSynIdMappingDf=database_synid_mappingdf))
    schema.primaryKey = maf_database_ent.primaryKey
    new_maf_database = syn.store(schema)
    # Store the new database synid in the mapping
    database_synid_mappingdf.loc[
        database_synid_mappingdf['Database'] == 'vcf2maf',
        'Id'] = new_maf_database.id

    vcf2maf_mappingdf = database_synid_mappingdf[
        database_synid_mappingdf['Database'] == 'vcf2maf']
    # vcf2maf_mappingdf['Id'][0] = newMafDb.id
    syn.store(synapseclient.Table("syn10967259", vcf2maf_mappingdf))
    # Move and archive old mafdatabase (This is the staging synid)
    maf_database_ent.parentId = "syn7208886"
    maf_database_ent.name = "ARCHIVED " + maf_database_ent.name
    syn.store(maf_database_ent)
    # maf_database_synid = new_maf_database.id
    # Remove can download permissions from project GENIE team
    syn.setPermissions(new_maf_database.id, 3326313, [])
    return database_synid_mappingdf
def _update_database_mapping(syn, database_synid_mappingdf,
                             database_mapping_synid, fileformat, new_tableid):
    """Updates database to synapse id mapping table
    Args:
        syn: Synapse object
        database_synid_mappingdf: Database to synapse id mapping dataframe
        database_mapping_synid: Database to synapse id table id
        fileformat: File format updated
        new_tableid: New file format table id
    Returns:
        Updated Table object
    """
    fileformat_ind = database_synid_mappingdf['Database'] == fileformat
    # Store the new table synid in the mapping
    database_synid_mappingdf.loc[fileformat_ind, 'Id'] = new_tableid
    # Only update the one row
    to_update_row = database_synid_mappingdf[fileformat_ind]

    syn.store(synapseclient.Table(database_mapping_synid, to_update_row))
    return database_synid_mappingdf
Example #29
def create_team_wikis(syn, synid, templateid, tracker_table_synid):
    """
    Function that creates wiki pages from a template by looking at teams that
    are registered for a challenge.  The teams that have a wiki made for them
    Are stored into a trackerTable that has columns wikiSynId, and teamId

    Args:
        synId: Synapse id of challenge project
        templateId:  Synapse id of the template
        trackerTableSynId: Synapse id of Table that tracks if wiki pages
                           have been made per team
    """

    challenge_ent = syn.get(synid)
    challenge_obj = utils.get_challenge(challenge_ent)
    registered_teams = syn._GET_paginated("/challenge/{}/challengeTeam".format(
        challenge_obj['id']))
    for i in registered_teams:
        submitted_teams = syn.tableQuery(
            "SELECT * FROM {} where teamId = '{}'".format(
                tracker_table_synid, i['teamId']))
        if len(submitted_teams.asDataFrame()) == 0:
            team = syn.getTeam(i['teamId'])
            # The project name is the challenge project name and team name
            project = syn.store(
                synapseclient.Project("{} {}".format(challenge_ent.name,
                                                     team.name)))
            # Give admin access to the team
            syn.setPermissions(project,
                               i['teamId'],
                               accessType=[
                                   'DELETE', 'CHANGE_SETTINGS', 'MODERATE',
                                   'CREATE', 'READ', 'DOWNLOAD', 'UPDATE',
                                   'CHANGE_PERMISSIONS'
                               ])
            wiki_copy = synapseutils.copy(syn, templateid, project.id)
            # syn.sendMessage(i[])
            # Store copied synId to tracking table
            tracking_table = synapseclient.Table(
                tracker_table_synid, [[wiki_copy[templateid], i['teamId']]])
            syn.store(tracking_table)
Example #30
def getValues(syn, value_col, table_id, primary_col, orSet=None, **kwds):
    table = syn.get(table_id)
    if table.entityType != "org.sagebionetworks.repo.model.table.TableEntity":
        return
    results = syn.tableQuery('select * from %s' % (table.id))
    df = results.asDataFrame()
    changed = False
    out = {}
    for row_name in df.index:
        row = df.loc[row_name]
        key = row[primary_col]
        value = row[value_col]
        if orSet is not None:
            if isinstance(value, float) and isnan(value):
                value = orSet(key)
                df.loc[row_name,value_col] = value
                changed = True
        out[key] = value
    if changed:
        syn.store(synapseclient.Table(table, df, etag=results.etag))
    return out