Example #1
def list_workspaces():
    try:
        return sorted([
            {
                'accessLevel': workspace['accessLevel'],
                'public': workspace['public'],
                'namespace': workspace['workspace']['namespace'],
                'name': workspace['workspace']['name'],
                'bucket': workspace['workspace']['bucketName'],
                'id': workspace['workspace']['workspaceId']
            } for workspace in fc.list_workspaces().json()
        ], key=lambda x:(x['namespace'], x['name'])), 200
    except Exception:
        print(traceback.format_exc())
        all_workspaces = readvar(current_app.config, 'storage', 'cache', 'all_workspaces')
        if all_workspaces is not None:
            workspace_data = []
            for ns, ws in all_workspaces:
                workspace = get_workspace_object(ns, ws).get_workspace_metadata()
                workspace_data.append({
                    'accessLevel': workspace['accessLevel'],
                    'public': False,
                    'namespace': ns,
                    'name': ws,
                    'bucket': workspace['workspace']['bucketName'],
                    'id': workspace['workspace']['workspaceId']
                })
            return sorted(workspace_data, key=lambda x:(x['namespace'], x['name'])), 200
    return {
        'failed': True,
        'reason': "The FireCloud API is currently offline, and there are no workspaces in the cache"
    }, 500
Example #2
def get_projects(namespaces=None, project_pattern=None):
    """Filter terra workspaces by namespaces and project_pattern.

    Args:
        namespaces ([str]): Optional. List of workspace `namespace` values to match, e.g. 'anvil-datastorage'.
        project_pattern (str): Optional. Regexp matched against the workspace `name`, e.g. 'AnVIL_CCDG.*'.

    Returns:
        list: workspace entries, each with keys
            ['accessLevel', 'public', 'workspace', 'workspaceSubmissionStats']

    """
    workspaces = FAPI.list_workspaces()
    workspaces = workspaces.json()

    if namespaces:
        workspaces = [
            AttrDict(w) for w in workspaces
            if w['workspace']['namespace'] in namespaces
        ]

    if project_pattern:
        workspaces = [
            AttrDict(w) for w in workspaces
            if re.match(project_pattern, w['workspace']['name'], re.IGNORECASE)
        ]

    # normalize fields
    for w in workspaces:
        if 'project_files' not in w.workspace:
            w.workspace.project_files = []
    return workspaces
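A minimal usage sketch for get_projects; the namespace and pattern below are the placeholder values from the docstring, and FireCloud/Terra credentials are assumed to be configured for FAPI:

# Hypothetical filter values; substitute a namespace and name pattern you have access to.
projects = get_projects(namespaces=['anvil-datastorage'],
                        project_pattern='AnVIL_CCDG.*')
for p in projects:
    # AttrDict allows attribute-style access to the workspace JSON.
    print(p.workspace.namespace, p.workspace.name, p.accessLevel)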
Example #3
 def test_list_workspaces(self):
     """Test list_workspaces()."""
     r = fapi.list_workspaces()
     print(r.status_code, r.content)
     self.assertEqual(r.status_code, 200)
     workspace_names = [w['workspace']['name'] for w in r.json()]
     self.assertIn(self.workspace, workspace_names)
Example #4
def get_workspaces(namespace):
    # Based on the fiss list_spaces function, but only workspace names in the given namespace are yielded

    workspaces = api.list_workspaces()
    api._check_response_code(workspaces, 200)

    for space in workspaces.json():
        if namespace == space['workspace']['namespace']:
            yield space['workspace']['name']
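get_workspaces is a generator, so it can be consumed lazily; a small sketch (the namespace string is a placeholder):

# Hypothetical billing project / namespace.
for name in get_workspaces('my-terra-namespace'):
    print(name)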
Example #5
 def __init__(self):
   self.user = os.getenv('OWNER_EMAIL')
   self.terra_workspaces = fapi.list_workspaces().json()
   if self.user.endswith(self.AOU_DOMAIN):
     aou_api = os.getenv('RW_API_BASE_URL')
     if not aou_api:
       aou_api = self.AOU_PROD_API
     # Use the All of Us API to get the human-readable workspace names. For All of Us workspaces,
     # the workspace names in the Terra workspace metadata are actually the AoU workspace IDs.
     aou_response = get_ipython().getoutput(f'''curl -H "Content-Type: application/json" \
         -H "Authorization: Bearer $(gcloud auth print-access-token)" \
         "{aou_api}" 2>/dev/null | jq .''')
     self.aou_workspaces = json.loads(''.join(aou_response))['items']
   else:
     self.aou_workspaces = None
Example #6
def export_workspaces(project, get_cost):
    # call list workspaces
    response = fapi.list_workspaces(
        fields=
        "workspace.namespace,workspace.name,workspace.createdBy,workspace.createdDate"
    )
    fapi._check_response_code(response, 200)
    all_workspaces = response.json()

    # limit the workspaces to the desired project
    workspaces = [
        ws['workspace'] for ws in all_workspaces
        if ws['workspace']['namespace'] == project
    ]

    print(f"Found {len(workspaces)} workspaces in Terra project {project}")

    if get_cost:
        print(f"Retrieving workspace bucket cost estimates")
        ws_costs = {}
        for ws in workspaces:
            ws_costs[ws['name']] = get_storage_cost_estimate(
                ws['name'], project, get_access_token())

    # write to csv
    csv_name = f"Terra_workspaces_{project}.csv"
    with open(csv_name, "w") as csvout:
        if get_cost:
            # add header with attribute values to csv
            csvout.write(
                "workspace,created by,storage cost estimate,date created,link\n"
            )
            for ws in workspaces:
                name_for_link = ws['name'].replace(" ", "%20")
                csvout.write(
                    f"{ws['name']},{ws['createdBy']},{ws_costs[ws['name']]},{ws['createdDate']},https://app.terra.bio/#workspaces/{project}/{name_for_link}\n"
                )
        else:
            # add header with attribute values to csv
            csvout.write("workspace,created by,date created,link\n")
            for ws in workspaces:
                name_for_link = ws['name'].replace(" ", "%20")
                csvout.write(
                    f"{ws['name']},{ws['createdBy']},{ws['createdDate']},https://app.terra.bio/#workspaces/{project}/{name_for_link}\n"
                )
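export_workspaces above assembles each CSV row by hand with f-strings; a sketch of the same output using the standard-library csv module, which quotes commas inside values automatically (write_workspace_csv is a hypothetical helper name, taking the workspaces list and optional cost dict already built above):

import csv

def write_workspace_csv(workspaces, project, ws_costs=None):
    # Same columns and link format as export_workspaces, built via csv.writer.
    csv_name = f"Terra_workspaces_{project}.csv"
    with open(csv_name, "w", newline="") as csvout:
        writer = csv.writer(csvout)
        header = ["workspace", "created by", "date created", "link"]
        if ws_costs is not None:
            header.insert(2, "storage cost estimate")
        writer.writerow(header)
        for ws in workspaces:
            name_for_link = ws['name'].replace(" ", "%20")
            link = f"https://app.terra.bio/#workspaces/{project}/{name_for_link}"
            row = [ws['name'], ws['createdBy'], ws['createdDate'], link]
            if ws_costs is not None:
                row.insert(2, ws_costs[ws['name']])
            writer.writerow(row)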
Example #7
def create_workspaces_attributes_csv(workspace_project, verbose=False):
    """Get the attributes of every workspace in a project and write them to a CSV."""
    # Assigning dictionary containing all workspaces
    dict_of_all_workspaces = {}

    # Assigning list that will combine keys from all attributes
    csv_columns = []

    # Getting List of workspaces json
    response = fapi.list_workspaces(
        fields="workspace.name, workspace.namespace, workspace.attributes")
    workspaces = response.json()

    # Looping through all workspaces
    for workspace_json in workspaces:
        # Getting attributes from workspaces in a certain project
        if workspace_json["workspace"]["namespace"] == workspace_project:
            workspace_name = workspace_json["workspace"]["name"]
            attributes = get_attributes(workspace_json, workspace_name)
            dict_of_all_workspaces[workspace_name] = attributes
            # Adding keys to make a master attributes list
            for key in attributes.keys():
                if key not in csv_columns:
                    csv_columns.append(key)

    # Looping through all attributes_values
    for attributes_values in dict_of_all_workspaces.values():
        attributes_list = list(attributes_values.keys())
        # Finding which keys are not in the combine/master keys list
        extra_columns = [
            i for i in csv_columns + attributes_list
            if i not in csv_columns or i not in attributes_list
        ]
        # Looping through extra Keys and adding them as None Value to the attributes List
        for extra_key in extra_columns:
            attributes_values[extra_key] = None

    # Assigning csv variable name
    csv_filename = "attributes_for_AnVIL_workspaces.csv"

    # Creating csv
    create_csv(dict_of_all_workspaces, csv_filename, csv_columns)
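create_csv is not shown in this snippet; a minimal sketch of what such a helper could look like using csv.DictWriter (the signature is inferred from the call above and is an assumption):

import csv

def create_csv(dict_of_all_workspaces, csv_filename, csv_columns):
    # Assumed helper: one row per workspace, with a leading column for the
    # workspace name followed by the merged attribute columns.
    with open(csv_filename, "w", newline="") as f:
        writer = csv.DictWriter(f, fieldnames=["workspace"] + csv_columns)
        writer.writeheader()
        for workspace_name, attributes in dict_of_all_workspaces.items():
            row = {"workspace": workspace_name}
            row.update(attributes)
            writer.writerow(row)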
Example #8
def main(argv):
    parser = argparse.ArgumentParser(
        description=
        'Export workspace storage cost estimates associated with the user to TSV'
    )
    parser.add_argument('--output', help='Output TSV path', required=True)
    parser.add_argument('--access',
                        help='Workspace access levels',
                        choices=['owner', 'reader', 'writer'],
                        action='append')
    args = parser.parse_args(argv)

    workspaces = fapi.list_workspaces().json()

    access = args.access
    if access is None:
        access = []
    output = args.output
    access_filter = set()
    if 'reader' in access:
        access_filter.add('READER')
    if 'writer' in access:
        access_filter.add('WRITER')
    if 'owner' in access or len(access) == 0:
        access_filter.add('PROJECT_OWNER')

    with open(output, 'wt') as out:
        out.write('namespace\tname\testimate\n')
        for w in workspaces:
            if w['accessLevel'] in access_filter:
                namespace = w['workspace']['namespace']
                name = w['workspace']['name']

                headers = fapi._fiss_agent_header()
                root_url = fapi.fcconfig.root_url
                r = fapi.__SESSION.get(urljoin(
                    root_url,
                    f'workspaces/{namespace}/{name}/storageCostEstimate'),
                                       headers=headers).json()
                estimate = r['estimate']
                out.write(f'{namespace}\t{name}\t{estimate}\n')
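A typical entry point for the script above (a sketch; the sys import is assumed to sit alongside the snippet's other imports):

import sys

if __name__ == '__main__':
    main(sys.argv[1:])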
Example #9
import firecloud.api as FAPI
import logging
import re
from attrdict import AttrDict

from dotenv import load_dotenv
load_dotenv()

print(FAPI.list_workspaces().json())
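A small variation that pretty-prints the response instead of dumping the raw list (json is the standard-library module):

import json
print(json.dumps(FAPI.list_workspaces().json(), indent=2))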
Example #10
import firecloud.api as fapi
import re
from datetime import date


def sizeof_fmt(num, suffix='B'):
    for unit in ['', 'Ki', 'Mi', 'Gi', 'Ti', 'Pi', 'Ei', 'Zi']:
        if abs(num) < 1024.0:
            return "%3.1f%s%s" % (num, unit, suffix)
        num /= 1024.0
    return "%.1f%s%s" % (num, 'Yi', suffix)


r = fapi.list_workspaces()
workspaceList = []

for workspace in r.json():
    try:
        # Keep only workspaces whose last successful submission was on or after 2018-08-30
        last_success = workspace['workspaceSubmissionStats']['lastSuccessDate']
        year, month, day = (int(part) for part in last_success.split("T")[0].split("-"))
        if date(year, month, day) < date(2018, 8, 30):
            continue
    except KeyError:
        continue
    if (not workspace["public"]
            and workspace["workspace"]["namespace"] == "broadtagteam"
            and workspace["accessLevel"] in ("OWNER", "PROJECT_OWNER")):
        workspaceList.append(workspace["workspace"]["name"])
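sizeof_fmt is defined but not used in this excerpt; a quick sketch of how it formats byte counts:

# Human-readable formatting of a byte count.
print(sizeof_fmt(123456789))  # -> 117.7MiB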