Code Example #1
def publish_labbook(endpoint, variables) -> float:
    """Run the publish-labbook mutation, then poll until the job completes.

    Returns the elapsed publish time in seconds, or None if the publish failed.
    """
    v = variables
    v.update({'setPublic': False})  # publish as a private project
    d = run_query(endpoint, 'Publish Labbook', publishLabbookQuery, v)
    job_key = d['data']['publishLabbook']['jobKey']  # unused below; the job is matched by its metadata 'method'

    waiting = True
    t0 = time.time()
    while waiting:
        d = run_query(endpoint, 'Query Publish Status', labbookQuery,
                      variables)
        bgjobs = d['data']['labbook']['backgroundJobs']
        for j in bgjobs:
            md = json.loads(j['jobMetadata'])
            if md.get('method') == 'publish_labbook':
                if j['status'] == 'finished':
                    tfin = time.time()
                    pub_time = tfin - t0
                    print(f'Published project {d["data"]["labbook"]["owner"]}'
                          f'/{d["data"]["labbook"]["name"]} '
                          f'(size {d["data"]["labbook"]["sizeBytes"]}b) '
                          f'in {pub_time:.2f}s')
                    waiting = False
                    return pub_time
                elif j['status'] == 'failed':
                    # Don't report a failed publish as a success
                    print(f'FAIL Publish after {time.time()-t0:.2f}s')
                    waiting = False
                    break
        time.sleep(1)
Code Example #2
def sync_labbook(endpoint, variables):
    """Run the sync-labbook mutation, then poll until the job completes.

    Returns the elapsed sync time in seconds, or None if the sync failed.
    """
    d = run_query(endpoint, 'Sync Labbook', syncLabbookQuery, variables)
    job_key = d['data']['syncLabbook']['jobKey']

    waiting = True
    t0 = time.time()
    while waiting:
        d = run_query(endpoint, 'Query Sync Status', labbookQuery, variables)
        bgjobs = [
            n for n in d['data']['labbook']['backgroundJobs']
            if n['jobKey'] == job_key
        ]
        for j in bgjobs:
            md = json.loads(j['jobMetadata'])
            if md.get('method') == 'sync_labbook':
                if j['status'] == 'finished':
                    tfin = time.time()
                    sync_time = tfin - t0
                    print(f'Synced project {d["data"]["labbook"]["owner"]}'
                          f'/{d["data"]["labbook"]["name"]} '
                          f'(size {d["data"]["labbook"]["sizeBytes"]}b) '
                          f'in {sync_time:.2f}s')
                    waiting = False
                    return sync_time
                elif j['status'] == 'failed':
                    print(f'FAIL Sync after {time.time()-t0:.2f}s')
                    pprint.pprint(md)
                    pprint.pprint(j)
                    waiting = False
                    break
        time.sleep(1)
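Both helpers poll the project's backgroundJobs list in the same way, differing only in the job-metadata method they look for. A shared helper along the following lines could factor that loop out (a sketch only; wait_for_job is not part of the original scripts, and it reuses run_query and labbookQuery from them):

def wait_for_job(endpoint, variables, method, poll_interval=1.0):
    # Poll backgroundJobs until a job whose metadata 'method' matches reports
    # 'finished' or 'failed'; return (status, elapsed seconds).
    t0 = time.time()
    while True:
        d = run_query(endpoint, 'Query Job Status', labbookQuery, variables)
        for j in d['data']['labbook']['backgroundJobs']:
            md = json.loads(j['jobMetadata'])
            if md.get('method') == method and j['status'] in ('finished', 'failed'):
                return j['status'], time.time() - t0
        time.sleep(poll_interval)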
Code Example #3
deleteRemoteLabbook = '''
    mutation DeleteRemote($owner: String!, $lbname: String!) {
        deleteRemoteLabbook(input: {
            owner: $owner,
            labbookName: $lbname,
            confirm: true
        }) {
            success
        }
    }
'''


if __name__ == '__main__':
    container_id = container_under_test()
    d = run_query(endpt_post, 'Get Local Labbooks', localLabbookQuery, {})
    local_lbs = d['data']['labbookList']['localLabbooks']['edges']
    for llb in local_lbs:
        if 'cli-' in llb['node']['name']:
            d = run_query(endpt_post, f'Delete Local {llb["node"]["owner"]}/{llb["node"]["name"]}',
                    deleteLocalLabbook,
                    {'owner': llb["node"]["owner"],
                     'lbname': llb["node"]["name"]})
    
    d = run_query(endpt_post, 'Get Remote Labbooks', remoteLabbookQuery, {})
    remote_lbs = d['data']['labbookList']['remoteLabbooks']['edges']
    for rlb in remote_lbs:
        m = re.match(r'^cli-[\da-f]{4}$', rlb['node']['name'])
        if m:
            print(f'Deleting remote project {m.group(0)}')
            d = run_query(endpt_post, f'Delete Remote {rlb["node"]["owner"]}/{rlb["node"]["name"]}',
                    deleteRemoteLabbook,
                    {'owner': rlb["node"]["owner"],
                     'lbname': rlb["node"]["name"]})
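The deleteLocalLabbook mutation string referenced above is not included in this excerpt. Judging from the remote variant and the variables passed to it, it presumably has a shape like the sketch below; the deleteLabbook field name is a guess and should be checked against the server schema.

# Hypothetical reconstruction -- the real string lives elsewhere in the source file.
deleteLocalLabbook = '''
    mutation DeleteLocal($owner: String!, $lbname: String!) {
        deleteLabbook(input: {
            owner: $owner,
            labbookName: $lbname,
            confirm: true
        }) {
            success
        }
    }
'''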
Code Example #4
    $owner: String!,
    $labbookName: String!
) {
    labbook(owner: $owner, name: $labbookName) {
        activeBranchName
        creationDateUtc
        isRepoClean
        schemaVersion
        isDeprecated
        environment {
            imageStatus
            containerStatus
        }
    }
}
'''

if __name__ == '__main__':
    owner, lbname = sys.argv[1:3]
    endpoint = endpt_post
    print(owner, lbname)

    resp = run_query(endpoint,
                     'Basic Query Labbook',
                     migrate_query,
                     variables={
                         'owner': owner,
                         'labbookName': lbname
                     })
    pprint.pprint(resp)
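For illustration only (this is not in the original script), the fields returned by that status query could drive the decision to run the schema migration shown in the next example:

lb = resp['data']['labbook']  # hypothetical follow-up to the query above
if lb['isDeprecated']:
    print(f"{owner}/{lbname} is on schema v{lb['schemaVersion']} and should be migrated")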
Code Example #5
File: migrate.py  Project: jjwatts/gigantum-client
import sys
import pprint

from misc import (gqlquery as run_query, endpt_post)

migrate_query = '''
mutation m {{
    migrateLabbookSchema(input: {{
        owner: "{}",
        labbookName: "{}"
    }}) {{
        labbook {{
            activeBranchName
            isRepoClean
            schemaVersion
        }}
    }}
}}
'''

if __name__ == '__main__':
    owner, lbname = sys.argv[1:3]
    endpoint = endpt_post
    print(owner, lbname)
    fq = migrate_query.format(owner, lbname)
    print(fq)

    resp = run_query(endpoint, 'Migrate Labbook', fq, variables=None)
    pprint.pprint(resp)
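Unlike the status query in the previous example, this script interpolates owner and labbookName with str.format(), which is why every literal brace in the GraphQL body is doubled. An equivalent sketch that passes GraphQL variables instead (assuming run_query forwards the variables dict as in the other examples) would look like this:

# Variable-based variant (sketch); literal braces no longer need escaping.
migrate_query_vars = '''
mutation m($owner: String!, $labbookName: String!) {
    migrateLabbookSchema(input: {owner: $owner, labbookName: $labbookName}) {
        labbook {
            activeBranchName
            isRepoClean
            schemaVersion
        }
    }
}
'''
resp = run_query(endpt_post, 'Migrate Labbook', migrate_query_vars,
                 variables={'owner': owner, 'labbookName': lbname})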
Code Example #6
            f'[{failt}] {desc} (max {time_allowed:.2f}s; took {time_executed:.2f}s)'
        )
    else:
        passt = color('PASS', 'green')
        print(
            f'[{passt}] {desc} (max {time_allowed:.2f}s; took {time_executed:.2f}s)'
        )


if __name__ == '__main__':
    lbname = f'cli-{uuid.uuid4().hex[:4]}'
    print(f'Using labbook name: {lbname}')

    endpoint = endpt_post
    container_id = container_under_test()
    run_query(endpoint, 'Create Labbook', createLabbookQuery, {'name': lbname})

    print(f'## Publishing {lbname} (bare, brand-new Project)')
    t = publish_labbook(endpoint,
                        variables={
                            'name': lbname,
                            'owner': USERNAME
                        })
    check_limit("Publish bare", 5.0, t)

    print(f'## Syncing {lbname} (no upstream or local changes)')
    t = sync_labbook(endpoint, variables={'name': lbname, 'owner': USERNAME})
    check_limit("Sync bare", 5.0, t)

    print(f'## Syncing {lbname} (1 MB file in code and input)')
    drop_file(container_id, make_random_file(1000000), USERNAME, USERNAME,