class NewtIntegrationTest(BaseIntegrationTest):
    def __init__(self, name, girder_url, girder_user, girder_password, machine, job_timeout=60 * 5):
        super(NewtIntegrationTest, self).__init__(name, girder_url, girder_user, girder_password, job_timeout)
        self._cluster_id = None
        self._machine = machine

    def setUp(self):

        # First authenticate with NEWT
        self._session = Session()
        r = self._session.post(
            "https://newt.nersc.gov/newt/auth", {"username": self._girder_user, "password": self._girder_password}
        )

        self.assertEqual(r.status_code, 200)
        print(r.json())
        self._newt_session_id = r.json()["newt_sessionid"]

        # Now authenticate with Girder using the session id
        url = "%s/api/v1/newt/authenticate/%s" % (self._girder_url, self._newt_session_id)
        r = self._session.put(url)
        self.assertEqual(r.status_code, 200)

        url = "%s/api/v1/newt/authenticate/%s" % (self._girder_url, self._newt_session_id)
        r = self._session.put(url)
        self.assertEqual(r.status_code, 200)

        url = "%s/api/v1" % self._girder_url
        self._client = GirderClient(apiUrl=url)
        self._client.token = self._session.cookies["girderToken"]

        user = self._client.get("user/me")
        self._user_id = user["_id"]
        r = self._client.listFolder(self._user_id, "user", name="Private")
        r = list(r)
        self.assertEqual(len(r), 1)
        self._private_folder_id = r[0]["_id"]

    def tearDown(self):
        super(NewtIntegrationTest, self).tearDown()
        if self._cluster_id:
            try:
                url = "clusters/%s" % self._cluster_id
                self._client.delete(url)
            except Exception:
                traceback.print_exc()

    def create_cluster(self):
        body = {"config": {"host": self._machine}, "name": "NewtIntegrationTest", "type": "newt"}

        r = self._client.post("clusters", data=json.dumps(body))
        self._cluster_id = r["_id"]

        # Now test the connection
        r = self._client.put("clusters/%s/start" % self._cluster_id)
        sleeps = 0
        while True:
            time.sleep(1)
            r = self._client.get("clusters/%s/status" % self._cluster_id)

            if r["status"] == "running":
                break
            elif r["status"] == "error":
                r = self._client.get("clusters/%s/log" % self._cluster_id)
                self.fail(str(r))

            if sleeps > 9:
                self.fail("Cluster never moved into running state")
            sleeps += 1

    def assert_output(self):
        r = self._client.listItem(self._output_folder_id)
        self.assertEqual(len(r), 4)

        stdout_item = None
        for i in r:
            if i["name"].startswith("CumulusIntegrationTestJob-%s.o" % self._job_id):
                stdout_item = i
                break

        self.assertIsNotNone(stdout_item)
        r = self._client.get("item/%s/files" % stdout_item["_id"])
        self.assertEqual(len(r), 1)

        url = "%s/api/v1/file/%s/download" % (self._girder_url, r[0]["_id"])
        r = self._session.get(url)
        self.assertEqual(r.content, self._data)

    def test(self):
        try:
            self.create_cluster()
            self.create_script()
            self.create_input()
            self.create_output_folder()
            self.create_job()
            self.submit_job(timeout=self._job_timeout)
            self.assert_output()
        except HttpError as error:
            self.fail(error.responseText)
Example #2
def test_sanity_checks():
    gc = GirderClient(apiUrl=source_api_root)
    resp = gc.get('/', jsonResp=False)
    assert resp.status_code == 200
    assert resp.headers['Content-Type'] == 'text/html;charset=utf-8'
    resp = gc.get('system/check', jsonResp=False)
    assert resp.status_code == 200
Example #3
def wait_for_jobs(client: GirderClient, max_wait_timeout=30, expected_status=JobStatus.SUCCESS):
    """Wait for all worker jobs to complete"""
    start_time = time.time()
    incompleteJobs = []
    while time.time() - start_time < max_wait_timeout:
        incompleteJobs = client.get(
            'job',
            parameters={
                # https://github.com/girder/girder/blob/master/plugins/jobs/girder_jobs/constants.py
                # https://github.com/girder/girder_worker/blob/master/girder_worker/girder_plugin/status.py
                'statuses': json.dumps([0, 1, 2, 820, 821, 822, 823, 824]),
            },
        )
        if len(incompleteJobs) == 0:
            break
        time.sleep(1)
    if len(incompleteJobs) > 0:
        raise Exception("Jobs were still running after timeout")
    # Verify that all jobs succeeded
    time.sleep(1)
    lastJob = client.get(
        'job',
        parameters={
            'limit': 1,
        },
    )
    if len(lastJob) > 0 and lastJob[0]['status'] != expected_status:
        raise Exception(f"Some jobs did not meet their expected status: {expected_status}")
Example #4
def test_upgrade_pipelines(admin_client: GirderClient):
    cnf = admin_client.get('dive_configuration/pipelines')
    if 'detector' not in cnf:
        admin_client.post(
            'dive_configuration/upgrade_pipelines',
            data=json.dumps(tasks.UPGRADE_JOB_DEFAULT_URLS),
        )
    wait_for_jobs(admin_client, 1000)
Example #5
def import_calc(config):
    try:
        target_port = None
        if config.port:
            target_port = config.port
        target_scheme = None
        if config.scheme:
            target_scheme = config.scheme
        target_apiroot = None
        if config.apiroot:
            target_apiroot = config.apiroot

        client = GirderClient(host=config.host, port=target_port,
                              scheme=target_scheme, apiRoot=target_apiroot)
        client.authenticate(apiKey=config.apiKey)

        me = client.get('/user/me')
        if not me:
            print('Error: Girder token invalid, please verify')
            return

        # Get the user's Private folder
        folder = next(client.listFolder(me['_id'], 'user', 'Private'))

        for file_name in config.datafile:
            print('\nUploading ' + file_name)
            file_id = {}
            with open(file_name, 'rb') as fp:
                fileNameBase = os.path.basename(file_name)
                size = os.path.getsize(file_name)
                file_id = client.uploadFile(folder['_id'], fp, fileNameBase,
                                            size, 'folder')

            body = {
                'fileId': file_id['_id']
            }

            if config.public:
                body['public'] = True


            mol = client.sendRestRequest('POST', 'molecules', data=json.dumps(body))

            if mol and '_id' in mol:
                config.moleculeId = mol['_id']
                print('Molecule ID: ' + mol['_id'])
            else:
                print(mol)

    except HttpError as error:
        print(error.responseText, file=sys.stderr)
Example #6
def download_source_media(girder_client: GirderClient, datasetId: str,
                          dest: Path) -> Tuple[List[str], str]:
    """Download media for dataset to dest path"""
    media = models.DatasetSourceMedia(
        **girder_client.get(f'dive_dataset/{datasetId}/media'))
    dataset = models.GirderMetadataStatic(
        **girder_client.get(f'dive_dataset/{datasetId}'))
    if dataset.type == constants.ImageSequenceType:
        for frameImage in media.imageData:
            girder_client.downloadItem(frameImage.id, str(dest))
        return [str(dest / image.filename)
                for image in media.imageData], dataset.type
    elif dataset.type == constants.VideoType and media.video is not None:
        destination_path = str(dest / media.video.filename)
        girder_client.downloadFile(media.video.id, destination_path)
        return [destination_path], dataset.type
    else:
        raise Exception(f"unexpected metadata {str(dataset.dict())}")
Example #7
def test_reset_job_logs(admin_client: GirderClient):
    # remove any jobs that are unfinished, errored, or canceled.
    for job in admin_client.get(
            'job/all',
            parameters={"statuses": json.dumps([0, 1, 2, 4, 5, 824])}):
        admin_client.delete(f'job/{job["_id"]}')
Example #8
def getTestFolder(client: GirderClient):
    me = client.get('user/me')
    privateFolder = client.loadOrCreateFolder("Integration", me['_id'], 'user')
    return privateFolder
Example #9
class CumulusClient():
    '''Application interface to a cumulus-based client for HPC systems
    supporting the NEWT API.

    Note: the methods must be called in a specific order:
        create_cluster()
        create_omega3p_script()
        create_job()
        upload_inputs()
        submit_job()

    Then optionally:
        monitor_job()
        download_results()
        release_resources()

    A minimal usage sketch is given after this class.
    '''

    # ---------------------------------------------------------------------
    def __init__(self, girder_url, newt_sessionid):
        '''Authenticate with Girder using a NEWT session id and locate the
        user's Private folder.
        '''
        self._client = None
        self._cluster_id = None
        self._girder_url = girder_url
        self._input_folder_id = None
        self._job_folder_id = None
        self._job_id = None
        self._output_folder_id = None
        self._private_folder_id = None
        self._script_id = None
        self._session = requests.Session()

        # Authenticate with Girder using the newt session id
        url = '%s/api/v1/newt/authenticate/%s' % \
          (self._girder_url, newt_sessionid)
        r = self._session.put(url)
        if r.status_code != 200:
            raise HttpError(r.status_code, r.text, r.url, r.request.method)

        # Instantiate Girder client
        url = '%s/api/v1' % self._girder_url
        self._client = GirderClient(apiUrl=url)
        self._client.token = self._session.cookies['girderToken']

        user = self._client.get('user/me')
        #print 'user', user
        user_id = user['_id']
        r = list(self._client.listFolder(user_id, 'user', name='Private'))
        if len(r) != 1:
            raise Exception('Wrong number of Private folders; should be 1, got %s'
                            % len(r))
        self._private_folder_id = r[0]['_id']
        print('private_folder_id', self._private_folder_id)

    # ---------------------------------------------------------------------
    def job_id(self):
        '''Returns current job id (which may be None)
    '''
        return self._job_id

    # ---------------------------------------------------------------------
    def create_cluster(self, machine_name, cluster_name=None):
        '''Create a NEWT cluster record (reusing one with the same name if
        present) and wait for it to reach the running state.
        '''
        if cluster_name is None:
            user = self._client.get('user/me')
            user_name = user.get('firstName', 'user')
            cluster_name = '%s.%s' % (machine_name, user_name)

        cluster = None
        cluster_list = self._client.get('clusters')
        for extant_cluster in cluster_list:
            if extant_cluster['name'] == cluster_name:
                cluster = extant_cluster
                self._cluster_id = extant_cluster['_id']
                break

        if not cluster:
            body = {
                'config': {
                    'host': machine_name
                },
                'name': cluster_name,
                'type': 'newt'
            }

            r = self._client.post('clusters', data=json.dumps(body))
            self._cluster_id = r['_id']
            print('cluster_id', self._cluster_id)

        # Reset the state of the cluster
        body = {'status': 'created'}
        r = self._client.patch('clusters/%s' % self._cluster_id,
                               data=json.dumps(body))

        # Now test the connection
        r = self._client.put('clusters/%s/start' % self._cluster_id)
        sleeps = 0
        while True:
            time.sleep(1)
            r = self._client.get('clusters/%s/status' % self._cluster_id)

            if r['status'] == 'running':
                break
            elif r['status'] == 'error':
                r = self._client.get('clusters/%s/log' % self._cluster_id)
                print(r)
                raise Exception('ERROR creating cluster')

            if sleeps > 9:
                raise Exception('Cluster never moved into running state')
            sleeps += 1

    # ---------------------------------------------------------------------
    def create_omega3p_script(self,
                              omega3p_filename,
                              name=None,
                              number_of_tasks=1):
        '''Creates script to submit omega3p job
    '''
        command = 'srun -n %s /project/projectdirs/ace3p/{{machine}}/omega3p %s' % \
          (number_of_tasks, omega3p_filename)
        if name is None:
            name = omega3p_filename
        body = {'commands': [command], 'name': name}
        r = self._client.post('scripts', data=json.dumps(body))
        self._script_id = r['_id']
        print('script_id', self._script_id)

    # ---------------------------------------------------------------------
    def create_input(self, input_paths, folder_name='input_files'):
        '''DEPRECATED Uploads input files
    '''
        folder_id = self.get_folder(self._private_folder_id, folder_name)
        if folder_id is None:
            return
        print('input_folder_id', folder_id)
        self._input_folder_id = folder_id

        def upload_file(path):
            name = os.path.basename(path)
            size = os.path.getsize(path)
            with open(path, 'rb') as fp:
                self._client.uploadFile(self._input_folder_id,
                                        fp,
                                        name,
                                        size,
                                        parentType='folder')

        for input_path in input_paths:
            if not input_path or not os.path.exists(input_path):
                raise Exception('Input file not found: %s' % input_path)
            upload_file(input_path)

    # ---------------------------------------------------------------------
    def create_output_folder(self, folder_name='output_files'):
        '''DEPRECATED
    '''
        folder_id = self.get_folder(self._private_folder_id, folder_name)
        print('output_folder_id', folder_id)
        self._output_folder_id = folder_id

    # ---------------------------------------------------------------------
    def create_job(self, job_name, tail=None):
        '''Create the job folders (input/output) and the job spec.
        '''
        # Create job folders
        folder_name = uuid.uuid4().hex  # unique name
        self._job_folder_id = self.get_folder(self._private_folder_id,
                                              folder_name)
        print('Created job folder', folder_name)
        self._input_folder_id = self.get_folder(self._job_folder_id,
                                                'input_files')
        self._output_folder_id = self.get_folder(self._job_folder_id,
                                                 'output_files')

        # Make sure job_name isn't null
        if not job_name:
            job_name = 'CumulusJob'

        # Create job spec
        body = {
            'name': job_name,
            'scriptId': self._script_id,
            'output': [{
                'folderId': self._output_folder_id,
                'path': '.'
            }],
            'input': [{
                'folderId': self._input_folder_id,
                'path': '.'
            }]
        }

        if tail:
            body['output'].append({"path": tail, "tail": True})

        job = self._client.post('jobs', data=json.dumps(body))
        self._job_id = job['_id']
        print('Created job_id', self._job_id)

    # ---------------------------------------------------------------------
    def upload_inputs(self, input_paths):
        '''Uploads input files to input folder
    '''
        if not self._input_folder_id:
            raise Exception('Input folder missing')

        def upload_file(path):
            name = os.path.basename(path)
            size = os.path.getsize(path)
            with open(path, 'rb') as fp:
                self._client.uploadFile(self._input_folder_id,
                                        fp,
                                        name,
                                        size,
                                        parentType='folder')

        for input_path in input_paths:
            if not input_path or not os.path.exists(input_path):
                raise Exception('Input file not found: %s' % input_path)
            upload_file(input_path)

    # ---------------------------------------------------------------------
    def submit_job(self,
                   machine,
                   project_account,
                   timeout_minutes,
                   queue='debug',
                   qos=None,
                   number_of_nodes=1,
                   job_output_dir=None):
        '''Submit the job to the given machine through NEWT.
        '''
        body = {
            'machine': machine,
            'account': project_account,
            'numberOfNodes': number_of_nodes,
            'maxWallTime': {
                'hours': 0,
                'minutes': timeout_minutes,
                'seconds': 0
            },
            'queue': queue,
        }
        if qos:
            body['qualityOfService'] = qos
        #print 'jobOutputDir', job_output_dir
        if job_output_dir:
            body['jobOutputDir'] = job_output_dir
            print('Setting jobOutputDir', job_output_dir)
        url = 'clusters/%s/job/%s/submit' % (self._cluster_id, self._job_id)
        self._client.put(url, data=json.dumps(body))
        print('Submitted job', self._job_id)

    # ---------------------------------------------------------------------
    def monitor_job(self, tail=None, timeout_minutes=30):
        '''Periodically monitors job status until completion or timeout
        '''
        log_offset = 0
        # timeout_minutes was not a parameter in the original code; the
        # 30-minute default here is an assumption
        job_timeout = 60 * timeout_minutes
        start = time.time()
        while True:
            time.sleep(2)

            # Provide some feedback at startup
            if log_offset == 0:
                sys.stdout.write('.')

            #print 'Checking status'
            r = self._client.get('jobs/%s' % self._job_id)
            #print r

            if r['status'] in ['error', 'unexpectederror']:
                r = self._client.get('jobs/%s/log' % self._job_id)
                raise Exception(str(r))
            elif r['status'] == 'complete':
                break

            # Tail log file
            if tail:
                params = {'offset': log_offset, 'path': tail}
                #print 'Checking tail'
                r = self._client.get('jobs/%s/output' % self._job_id,
                                     parameters=params)
                #print r
                output = r['content']

                if output and log_offset == 0:
                    print()  # end the user feedback dots

                log_offset += len(output)

                for l in output:
                    print(l)

            sys.stdout.flush()

            if time.time() - start > job_timeout:
                raise Exception('Job timeout')

    # ---------------------------------------------------------------------
    def download_results(self, destination_folder):
        '''Downloads all output files to a local directory

    '''
        if not os.path.exists(destination_folder):
            os.makedirs(destination_folder)

        self._client.downloadFolderRecursive(self._output_folder_id,
                                             destination_folder)

        print('Downloaded files to %s' % destination_folder)

    # ---------------------------------------------------------------------
    def release_resources(self):
        '''Closes/deletes any current resources

    '''
        resource_info = {
            'clusters': [self._cluster_id],
            'jobs': [self._job_id],
            'scripts': [self._script_id],
            'folder': [self._job_folder_id]
        }
        for resource_type, id_list in resource_info.items():
            for resource_id in id_list:
                if resource_id is not None:
                    url = '%s/%s' % (resource_type, resource_id)
                    self._client.delete(url)

        self._input_folder_id = None
        self._job_folder_id = None
        self._job_id = None
        self._output_folder_id = None
        self._script_id = None

    # ---------------------------------------------------------------------
    def get_folder(self, parent_id, name):
        '''Returns folder_id, creating one if needed
    '''
        # Check if folder already exists
        folder_list = list(self._client.listFolder(parent_id, name=name))
        if folder_list:
            folder = folder_list[0]
            #print 'found folder %s: %s' % (name, str(folder))
            return folder['_id']

        # (else)
        try:
            r = self._client.createFolder(parent_id, name)
            return r['_id']
        except HttpError as e:
            print(e.responseText)

        return None
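
A minimal usage sketch of CumulusClient, following the call order from its docstring. The Girder URL, NEWT session id, machine name, project account, and file names below are placeholders and assumptions, not values from the original code.

# Usage sketch (all concrete values are placeholders / assumptions)
client = CumulusClient('https://girder.example.com', '<newt-session-id>')
client.create_cluster('cori')                        # machine name is a placeholder
client.create_omega3p_script('omega3p.in')           # omega3p input deck (placeholder)
client.create_job('Omega3pRun', tail='omega3p.out')
client.upload_inputs(['omega3p.in', 'cavity.ncdf'])  # local input files (placeholders)
client.submit_job('cori', '<project-account>', timeout_minutes=30)
client.monitor_job(tail='omega3p.out', timeout_minutes=30)
client.download_results('./results')
client.release_resources()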
Example #10
def main(args=None):
    parser = argparse.ArgumentParser(
        description='Mount Girder filesystem assetstore.')
    parser.add_argument('--api-url',
                        required=True,
                        default=None,
                        help='full URL to the RESTful API of Girder server')
    parser.add_argument('--username', required=False, default=None)
    parser.add_argument('--password', required=False, default=None)
    parser.add_argument('--api-key', required=False, default=None)
    parser.add_argument('--token', required=False, default=None)
    parser.add_argument('--foreground', dest='foreground', action='store_true')
    parser.add_argument('--hostns', dest='hostns', action='store_true')
    parser.add_argument('-c',
                        default='remote',
                        help='command to run',
                        choices=['remote', 'direct', 'wt_dms', 'wt_home'])
    parser.add_argument('local_folder', help='path to local target folder')
    parser.add_argument('remote_folder',
                        help='Girder\'s folder id or a DM session id')

    args = parser.parse_args()

    gc = GirderClient(apiUrl=args.api_url)
    if args.token:
        gc.token = args.token
    elif args.api_key:
        gc.authenticate(apiKey=args.api_key)
    elif args.username and args.password:
        gc.authenticate(username=args.username, password=args.password)
    else:
        raise RuntimeError("You need to specify apiKey or user/pass")

    if args.hostns:
        targetns = os.path.join(os.environ.get('HOSTDIR', '/'),
                                'proc/1/ns/mnt')
        with open(targetns) as fd:
            setns(fd, CLONE_NEWNS)

    if args.c == 'remote':
        FUSE(RESTGirderFS(args.remote_folder, gc),
             args.local_folder,
             foreground=args.foreground,
             ro=True,
             allow_other=True)
    elif args.c == 'direct':
        FUSE(LocalGirderFS(args.remote_folder, gc),
             args.local_folder,
             foreground=args.foreground,
             ro=True,
             allow_other=True)
    elif args.c == 'wt_dms':
        FUSE(WtDmsGirderFS(args.remote_folder, gc),
             args.local_folder,
             foreground=args.foreground,
             ro=True,
             allow_other=True)
    elif args.c == 'wt_home':
        user = gc.get('/user/me')
        args = {
            'user': user['login'],
            'pass': '******'.format(gc.token),
            'dest': args.local_folder,
            'opts': '-o uid=1000,gid=100',  # FIXME
            'url': gc.urlBase.replace('api/v1', 'homes').rstrip('/')  # FIXME
        }
        cmd = 'echo "{user}\n{pass}" | mount.davfs {opts} {url}/{user} {dest}'
        cmd = cmd.format(**args)
        subprocess.check_output(cmd, shell=True)  # FIXME
    else:
        print('No implementation for command %s' % args.c)
Example #11
    | |  / /  _/   |  /  |/  / ____/  | |     / /___  _____/ /_____  _____
    | | / // // /| | / /|_/ / __/     | | /| / / __ \/ ___/ //_/ _ \/ ___/
    | |/ // // ___ |/ /  / / /___     | |/ |/ / /_/ / /  / ,< /  __/ /
    |___/___/_/  |_/_/  /_/_____/     |__/|__/\____/_/  /_/|_|\___/_/

    You are running in private standalone mode.

    Troubleshooting: Try running `docker pull kitware/viame-worker` to get the latest image
    Documentation: https://kitware.github.io/dive/Deployment-Docker-Compose/
    Issues: https://github.com/Kitware/dive/issues
    Support: please email [email protected]
    """)
    # Fetch Celery broker credentials from server
    diveclient = GirderClient(apiUrl=dive_api_url)
    diveclient.authenticate(username=dive_username, password=dive_password)
    me = diveclient.get('user/me')
    creds = diveclient.post(f'rabbit_user_queues/user/{me["_id"]}')
    broker_url = creds['broker_url']
    queue_name = f"{me['login']}@private"
    if not me.get(UserPrivateQueueEnabledMarker, False):
        warn(" Private queues not enabled for this user.")
        warn(
            " You can visit https://viame.kitware/com/#jobs to change these settings"
        )
    info("========================")
    task_default_queue = queue_name

if broker_url is None:
    raise RuntimeError('CELERY_BROKER_URL must be set')

worker_send_task_events = False
Example #12
except:
    c.sendRestRequest(
        "POST",
        "user",
        {
            "login": "******",
            "password": "******",
            "email": "*****@*****.**",
            "firstName": "Girder",
            "lastName": "Admin",
        },
    )
    c.authenticate("girder", "girder")

# Create a tangelo hub collection if there isn't one
coll_search = c.get("resource/search", parameters={"q": "Default", "types": '["collection"]'})
if len(coll_search["collection"]) == 0:
    collection = c.post(
        "collection", parameters={"name": "Default", "description": "Default workspace", "public": "true"}
    )
    c.post(
        "folder",
        parameters={
            "parentType": "collection",
            "parentId": collection["_id"],
            "name": "Data",
            "description": "Data Folder",
            "public": "true",
        },
    )
    c.post(
Example #13
def main(args=None):
    parser = argparse.ArgumentParser(description="Mount Girder filesystem assetstore.")
    parser.add_argument(
        "--api-url",
        required=True,
        default=None,
        help="full URL to the RESTful API of Girder server",
    )
    parser.add_argument("--username", required=False, default=None)
    parser.add_argument("--password", required=False, default=None)
    parser.add_argument("--api-key", required=False, default=None)
    parser.add_argument("--token", required=False, default=None)
    parser.add_argument("--foreground", dest="foreground", action="store_true")
    parser.add_argument("--hostns", dest="hostns", action="store_true")
    parser.add_argument(
        "--versions-mountpoint",
        dest="versions_mountpoint",
        required=False,
        help="Mountpoint for the versions FS. If relative, then it should be "
        "relative to the runs mountpoint",
        default="Versions",
    )
    parser.add_argument(
        "-c",
        default="remote",
        help="type of filesystem to mount",
        choices=[
            "remote",
            "direct",
            "wt_dms",
            "wt_home",
            "wt_work",
            "wt_run",
            "wt_versions",
            "wt_runs",
        ],
    )
    parser.add_argument("local_folder", help="path to local target folder")
    parser.add_argument(
        "remote_folder",
        help="Girder's folder id, a DM session id (for wt_dms), or a tale instance"
        "ID (for wt_versions)",
    )

    args = parser.parse_args()

    gc = GirderClient(apiUrl=args.api_url)
    if args.token:
        gc.token = args.token
    elif args.api_key:
        gc.authenticate(apiKey=args.api_key)
    elif args.username and args.password:
        gc.authenticate(username=args.username, password=args.password)
    else:
        raise RuntimeError("You need to specify apiKey or user/pass")

    if args.hostns:
        targetns = os.path.join(os.environ.get("HOSTDIR", "/"), "proc/1/ns/mnt")
        with open(targetns) as fd:
            setns(fd, CLONE_NEWNS)

    if args.c == "remote":
        FUSE(
            RESTGirderFS(args.remote_folder, gc),
            args.local_folder,
            foreground=args.foreground,
            ro=True,
            allow_other=True,
        )
    elif args.c == "direct":
        FUSE(
            LocalGirderFS(args.remote_folder, gc),
            args.local_folder,
            foreground=args.foreground,
            ro=True,
            allow_other=True,
        )
    elif args.c == "wt_dms":
        FUSE(
            WtDmsGirderFS(args.remote_folder, gc),
            args.local_folder,
            foreground=args.foreground,
            ro=True,
            allow_other=True,
        )
    elif args.c == "wt_run":
        user = gc.get("/user/me")
        args = {
            "user": user["login"],
            "pass": "******".format(gc.token),
            "dest": args.local_folder,
            "runId": args.remote_folder,
            "opts": "-o uid=1000,gid=100,file_mode=0600,dir_mode=2700",  # FIXME
            "url": gc.urlBase.replace("api/v1", "runs").rstrip("/"),  # FIXME
        }
        cmd = 'echo "{user}\n{pass}" | mount.davfs {opts} {url}/{runId} {dest}'
        cmd = cmd.format(**args)
        subprocess.check_output(cmd, shell=True)  # FIXME
    elif args.c == "wt_work":
        user = gc.get("/user/me")
        args = {
            "user": user["login"],
            "pass": "******".format(gc.token),
            "dest": args.local_folder,
            "tale": args.remote_folder,
            "opts": "-o uid=1000,gid=100,file_mode=0600,dir_mode=2700",  # FIXME
            "url": gc.urlBase.replace("api/v1", "tales").rstrip("/"),  # FIXME
        }
        cmd = 'echo "{user}\n{pass}" | mount.davfs {opts} {url}/{tale} {dest}'
        cmd = cmd.format(**args)
        subprocess.check_output(cmd, shell=True)  # FIXME
    elif args.c == "wt_home":
        user = gc.get("/user/me")
        args = {
            "user": user["login"],
            "pass": "******".format(gc.token),
            "dest": args.local_folder,
            "opts": "-o uid=1000,gid=100,file_mode=0600,dir_mode=2700",  # FIXME
            "url": gc.urlBase.replace("api/v1", "homes").rstrip("/"),  # FIXME
        }
        cmd = 'echo "{user}\n{pass}" | mount.davfs {opts} {url}/{user} {dest}'
        cmd = cmd.format(**args)
        subprocess.check_output(cmd, shell=True)  # FIXME
    elif args.c == "wt_versions":
        FUSE(
            WtVersionsFS(args.remote_folder, gc),
            args.local_folder,
            foreground=args.foreground,
            ro=False,
            allow_other=True,
        )
    elif args.c == "wt_runs":
        FUSE(
            WtRunsFS(args.remote_folder, gc, args.versions_mountpoint),
            args.local_folder,
            foreground=args.foreground,
            ro=False,
            allow_other=True,
        )
    else:
        print("No implementation for command %s" % args.c)
Example #14
parser = argparse.ArgumentParser()
parser.add_argument("path", type=str,
                    help="path to Arbor web apps")
parser.add_argument("-g", "--girder-host", type=str, default='localhost',
                    help="host to Girder instance")
parser.add_argument("-p", "--girder-port", type=int, default=9000,
                    help="port to Girder instance")

args = parser.parse_args()


# Get the ID for our Analyses folder.
c = GirderClient(host=args.girder_host, port=args.girder_port)
c.authenticate('girder', 'girder')
folderSearch = c.get('resource/search', parameters={
    'q': 'Analyses',
    'types': '["folder"]'
})
folderId = folderSearch['folder'][0]['_id']

# Disable authorization requirements for running romanesco tasks
c.put('system/setting', parameters={
    'key': 'flow.require_auth',
    'value': 'false'
})

# Check if these analyses already exist.  If so, we won't re-upload them.
uploadACR = False
uploadPGS = False

searchACR = c.get('resource/search', {
    'q': 'aceArbor',
Example #18
class BaseIntegrationTest(unittest.TestCase):
    def __init__(self, name, girder_url, girder_user, girder_password, job_timeout=60, cleanup=True):
        super(BaseIntegrationTest, self).__init__(name)
        self._job_id = None
        self._script_id = None
        self._output_folder_id = None
        self._input_folder_id = None
        self._girder_url = girder_url
        self._girder_user = girder_user
        self._girder_password = girder_password
        self._job_timeout = job_timeout
        self._data = 'Need more input!'
        self._cleanup = cleanup

    def setUp(self):
        url = '%s/api/v1' % self._girder_url
        self._client = GirderClient(apiUrl=url)
        self._client.authenticate(self._girder_user,
                                  self._girder_password)

        user = self._client.get('user/me')
        self._user_id = user['_id']
        r = list(self._client.listFolder(self._user_id, 'user', name='Private'))
        self.assertEqual(len(r), 1)
        self._private_folder_id = r[0]['_id']

    def tearDown(self):

        if not self._cleanup:
            return

        if self._job_id:
            try:
                url = 'jobs/%s' % self._job_id
                self._client.delete(url)
            except Exception:
                traceback.print_exc()

        if self._script_id:
            try:
                url = 'scripts/%s' % self._script_id
                self._client.delete(url)
            except Exception:
                traceback.print_exc()

        if self._output_folder_id:
            try:
                url = 'folder/%s' % self._output_folder_id
                self._client.delete(url)
            except Exception:
                traceback.print_exc()

        if self._input_folder_id:
            try:
                url = 'folder/%s' % self._input_folder_id
                self._client.delete(url)
            except Exception:
                traceback.print_exc()

    def create_script(self, commands=[
                'sleep 10', 'cat CumulusIntegrationTestInput'
            ]):
        body = {
            'commands': commands,
            'name': 'CumulusIntegrationTestLob'
        }

        r = self._client.post('scripts', data=json.dumps(body))
        self._script_id = r['_id']

    def create_input(self, folder_name='CumulusInput'):

        r = self._client.createFolder(self._private_folder_id, folder_name)
        self._input_folder_id = r['_id']
        size = len(self._data)

        item = self._client.uploadFile(self._input_folder_id,
                    StringIO(self._data), 'CumulusIntegrationTestInput', size,
                    parentType='folder')

        self._item_id = item['itemId']

    def create_output_folder(self, folder_name='CumulusOutput'):
        r = self._client.createFolder(self._private_folder_id, folder_name)
        self._output_folder_id = r['_id']

    def create_job(self, job_name='CumulusIntegrationTestJob', tail=None):
        body = {
            'name': job_name,
            'scriptId': self._script_id,
            'output': [{
              'folderId': self._output_folder_id,
              'path': '.'
            }],
            'input': [
              {
                'folderId': self._input_folder_id,
                'path': '.'
              }
            ]
        }

        if tail:
            body['output'].append({
                "path": tail,
                "tail": True
            })

        job = self._client.post('jobs', data=json.dumps(body))
        self._job_id = job['_id']

    def submit_job(self, job_params={}, timeout=None):
        url = 'clusters/%s/job/%s/submit' % (self._cluster_id, self._job_id)

        self._client.put(url, data=json.dumps(job_params))
        start = time.time()
        while True:
            time.sleep(1)
            r = self._client.get('jobs/%s' % self._job_id)

            if r['status'] in ['error', 'unexpectederror']:
                r = self._client.get('jobs/%s/log' % self._job_id)
                self.fail(str(r))
            elif r['status'] == 'complete':
                break

            if time.time() - start > timeout:
                self.fail('Job didn\'t complete in timeout')

    def assert_output(self):
        r = self._client.listItem(self._output_folder_id)
        self.assertEqual(len(r), 4)

        stdout_item = None
        for i in r:
            if i['name'].startswith('CumulusIntegrationTestJob-%s.o' % self._job_id):
                stdout_item = i
                break

        self.assertIsNotNone(stdout_item)
        r = self._client.get('item/%s/files' % stdout_item['_id'])
        self.assertEqual(len(r), 1)

        path = os.path.join(tempfile.gettempdir(), self._job_id)
        try:
            self._client.downloadFile(r[0]['_id'], path)
            with open(path, 'rb') as fp:
                self.assertEqual(fp.read(), self._data)

        finally:
            if os.path.exists(path):
                os.remove(path)
Example #19
for root, dirs, files in os.walk(arbor_collections_path):
    for file in files:
        if not file.endswith(".json"):
            continue

        # Get the name of this file and the directory that it's in.
        # We use the directory name as the collection name in Girder.
        fullpath = os.path.join(root, file)
        analysis_filename = os.path.basename(fullpath)
        analysis_name = os.path.splitext(analysis_filename)[0]
        analysis_dir = os.path.dirname(fullpath)
        collection_name = os.path.basename(analysis_dir)

        # Create this collection if it doesn't already exist.
        collection_search = c.get('resource/search', parameters={
            'q': collection_name,
            'types': '["collection"]'
        })
        if len(collection_search["collection"]) == 0:
            collection = c.post('collection', parameters={
                'name': collection_name,
                'description': collection_name,
                'public': 'true'
            })
            c.post('folder', parameters={
                'parentType': 'collection',
                'parentId': collection['_id'],
                'name': 'Data',
                'description': 'Data Folder',
                'public': 'true'
            })
            c.post('folder', parameters={
Example #20
        'gridWidth': lon_select_index,
        'gridHeight': lat_select_index,
        'x0': float(data.variables[lon_name][0]),
        'y0': float(data.variables[lat_name][0]),
        'dx': float(data.variables[lon_name][1] - data.variables[lon_name][0]),
        'dy': float(data.variables[lat_name][1] - data.variables[lat_name][0]),
        'values': variable[timestep][:lat_select_index, :lon_select_index].reshape(variable[timestep][:lat_select_index, :lon_select_index].size).tolist()
    }

    return contour_data

client = GirderClient(host, port)
client.token = token

# Get the user
user = client.get('user/me')
# Get the dataset folder
parameters = {
    'userId': user['_id']
}
dataset_folder = client.get('minerva_dataset/folder', parameters=parameters)['folder']
dataset_folder_id = dataset_folder['_id']
parameters = {
    'id': fileId,
    'type': 'file'
}

# Get the file resource so we can get the name
input_file = client.get('resource/%s' % str(fileId), parameters=parameters)
input_file_name = input_file['name']
output_file_name = input_file_name.replace('.nc', '.json')
Example #21
from girder_client import GirderClient
import json
import pymongo
import sys

if len(sys.argv) < 2:
    print "%s /path/to/ArborWebApps" % sys.argv[0]
    sys.exit(1)
arborWebAppsPath = sys.argv[1]

# Get the ID for our Analyses folder.
c = GirderClient(host='localhost', port=9000)
c.authenticate('girder', 'girder')
folderSearch = c.get('resource/search',
                     parameters={
                         'q': 'Analyses',
                         'types': '["folder"]'
                     })
folderId = folderSearch['folder'][0]['_id']

# Disable authorization requirements for running romanesco tasks
c.put('system/setting',
      parameters={
          'key': 'romanesco.require_auth',
          'value': 'false'
      })

# Check if these analyses already exist.  If so, we won't re-upload them.
uploadACR = False
uploadPGS = False