Пример #1
0
    def authenticate_me(self, **kwargs):
        # NOTE(review): the body of this method was corrupted during
        # extraction -- the credential-handling lines were masked with
        # '******', fusing the username/password prompts and the error-log
        # call into a single syntactically invalid line. Recover the original
        # source upstream before use (Example #3 below shows the same
        # authenticate / log-on-failure / return-False pattern intact).

        username = input("Enter Username: "******"Enter Password: "******"Either credentials invalid or unable to connect to HydroShare."
            )

        return False
Пример #2
0
    def authenticate_me(self, **kwargs):
        # NOTE(review): corrupted during extraction -- credential lines were
        # masked with '******', collapsing the kwargs-or-prompt credential
        # lookup and the error-log call into one invalid line. Recover the
        # original source upstream before use (Example #3 below preserves the
        # same authenticate / log-on-failure / return-False pattern).

        username = kwargs.get('username') or input("Enter Username: "******"Enter Password: "******"Either credentials invalid or unable to connect to {}.".
                      format(self.name))

        return False
Пример #3
0
    def authenticate_me(self, **kwargs):
        """Authenticate against the Girder live server and persist credentials.

        Args:
            **kwargs: must contain 'username' and 'password' (KeyError is
                raised to the caller if either is missing).

        Returns:
            True if authentication succeeded and the credentials were stored
            in the local Providers table, False otherwise.
        """
        connection_info = 'https://data.kitware.com/api/v1'
        username = kwargs['username']
        password = kwargs['password']

        try:
            gc = girder_client.GirderClient(apiUrl=connection_info)
            gc.authenticate(username, password)
            db = get_db()
            with db_session:
                p = db.Providers.select().filter(provider=self.name).first()

                # NOTE(review): the password is stored in plaintext in the
                # local provider table -- consider a keyring/token instead.
                provider_metadata = {
                    'provider': self.name,
                    'username': username,
                    'password': password,
                }

                # Insert a new record or update the existing one in place.
                if p is None:
                    db.Providers(**provider_metadata)
                else:
                    p.set(**provider_metadata)

            return True
        except Exception:
            # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
            # are no longer swallowed; any auth/connection error is logged.
            log.error(
                "Either credentials invalid or unable to connect to the Girder live server."
            )

        return False
Пример #4
0
def stage_for_download(uris, options=None):
    """Apply download options before downloading.

    Args:
        uris (string or list, Required):
            uris of datasets to stage for download

        options (dict or list of dicts, Optional, Default=None):
            options to be passed to quest.api.download function specified for each dataset

            If options is a dict, then apply same options to all datasets,
            else each dict in list is used for each respective dataset

    Returns:
        uris (list):
            staged dataset uids
    """
    uris = listify(uris)
    datasets = []

    # TODO classify uris and ensure only datasets

    if not isinstance(options, list):
        # A single options dict (or None) applies to every dataset.
        options = [options] * len(uris)

    db = get_db()

    for dataset_uri, kwargs in zip(uris, options):
        if isinstance(kwargs, param.Parameterized):
            kwargs = dict(kwargs.get_param_values())

        dataset_metadata = get_metadata(dataset_uri)[dataset_uri]

        parameter = kwargs.get('parameter') if kwargs else None
        parameter_name = parameter or 'no_parameter'

        # BUG FIX: reset display_name for each dataset. It was previously
        # initialized once before the loop, so a name generated for one
        # dataset leaked into subsequent datasets that already had a
        # custom display name.
        display_name = None
        if dataset_metadata['display_name'] == dataset_uri:
            # Dataset has no custom display name; synthesize one from the
            # provider, the parameter, and a prefix of the dataset uid.
            catalog_entry = dataset_metadata['catalog_entry']
            provider, service, _ = parse_service_uri(catalog_entry)
            display_name = '{0}-{1}-{2}'.format(provider, parameter_name,
                                                dataset_uri[:7])

        quest_metadata = {
            'display_name': display_name or dataset_metadata['display_name'],
            'options': kwargs,
            'status': DatasetStatus.STAGED,
            'parameter': parameter
        }

        with db_session:
            dataset = db.Dataset[dataset_uri]
            dataset.set(**quest_metadata)

        datasets.append(dataset_uri)

    return datasets
Пример #5
0
def set_active_project(name):
    """Set active QUEST project.

    Args:
        name (string, Required):
            name of a project

    Returns:
        project (string):
            name of project currently set as active

    Raises:
        ValueError: if no project with the given name exists.
    """
    path = _get_projects_index_file()
    contents = util.read_yaml(path)
    # Membership test directly on the dict; `.keys()` was redundant.
    if name not in contents['projects']:
        raise ValueError('Project %s does not exist' % name)
    contents.update({'active_project': name})
    util.write_yaml(path, contents)
    get_db(active_db(), reconnect=True)  # change active database
    return name
Пример #6
0
def _copy_dataset(dataset_metadata, collection_path,
                  destination_collection_path):
    """Clone a dataset record under a fresh UUID and copy its data file.

    Returns the generated name of the new dataset record.
    """
    copy_name = util.uuid('dataset')
    db = get_db()
    with db_session:
        # Duplicate the source record, swapping in the new name.
        record = db.Dataset[dataset_metadata['name']].to_dict()
        record.update(name=copy_name)
        db.Dataset(**record)

    # Copy the backing file into the destination collection directory.
    _update_dataset_file_location(shutil.copy2, record, collection_path,
                                  destination_collection_path)
    return copy_name
Пример #7
0
def new_catalog_entry(geometry=None, geom_type=None, geom_coords=None, metadata=None):
    """Add a new entry to a catalog either a quest local catalog (table) or file.

    Args:
        geometry (string or Shapely.geometry.shape, optional, Default=None):
            well-known-text or Shapely shape representing the geometry of the catalog_entry.
            Alternatively `geom_type` and `geom_coords` can be passed.
        geom_type (string, Optional, Default=None):
             geometry type of catalog_entry (i.e. point/line/polygon)
        geom_coords (string or list, Optional, Default=None):
            geometric coordinates specified as valid geojson coordinates (i.e. a list of lists i.e.
            '[[-94.0, 23.2], [-94.2, 23.4] ...]'
            --------- OR ---------
            [[-94.0, 23.2], [-94.2, 23.4] ...] etc)
        metadata (dict, Optional, Default=None):
            optional metadata at the new catalog_entry

    Returns
    -------
        uri (string):
            uri of newly created entry

    """
    # Build a geometry object from type + coords when no geometry was given.
    if geometry is None and geom_coords is not None and geom_type is not None:
        coords = geom_coords
        if isinstance(coords, str):
            coords = json.loads(coords)
        geometry = shape({"coordinates": coords, "type": geom_type})

    # Normalize Shapely objects to their WKT string representation.
    if hasattr(geometry, 'wkt'):
        geometry = geometry.wkt

    entry_id = util.uuid('catalog_entry')

    db = get_db()
    with db_session:
        db.QuestCatalog(
            service_id=entry_id,
            geometry=geometry,
            metadata=metadata,
        )

    return construct_service_uri('quest', 'quest', entry_id)
Пример #8
0
def new_collection(name, display_name=None, description=None, metadata=None):
    """Create a new collection.

    Create a new collection by creating a new folder in project directory
    and adding collection metadata in project database.

    Args:
        name (string, Required):
            Name of the collection used in all quest function calls,must be unique. Will also be the folder name of the collection
        display_name (string, Optional, Default=None):
            display name for collection
        description (string, Optional, Default=None):
            description of collection
        metadata (dict, Optional, Default=None):
            user defined metadata

    Returns:
        collection (dict)
            details of the newly created collection

    Raises:
        ValueError: if a collection with the given name already exists.
    """
    name = name.lower()
    if name in _load_collections():
        raise ValueError('Collection %s already exists' % name)

    # Fall back to sensible defaults for the optional fields.
    display_name = name if display_name is None else display_name
    description = '' if description is None else description
    metadata = {} if metadata is None else metadata

    # Each collection gets its own folder inside the project directory.
    collection_dir = os.path.join(_get_project_dir(), name)
    os.makedirs(collection_dir, exist_ok=True)

    db = get_db()
    with db_session:
        db.Collection(
            name=name,
            display_name=display_name,
            description=description,
            metadata=metadata,
        )

    return _load_collection(name)
Пример #9
0
def get_auth_status(uri):
    """Check to see if a provider has been authenticated

    Args:
        uri (string, Required):
            uri of 'user service'
     Returns:
        True on success
        False on not authenticated

    """
    db = get_db()
    with db_session:
        provider_record = db.Providers.select().filter(provider=uri).first()

    # A stored record means the provider was previously authenticated.
    return provider_record is not None
Пример #10
0
def download_datasets(datasets, raise_on_error=False):
    """Download datasets that have been staged with stage_for_download.

    Args:
        datasets (string or list, Required):
            datasets to download
        raise_on_error (bool, Optional, Default=False):
            if True, if an error occurs raise an exception

    Returns:
        status (dict):
            download status of datasets, keyed by dataset uid
            (returns None when no datasets match the input)
    """
    datasets = get_metadata(datasets, as_dataframe=True)

    # NOTE(review): returns None (not an empty dict) when nothing matches.
    if datasets.empty:
        return

    # filter out non download datasets
    datasets = datasets[datasets['source'] == DatasetSource.WEB_SERVICE]

    db = get_db()
    project_path = _get_project_dir()
    status = {}
    for idx, dataset in datasets.iterrows():
        collection_path = os.path.join(project_path, dataset['collection'])
        catalog_entry = dataset["catalog_entry"]
        try:
            # Mark as in-progress before starting the (possibly slow) download.
            update_metadata(idx,
                            quest_metadata={'status': DatasetStatus.PENDING})
            kwargs = dataset['options'] or dict()
            all_metadata = download(catalog_entry,
                                    file_path=collection_path,
                                    dataset=idx,
                                    **kwargs)

            # Split provider metadata out of the quest bookkeeping fields.
            metadata = all_metadata.pop('metadata', None)
            quest_metadata = all_metadata
            quest_metadata.update({
                'status': DatasetStatus.DOWNLOADED,
                'message': 'success',
            })
        except Exception as e:
            if raise_on_error:
                raise

            # Record the failure on the dataset instead of aborting the batch.
            quest_metadata = {
                'status': DatasetStatus.FAILED_DOWNLOAD,
                'message': str(e),
            }

            metadata = None

        status[idx] = quest_metadata['status']

        quest_metadata.update({'metadata': metadata})

        # Persist the outcome (success or failure) on the dataset record.
        with db_session:
            dataset = db.Dataset[idx]
            dataset.set(**quest_metadata)

    return status
Пример #11
0
def new_dataset(catalog_entry,
                collection,
                source=None,
                display_name=None,
                description=None,
                file_path=None,
                metadata=None,
                name=None):
    """Create a new dataset in a collection.

    Args:
        catalog_entry (string, Required):
            catalog_entry uri
        collection (string, Required):
            name of collection to create dataset in
        source (string, Optional, Default=None):
            type of the dataset such as timeseries or raster
        display_name (string, Optional, Default=None):
            display name for dataset
        description (string, Optional, Default=None):
            description of dataset
        file_path (string, Optional, Default=None):
            path location to save new dataset's data
        metadata (dict, Optional, Default=None):
            user defined metadata
        name (string, Optional, Default=None):
            optionally pass in a UUID starting with d as name, otherwise it will be generated

    Returns:
        uri (string):
            uid of dataset

    Raises:
        ValueError: if the collection or catalog entry does not exist.
    """

    if collection not in get_collections():
        raise ValueError("Collection {} does not exist".format(collection))

    if not isinstance(catalog_entry, pd.DataFrame):
        catalog_entry = get_metadata(catalog_entry, as_dataframe=True)
    try:
        catalog_entry = catalog_entry['name'][0]
    except IndexError:
        # Fixed typo in the error message ("dose" -> "does").
        raise ValueError('Entry {} does not exist'.format(catalog_entry))

    name = name or uuid('dataset')
    # Dataset uids must be UUIDs prefixed with 'd'.
    assert name.startswith('d') and is_uuid(name)

    if source is None:
        source = DatasetSource.USER

    if display_name is None:
        display_name = name

    if metadata is None:
        metadata = {}

    quest_metadata = {
        'name': name,
        'collection': collection,
        'catalog_entry': catalog_entry,
        'source': source,
        'display_name': display_name,
        'description': description,
        'file_path': file_path,
        'metadata': metadata,
    }
    # Web-service datasets start unstaged; other sources carry no status.
    if source == DatasetSource.WEB_SERVICE:
        quest_metadata.update({'status': DatasetStatus.NOT_STAGED})

    db = get_db()
    with db_session:
        db.Dataset(**quest_metadata)

    return name
Пример #12
0
def delete(uris):
    """Delete metadata for resource(s)

    WARNING:
        deleting a collection deletes all associated datasets

    Args:
        uris (string, comma separated string or list of strings, Required):
            uri(s) of collection, and/or dataset to delete

    Returns:
        status (bool):
            True on success

    Raises:
        ValueError: if a named collection does not exist.
    """
    # if uri list is empty do nothing
    if not uris:
        return True

    # group uris by type
    grouped_uris = util.classify_uris(uris,
                                      as_dataframe=False,
                                      exclude=['services', 'publishers'],
                                      require_same_type=True)
    resource = list(grouped_uris)[0]
    uris = grouped_uris[resource]

    db = get_db()
    for uri in uris:
        if resource == 'collections':
            if uri not in get_collections():
                logger.error('Collection does not exist: %s', uri)
                # Fixed grammar in the error message ("exists" -> "exist").
                raise ValueError('Collection does not exist')

            with db_session:
                # Remove all datasets belonging to the collection first.
                datasets = db.Dataset.select(
                    lambda d: d.collection.name == uri)
                if datasets.count() > 0:
                    datasets.delete()
                db.Collection[uri].delete()

            # Remove the collection's data folder from disk, if present.
            path = _get_project_dir()
            path = os.path.join(path, uri)
            if os.path.exists(path):
                logger.info('deleting all data under path: %s' % path)
                shutil.rmtree(path)

        if resource == 'datasets':
            with db_session:
                dataset = db.Dataset[uri]

                if dataset.source == 'derived':
                    # If this is the last dataset referencing a derived
                    # catalog entry, delete the orphaned entry too.
                    catalog_entry_datasets = select_datasets(
                        lambda d: d.catalog_entry == dataset.catalog_entry)

                    if len(catalog_entry_datasets) == 1:
                        _, _, catalog_id = util.parse_service_uri(
                            dataset.catalog_entry)
                        db.QuestCatalog[catalog_id].delete()

                # Best-effort file removal: file may be missing (OSError)
                # or file_path may be None (TypeError).
                try:
                    os.remove(dataset.file_path)
                except (OSError, TypeError):
                    pass

                dataset.delete()

    return True
Пример #13
0
def _load_collections():
    """Return a mapping of collection name -> metadata dict for all collections."""
    db = get_db()
    with db_session:
        records = db.Collection.select(lambda c: c)
        return {record.name: record.to_dict() for record in records}
Пример #14
0
def _load_collection(name):
    """Return the metadata dict for the collection with the given name."""
    db = get_db()
    with db_session:
        record = db.Collection.select(lambda c: c.name == name).first()
        # NOTE(review): raises AttributeError when no such collection exists
        # (first() returns None) -- matches the original behavior.
        return record.to_dict()