Example #1
    def handle(self, **options):
        # ignore_errors = options.get('ignore_errors')
        config = helpers.Config(options)
        backup_file = options.get('backup_file')
        owner = options.get('owner')

        if not backup_file or len(backup_file) == 0:
            raise CommandError("Backup archive '--backup-file' is mandatory")

        if not owner or len(owner) == 0:
            raise CommandError("Owner '--owner' is mandatory")

        message = 'WARNING: The migration may break existing GeoNode Layers. Do you want to proceed?'
        if helpers.confirm(prompt=message, resp=False):

            """Migrate existing Layers on GeoNode DB"""
            try:
                # Create Target Folder
                restore_folder = os.path.join(tempfile.gettempdir(), 'restore')
                if not os.path.exists(restore_folder):
                    os.makedirs(restore_folder)

                # Extract ZIP Archive to Target Folder
                target_folder = helpers.unzip_file(backup_file, restore_folder)

                # Retrieve the max Primary Key from the DB
                from geonode.base.models import ResourceBase
                try:
                    higher_pk = ResourceBase.objects.all().order_by("-id")[0].pk
                except Exception:
                    # no existing resources (or table not reachable): start PKs from 0
                    higher_pk = 0

                # Restore Fixtures
                for app_name, dump_name in zip(config.app_names, config.dump_names):
                    for mig_name, mangler in zip(config.migrations, config.manglers):
                        if app_name == mig_name:
                            fixture_file = os.path.join(target_folder, dump_name+'.json')

                            print("Deserializing "+fixture_file)
                            mangler = helpers.load_class(mangler)
                            site_url = settings.SITEURL.rstrip('/') if settings.SITEURL.startswith('http') else settings.SITEURL
                            obj = helpers.load_fixture(app_name, fixture_file, mangler=mangler,
                                                       basepk=higher_pk, owner=owner,
                                                       datastore=settings.OGC_SERVER['default']['DATASTORE'],
                                                       siteurl=site_url)

                            from django.core import serializers

                            objects = serializers.deserialize('json', json.dumps(obj), ignorenonexistent=True)
                            for obj in objects:
                                obj.save(using=DEFAULT_DB_ALIAS)

                print("Restore finished. Please find restored files and dumps into: '"+target_folder+"'.")

            except Exception:
                traceback.print_exc()
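For reference, the handle() above lives on a Django management Command (a BaseCommand subclass) and relies on a few module-level imports the snippet does not show. A minimal sketch of that import header is below; the relative import of helpers is an assumption, since the module's location differs between GeoNode versions.

import os
import json
import tempfile
import traceback

from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
from django.db import DEFAULT_DB_ALIAS

from . import helpers  # assumption: backup/restore helpers shipped next to this command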
Example #2
    def handle(self, **options):
        # ignore_errors = options.get('ignore_errors')
        config = helpers.Config(options)
        backup_file = options.get('backup_file')
        owner = options.get('owner')

        if not backup_file or len(backup_file) == 0:
            raise CommandError("Backup archive '--backup-file' is mandatory")

        if not owner or len(owner) == 0:
            raise CommandError("Owner '--owner' is mandatory")

        message = 'WARNING: The migration may break existing GeoNode Layers. Do you want to proceed?'
        if helpers.confirm(prompt=message, resp=False):

            """Migrate existing Layers on GeoNode DB"""
            try:
                # Create Target Folder
                restore_folder = os.path.join(tempfile.gettempdir(), 'restore')
                if not os.path.exists(restore_folder):
                    os.makedirs(restore_folder)

                # Extract ZIP Archive to Target Folder
                target_folder = helpers.unzip_file(backup_file, restore_folder)

                # Retrieve the max Primary Key from the DB
                from geonode.base.models import ResourceBase
                try:
                    higher_pk = ResourceBase.objects.all().order_by("-id")[0].pk
                except Exception:
                    # no existing resources (or table not reachable): start PKs from 0
                    higher_pk = 0

                # Restore Fixtures
                for app_name, dump_name in zip(config.app_names, config.dump_names):
                    for mig_name, mangler in zip(config.migrations, config.manglers):
                        if app_name == mig_name:
                            fixture_file = os.path.join(target_folder, dump_name+'.json')

                            print "Deserializing "+fixture_file
                            mangler = helpers.load_class(mangler)

                            obj = helpers.load_fixture(app_name, fixture_file, mangler=mangler,
                                                       basepk=higher_pk, owner=owner,
                                                       datastore=settings.OGC_SERVER['default']['DATASTORE'],
                                                       siteurl=settings.SITEURL)

                            from django.core import serializers

                            objects = serializers.deserialize('json', json.dumps(obj), ignorenonexistent=True)
                            for obj in objects:
                                obj.save(using=DEFAULT_DB_ALIAS)

                print "Restore finished. Please find restored files and dumps into: '"+target_folder+"'."

            except Exception:
                traceback.print_exc()
Example #3
def migrate_layers(archive, owner):
    """Migrate existing Layers on GeoNode DB"""
    try:
        # Create Target Folder
        restore_folder = 'restore'
        if not os.path.exists(restore_folder):
            os.makedirs(restore_folder)

        # Extract ZIP Archive to Target Folder
        target_folder = helpers.unzip_file(archive, restore_folder)

        # Retrieve the max Primary Key from the DB
        from geonode.base.models import ResourceBase
        try:
            higher_pk = ResourceBase.objects.all().order_by("-id")[0].pk
        except Exception:
            # no existing resources: start PKs from 0
            higher_pk = 0

        # Restore Fixtures
        for app_name, dump_name in zip(helpers.app_names, helpers.dump_names):
            for mig_name, mangler in zip(helpers.migrations, helpers.manglers):
                if app_name == mig_name:
                    fixture_file = os.path.join(target_folder,
                                                dump_name + '.json')

                    print "Deserializing " + fixture_file
                    mangler = helpers.load_class(mangler)

                    obj = helpers.load_fixture(
                        app_name,
                        fixture_file,
                        mangler=mangler,
                        basepk=higher_pk,
                        owner=owner,
                        datastore=settings.OGC_SERVER['default']['DATASTORE'],
                        siteurl=settings.SITEURL)

                    from django.core import serializers

                    objects = serializers.deserialize('json',
                                                      json.dumps(obj),
                                                      ignorenonexistent=True)
                    for obj in objects:
                        obj.save(using=DEFAULT_DB_ALIAS)

    except Exception:
        traceback.print_exc()
Example #4
def migrate_layers(archive, owner):
   """Migrate existing Layers on GeoNode DB"""
   try:
      # Create Target Folder
      restore_folder = 'restore'
      if not os.path.exists(restore_folder):
         os.makedirs(restore_folder)
      
      # Extract ZIP Archive to Target Folder
      target_folder = helpers.unzip_file(archive, restore_folder)

      # Retrieve the max Primary Key from the DB
      from geonode.base.models import ResourceBase
      try:
         higher_pk = ResourceBase.objects.all().order_by("-id")[0].pk
      except Exception:
         # no existing resources: start PKs from 0
         higher_pk = 0

      # Restore Fixtures
      for app_name, dump_name in zip(helpers.app_names, helpers.dump_names):
         for mig_name, mangler in zip(helpers.migrations, helpers.manglers):
            if app_name == mig_name:
               fixture_file = os.path.join(target_folder, dump_name+'.json')
         
               print "Deserializing "+fixture_file
               mangler = helpers.load_class(mangler)

               obj = helpers.load_fixture(app_name, fixture_file, mangler=mangler,
                                          basepk=higher_pk, owner=owner,
                                          datastore=settings.OGC_SERVER['default']['DATASTORE'],
                                          siteurl=settings.SITEURL)

               from django.core import serializers

               objects = serializers.deserialize('json', json.dumps(obj), ignorenonexistent=True)
               for obj in objects:
                  obj.save(using=DEFAULT_DB_ALIAS)

   except Exception:
      traceback.print_exc()
Example #5
File: handlers.py Project: tolsac/restae
class APIModelBaseHandler(BaseHandler):
    """

    """
    lookup_field = 'urlsafe'
    lookup_value_regex = '[^/.]+'
    queryset = None
    serializer_class = None
    pagination_class = load_class(DEFAULT_PAGINATION_CLASS)

    def check_object_permissions(self, request, obj):
        """
        Returns always True by default. Override this method to
        get a object level permissions checks
        """
        return True

    def get_object(self,
                   urlsafe=None,
                   query_param=None,
                   lookup_field=None,
                   raise_exception=True):
        _key = None
        _lookup_field = lookup_field or self.lookup_field
        _query_param = query_param or self.query_param

        if _lookup_field is None and urlsafe is None and _query_param is None:
            if raise_exception is True:
                raise BadRequest
            else:
                return None

        try:
            if urlsafe is None:
                if _lookup_field is not None and _lookup_field in self.route_args:
                    urlsafe_from_request = self.route_args[_lookup_field]
                elif _query_param is not None:
                    urlsafe_from_request = self.request.GET.get(
                        _query_param, None)
                else:
                    urlsafe_from_request = None

                if urlsafe_from_request is None and raise_exception is False:
                    return None

                if urlsafe_from_request is None:
                    raise MissingParameter(
                        'Parameter {} is missing from request'.format(
                            _lookup_field or _query_param))
                else:
                    urlsafe = urlsafe_from_request

            _key = get_key_from_urlsafe(urlsafe)
            if not self.check_object_permissions(self.request, _key):
                raise NotAuthorized(
                    'You are not authorized to access this resource')
        except Exception as err:
            logging.error('Error in get_object: %s -> %s\n%s',
                          err.__class__.__name__, str(err),
                          traceback.format_exc())
            if raise_exception:
                raise NotFound
        finally:
            return _key

    @property
    def paginator(self):
        """
        The paginator instance associated with the view, or `None`.
        """
        if not hasattr(self, '_paginator'):
            if self.pagination_class is None:
                self._paginator = None
            else:
                self._paginator = self.pagination_class()
        return self._paginator

    def paginate_queryset(self, queryset):
        """
        Return a single page of results, or `None` if pagination is disabled.
        """
        if self.paginator is None:
            return None
        return self.paginator.paginate_queryset(queryset,
                                                self.request,
                                                view=self)

    def get_paginated_response(self, data):
        """
        Return a paginated style `Response` object for the given output data.
        """
        assert self.paginator is not None
        return self.paginator.get_paginated_response(data)

    def get_queryset(self):
        """
        Method used to retrieve the queryset. Must be overloaded for custom operations
        """
        return self.queryset

    def filter_queryset(self):
        """
        Default queryset filter. List all model properties and compare to query params.
        All intersected attributes are applied ad filters on the queryset
        """
        if not self.query_params:
            return self.queryset

        _model = get_model_class_from_query(self.queryset)
        fields = get_model_fields(_model)
        queryset = self.get_queryset()

        for field_name, field_class in fields:
            if field_name in self.query_params:
                filter_value = self.query_params[field_name]
                if field_class == KeyProperty:
                    _key = get_key_from_urlsafe(filter_value)
                    queryset = queryset.filter(
                        ndb.GenericProperty(field_name) == _key)
                else:
                    """
                    We use the property serializer to be able to make the filter with the good type.
                    For instance if we want to filter an IntergerProperty, the self.query_params, will
                    have only string values. We need to use the serializer to cast the param to the
                    good type.
                    """
                    filter_value = TYPE_MAPPING[field_class.__name__](
                        data=filter_value).data
                    queryset = queryset.filter(
                        ndb.GenericProperty(field_name) == filter_value)

        return queryset

    def get_serializer(self, *args, **kwargs):
        """
        Method used to retrieves the serializer. Must be overloaded for custom operations
        """
        return self.serializer_class

    def post_save(self, obj, created=True):
        """
        Method called after each call to .put()
        """
        pass

    def pre_delete(self, obj):
        """
        Method called just before calling key.delete()
        """
        pass
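The inline note inside filter_queryset() is the important detail: query parameters always arrive as strings, so each value must be cast to the type of the property it filters before being compared through ndb.GenericProperty(field_name) == value. A minimal, hypothetical stand-in for that idea (not restae's actual TYPE_MAPPING, which goes through property serializers) might look like this:

# Illustration only: map NDB property class names to plain casting callables.
CAST_BY_PROPERTY = {
    'IntegerProperty': int,
    'FloatProperty': float,
    'BooleanProperty': lambda v: str(v).lower() in ('1', 'true', 'yes'),
    'StringProperty': str,
}

def cast_query_param(field_class_name, raw_value):
    """Cast a raw (string) query parameter to the type of the model property."""
    return CAST_BY_PROPERTY.get(field_class_name, str)(raw_value)

# cast_query_param('IntegerProperty', '42') -> 42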
Example #6
# ==============================================================================

res_path = "results/raw/"

# global data set index
data_set_index = pd.read_csv("data/data_index.csv",
                             sep=";",
                             index_col="dataset")

# global algorithm index
algorithm_index = pd.read_csv("alg_index.csv", sep=";", index_col="algorithm")

algorithm_index = algorithm_index.loc[algorithm_index.export == 1]
algorithms = list(algorithm_index.index)
algorithm_dict = dict({
    alg: helpers.load_class(algorithm_index.loc[alg, "module_name"],
                            algorithm_index.loc[alg, "class_name"])
    for alg in algorithms
})
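# Sketch (an assumption, not this project's helpers module): a two-argument
# load_class like the one used above is typically just importlib plus getattr.
import importlib

def _load_class_sketch(module_name, class_name):
    """Import module_name and return its attribute class_name."""
    return getattr(importlib.import_module(module_name), class_name)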

global_seeds = [4711, 1337, 42, 90210, 666, 879, 1812, 4055, 711, 512]

# train/test ratios to test against
train_test_ratios = [[0.1, 0.9], [0.3, 0.7], [0.5, 0.5], [0.7, 0.3]]
train_test_ratios = [np.array(d) for d in train_test_ratios]

train_distributions = dict()
train_distributions[2] = np.array([[0.1, 0.9], [0.3, 0.7], [0.5, 0.5],
                                   [0.7, 0.3], [0.9, 0.1], [0.95, 0.05]])
train_distributions[3] = np.array([[0.2, 0.5, 0.3], [0.05, 0.8, 0.15],
                                   [0.35, 0.3, 0.35]])
train_distributions[4] = np.array([[0.5, 0.3, 0.1, 0.1], [0.7, 0.1, 0.1, 0.1],