def update_from_options(self, options):
        """Rebuild the node query section cache for the selected projects.

        Expects the parsed command line options of this management command.
        Operates on the projects named by --project_id, or on all projects
        when none are given. All options are parsed and validated before any
        destructive database operation so that an invalid invocation cannot
        wipe existing cache data.

        Raises:
            CommandError: if depth steps are missing, don't match the
                orientations, or the data type is unknown.
        """
        project_ids = options['project_id']
        if project_ids:
            projects = Project.objects.filter(id__in=project_ids)
        else:
            projects = Project.objects.all()

        # Fall back to XY sections if no orientation was provided.
        orientations = options['orientations']
        if isinstance(orientations, (list, tuple)):
            orientations = list(orientations)
        else:
            orientations = ['xy']

        steps = options['steps']
        if not steps:
            raise CommandError('Need depth resolution per orientation (--step)')
        steps = [float(s) for s in steps]
        if len(steps) != len(orientations):
            raise CommandError('Need one depth resolution flag per orientation')

        data_type = options['data_type']
        if data_type not in ('json', 'json_text', 'msgpack'):
            raise CommandError('Type must be one of: json, json_text, msgpack')

        # Bounding box: [[min_x, min_y, min_z], [max_x, max_y, max_z]].
        bb_limits = [
            [float(options['min_x']), float(options['min_y']), float(options['min_z'])],
            [float(options['max_x']), float(options['max_y']), float(options['max_z'])]
        ]

        # A node limit of zero means "no limit".
        node_limit = int(options['node_limit'])
        if node_limit == 0:
            node_limit = None

        n_largest_skeletons_limit = None
        if options['n_largest_skeletons_limit']:
            n_largest_skeletons_limit = int(options['n_largest_skeletons_limit'])

        n_last_edited_skeletons_limit = None
        if options['n_last_edited_skeletons_limit']:
            n_last_edited_skeletons_limit = int(options['n_last_edited_skeletons_limit'])

        # Only now, with all options validated, touch the database.
        delete = False
        if options['clean']:
            if project_ids:
                # Let update_cache() remove the per-project data.
                delete = True
            else:
                # Removing cache data for all projects is faster this way.
                connection.cursor().execute("TRUNCATE node_query_cache")

        for p in projects:
            self.stdout.write('Updating cache for project {}'.format(p.id))
            update_cache(p.id, data_type, orientations, steps, node_limit,
                    n_largest_skeletons_limit, n_last_edited_skeletons_limit,
                    delete, bb_limits, log=self.stdout.write)
            self.stdout.write('Updated cache for project {}'.format(p.id))
# --- Example 2 ---
    def update_from_options(self, options):
        """Rebuild the node query section cache for the selected projects.

        Expects the parsed command line options of this management command.
        Operates on the projects named by --project_id, or on all projects
        when none are given. All options are parsed and validated before any
        destructive database operation so that an invalid invocation cannot
        wipe existing cache data.

        Raises:
            CommandError: if depth steps are missing, don't match the
                orientations, or the data type is unknown.
        """
        project_ids = options['project_id']
        if project_ids:
            projects = Project.objects.filter(id__in=project_ids)
        else:
            projects = Project.objects.all()

        # Fall back to XY sections if no orientation was provided.
        orientations = options['orientations']
        if isinstance(orientations, (list, tuple)):
            orientations = list(orientations)
        else:
            orientations = ['xy']

        steps = options['steps']
        if not steps:
            raise CommandError(
                'Need depth resolution per orientation (--step)')
        steps = [float(s) for s in steps]
        if len(steps) != len(orientations):
            raise CommandError(
                'Need one depth resolution flag per orientation')

        data_type = options['data_type']
        if data_type not in ('json', 'json_text', 'msgpack'):
            raise CommandError('Type must be one of: json, json_text, msgpack')

        # Bounding box: [[min_x, min_y, min_z], [max_x, max_y, max_z]].
        bb_limits = [
            [float(options['min_x']), float(options['min_y']), float(options['min_z'])],
            [float(options['max_x']), float(options['max_y']), float(options['max_z'])]
        ]

        # A node limit of zero means "no limit".
        node_limit = int(options['node_limit'])
        if node_limit == 0:
            node_limit = None

        n_largest_skeletons_limit = None
        if options['n_largest_skeletons_limit']:
            n_largest_skeletons_limit = int(
                options['n_largest_skeletons_limit'])

        # Only now, with all options validated, touch the database.
        delete = False
        if options['clean']:
            if project_ids:
                # Let update_cache() remove the per-project data.
                delete = True
            else:
                # Removing cache data for all projects is faster this way.
                connection.cursor().execute("TRUNCATE node_query_cache")

        for p in projects:
            self.stdout.write('Updating cache for project {}'.format(p.id))
            update_cache(p.id,
                         data_type,
                         orientations,
                         steps,
                         node_limit,
                         n_largest_skeletons_limit,
                         delete,
                         bb_limits,
                         log=self.stdout.write)
            self.stdout.write('Updated cache for project {}'.format(p.id))
# --- Example 3 ---
    def update_from_options(self, options):
        """Rebuild the node query cache for the selected projects.

        Supports both "section" caches (one depth resolution per
        orientation) and "grid" caches (a 3D grid of cells). Operates on the
        projects named by --project_id, or on all projects when none are
        given. All options are parsed and validated before any destructive
        database operation (like truncating the cache table), so that an
        invalid invocation cannot wipe existing cache data.

        Raises:
            CommandError: for an unknown cache type or data type, or for
                missing/inconsistent section depth steps.
            ValueError: for an unknown LOD strategy, or when --jobs or
                --depth_steps are used with a non-grid cache. (Kept as
                ValueError for backwards compatibility with callers.)
        """
        project_ids = options['project_id']
        if project_ids:
            projects = Project.objects.filter(id__in=project_ids)
        else:
            projects = Project.objects.all()

        cache_type = options['cache_type']
        if cache_type not in ('section', 'grid'):
            raise CommandError('Cache type must be one of: section, grid')

        # Fall back to XY sections if no orientation was provided.
        orientations = options['orientations']
        if isinstance(orientations, (list, tuple)):
            orientations = list(orientations)
        else:
            orientations = ['xy']

        # Section caches need one depth resolution per orientation.
        steps = options['steps']
        if cache_type == 'section':
            if not steps:
                raise CommandError(
                    'Need depth resolution per orientation (--step)')
            steps = [float(s) for s in steps]

            if len(steps) != len(orientations):
                raise CommandError(
                    'Need one depth resolution flag per orientation')

        data_type = options['data_type']
        if data_type not in ('json', 'json_text', 'msgpack'):
            raise CommandError('Type must be one of: json, json_text, msgpack')

        lod_strategy = options['lod_strategy']
        if lod_strategy not in ('linear', 'quadratic', 'exponential'):
            raise ValueError(f"Unknown LOD strategy: {lod_strategy}")

        jobs = options['jobs']
        if jobs > 1 and cache_type != 'grid':
            raise ValueError(
                "Parallel processing works currently only with grid caches")

        depth_steps = options['depth_steps']
        if depth_steps > 1 and cache_type != 'grid':
            raise ValueError(
                "Depth steps work currently only with grid caches")

        # Bounding box: [[min_x, min_y, min_z], [max_x, max_y, max_z]].
        bb_limits = [[
            float(options['min_x']),
            float(options['min_y']),
            float(options['min_z'])
        ], [
            float(options['max_x']),
            float(options['max_y']),
            float(options['max_z'])
        ]]

        # A missing or zero node limit means "no limit".
        node_limit = int(options['node_limit']) if options['node_limit'] else None

        n_largest_skeletons_limit = None
        if options['n_largest_skeletons_limit']:
            n_largest_skeletons_limit = int(
                options['n_largest_skeletons_limit'])

        n_last_edited_skeletons_limit = None
        if options['n_last_edited_skeletons_limit']:
            n_last_edited_skeletons_limit = int(
                options['n_last_edited_skeletons_limit'])

        # Resolve the optional user whose recent edits should be hidden.
        # NOTE: User.objects.get() raises if the username doesn't exist,
        # which is why this lookup happens before any cache truncation.
        hidden_last_editor_id = None
        if options['hidden_last_editor']:
            user = User.objects.get(username=options['hidden_last_editor'])
            hidden_last_editor_id = user.id

        # Grid cell dimensions; falsy values are passed through unchanged.
        cell_width = options['cell_width']
        if cell_width:
            cell_width = float(cell_width)

        cell_height = options['cell_height']
        if cell_height:
            cell_height = float(cell_height)

        cell_depth = options['cell_depth']
        if cell_depth:
            cell_depth = float(cell_depth)

        allow_empty = options['allow_empty']

        lod_levels = options['lod_levels']
        if lod_levels:
            lod_levels = int(lod_levels)

        lod_bucket_size = options['lod_bucket_size']
        if lod_bucket_size:
            lod_bucket_size = int(lod_bucket_size)

        chunksize = options['chunk_size']
        ordering = options['order']
        progress = options['progress']

        # Only now, with all options validated, touch the database.
        delete = False
        if options['clean']:
            if project_ids:
                # Let the per-project update remove the existing data.
                delete = True
            else:
                # Removing cache data for all projects is faster this way.
                connection.cursor().execute("TRUNCATE node_query_cache")

        for p in projects:
            self.stdout.write(
                f'Updating {cache_type} cache for project {p.id}')
            if cache_type == 'section':
                update_cache(p.id,
                             data_type,
                             orientations,
                             steps,
                             node_limit,
                             n_largest_skeletons_limit,
                             n_last_edited_skeletons_limit,
                             hidden_last_editor_id,
                             delete,
                             bb_limits,
                             log=self.stdout.write,
                             ordering=ordering)
            elif cache_type == 'grid':
                update_grid_cache(p.id,
                                  data_type,
                                  orientations,
                                  cell_width,
                                  cell_height,
                                  cell_depth,
                                  node_limit,
                                  n_largest_skeletons_limit,
                                  n_last_edited_skeletons_limit,
                                  hidden_last_editor_id,
                                  delete,
                                  bb_limits,
                                  log=self.stdout.write,
                                  progress=progress,
                                  allow_empty=allow_empty,
                                  lod_levels=lod_levels,
                                  lod_bucket_size=lod_bucket_size,
                                  lod_strategy=lod_strategy,
                                  jobs=jobs,
                                  depth_steps=depth_steps,
                                  chunksize=chunksize,
                                  ordering=ordering)
            self.stdout.write(f'Updated {cache_type} cache for project {p.id}')