Example #1
 def get(self, org):
     '''List organization discussions'''
     reuses = Reuse.objects(organization=org).only('id')
     datasets = Dataset.objects(organization=org).only('id')
     subjects = list(reuses) + list(datasets)
     qs = Discussion.objects(subject__in=subjects).order_by('-created')
     return list(qs)
Example #2
 def get(self, org):
     """List organization discussions"""
     reuses_ids = [r.id for r in Reuse.objects(organization=org).only("id")]
     datasets_ids = [d.id for d in Dataset.objects(organization=org).only("id")]
     ids = reuses_ids + datasets_ids
     qs = Discussion.objects(subject__in=ids).order_by("-created")
     return list(qs)
Example #3
 def get(self, org):
     '''List organization discussions'''
     reuses_ids = [r.id for r in Reuse.objects(organization=org).only('id')]
     datasets_ids = [d.id
                     for d in Dataset.objects(organization=org).only('id')]
     ids = reuses_ids + datasets_ids
     qs = Discussion.objects(subject__in=ids).order_by('-created')
     return list(qs)
Example #4
File: api.py Project: grouan/udata
 def get(self, org):
     '''List organization issues'''
     reuses_ids = [r.id for r in Reuse.objects(organization=org).only('id')]
     datasets_ids = [d.id
                     for d in Dataset.objects(organization=org).only('id')]
     ids = reuses_ids + datasets_ids
     qs = Issue.objects(subject__in=ids).order_by('-created')
     return list(qs)
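Examples #1 to #4 all follow the same pattern: collect the ids of an organization's reuses and datasets, then filter Discussion (or Issue) by subject, newest first. A minimal consolidated sketch, reusing the udata models shown above and MongoEngine's scalar() to fetch only the ids (the helper names are hypothetical):

def org_subject_ids(org):
    # Ids of every Reuse and Dataset owned by the organization.
    reuse_ids = list(Reuse.objects(organization=org).scalar('id'))
    dataset_ids = list(Dataset.objects(organization=org).scalar('id'))
    return reuse_ids + dataset_ids

def org_discussions(org):
    # Newest-first discussions whose subject belongs to the organization.
    return list(Discussion.objects(subject__in=org_subject_ids(org))
                .order_by('-created'))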
Example #5
def preview(domain, id):
    if current_app.config.get('PREVIEW_MODE') is None:
        abort(404)
    dataset = Dataset.objects(__raw__={
        'extras.harvest:remote_id': id,
        'extras.harvest:domain': domain
    }).first()
    if not dataset:
        abort(404)
    return render_template('preview.html', dataset=dataset)
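One way this view might be wired up, assuming a plain Flask blueprint (the names below are hypothetical; udata's actual registration may differ):

from flask import Blueprint

preview_bp = Blueprint('preview', __name__)
# The URL converters match the view's (domain, id) arguments.
preview_bp.add_url_rule('/preview/<domain>/<id>', view_func=preview)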
Example #6
 def get(self, org, format):
     if org.deleted:
         api.abort(410)
     params = multi_to_dict(request.args)
     page = int(params.get('page', 1))
     page_size = int(params.get('page_size', 100))
     datasets = Dataset.objects(organization=org).visible().paginate(page, page_size)
     catalog = build_org_catalog(org, datasets, format=format)
     # bypass flask-restplus make_response, since graph_response
     # handles the content negotiation directly
     return make_response(*graph_response(catalog, format))
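The int() casts above raise ValueError on non-numeric query parameters; a defensive variant (a sketch, not the project's actual behaviour) could reuse api.abort from the same namespace:

     try:
         page = int(params.get('page', 1))
         page_size = int(params.get('page_size', 100))
     except ValueError:
         api.abort(400, 'page and page_size must be integers')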
Example #7
def migrate():
    '''
    Migrate zones from old to new ids in datasets.

    Should only be run once with the new version of geozones w/ geohisto.
    '''
    counter = Counter()  # missing keys default to 0
    qs = GeoZone.objects.only('id', 'level', 'successors')
    # Iterate over datasets with zones
    for dataset in Dataset.objects(spatial__zones__gt=[]):
        counter['datasets'] += 1
        new_zones = []
        for current_zone in dataset.spatial.zones:
            counter['zones'] += 1
            level, code, validity = geoids.parse(current_zone.id)
            zone = qs(level=level, code=code).valid_at(validity).first()
            if not zone:
                log.warning('No match for %s: skipped', current_zone.id)
                counter['skipped'] += 1
                continue
            previous = None
            while not zone.is_current and len(zone.successors) == 1 and zone.id != previous:
                previous = zone.id
                zone = qs(id=zone.successors[0]).first() or zone
            new_zones.append(zone.id)
            counter[zone.level] += 1
        dataset.update(
            spatial=SpatialCoverage(
                granularity=dataset.spatial.granularity,
                zones=list(new_zones)
            )
        )
    level_summary = '\n'.join([
        ' - {0}: {1}'.format(l.id, counter[l.id])
        for l in GeoLevel.objects.order_by('admin_level')
    ])
    summary = '\n'.join([dedent('''\
    Summary
    =======
    Processed {zones} zones in {datasets} datasets:\
    '''.format(**counter)), level_summary])
    log.info(summary)
    log.info('Done')
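The while loop above walks chains of single successors; extracted into a standalone helper (a sketch reusing the same qs queryset) it reads:

def follow_successors(zone, qs):
    # Stop when the zone is current, has zero or multiple successors,
    # or the lookup did not advance (successor missing or self-referencing).
    previous = None
    while not zone.is_current and len(zone.successors) == 1 and zone.id != previous:
        previous = zone.id
        zone = qs(id=zone.successors[0]).first() or zone
    return zone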
Example #8
def migrate():
    '''
    Migrate zones from old to new ids in datasets.

    Should only be run once with the new version of geozones w/ geohisto.
    '''
    counter = Counter()
    drom_zone = GeoZone.objects(id='country-subset:fr:drom').first()
    dromcom_zone = GeoZone.objects(id='country-subset:fr:dromcom').first()
    # Iterate over datasets with zones
    for dataset in Dataset.objects(spatial__zones__gt=[]):
        counter['datasets'] += 1
        new_zones = []
        for zone in dataset.spatial.zones:
            if zone.id.startswith('fr/'):
                counter['zones'] += 1
                country, kind, zone_id = zone.id.split('/')
                zone_id = zone_id.upper()  # Corsica 2a/b case.
                if kind == 'town':
                    counter['towns'] += 1
                    new_zones.append(
                        GeoZone
                        .objects(code=zone_id, level='fr:commune')
                        .valid_at(date.today())
                        .first())
                elif kind == 'county':
                    counter['counties'] += 1
                    new_zones.append(
                        GeoZone
                        .objects(code=zone_id, level='fr:departement')
                        .valid_at(date.today())
                        .first())
                elif kind == 'region':
                    counter['regions'] += 1
                    # Only link to pre-2016 regions which kept the same id.
                    new_zones.append(
                        GeoZone
                        .objects(code=zone_id, level='fr:region')
                        .first())
                elif kind == 'epci':
                    counter['epcis'] += 1
                    new_zones.append(
                        GeoZone
                        .objects(code=zone_id, level='fr:epci')
                        .valid_at(dataset.created_at.date())
                        .first())
                else:
                    new_zones.append(zone)
            elif zone.id.startswith('country-subset/fr'):
                counter['zones'] += 1
                subset, country, kind = zone.id.split('/')
                if kind == 'dom':
                    counter['drom'] += 1
                    new_zones.append(drom_zone)
                elif kind == 'domtom':
                    counter['dromcom'] += 1
                    new_zones.append(dromcom_zone)
            elif zone.id.startswith('country/'):
                counter['zones'] += 1
                counter['countries'] += 1
                new_zones.append(zone.id.replace('/', ':'))
            elif zone.id.startswith('country-group/'):
                counter['zones'] += 1
                counter['countrygroups'] += 1
                new_zones.append(zone.id.replace('/', ':'))
            else:
                new_zones.append(zone)
        dataset.update(
            spatial=SpatialCoverage(
                granularity=dataset.spatial.granularity,
                zones=[getattr(z, 'id', z) for z in new_zones if z]
            )
        )
    # vformat() keeps the Counter as the mapping, so missing keys default to 0
    # instead of raising KeyError (as '...'.format(**counter) would).
    log.info(Formatter().vformat('''Summary
    Processed {zones} zones in {datasets} datasets:
    - {countrygroups} country groups (World/UE)
    - {countries} countries
    - France:
        - {regions} regions
        - {counties} counties
        - {epcis} EPCIs
        - {towns} towns
        - {drom} DROM
        - {dromcom} DROM-COM
    ''', (), counter))
    log.info('Done')
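The 'fr/' branch above maps legacy ids of the form 'fr/<kind>/<code>' onto the new levels. The correspondence it implements can be summarised as follows (an illustrative sketch, not code from the project):

OLD_KIND_TO_LEVEL = {
    'town': 'fr:commune',
    'county': 'fr:departement',
    'region': 'fr:region',
    'epci': 'fr:epci',
}

def split_legacy_fr_id(old_id):
    # 'fr/town/2a004' -> ('fr:commune', '2A004'); unknown kinds map to None.
    _country, kind, code = old_id.split('/')
    return OLD_KIND_TO_LEVEL.get(kind), code.upper()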
Example #9
def get_dataset(id_or_slug):
    obj = Dataset.objects(slug=id_or_slug).first()
    return obj or Dataset.objects.get(id=id_or_slug)
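A defensive variant that returns None instead of raising when the id is unknown or malformed (a sketch using MongoEngine's standard exceptions; the helper name is hypothetical):

from mongoengine.errors import DoesNotExist, ValidationError

def get_dataset_or_none(id_or_slug):
    # Try the slug first, then fall back to the primary key.
    obj = Dataset.objects(slug=id_or_slug).first()
    if obj is not None:
        return obj
    try:
        return Dataset.objects.get(id=id_or_slug)
    except (DoesNotExist, ValidationError):
        return None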
Example #10
 def count_max_dataset_followers(self):
     dataset = (Dataset.objects(metrics__followers__gt=0)
                .visible()
                .order_by('-metrics.followers')
                .first())
     self.metrics['max_dataset_followers'] = (
         dataset.metrics['followers'] if dataset else 0)
     self.save()
Example #11
 def count_max_dataset_reuses(self):
     dataset = (Dataset.objects(metrics__reuses__gt=0)
                .visible()
                .order_by('-metrics.reuses')
                .first())
     self.metrics['max_dataset_reuses'] = (
         dataset.metrics['reuses'] if dataset else 0)
     self.save()
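Examples #10 and #11 differ only by the metric name; a generic version could look like this (a hypothetical helper following the same pattern, not part of udata):

 def count_max_dataset_metric(self, name):
     '''Largest value of the given metric among visible datasets.'''
     dataset = (Dataset.objects(**{'metrics__{0}__gt'.format(name): 0})
                .visible()
                .order_by('-metrics.{0}'.format(name))
                .first())
     self.metrics['max_dataset_{0}'.format(name)] = (
         dataset.metrics[name] if dataset else 0)
     self.save()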