Exemple #1
0
    "-c",
    "--campaign",
    required=False,
    type=click.Choice(
        list(Campaign.objects.all().values_list("public_id", flat=True))),
    help=
    "For Modeling spreadsheets, you must specify a campaign to which the new model belongs.",
)
@click.option(
    "-u",
    "--user",
    required=False,
    type=str,
    help="The email of the user that is uploading the file.",
)
@click.argument("file", required=True, type=click.File("rb"))
def command(project, kind, campaign, user, file):
    """
    Import a spreadsheet file into the database for a given project.

    If the spreadsheet is invalid, this will simply print an error message;
    the system will remain in its previous state.

    If the spreadsheet is valid, this will cause the spreadsheet to be "activated",
    which means its imported contents will be applied to the underlying database.
    """
    # Try to find the project in question.
    public_id = project
    try:
        project = Project.objects.get(public_id=public_id)
    except Exception:
Exemple #2
0
            user = User.objects.get(email=email)
        except User.DoesNotExist:
            user = User.objects.create_user(username=email.split('@')[0],
                                            email=email)
        users.append(user)
    return users


def add_users_to_group(group, users):
    """Attach every user in ``users`` to ``group``'s membership, then save the group once."""
    members = group.user_set
    for member in users:
        members.add(member)
    group.save()


@click.command()
@click.argument('user_file', type=click.File('r'))
@click.option('--group',
              'groupname',
              type=click.STRING,
              help='Name of group to which all users should be added')
@click.option('--dryrun',
              default=False,
              is_flag=True,
              help='If set, no changes will be made to the database')
def command(user_file, groupname, dryrun):
    '''
    Bulk creates users from email addresses in the specified text file,
    which should contain one email address per line.
    If the optional "--group <GROUPNAME>" argument is specified, then all the
    users (either found or created) are added to the matching group.
    '''
import csv
from pprint import pprint

import djclick as click
from appliances.models import Appliance, ProductLine


@click.command()
@click.argument("csv_file", type=click.File("r"))
def command(csv_file):
    """Read (product_line, model_number, serial_number) rows from CSV_FILE,
    preview the resulting Appliance records, and bulk-insert them on confirmation."""
    rows = csv.reader(csv_file)

    appliances = []
    for line_name, model_number, serial_number in rows:
        # get_or_create returns (instance, created); only the instance is needed.
        product_line, _ = ProductLine.objects.get_or_create(name=line_name)
        appliances.append(
            Appliance(
                product_line=product_line,
                serial_number=serial_number,
                model_number=model_number,
            ))

    print("Here are the appliances extracted:")
    pprint(appliances)

    if click.confirm("Proceed?"):
        Appliance.objects.bulk_create(appliances)
import djclick as click
from djclick.params import ModelInstance

from ...models import BackingIndex, Platform
from ...depgraph import DependencyGraph, GraphFormatter


@click.command()
@click.argument('index',
                nargs=-1,
                type=ModelInstance(BackingIndex, lookup='slug'))
@click.argument('platform', type=ModelInstance(Platform, lookup='slug'))
@click.argument('requirements_in', type=click.File('r'))
@click.argument('requirements_txt', type=click.File('w'))
def command(index, platform, requirements_in, requirements_txt):
    """Compile a requirements.in against the given index(es)/platform and
    write the resolved dependency graph out as a requirements.txt."""
    source = requirements_in.read()
    dep_graph = DependencyGraph(index, platform)
    dep_graph.compile(source)
    GraphFormatter().write(requirements_txt, dep_graph)
Exemple #5
0
from typing import TextIO

from django.core.files import File
import djclick as click

from optimal_transport_morphometry.core.batch_parser import load_batch_from_csv
from optimal_transport_morphometry.core.models import Atlas, Dataset, UploadBatch


@click.command()
@click.argument('csv', type=click.File())
@click.option('--dataset-name', default='Test dataset')
@click.option('--clear', is_flag=True)
def command(csv: TextIO, dataset_name: str, clear: bool) -> None:
    """Seed the four reference atlases if missing, optionally wipe prior
    upload batches, then load a new batch from CSV into the named dataset."""
    for name in ['grey', 'white', 'csf', 'T1']:
        atlas_name = f'{name}.nii'
        if Atlas.objects.filter(name=atlas_name).exists():
            # Atlas already present; skip the upload.
            continue
        print(f'Uploading {name} atlas')
        with open(f'sample_data/atlases/{name}.nii', 'rb') as fd:
            Atlas.objects.create(name=atlas_name,
                                 blob=File(fd, name=atlas_name))

    if clear:
        print('Deleting all upload batches')
        UploadBatch.objects.all().delete()

    # Add dataset and pending upload batch
    dataset, _ = Dataset.objects.get_or_create(name=dataset_name)
    load_batch_from_csv(csv, dest=dataset)
Exemple #6
0
from django.contrib.sites.models import Site
from cacheops import invalidate_obj, invalidate_model
from alibrary.models import Media, Playlist


@click.group()
def cli():
    # The docstring doubles as the group's --help text; the body needs no code.
    """Maintenance CLI"""


@cli.command()
@click.option('--limit-range',
              type=str,
              help='limit range/offset. e.g. "170:190"')
@click.option('--dump-to', type=click.File('ab'))
@click.option('--load-from', type=click.File('rb'))
@click.option('--tolerance', type=float, default=0.5)
@click.option('--log-file', type=click.File('wb'))
def repair_durations(limit_range, dump_to, load_from, tolerance, log_file):
    """
    Repair/reprocess master durations.
    """
    from base.audio.fileinfo import FileInfoProcessor

    items_to_reprocess = []
    affected_playlists = []
    affected_playlist_ids = []

    # invalidate cache for Media
    invalidate_model(Media)