def load_samples(url: str, dir: str, local: bool = False, ids: Optional[List[str]] = None):
    """Import sample JSON files from *dir* into the application.

    :param url: base URL of the remote server (used when ``local`` is False).
    :param dir: directory containing ``<id>.json`` files to import.
    :param local: when True, import directly through a local Flask app context
        instead of POSTing to the remote API.
    :param ids: optional list of sample ids; when omitted, every ``*.json``
        file in *dir* is imported.
    """
    src_dir = Path(dir)
    # pathlib replacement for os.makedirs(src_dir, exist_ok=True)
    src_dir.mkdir(parents=True, exist_ok=True)

    # NOTE: this helper was previously also named `load_samples`, shadowing
    # the enclosing function; renamed to avoid the confusing self-shadowing.
    def _import_all(set_data: Callable[[Any], Any]):
        files = [src_dir / f'{id}.json' for id in ids] if ids else src_dir.glob('*.json')
        for fname in files:
            with open(fname, 'r') as f:
                data = json.load(f)
            click.echo(f'importing {fname}')
            set_data(data)
            click.echo(f'imported {fname}')

    if local:
        with create_app().app_context():
            _import_all(lambda data: samples.upload(data))
            db.session.commit()
    else:
        click.echo(f'pushing to: {url}{api_prefix}')
        _import_all(lambda data: requests.post(f'{url}{api_prefix}', json=data).raise_for_status())
def dump_samples(url: str, dir: str, local: bool = False, ids: Optional[List[str]] = None):
    """Export sample data as JSON files into *dir*.

    :param url: base URL of the remote server (used when ``local`` is False).
    :param dir: output directory; created if missing. One ``<id>.json`` file
        is written per sample.
    :param local: when True, read directly through a local Flask app context
        instead of GETting from the remote API.
    :param ids: optional list of sample ids; when omitted, all sample groups
        reported by the server/app are dumped.
    """
    # Was PurePath, which has no filesystem methods; Path matches load_samples
    # and supports mkdir/open directly.
    out_dir = Path(dir)
    out_dir.mkdir(parents=True, exist_ok=True)

    # NOTE: this helper was previously also named `dump_samples`, shadowing
    # the enclosing function; renamed to avoid the confusing self-shadowing.
    def _dump_all(groups: List[Any], get_data: Callable[[str], Any]):
        for group in groups:
            for sample in group['files']:
                fid = sample['id']
                fname = sample['name']
                data = get_data(fid)
                with open(out_dir / f'{fid}.json', 'w') as f:
                    json.dump(data, f, indent=2)
                click.echo(f'dumped {fid} {fname}')

    # When explicit ids are given, synthesize a single pseudo-group so the
    # same dump loop can be reused.
    arg_ids = [dict(files=[dict(id=id, name=id) for id in ids])] if ids else []
    if local:
        with create_app().app_context():
            groups = samples.list_samples() if not ids else arg_ids
            _dump_all(groups, lambda fid: samples.dump(CSVFile.query.get(fid)))
    else:
        click.echo(f'requesting: {url}{api_prefix}')
        groups = requests.get(
            f'{url}{api_prefix}').json() if not ids else arg_ids
        _dump_all(
            groups, lambda fid: requests.get(f'{url}{api_prefix}/{fid}').json())
def unmark_sample(url: str, ids: List[str], local: bool = False):
    """Remove the sample flag from each CSV file listed in *ids*.

    Works directly through a local app context when ``local`` is True,
    otherwise issues DELETE requests against the remote API.
    """
    if not local:
        click.echo(f'using: {url}{api_prefix}')
        for sample_id in ids:
            requests.delete(f'{url}{api_prefix}/{sample_id}').raise_for_status()
        return

    with create_app().app_context():
        for sample_id in ids:
            disabled = samples.disable_sample(CSVFile.query.get(sample_id))
            db.session.add(disabled)
        db.session.commit()
def app():
    """Yield a Flask app configured for testing (pytest fixture style).

    Uses a temporary upload folder and an in-memory sqlite database; all
    tables are created before the app is handed to the caller.
    """
    with TemporaryDirectory() as upload_folder:
        app = create_app({
            'ENV': 'testing',
            # was f'sqlite://' — an f-string with no placeholders (F541)
            'SQLALCHEMY_DATABASE_URI': 'sqlite://',
            'UPLOAD_FOLDER': upload_folder,
            'OPENCPU_API_ROOT': 'http://localhost:8004/ocpu/library'
        })
        with app.app_context():
            db.create_all()
            yield app
def change_group(url: str, group: str, description: Optional[str], local: bool = False, order: Optional[int] = None):
    """Update the description and ordering of a sample group.

    When ``local`` is True the change is committed through a local app
    context; otherwise it is PATCHed to the remote group API.
    """
    if local:
        with create_app().app_context():
            samples.change_group(group, description, order)
            db.session.commit()
        return

    click.echo(f'using: {url}{group_api_prefix}')
    payload = dict(description=description, order=order)
    requests.patch(f'{url}{group_api_prefix}/{group}', json=payload).raise_for_status()
def mark_sample(url: str, group: str, description: Optional[str], ids: List[str], local: bool = False, order: Optional[int] = None):
    """Flag each CSV file listed in *ids* as a sample belonging to *group*.

    :param url: base URL of the remote server (used when ``local`` is False).
    :param group: sample group name to assign.
    :param description: optional group description.
    :param ids: CSV file ids to mark.
    :param local: when True, mark via a local app context instead of PUTs.
    :param order: optional ordering value for the group.
    """
    if local:
        with create_app().app_context():
            for sample_id in ids:
                csv = samples.enable_sample(CSVFile.query.get(sample_id), group, description, order)
                db.session.add(csv)
            db.session.commit()
    else:
        click.echo(f'using: {url}{api_prefix}')
        # payload is identical for every id — build it once, not per iteration
        data = dict(group=group, description=description, order=order)
        for sample_id in ids:
            requests.put(f'{url}{api_prefix}/{sample_id}', json=data).raise_for_status()
def create_tables():
    """Create every database table and stamp the schema at the migration head."""
    with create_app().app_context():
        db.create_all()
        # record the current alembic revision so future migrations start clean
        flask_migrate.stamp()
from logging.config import dictConfig

from viime.app import create_app

# Configure logging before the app is created so startup messages are
# captured; everything is routed through Flask's WSGI error stream.
LOGGING_CONFIG = {
    'version': 1,
    'formatters': {
        'default': {
            'format': '[%(asctime)s] %(levelname)s in %(module)s: %(message)s',
        },
    },
    'handlers': {
        'wsgi': {
            'class': 'logging.StreamHandler',
            'stream': 'ext://flask.logging.wsgi_errors_stream',
            'formatter': 'default',
        },
    },
    'root': {
        'level': 'INFO',
        'handlers': ['wsgi'],
    },
}
dictConfig(LOGGING_CONFIG)

# WSGI entry point.
app = create_app()