def command(): """Render all commands spec. Render all commands with their spec, examples, schemas etc. so far created for a current version of the service. """ config = Config() commands_dir_path = os.path.join(Config.get_lily_path(), 'commands') if not os.path.exists(commands_dir_path): os.mkdir(commands_dir_path) version = config.next_version or config.version commands_path = os.path.join(commands_dir_path, f'{version}.json') with open(commands_path, 'w') as f: commands = CommandsRenderer().render() enums = commands.pop('enums') commands = { name: CommandSerializer(conf).data for name, conf in commands.items() } f.write( json.dumps({ **commands, 'enums': enums, }, indent=4, sort_keys=False)) click.secho(f'Commands rendered for to file {commands_path}', fg='green')
async def command(scenario_id, decision, role_name):
    """Add a decision to the current scenario period, step the model and
    save the result.
    """
    if scenario_id is None:
        click.secho("ERROR: scenario_id must be specified", fg='red')
        return

    async with games_client as api_session:
        game = await api_session.games.get(slug='simpl-div')
        role = await api_session.roles.get(
            game=game.id,
            name=role_name,
        )
        period = await api_session.periods.get(scenario=scenario_id)

        # add the submitted decision to the current period
        await save_decision(period.id, role.id, decision)

        result = None
        decisions = await api_session.decisions.filter(scenario=period.id)
        if len(decisions) == 2:
            # calculate the new result
            result = await divide(scenario_id)

        click.echo("result is {}".format(result))

def crawl_releases_for_media_mbid(id):
    """Loop through all releases with an mb_id and try to find ids for
    their tracks."""
    qs = Release.objects.filter(relations__service='musicbrainz')

    if id:
        # case if an explicit id was given
        qs = qs.filter(pk__in=[id])
    else:
        # narrow the queryset:
        # we have to get the objects *with* relations first and then exclude
        # them from the qs (the other way round is too inefficient).
        _m_qs = Media.objects.all()
        _m_qs_with_mb = Media.objects.filter(relations__service='musicbrainz')
        _m_ids = _m_qs.exclude(pk__in=_m_qs_with_mb).values_list('id', flat=True)
        qs = qs.filter(media_release__pk__in=_m_ids).distinct()

    click.secho('Num. objects to process: {}'.format(qs.count()), fg='green')

    total_mb_ids_added = []
    for obj in qs.nocache():
        mb_ids_added = release_fetch_media_mb_ids(obj=obj)
        if mb_ids_added:
            total_mb_ids_added += mb_ids_added

    click.secho('Total mb ids added: {}'.format(
        len(total_mb_ids_added)
    ), fg='green')

def command(email): """Create auth token for a given account.""" account = Account.objects.get(email=email) token = AuthToken.encode(account) click.secho(f"Auth Token: '{token}'", fg='green')
def suisa_statistics(channel_id, year, month, path):
    """
    usage: ./manage.py statistics_cli suisa_statistics -i 1 -y 2019 -m 03

    If 'year' and 'month' are not both specified, the last completed
    month will be used.
    """
    if not (year and month):
        now = datetime.now()
        if now.month == 1:
            year = now.year - 1
            month = 12
        else:
            year = now.year
            month = now.month - 1

    click.echo('generate label statistics: channel: {} - year: {} - month: {} - output: {}'.format(
        channel_id, year, month, path
    ))

    channel = Channel.objects.get(pk=channel_id)
    click.secho('{}'.format(channel))

    monthly_for_channel_as_xls(
        channel=channel,
        year=year,
        month=month,
        output=path,
    )

def command(database, filename):
    """
    Django management command to make a backup of a PostgreSQL database.
    """
    click.secho(
        "Backing up the database '{database}' on host '{host}' to file '{filename}'..."
        .format(
            database=settings.DATABASES[database]['NAME'],
            host=settings.DATABASES[database]['HOST'],
            filename=filename,
        ))

    # Make sure the backup path exists
    backup_path = get_backup_path()
    if not os.path.exists(backup_path):
        os.makedirs(backup_path)

    os.environ["PGPASSWORD"] = settings.DATABASES[database]['PASSWORD']
    os.system(
        'pg_dump -Fc -c -x -h {host} -U {username} --file={filename} {database}'
        .format(
            host=settings.DATABASES[database]['HOST'],
            username=settings.DATABASES[database]['USER'],
            database=settings.DATABASES[database]['NAME'],
            filename=filename,
        ))
    os.environ["PGPASSWORD"] = ''

def command(initial, index):
    if not index.last_update_serial or initial:
        chunk_size = 150  # Number of packages to update per task
        concurrency = 30  # Number of concurrent tasks

        # As we are syncing everything, get the current serial.
        index.last_update_serial = index.client.changelog_last_serial()

        # Get the set of all existing packages. We will discard IDs of updated
        # packages from it and then remove all the remaining packages.
        all_package_ids = set(index.package_set.values_list('id', flat=True))

        # Get all the names of the packages on the selected index.
        click.secho('Fetching list of packages from {}...'.format(index.url),
                    fg='yellow')
        all_packages = index.client.list_packages()

        # Import all packages metadata in different chunks and tasks.
        click.secho('Importing {} packages...'.format(len(all_packages)),
                    fg='yellow')

        # Create a generator of (index.pk, all_packages[i:i+chunk_size])
        # tuples (see the sketch of a chunking helper after this function).
        args = iterzip(
            itertools.repeat(index.pk),
            utils.iter_chunks(all_packages, chunk_size),
        )

        # Submit each tuple in args to the workers, but limit it to at most
        # `concurrency` running tasks
        results_iterator = utils.bounded_submitter(
            import_packages,
            concurrency,
            args,
        )

        with click.progressbar(length=len(all_packages), show_pos=True) as bar:
            for succeeded, ignored, failed in results_iterator:
                bar.update(len(succeeded) + len(ignored) + len(failed))
                all_package_ids -= set(succeeded.values())
                if failed:
                    click.echo('')
                    for k, v in six.iteritems(failed):
                        click.secho('Failed to import {} ({})'.format(k, v),
                                    fg='red')

        # Remove the set of not-updated (i.e. no longer found on the index)
        # packages from the database.
        click.secho('Removing {} outdated packages...'.format(
            len(all_package_ids)), fg='yellow')
        index.package_set.filter(pk__in=all_package_ids).delete()

        index.save(update_fields=['last_update_serial'])

    # Sync everything since the last serial, also when initial == True, as
    # something might have changed in the meantime...
    events = index.client.changelog_last_serial() - index.last_update_serial
    if events:
        click.secho('Syncing remaining updates...', fg='yellow')
        sync_iter = index.itersync()
        with click.progressbar(sync_iter, length=events, show_pos=True) as bar:
            for event in bar:
                pass

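# NOTE: `iterzip`, `utils.iter_chunks` and `utils.bounded_submitter` above are
# project helpers whose definitions are not shown here. As a rough guide, a
# chunking helper matching the `iter_chunks(all_packages, chunk_size)` call
# could look like this minimal sketch; only the name and signature come from
# the call site, the body is an assumption:
import itertools


def iter_chunks(iterable, chunk_size):
    """Yield successive lists of at most `chunk_size` items from `iterable`."""
    iterator = iter(iterable)
    while True:
        chunk = list(itertools.islice(iterator, chunk_size))
        if not chunk:
            return
        yield chunk


# usage: list(iter_chunks(range(7), 3)) -> [[0, 1, 2], [3, 4, 5], [6]]
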
def scan(id): """(Re-)scan directory""" try: massimport = Massimport.objects.get(pk=id) except Massimport.DoesNotExist as e: click.secho('Massimport session with id: {} does not exist.'.format(id), bold=True, fg='red') return massimport.scan()
def update(id): """Update/poll status""" try: massimport = Massimport.objects.get(pk=id) except Massimport.DoesNotExist as e: click.secho('Massimport session with id: {} does not exist.'.format(id), bold=True, fg='red') return massimport.update()
def wrapper(*args, **kwargs):
    start_time = timezone.now()
    result = fn(*args, **kwargs)
    end_time = timezone.now()

    timedelta = end_time - start_time
    click.secho(f"Done in {timedelta.total_seconds()}s")
    return result

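# NOTE: `wrapper` above closes over a free variable `fn`, so it is clearly the
# inner function of a timing decorator. A minimal sketch of the presumed
# enclosing decorator; the `timed` name and the use of `functools.wraps` are
# assumptions, not part of the original snippet:
import functools

import click
from django.utils import timezone


def timed(fn):
    """Report how long the wrapped callable took, then return its result."""
    @functools.wraps(fn)
    def wrapper(*args, **kwargs):
        start_time = timezone.now()
        result = fn(*args, **kwargs)
        end_time = timezone.now()

        timedelta = end_time - start_time
        click.secho(f"Done in {timedelta.total_seconds()}s")
        return result
    return wrapper
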
def create_user(first_name, last_name, email, password, password_repeat):
    if password != password_repeat:
        return click.secho("Passwords do not match")

    user = User.objects.create_user(
        email=email,
        password=password,
        first_name=first_name,
        last_name=last_name,
    )
    user.save()
    click.secho(f'User {user.name} ({user.email}) created successfully')

def create_device(name, tracker_id):
    if not tracker_id or not name:
        return click.secho("Please provide a name and tracker id")

    device = Device(name=name, tracker_id=tracker_id)
    device.save()
    click.secho(
        f'Device {device.name} with tracker id: '
        + click.style(device.tracker_id, bold=True, fg='green')
        + ' created successfully.\nNow you can add this device for a user with '
        + 'the given tracker id.'
    )

def delete(id): """Delete session""" try: massimport = Massimport.objects.get(pk=id) except Massimport.DoesNotExist as e: click.secho('Massimport session with id: {} does not exist.'.format(id), bold=True, fg='red') return if click.confirm('Do you want to delete session id: {} ?'.format(id), default='Y'): massimport.delete()
def command(username: str):
    try:
        user = User.objects.get(username=username)
    except User.DoesNotExist:
        click.secho(f"{username} does not exist", fg="red")
    else:
        user.is_staff = True
        user.is_superuser = True
        user.save()
        click.secho(f"{username} has been made a superuser", fg="green")

def backup_entity(entity: str, path: pathlib.Path, format_: str) -> None:
    """ Backup data to file with given format """
    model, resource = determine_model_and_resource(entity, method="backup")
    num_entries = model.objects.count()
    click.secho(f"Exporting {num_entries} {entity}", fg="green")
    dataset = resource().export()
    formatted = dataset.export(format_)
    filename = (path / entity).with_suffix("." + format_)
    with open(str(filename), "w") as f:
        f.write(formatted)

def get_most_recent_backup_directory():
    """ Get the most recent backup directory """
    try:
        paths = sorted(
            pathlib.Path("local/backup").iterdir(),
            key=lambda f: f.stat().st_mtime,
        )
        return paths[-1]
    except (FileNotFoundError, IndexError):
        click.secho("No backup to restore", fg="red")
        exit()

def _parse_msgpack_file(filepath):
    with open(filepath, "rb") as f:
        binary_data = f.read()
    try:
        root, lookup = msgpack.unpackb(binary_data, strict_map_key=False)
    except UnicodeDecodeError:
        click.secho(f"Could not decode {filepath}", fg="yellow")
        return None
    return _map_msgpack(root, lookup)

def read_stream_with_progress(
    stream: TextIOWrapper, progress_label: str, length: int = None, reader=None
):
    length = length or sum(1 for _ in stream)
    reader = reader or stream
    stream.seek(0)
    click.secho(f"Found {length} lines")
    with click.progressbar(
        reader, length=length, label=progress_label
    ) as progress_reader:
        yield progress_reader

def gather_event_date_from_prompt():
    date = None
    while date is None:
        date_str = click.prompt(
            click.style(
                "What is the date of the event? (Format: DD/MM/YYYY or MM/YYYY)",
                bold=True, fg="yellow",
            )
        )
        date = get_approximate_date(date_str)
        if date is None:
            click.secho("Wrong format! Try again :)", bold=True, fg="red")
    return date

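# NOTE: `get_approximate_date` is defined elsewhere; the prompt text above
# implies it accepts DD/MM/YYYY or MM/YYYY and returns None on bad input.
# A minimal sketch under those assumptions (the real helper may return a
# richer approximate-date type; a plain `date` is used here for illustration):
from datetime import datetime


def get_approximate_date(date_str):
    """Parse 'DD/MM/YYYY' or 'MM/YYYY'; return None if neither format fits."""
    for fmt in ("%d/%m/%Y", "%m/%Y"):
        try:
            return datetime.strptime(date_str.strip(), fmt).date()
        except ValueError:
            continue
    return None
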
def update(id): """Update/poll status""" try: massimport = Massimport.objects.get(pk=id) except Massimport.DoesNotExist as e: click.secho( 'Massimport session with id: {} does not exist.'.format(id), bold=True, fg='red') return massimport.update()
def invite_team_to_slack(team):
    """
    This uses Slack API to invite organizers to our Slack channel
    """
    for member in team:
        try:
            user_invite(member.email, member.first_name)
            click.secho("OK {} invited to Slack".format(member.email),
                        fg='green')
        except SlackerError as e:
            click.secho("!! {} not invited to Slack, because {}".format(
                member.email, e), fg='red')

def enqueue(id, limit):
    """Send files to import queue"""
    try:
        massimport = Massimport.objects.get(pk=id)
    except Massimport.DoesNotExist:
        click.secho('Massimport session with id: {} does not exist.'.format(id),
                    bold=True, fg='red')
        return

    qs = massimport.files.filter(status=0)
    click.secho('Files total: {} - limit: {}'.format(qs.count(), limit),
                bold=True)

    for item in qs[0:limit]:
        item.enqueue()

def scan(id): """(Re-)scan directory""" try: massimport = Massimport.objects.get(pk=id) except Massimport.DoesNotExist as e: click.secho( 'Massimport session with id: {} does not exist.'.format(id), bold=True, fg='red') return massimport.scan()
def _request_check(media):
    """
    requests preflight check for media on preflight api
    """
    connection.close()
    try:
        _p, _c = PreflightCheck.objects.get_or_create(media=media)
    except Exception as e:
        click.secho('unable to request preflight check for media: {} - {}'.format(
            media.pk, e))

def command(world_id):
    try:
        world = World.objects.get(id=world_id)
    except World.DoesNotExist:
        click.secho("World not found!", fg="red")
        return

    wp = world.worldpoll_set.all().order_by("time").first()

    if wp is None:
        click.secho("No world polls!", fg="red")
        return

    click.echo(WorldNotification().forum(world, wp.resources))

def crawl_musicbrainz(ct, cache_for):
    """
    crawls for (secondary) identifiers.
    give content type(s) as argument(s): media, artist, release, label
    """
    changes = []

    if ct == 'artist':
        qs = Artist.objects.filter(relations__service='musicbrainz').distinct()
        _crawl_func = artist_crawl_musicbrainz
    if ct == 'label':
        qs = Label.objects.filter(relations__service='musicbrainz').distinct()
        _crawl_func = label_crawl_musicbrainz
    if ct == 'release':
        qs = Release.objects.filter(relations__service='musicbrainz').distinct()
        _crawl_func = release_crawl_musicbrainz
    if ct == 'media':
        qs = Media.objects.filter(relations__service='musicbrainz').distinct()
        _crawl_func = media_crawl_musicbrainz

    click.secho('Num. {} objects to process: {}'.format(ct, qs.count()),
                fg='green')

    for obj in qs.nocache():
        cache_key = 'musicbrainz-{}-{}'.format(ct, obj.pk)
        if cache.get(cache_key):
            click.secho('object recently crawled: {}'.format(obj),
                        bg='yellow', fg='black')
        else:
            _changes = _crawl_func(obj=obj)
            if _changes:
                changes.append(_changes)
            cache.set(cache_key, 1, cache_for)

    ###########################################################################
    # summary display
    ###########################################################################
    click.secho('#' * 72, fg='green')
    click.secho('Total updated objects: {}'.format(len(changes)), fg='green')
    click.secho('Total updated properties: {}'.format(
        sum([len(c) for c in changes])
    ), fg='green')

def restore_entity(entity: str, path: pathlib.Path, format_: str) -> None:
    """ Restore data from file in given path with given format """
    model, resource = determine_model_and_resource(entity, method="restore")
    filename = (path / entity).with_suffix("." + format_)
    try:
        with open(str(filename), "r") as f:
            data = f.read()
    except FileNotFoundError:
        click.secho(f"No backup to restore for {entity}", fg="red")
        return

    import_format = determine_import_format(format_)
    dataset = import_format().create_dataset(data)

    # Try to import the data in dry_run mode
    result = resource().import_data(dataset, dry_run=True)
    if result.has_errors():
        output = tablib.Dataset()
        output.headers = ['basket', 'user', 'email', 'study', 'dataset', 'variable']
        click.secho(f"Error while importing {entity} from {filename}", fg="red")
        for line, errors in result.row_errors():
            for error in errors:
                click.secho(
                    f"Error in line: {line}, {error.error}, {error.row}", fg="red"
                )
                output.append(error.row.values())
        log_file = path / 'error_log.csv'
        with open(str(log_file), 'w') as f:
            f.write(output.csv)
    else:
        # Actually write the data to the database if the dry run had no errors
        resource().import_data(dataset, dry_run=False)
        click.secho(
            f"Successfully imported {len(dataset)} {entity} from {filename}",
            fg="green",
        )

def command(v):
    config = Config()

    migrations_dir_path = os.path.join(config.get_lily_path(), 'migrations')
    migrations_path = os.path.join(migrations_dir_path, f'{v}.json')

    with open(migrations_path, 'r') as f:
        migrations_plan = json.loads(f.read())['plan']

    for app_name, migration in migrations_plan:
        management.call_command('migrate', app_name, migration)

    click.secho(f'Migrations plan for version {v} applied.', fg='green')

def warm_cache(content_types):
    """Warm cache for given types."""
    click.secho('Warming cache for: {}'.format(', '.join(content_types)))

    from alibrary.models import Artist

    if 'artist' in content_types or 'all' in content_types:
        artist_qs = Artist.objects.order_by('-updated').all()
        with click.progressbar(
                artist_qs,
                label='Warming cache for {} items'.format(artist_qs.count())
        ) as bar:
            for item in bar:
                item.get_releases()
                item.get_media()

def start(path, limit, username, collection):
    """Start an import session"""
    click.secho(
        '--------------------------------------------------------------------',
        bold=True)
    click.echo('Username:\t {}'.format(username))
    click.echo('Collection:\t {}'.format(collection))
    click.echo('Limit:\t\t {}'.format(limit))
    click.echo('Path:\t\t {}'.format(path))
    click.secho(
        '--------------------------------------------------------------------',
        bold=True)
    click.echo('')

    if not os.path.isdir(path):
        click.secho('Directory does not exist: {}'.format(path),
                    bold=True, fg='red')
        return

    if not path.endswith('/'):
        path += '/'

    if not get_user_model().objects.filter(username=username).exists():
        click.secho('User does not exist: {}'.format(username),
                    bold=True, fg='red')
        return

    if Massimport.objects.filter(directory=path).exists():
        click.secho('Import session already exists: {}'.format(path),
                    bold=True, fg='red')
        return

    massimport = Massimport(
        directory=path,
        user=get_user_model().objects.get(username=username),
        collection_name=collection)
    massimport.save()

    if click.confirm('Continue with scanning directories?', default=True):
        massimport.scan()

    if click.confirm('Continue with enqueuing files?'):
        for item in massimport.files.filter(status=0)[0:limit]:
            item.enqueue()

def delete(id): """Delete session""" try: massimport = Massimport.objects.get(pk=id) except Massimport.DoesNotExist as e: click.secho( 'Massimport session with id: {} does not exist.'.format(id), bold=True, fg='red') return if click.confirm('Do you want to delete session id: {} ?'.format(id), default='Y'): massimport.delete()
def test_media(limit, offset):
    qs = Media.objects.exclude(master__isnull=True).nocache().filter(
        master_duration__lte=(60 * 20), fprint_ingested__isnull=False)

    for item in qs[offset:(offset + limit)]:
        click.secho(u'testing fprint: {} - {}'.format(item.uuid, item.name),
                    fg='cyan')

        command = [ECHOPRINT_CODEGEN_BINARY, item.master.path]
        p = subprocess.Popen(command, stdout=subprocess.PIPE, close_fds=True)
        data = json.loads(p.stdout.read())[0]

        url = '{}/api/v1/fprint/identify/'.format(FPRINT_API_URL)
        r = requests.post(url, json=data)
        results = r.json()

        if results:
            top_match = results[0]
            uuid = top_match['uuid']
            score = top_match['score']
            if str(uuid) == str(item.uuid):
                click.secho(u'score: {}'.format(score), fg='green')
            else:
                click.secho(u'score: {}'.format(score), fg='yellow')
        else:
            click.secho(u'no results for: {}'.format(item.uuid), fg='red')

def command():
    config = Config()

    migrations_dir_path = os.path.join(config.get_lily_path(), 'migrations')
    if not os.path.exists(migrations_dir_path):
        os.mkdir(migrations_dir_path)

    version = config.next_version or config.version
    migrations_path = os.path.join(migrations_dir_path, f'{version}.json')

    with open(migrations_path, 'w') as f:
        f.write(json.dumps(Renderer().render(), indent=4, sort_keys=False))

    click.secho(f'Migrations plan rendered to file {migrations_path}',
                fg='green')

def command(email, type):
    """Create account of a given type."""
    try:
        Account.objects.get(email=email)
    except Account.DoesNotExist:
        Account.objects.create(email=email, type=type)
    else:
        raise click.ClickException('Account with that email already exists')

    click.secho(
        f'Successfully created an account with email: {email} and type: {type}',
        fg='green')

def start(path, limit, username, collection):
    """Start an import session"""
    click.secho('--------------------------------------------------------------------', bold=True)
    click.echo('Username:\t {}'.format(username))
    click.echo('Collection:\t {}'.format(collection))
    click.echo('Limit:\t\t {}'.format(limit))
    click.echo('Path:\t\t {}'.format(path))
    click.secho('--------------------------------------------------------------------', bold=True)
    click.echo('')

    if not os.path.isdir(path):
        click.secho('Directory does not exist: {}'.format(path),
                    bold=True, fg='red')
        return

    if not path.endswith('/'):
        path += '/'

    if not User.objects.filter(username=username).exists():
        click.secho('User does not exist: {}'.format(username),
                    bold=True, fg='red')
        return

    if Massimport.objects.filter(directory=path).exists():
        click.secho('Import session already exists: {}'.format(path),
                    bold=True, fg='red')
        return

    massimport = Massimport(
        directory=path,
        user=User.objects.get(username=username),
        collection_name=collection,
    )
    massimport.save()

    if click.confirm('Continue with scanning directories?', default=True):
        massimport.scan()

    if click.confirm('Continue with enqueuing files?'):
        for item in massimport.files.filter(status=0)[0:limit]:
            item.enqueue()

def _ingest_fingerprint(media):
    """
    generates and ingests fingerprint for given media object
    """
    # close connection, needed for multiprocessing
    # https://stackoverflow.com/questions/8242837/django-multiprocessing-and-database-connections
    connection.close()

    client = FprintAPIClient()
    try:
        if client.ingest_for_media(obj=media):
            Media.objects.filter(pk=media.pk).update(
                fprint_ingested=timezone.now()
            )
    except Exception as e:
        click.secho('unable to ingest fprint for media: {} - {}'.format(
            media.pk, e))

def request_checks(limit, force):
    """
    requests preflight checks (via preflight service)
    """
    if force:
        _count = PreflightCheck.objects.all().delete()
        click.secho('Deleted all existing preflight checks. ({})'.format(_count),
                    fg='cyan')

    id_list = Media.objects.exclude(
        # master__isnull=True
        master=''
    ).nocache().filter(
        preflight_check__isnull=True
    ).values_list('id', flat=True)

    click.secho('{} media items to process'.format(id_list.count()), fg='cyan')

    for id in id_list[0:limit]:
        m = Media.objects.get(pk=id)
        _request_check(m)

def crawl_viaf_isni(cache_for):
    """
    crawls viaf database for artist ISNI codes
    """
    qs = Artist.objects.filter(
        Q(isni_code__isnull=True) | Q(isni_code=''),
        relations__service='viaf'
    ).distinct()

    for obj in qs.nocache():
        cache_key = 'viaf-isni-{}-{}'.format('artist', obj.pk)
        if cache.get(cache_key):
            click.secho('object recently crawled: {}'.format(obj),
                        bg='yellow', fg='black')
        else:
            url = obj.relations.filter(service='viaf').first().url + '/justlinks.json'
            r = requests.get(url)
            if r.status_code == 200:
                try:
                    data = r.json()
                    isni = data['ISNI'][0]
                    click.secho('got ISNI {} for {}'.format(isni, obj), fg='green')
                    type(obj).objects.filter(pk=obj.pk).update(isni_code=isni)
                except Exception:
                    click.secho('unable to get ISNI for {}'.format(obj), fg='red')
            cache.set(cache_key, 1, cache_for)

def update_index(force):
    """
    update fingerprint index (via fprint service)
    """
    if force:
        _count = Media.objects.all().update(fprint_ingested=None)
        click.secho('Resetting all fingerprints. ({})'.format(_count), fg='cyan')

    id_list = Media.objects.exclude(
        master__isnull=True,
        master_duration__lte=20,
    ).filter(
        master_duration__lte=(60 * 20),
        fprint_ingested__isnull=True
    ).nocache().values_list('id', flat=True)

    click.secho('{} media items to process'.format(id_list.count()), fg='cyan')

    for id in id_list:
        m = Media.objects.get(pk=id)
        _ingest_fingerprint(m)

def command(): """Creates new Django Girls event""" # Basics (city, country, date, url, event_mail) = get_basic_info() # Main organizer main_organizer = get_main_organizer() # Team team = get_team(main_organizer) click.echo("OK! That's it. Now I'll create your event.") # Event and EventPage objects name = "Django Girls " + city latlng = get_coordinates_for_city(city, country) mail = event_mail + "@djangogirls.org" event = Event.objects.create( name=name, city=city, country=country, latlng=latlng, email=mail, date=date, is_on_homepage=False ) page = EventPage.objects.create(event=event, url=url, title=name) # Create users members = create_users(team, event) event.main_organizer = members[0] event.save() # Default content add_default_content(page) add_default_menu(page) click.secho("Website is ready here: http://djangogirls.org/{0}".format(url), fg="green") click.echo(DELIMITER) click.secho("Ok, now follow this:", fg="black", bg="green") click.echo("1. Find a photo of a city with CC license on Flickr. Download it.") click.echo("2. Go here: http://djangogirls.org/admin/core/event/{0}/".format(event.id)) click.echo("3. Upload a photo of city, add credits and tick 'is on homepage' checkbox. Save.") click.echo("4. Send e-mail with instructions to a team!") click.echo(DELIMITER) click.secho("This is a ready, filled out mail to sent to organizers:", fg="green") click.echo("SUBJECT: Django Girls {} setup".format(event.city)) click.echo("TO: {}, {}, [email protected]".format(", ".join([x.email for x in members]), event.email)) click.echo("BODY:") click.echo( render_to_string("emails/setup.txt", {"event": event, "email_password": "******", "settings": settings}) ) brag_on_slack_bang(city, country, members)
def command(short):
    """Creates a new Django Girls event"""
    # Basics
    (city, country, date, url, event_email) = get_basic_info()

    # Main organizer
    main_organizer = get_main_organizer()

    # Team
    team = get_team(main_organizer)

    click.echo("OK! That's it. Now I'll create your event.")

    # Event and EventPage objects
    name = 'Django Girls ' + city
    latlng = get_coordinates_for_city(city, country)
    email = event_email + '@djangogirls.org'

    form = EventForm({
        'city': city,
        'country': country,
        'date': date,
        'email': email,
        'latlng': latlng,
        'name': name,
        'page_title': name,
        'page_url': url})

    if not form.is_valid():
        click.secho("OOPS! Something went wrong!", fg='red')
        for field, errors in form.errors.items():
            for error in errors:
                click.secho(
                    " {field:10} {error}".format(error=error, field=field),
                    fg='red')
        return

    event = form.save()

    # Create users
    members = create_users(team, event)
    event.main_organizer = members[0]

    # Add random cover picture
    event.set_random_cover()
    event.save()

    click.secho("Website is ready here: http://djangogirls.org/{0}".format(url),
                fg='green')
    click.echo(DELIMITER)
    click.secho("Ok, now follow this:", fg='black', bg='green')
    click.echo("1. Create an email account for the event.")
    click.echo("2. Send an e-mail with instructions to the team!")
    click.echo(DELIMITER)
    click.secho("This is a ready, filled out mail to send to organizers:",
                fg='green')
    click.echo("SUBJECT: Django Girls {} setup".format(event.city))
    click.echo("TO: {}, {}, [email protected]".format(
        ', '.join([x.email for x in members]),
        event.email
    ))
    click.echo("BODY:")
    if short:
        click.echo(render_to_string('emails/setup-short.txt', {
            'event': event,
        }))
    else:
        click.echo(render_to_string('emails/setup.txt', {
            'event': event,
            'email_password': '******',
            'settings': settings,
        }))

    brag_on_slack_bang(city, country, members)

def command():
    click.secho('stdout', fg='blue', nl=False)
    click.secho('stderr', bg='red', err=True, nl=False)

def crawl_artwork(ct, id, cache_for):
    """
    crawls for artwork.
    give content type(s) as argument(s): artist, release, label
    """
    images_added = []

    if ct == 'artist':
        services = [
            'wikidata',
            'discogs',
        ]
        qs = Artist.objects.filter(
            Q(main_image__isnull=True) | Q(main_image=''),
            relations__service__in=services
        ).distinct()
    if ct == 'release':
        services = [
            'musicbrainz',
            'wikidata',
            'discogs',
            'wikipedia',
        ]
        qs = Release.objects.filter(
            Q(main_image__isnull=True) | Q(main_image=''),
            relations__service__in=services
        ).distinct()
    if ct == 'label':
        services = [
            'wikidata',
            'discogs',
        ]
        qs = Label.objects.filter(
            Q(main_image__isnull=True) | Q(main_image=''),
            relations__service__in=services
        ).distinct()

    click.secho('Num. {} objects to process: {}'.format(ct, qs.count()),
                fg='green')

    for obj in qs.nocache():
        cache_key = 'artwork-{}-{}'.format(ct, obj.pk)
        if cache.get(cache_key):
            click.secho('object recently crawled: {}'.format(obj),
                        bg='yellow', fg='black')
        else:
            image = obj_crawl_artwork(obj=obj, services=services, save=True)
            if image:
                images_added.append(image)
            cache.set(cache_key, 1, cache_for)

    click.secho('#' * 72, fg='green')
    click.secho('Total images added: {}'.format(len(images_added)), fg='green')

def echo(text, fg="green", **kwargs): click.secho(render(text, **kwargs), fg=fg)
def echo_by(template, fg="green", **kwargs): click.secho(render_by(template, **kwargs), fg=fg)
def command(path, cycle):
    click.secho('Importing {}'.format(path), fg='red')
    report = load_report(cycle, path)
    save_report(report)
    calculate_scores_for_checks_in_cycle(report)

def status(id, details):
    """Show (current) import session(s) info"""
    if not id:
        massimports = Massimport.objects.order_by('status').all()
        tpl = '''{id}\t{status}\t{num_files}\t{username}\t{directory}'''

        click.secho('--------------------------------------------------------------------', bold=True)
        click.secho('ID\tstatus\tfiles\tuser\tdirectory\t', bold=True)
        click.secho('--------------------------------------------------------------------', bold=True)

        for item in massimports:
            click.echo(tpl.format(
                id=item.pk,
                status=item.get_status_display(),
                username=item.user,
                num_files=item.files.count(),
                directory=item.directory,
            ))
        click.echo('')

    else:
        try:
            massimport = Massimport.objects.get(pk=id)
        except Massimport.DoesNotExist:
            click.secho('Massimport session with id: {} does not exist.'.format(id),
                        bold=True, fg='red')
            return

        massimport.update()

        if not details:
            tpl = '''{}: \t{}'''

            click.secho('--------------------------------------------------------------------', bold=True)
            click.secho('Status ({})\tcount'.format(id), bold=True)
            click.secho('--------------------------------------------------------------------', bold=True)

            for status in MassimportFile.STATUS_CHOICES:
                count = massimport.files.filter(status=status[0]).count()
                if count:
                    click.echo(tpl.format(status[1], count))

            click.secho('--------------------------------------------------------------------', bold=True)
            click.secho('Total: \t{}'.format(massimport.files.all().count()), bold=True)
            click.echo('')
            return

        if details:
            from importer.models import ImportFile

            status_id = getattr(ImportFile, 'STATUS_{}'.format(details.upper()), 0)
            qs = massimport.files.filter(status=status_id)
            tpl = '''{}: \t{}'''

            click.secho('--------------------------------------------------------------------', bold=True)
            click.secho('{} ({})\tcount'.format(details, id), bold=True)
            click.secho('--------------------------------------------------------------------', bold=True)

            for item in qs:
                click.echo(tpl.format(item, item.import_file.media))

            click.secho('--------------------------------------------------------------------', bold=True)
            click.secho('Total: \t{}'.format(qs.count()), bold=True)
            click.echo('')

def echo(msg: str, v_level: int, **kwargs: Any) -> None:
    if verbosity < v_level:
        return
    click.secho(msg, **kwargs)

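# NOTE: `verbosity` above is a free variable, so this `echo` presumably lives
# inside a closure or a module that sets it. A minimal sketch of a factory
# that would produce such a function; the `make_echo` name is an assumption:
from typing import Any, Callable

import click


def make_echo(verbosity: int) -> Callable[..., None]:
    """Build an echo() that silently drops messages above the threshold."""
    def echo(msg: str, v_level: int, **kwargs: Any) -> None:
        if verbosity < v_level:
            return
        click.secho(msg, **kwargs)
    return echo


# usage: echo = make_echo(verbosity=1); echo("detail", v_level=2) prints nothing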