def get_team(team):
    """
    Ask the user for names and e-mail addresses of additional team
    members and append them to the list received from get_main_organizer.

    Fix: split the name on the first space only, so multi-word last
    names (e.g. "Anna van der Berg") are no longer truncated to the
    second word; a single-word name simply gets an empty last name.
    """
    add_team = click.confirm(click.style(
        "Do you want to add additional team members?",
        bold=True, fg='yellow'), default=False)
    i = 1
    while add_team:
        i += 1
        name = click.prompt(click.style(
            "First and last name of #{0} member".format(i),
            bold=True, fg='yellow'))
        email = click.prompt(click.style(
            "E-mail address of #{0} member".format(i),
            bold=True, fg='yellow'))
        if name:
            # partition never raises: everything after the first space
            # (possibly nothing) becomes the last name.
            first_name, _, last_name = name.partition(' ')
            team.append({'first_name': first_name,
                         'last_name': last_name,
                         'email': email})
        click.echo(
            "All right, the #{0} team member of Django Girls is {1} ({2})".format(
                i, name, email))
        add_team = click.confirm(click.style(
            "Do you want to add additional team members?",
            bold=True, fg='yellow'), default=False)
    return team
def get_team(team):
    """
    Prompt for names and e-mail addresses of the remaining team members
    and append each to the list produced by get_main_organizer.
    """
    member_count = 1
    more_question = click.style(
        "Do you want to add additional team members?",
        bold=True, fg='yellow')
    # Ask before every member, including the first one.
    while click.confirm(more_question, default=False):
        member_count += 1
        name = click.prompt(click.style(
            "First and last name of #{0} member".format(member_count),
            bold=True, fg='yellow'))
        email = click.prompt(click.style(
            "E-mail address of #{0} member".format(member_count),
            bold=True, fg='yellow'))
        if len(name) > 0:
            team.append({'name': name, 'email': email})
        click.echo(
            "All right, the #{0} team member of Django Girls is {1} ({2})".format(
                member_count, name, email))
    return team
def start(path, limit, username, collection):
    """Start an import session.

    Validates the directory and user, refuses a duplicate session for
    the same directory, then optionally scans and enqueues files.

    Fix: ``click.confirm(default=...)`` expects a bool; the string
    ``'Y'`` only worked by being truthy. Also reuses a single user
    queryset instead of querying twice.
    """
    click.secho(
        '--------------------------------------------------------------------',
        bold=True)
    click.echo('Username:\t {}'.format(username))
    click.echo('Collection:\t {}'.format(collection))
    click.echo('Limit:\t\t {}'.format(limit))
    click.echo('Path:\t\t {}'.format(path))
    click.secho(
        '--------------------------------------------------------------------',
        bold=True)
    click.echo('')

    # Validate the import directory.
    if not os.path.isdir(path):
        click.secho('Directory does not exist: {}'.format(path),
                    bold=True, fg='red')
        return
    if not path.endswith('/'):
        path += '/'

    # Validate the importing user (single queryset, reused below).
    user_qs = get_user_model().objects.filter(username=username)
    if not user_qs.exists():
        click.secho('User does not exist: {}'.format(username),
                    bold=True, fg='red')
        return

    # Refuse duplicate sessions for the same directory.
    if Massimport.objects.filter(directory=path).exists():
        click.secho('Import session already exists: {}'.format(path),
                    bold=True, fg='red')
        return

    massimport = Massimport(
        directory=path,
        user=user_qs.get(),
        collection_name=collection)
    massimport.save()

    # Fix: `default` must be a bool, not the string 'Y'.
    if click.confirm('Continue with scanning directories?', default=True):
        massimport.scan()

    if click.confirm('Continue with enqueuing files?'):
        for item in massimport.files.filter(status=0)[0:limit]:
            item.enqueue()
def create_service_user(email, password, token):
    """Create (or recreate) a service account, optionally pinning its token."""
    click.secho('Create service user: {}'.format(email), fg='green')

    # Look for an existing account with this address.
    try:
        existing = User.objects.get(email=email)
    except User.DoesNotExist:
        existing = None

    if existing is not None:
        click.secho('User already exists: {}'.format(existing), fg='yellow')
        click.secho('Current auth token: {}'.format(existing.auth_token),
                    fg='white')
        # Refuse to clobber the account without explicit confirmation.
        if not click.confirm('Do you want to delete this user account?'):
            click.secho('Exiting.', fg='yellow')
            sys.exit()
        existing.delete()

    user = User.objects.create_user(email=email, password=password)
    click.secho('Created user: {}'.format(user), fg='green')

    if token:
        # Pin the requested token value, then echo it back.
        Token.objects.filter(user=user).update(key=token)
        auth_token = Token.objects.get(user=user)
        click.secho('Current auth token: {}'.format(auth_token), fg='green')
def gather_information():
    """Collect the source event, its number, date, and team-change flag."""
    click.echo("Hello there sunshine! We're gonna copy an event website now.")

    id_prompt = click.style(
        "First, give me the latest ID of the Event "
        "object you want to copy", bold=True, fg='yellow')
    event = get_event(click.prompt(id_prompt))
    # Keep re-prompting until a valid event ID is supplied.
    while not event:
        event = get_event(click.prompt("Wrong ID! Try again"))

    click.echo("Ok, we're copying {}, {}".format(event.city, event.country))

    number_prompt = click.style(
        "What is the number of the event in this city? "
        "If this is a second event, write 2. If third, then 3. You got it",
        bold=True, fg='yellow')
    number = click.prompt(number_prompt)

    date = gather_event_date_from_prompt()

    organizer_names = [str(organizer) for organizer in event.team.all()]
    click.echo("The current team is: " + ", ".join(organizer_names))

    new_team = click.confirm(
        click.style("Do you need to change the whole team?",
                    bold=True, fg='yellow'),
        default=False)

    return (event, number, date, new_team)
def command(reset):
    """
    Create and initialize Simpl Calc game.

    Create a "default" Simpl Calc run.
    Set the run phase to "Play".
    Add 1 leader ("leader") to the run
    Add 2 players ("s1", "s2") to the run.
    Add a scenario and period 1 for each player.
    """
    # Optionally wipe the default run before recreating everything.
    if reset and click.confirm(
            'Are you sure you want to delete the default game run and recreate from scratch?'
    ):
        delete_default_run(games_client)

    # Game record.
    game = games_client.games.get_or_create(name='Simpl Calc',
                                            slug='simpl-calc')
    echo('getting or creating game: ', game.name)

    # Single "Play" phase.
    play_phase = games_client.phases.get_or_create(game=game.id,
                                                   name='Play',
                                                   order=1)
    echo('getting or creating phase: ', play_phase.name)

    # Run pre-populated with two players ready to play.
    run = add_run(game, 'default', 2, play_phase, games_client)
    echo('Completed setting up run: id=', run.id)
def gather_information():
    """Gather event, sequence number, date and whether the team changes."""
    click.echo("Hello there sunshine! We're gonna copy an event website now.")

    event = get_event(click.prompt(click.style(
        "First, give me the latest ID of the Event "
        "object you want to copy", bold=True, fg='yellow')))
    while not event:
        # Invalid ID — ask again until we get a real event.
        event = get_event(click.prompt("Wrong ID! Try again"))

    click.echo("Ok, we're copying {}, {}".format(event.city, event.country))

    number = click.prompt(click.style(
        "What is the number of the event in this city? "
        "If this is a second event, write 2. If third, then 3. You got it",
        bold=True, fg='yellow'))

    date = gather_event_date_from_prompt()

    current_team = ", ".join(str(organizer)
                             for organizer in event.team.all())
    click.echo("The current team is: " + current_team)

    new_team = click.confirm(
        click.style("Do you need to change the whole team?",
                    bold=True, fg='yellow'),
        default=False)

    return (event, number, date, new_team)
def command():
    """Creates new Django Girls organizer"""
    event_prompt = click.style(
        "What's the event ID? NOT the event page ID. We want EVENT ID here",
        bold=True, fg='yellow')
    event = Event.objects.get(id=click.prompt(event_prompt))
    click.echo("Ok, we're adding to an event in {}, {}".format(
        event.city, event.country))

    # Always at least one organizer; keep asking for more.
    more_prompt = click.style("Do you want to add additional team members?",
                              bold=True, fg='yellow')
    team = [get_organizer_data()]
    while click.confirm(more_prompt, default=False):
        team.append(get_organizer_data())

    click.echo("OK! That's it. Now I'll add your organizers.")
    for member in create_users(team, event):
        click.echo("User {} has been added and notified".format(member.email))

    click.echo(DELIMITER)
    click.echo("You still need to invite people to Google Group!")
def view_files(username):
    """Fetch and display the user's report titles, then offer to open one.

    Fix: renamed the local variable ``list`` to ``payload`` so the
    builtin ``list`` is no longer shadowed.
    """
    # get files - CHANGE URL TO HEROKU AFTER DEVELOPMENT
    r = requests.post(
        'http://afternoon-fortress-38321.herokuapp.com/fda_view_all_files/',
        {'username': username})
    if r.status_code == 200:
        payload = json.loads(r.text)
        reports_list = payload['reports_list']
        click.echo('Here is a list of your reports:')
        for report in reports_list:
            click.echo(report['report_title'])
        if click.confirm('Do you want to display a report?'):
            view_report_contents(reports_list)
        else:
            click.echo('Goodbye then.')
            exit()
    elif r.status_code == 404:
        click.echo('You do not currently have any reports. Goodbye.')
    else:
        click.echo('Error. Please contact site manager.')
        exit()
def command():
    """Creates new Django Girls organizer"""
    event = Event.objects.get(id=click.prompt(click.style(
        "What's the event ID? NOT the event page ID. We want EVENT ID here",
        bold=True, fg='yellow')))
    click.echo("Ok, we're adding to an event in {}, {}".format(
        event.city, event.country))

    team = [get_organizer_data()]
    add_more = click.style("Do you want to add additional team members?",
                           bold=True, fg='yellow')
    # Collect extra organizers until the user declines.
    while click.confirm(add_more, default=False):
        team.append(get_organizer_data())

    click.echo("OK! That's it. Now I'll add your organizers.")
    members = create_users(team, event)
    for member in members:
        click.echo("User {} has been added and notified".format(member.email))

    click.echo(DELIMITER)
    click.echo("You still need to invite people to Google Group!")
def command(reset): """ Create and initialize Blackjack game. Create a "default" Blackjack run. Set the run phase to "Play". Add 1 player "*****@*****.**" with password "demo" Add a scenario and period 1 for each player. """ # Handle resetting the game if reset: if click.confirm( "Are you sure you want to delete the default game run and recreate from scratch?" ): delete_default_run(games_client) # Create a Game game = games_client.games.get_or_create(name="Blackjack", slug="blackjack") echo("getting or creating game: ", game.name) # Create game Phases ("Play") play_phase = games_client.phases.get_or_create(game=game.id, name="Play", order=1) echo("getting or creating phase: ", play_phase.name) run = games_client.runs.get_or_create(game=game.id, name="default") echo("getting or creating run: ", run.name) # Set run to phase run.phase = play_phase.id run.save() echo("setting run to phase: ", play_phase.name) player = games_client.users.get_or_create( password="******", first_name="Blackjack", last_name="Demo", email="*****@*****.**", ) echo("getting or creating user: "******"Scenario 1" ) click.echo( "getting or creating runuser {} scenario: {}".format(runuser.id, scenario.id) ) period = games_client.periods.get_or_create(scenario=scenario.id, order=1) click.echo( "getting or creating runuser {} period 1 for scenario: {}".format( runuser.id, scenario.id ) ) echo("Completed setting up run: id=", run.id)
def check_download(files, report_id, reports_list, files_encrypted):
    """Offer to download one of the report's files, re-prompting on a bad name.

    Fixes: replaced the manual flag-and-loop membership scan with the
    idiomatic ``in`` test, and renamed the local ``file`` which shadowed
    the builtin.
    """
    if click.confirm("Would you like to download a file from this report?"):
        file_name = click.prompt('Please enter the name of the file')
        if file_name not in files:
            click.echo('No existing file with that name.')
            # Recurse until the user names an existing file or declines.
            check_download(files, report_id, reports_list, files_encrypted)
        else:
            download_files(report_id, reports_list, file_name, files_encrypted)
    else:
        if click.confirm("Would you like to view another report's contents?"):
            view_report_contents(reports_list)
        else:
            click.echo('Goodbye then.')
            exit()
def delete_invalid_feeds(days=1, limit=100, threshold=99):
    """Find feeds that are mostly failing and offer to bulk-delete them.

    Feeds with more than 3 error responses (status < 200 or >= 400) in the
    last `days` days are scored by their error percentage; those at or above
    `threshold` percent are listed and, after confirmation, deleted.
    Returns the scored feed list (highest error percentage first).
    """
    # Error responses per (feed, status) in the window, joined to feed
    # metadata; only feeds with more than 3 errors are considered.
    sql = """
    SELECT feed_id, title, link, url, status_code, count FROM (
        SELECT feed_id, status_code, count(1) as count
        FROM rssant_api_rawfeed
        WHERE dt_created >= %s and (status_code < 200 or status_code >= 400)
        group by feed_id, status_code
        having count(1) > 3
        order by count desc
        limit %s
    ) error_feed
    join rssant_api_feed on error_feed.feed_id = rssant_api_feed.id
    order by feed_id, status_code, count;
    """
    # Successful responses per feed, restricted to the candidate feeds.
    sql_ok_count = """
    SELECT feed_id, count(1) as count
    FROM rssant_api_rawfeed
    WHERE dt_created >= %s and (status_code >= 200 and status_code < 400)
        AND feed_id=ANY(%s)
    group by feed_id
    """
    t_begin = timezone.now() - timezone.timedelta(days=days)
    error_feeds = defaultdict(dict)
    with connection.cursor() as cursor:
        cursor.execute(sql, [t_begin, limit])
        for feed_id, title, link, url, status_code, count in cursor.fetchall():
            error_feeds[feed_id].update(feed_id=feed_id, title=title, link=link, url=url)
            # Accumulate error counts keyed by human-readable status name.
            error = error_feeds[feed_id].setdefault('error', {})
            error_name = FeedResponseStatus.name_of(status_code)
            error[error_name] = count
            error_feeds[feed_id]['error_count'] = sum(error.values())
            # Assume 100% errors until OK counts are filled in below.
            error_feeds[feed_id].update(ok_count=0, error_percent=100)
        # Second pass: mix in successful-response counts to compute the
        # real error percentage per feed.
        cursor.execute(sql_ok_count, [t_begin, list(error_feeds)])
        for feed_id, ok_count in cursor.fetchall():
            feed = error_feeds[feed_id]
            total = feed['error_count'] + ok_count
            error_percent = round((feed['error_count'] / total) * 100)
            feed.update(ok_count=ok_count, error_percent=error_percent)
    # Worst offenders first.
    error_feeds = list(
        sorted(error_feeds.values(), key=lambda x: x['error_percent'], reverse=True))
    delete_feed_ids = []
    for feed in error_feeds:
        if feed['error_percent'] >= threshold:
            delete_feed_ids.append(feed['feed_id'])
            click.echo(pretty_format_json(feed))
    if delete_feed_ids:
        confirm_delete = click.confirm(f'Delete {len(delete_feed_ids)} feeds?')
        if not confirm_delete:
            click.echo('Abort!')
        else:
            UnionFeed.bulk_delete(delete_feed_ids)
            click.echo('Done!')
    return error_feeds
def delete(id):
    """Delete session.

    Fixes: dropped the unused ``as e`` exception binding, and pass a
    real bool to ``click.confirm`` -- its ``default`` parameter expects
    a bool, not the string ``'Y'``.
    """
    try:
        massimport = Massimport.objects.get(pk=id)
    except Massimport.DoesNotExist:
        click.secho('Massimport session with id: {} does not exist.'.format(id),
                    bold=True, fg='red')
        return
    if click.confirm('Do you want to delete session id: {} ?'.format(id),
                     default=True):
        massimport.delete()
def start(path, limit, username, collection):
    """Start an import session.

    Fix: ``click.confirm(default=...)`` expects a bool; the string
    ``'Y'`` only worked because any non-empty string is truthy.
    """
    click.secho('--------------------------------------------------------------------', bold=True)
    click.echo('Username:\t {}'.format(username))
    click.echo('Collection:\t {}'.format(collection))
    click.echo('Limit:\t\t {}'.format(limit))
    click.echo('Path:\t\t {}'.format(path))
    click.secho('--------------------------------------------------------------------', bold=True)
    click.echo('')

    # Bail out early on an invalid directory.
    if not os.path.isdir(path):
        click.secho('Directory does not exist: {}'.format(path), bold=True, fg='red')
        return

    # Sessions are keyed on the normalized (trailing-slash) directory.
    if not path.endswith('/'):
        path += '/'

    if not User.objects.filter(username=username).exists():
        click.secho('User does not exist: {}'.format(username), bold=True, fg='red')
        return

    if Massimport.objects.filter(directory=path).exists():
        click.secho('Import session already exists: {}'.format(path), bold=True, fg='red')
        return

    massimport = Massimport(
        directory=path,
        user=User.objects.get(username=username),
        collection_name=collection
    )
    massimport.save()

    # Fix: bool default instead of the string 'Y'.
    if click.confirm('Continue with scanning directories?', default=True):
        massimport.scan()

    if click.confirm('Continue with enqueuing files?'):
        for item in massimport.files.filter(status=0)[0:limit]:
            item.enqueue()
def download_files(report_id, reports_list, file_name, files_encrypted):
    """Download one file of a report, decrypting it locally when required."""
    # get report files - CHANGE URL TO HEROKU AFTER DEVELOPMENT
    r = requests.post(
        'http://afternoon-fortress-38321.herokuapp.com/fda_get_files/', {
            'report_id': report_id,
            'file_name': file_name
        })
    if files_encrypted:
        if click.confirm("Do you have the private key?"):
            key_file = click.prompt(
                'Enter the path to the keyfile for this file')
            key = None
            try:
                # Keyfiles store the symmetric key base64-encoded.
                with open(key_file, 'rb') as f:
                    key = b64decode(f.read())
                # Strip a trailing '.enc' extension for the plaintext name.
                out_name = file_name
                if file_name.split('.')[-1] == 'enc':
                    out_name = file_name[:-4]
                # NOTE(review): the HTTP response body `r.content` is never
                # written to disk in this branch, yet decrypt_file() reads
                # `file_name` locally -- presumably the ciphertext must
                # already exist on disk. Confirm; otherwise the download is
                # silently dropped here.
                decrypt_file(file_name, key, out_name)
            except FileNotFoundError:
                click.echo('ERROR: Keyfile not found')
                exit()
            click.echo('File saved as {}'.format(out_name))
        else:
            click.echo('You cannot download the file.')
            if click.confirm(
                    "Would you like to view another report's contents?"):
                view_report_contents(reports_list)
            else:
                click.echo('Goodbye then.')
                exit()
    else:
        # Unencrypted: write the response body straight to disk.
        file = open(file_name, 'wb')
        file.write(r.content)
        file.close()
        click.echo('Success. Your file has been downloaded.')
        if click.confirm("Would you like to view another report's contents?"):
            view_report_contents(reports_list)
        else:
            click.echo('Goodbye then.')
            exit()
def migrate_published_version_metadata(dandiset: str, published_version: str, to_version: str):
    """Migrate one published version's metadata to a newer schema version,
    showing a diff and asking for confirmation before saving."""
    click.echo(
        f'Migrating published version {dandiset}/{published_version} metadata to version {to_version}'  # noqa: E501
    )
    version = Version.objects.filter(~Q(version='draft')).get(
        dandiset=dandiset, version=published_version)
    metadata = version.metadata

    # If there is no schemaVersion, assume to_version
    metadata.setdefault('schemaVersion', to_version)

    try:
        metanew = migrate(metadata, to_version=to_version, skip_validation=False)
    except Exception as e:
        click.echo(f'Failed to migrate {dandiset}/{published_version}')
        click.echo(e)
        raise click.Abort()

    if metadata == metanew:
        click.echo('No changes detected')
    else:
        click.echo('Diff of changes to be saved:')
        old_lines = pformat(metadata).splitlines(keepends=True)
        new_lines = pformat(metanew).splitlines(keepends=True)
        click.echo(''.join(ndiff(old_lines, new_lines)))
        # Aborts the command unless the user confirms.
        click.confirm('Do you want to save these changes?', abort=True)
        version.metadata = metanew
        version.save()
        write_manifest_files.delay(version.id)
def delete(id):
    """Delete session.

    Fixes: removed the unused ``as e`` binding and replaced the string
    ``'Y'`` with ``True`` -- ``click.confirm``'s ``default`` parameter
    is documented as a bool.
    """
    try:
        massimport = Massimport.objects.get(pk=id)
    except Massimport.DoesNotExist:
        click.secho(
            'Massimport session with id: {} does not exist.'.format(id),
            bold=True, fg='red')
        return
    if click.confirm('Do you want to delete session id: {} ?'.format(id),
                     default=True):
        massimport.delete()
def view_report_contents(reports_list): report_name = click.prompt('Which report would you like to display?') # get report ID check = False report_id = 0 for r in reports_list: if r['report_title'] == report_name: report_id = r['report_id'] check = True if check is False: click.echo( 'No existing report with that name. Please enter another report.') view_report_contents(reports_list) else: # get report contents - CHANGE URL TO HEROKU AFTER DEVELOPMENT r = requests.post( 'http://afternoon-fortress-38321.herokuapp.com/fda_view_report_contents/', {'report_id': report_id}) if r.status_code == 200: json_str = r.text r_info = json.loads(json_str) report_info = r_info['report_info'] # display report contents click.echo('Title: ' + report_info['title']) click.echo('Owner: ' + report_info['owner']) click.echo('Short Description: ' + report_info['short_desc']) click.echo('Summary: ' + report_info['long_desc']) click.echo('Shared With: ' + report_info['shared_with']) click.echo('Created: ' + report_info['timestamp']) click.echo('Files: ') for r in report_info['files']: click.echo(r) if report_info['files'] == []: if click.confirm( "Would you like to view another report's contents?"): view_report_contents(reports_list) else: click.echo('Goodbye then.') exit() else: check_download(report_info['files'], report_id, reports_list, report_info['files_encrypted']) elif r.status_code == 404: click.echo('You do not currently have any reports. Goodbye.') else: click.echo('Error. Please contact site manager.') exit()
def encrypt_file():
    """Prompt for a file, encrypt it with a random key, and save the key
    base64-encoded next to it as ``<file>.pem``.

    Fix: return after the recursive retry in the ``FileNotFoundError``
    handler so control can never fall through and write a keyfile for a
    path that was never encrypted.
    """
    key = random_generator(256)
    file_name = click.prompt('Enter the path of the file you wish to encrypt')
    try:
        encrypt(file_name, key)
    except FileNotFoundError:
        click.echo('ERROR: File not found')
        # Retry from scratch; the retry owns the rest of the flow.
        encrypt_file()
        return
    pk_file_name = file_name + '.pem'
    with open(pk_file_name, 'wb') as f:
        f.write(b64encode(key))
    click.echo('Encrypted file saved as {}.enc'.format(file_name))
    click.echo('Keyfile saved as {}'.format(pk_file_name))
    if click.confirm('Would you like to encrypt another file?'):
        encrypt_file()
    exit()
def command(csv_file):
    """Load appliances from a CSV of (product_line, model_number,
    serial_number) rows and bulk-create them after confirmation."""
    appliances = []
    for product_line, model_number, serial_number in csv.reader(csv_file):
        # get_or_create returns (obj, created); only the object is needed.
        line_obj = ProductLine.objects.get_or_create(name=product_line)[0]
        appliances.append(
            Appliance(
                product_line=line_obj,
                serial_number=serial_number,
                model_number=model_number,
            ))

    print("Here are the appliances extracted:")
    pprint(appliances)
    if click.confirm("Proceed?"):
        Appliance.objects.bulk_create(appliances)
def login():
    """Prompt for credentials, authenticate against the FDA service,
    and move on to encryption/file viewing on success."""
    click.echo('Please login.')
    username = click.prompt('Username', type=str)
    password = click.prompt('Password', type=str)
    # authenticate user using requests - CHANGE URL TO HEROKU AFTER DEVELOPMENT
    response = requests.post(
        'http://afternoon-fortress-38321.herokuapp.com/fda_login/',
        data={'username': username, 'password': password})
    if response.status_code != requests.codes.ok:
        click.echo('Invalid username and password')
        # Bad credentials -- start over.
        login()
        return
    if click.confirm('Would you like to encrypt a file?'):
        encrypt_file()
    view_files(username)
def command():
    """Creates new Django Girls organizer"""
    event = Event.objects.get(id=click.prompt(click.style(
        "What's the event ID? NOT the event page ID. We want EVENT ID here",
        bold=True, fg='yellow')))
    click.echo("Ok, we're adding to an event in {}, {}".format(
        event.city, event.country))

    # At least one organizer, then keep asking for more.
    more_prompt = click.style("Do you want to add additional team members?",
                              bold=True, fg='yellow')
    team = [get_organizer_data()]
    while click.confirm(more_prompt, default=False):
        team.append(get_organizer_data())

    click.echo("OK! That's it. Now I'll add your organizers.")
    members, members_as_list = create_users(team)

    # Attach every member to the event's team.
    for member in members:
        event.team.add(member)
    event.save()

    # Echo credentials for freshly created accounts.
    for member in members_as_list:
        if 'password' in member:
            click.echo("{} - email: {} password {}".format(
                member['first_name'], member['email'], member['password']))
        else:
            click.echo("{} - email: {} already has account".format(
                member['first_name'], member['email']))

    click.echo(DELIMITER)
    invite_team_to_slack(members)
    click.echo(DELIMITER)
    click.echo("You still need to invite people to Google Group!")
def command(reset, name):
    """
    Create and initialize Simpl Calc game.

    Create a "default" Simpl Calc run.
    Set the run phase to "Play".
    Add 1 leader ("leader") to the run
    Add 2 players ("s1", "s2") to the run.
    Add a scenario and period 1 for each player.
    """
    # Game record.
    game = games_client.games.get_or_create(name='Simpl Calc',
                                            slug='simpl-calc')
    echo('getting or creating game: ', game.name)

    if reset:
        # Find an existing run with this name, if any.
        run = None
        try:
            run = games_client.runs.get(game=game.id, name=name)
        except games_client.ResourceNotFound:
            pass
        except games_client.TypeError:
            pass
        if run is not None and click.confirm(
                'Are you sure you want to delete the "{0}" game run and '
                'recreate from scratch?'.format(name)):
            delete_run(run)

    # "Play" phase.
    play_phase = games_client.phases.get_or_create(game=game.id,
                                                   name='Play',
                                                   order=1)
    echo('getting or creating phase: ', play_phase.name)

    # Run with two players ready to play.
    run = add_run(game, name, 2, play_phase, games_client)
    echo('Completed setting up run: id=', run.id)
def command():
    """Creates new Django Girls organizer"""
    event_id = click.prompt(click.style(
        "What's the event ID? NOT the event page ID. We want EVENT ID here",
        bold=True, fg='yellow'))
    event = Event.objects.get(id=event_id)
    click.echo("Ok, we're adding to an event in {}, {}".format(
        event.city, event.country))

    team = [get_organizer_data()]
    while click.confirm(click.style(
            "Do you want to add additional team members?",
            bold=True, fg='yellow'), default=False):
        team.append(get_organizer_data())

    click.echo("OK! That's it. Now I'll add your organizers.")
    members, members_as_list = create_users(team)

    for organizer in members:
        event.team.add(organizer)
    event.save()

    for entry in members_as_list:
        # New accounts carry a generated password; existing ones don't.
        if 'password' in entry:
            click.echo("{} - email: {} password {}".format(
                entry['first_name'], entry['email'], entry['password']))
        else:
            click.echo("{} - email: {} already has account".format(
                entry['first_name'], entry['email']))

    click.echo(DELIMITER)
    invite_team_to_slack(members)
    click.echo(DELIMITER)
    click.echo("You still need to invite people to Google Group!")
def command():
    """Interactive first-time setup of Simpl API superusers and staff users."""
    click.echo("=== Setup Simpl API ===")
    click.echo("")

    # Deal with super users
    superusers = find_super_users()
    if superusers:
        click.echo(
            "We found the following superusers that were already setup:")
        for u in superusers:
            click.echo(f" - {u}")
        click.echo("")
        click.echo(
            "If you would like to setup another super user for yourself run `./manage.py createsuperuser`"
        )
        click.echo("")
    else:
        click.echo("We found no superusers, please set one up below")
        create_super_user()

    # Deal with staff users
    staffusers = find_staff_users()
    if staffusers:
        click.echo(
            "We found the following staff users that are already setup:")
        for u in staffusers:
            click.echo(f" - {u}")
        click.echo("")
        if click.confirm("Would you like to setup another staff user?"):
            create_staff_user()
    else:
        click.echo(
            "We found no staff users, you will need one of these for your model service to connect to the Simpl API"
        )
        click.echo(
            "This is typically setup as a setting `SIMPL_GAMES_AUTH` from the environment variables `SIMPL_USER` and `SIMPL_PASS`"
        )
        click.echo("")
        create_staff_user()
def collect_garbage(assets: bool, assetblobs: bool, uploads: bool, s3blobs: bool):
    """Manually run garbage collection on the database."""
    # Log how many things there are before deleting them
    doing_deletes = any((assets, assetblobs, uploads, s3blobs))
    if doing_deletes:
        echo_report()

    # These categories are not deletable yet -- refuse explicitly.
    if assetblobs:
        raise click.NoSuchOption('Deleting AssetBlobs is not yet implemented')
    if uploads:
        raise click.NoSuchOption('Deleting Uploads is not yet implemented')
    if s3blobs:
        raise click.NoSuchOption('Deleting S3 Blobs is not yet implemented')

    if assets:
        assets_to_delete = stale_assets()
        prompt = f'This will delete {assets_to_delete.count()} assets. Are you sure?'
        if click.confirm(prompt):
            assets_to_delete.delete()

    # Log how many things there are, either after deletion
    # or if the user forgot to specify anything to delete
    echo_report()
def import_from_file(osm_data, resume: bool, rerun: bool):
    """Ingest OSM data into trail networks, optionally resuming an
    unfinished import of the same file (matched by SHA-256 digest).

    Fixes: removed the ``import pdb; pdb.set_trace()`` debugging
    leftover from the per-network exception handler (it would hang any
    non-interactive run); the failure is still printed and remaining
    networks are processed. ``import_obj.save()`` now runs even when no
    networks were created, so ``complete=True`` is always persisted.
    """
    Settings = IngestSettings(
        max_distance=Distance(km=50),
        max_segments=300,
        max_concurrent=40,
        quality_settings=DefaultQualitySettings,
        location_filter=None,
    )
    digest = sha256_digest(osm_data)
    print('Digest: ', digest)

    # Look for an unfinished import of the same data to resume.
    previous_import = e.Import.objects.filter(
        complete=False, sha256_sum=digest).order_by('-updated_at').first()
    if not resume or previous_import is None:
        if e.Import.objects.filter(sha256_sum=digest, complete=True):
            print('Import already done!')
            if rerun:
                e.Import.objects.filter(sha256_sum=digest,
                                        complete=True).delete()
            else:
                return
        # e.Import.objects.all().update(active=False)
        import_obj = e.Import(active=True,
                              complete=False,
                              border=Polygon(),
                              name=str(osm_data),
                              sha256_sum=digest)
        import_obj.save()
        digests = set()
    else:
        import_obj = previous_import
        if not click.confirm(
                f'Resuming import {import_obj.name}, last modified {import_obj.updated_at} currently containing {import_obj.networks.count()} trail networks'
        ):
            return 1
        # TODO: probably n queries
        digests = {n.digest for n in import_obj.networks.all()}
        print(f'{len(digests)} loaded')

    loader = OSMIngestor(Settings)
    loader.load_osm(osm_data, extra_links=[(885729040, 827103027)])
    networks = []
    for network in tqdm(loader.trail_networks(already_processed=digests)):
        try:
            multiline_strs = MultiLineString([
                LineString(trail.points())
                for trail in network.trail_segments()
            ])
            border = multiline_strs.convex_hull
            simplified = multiline_strs  # .simplify(tolerance=0.01)
            if isinstance(simplified, LineString):
                simplified = MultiLineString([simplified])
            # TODO: look for polygons that intersect this one
            trailheads = MultiPoint(
                [t.node.to_point() for t in network.trailheads])
            est_network = e.TrailNetwork(name=network.name or '',
                                         source=import_obj,
                                         trails=simplified,
                                         poly=border,
                                         total_length=network.total_length(),
                                         graph=pickle.dumps(network.graph),
                                         area=border.area,
                                         trailheads=trailheads,
                                         digest=network.digest)
            est_network.save()
            networks.append(est_network)
        except Exception as ex:
            # Fix: was `import pdb; pdb.set_trace()` -- report and keep going.
            print(ex)

    import_obj.complete = True
    if networks:
        import_border = MultiPolygon([n.poly for n in networks])
        import_obj.border = import_border.convex_hull
    # Fix: save unconditionally so `complete=True` is persisted even when
    # no new networks were created.
    import_obj.save()
def repair_durations(limit_range, dump_to, load_from, tolerance, log_file):
    """
    Repair/reprocess master durations.

    Compares each Media's stored master_duration against a fresh
    FileInfoProcessor reading; items whose difference exceeds
    `tolerance` are (optionally) repaired, and the durations of every
    playlist they appear on are recomputed. `dump_to`/`load_from` allow
    persisting/restoring the candidate list as "<pk>,<diff>" CSV lines;
    `log_file` receives a CSV audit trail of every change.
    """
    from base.audio.fileinfo import FileInfoProcessor
    items_to_reprocess = []
    affected_playlists = []
    affected_playlist_ids = []
    # invalidate cache for Media
    invalidate_model(Media)
    if load_from:
        if limit_range:
            raise NotImplementedError(
                '--limit-range option not allowed in combination with --load-from'
            )
        # using `set` to remove duplicate ids
        # Each dump line is "<pk>,<diff>"; keep rows over the tolerance.
        item_ids = set([
            int(l.strip().split(',')[0]) for l in load_from.readlines()
            if float(l.strip().split(',')[1]) > tolerance
        ])
        click.echo('loaded {} ids from dump file'.format(len(item_ids)))
        items_to_reprocess = Media.objects.filter(pk__in=item_ids)
    else:
        # mysql does not support remote/streaming cursors
        # to save memory items are loaded from db individually
        values = Media.objects.order_by('pk').values('id').nocache()
        if limit_range:
            # NOTE(review): the slice bounds here are strings from
            # "start:stop" -- confirm the queryset accepts them.
            _limits = limit_range.split(':')
            values = values[_limits[0]:_limits[1]]
        item_ids = [i['id'] for i in values]
        with click.progressbar(item_ids,
                               show_pos=True,
                               width=48,
                               label='Reprocessing {} tracks'.format(
                                   len(item_ids))) as bar:
            for item_pk in bar:
                close_old_connections()
                item = Media.objects.get(pk=item_pk)
                if item.master and item.master.path:
                    p = FileInfoProcessor(item.master.path)
                    current_duration = item.master_duration
                    new_duration = p.duration
                    try:
                        diff = abs(current_duration - new_duration)
                    except TypeError:
                        # A duration is None -> force the item into repair.
                        diff = 100.0
                    if diff > tolerance:
                        items_to_reprocess.append(item)
                    # add to csv log
                    if diff > tolerance and dump_to:
                        dump_to.write('{pk},{diff}\n'.format(pk=item.pk,
                                                             diff=diff))
                        dump_to.flush()
    click.echo('{} tracks have differences in duration'.format(
        len(items_to_reprocess)))
    if click.confirm(
            'Do you want to update/repair the durations on {} tracks?'.format(
                len(items_to_reprocess))):
        base_url = 'http://{}'.format(Site.objects.get_current().domain)
        # Human-readable per-item summary template.
        tpl = u'''id: {id} - "{name}"
{url}
old: {current_duration}
new: {new_duration}
diff: {diff}
'''
        # CSV audit-log line template.
        tpl_log = u'{ct},{pk},{type},{current_duration},{new_duration},{diff},{url}\n'
        # write column header
        if log_file:
            log_file.write(
                tpl_log.format(
                    ct='content-type',
                    pk='id',
                    url='url',
                    type='type',
                    #
                    current_duration='old_duration',
                    new_duration='new_duration',
                    diff='diff',
                ))
        # loop affected media, fix durations, get playlist appearances & print/log info
        for item in items_to_reprocess:
            p = FileInfoProcessor(item.master.path)
            current_duration = item.master_duration
            new_duration = p.duration
            try:
                diff = current_duration - new_duration
            except TypeError:
                diff = '-'
            click.echo(
                tpl.format(
                    id=item.id,
                    name=item.name,
                    url=base_url + item.get_absolute_url(),
                    #
                    current_duration=current_duration,
                    new_duration=new_duration,
                    diff=diff))
            if log_file:
                log_file.write(
                    tpl_log.format(
                        ct='media',
                        pk=item.pk,
                        url=base_url + item.get_absolute_url(),
                        type=item.get_mediatype_display(),
                        #
                        current_duration=current_duration,
                        new_duration=new_duration,
                        diff=diff))
                log_file.flush()
            # Remember every playlist this item appears on (deduplicated).
            for p in item.get_appearances():
                if not p.pk in affected_playlist_ids:
                    affected_playlist_ids.append(p.pk)
                    # we need to store the 'current' value of the duration
                    affected_playlists.append({
                        'obj': p,
                        'current_duration': p.get_duration()
                    })
            # update media duration
            Media.objects.filter(pk=item.pk).update(
                master_duration=new_duration)
            invalidate_obj(item)
        # loop playlists & print/log info
        for item in affected_playlists:
            invalidate_obj(item['obj'])
            # Playlist durations are stored in milliseconds.
            current_duration = float(item['current_duration']) / 1000
            new_duration = float(item['obj'].get_duration()) / 1000
            try:
                diff = current_duration - new_duration
            except TypeError:
                diff = '-'
            click.echo(
                tpl.format(
                    id=item['obj'].id,
                    name=item['obj'].name,
                    url=base_url + item['obj'].get_absolute_url(),
                    #
                    current_duration=current_duration,
                    new_duration=new_duration,
                    diff=diff))
            if log_file:
                log_file.write(
                    tpl_log.format(
                        ct='playlist',
                        pk=item['obj'].pk,
                        url=base_url + item['obj'].get_absolute_url(),
                        type=item['obj'].get_type_display(),
                        #
                        current_duration=current_duration,
                        new_duration=new_duration,
                        diff=diff))
                log_file.flush()
            # update playlist duration
            Playlist.objects.filter(pk=item['obj'].pk).update(
                duration=new_duration * 1000)
            # NOTE(review): `item` is a dict here, not a model instance --
            # presumably this should be invalidate_obj(item['obj']); confirm.
            invalidate_obj(item)
def repair_durations(limit_range, dump_to, load_from, tolerance, log_file):
    """
    Repair/reprocess master durations.

    Re-reads each Media master's duration from disk, repairs items whose
    stored duration differs by more than `tolerance`, and recomputes the
    duration of every playlist those items appear on. Candidate lists can
    be dumped to / loaded from CSV; changes are audit-logged to `log_file`.
    """
    from base.audio.fileinfo import FileInfoProcessor
    items_to_reprocess = []
    affected_playlists = []
    affected_playlist_ids = []
    # invalidate cache for Media
    invalidate_model(Media)
    if load_from:
        if limit_range:
            raise NotImplementedError('--limit-range option not allowed in combination with --load-from')
        # using `set` to remove duplicate ids
        item_ids = set([
            int(l.strip().split(',')[0]) for l in load_from.readlines()
            if float(l.strip().split(',')[1]) > tolerance
        ])
        click.echo('loaded {} ids from dump file'.format(len(item_ids)))
        items_to_reprocess = Media.objects.filter(pk__in=item_ids)
    else:
        # mysql does not support remote/streaming cursors
        # to save memory items are loaded from db individually
        values = Media.objects.order_by('pk').values('id').nocache()
        if limit_range:
            _limits = limit_range.split(':')
            values = values[_limits[0]:_limits[1]]
        item_ids = [i['id'] for i in values]
        with click.progressbar(item_ids, show_pos=True, width=48,
                               label='Reprocessing {} tracks'.format(len(item_ids))) as bar:
            for item_pk in bar:
                close_old_connections()
                item = Media.objects.get(pk=item_pk)
                if item.master and item.master.path:
                    p = FileInfoProcessor(item.master.path)
                    current_duration = item.master_duration
                    new_duration = p.duration
                    try:
                        diff = abs(current_duration - new_duration)
                    except TypeError:
                        # Missing duration -> treat as needing repair.
                        diff = 100.0
                    if diff > tolerance:
                        items_to_reprocess.append(item)
                    # add to csv log
                    if diff > tolerance and dump_to:
                        dump_to.write('{pk},{diff}\n'.format(pk=item.pk, diff=diff))
                        dump_to.flush()
    click.echo('{} tracks have differences in duration'.format(len(items_to_reprocess)))
    if click.confirm('Do you want to update/repair the durations on {} tracks?'.format(len(items_to_reprocess))):
        base_url = 'http://{}'.format(Site.objects.get_current().domain)
        # Per-item console summary.
        tpl = u'''id: {id} - "{name}"
{url}
old: {current_duration}
new: {new_duration}
diff: {diff}
'''
        # CSV audit-log line.
        tpl_log = u'{ct},{pk},{type},{current_duration},{new_duration},{diff},{url}\n'
        # write column header
        if log_file:
            log_file.write(tpl_log.format(
                ct='content-type',
                pk='id',
                url='url',
                type='type',
                #
                current_duration='old_duration',
                new_duration='new_duration',
                diff='diff',
            ))
        # loop affected media, fix durations, get playlist appearances & print/log info
        for item in items_to_reprocess:
            p = FileInfoProcessor(item.master.path)
            current_duration = item.master_duration
            new_duration = p.duration
            try:
                diff = current_duration - new_duration
            except TypeError:
                diff = '-'
            click.echo(tpl.format(
                id=item.id,
                name=item.name,
                url=base_url + item.get_absolute_url(),
                #
                current_duration=current_duration,
                new_duration=new_duration,
                diff=diff
            ))
            if log_file:
                log_file.write(tpl_log.format(
                    ct='media',
                    pk=item.pk,
                    url=base_url + item.get_absolute_url(),
                    type=item.get_mediatype_display(),
                    #
                    current_duration=current_duration,
                    new_duration=new_duration,
                    diff=diff
                ))
                log_file.flush()
            # Track each playlist the repaired item appears on (once).
            for p in item.get_appearances():
                if not p.pk in affected_playlist_ids:
                    affected_playlist_ids.append(p.pk)
                    # we need to store the 'current' value of the duration
                    affected_playlists.append({
                        'obj': p,
                        'current_duration': p.get_duration()
                    })
            # update media duration
            Media.objects.filter(pk=item.pk).update(master_duration=new_duration)
            invalidate_obj(item)
        # loop playlists & print/log info
        for item in affected_playlists:
            invalidate_obj(item['obj'])
            # Stored values are milliseconds; compare in seconds.
            current_duration = float(item['current_duration']) / 1000
            new_duration = float(item['obj'].get_duration()) / 1000
            try:
                diff = current_duration - new_duration
            except TypeError:
                diff = '-'
            click.echo(tpl.format(
                id=item['obj'].id,
                name=item['obj'].name,
                url=base_url + item['obj'].get_absolute_url(),
                #
                current_duration=current_duration,
                new_duration=new_duration,
                diff=diff
            ))
            if log_file:
                log_file.write(tpl_log.format(
                    ct='playlist',
                    pk=item['obj'].pk,
                    url=base_url + item['obj'].get_absolute_url(),
                    type=item['obj'].get_type_display(),
                    #
                    current_duration=current_duration,
                    new_duration=new_duration,
                    diff=diff
                ))
                log_file.flush()
            # update playlist duration
            Playlist.objects.filter(pk=item['obj'].pk).update(duration=new_duration * 1000)
            # NOTE(review): `item` is a dict here -- presumably this should be
            # invalidate_obj(item['obj']); confirm against the original.
            invalidate_obj(item)