def status_offload(request):
    """Hand a status (SSE) request off to the dedicated status server.

    Serializes the user's task store into redis under a short-lived key,
    then sets uwsgi request variables instructing the router to offload
    the connection to the host/port named by ``STATUS_OFFLOAD_SOCKET``.
    """
    offload_unconfigured = not uwsgi or not settings.STATUS_OFFLOAD_SOCKET
    if offload_unconfigured:
        return JsonResponse(
            {
                "error": "Status unavailable in this environment.",
            },
            status=404,
        )
    # NOTE(review): `is_authenticated` is a property on modern Django;
    # calling it assumes a version where it is still callable — confirm.
    if not request.user.is_authenticated():
        return JsonResponse({
            "error": "Unauthenticated",
        }, status=401)

    handoff_id = str(uuid.uuid4())
    payload = pickle.dumps({
        "taskstore": TaskStore.get_for_user(request.user),
        "username": request.user.username,
    })
    # The key expires after sixty seconds; the status server must pick
    # the payload up within that window.
    get_lock_redis().set("pickle_{}".format(handoff_id), payload, ex=60)

    hostname, port = settings.STATUS_OFFLOAD_SOCKET.split(":")
    resolved_ip = socket.gethostbyname(hostname)
    uwsgi.add_var("PICKLE_ID", str(handoff_id))
    uwsgi.add_var("OFFLOAD_TO_SSE", "y")
    uwsgi.add_var("OFFLOAD_SERVER", ":".join([resolved_ip, port]))
    return HttpResponse()
def handle(self, *args, **options):
    """Dispatch the management subcommand given on the command line.

    Supports 'lock'/'unlock' (toggle a user's task-store lock),
    'search' (find users by a name/email fragment), and 'list'
    (show redis lock keys that have not yet expired).
    """
    subcommand = options['subcommand'][0]
    username = options['username']
    minutes = options['minutes']

    if subcommand == 'lock':
        # Lock duration is supplied in minutes; the API takes seconds.
        store = TaskStore.objects.get(user__username=username)
        store.set_lock_state(lock=True, seconds=minutes * 60)
        print('{} locked'.format(store))
    elif subcommand == 'unlock':
        store = TaskStore.objects.get(user__username=username)
        store.set_lock_state(lock=False)
        print('{} unlocked'.format(store))
    elif subcommand == 'search':
        # Match the fragment against every name-like field.
        name_query = (
            Q(email__contains=username)
            | Q(username__contains=username)
            | Q(first_name__contains=username)
            | Q(last_name__contains=username)
        )
        for match in User.objects.filter(name_query):
            print(match.username)
    elif subcommand == 'list':
        connection = get_lock_redis()
        for lock_key in connection.keys('*.lock'):
            # Lock values are stored as unix timestamps.
            expiry = datetime.datetime.fromtimestamp(
                int(float(connection.get(lock_key)))
            )
            # Only report locks that are still live.
            if expiry > datetime.datetime.utcnow():
                print('{}: {}'.format(lock_key, expiry))
def __init__(self, env, start_response):
    """Begin an SSE event stream for the task store referenced in the WSGI env.

    Immediately sends the 200 response headers, loads the pickled
    TaskStore handed off via redis (keyed by the ``PICKLE_ID`` request
    variable), subscribes to the user's announcement channels, and
    kicks off a sync.  Startup errors are logged rather than raised;
    ``self.initialized`` stays False in that case.
    """
    start_response("200 OK", self.HEADERS)
    self.last_heartbeat = None
    self.env = env
    # NOTE(review): this stores `env`, not `start_response`; the name
    # suggests the response callable may have been intended — confirm.
    self.response = env
    self.signer = Signer()
    self.initialized = False
    self.queue = Queue()
    # Retrieve the task store pickled by the offloading web worker.
    # NOTE(review): pickle.loads on redis-sourced data is only safe if
    # this key can be written solely by trusted code — confirm.
    client = get_lock_redis()
    pickled_data = pickle.loads(client.get(f"pickle_{env['PICKLE_ID']}"))
    self.store = pickled_data["taskstore"]
    self.username = pickled_data["username"]
    try:
        logger.info(
            "Starting event stream for TaskStore %s for user %s",
            self.store.pk,
            self.username,
        )
        # The client may ask to diff against a specific repository head
        # via the `head` query-string parameter.
        query = urlparse.parse_qs(
            urlparse.urlparse(wsgiref_utils.request_uri(env)).query)
        try:
            self.head = query["head"][0]
        except (KeyError, IndexError):
            # No head requested: use the repository's current head.
            self.head = self.store.repository.head().decode("utf-8")
        # Subscribe to the event stream
        self.subscription = get_announcements_subscription(
            self.store, self.username, [
                "local_sync.{username}",
                "changed_task.{username}",
                "log_message.{username}",
                "personal.{username}",
                settings.ANNOUNCEMENTS_CHANNEL,
            ],
        )
        # Kick-off a sync just to be sure
        kwargs = {
            "asynchronous": True,
            "function": ("views.Status.iterator")
        }
        self.store.sync(msg="Iterator initialization", **kwargs)
        # Let the client know the head has changed if they've asked
        # for a different head than the one we're on:
        if self.head != self.store.repository.head().decode("utf-8"):
            for task_id in self.store.get_changed_task_ids(self.head):
                self.add_message("task_changed", task_id)
        self.initialized = True
    except Exception as e:
        logger.exception("Error starting event stream: %s", str(e))
def get_announcements_subscription(store, username, channels):
    """Return a redis pubsub subscription for the given channel templates.

    Each entry in ``channels`` may contain a ``{username}`` placeholder,
    which is filled in before subscribing.

    :param store: the user's task store (unused here; kept for interface
        compatibility with callers).
    :param username: the username interpolated into channel templates.
    :param channels: channel-name templates to subscribe to.
    :returns: a ``PubSub`` object already subscribed to the formatted
        channels, with subscribe confirmations suppressed.
    """
    client = get_lock_redis()
    subscription = client.pubsub(ignore_subscribe_messages=True)
    # BUG FIX: previously the formatted names were collected into
    # `final_channels` but the code then subscribed to the raw,
    # unformatted `channels`, leaving the literal "{username}"
    # placeholders in the subscribed channel names.  Additionally, the
    # username was passed through .encode("utf8"), which under Python 3
    # would interpolate as "b'name'"; format with the string directly.
    final_channels = []
    for channel in channels:
        final_channels.append(channel.format(username=username))
    subscription.subscribe(*final_channels)
    return subscription
def status_offload(request):
    """Offload a status (SSE) request to the local status-server socket.

    Stores a pickled copy of the user's task store in redis under a
    short-lived key, then instructs uwsgi to hand the connection over
    to the unix socket where the status server listens.
    """
    if not uwsgi:
        return JsonResponse(
            {
                'error': 'Status unavailable in this environment.',
            },
            status=404,
        )
    # NOTE(review): `is_authenticated` is a property on modern Django;
    # calling it assumes a version where it is still callable — confirm.
    if not request.user.is_authenticated():
        return JsonResponse(
            {
                'error': 'Unauthenticated',
            },
            status=401
        )

    handoff_id = str(uuid.uuid4())
    serialized = pickle.dumps({
        'taskstore': TaskStore.get_for_user(request.user),
        'username': request.user.username,
    })
    connection = get_lock_redis()
    # Expire after a minute: the status server must consume this quickly.
    connection.set('pickle_{}'.format(handoff_id), serialized, ex=60)

    uwsgi.add_var("PICKLE_ID", str(handoff_id))
    uwsgi.add_var("OFFLOAD_TO_SSE", "y")
    uwsgi.add_var("OFFLOAD_SERVER", "/tmp/inthe_am_status.sock")
    return HttpResponse()
def get_redis_connection(self):
    """Return the cached redis connection, creating it on first use.

    Also resets the cached subscription handle when the connection is
    first established.
    """
    try:
        return self._redis
    except AttributeError:
        self._redis = get_lock_redis()
        self._subscription = None
        return self._redis
def handle(self, *args, **options):
    """Dispatch a task-store maintenance subcommand.

    Subcommands:
      lock / unlock       -- toggle a user's task-store lock
      search              -- list users matching a name/email fragment
      list                -- show redis lock keys that have not expired
      update_statistics   -- record a repository-size sample per store
      gc_large_repos      -- squash or repack repositories whose last
                             recorded size exceeds the thresholds
      squash              -- squash one user's repository and run gc
      delete_old_accounts / list_old_accounts -- purge or report stores
                             unused for `min_use_recency_days` days
    """
    subcommand = options['subcommand'][0]
    username = options['username']
    minutes = options['minutes']
    repack_size = options['repack_size']
    squash_size = options['squash_size']
    min_use_recency_days = options['min_use_recency_days']
    if subcommand == 'lock':
        store = TaskStore.objects.get(user__username=username)
        # Lock duration is supplied in minutes; the API takes seconds.
        store.set_lock_state(lock=True, seconds=minutes * 60)
        print('{} locked'.format(store))
    elif subcommand == 'unlock':
        store = TaskStore.objects.get(user__username=username)
        store.set_lock_state(lock=False)
        print('{} unlocked'.format(store))
    elif subcommand == 'search':
        # Match the fragment against every name-like field.
        users = User.objects.filter(
            Q(email__contains=username) |
            Q(username__contains=username) |
            Q(first_name__contains=username) |
            Q(last_name__contains=username))
        for user in users:
            print(user.username)
    elif subcommand == 'list':
        redis = get_lock_redis()
        for key in redis.keys('*.lock'):
            # Lock values are stored as unix timestamps.
            value = datetime.datetime.fromtimestamp(
                int(float(redis.get(key))))
            if value > datetime.datetime.utcnow():
                print('{}: {}'.format(key, value))
    elif subcommand == 'update_statistics':
        run_id = 'update_statistics_{date}'.format(
            date=datetime.datetime.now().strftime('%Y%m%dT%H%M%SZ'))
        print("Run ID: {}".format(run_id))
        with progressbar.ProgressBar(
            max_value=TaskStore.objects.count(),
            widgets=[
                ' [', progressbar.Timer(), '] ',
                progressbar.Bar(),
                ' (', progressbar.ETA(), ') ',
            ]
        ) as bar:
            for idx, store in enumerate(
                    TaskStore.objects.order_by('-last_synced')):
                TaskStoreStatistic.objects.create(
                    store=store,
                    measure=TaskStoreStatistic.MEASURE_SIZE,
                    value=store.get_repository_size(),
                    run_id=run_id,
                )
                bar.update(idx)
    elif subcommand == 'gc_large_repos':
        for store in TaskStore.objects.order_by('-last_synced'):
            # Use the most recent recorded size; skip stores that have
            # never been measured.
            try:
                last_size_measurement = store.statistics.filter(
                    measure=TaskStoreStatistic.MEASURE_SIZE).latest(
                        'created')
            except TaskStoreStatistic.DoesNotExist:
                continue
            if last_size_measurement.value > squash_size:
                print("> Squashing {store}...".format(store=store))
                try:
                    store.squash()
                    store.gc()
                    final_size = store.get_repository_size()
                    print(">> {diff} MB recovered".format(
                        diff=int((last_size_measurement.value -
                                  final_size) / 1e6)))
                except Exception as e:
                    print("> FAILED: %s" % e)
                    traceback.print_exc()
            elif last_size_measurement.value > repack_size:
                print("> Repacking {store}...".format(store=store))
                try:
                    store.gc()
                    final_size = store.get_repository_size()
                    print(">> {diff} MB recovered".format(
                        diff=int((last_size_measurement.value -
                                  final_size) / 1e6)))
                except Exception as e:
                    print("> FAILED: %s" % e)
                    traceback.print_exc()
    elif subcommand == 'squash':
        store = TaskStore.objects.get(user__username=username)
        starting_size = store.get_repository_size()
        store.squash(force=options['force'])
        store.gc()
        ending_size = store.get_repository_size()
        print(">> {diff} MB recovered".format(
            diff=int((starting_size - ending_size) / 1e6)))
    elif subcommand == 'delete_old_accounts':
        # A store must be unused (no sync AND no login) for the cutoff
        # period before it is deleted.
        min_action_recency = now() - datetime.timedelta(
            days=min_use_recency_days)
        for store in TaskStore.objects.filter(
                last_synced__lt=min_action_recency,
                user__last_login__lt=min_action_recency).order_by(
                    '-last_synced'):
            print('> Deleting %s' % store.local_path)
            store.delete()
            store.user.delete()
    elif subcommand == 'list_old_accounts':
        min_action_recency = now() - datetime.timedelta(
            days=min_use_recency_days)
        output_format = u'{path}\t{last_synced}\t{last_login}'
        # NOTE(review): the header label for the last column is '******'
        # rather than 'last_login' — possibly redacted or garbled text;
        # confirm the intended header.
        print(
            output_format.format(path='path',
                                 last_synced='last_synced',
                                 last_login='******'))
        for store in TaskStore.objects.filter(
                last_synced__lt=min_action_recency,
                user__last_login__lt=min_action_recency).order_by(
                    '-last_synced'):
            print(
                output_format.format(
                    path=store.local_path,
                    last_synced=store.last_synced,
                    last_login=store.user.last_login,
                ))
def handle(self, *args, **options):
    """Dispatch a task-store maintenance subcommand.

    Subcommands:
      lock / unlock      -- toggle a user's task-store lock
      search             -- list users matching a name/email fragment
      list               -- show redis lock keys that have not expired
      update_statistics  -- record a repository-size sample per store
      gc_large_repos     -- repack repositories over `repack_size` bytes
      squash             -- collapse one user's repository history into
                            a single commit, under a redis lock
    """
    subcommand = options['subcommand'][0]
    username = options['username']
    minutes = options['minutes']
    repack_size = options['repack_size']
    if subcommand == 'lock':
        store = TaskStore.objects.get(user__username=username)
        # Lock duration is supplied in minutes; the API takes seconds.
        store.set_lock_state(lock=True, seconds=minutes*60)
        print('{} locked'.format(store))
    elif subcommand == 'unlock':
        store = TaskStore.objects.get(user__username=username)
        store.set_lock_state(lock=False)
        print('{} unlocked'.format(store))
    elif subcommand == 'search':
        # Match the fragment against every name-like field.
        users = User.objects.filter(
            Q(email__contains=username) |
            Q(username__contains=username) |
            Q(first_name__contains=username) |
            Q(last_name__contains=username)
        )
        for user in users:
            print(user.username)
    elif subcommand == 'list':
        redis = get_lock_redis()
        for key in redis.keys('*.lock'):
            # Lock values are stored as unix timestamps.
            value = datetime.datetime.fromtimestamp(
                int(float(redis.get(key)))
            )
            if value > datetime.datetime.utcnow():
                print('{}: {}'.format(key, value))
    elif subcommand == 'update_statistics':
        run_id = 'update_statistics_{date}'.format(
            date=datetime.datetime.now().strftime('%Y%m%dT%H%M%SZ')
        )
        with progressbar.ProgressBar(
            max_value=TaskStore.objects.count(),
            widgets=[
                ' [', progressbar.Timer(), '] ',
                progressbar.Bar(),
                ' (', progressbar.ETA(), ') ',
            ]
        ) as bar:
            for idx, store in enumerate(
                TaskStore.objects.order_by('-last_synced')
            ):
                TaskStoreStatistic.objects.create(
                    store=store,
                    measure=TaskStoreStatistic.MEASURE_SIZE,
                    value=store.get_repository_size(),
                    run_id=run_id,
                )
                bar.update(idx)
    elif subcommand == 'gc_large_repos':
        for store in TaskStore.objects.order_by('-last_synced'):
            # Use the most recent recorded size; skip stores that have
            # never been measured.
            try:
                last_size_measurement = store.statistics.filter(
                    measure=TaskStoreStatistic.MEASURE_SIZE
                ).latest('created')
            except TaskStoreStatistic.DoesNotExist:
                continue
            if last_size_measurement.value > repack_size:
                print("> Repacking {store}...".format(store=store))
                results = store.gc()
                print(json.dumps(results, sort_keys=True, indent=4))
                final_size = store.get_repository_size()
                print(
                    ">> {diff} MB recovered".format(
                        diff=int(
                            (last_size_measurement.value - final_size) / 1e6
                        )
                    )
                )
    elif subcommand == 'squash':
        store = TaskStore.objects.get(user__username=username)
        lock_name = get_lock_name_for_store(store)
        starting_size = store.get_repository_size()
        with redis_lock(
            lock_name,
            message='Squash',
            lock_timeout=60*60,
            wait_timeout=60,
        ):
            # BUG FIX: this previously read `self.repository.head()`,
            # but `self` here is the management command, which has no
            # `repository` attribute — the repository belongs to the
            # task store (as used everywhere else in this file).
            if (
                store.trello_local_head and
                store.trello_local_head != store.repository.head()
            ):
                raise ValueError("Trello head out-of-date; aborting!")
            # Find the repository's root commit…
            head_commit, _ = store._git_command(
                'rev-list', '--max-parents=0', 'HEAD',
            ).communicate()
            head_commit = head_commit.strip()
            # …soft-reset to it, then checkpoint: this collapses all
            # history into a single new commit.
            store._git_command(
                'reset', '--soft', head_commit,
            ).communicate()
            store.create_git_checkpoint("Repository squashed.")
            if store.trello_local_head:
                # BUG FIX: same `self.repository` -> `store.repository`.
                store.trello_local_head = store.repository.head()
                store.save()
            # NOTE(review): no gc runs after the squash, so the size
            # delta reported below may be near zero until git gc runs.
            ending_size = store.get_repository_size()
            print(
                ">> {diff} MB recovered".format(
                    diff=int(
                        (starting_size - ending_size) / 1e6
                    )
                )
            )
def get_redis_connection(self):
    """Lazily initialize and return the shared redis connection.

    On first call this also clears the cached subscription handle.
    """
    if hasattr(self, '_redis'):
        return self._redis
    self._redis = get_lock_redis()
    self._subscription = None
    return self._redis
def handle(self, *args, **options):
    """Dispatch a task-store maintenance subcommand.

    Subcommands:
      lock / unlock       -- toggle a user's task-store lock
      search              -- list users matching a name/email fragment
      list                -- show redis lock keys that have not expired
      update_statistics   -- record a repository-size sample per store
      migrate_all         -- rewrite stored paths from the old data
                             root to the new one for every store
      gc_large_repos      -- squash or repack repositories whose last
                             recorded size exceeds the thresholds
      squash              -- squash one user's repository and run gc
      delete_old_accounts / list_old_accounts -- purge or report stores
                             unused for `min_use_recency_days` days
    """
    subcommand = options["subcommand"][0]
    username = options["username"]
    minutes = options["minutes"]
    repack_size = options["repack_size"]
    squash_size = options["squash_size"]
    min_use_recency_days = options["min_use_recency_days"]
    if subcommand == "lock":
        store = TaskStore.objects.get(user__username=username)
        # Lock duration is supplied in minutes; the API takes seconds.
        store.set_lock_state(lock=True, seconds=minutes * 60)
        print(f"{store} locked")
    elif subcommand == "unlock":
        store = TaskStore.objects.get(user__username=username)
        store.set_lock_state(lock=False)
        print(f"{store} unlocked")
    elif subcommand == "search":
        # Match the fragment against every name-like field.
        users = User.objects.filter(
            Q(email__contains=username) |
            Q(username__contains=username) |
            Q(first_name__contains=username) |
            Q(last_name__contains=username))
        for user in users:
            print(user.username)
    elif subcommand == "list":
        redis = get_lock_redis()
        for key in redis.keys("*.lock"):
            # Lock values are stored as unix timestamps.
            value = datetime.datetime.fromtimestamp(
                int(float(redis.get(key))))
            if value > datetime.datetime.utcnow():
                print(f"{key}: {value}")
    elif subcommand == "update_statistics":
        run_id = "update_statistics_{date}".format(
            date=datetime.datetime.now().strftime("%Y%m%dT%H%M%SZ"))
        print(f"Run ID: {run_id}")
        with progressbar.ProgressBar(
            max_value=TaskStore.objects.count(),
            widgets=[
                " [", progressbar.Timer(), "] ",
                progressbar.Bar(),
                " (", progressbar.ETA(), ") ",
            ],
        ) as bar:
            for idx, store in enumerate(
                    TaskStore.objects.order_by("-last_synced")):
                TaskStoreStatistic.objects.create(
                    store=store,
                    measure=TaskStoreStatistic.MEASURE_SIZE,
                    value=store.get_repository_size(),
                    run_id=run_id,
                )
                bar.update(idx)
    elif subcommand == "migrate_all":
        # Rewrite every path reference from the old task-data root to
        # the new one, store by store.
        old_path = "/var/www/twweb/task_data/"
        new_path = "/task_data/"
        with progressbar.ProgressBar(
            max_value=TaskStore.objects.count(),
            widgets=[
                " [", progressbar.Timer(), "] ",
                progressbar.Bar(),
                " (", progressbar.ETA(), ") ",
            ],
        ) as bar:
            successful = 0
            total = 0
            for idx, store in enumerate(
                    TaskStore.objects.order_by("-last_synced")):
                success = True
                # Running success percentage; the division fails on the
                # first iteration (total == 0), hence the fallback.
                try:
                    success_rate = successful / total * 100
                except Exception:
                    success_rate = 0
                try:
                    store.local_path = store.local_path.replace(
                        old_path, new_path)
                    store.clear_cached_properties()
                    # Ensure a readable git repository exists at the
                    # new path; create one if the head is missing.
                    try:
                        store.repository.head().decode("utf-8")
                    except (KeyError, NotGitRepository):
                        store.create_git_repository()
                    with fast_git_checkpoint(store, "Migrating"):
                        # Rewrite path references in store metadata.
                        try:
                            for k, v in store.metadata.items():
                                if isinstance(v, str):
                                    store.metadata[k] = v.replace(
                                        old_path, new_path)
                        except Exception as e:
                            print(f"Failed to update metadata: {e}")
                            raise
                        store.clear_cached_properties()
                        # Rewrite path references in taskrc values.
                        try:
                            for k, v in store.taskrc.items():
                                store.taskrc[k] = v.replace(
                                    old_path, new_path)
                        except Exception as e:
                            print(f"Failed to update taskrc: {e}")
                            raise
                        # Rewrite taskrc include paths; note this uses
                        # the private `_write` API of the taskrc object.
                        try:
                            includes = {
                                include.replace(old_path, new_path)
                                for include in store.taskrc.includes
                            }
                            store.taskrc.includes = list(includes)
                            store.taskrc._write()
                        except Exception as e:
                            print(f"Failed to update taskrc: {e}")
                            raise
                        store.clear_cached_properties()
                    store.save()
                except Exception as e:
                    print(
                        f"Failed to migrate {store}: {success_rate}% OK: {e}"
                    )
                    traceback.print_exc()
                    success = False
                if success:
                    successful += 1
                total += 1
                bar.update(idx)
    elif subcommand == "gc_large_repos":
        for store in TaskStore.objects.order_by("-last_synced"):
            # Use the most recent recorded size; skip stores that have
            # never been measured.
            try:
                last_size_measurement = store.statistics.filter(
                    measure=TaskStoreStatistic.MEASURE_SIZE).latest(
                        "created")
            except TaskStoreStatistic.DoesNotExist:
                continue
            if last_size_measurement.value > squash_size:
                print(f"> Squashing {store}...")
                try:
                    store.squash()
                    store.gc()
                    final_size = store.get_repository_size()
                    print(">> {diff} MB recovered".format(
                        diff=int((last_size_measurement.value -
                                  final_size) / 1e6)))
                except Exception as e:
                    print(f"> FAILED: {e}")
                    traceback.print_exc()
            elif last_size_measurement.value > repack_size:
                print(f"> Repacking {store}...")
                try:
                    store.gc()
                    final_size = store.get_repository_size()
                    print(">> {diff} MB recovered".format(
                        diff=int((last_size_measurement.value -
                                  final_size) / 1e6)))
                except Exception as e:
                    print(f"> FAILED: {e}")
                    traceback.print_exc()
    elif subcommand == "squash":
        store = TaskStore.objects.get(user__username=username)
        starting_size = store.get_repository_size()
        store.squash(force=options["force"])
        store.gc()
        ending_size = store.get_repository_size()
        print(
            f">> {int((starting_size - ending_size) / 1000000.0)} MB recovered"
        )
    elif subcommand == "delete_old_accounts":
        # A store must be unused (no sync AND no login) for the cutoff
        # period before it is deleted.
        min_action_recency = now() - datetime.timedelta(
            days=min_use_recency_days)
        for store in TaskStore.objects.filter(
            last_synced__lt=min_action_recency,
            user__last_login__lt=min_action_recency,
        ).order_by("-last_synced"):
            print(f"> Deleting {store.local_path}")
            store.delete()
            store.user.delete()
    elif subcommand == "list_old_accounts":
        min_action_recency = now() - datetime.timedelta(
            days=min_use_recency_days)
        output_format = "{path}\t{last_synced}\t{last_login}"
        # NOTE(review): the header label for the last column is '******'
        # rather than 'last_login' — possibly redacted or garbled text;
        # confirm the intended header.
        print(
            output_format.format(
                path="path",
                last_synced="last_synced",
                last_login="******",
            ))
        for store in TaskStore.objects.filter(
            last_synced__lt=min_action_recency,
            user__last_login__lt=min_action_recency,
        ).order_by("-last_synced"):
            print(
                output_format.format(
                    path=store.local_path,
                    last_synced=store.last_synced,
                    last_login=store.user.last_login,
                ))
def get_redis_connection(self):
    """Return a redis connection, cached on the instance after first use."""
    try:
        return self._redis
    except AttributeError:
        self._redis = get_lock_redis()
    return self._redis
def get_redis_connection(self):
    """Return the instance's redis connection, creating it lazily."""
    if hasattr(self, '_redis'):
        return self._redis
    self._redis = get_lock_redis()
    return self._redis
def handle(self, *args, **options):
    """Run one task-store maintenance subcommand from the CLI options.

    Recognized subcommands: 'lock', 'unlock', 'search', 'list',
    'update_statistics', 'gc_large_repos', 'squash',
    'delete_old_accounts', and 'list_old_accounts'.  Size thresholds
    (`repack_size`, `squash_size`) and the account-age cutoff
    (`min_use_recency_days`) come from the parsed command options.
    """
    subcommand = options['subcommand'][0]
    username = options['username']
    minutes = options['minutes']
    repack_size = options['repack_size']
    squash_size = options['squash_size']
    min_use_recency_days = options['min_use_recency_days']
    if subcommand == 'lock':
        store = TaskStore.objects.get(user__username=username)
        # Minutes from the CLI, seconds for the lock API.
        store.set_lock_state(lock=True, seconds=minutes*60)
        print('{} locked'.format(store))
    elif subcommand == 'unlock':
        store = TaskStore.objects.get(user__username=username)
        store.set_lock_state(lock=False)
        print('{} unlocked'.format(store))
    elif subcommand == 'search':
        # Search the fragment across email, username, and real name.
        users = User.objects.filter(
            Q(email__contains=username) |
            Q(username__contains=username) |
            Q(first_name__contains=username) |
            Q(last_name__contains=username)
        )
        for user in users:
            print(user.username)
    elif subcommand == 'list':
        redis = get_lock_redis()
        for key in redis.keys('*.lock'):
            # Each lock value is a unix timestamp of its expiry.
            value = datetime.datetime.fromtimestamp(
                int(float(redis.get(key)))
            )
            if value > datetime.datetime.utcnow():
                print('{}: {}'.format(key, value))
    elif subcommand == 'update_statistics':
        run_id = 'update_statistics_{date}'.format(
            date=datetime.datetime.now().strftime('%Y%m%dT%H%M%SZ')
        )
        print("Run ID: {}".format(run_id))
        with progressbar.ProgressBar(
            max_value=TaskStore.objects.count(),
            widgets=[
                ' [', progressbar.Timer(), '] ',
                progressbar.Bar(),
                ' (', progressbar.ETA(), ') ',
            ]
        ) as bar:
            for idx, store in enumerate(
                TaskStore.objects.order_by('-last_synced')
            ):
                # One size sample per store, tagged with this run's id.
                TaskStoreStatistic.objects.create(
                    store=store,
                    measure=TaskStoreStatistic.MEASURE_SIZE,
                    value=store.get_repository_size(),
                    run_id=run_id,
                )
                bar.update(idx)
    elif subcommand == 'gc_large_repos':
        for store in TaskStore.objects.order_by('-last_synced'):
            # Stores with no recorded measurement are skipped entirely.
            try:
                last_size_measurement = store.statistics.filter(
                    measure=TaskStoreStatistic.MEASURE_SIZE
                ).latest('created')
            except TaskStoreStatistic.DoesNotExist:
                continue
            if last_size_measurement.value > squash_size:
                # Very large repos get a squash plus gc.
                print("> Squashing {store}...".format(store=store))
                try:
                    store.squash()
                    store.gc()
                    final_size = store.get_repository_size()
                    print(
                        ">> {diff} MB recovered".format(
                            diff=int(
                                (last_size_measurement.value -
                                 final_size) / 1e6
                            )
                        )
                    )
                except Exception as e:
                    print("> FAILED: %s" % e)
                    traceback.print_exc()
            elif last_size_measurement.value > repack_size:
                # Moderately large repos only get a gc/repack.
                print("> Repacking {store}...".format(store=store))
                try:
                    store.gc()
                    final_size = store.get_repository_size()
                    print(
                        ">> {diff} MB recovered".format(
                            diff=int(
                                (last_size_measurement.value -
                                 final_size) / 1e6
                            )
                        )
                    )
                except Exception as e:
                    print("> FAILED: %s" % e)
                    traceback.print_exc()
    elif subcommand == 'squash':
        store = TaskStore.objects.get(user__username=username)
        starting_size = store.get_repository_size()
        store.squash(force=options['force'])
        store.gc()
        ending_size = store.get_repository_size()
        print(
            ">> {diff} MB recovered".format(
                diff=int(
                    (starting_size - ending_size) / 1e6
                )
            )
        )
    elif subcommand == 'delete_old_accounts':
        # Only stores with neither a sync nor a login inside the cutoff
        # window are deleted (along with their user).
        min_action_recency = now() - datetime.timedelta(
            days=min_use_recency_days
        )
        for store in TaskStore.objects.filter(
            last_synced__lt=min_action_recency,
            user__last_login__lt=min_action_recency
        ).order_by('-last_synced'):
            print('> Deleting %s' % store.local_path)
            store.delete()
            store.user.delete()
    elif subcommand == 'list_old_accounts':
        min_action_recency = now() - datetime.timedelta(
            days=min_use_recency_days
        )
        output_format = u'{path}\t{last_synced}\t{last_login}'
        # NOTE(review): the last header label is '******' rather than
        # 'last_login' — possibly redacted/garbled; confirm intent.
        print(
            output_format.format(
                path='path',
                last_synced='last_synced',
                last_login='******'
            )
        )
        for store in TaskStore.objects.filter(
            last_synced__lt=min_action_recency,
            user__last_login__lt=min_action_recency
        ).order_by('-last_synced'):
            print(
                output_format.format(
                    path=store.local_path,
                    last_synced=store.last_synced,
                    last_login=store.user.last_login,
                )
            )