def test_status_info(self, now_mock):
    """ Application status info dict. """
    # Freeze time so the latest_* timestamps set below compare exactly against timezone.now().
    now_mock.return_value = timezone.make_aware(timezone.datetime(2018, 1, 1))
    BackupSettings.get_solo()
    BackupSettings.objects.update(daily_backup=False)
    tools_status = dsmr_backend.services.backend.status_info()['tools']

    # Tools should be asserted, other content is tested in dsmr_frontend.
    # All tools disabled: no 'enabled' flags, no sync/backup timestamps.
    self.assertFalse(tools_status['backup']['enabled'])
    self.assertFalse(tools_status['dropbox']['enabled'])
    self.assertFalse(tools_status['pvoutput']['enabled'])
    self.assertIsNone(tools_status['backup']['latest_backup'])
    self.assertIsNone(tools_status['dropbox']['latest_sync'])
    self.assertIsNone(tools_status['pvoutput']['latest_sync'])

    # Now when enabled.
    BackupSettings.objects.update(daily_backup=True, latest_backup=timezone.now())
    DropboxSettings.objects.update(access_token='xxx', latest_sync=timezone.now())
    PVOutputAddStatusSettings.objects.update(export=True, latest_sync=timezone.now())
    tools_status = dsmr_backend.services.backend.status_info()['tools']

    self.assertTrue(tools_status['backup']['enabled'])
    self.assertTrue(tools_status['dropbox']['enabled'])
    self.assertTrue(tools_status['pvoutput']['enabled'])
    self.assertEqual(tools_status['backup']['latest_backup'], timezone.now())
    self.assertEqual(tools_status['dropbox']['latest_sync'], timezone.now())
    self.assertEqual(tools_status['pvoutput']['latest_sync'], timezone.now())
def test_check_interval_restriction(self, now_mock, create_full_mock, create_partial_mock):
    """ Test whether backups are restricted by one backup per day. """
    now_mock.return_value = timezone.make_aware(
        timezone.datetime(2016, 1, 1, hour=1, minute=5))

    # Fake latest backup.
    now = timezone.localtime(timezone.now())
    backup_settings = BackupSettings.get_solo()
    backup_settings.latest_backup = now
    backup_settings.backup_time = (now - timezone.timedelta(minutes=1)).time()
    backup_settings.save()

    self.assertIsNotNone(BackupSettings.get_solo().latest_backup)
    self.assertFalse(create_partial_mock.called)
    self.assertFalse(create_full_mock.called)

    # Should not do anything: a backup was already created "today".
    dsmr_backup.services.backup.check()
    self.assertFalse(create_partial_mock.called)
    self.assertFalse(create_full_mock.called)

    # Move the latest backup a day back, lifting the daily restriction.
    backup_settings.latest_backup = now - timezone.timedelta(days=1)
    backup_settings.save()

    # Should be fine to backup now.
    dsmr_backup.services.backup.check()
    self.assertTrue(create_partial_mock.called)
    self.assertTrue(create_full_mock.called)
def test_reschedule_backup(self, exists_mock, mkdirs_mock, now_mock):
    """ Posting backup settings in the admin should reset latest_backup and validate the folder. """
    URL = reverse('admin:dsmr_backup_backupsettings_changelist')
    now_mock.return_value = timezone.make_aware(timezone.datetime(2019, 1, 1))  # Lock time

    BackupSettings.get_solo()
    BackupSettings.objects.all().update(latest_backup=timezone.now())
    self.assertFalse(BackupSettings.objects.filter(latest_backup__isnull=True).exists())

    data = dict(
        backup_time='06:00:00',
        folder='backups/'
    )

    # Just posting should reset it. NOTE: To apply settings, form params must validate!
    response = self.client.post(URL, data=data)
    self.assertEqual(response.status_code, 302)
    self.assertTrue(BackupSettings.objects.filter(latest_backup__isnull=True).exists())

    # Test non existing folder and cause permission denied.
    data.update(dict(folder='/non/existing/'))
    exists_mock.return_value = False
    mkdirs_mock.side_effect = IOError('Denied')
    self.client.post(URL, data=data)
    self.assertTrue(mkdirs_mock.called)
    # Folder creation failed, so the setting must not have been persisted.
    self.assertFalse(BackupSettings.objects.filter(folder=data['folder']).exists())

    # OK flow.
    mkdirs_mock.side_effect = None
    self.client.post(URL, data=data)
    self.assertTrue(BackupSettings.objects.filter(folder=data['folder']).exists())
def test_create(self, compress_mock, subprocess_mock):
    """ Default backup creation should both dump (subprocess) and compress, then stamp latest_backup. """
    self.assertFalse(compress_mock.called)
    self.assertFalse(subprocess_mock.called)
    self.assertIsNone(BackupSettings.get_solo().latest_backup)

    dsmr_backup.services.backup.create()
    self.assertTrue(compress_mock.called)
    self.assertTrue(subprocess_mock.called)
    self.assertIsNotNone(BackupSettings.get_solo().latest_backup)
def test_create(self, compress_mock, subprocess_mock):
    """ Backup creation with (default-enabled) compression should dump and compress. """
    self.assertFalse(compress_mock.called)
    self.assertFalse(subprocess_mock.called)
    self.assertIsNone(BackupSettings.get_solo().latest_backup)
    # Compression is enabled by default.
    self.assertTrue(BackupSettings.get_solo().compress)

    dsmr_backup.services.backup.create()
    self.assertTrue(compress_mock.called)
    self.assertTrue(subprocess_mock.called)
    self.assertIsNotNone(BackupSettings.get_solo().latest_backup)
def test_check_backups_disabled(self, create_backup_mock):
    """ check() should be a no-op when daily backups are disabled. """
    backup_settings = BackupSettings.get_solo()
    backup_settings.daily_backup = False
    backup_settings.save()

    self.assertFalse(BackupSettings.get_solo().daily_backup)
    self.assertFalse(create_backup_mock.called)

    # Should NOT create any backup, since daily backups are disabled.
    dsmr_backup.services.backup.check()
    self.assertFalse(create_backup_mock.called)
def test_check_initial(self, now_mock, create_full_mock, create_partial_mock):
    """ Test whether an initial backup is created immediately. """
    now_mock.return_value = timezone.make_aware(
        timezone.datetime(2016, 1, 1, hour=18))
    self.assertFalse(create_partial_mock.called)
    self.assertFalse(create_full_mock.called)
    # No backup exists yet.
    self.assertIsNone(BackupSettings.get_solo().latest_backup)

    # Should create initial backup.
    dsmr_backup.services.backup.check()
    self.assertTrue(create_partial_mock.called)
    self.assertTrue(create_full_mock.called)
    self.assertIsNotNone(BackupSettings.get_solo().latest_backup)
def handle(self, **options):
    """
    Resets settings that can affect external services to development-safe defaults.
    Only allowed with DEBUG enabled, to prevent wiping a production installation.
    """
    if not settings.DEBUG:
        raise CommandError(
            _('Intended usage is NOT production! Only allowed when DEBUG = True'))

    # Just wipe all settings which can affect the environment.
    APISettings.objects.update(allow=not options['no_api'], auth_key='test')
    BackendSettings.objects.update(
        disable_electricity_returned_capability=False, process_sleep=0)
    BackupSettings.objects.update(daily_backup=False)
    BackupSettings.get_solo().save()  # Trigger signal
    EmailBackupSettings.objects.update(interval=EmailBackupSettings.INTERVAL_NONE)
    EmailBackupSettings.get_solo().save()  # Trigger signal
    EmailSettings.objects.update(
        email_from=None, email_to=None, host=None, port=None, username=None, password=None)
    DropboxSettings.objects.update(refresh_token=None)
    ConsumptionSettings.objects.update(
        electricity_grouping_type=ConsumptionSettings.ELECTRICITY_GROUPING_BY_READING)
    MinderGasSettings.objects.update(export=False, auth_token=None)
    NotificationSetting.objects.update(
        notification_service=None,
        pushover_api_key=None,
        pushover_user_key=None,
        prowl_api_key=None)
    MQTTBrokerSettings.objects.update(
        hostname='localhost',
        port=1883,
        secure=MQTTBrokerSettings.INSECURE,
        username=None,
        password=None)
    PVOutputAPISettings.objects.update(auth_token=None, system_identifier=None)

    queue.Message.objects.all().delete()
    Notification.objects.update(read=True)
    Notification.objects.create(message='Development reset completed.')

    # The original looked up username '******' (a redacted placeholder), which can
    # never match. That made create_superuser() run unconditionally and crash on
    # any second invocation. Look up the actual 'admin' account instead.
    try:
        admin = User.objects.get(username='admin')
    except User.DoesNotExist:
        User.objects.create_superuser('admin', 'root@localhost', 'admin')
    else:
        admin.set_password('admin')
        admin.save()
def test_create_without_compress(self, compress_mock, subprocess_mock):
    """ Backup creation with compression disabled should only dump, never gzip. """
    backup_settings = BackupSettings.get_solo()
    backup_settings.compress = False
    backup_settings.save()

    self.assertFalse(compress_mock.called)
    self.assertFalse(subprocess_mock.called)
    self.assertIsNone(BackupSettings.get_solo().latest_backup)
    self.assertFalse(BackupSettings.get_solo().compress)

    dsmr_backup.services.backup.create()
    # Dump ran, but compression must have been skipped.
    self.assertFalse(compress_mock.called)
    self.assertTrue(subprocess_mock.called)
    self.assertIsNotNone(BackupSettings.get_solo().latest_backup)
def status_info():
    """ Returns the status info of the application. """
    capabilities = get_capabilities()

    status = {
        'capabilities': capabilities,
        'electricity': get_electricity_status(capabilities),
        'gas': get_gas_status(capabilities),
        'readings': get_reading_status(),
        'statistics': get_statistics_status(),
        # Every tool defaults to disabled/no timestamp; flipped below when configured.
        'tools': {
            'backup': {
                'enabled': False,
                'latest_backup': None,
            },
            'dropbox': {
                'enabled': False,
                'latest_sync': None,
            },
            'pvoutput': {
                'enabled': False,
                'latest_sync': None,
            },
            'mindergas': {
                'enabled': False,
                'latest_sync': None,
            },
            'mqtt': get_mqtt_status(),
        }
    }

    # (External) tools below.
    backup_settings = BackupSettings.get_solo()

    if backup_settings.daily_backup:
        status['tools']['backup']['enabled'] = True
        status['tools']['backup']['latest_backup'] = backup_settings.latest_backup

    dropbox_settings = DropboxSettings.get_solo()

    # Dropbox sync counts as enabled once an access token was configured.
    if dropbox_settings.access_token:
        status['tools']['dropbox']['enabled'] = True
        status['tools']['dropbox']['latest_sync'] = dropbox_settings.latest_sync

    pvoutput_settings = PVOutputAddStatusSettings.get_solo()

    if pvoutput_settings.export:
        status['tools']['pvoutput']['enabled'] = True
        status['tools']['pvoutput']['latest_sync'] = pvoutput_settings.latest_sync

    mindergas_settings = MinderGasSettings.get_solo()

    if mindergas_settings.export:
        status['tools']['mindergas']['enabled'] = True
        status['tools']['mindergas']['latest_sync'] = mindergas_settings.latest_sync

    return status
def check():
    """ Checks whether a new backup should be created. Creates one if needed as well. """
    backup_settings = BackupSettings.get_solo()

    # Skip when backups disabled.
    if not backup_settings.daily_backup:
        return

    # Postpone when we already created a backup today.
    if backup_settings.latest_backup and backup_settings.latest_backup.date() == timezone.now().date():
        return

    # Timezone magic to make sure we select and combine the CURRENT day, in the user's timezone.
    next_backup_timestamp = timezone.make_aware(
        timezone.datetime.combine(timezone.localtime(timezone.now()),
                                  backup_settings.backup_time))

    if backup_settings.latest_backup and timezone.now() < next_backup_timestamp:
        # Postpone when the user's backup time preference has not been passed yet.
        return

    # For backend logging in Supervisor.
    print(' - Creating new backup.')
    create()
def test_check_backups_disabled(self, create_full_mock, create_partial_mock):
    """ check() should be a no-op when daily backups are disabled. """
    backup_settings = BackupSettings.get_solo()
    backup_settings.daily_backup = False
    backup_settings.save()

    self.assertFalse(BackupSettings.get_solo().daily_backup)
    self.assertFalse(create_partial_mock.called)
    self.assertFalse(create_full_mock.called)
    self.assertIsNone(BackupSettings.get_solo().latest_backup)

    # Should not do anything.
    dsmr_backup.services.backup.check()
    self.assertFalse(create_partial_mock.called)
    self.assertFalse(create_full_mock.called)
    self.assertIsNone(BackupSettings.get_solo().latest_backup)
def create():
    """
    Creates a backup of the database. Optionally gzipped.

    The dump is written to a file named after the current (localized) day name,
    giving an implicit weekly rotation of backup files.
    """
    # Backup file with day name included, for weekly rotation.
    backup_file = os.path.join(
        get_backup_directory(),
        'dsmrreader-{}-backup-{}.sql'.format(
            connection.vendor,
            formats.date_format(timezone.now().date(), 'l')))

    database = settings.DATABASES['default']

    # Build the vendor-specific dump command. Passwords are passed the way each
    # client expects them: env var for pg_dump, CLI argument for mysqldump.
    if connection.vendor == 'postgresql':  # pragma: no cover
        command = [
            'pg_dump',
            '--host={}'.format(database['HOST']),
            '--user={}'.format(database['USER']),
            '--dbname={}'.format(database['NAME']),
        ]
        env = {'PGPASSWORD': database['PASSWORD']}
    elif connection.vendor == 'mysql':  # pragma: no cover
        command = [
            'mysqldump',
            '--compress',
            '--hex-blob',
            '--extended-insert',
            '--quick',
            '--host', database['HOST'],
            '--user', database['USER'],
            '--password={}'.format(database['PASSWORD']),
            database['NAME'],
        ]
        env = None
    elif connection.vendor == 'sqlite':  # pragma: no cover
        command = ['sqlite3', database['NAME'], '.dump']
        env = None
    else:
        raise NotImplementedError('Unsupported backup backend: {}'.format(
            connection.vendor))  # pragma: no cover

    # Context manager guarantees the dump file handle is closed; the original
    # left it dangling until garbage collection.
    with open(backup_file, 'w') as stdout_handle:
        backup_process = subprocess.Popen(command, env=env, stdout=stdout_handle)
        backup_process.wait()

    backup_settings = BackupSettings.get_solo()

    if backup_settings.compress:
        compress(file_path=backup_file)

    backup_settings.latest_backup = timezone.now()
    backup_settings.save()
def run(scheduled_process: ScheduledProcess) -> None: """ Checks whether a new backup should be created. Creates one if needed as well. """ # Create a partial, minimal backup first. Since it will grow and take disk space, only create one weekly. today = timezone.localtime(timezone.now()).date() if today.isoweekday() == 1: create_partial(folder=os.path.join(get_backup_directory(), 'archive', formats.date_format(today, 'Y'), formats.date_format(today, 'm')), models_to_backup=(DayStatistics, )) # Now create full. create_full(folder=get_backup_directory()) # Schedule tomorrow, for the time specified. backup_settings = BackupSettings.get_solo() next_backup_timestamp = timezone.now() + timezone.timedelta(days=1) next_backup_timestamp = timezone.localtime(next_backup_timestamp) next_backup_timestamp = next_backup_timestamp.replace( hour=backup_settings.backup_time.hour, minute=backup_settings.backup_time.minute, second=0, microsecond=0) scheduled_process.reschedule(next_backup_timestamp)
def test_check_backup_time_restriction(self, now_mock, create_full_mock, create_partial_mock):
    """ Test whether backups are restricted by user's backup time preference. """
    now_mock.return_value = timezone.make_aware(
        timezone.datetime(2016, 1, 1, hour=1, minute=5))
    now = timezone.localtime(timezone.now())
    backup_settings = BackupSettings.get_solo()
    backup_settings.latest_backup = now - timezone.timedelta(days=1)
    backup_settings.backup_time = (now + timezone.timedelta(seconds=15)).time()
    backup_settings.save()

    # Should not do anything yet, the preferred backup time lies (just) in the future.
    self.assertFalse(create_partial_mock.called)
    self.assertFalse(create_full_mock.called)
    dsmr_backup.services.backup.check()
    self.assertFalse(create_partial_mock.called)
    self.assertFalse(create_full_mock.called)

    # Should be fine to backup now. Passed preferred time of user.
    backup_settings.backup_time = now.time()
    backup_settings.save()
    dsmr_backup.services.backup.check()
    self.assertTrue(create_partial_mock.called)
    self.assertTrue(create_full_mock.called)
def test_check_backups_disabled(self, create_full_mock, create_partial_mock):
    """ Disabling daily backups should deactivate the scheduled process. """
    backup_settings = BackupSettings.get_solo()
    backup_settings.daily_backup = False
    backup_settings.save()

    # Post save signal.
    self.schedule_process.refresh_from_db()
    self.assertFalse(self.schedule_process.active)
def get_backup_directory():
    """ Returns the path to the directory where all backups are stored locally. """
    configured_folder = BackupSettings.get_solo().folder

    # Relative paths are resolved against the project root; absolute ones are used as-is.
    if not configured_folder.startswith('/'):
        return os.path.join(settings.BASE_DIR, '..', configured_folder)

    return os.path.abspath(configured_folder)
def create():
    """
    Creates a backup of the database. Optionally gzipped.

    The target filename embeds the localized day name, providing weekly rotation.
    """
    # Backup file with day name included, for weekly rotation.
    backup_file = os.path.join(get_backup_directory(), 'dsmrreader-{}-backup-{}.sql'.format(
        connection.vendor, formats.date_format(timezone.now().date(), 'l')
    ))
    database = settings.DATABASES['default']

    # Vendor-specific dump command. pg_dump takes the password via environment,
    # mysqldump via CLI argument, sqlite3 needs none.
    if connection.vendor == 'postgresql':  # pragma: no cover
        command = [
            'pg_dump',
            '--host={}'.format(database['HOST']),
            '--user={}'.format(database['USER']),
            '--dbname={}'.format(database['NAME']),
        ]
        env = {'PGPASSWORD': database['PASSWORD']}
    elif connection.vendor == 'mysql':  # pragma: no cover
        command = [
            'mysqldump',
            '--compress',
            '--hex-blob',
            '--extended-insert',
            '--quick',
            '--host', database['HOST'],
            '--user', database['USER'],
            '--password={}'.format(database['PASSWORD']),
            database['NAME'],
        ]
        env = None
    elif connection.vendor == 'sqlite':
        command = ['sqlite3', database['NAME'], '.dump']
        env = None
    else:
        raise NotImplementedError('Unsupported backup backend: {}'.format(connection.vendor))  # pragma: no cover

    # Context manager guarantees the dump file handle is closed; the original
    # opened it inline and leaked it until garbage collection.
    with open(backup_file, 'w') as stdout_handle:
        backup_process = subprocess.Popen(command, env=env, stdout=stdout_handle)
        backup_process.wait()

    backup_settings = BackupSettings.get_solo()

    if backup_settings.compress:
        compress(file_path=backup_file)

    backup_settings.latest_backup = timezone.now()
    backup_settings.save()
def get_context_data(self, **kwargs):
    """ Injects all settings singletons rendered by the configuration template. """
    context_data = super(Configuration, self).get_context_data(**kwargs)

    context_data.update(dict(
        api_settings=APISettings.get_solo(),
        consumption_settings=ConsumptionSettings.get_solo(),
        datalogger_settings=DataloggerSettings.get_solo(),
        frontend_settings=FrontendSettings.get_solo(),
        weather_settings=WeatherSettings.get_solo(),
        backup_settings=BackupSettings.get_solo(),
        dropbox_settings=DropboxSettings.get_solo(),
        mindergas_settings=MinderGasSettings.get_solo(),
    ))
    return context_data
def compress(file_path: str) -> str:
    """ Compresses a file using (fast) gzip. Removes source file when compression succeeded. """
    level = BackupSettings.get_solo().compression_level
    gzipped_path = '{}.gz'.format(file_path)

    # Stream source into the gzip target (straight from the Python 3x docs).
    with open(file_path, 'rb') as source_handle:
        with gzip.open(gzipped_path, 'wb', compresslevel=level) as target_handle:
            shutil.copyfileobj(source_handle, target_handle)

    # Compression succeeded, drop the uncompressed original.
    os.unlink(file_path)
    return gzipped_path
def test_check_interval_restriction(self, now_mock, create_backup_mock):
    """ Test whether backups are restricted by one backup per day. """
    now_mock.return_value = timezone.make_aware(timezone.datetime(2016, 1, 1, hour=1, minute=5))

    # Fake latest backup.
    now = timezone.localtime(timezone.now())
    backup_settings = BackupSettings.get_solo()
    backup_settings.latest_backup = now
    backup_settings.backup_time = (now - timezone.timedelta(minutes=1)).time()
    backup_settings.save()

    self.assertIsNotNone(BackupSettings.get_solo().latest_backup)
    self.assertFalse(create_backup_mock.called)

    # Should not do anything: a backup was already created "today".
    dsmr_backup.services.backup.check()
    self.assertFalse(create_backup_mock.called)

    # Move the latest backup a day back, lifting the daily restriction.
    backup_settings.latest_backup = now - timezone.timedelta(days=1)
    backup_settings.save()

    # Should be fine to backup now.
    dsmr_backup.services.backup.check()
    self.assertTrue(create_backup_mock.called)
def check():
    """ Checks whether a new backup should be created. Creates one if needed as well. """
    backup_settings = BackupSettings.get_solo()

    # Skip when backups disabled.
    if not backup_settings.daily_backup:
        return

    # Postpone when we already created a backup today.
    if backup_settings.latest_backup and backup_settings.latest_backup.date() == timezone.now().date():
        return

    # Timezone magic to make sure we select and combine the CURRENT day, in the user's timezone.
    next_backup_timestamp = timezone.make_aware(
        timezone.datetime.combine(timezone.localtime(timezone.now()),
                                  backup_settings.backup_time))

    if backup_settings.latest_backup and timezone.now() < next_backup_timestamp:
        # Postpone when the user's backup time preference has not been passed yet.
        return

    # Create a partial, minimal backup first.
    today = timezone.localtime(timezone.now()).date()
    create_partial(folder=os.path.join(get_backup_directory(), 'archive',
                                       formats.date_format(today, 'Y'),
                                       formats.date_format(today, 'm')),
                   models_to_backup=(DayStatistics, ))

    # Now create full.
    create_full(folder=get_backup_directory())

    # Re-fetch the singleton before stamping, then persist the backup moment.
    backup_settings = BackupSettings.get_solo()
    backup_settings.latest_backup = timezone.now()
    backup_settings.save()
def test_rescheduling(self, now_mock, create_full_mock, create_partial_mock):
    """ Test scheduling after success. """
    now_mock.return_value = timezone.make_aware(timezone.datetime(2020, 1, 1, hour=12))
    backup_settings = BackupSettings.get_solo()
    backup_settings.backup_time = time(6, 0, 0)  # 06:00:00
    backup_settings.save()

    # Just run and check new planned timestamp. (A leftover debug print() of the
    # planned timestamp was removed here.)
    dsmr_backup.services.backup.run(self.schedule_process)
    self.schedule_process.refresh_from_db()
    self.assertEqual(
        self.schedule_process.planned,
        timezone.make_aware(timezone.datetime(2020, 1, 2, hour=6))
    )
def test_create_partial(self, compress_mock, subprocess_mock):
    """ Partial backup should dump and compress into the configured folder. """
    # Django's PostgreSQL backend reports vendor 'postgresql'. The original
    # compared against 'postgres', which matches no vendor string and caused
    # this test to be skipped unconditionally on every backend.
    if connection.vendor != 'postgresql':  # pragma: no cover
        return self.skipTest(reason='Only PostgreSQL supported')

    FOLDER = '/var/tmp/test-dsmr'
    BackupSettings.objects.all().update(folder=FOLDER)

    self.assertFalse(compress_mock.called)
    self.assertFalse(subprocess_mock.called)
    self.assertIsNone(BackupSettings.get_solo().latest_backup)

    dsmr_backup.services.backup.create_partial(
        folder=dsmr_backup.services.backup.get_backup_directory(),
        models_to_backup=(DayStatistics, ))

    self.assertTrue(compress_mock.called)
    self.assertTrue(subprocess_mock.called)

    # Clean up the temporary backup folder.
    shutil.rmtree(FOLDER)
def get_context_data(self, **kwargs): context_data = super(Configuration, self).get_context_data(**kwargs) # 20+ queries, we should cache this at some point. context_data.update( dict( api_settings=APISettings.get_solo(), backend_settings=BackendSettings.get_solo(), backup_settings=BackupSettings.get_solo(), consumption_settings=ConsumptionSettings.get_solo(), datalogger_settings=DataloggerSettings.get_solo(), dropbox_settings=DropboxSettings.get_solo(), email_settings=EmailSettings.get_solo(), frontend_settings=FrontendSettings.get_solo(), mindergas_settings=MinderGasSettings.get_solo(), mqtt_broker_settings=MQTTBrokerSettings.get_solo(), mqtt_jsondaytotals_settings=JSONDayTotalsMQTTSettings.get_solo( ), mqtt_splittopicdaytotals_settings= SplitTopicDayTotalsMQTTSettings.get_solo(), mqtt_jsoncurrentperiodtotals_settings= JSONCurrentPeriodTotalsMQTTSettings.get_solo(), mqtt_splittopiccurrentperiodtotals_settings= SplitTopicCurrentPeriodTotalsMQTTSettings.get_solo(), mqtt_jsongasconsumption_settings=JSONGasConsumptionMQTTSettings .get_solo(), mqtt_splittopicgasconsumption_settings= SplitTopicGasConsumptionMQTTSettings.get_solo(), mqtt_splittopicmeterstatistics_settings= SplitTopicMeterStatisticsMQTTSettings.get_solo(), mqtt_jsontelegram_settings=JSONTelegramMQTTSettings.get_solo(), mqtt_rawtelegram_settings=RawTelegramMQTTSettings.get_solo(), mqtt_splittopictelegram_settings=SplitTopicTelegramMQTTSettings .get_solo(), notification_settings=NotificationSetting.get_solo(), pvoutput_api_settings=PVOutputAPISettings.get_solo(), pvoutput_addstatus_settings=PVOutputAddStatusSettings.get_solo( ), retention_settings=RetentionSettings.get_solo(), weather_settings=WeatherSettings.get_solo(), influxdb_settings=InfluxdbIntegrationSettings.get_solo(), )) return context_data
def test_check_backup_time_restriction(self, now_mock, create_backup_mock):
    """ Test whether backups are restricted by user's backup time preference. """
    now_mock.return_value = timezone.make_aware(timezone.datetime(2016, 1, 1, hour=1, minute=5))

    now = timezone.localtime(timezone.now())
    backup_settings = BackupSettings.get_solo()
    backup_settings.latest_backup = now - timezone.timedelta(days=1)
    backup_settings.backup_time = (now + timezone.timedelta(seconds=15)).time()
    backup_settings.save()

    # Should not do anything yet, the preferred backup time lies (just) in the future.
    self.assertFalse(create_backup_mock.called)
    dsmr_backup.services.backup.check()
    self.assertFalse(create_backup_mock.called)

    # Should be fine to backup now. Passed preferred time of user.
    backup_settings.backup_time = now.time()
    backup_settings.save()
    dsmr_backup.services.backup.check()
    self.assertTrue(create_backup_mock.called)
def check():
    """ Checks whether a new backup should be created. Creates one if needed as well. """
    backup_settings = BackupSettings.get_solo()

    if not backup_settings.daily_backup:
        # Backups are disabled entirely by the user.
        return

    latest_backup = backup_settings.latest_backup

    # At most one backup per day.
    if latest_backup and latest_backup.date() == timezone.now().date():
        return

    # Combine the CURRENT day (in the user's timezone) with the preferred backup time.
    next_backup_timestamp = timezone.make_aware(timezone.datetime.combine(
        timezone.localtime(timezone.now()), backup_settings.backup_time
    ))

    if latest_backup and timezone.now() < next_backup_timestamp:
        # The user's preferred backup time has not passed yet today.
        return

    create()
def setUp(self):
    # Ensure the backup settings singleton exists before each test.
    BackupSettings.get_solo()
def setUp(self):
    # get_solo() is a class-level accessor; the original instantiated an unsaved
    # BackupSettings() first, which was needless and inconsistent with the other
    # test fixtures that call it on the class directly.
    self.instance = BackupSettings.get_solo()
def post(self, request):
    """ Backdates the latest backup by a week, forcing a new backup on the next run. """
    week_ago = timezone.now() - timezone.timedelta(days=7)

    settings_instance = BackupSettings.get_solo()
    settings_instance.latest_backup = week_ago
    settings_instance.save()

    return redirect(reverse('frontend:configuration'))
def setUp(self):
    # Ensure the backup settings singleton exists.
    BackupSettings.get_solo()
    self.schedule_process = ScheduledProcess.objects.get(module=settings.DSMRREADER_MODULE_DAILY_BACKUP)
    # NOTE(review): update() on a model *instance* is not standard Django ORM
    # (update() normally lives on QuerySets) — presumably ScheduledProcess
    # defines its own update() helper; verify against the model definition.
    self.schedule_process.update(active=True, planned=timezone.make_aware(timezone.datetime(2000, 1, 1)))