def test_checkin(self):
    """Verifies checkin() POSTs the expected payload and stores the new reg_id."""
    site = models.KegbotSite.get()
    site.registration_id = 'original-regid'
    site.save()

    version = get_version()
    with patch('requests.post') as mock_post:
        mock_post.return_value = mock_response = Mock()
        mock_response.status_code = 200
        mock_response.json.return_value = {
            'status': 'ok',
            'reg_id': 'new-regid'
        }
        checkin.checkin('http://example.com/checkin', 'test-product', 1.23)
        mock_post.assert_called_with(
            'http://example.com/checkin',
            headers={'User-Agent': 'KegbotServer/%s' % version},
            data={
                'reg_id': u'original-regid',
                'product': 'test-product',
                'version': version,
            },
            timeout=1.23)

    # The server returned a new reg_id; it must be persisted on the site record.
    site = models.KegbotSite.get()
    self.assertEqual('new-regid', site.registration_id)
def webhook_post(url, event_dict):
    """Posts an event to the supplied URL.

    The request body is a JSON dictionary of:
      * type: webhook message type (currently always 'event')
      * data: webhook data (the event payload)

    Event payloads are in the same format as the /api/events/ endpoint.

    Returns:
        The `requests.Response` on success, or False if the POST failed.
    """
    logger.info('Posting webhook: url=%s event=%s' % (url, event_dict))
    hook_dict = {
        'type': 'event',
        'data': event_dict,
    }
    headers = {
        'content-type': 'application/json',
        'user-agent': 'Kegbot/%s' % get_version(),
    }
    try:
        return requests.post(url, data=kbjson.dumps(hook_dict), headers=headers)
    except requests.exceptions.RequestException as e:
        # Webhook delivery is best-effort: log and report failure, never raise.
        logger.warning('Error posting hook: %s' % e)
        return False
def slack_post(url, event_dict, slack_conf, msg):
    """Posts an event notification to a Slack incoming-webhook URL.

    Args:
        url: The Slack webhook URL.
        event_dict: The originating event (used for the posting username).
        slack_conf: Plugin configuration providing the target channel.
        msg: Message text to post.

    Returns:
        The `requests.Response` on success, or False if the POST failed.
    """
    logger.info('Posting to slack: url=%s event=%s' % (url, event_dict))
    hook_dict = {
        'type': 'event',
        'payload': {
            "channel": slack_conf.channel,
            "username": event_dict.user.username,
            "text": msg,
            "icon_url": "https://avatars3.githubusercontent.com/u/395880"
        }
    }
    headers = {
        'content-type': 'application/json',
        'user-agent': 'Kegbot/%s' % get_version(),
    }
    try:
        return requests.post(url, data=kbjson.dumps(hook_dict), headers=headers)
    except requests.exceptions.RequestException as e:
        # Best-effort delivery: failures are logged, not raised.
        logger.warning('Error posting hook: %s' % e)
        return False
def test_checkin(self):
    """Verifies checkin() POSTs the expected payload and stores the new reg_id."""
    site = models.KegbotSite.get()
    site.registration_id = 'original-regid'
    site.save()

    # Derive the expected version rather than hard-coding a release string
    # (the old test pinned 'KegbotServer/0.9.17-pre1' and broke every release).
    version = get_version()
    with patch('requests.post') as mock_post:
        mock_post.return_value = mock_response = Mock()
        mock_response.status_code = 200
        mock_response.json.return_value = {
            'status': 'ok',
            'reg_id': 'new-regid'
        }
        checkin.checkin('http://example.com/checkin', 'test-product', 1.23)
        mock_post.assert_called_with(
            'http://example.com/checkin',
            headers={'User-Agent': 'KegbotServer/%s' % version},
            data={
                'reg_id': u'original-regid',
                'product': 'test-product',
                'version': version,
            },
            timeout=1.23)

    site = models.KegbotSite.get()
    self.assertEqual('new-regid', site.registration_id)
def test_get_version(self):
    """The reported version must be set and parse to at least 0.9.23."""
    self.assertNotEqual("0.0.0", util.get_version())
    try:
        util.get_version_object()
    except ValueError as e:
        self.fail("Illegal version: " + str(e))
    self.assertGreaterEqual(util.get_version_object().version, (0, 9, 23))
def test_checkin(self):
    """checkin() should POST the registration payload and persist the new reg_id."""
    site = models.KegbotSite.get()
    site.registration_id = "original-regid"
    site.save()

    server_version = get_version()
    with patch("requests.post") as mock_post:
        fake_response = Mock()
        mock_post.return_value = fake_response
        fake_response.status_code = 200
        fake_response.json.return_value = {"status": "ok", "reg_id": "new-regid"}

        checkin.checkin("http://example.com/checkin", "test-product", 1.23)

        expected_payload = {
            "reg_id": "original-regid",
            "product": "test-product",
            "version": server_version,
        }
        mock_post.assert_called_with(
            "http://example.com/checkin",
            headers={"User-Agent": "KegbotServer/%s" % server_version},
            data=expected_payload,
            timeout=1.23,
        )

    # Reload the site record and confirm the new registration id was saved.
    site = models.KegbotSite.get()
    self.assertEqual("new-regid", site.registration_id)
def get_status(request):
    """Assembles the full sync/status response payload for the API."""
    try:
        session = current_session(request)
    except Http404:
        session = None

    # Non-guest users with pours in the active session, if any.
    current_users = set()
    if session:
        session_stats = models.Stats.objects.filter(session=session)
        current_users = {
            stat.user for stat in session_stats if not stat.user.is_guest()
        }

    return protolib.GetSyncResponse(
        active_kegs=models.Keg.objects.all().filter(online=True),
        active_session=session,
        active_users=current_users,
        controllers=models.Controller.objects.all(),
        drinks=models.Drink.objects.all()[:5],
        events=models.SystemEvent.objects.all()[:5],
        meters=models.FlowMeter.objects.all(),
        site_title=models.KegbotSite.get().title,
        server_version=core_util.get_version(),
        sound_events=[],  # deprecated
        taps=models.KegTap.objects.all(),
        toggles=models.FlowToggle.objects.all())
def kbsite(request):
    """Template context processor exposing site-wide settings and objects."""
    kbsite = getattr(request, 'kbsite', None)

    context = {}
    context['DEBUG'] = settings.DEBUG
    context['DEMO_MODE'] = getattr(settings, 'DEMO_MODE', False)
    context['EMBEDDED'] = getattr(settings, 'EMBEDDED', False)
    context['EPOCH'] = pykeg.EPOCH
    context['VERSION'] = util.get_version()
    context['HAVE_SESSIONS'] = False
    context['HAVE_ADMIN'] = settings.KEGBOT_ENABLE_ADMIN
    context['GOOGLE_ANALYTICS_ID'] = None
    context['kbsite'] = kbsite
    context['request_path'] = request.path
    context['login_form'] = LoginForm(initial={'next_page': request.path})
    context['guest_info'] = {'name': 'guest', 'image': None}
    context['PLUGINS'] = getattr(request, 'plugins', {})

    # When the site record is available, override the defaults above.
    if kbsite:
        context['guest_info']['name'] = kbsite.settings.guest_name
        context['guest_info']['image'] = kbsite.settings.guest_image
        context['SERIAL_NUMBER'] = kbsite.serial_number
        context['HAVE_SESSIONS'] = models.DrinkingSession.objects.all().count() > 0
        context['GOOGLE_ANALYTICS_ID'] = kbsite.settings.google_analytics_id
    return context
def slack_post(url, event_dict):
    """Posts an event to slack.

    The request body is a JSON dictionary of:
      * type: webhook message type (currently always 'event')
      * data: webhook data (the event payload)

    Event payloads are in the same format as the /api/events/ endpoint.

    Returns:
        The `requests.Response` on success, or False if the POST failed.
    """
    logger.info('Posting to slack: url=%s event=%s' % (url, event_dict))
    hook_dict = {
        'type': 'event',
        'data': event_dict,
    }
    headers = {
        'content-type': 'application/json',
        'user-agent': 'Kegbot/%s' % get_version(),
    }
    try:
        return requests.post(url, data=kbjson.dumps(hook_dict), headers=headers)
    except requests.exceptions.RequestException as e:
        # Best-effort delivery: failures are logged, not raised.
        logger.warning('Error posting hook: %s' % e)
        return False
def get_status(request):
    """Builds the API sync/status response payload."""
    try:
        session = current_session(request)
    except Http404:
        session = None

    # Collect the non-guest users who have stats in the active session.
    current_users = set()
    if session:
        stats = models.Stats.objects.filter(session=session, user__isnull=False)
        current_users = {stat.user for stat in stats if not stat.user.is_guest()}

    return protolib.GetSyncResponse(
        active_kegs=models.Keg.objects.all().filter(status=models.Keg.STATUS_ON_TAP),
        active_session=session,
        active_users=current_users,
        controllers=models.Controller.objects.all(),
        drinks=models.Drink.objects.all()[:5],
        events=models.SystemEvent.objects.all()[:5],
        meters=models.FlowMeter.objects.all(),
        site_title=models.KegbotSite.get().title,
        server_version=core_util.get_version(),
        sound_events=[],  # deprecated
        taps=models.KegTap.objects.all(),
        toggles=models.FlowToggle.objects.all())
def test_get_version(self):
    """Version string must be non-default and parse to at least 0.9.23."""
    self.assertNotEqual('0.0.0', util.get_version())
    try:
        util.get_version_object()
    except ValueError as e:
        self.fail('Illegal version: ' + str(e))
    self.assertGreaterEqual(util.get_version_object().version, (0, 9, 23))
def create_backup_tree(date, storage, include_media=True):
    """Builds a complete backup in a temporary directory, return the path.

    Args:
        date: Backup creation datetime, recorded in the metadata file.
        storage: Django storage backend holding uploaded media.
        include_media: When False, media files are not copied.

    Returns:
        Path to the new temporary backup directory.  The directory is
        deleted again if it fails verification.
    """
    backup_dir = tempfile.mkdtemp()
    metadata = {}

    # Save databases.
    output_filename = os.path.join(backup_dir, SQL_FILENAME)
    with open(output_filename, 'w') as out_fd:
        db_impl.dump(out_fd)

    # Save stored media.
    metadata[META_NUM_MEDIA_FILES] = 0

    def add_files(storage, dirname, destdir):
        """Recursively copies all files in `dirname` to `destdir`."""
        subdirs, files = storage.listdir(dirname)
        for filename in files:
            full_filename = os.path.join(dirname, filename)
            output_filename = os.path.join(destdir, full_filename)
            output_dirname = os.path.dirname(output_filename)
            if not os.path.exists(output_dirname):
                os.makedirs(output_dirname)
            with storage.open(full_filename, 'r') as srcfile:
                with open(output_filename, 'w') as dstfile:
                    logger.debug('+++ Creating {}'.format(output_filename))
                    shutil.copyfileobj(srcfile, dstfile)
                    metadata[META_NUM_MEDIA_FILES] += 1
        for subdir in subdirs:
            # Fixed: was os.path.join((dirname, subdir)) — a single tuple
            # argument, which raised TypeError for any nested media directory.
            add_files(storage, os.path.join(dirname, subdir), destdir)

    if include_media:
        destdir = os.path.join(backup_dir, 'media')
        for media_dir in MEDIA_WHITELIST:
            if storage.exists(media_dir):
                add_files(storage, media_dir, destdir)
    else:
        logger.warning('Not including media.')

    # Store metadata file.
    metadata[META_SERVER_NAME] = get_title()
    metadata[META_SERVER_VERSION] = get_version()
    metadata[META_CREATED_TIME] = isodate.datetime_isoformat(date)
    metadata[META_DB_ENGINE] = db_impl.engine_name()
    metadata[META_BACKUP_FORMAT] = BACKUP_FORMAT
    metadata_filename = os.path.join(backup_dir, METADATA_FILENAME)
    with open(metadata_filename, 'w') as outfile:
        json.dump(metadata, outfile, sort_keys=True, indent=2)

    # Verify the tree before handing it back; remove it on failure.
    valid = False
    try:
        verify_backup_directory(backup_dir)
        valid = True
        return backup_dir
    finally:
        if not valid:
            shutil.rmtree(backup_dir)
def assertMetadata(self, backup_dir, when=None, site_name="My Kegbot", num_media_files=0):
    """Verifies the backup metadata file matches the expected values."""
    when = when or self.now
    backup.verify_backup_directory(backup_dir)
    metadata_file = os.path.join(backup_dir, backup.METADATA_FILENAME)
    # Use a context manager so the file handle is closed promptly
    # (the old bare open(...).read() leaked the handle).
    with open(metadata_file) as f:
        metadata_json = kbjson.loads(f.read())
    self.assertEqual(when, metadata_json[backup.META_CREATED_TIME])
    self.assertEqual(site_name, metadata_json[backup.META_SERVER_NAME])
    self.assertEqual(num_media_files, metadata_json[backup.META_NUM_MEDIA_FILES])
    self.assertEqual(get_version(), metadata_json[backup.META_SERVER_VERSION])
def assertMetadata(self, backup_dir, when=None, site_name='My Kegbot', num_media_files=0):
    """Verifies the backup metadata file matches the expected values."""
    when = when or self.now
    backup.verify_backup_directory(backup_dir)
    metadata_file = os.path.join(backup_dir, backup.METADATA_FILENAME)
    # Close the file deterministically (bare open(...).read() leaked it).
    with open(metadata_file) as f:
        metadata_json = kbjson.loads(f.read())
    # Use assertEqual consistently; assertEquals is a deprecated alias.
    self.assertEqual(when, metadata_json[backup.META_CREATED_TIME])
    self.assertEqual(site_name, metadata_json[backup.META_SERVER_NAME])
    self.assertEqual(num_media_files, metadata_json[backup.META_NUM_MEDIA_FILES])
    self.assertEqual(get_version(), metadata_json[backup.META_SERVER_VERSION])
def checkin(url=CHECKIN_URL, product=PRODUCT, timeout=None, quiet=False):
    """Issue a single checkin to the checkin server.

    No-op if kbsite.check_for_updates is False.

    Args:
        url: Checkin endpoint URL.
        product: Product name reported to the server.
        timeout: Optional request timeout, in seconds.
        quiet: When True, suppress info/warning logging.

    Returns:
        A checkin response dictionary, or None if checkin is disabled.

    Raises:
        CheckinError: On malformed response or error talking to the server.
    """
    # Fetch the site singleton once: it is both the feature gate and the
    # record updated with checkin results (the old code fetched it twice).
    site = models.KegbotSite.get()
    if not site.check_for_updates:
        LOGGER.debug('Upgrade check is disabled')
        return

    reg_id = site.registration_id
    headers = {
        'User-Agent': util.get_user_agent(),
    }
    payload = {
        FIELD_PRODUCT: product,
        FIELD_REG_ID: reg_id,
        FIELD_VERSION: util.get_version(),
    }

    try:
        LOGGER.debug('Checking in, url=%s reg_id=%s' % (url, reg_id))
        result = requests.post(url, data=payload, headers=headers, timeout=timeout).json()
        new_reg_id = result.get(FIELD_REG_ID)
        if new_reg_id != reg_id:
            LOGGER.debug('Updating reg_id=%s' % new_reg_id)
            site.registration_id = new_reg_id
            site.save()
        LOGGER.debug('Checkin result: %s' % str(result))
        if not quiet:
            LOGGER.info('Checkin complete, reg_id=%s' % (reg_id,))
        site.last_checkin_response = result
        site.last_checkin_time = timezone.now()
        site.save()
        return result
    except (ValueError, requests.RequestException) as e:
        if not quiet:
            LOGGER.warning('Checkin error: %s' % str(e))
        raise CheckinError(e)
def checkin(url=CHECKIN_URL, product=PRODUCT, timeout=None, quiet=False):
    """Issue a single checkin to the checkin server.

    No-op in embedded mode, or if kbsite.check_for_updates is False.

    Args:
        url: Checkin endpoint URL.
        product: Product name reported to the server.
        timeout: Optional request timeout, in seconds.
        quiet: When True, suppress info/warning logging.

    Returns:
        A checkin response dictionary, or None if checkin is disabled.

    Raises:
        CheckinError: On malformed response or error talking to the server.
    """
    if settings.EMBEDDED:
        LOGGER.debug('Checkin disabled in embedded mode')
        return

    # Fetch the site singleton once (the old code fetched it twice).
    site = models.KegbotSite.get()
    if not site.check_for_updates:
        LOGGER.debug('Upgrade check is disabled')
        return

    reg_id = site.registration_id
    headers = {
        'User-Agent': util.get_user_agent(),
    }
    payload = {
        FIELD_PRODUCT: product,
        FIELD_REG_ID: reg_id,
        FIELD_VERSION: util.get_version(),
    }

    try:
        LOGGER.debug('Checking in, url=%s reg_id=%s' % (url, reg_id))
        result = requests.post(url, data=payload, headers=headers, timeout=timeout).json()
        new_reg_id = result.get(FIELD_REG_ID)
        if new_reg_id != reg_id:
            LOGGER.debug('Updating reg_id=%s' % new_reg_id)
            site.registration_id = new_reg_id
            site.save()
        LOGGER.debug('Checkin result: %s' % str(result))
        if not quiet:
            LOGGER.info('Checkin complete, reg_id=%s' % (reg_id,))
        set_last_checkin(timezone.now(), result)
        return result
    except (ValueError, requests.RequestException) as e:
        if not quiet:
            LOGGER.warning('Checkin error: %s' % str(e))
        raise CheckinError(e)
def kbsite(request):
    """Template context processor: site settings, SSO URLs, display prefs."""
    kbsite = getattr(request, 'kbsite', None)

    redir = urllib.urlencode(
        {'redir': request.build_absolute_uri(request.path)})

    def _sso_url(setting_name):
        # Attach the post-auth redirect to a configured SSO endpoint, if any.
        url = getattr(settings, setting_name, '')
        if url:
            url = '{}?{}'.format(url, redir)
        return url

    sso_login_url = _sso_url('SSO_LOGIN_URL')
    sso_logout_url = _sso_url('SSO_LOGOUT_URL')

    ret = {
        'DEBUG': settings.DEBUG,
        'DEMO_MODE': settings.DEMO_MODE,
        'EMBEDDED': settings.EMBEDDED,
        'EPOCH': pykeg.EPOCH,
        'VERSION': util.get_version(),
        'HAVE_SESSIONS': False,
        'HAVE_ADMIN': settings.KEGBOT_ENABLE_ADMIN,
        'GOOGLE_ANALYTICS_ID': None,
        'SSO_LOGIN_URL': sso_login_url,
        'SSO_LOGOUT_URL': sso_logout_url,
        'CAN_INVITE': kbsite.can_invite(request.user) if kbsite else False,
        'kbsite': kbsite,
        'request_path': request.path,
        'login_form': LoginForm(initial={'next_page': request.path}),
        'guest_info': {'name': 'guest', 'image': None},
        'PLUGINS': getattr(request, 'plugins', {}),
    }

    # Override defaults with values from the site record, when present.
    if kbsite:
        ret['guest_info']['name'] = kbsite.guest_name
        ret['guest_info']['image'] = kbsite.guest_image
        ret['HAVE_SESSIONS'] = models.DrinkingSession.objects.all().count() > 0
        ret['GOOGLE_ANALYTICS_ID'] = kbsite.google_analytics_id
        ret['metric_volumes'] = (kbsite.volume_display_units == 'metric')
        ret['temperature_display_units'] = kbsite.temperature_display_units
    return ret
def bugreport(fd):
    """Writes a plain-text bug report to the open file-like object `fd`.

    Sections are separated by SEPARATOR and cover: server version and
    timestamp, system paths, environment variables, `pip freeze` output,
    migration status, and recent log records pulled from redis.
    """
    now = datetime.datetime.now()
    writeline(fd, 'Kegbot Server {} Bugreport'.format(get_version()))
    writeline(fd, 'Generated {}'.format(isodate.datetime_isoformat(now)))
    fd.write(SEPARATOR)
    writeline(fd, '## System info\n')
    writepair(fd, 'kegbot path', get_output('which kegbot'))
    writepair(fd, 'python bin path', get_output('which python'))
    writeline(fd, '\n')
    fd.write(SEPARATOR)
    writeline(fd, '## Environment\n')
    for k in sorted(os.environ):
        writepair(fd, k, os.environ[k])
    writeline(fd, '\n')
    fd.write(SEPARATOR)
    writeline(fd, '## `pip freeze` output\n')
    writeline(fd, get_output('pip freeze'))
    writeline(fd, '\n')
    fd.write(SEPARATOR)
    writeline(fd, '## `kegbot migrate --list` output\n')
    writeline(fd, get_output('kegbot migrate --list --no-color --noinput'))
    writeline(fd, '\n')
    fd.write(SEPARATOR)
    writeline(fd, '## kegbot logs\n')
    try:
        r = redis.Redis()
        # Log records are stored as JSON strings in the 'kb:log' redis list.
        logs = r.lrange('kb:log', 0, -1)
        for log in logs:
            try:
                log = json.loads(log)
            except ValueError:
                # Skip malformed records rather than aborting the report.
                continue
            writelog(fd, log)
    except redis.RedisError as e:
        # Redis being unavailable should not prevent the rest of the report.
        writeline(fd, 'ERR ({})'.format(e))
    writeline(fd, '\n')
    fd.write(SEPARATOR)
    writeline(fd, '\\m/ End of bugreport \\m/')
def kbsite(request):
    """Template context processor exposing site settings, SSO URLs, and
    display preferences to every template."""
    kbsite = getattr(request, 'kbsite', None)

    redir = urllib.urlencode({'redir': request.build_absolute_uri(request.path)})

    sso_login_url = getattr(settings, 'SSO_LOGIN_URL', '')
    sso_logout_url = getattr(settings, 'SSO_LOGOUT_URL', '')
    if sso_login_url:
        sso_login_url = '{}?{}'.format(sso_login_url, redir)
    if sso_logout_url:
        sso_logout_url = '{}?{}'.format(sso_logout_url, redir)

    context = {}
    context['DEBUG'] = settings.DEBUG
    context['DEMO_MODE'] = settings.DEMO_MODE
    context['EMBEDDED'] = settings.EMBEDDED
    context['EPOCH'] = pykeg.EPOCH
    context['VERSION'] = util.get_version()
    context['HAVE_SESSIONS'] = False
    context['HAVE_ADMIN'] = settings.KEGBOT_ENABLE_ADMIN
    context['GOOGLE_ANALYTICS_ID'] = None
    context['SSO_LOGIN_URL'] = sso_login_url
    context['SSO_LOGOUT_URL'] = sso_logout_url
    context['CAN_INVITE'] = kbsite.can_invite(request.user) if kbsite else False
    context['kbsite'] = kbsite
    context['request_path'] = request.path
    context['login_form'] = LoginForm(initial={'next_page': request.path})
    context['guest_info'] = {'name': 'guest', 'image': None}
    context['PLUGINS'] = getattr(request, 'plugins', {})

    # With a site record, override the defaults above.
    if kbsite:
        context['guest_info']['name'] = kbsite.guest_name
        context['guest_info']['image'] = kbsite.guest_image
        context['HAVE_SESSIONS'] = models.DrinkingSession.objects.all().count() > 0
        context['GOOGLE_ANALYTICS_ID'] = kbsite.google_analytics_id
        context['metric_volumes'] = (kbsite.volume_display_units == 'metric')
        context['temperature_display_units'] = kbsite.temperature_display_units
    return context
def bugreport(fd):
    """Writes a plain-text bug report to the open file-like object `fd`.

    Sections are separated by SEPARATOR and cover: server version and
    timestamp, system paths, environment variables, `pip freeze` output,
    migration status, and recent log records pulled from redis.
    """
    now = datetime.datetime.now()
    writeline(fd, "Kegbot Server {} Bugreport".format(get_version()))
    writeline(fd, "Generated {}".format(isodate.datetime_isoformat(now)))
    fd.write(SEPARATOR)
    writeline(fd, "## System info\n")
    writepair(fd, "kegbot path", get_output("which kegbot"))
    writepair(fd, "python bin path", get_output("which python"))
    writeline(fd, "\n")
    fd.write(SEPARATOR)
    writeline(fd, "## Environment\n")
    for k in sorted(os.environ):
        writepair(fd, k, os.environ[k])
    writeline(fd, "\n")
    fd.write(SEPARATOR)
    writeline(fd, "## `pip freeze` output\n")
    writeline(fd, get_output("pip freeze"))
    writeline(fd, "\n")
    fd.write(SEPARATOR)
    writeline(fd, "## `kegbot migrate --list` output\n")
    writeline(fd, get_output("kegbot migrate --list --no-color --noinput"))
    writeline(fd, "\n")
    fd.write(SEPARATOR)
    writeline(fd, "## kegbot logs\n")
    try:
        r = redis.Redis()
        # Log records are stored as JSON strings in the "kb:log" redis list.
        logs = r.lrange("kb:log", 0, -1)
        for log in logs:
            try:
                log = json.loads(log)
            except ValueError:
                # Skip malformed records rather than aborting the report.
                continue
            writelog(fd, log)
    except redis.RedisError as e:
        # Redis being unavailable should not prevent the rest of the report.
        writeline(fd, "ERR ({})".format(e))
    writeline(fd, "\n")
    fd.write(SEPARATOR)
    writeline(fd, "\\m/ End of bugreport \\m/")
def handle(self, **options): self.do_epoch_upgrades() run(syncdb.Command(), args=['--noinput', '-v', '0']) run(migrate.Command(), args=['-v', '0']) run(kb_regen_stats.Command()) run(collectstatic.Command(), args=['--noinput']) site = models.KegbotSite.get() site.epoch = EPOCH site.server_version = get_version() site.save() # Refresh any news (since we have a new version). try: checkin.checkin(timeout=5.0, quiet=True) except (checkin.CheckinError, Exception) as e: pass print '' print 'Upgrade complete!'
def kbsite(request):
    """Template context processor exposing site configuration to templates."""
    kbsite = getattr(request, "kbsite", None)

    redir = urllib.parse.urlencode({"redir": request.build_absolute_uri(request.path)})

    def _with_redirect(setting_name):
        # Append the post-auth redirect to a configured SSO endpoint, if set.
        url = getattr(settings, setting_name, "")
        return "{}?{}".format(url, redir) if url else url

    context = {
        "DEBUG": settings.DEBUG,
        "VERSION": util.get_version(),
        "HAVE_SESSIONS": False,
        "KEGBOT_ENABLE_ADMIN": settings.KEGBOT_ENABLE_ADMIN,
        "ENABLE_SENSING": kbsite.enable_sensing if kbsite else True,
        "ENABLE_USERS": kbsite.enable_users if kbsite else True,
        "GOOGLE_ANALYTICS_ID": None,
        "SSO_LOGIN_URL": _with_redirect("SSO_LOGIN_URL"),
        "SSO_LOGOUT_URL": _with_redirect("SSO_LOGOUT_URL"),
        "CAN_INVITE": kbsite.can_invite(request.user) if kbsite else False,
        "kbsite": kbsite,
        "request_path": request.path,
        "login_form": LoginForm(initial={"next_page": request.path}),
        "guest_info": {"name": "guest", "image": None},
        "PLUGINS": getattr(request, "plugins", {}),
    }

    if kbsite:
        context["HAVE_SESSIONS"] = models.DrinkingSession.objects.all().count() > 0
        context["GOOGLE_ANALYTICS_ID"] = kbsite.google_analytics_id
        context["metric_volumes"] = kbsite.volume_display_units == "metric"
        context["temperature_display_units"] = kbsite.temperature_display_units
    return context
def kbsite(request):
    """Template context processor exposing site configuration to templates."""
    kbsite = getattr(request, "kbsite", None)

    redir = urllib.urlencode({"redir": request.build_absolute_uri(request.path)})

    def _with_redirect(setting_name):
        # Append the post-auth redirect to a configured SSO endpoint, if set.
        url = getattr(settings, setting_name, "")
        return "{}?{}".format(url, redir) if url else url

    ret = {
        "DEBUG": settings.DEBUG,
        "DEMO_MODE": settings.DEMO_MODE,
        "EMBEDDED": settings.EMBEDDED,
        "VERSION": util.get_version(),
        "HAVE_SESSIONS": False,
        "HAVE_ADMIN": settings.KEGBOT_ENABLE_ADMIN,
        "GOOGLE_ANALYTICS_ID": None,
        "SSO_LOGIN_URL": _with_redirect("SSO_LOGIN_URL"),
        "SSO_LOGOUT_URL": _with_redirect("SSO_LOGOUT_URL"),
        "CAN_INVITE": kbsite.can_invite(request.user) if kbsite else False,
        "kbsite": kbsite,
        "request_path": request.path,
        "login_form": LoginForm(initial={"next_page": request.path}),
        "guest_info": {"name": "guest", "image": None},
        "PLUGINS": getattr(request, "plugins", {}),
    }

    if kbsite:
        ret["HAVE_SESSIONS"] = models.DrinkingSession.objects.all().count() > 0
        ret["GOOGLE_ANALYTICS_ID"] = kbsite.google_analytics_id
        ret["metric_volumes"] = kbsite.volume_display_units == "metric"
        ret["temperature_display_units"] = kbsite.temperature_display_units
    return ret
def get_info(request):
    """Context processor exposing the running server version to templates."""
    server_version = core_util.get_version()
    return {'kegbot_server_version': server_version}
# Command-line flags consumed by the settings generator.
gflags.DEFINE_string('db_password', '', 'MySQL/Postgres password.')
gflags.DEFINE_string('db_database', 'kegbot', 'MySQL/Postgres database name.')

FLAGS = gflags.FLAGS

# Header template emitted at the top of every generated local settings file;
# the placeholders are filled with the generating program and its version.
SETTINGS_TEMPLATE = """# Kegbot local settings.
# Auto-generated by %s version %s.
# Safe to edit by hand. See http://kegbot.org/docs/server/ for more info.

# NEVER set DEBUG to `True` in production.
DEBUG = True
TEMPLATE_DEBUG = DEBUG

""" % (sys.argv[0], util.get_version())

# Context values which will be copied to the output settings.
SETTINGS_NAMES = (
    'DATABASES',
    'KEGBOT_ROOT',
    'MEDIA_ROOT',
    'STATIC_ROOT',
    'CACHES',
    'SECRET_KEY',
)


class FatalError(Exception):
    """Cannot proceed."""
def get_version(request):
    """Context processor exposing the server version to templates."""
    server_version = core_util.get_version()
    return {'server_version': server_version}
def create_backup_tree(date, storage, include_media=True):
    """Creates filesystem tree of backup data.

    Args:
        date: Backup creation datetime, recorded in the metadata file.
        storage: Django storage backend holding uploaded media.
        include_media: When False, media files are not copied.

    Returns:
        Path to the new temporary backup directory.  The directory is
        deleted again if it fails verification.
    """
    backup_dir = tempfile.mkdtemp()
    metadata = {}

    # Save databases: one JSON dump per model table.
    tables_dir = os.path.join(backup_dir, TABLES_DIRNAME)
    os.makedirs(tables_dir)
    all_models = get_models_to_backup()
    metadata[META_NUM_TABLES] = len(all_models)
    for model in all_models:
        table_name = model._meta.db_table
        output_filename = os.path.join(tables_dir, table_name + '.json')
        logger.debug('+++ Creating {}'.format(output_filename))
        with open(output_filename, 'w') as out:
            serializers.serialize('json', model.objects.all(), indent=2, stream=out)

    # Save stored media.
    metadata[META_NUM_MEDIA_FILES] = 0

    def add_files(storage, dirname, destdir):
        """Recursively copies all files in `dirname` to `destdir`."""
        subdirs, files = storage.listdir(dirname)
        for filename in files:
            full_filename = os.path.join(dirname, filename)
            output_filename = os.path.join(destdir, full_filename)
            output_dirname = os.path.dirname(output_filename)
            if not os.path.exists(output_dirname):
                os.makedirs(output_dirname)
            with storage.open(full_filename, 'r') as srcfile:
                with open(output_filename, 'w') as dstfile:
                    logger.debug('+++ Creating {}'.format(output_filename))
                    shutil.copyfileobj(srcfile, dstfile)
                    metadata[META_NUM_MEDIA_FILES] += 1
        for subdir in subdirs:
            # Fixed: was os.path.join((dirname, subdir)) — a single tuple
            # argument, which raised TypeError for any nested media directory.
            add_files(storage, os.path.join(dirname, subdir), destdir)

    if include_media:
        destdir = os.path.join(backup_dir, 'media')
        for media_dir in MEDIA_WHITELIST:
            if storage.exists(media_dir):
                add_files(storage, media_dir, destdir)
    else:
        logger.warning('Not including media.')

    # Store metadata file.
    metadata[META_SERVER_NAME] = models.KegbotSite.get().title
    metadata[META_SERVER_VERSION] = get_version()
    metadata[META_CREATED_TIME] = isodate.datetime_isoformat(date)
    metadata[META_BACKUP_FORMAT] = BACKUP_FORMAT
    metadata_filename = os.path.join(backup_dir, METADATA_FILENAME)
    with open(metadata_filename, 'w') as outfile:
        json.dump(metadata, outfile, sort_keys=True, indent=2)

    # Verify the tree before handing it back; remove it on failure.
    valid = False
    try:
        verify_backup_directory(backup_dir)
        valid = True
        return backup_dir
    finally:
        if not valid:
            shutil.rmtree(backup_dir)
'File name for the Kegbot sqlite database within `data_root`. Ignored if not using SQLite.') gflags.DEFINE_string('use_memcached', True, 'Configure Kegbot to use memcached. ') FLAGS = gflags.FLAGS SETTINGS_TEMPLATE = """# Kegbot local settings. # Auto-generated by %s version %s. # Safe to edit by hand. See http://kegbot.org/docs/server/ for more info. # NEVER set DEBUG to `True` in production. DEBUG = True TEMPLATE_DEBUG = DEBUG """ % (sys.argv[0], util.get_version()) # Context values which will be copied to the output settings. SETTINGS_NAMES = ( 'DATABASES', 'KEGBOT_ROOT', 'MEDIA_ROOT', 'STATIC_ROOT', 'CACHES', 'SECRET_KEY', ) class FatalError(Exception): """Cannot proceed.""" def load_existing():
def test_get_version(self):
    """The version API endpoint reports 'ok' and the running server version."""
    response, data = self.get('version')
    # assertEqual, not the deprecated assertEquals alias.
    self.assertEqual(data.meta.result, 'ok')
    self.assertEqual(data.object.get('server_version'), get_version())
def get_version(request):
    """Context processor exposing the server version to templates."""
    server_version = core_util.get_version()
    return {"server_version": server_version}
def get(cls):
    """Gets the default site settings."""
    # Lazily create the default site record on first access.
    initial_values = {'is_setup': False, 'server_version': get_version()}
    site, _ = KegbotSite.objects.get_or_create(name='default', defaults=initial_values)
    return site
gflags.DEFINE_string("db_database", "kegbot", "MySQL/Postgres database name.") FLAGS = gflags.FLAGS SETTINGS_TEMPLATE = """# Kegbot local settings. # Auto-generated by %s version %s. # Safe to edit by hand. See http://kegbot.org/docs/server/ for more info. # NEVER set DEBUG to `True` in production. DEBUG = True TEMPLATE_DEBUG = DEBUG """ % ( sys.argv[0], util.get_version(), ) # Context values which will be copied to the output settings. SETTINGS_NAMES = ("DATABASES", "KEGBOT_ROOT", "MEDIA_ROOT", "STATIC_ROOT", "CACHES", "SECRET_KEY") class FatalError(Exception): """Cannot proceed.""" def load_existing(): """Attempts to load the existing local_settings module. Returns: Loaded module, or None if not loadable.
def test_get_version(self):
    """The version endpoint must report 'ok' and the running server version."""
    response, data = self.get("version")
    self.assertEqual("ok", data.meta.result)
    self.assertEqual(get_version(), data.object.get("server_version"))