class SbankenUser(jsonobject.JsonObject): 'Objectify Sbanken User' # properties defined in Sbanken json structs # {'customerId': '', # str, norwegian ssn # 'dateOfBirth': 'YYYY-MM-DDT00:00:00', # str, timestamp # 'emailAddress': '', # str, email # 'firstName': '', # str # 'lastName': '', # str # 'phoneNumbers': [{'countryCode': '', 'number': ''}, # {'countryCode': '', 'number': ''}], # 'postalAddress': {'addressLine1': '', # 'addressLine2': '', # 'addressLine3': '', # 'addressLine4': '', # 'city': None, # 'country': '', # 'zipCode': None}, # 'streetAddress': {'addressLine1': '', # 'addressLine2': '', # 'addressLine3': None, # 'addressLine4': None, # 'city': '', # 'country': None, # 'zipCode': ''} customerId = jsonobject.StringProperty() dateOfBirth = jsonobject.DefaultProperty() # TODO: use DateTimeProperty() emailAddress = jsonobject.StringProperty() firstName = jsonobject.StringProperty() lastName = jsonobject.StringProperty() phoneNumbers = jsonobject.ListProperty(SbankenPhoneNumber) postalAddress = jsonobject.ObjectProperty(SbankenAddress) streetAddress = jsonobject.ObjectProperty(SbankenAddress)
class DotsApiParam(StrictJsonObject): """99DOTS <-> eNikshay API Parameter Definition This class defines api parameters for the patient details API between 99DOTS and eNikshay. For incoming api requests from 99DOTS, it defines where and how to save parameters. For outgoing api requests to 99DOTS, it defines which properties to watch for changes to and how they are compiled. """ # the parameter name for the json sent and received api_param_name = jsonobject.StringProperty(required=True) # whether this parameter is required when receiving and API request required_ = jsonobject.BooleanProperty(default=False, name='required') exclude_if_none = jsonobject.BooleanProperty(default=True) choices = jsonobject.ObjectProperty(DotsApiParamChoices) # the case type to save or get this property from case_type = jsonobject.ObjectProperty(DotsApiSectorParam) # the case property to save to or get case_property = jsonobject.ObjectProperty(DotsApiSectorParam) # path to a function to get the value of this property getter = jsonobject.StringProperty() # path to a jsonObject that will wrap the value from the getter payload_object = jsonobject.StringProperty() # if using a custom getter, the case properties to watch for changes to send outwards case_properties = jsonobject.ObjectProperty(DotsApiParamChoices) # path to a function to set the case property for incoming requests. Should # return a dict of case properties to update setter = jsonobject.StringProperty() # whether we should send, receive, or both. 
direction = jsonobject.IntegerProperty( default=DIRECTION_BOTH, choices=[DIRECTION_INBOUND, DIRECTION_OUTBOUND, DIRECTION_BOTH]) # path to a function that takes a sector parameter and returns a validator function # see checkbox_validator in this file for an example validator = jsonobject.StringProperty() # values passed into the validator function validator_values = jsonobject.ObjectProperty(DotsApiParamChoices) def get_by_sector(self, prop, sector): prop = getattr(self, prop) if isinstance(prop, DotsApiSectorParam): return getattr(prop, sector) or prop.both else: return prop
class SmartDBConfig(jsonobject.JsonObject):
    """Top-level database layout: one sub-config per logical database group."""
    _allow_dynamic_properties = False

    main = jsonobject.ObjectProperty(lambda: MainDBOptions, required=True)
    formplayer = jsonobject.ObjectProperty(lambda: FormplayerDBOptions, required=True)
    ucr = jsonobject.ObjectProperty(lambda: UcrDBOptions, required=True)
    synclogs = jsonobject.ObjectProperty(lambda: SynclogsDBOptions, required=False)
    form_processing = jsonobject.ObjectProperty(lambda: FormProcessingConfig, required=False)
    custom = jsonobject.ListProperty(lambda: CustomDBOptions)
    standby = jsonobject.ListProperty(lambda: StandbyDBOptions)
class TerraformConfig(jsonobject.JsonObject):
    """Terraform settings for one environment: networking, servers, RDS,
    load balancers, caches and related AWS resources."""
    _allow_dynamic_properties = False
    aws_profile = jsonobject.StringProperty(required=True)
    account_alias = jsonobject.StringProperty()
    manage_users = jsonobject.BooleanProperty(default=True)
    state_bucket = jsonobject.StringProperty()
    state_bucket_region = jsonobject.StringProperty()
    region = jsonobject.StringProperty()
    environment = jsonobject.StringProperty()
    openvpn_image = jsonobject.StringProperty()
    openvpn_instance_type = jsonobject.StringProperty()
    openvpn_az = jsonobject.StringProperty()
    azs = jsonobject.ListProperty(str)
    az_codes = jsonobject.ListProperty(str, default=['a', 'b', 'c'])
    ssl_policy = jsonobject.StringProperty(default="ELBSecurityPolicy-2016-08")
    vpc_begin_range = jsonobject.StringProperty()
    vpn_connections = jsonobject.ListProperty(lambda: VpnConnectionConfig)
    external_routes = jsonobject.ListProperty(lambda: ExternalRouteConfig)
    servers = jsonobject.ListProperty(lambda: ServerConfig)
    proxy_servers = jsonobject.ListProperty(lambda: ServerConfig)
    rds_instances = jsonobject.ListProperty(lambda: RdsInstanceConfig)
    pgbouncer_nlbs = jsonobject.ListProperty(lambda: PgbouncerNlbs)
    internal_albs = jsonobject.ListProperty(lambda: InternalAlbs)
    elasticache = jsonobject.ObjectProperty(lambda: ElasticacheConfig, default=None)
    elasticache_cluster = jsonobject.ObjectProperty(
        lambda: ElasticacheClusterConfig, default=None)
    r53_private_zone = jsonobject.ObjectProperty(
        lambda: RoutePrivateZoneConfig, default=None)
    efs_file_systems = jsonobject.ListProperty(lambda: EfsFileSystem, default=None)
    ec2_auto_recovery = jsonobject.ListProperty(lambda: Ec2AutoRecovery, default=None)

    @classmethod
    def wrap(cls, data):
        # aws_profile defaults to the account alias when not given explicitly.
        data.setdefault('aws_profile', data.get('account_alias'))
        return super(TerraformConfig, cls).wrap(data)

    def to_generated_json(self):
        """Return the JSON dict with server entries expanded through their
        own ``to_generated_json`` (name templates resolved, etc.)."""
        generated = self.to_json()
        for key in ('servers', 'proxy_servers'):
            generated[key] = [
                entry.to_generated_json() for entry in getattr(self, key)
            ]
        return generated
def get_payload_properties(sector):
    """Build the jsonobject property dict for the given sector's payload.

    Each API param from the loaded spec contributes one property keyed by
    its ``api_param_name``: an ObjectProperty when the param declares a
    ``payload_object`` wrapper, otherwise a StringProperty with
    sector-specific choices and optional validator.

    Raises ValueError for an unknown sector.
    """
    if sector not in SECTORS:
        raise ValueError('sector argument should be one of {}'.format(
            ",".join(SECTORS)))

    def _property_for(param):
        # Wrapped payloads become nested objects; everything else is a string.
        if param.payload_object:
            return jsonobject.ObjectProperty(
                to_function(param.payload_object),
                required=param.required_,
                exclude_if_none=param.exclude_if_none,
            )
        validators = []
        if param.validator:
            validators.append(
                to_function(param.validator)(sector, param.validator_values))
        return jsonobject.StringProperty(
            choices=param.get_by_sector('choices', sector),
            required=param.required_,
            exclude_if_none=param.exclude_if_none,
            validators=validators,
        )

    return {
        param.api_param_name: _property_for(param)
        for param in load_api_spec().api_params
    }
class TerraformConfig(jsonobject.JsonObject):
    """Terraform settings for one environment (networking, servers, RDS)."""
    _allow_dynamic_properties = False
    aws_profile = jsonobject.StringProperty(required=True)
    account_alias = jsonobject.StringProperty()
    manage_users = jsonobject.BooleanProperty(default=True)
    state_bucket = jsonobject.StringProperty()
    state_bucket_region = jsonobject.StringProperty()
    region = jsonobject.StringProperty()
    environment = jsonobject.StringProperty()
    openvpn_image = jsonobject.StringProperty()
    azs = jsonobject.ListProperty(str)
    az_codes = jsonobject.ListProperty(str, default=['a', 'b', 'c'])
    vpc_begin_range = jsonobject.StringProperty()
    vpn_connections = jsonobject.ListProperty(lambda: VpnConnectionConfig)
    external_routes = jsonobject.ListProperty(lambda: ExternalRouteConfig)
    servers = jsonobject.ListProperty(lambda: ServerConfig)
    proxy_servers = jsonobject.ListProperty(lambda: ServerConfig)
    rds_instances = jsonobject.ListProperty(lambda: RdsInstanceConfig)
    elasticache = jsonobject.ObjectProperty(lambda: ElasticacheConfig, default=None)

    @classmethod
    def wrap(cls, data):
        # aws_profile falls back to account_alias when not set explicitly
        if 'aws_profile' not in data:
            data['aws_profile'] = data.get('account_alias')
        return super(TerraformConfig, cls).wrap(data)
class ServerConfig(jsonobject.JsonObject):
    """Config for one EC2 server, or a templated group of servers.

    ``server_name`` is either a literal name (``server0-test``) or, when
    ``count`` is set, a template such as ``server_a{i}-test`` where ``{i}``
    expands to a zero-padded index (``server_a000-test`` ...).
    """
    _allow_dynamic_properties = False
    server_name = jsonobject.StringProperty()
    server_instance_type = jsonobject.StringProperty()
    network_tier = jsonobject.StringProperty(
        choices=['app-private', 'public', 'db-private'])
    az = jsonobject.StringProperty()
    volume_size = jsonobject.IntegerProperty(default=20)
    volume_encrypted = jsonobject.BooleanProperty(default=True, required=True)
    block_device = jsonobject.ObjectProperty(lambda: BlockDevice, default=None)
    group = jsonobject.StringProperty()
    os = jsonobject.StringProperty(
        required=True, choices=['trusty', 'bionic', 'ubuntu_pro_bionic'])
    count = jsonobject.IntegerProperty(default=None)

    @classmethod
    def wrap(cls, data):
        # Bug fix: super() arguments were reversed (super(cls, ServerConfig)),
        # which raises TypeError for any subclass of ServerConfig. The correct
        # order is super(ServerConfig, cls).
        self = super(ServerConfig, cls).wrap(data)
        if self.count is not None and not self.server_name.split(
                '-', 1)[0].endswith('{i}'):
            raise ValueError(
                'To use count, server_name must be a template string using {i}, '
                'and {i} must be the final part before the env suffix')
        return self

    def get_all_server_names(self):
        """Return every concrete server name this config expands to."""
        if self.count is None:
            # e.g. server0-test => ["server0-test"]
            return [self.server_name]
        else:
            # e.g. server_a{i}-test => ["server_a000-test", "server_a001-test", ...]
            return [
                self.server_name.format(i='{:03d}'.format(i))
                for i in range(self.count)
            ]

    def get_all_host_names(self):
        """Like get_all_server_names, but without the env suffix."""
        host_name = self.server_name.split('-', 1)[0]
        if self.count is None:
            # e.g. server0-test => ["server0"]
            return [host_name]
        else:
            # e.g. server_a{i}-test => ["server_a000", "server_a001", ...]
            return [
                host_name.format(i='{:03d}'.format(i))
                for i in range(self.count)
            ]

    def get_host_group_name(self):
        """Return the group name shared by all expanded hosts.

        Only valid for templated (count-based) configs; strips the
        trailing ``{i}`` placeholder (3 characters) from the host part.
        """
        if self.count is None:
            raise ValueError(
                "Can only call get_host_group_name() on a server with count")
        else:
            # e.g. server_a{i}-test => "server_a"
            return self.server_name.split('-', 1)[0][:-3]

    def to_generated_json(self):
        """JSON dict augmented with the expanded server-name list."""
        obj = self.to_json()
        obj['get_all_server_names'] = self.get_all_server_names()
        return obj
class FabSettingsConfig(jsonobject.JsonObject):
    """Deploy (fab) settings for an environment."""
    _allow_dynamic_properties = False
    sudo_user = jsonobject.StringProperty()
    default_branch = jsonobject.StringProperty()
    home = jsonobject.StringProperty()
    project = jsonobject.StringProperty()
    code_repo = GitUriProperty()
    timing_log = jsonobject.StringProperty()
    keepalive = jsonobject.IntegerProperty()
    ignore_kafka_checkpoint_warning = jsonobject.BooleanProperty()
    acceptable_maintenance_window = jsonobject.ObjectProperty(
        lambda: AcceptableMaintenanceWindow)
    email_enabled = jsonobject.BooleanProperty()

    @classmethod
    def wrap(cls, data):
        # Silently drop (with a user-facing notice) settings that no
        # longer have any effect, so old fab-settings.yml files still load.
        for deprecated_property in ('py3_include_venv', 'py3_run_deploy'):
            if deprecated_property in data:
                print("{} {} {}".format(
                    color_notice("The property"),
                    color_code(deprecated_property),
                    color_notice("is deprecated and has no effect.")))
                print(
                    color_notice(
                        "Feel free to remove it from your fab-settings.yml."))
                del data[deprecated_property]
        obj = super(FabSettingsConfig, cls).wrap(data)
        return obj
class ServerConfig(jsonobject.JsonObject):
    """Config for a single EC2 server managed by terraform."""
    _allow_dynamic_properties = False
    server_name = jsonobject.StringProperty()
    server_instance_type = jsonobject.StringProperty()
    network_tier = jsonobject.StringProperty(
        choices=['app-private', 'public', 'db-private'])
    az = jsonobject.StringProperty()
    volume_size = jsonobject.IntegerProperty(default=20)  # root volume, GiB
    block_device = jsonobject.ObjectProperty(lambda: BlockDevice, default=None)
class SimpleSchedulingRule(jsonobject.JsonObject):
    """Declarative definition of a case-based scheduling rule."""
    name = jsonobject.StringProperty()
    case_type = jsonobject.StringProperty()
    criteria = jsonobject.ListProperty(MatchPropertyCriterion)
    # NOTE(review): nested ListProperty — each recipient is itself a list
    # of strings (presumably a (recipient_type, recipient_id) pair);
    # confirm against the consumers of this schema.
    recipients = jsonobject.ListProperty(jsonobject.ListProperty(jsonobject.StringProperty(required=False)))
    reset_case_property_name = jsonobject.StringProperty()
    start_date_case_property = jsonobject.StringProperty()
    specific_start_date = jsonobject.DateProperty()
    scheduler_module_info = jsonobject.ObjectProperty(CreateScheduleInstanceActionDefinition.SchedulerModuleInfo)
class SimpleSMSDailyScheduleWithTime(jsonobject.JsonObject):
    """Daily SMS schedule fired at a fixed time of day."""
    schedule_type = SIMPLE_SMS_DAILY_SCHEDULE_WITH_TIME
    time = jsonobject.TimeProperty()
    # message text keyed by string — presumably language code; confirm
    message = jsonobject.DictProperty(six.text_type)
    total_iterations = jsonobject.IntegerProperty()
    start_offset = jsonobject.IntegerProperty()
    start_day_of_week = jsonobject.IntegerProperty()
    extra_options = jsonobject.ObjectProperty(ExtraSchedulingOptions)
    repeat_every = jsonobject.IntegerProperty()
class ServerConfig(jsonobject.JsonObject):
    """Config for a single EC2 server managed by terraform."""
    _allow_dynamic_properties = False
    server_name = jsonobject.StringProperty()
    server_instance_type = jsonobject.StringProperty()
    network_tier = jsonobject.StringProperty(choices=['app-private', 'public', 'db-private'])
    az = jsonobject.StringProperty()
    volume_size = jsonobject.IntegerProperty(default=20)  # root volume, GiB
    block_device = jsonobject.ObjectProperty(lambda: BlockDevice, default=None)
    group = jsonobject.StringProperty()
    # todo: invert this so that all new machines are bionic unless otherwise specified
    os = jsonobject.StringProperty(required=True)
class FormProcessingConfig(jsonobject.JsonObject):
    """Sharded form-processing database layout: a proxy, an optional
    proxy standby, and the partition databases behind them."""
    _allow_dynamic_properties = False
    proxy = jsonobject.ObjectProperty(lambda: FormProcessingProxyDBOptions, required=True)
    proxy_standby = jsonobject.ObjectProperty(lambda: FormProcessingStandbyProxyDBOptions, required=False)
    partitions = jsonobject.DictProperty(lambda: StrictPartitionDBOptions, required=True)

    @classmethod
    def wrap(cls, data):
        # Default each partition's django_alias to its dict key and its
        # db name to commcarehq_<alias>. (Dropped an unused enumerate()
        # index from the original loop.)
        for django_alias, db in data['partitions'].items():
            db['django_alias'] = db.get('django_alias', django_alias)
            db['name'] = db.get('name', 'commcarehq_{}'.format(db['django_alias']))
        self = super(FormProcessingConfig, cls).wrap(data)
        return self

    def get_db_list(self):
        """All form-processing DBs: proxy, standby (if configured), then
        partitions sorted naturally by alias."""
        return (
            [self.proxy]
            + ([self.proxy_standby] if self.proxy_standby.host else [])
            + sorted(self.partitions.values(),
                     key=lambda db: alphanum_key(db.django_alias))
        )
class Spec(StrictJsonObject):
    """
    Parser for spec files

    These files declare how many machines should be allocated for each role.

    See specs/example_spec.yml for an example.
    """
    aws_config = jsonobject.ObjectProperty(lambda: AwsConfig)
    settings = jsonobject.ObjectProperty(lambda: Settings)
    allocations = jsonobject.DictProperty(lambda: Allocation)

    @classmethod
    def wrap(cls, obj):
        # Normalize shorthand: a bare integer allocation means {'count': n}.
        normalized = {}
        for role, allocation in obj.get('allocations', {}).items():
            if isinstance(allocation, int):
                allocation = {'count': allocation}
            normalized[role] = allocation
        obj['allocations'] = normalized
        return super(Spec, cls).wrap(obj)
class FabSettingsConfig(jsonobject.JsonObject):
    """Deploy (fab) settings for an environment."""
    _allow_dynamic_properties = False
    sudo_user = jsonobject.StringProperty()
    default_branch = jsonobject.StringProperty()
    home = jsonobject.StringProperty()
    project = jsonobject.StringProperty()
    code_repo = GitUriProperty()
    timing_log = jsonobject.StringProperty()
    keepalive = jsonobject.IntegerProperty()
    ignore_kafka_checkpoint_warning = jsonobject.BooleanProperty()
    acceptable_maintenance_window = jsonobject.ObjectProperty(
        lambda: AcceptableMaintenanceWindow)
    email_enabled = jsonobject.BooleanProperty()
class User(ApiObject):
    """A SpaceDock user, fetched from the ``/user/{username}`` endpoint."""

    # session cookies shared at class level after a successful login
    _COOKIES: Optional[CookieJar]
    _PATH = "/user/{username}"

    username = jo.StringProperty(required=True)
    twitterUsername = jo.StringProperty()
    redditUsername = jo.StringProperty()
    ircNick = jo.StringProperty()
    description = jo.StringProperty()
    forumUsername = jo.StringProperty()
    mods = jo.ListProperty(jo.ObjectProperty(Mod))

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # index into self.mods keyed by mod id, for O(1) lookup
        self._mod_map: Dict[int, int] = {m.id: i for i, m in enumerate(self.mods)}

    def get_mod(self, mod_id: int, reload=False) -> Optional[Mod]:
        """Return this user's mod with the given id, or None if absent.

        With ``reload=True`` the mod is re-fetched from the API and the
        cached entry replaced.
        """
        try:
            mod_idx = self._mod_map[mod_id]
            mod: Mod = self.mods[mod_idx]
            if reload:
                mod = mod.reload()
                self.mods[mod_idx] = mod
            return mod
        except KeyError:
            return None

    @classmethod
    def login(cls, username: str, password: str) -> "User":
        """Authenticate against the API, store the session cookies on the
        class, and return the logged-in user's profile."""
        res = requests.post(
            f"{API_URL}/login",
            files=(("username", (None, username)),
                   ("password", (None, password))),
            timeout=REQUEST_TIMEOUT,
        )
        res.raise_for_status()
        cls._set_cookies(res.cookies)
        return cls.get(username)

    @classmethod
    def get(cls, username: str) -> "User":
        """Fetch a user profile by username."""
        return cls._get(cls._url(username=username))
def generate_object_type(words=None):
    """Create, register and return a randomly-shaped JsonObject subclass.

    The class gets 0-4 properties; each is randomly an ObjectProperty
    (recursing into a nested generated type), a ListProperty, or skipped.
    Every generated type is appended to the module-level ``object_types``.
    """
    class_name = generate_class_name(words)
    attrs = {}
    for _ in range(random.choice(range(5))):
        words = get_phrase()
        kind = random.choice([
            jo.ObjectProperty,
            jo.ListProperty,
            Ellipsis
        ])
        if kind is jo.ObjectProperty:
            attrs[generate_property_name(words)] = jo.ObjectProperty(
                generate_object_type(words))
        elif kind is jo.ListProperty:
            attrs[generate_list_name(words)] = jo.ListProperty(
                generate_list_type(words))
        # Ellipsis: no property generated this round
    new_type = type(jo.JsonObject)(class_name, (jo.JsonObject,), attrs)
    object_types.append(new_type)
    return new_type
class FabSettingsConfig(jsonobject.JsonObject):
    """Deploy (fab) settings for an environment."""
    _allow_dynamic_properties = False
    sudo_user = jsonobject.StringProperty()
    default_branch = jsonobject.StringProperty()
    home = jsonobject.StringProperty()
    project = jsonobject.StringProperty()
    code_repo = GitUriProperty()
    timing_log = jsonobject.StringProperty()
    keepalive = jsonobject.IntegerProperty()
    ignore_kafka_checkpoint_warning = jsonobject.BooleanProperty()
    acceptable_maintenance_window = jsonobject.ObjectProperty(
        lambda: AcceptableMaintenanceWindow)
    email_enabled = jsonobject.BooleanProperty()
    tag_deploy_commits = jsonobject.BooleanProperty(default=False)
    use_shared_dir_for_staticfiles = jsonobject.BooleanProperty(default=False)
    shared_dir_for_staticfiles = jsonobject.StringProperty(default=None)
    deploy_event_url = jsonobject.StringProperty(default=None)
    generate_deploy_diffs = jsonobject.BooleanProperty(default=True)
    custom_deploy_details = jsonobject.DictProperty()

    @classmethod
    def wrap(cls, data):
        # Drop (with a user-facing notice) settings that no longer have
        # any effect, so old fab-settings.yml files still load.
        for deprecated_property in ('py3_include_venv', 'py3_run_deploy'):
            if deprecated_property in data:
                print("{} {} {}".format(
                    color_notice("The property"),
                    color_code(deprecated_property),
                    color_notice("is deprecated and has no effect.")))
                print(
                    color_notice(
                        "Feel free to remove it from your fab-settings.yml."))
                del data[deprecated_property]
        obj = super(FabSettingsConfig, cls).wrap(data)
        # NOTE(review): assert is stripped under python -O; validation
        # would be safer as an explicit raise, but changing it would alter
        # the exception type callers may expect.
        if obj.use_shared_dir_for_staticfiles:
            assert obj.shared_dir_for_staticfiles, \
                "Cannot have use_shared_dir_for_staticfiles without shared_dir_for_staticfiles"
        return obj
class ElasticsearchConfig(jsonobject.JsonObject):
    """Per-environment Elasticsearch settings container."""
    _allow_dynamic_properties = False
    settings = jsonobject.ObjectProperty(lambda: SettingsByIndexConfig)
class AwsConfig(jsonobject.JsonObject):
    """AWS credential configuration: IAM keys (default) or SSO."""
    _allow_dynamic_properties = False
    credential_style = jsonobject.StringProperty(choices=('sso', 'iam'), default='iam')
    sso_config = jsonobject.ObjectProperty(lambda: SsoConfig, default=None)
class Mod(WithId, ApiObject):
    """A SpaceDock mod, fetched from the ``/mod/{id}`` endpoint."""

    _PATH = "/mod/{id}"

    url = jo.StringProperty(required=True)
    name = jo.StringProperty(required=True)
    game = jo.StringProperty(required=True)
    game_id = jo.IntegerProperty(required=True)
    author = jo.StringProperty(required=True)
    shared_authors = jo.ListProperty(jo.StringProperty())
    license = jo.StringProperty()
    website = jo.StringProperty()
    source_code = jo.StringProperty()
    short_description = jo.StringProperty()
    description = jo.StringProperty()
    downloads = jo.IntegerProperty()
    followers = jo.IntegerProperty()
    donations = jo.StringProperty()
    background = jo.StringProperty()
    bg_offset_x = jo.IntegerProperty()
    bg_offset_y = jo.IntegerProperty()
    default_version_id = jo.IntegerProperty()
    versions = jo.ListProperty(jo.ObjectProperty(ModVersion))

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # index positions in self.versions by numeric id and by version value
        self._version_by_id: Dict[int, int] = {
            v.id: i for i, v in enumerate(self.versions)
        }
        self._version_by_fv: Dict[VersionBase, int] = {
            v.version: i for i, v in enumerate(self.versions)
        }

    @property
    def default_version(self) -> Optional[ModVersion]:
        """The version flagged as default by the API, or None."""
        return self.get_version_by_id(self.default_version_id)

    def get_version_by_id(self, version_id: int) -> Optional[ModVersion]:
        """Look up a version by its numeric id; None if unknown."""
        try:
            return self.versions[self._version_by_id[version_id]]
        except KeyError:
            return None

    def get_version(self, version: Optional[VersionBase]) -> Optional[ModVersion]:
        """Look up a version by its version value; None if unknown or
        if ``version`` itself is falsy."""
        try:
            return self.versions[
                self._version_by_fv[version]] if version else None
        except KeyError:
            return None

    @classmethod
    def get(cls, mod_id: int) -> "Mod":
        """Fetch a mod by id from the API."""
        return cls._get(cls._url(id=mod_id))

    def reload(self) -> "Mod":
        """Return a freshly-fetched copy of this mod."""
        return self.get(self.id)

    # pylint: disable=too-many-arguments
    def update(
        self,
        version: str,
        changelog: str,
        game_version: str,
        zipball: StrPath,
        notify_followers=True,
    ) -> None:
        """Publish a new version of this mod.

        Requires a prior login (class-level cookies). Uploads the zipball
        and metadata as multipart form fields; any failure (including HTTP
        errors) is re-raised as SpacedockError with the original cause.
        """
        if not self._COOKIES:
            raise SpacedockError("Updating requires authentication")
        try:
            zipball = Path(zipball)
            res = requests.post(
                f"{self._url(id=self.id)}/update",
                cookies=self._COOKIES,
                files=(
                    ("version", (None, version)),
                    ("changelog", (None, changelog)),
                    ("game-version", (None, game_version)),
                    ("notify-followers", (None, "yes" if notify_followers else "no")),
                    ("zipball", (zipball.name, zipball.read_bytes())),
                ),
                timeout=REQUEST_TIMEOUT,
            )
            res.raise_for_status()
        except Exception as e:
            raise SpacedockError(
                f"Unable to update mod {self.name} to {version}", e) from e
class Face(jsonobject.JsonObject):
    """A detected face: bounding box plus prediction(s)."""
    top_prediction = jsonobject.ObjectProperty(Prediction)
    bounding_box = jsonobject.ObjectProperty(BoundingBox)
    # NOTE(review): despite the plural name this is a single Prediction
    # object, not a ListProperty — confirm whether that is intentional.
    all_predictions = jsonobject.ObjectProperty(Prediction)
class SimpleSMSAlertSchedule(jsonobject.JsonObject):
    """One-off (alert-style) SMS schedule with no recurrence fields."""
    schedule_type = SIMPLE_SMS_ALERT_SCHEDULE
    # message text keyed by string — presumably language code; confirm
    message = jsonobject.DictProperty(six.text_type)
    extra_options = jsonobject.ObjectProperty(ExtraSchedulingOptions)
class PatientPayload(jsonobject.JsonObject):
    """Outgoing 99DOTS patient-details payload built from case data."""
    beneficiary_id = jsonobject.StringProperty(required=True)
    enikshay_id = jsonobject.StringProperty(required=False)
    first_name = jsonobject.StringProperty(required=False)
    last_name = jsonobject.StringProperty(required=False)
    state_code = jsonobject.StringProperty(required=False)
    district_code = jsonobject.StringProperty(required=False)
    tu_code = jsonobject.StringProperty(required=False)
    phi_code = jsonobject.StringProperty(required=False, exclude_if_none=True)
    he_code = jsonobject.StringProperty(required=False, exclude_if_none=True)
    phone_numbers = jsonobject.StringProperty(required=False)
    merm_params = jsonobject.ObjectProperty(MermParams, required=False, exclude_if_none=True)
    treatment_start_date = jsonobject.StringProperty(required=False)
    treatment_supporter_name = jsonobject.StringProperty(required=False)
    treatment_supporter_phone_number = jsonobject.StringProperty(required=False)

    @classmethod
    def create(cls, person_case, episode_case):
        """Assemble the payload from a person case and its episode case.

        Person-level properties take precedence over episode-level ones.
        Location codes come from the person's location hierarchy; when
        ``phi`` is absent (AttributeError) the ``pcp`` code is sent as
        ``he_code`` instead — presumably the private-sector hierarchy;
        confirm against get_person_locations.
        """
        person_case_properties = person_case.dynamic_case_properties()
        episode_case_properties = episode_case.dynamic_case_properties()
        all_properties = episode_case_properties.copy()
        # items set on person trump items set on episode
        all_properties.update(person_case_properties)

        person_locations = get_person_locations(person_case, episode_case)
        try:
            locations = dict(
                state_code=person_locations.sto,
                district_code=person_locations.dto,
                tu_code=person_locations.tu,
                phi_code=person_locations.phi,
            )
        except AttributeError:
            locations = dict(
                state_code=person_locations.sto,
                district_code=person_locations.dto,
                tu_code=person_locations.tu,
                he_code=person_locations.pcp,
            )

        # Combine refill reminder date + time into an ISO-ish timestamp;
        # only set when both parts are present.
        refill_reminder_date = episode_case_properties.get(MERM_REFILL_REMINDER_DATE, None)
        refill_reminder_time = episode_case_properties.get(MERM_REFILL_REMINDER_TIME, None)
        if refill_reminder_time and refill_reminder_date:
            refill_reminder_datetime = "{}T{}".format(refill_reminder_date, refill_reminder_time)
        else:
            refill_reminder_datetime = None

        merm_params = MermParams(
            IMEI=episode_case_properties.get(MERM_ID, None),
            daily_reminder_status=episode_case_properties.get(MERM_DAILY_REMINDER_STATUS, None),
            daily_reminder_time=episode_case_properties.get(MERM_DAILY_REMINDER_TIME, None),
            refill_reminder_status=episode_case_properties.get(MERM_REFILL_REMINDER_STATUS, None),
            refill_reminder_datetime=refill_reminder_datetime,
            RT_hours=episode_case_properties.get(MERM_RT_HOURS, None),
        )

        return cls(
            beneficiary_id=person_case.case_id,
            enikshay_id=person_case_properties.get(ENIKSHAY_ID, None),
            first_name=person_case_properties.get(PERSON_FIRST_NAME, None),
            last_name=person_case_properties.get(PERSON_LAST_NAME, None),
            phone_numbers=_get_phone_numbers(all_properties),
            # MERM params only apply when a MERM device id is set
            merm_params=merm_params if episode_case_properties.get(MERM_ID, '') != '' else None,
            treatment_start_date=episode_case_properties.get(TREATMENT_START_DATE, None),
            treatment_supporter_name=u"{} {}".format(
                episode_case_properties.get(TREATMENT_SUPPORTER_FIRST_NAME, ''),
                episode_case_properties.get(TREATMENT_SUPPORTER_LAST_NAME, ''),
            ),
            treatment_supporter_phone_number=(
                _format_number(
                    _parse_number(episode_case_properties.get(TREATMENT_SUPPORTER_PHONE))
                )
            ),
            weight_band=episode_case_properties.get(WEIGHT_BAND),
            address=person_case_properties.get(CURRENT_ADDRESS),
            sector='private' if person_case_properties.get(ENROLLED_IN_PRIVATE) == 'true' else 'public',
            **locations
        )
class PostgresqlConfig(jsonobject.JsonObject):
    """Environment postgresql.yml: database definitions plus host-level
    overrides, with helpers to expand them into ansible variables."""
    _allow_dynamic_properties = False
    SEPARATE_SYNCLOGS_DB = jsonobject.BooleanProperty(default=True)
    SEPARATE_FORM_PROCESSING_DBS = jsonobject.BooleanProperty(default=True)
    DEFAULT_POSTGRESQL_HOST = jsonobject.StringProperty(default=None)
    DEFAULT_CONN_MAX_AGE = jsonobject.IntegerProperty(default=None)
    REPORTING_DATABASES = jsonobject.DictProperty(
        default=lambda: {"ucr": "ucr"})
    LOAD_BALANCED_APPS = jsonobject.DictProperty(default={})
    host_settings = jsonobject.DictProperty(lambda: HostSettings)
    dbs = jsonobject.ObjectProperty(lambda: SmartDBConfig)
    postgres_override = jsonobject.ObjectProperty(PostgresqlOverride)
    pgbouncer_override = jsonobject.ObjectProperty(PgbouncerOverride)

    @classmethod
    def wrap(cls, data):
        # for better validation error message
        PostgresqlOverride.wrap(data.get('postgres_override', {}))
        PgbouncerOverride.wrap(data.get('pgbouncer_override', {}))
        self = super(PostgresqlConfig, cls).wrap(data)
        # fill in default credentials on any db that didn't specify them
        for db in self.generate_postgresql_dbs():
            if not db.user:
                db.user = DEFAULT_POSTGRESQL_USER
            if not db.password:
                db.password = DEFAULT_POSTGRESQL_PASSWORD
        return self

    def to_generated_variables(self, environment):
        """Expand this config into the variable dict consumed by ansible:
        flattened db list, override values merged in, and a per-host db
        mapping for pgbouncer/postgresql configuration."""
        data = self.to_json()
        del data['postgres_override']
        del data['pgbouncer_override']
        data['postgresql_dbs'] = data.pop('dbs')

        sorted_dbs = sorted(
            (db.to_json() for db in self.generate_postgresql_dbs()),
            key=lambda db: db['name'])

        data['postgresql_dbs']['all'] = sorted_dbs
        data.update(self.postgres_override.to_json())
        data.update(self.pgbouncer_override.to_json())

        # generate list of databases per host for use in pgbouncer and postgresql configuration
        postgresql_hosts = environment.groups.get('postgresql', [])
        if self.DEFAULT_POSTGRESQL_HOST not in postgresql_hosts:
            postgresql_hosts.append(self.DEFAULT_POSTGRESQL_HOST)

        dbs_by_host = defaultdict(list)
        for db in sorted_dbs:
            if db['pgbouncer_host'] in postgresql_hosts:
                dbs_by_host[db['pgbouncer_host']].append(db)

        # standbys serve the same dbs as their root (master) pg host
        for host in environment.groups.get('pg_standby', []):
            root_pg_host = self._get_root_pg_host(host, environment)
            dbs_by_host[host] = dbs_by_host[root_pg_host]

        data['postgresql_dbs']['by_host'] = dict(dbs_by_host)
        return data

    def _get_root_pg_host(self, standby_host, env):
        """Follow hot_standby_master links until reaching a host in the
        postgresql group; raises PGConfigException when the chain breaks."""
        vars = env.get_host_vars(standby_host)
        standby_master = vars.get('hot_standby_master')
        if not standby_master:
            raise PGConfigException(
                '{} has not root pg host'.format(standby_host))
        if standby_master in env.groups['postgresql']:
            return standby_master
        return self._get_root_pg_host(standby_master, env)

    def replace_hosts(self, environment):
        """Translate all inventory host aliases in this config into real
        hosts, and fill in host/port/conn_max_age defaults on each db."""
        if self.DEFAULT_POSTGRESQL_HOST is None:
            self.DEFAULT_POSTGRESQL_HOST = environment.groups['postgresql'][0]
        elif self.DEFAULT_POSTGRESQL_HOST != '127.0.0.1':
            self.DEFAULT_POSTGRESQL_HOST = environment.translate_host(
                self.DEFAULT_POSTGRESQL_HOST, environment.paths.postgresql_yml)

        host_settings = {
            environment.translate_host(host, environment.paths.postgresql_yml): value
            for host, value in self.host_settings.items()
        }

        for db in self.generate_postgresql_dbs():
            if db.host is None:
                db.host = self.DEFAULT_POSTGRESQL_HOST
            elif db.host != '127.0.0.1':
                db.host = environment.translate_host(
                    db.host, environment.paths.postgresql_yml)
            if db.pgbouncer_host is None:
                db.pgbouncer_host = db.host
            else:
                db.pgbouncer_host = environment.translate_host(
                    db.pgbouncer_host, environment.paths.postgresql_yml)
            if db.port is None:
                if db.host in host_settings:
                    db.port = host_settings[db.host].port
                else:
                    db.port = DEFAULT_PORT
            if db.conn_max_age is None:
                db.conn_max_age = self.DEFAULT_CONN_MAX_AGE

        # translate host names hidden inside hba netmask entries
        for entry in self.postgres_override.postgresql_hba_entries:
            netmask = entry.get('netmask')
            if netmask and not re.match(r'(\d+\.?){4}', netmask):
                host, mask = netmask.split('/')
                host = environment.translate_host(
                    host, environment.paths.postgresql_yml)
                entry['netmask'] = '{}/{}'.format(host, mask)

    def generate_postgresql_dbs(self):
        """Flatten every configured db (skipping unset optional ones)."""
        return filter(None, [
            self.dbs.main, self.dbs.synclogs,
        ] + (self.dbs.form_processing.get_db_list() if self.dbs.form_processing else [])
          + [self.dbs.ucr, self.dbs.formplayer]
          + self.dbs.custom + self.dbs.standby)

    def _check_reporting_databases(self):
        # Every django alias referenced by REPORTING_DATABASES must be
        # defined by some configured db.
        referenced_django_aliases = set()
        defined_django_aliases = {
            db.django_alias for db in self.generate_postgresql_dbs()
            if db.django_alias is not None
        }
        for reporting_alias, value in self.REPORTING_DATABASES.items():
            if isinstance(value, six.string_types):
                referenced_django_aliases.add(value)
            else:
                # value is {WRITE: alias, READ: [(alias, weight)...]}
                referenced_django_aliases.add(value['WRITE'])
                for alias, _ in value['READ']:
                    referenced_django_aliases.add(alias)
        assert referenced_django_aliases - defined_django_aliases == set(), \
            ("REPORTING_DATABASES must refer only to defined django aliases: {} not in {}"
             .format(', '.join(sorted(referenced_django_aliases - defined_django_aliases)),
                     ', '.join(sorted(defined_django_aliases))))

    def _check_shards(self):
        # partition shard ranges must be contiguous and non-overlapping
        if self.dbs.form_processing:
            validate_shards({
                name: db.shards
                for name, db in self.dbs.form_processing.partitions.items()
            })

    def check(self):
        """Validate cross-field consistency of the whole config."""
        self._check_reporting_databases()
        self._check_shards()
        assert (self.SEPARATE_SYNCLOGS_DB
                if self.dbs.synclogs is not None
                else not self.SEPARATE_SYNCLOGS_DB), \
            'synclogs should be None if and only if SEPARATE_SYNCLOGS_DB is False'
        assert (self.SEPARATE_FORM_PROCESSING_DBS
                if self.dbs.form_processing is not None
                else not self.SEPARATE_FORM_PROCESSING_DBS), \
            'form_processing should be None if and only if SEPARATE_FORM_PROCESSING_DBS is False'
def es_settings_property():
    """Return a fresh, lazily-bound ObjectProperty for SettingsConfig."""
    return jsonobject.ObjectProperty(lambda: SettingsConfig)
class Credential(jsonobject.JsonObject):
    """A stored credential: a target identifier plus its auth details."""
    target = jsonobject.StringProperty(required=True)
    auth = jsonobject.ObjectProperty(lambda: Auth)
class UsersConfig(jsonobject.JsonObject):
    """User-management configuration (developer users)."""
    _allow_dynamic_properties = False
    dev_users = jsonobject.ObjectProperty(lambda: DevUsers)
class PostgresqlConfig(jsonobject.JsonObject):
    """Schema for an environment's postgresql.yml, plus host-resolution and
    validation logic (pgbouncer/citus-aware variant)."""
    # strict schema: reject undeclared keys
    _allow_dynamic_properties = False
    SEPARATE_SYNCLOGS_DB = jsonobject.BooleanProperty(default=True)
    SEPARATE_FORM_PROCESSING_DBS = jsonobject.BooleanProperty(default=True)
    DEFAULT_POSTGRESQL_HOST = jsonobject.StringProperty(default=None)
    # lambda default so each instance gets its own fresh dict
    REPORTING_DATABASES = jsonobject.DictProperty(default=lambda: {"ucr": "ucr"})
    # NOTE(review): plain {} default is shared unless jsonobject copies it — confirm
    LOAD_BALANCED_APPS = jsonobject.DictProperty(default={})
    host_settings = jsonobject.DictProperty(lambda: HostSettings)
    dbs = jsonobject.ObjectProperty(lambda: SmartDBConfig)
    replications = jsonobject.ListProperty(lambda: LogicalReplicationOptions, required=False)
    postgres_override = jsonobject.ObjectProperty(PostgresqlOverride)
    pgbouncer_override = jsonobject.ObjectProperty(PgbouncerOverride)
    # Mapping of host to list of databases to run pg_repack on
    pg_repack = jsonobject.DictProperty()

    @classmethod
    def wrap(cls, data):
        """Deserialize, pre-validating sub-objects and applying default
        postgres credentials to any db that lacks them."""
        # for better validation error message
        PostgresqlOverride.wrap(data.get('postgres_override', {}))
        PgbouncerOverride.wrap(data.get('pgbouncer_override', {}))
        # validate each replication entry up front (result is discarded)
        [LogicalReplicationOptions(_data) for _data in data.get('replications', [])]
        self = super(PostgresqlConfig, cls).wrap(data)
        for db in self.generate_postgresql_dbs():
            if not db.user:
                db.user = DEFAULT_POSTGRESQL_USER
            if not db.password:
                db.password = DEFAULT_POSTGRESQL_PASSWORD
        return self

    def to_generated_variables(self, environment):
        """Render this config as the ansible variable dict, including db lists
        grouped per pgbouncer host (standbys mirror their root's list; citus
        workers get rewritten copies of the citus master's dbs)."""
        data = self.to_json()
        del data['postgres_override']
        del data['pgbouncer_override']
        data['postgresql_dbs'] = data.pop('dbs')
        sorted_dbs = sorted(
            (db.to_json() for db in self.generate_postgresql_dbs()),
            key=lambda db: db['name']
        )
        data['postgresql_dbs']['all'] = sorted_dbs
        # overrides are flattened into the top-level variable dict
        data.update(self.postgres_override.to_json())
        data.update(self.pgbouncer_override.to_json())
        # generate list of databases per host for use in pgbouncer and postgresql configuration
        all_pgbouncer_hosts = environment.groups.get('postgresql', [])
        if self.DEFAULT_POSTGRESQL_HOST not in all_pgbouncer_hosts:
            # NOTE(review): this appends to the list returned by
            # environment.groups — confirm that is a copy, not shared state
            all_pgbouncer_hosts.append(self.DEFAULT_POSTGRESQL_HOST)
        all_pgbouncer_hosts.extend(environment.groups.get('citusdb_master', []))
        all_pgbouncer_hosts.extend(environment.groups.get('pgbouncer', []))
        dbs_by_host = defaultdict(list)
        for db in sorted_dbs:
            for pgbouncer_host in db['pgbouncer_hosts']:
                if pgbouncer_host in all_pgbouncer_hosts:
                    dbs_by_host[pgbouncer_host].append(db)
        for host in environment.groups.get('pg_standby', []):
            # standbys serve the same database list as their replication root
            root_pg_host = self._get_root_pg_host(host, environment)
            dbs_by_host[host] = dbs_by_host[root_pg_host]
        for host in environment.groups.get('citusdb_worker', []):
            # NOTE(review): citusdb_masters/pg_standbys are loop-invariant and
            # could be computed once outside this loop
            citusdb_masters = set(environment.groups.get('citusdb_master', []))
            pg_standbys = set(environment.groups.get('pg_standby', []))
            citusdb_masters = list(citusdb_masters - pg_standbys)
            if not citusdb_masters:
                raise PGConfigException('no hosts in the "citusdb_master" group (excluding standbys)')
            if len(citusdb_masters) > 1:
                raise PGConfigException('more than one citus master configured (excluding standbys)')
            citusdb_master = citusdb_masters[0]
            citus_dbs = []
            for db in sorted_dbs:
                if db['host'] == citusdb_master:
                    # each worker gets its own copy pointed at itself
                    db_config = copy.deepcopy(db)
                    db_config['host'] = host
                    db_config['pgbouncer_hosts'] = [host]
                    db_config['pgbouncer_endpoint'] = host
                    citus_dbs.append(db_config)
            dbs_by_host[host] = citus_dbs
        data['postgresql_dbs']['by_pgbouncer_host'] = dict(dbs_by_host)
        return data

    def _get_root_pg_host(self, standby_host, env):
        """Follow the ``hot_standby_master`` chain to a host in the
        ``postgresql`` or ``citusdb`` groups; raises PGConfigException if a
        standby defines no master.  Recursion handles cascading standbys."""
        standby_host = env.translate_host(standby_host, env.paths.inventory_source)
        vars = env.get_host_vars(standby_host)
        standby_master = vars.get('hot_standby_master')
        if not standby_master:
            # NOTE(review): message typo — likely intended "has no root PG host"
            raise PGConfigException('{} has not root PG host'.format(standby_host))
        standby_master = env.translate_host(standby_master, env.paths.inventory_source)
        potential_masters = env.groups['postgresql'] + env.groups.get('citusdb', [])
        if standby_master in potential_masters:
            return standby_master
        return self._get_root_pg_host(standby_master, env)

    def replace_hosts(self, environment):
        """Resolve host aliases and fill in per-db defaults, in place:
        DEFAULT_POSTGRESQL_HOST, each db's host/pgbouncer hosts/endpoint/port,
        pg_repack keys, replication endpoints, pg_hba netmask hostnames, and
        missing standby db names (copied from their master by django alias)."""
        if self.DEFAULT_POSTGRESQL_HOST is None:
            # default to the first host of the postgresql group
            self.DEFAULT_POSTGRESQL_HOST = environment.groups['postgresql'][0]
        elif self.DEFAULT_POSTGRESQL_HOST != '127.0.0.1':
            # loopback is left as-is; anything else may be an alias
            self.DEFAULT_POSTGRESQL_HOST = environment.translate_host(
                self.DEFAULT_POSTGRESQL_HOST, environment.paths.postgresql_yml)
        # re-key host_settings by translated hostname so lookups below match
        host_settings = {
            environment.translate_host(host, environment.paths.postgresql_yml): value
            for host, value in self.host_settings.items()
        }
        all_dbs = self.generate_postgresql_dbs()
        for db in all_dbs:
            if db.host is None:
                db.host = self.DEFAULT_POSTGRESQL_HOST
            elif db.host != '127.0.0.1':
                db.host = environment.translate_host(db.host, environment.paths.postgresql_yml)
            if not db.pgbouncer_hosts:
                # pgbouncer defaults to running on the database host itself
                db.pgbouncer_hosts = [db.host]
                db.pgbouncer_endpoint = db.host
            else:
                db.pgbouncer_hosts = [
                    environment.translate_host(pgbouncer_host, environment.paths.postgresql_yml)
                    for pgbouncer_host in db.pgbouncer_hosts
                ]
                db.pgbouncer_endpoint = environment.translate_host(
                    db.pgbouncer_endpoint, environment.paths.postgresql_yml)
            if db.port is None:
                # per-host override wins; otherwise fall back to DEFAULT_PORT
                if db.host in host_settings:
                    db.port = host_settings[db.host].port
                else:
                    db.port = DEFAULT_PORT
        pg_repack = {
            environment.translate_host(host, environment.paths.postgresql_yml): databases
            for host, databases in self.pg_repack.items()
        }
        self.pg_repack = pg_repack
        for replication in self.replications:
            replication.source_host = environment.translate_host(
                replication.source_host, environment.paths.postgresql_yml)
            replication.target_host = environment.translate_host(
                replication.target_host, environment.paths.postgresql_yml)
        for entry in self.postgres_override.postgresql_hba_entries:
            netmask = entry.get('netmask')
            # entries that are not already dotted-quad CIDRs are treated as
            # "<hostname>/<mask>" and have the hostname translated
            if netmask and not re.match(r'(\d+\.?){4}', netmask):
                host, mask = netmask.split('/')
                host = environment.translate_host(host, environment.paths.postgresql_yml)
                entry['netmask'] = '{}/{}'.format(host, mask)
        all_dbs_by_alias = {db.django_alias: db for db in all_dbs}
        for db in self.dbs.standby:
            # a standby without an explicit name inherits its master's db name
            if not db.name and db.master in all_dbs_by_alias:
                db.name = all_dbs_by_alias[db.master].name

    def generate_postgresql_dbs(self):
        """Return all configured databases as a list, skipping unset (falsy) entries."""
        return [_f for _f in [
            self.dbs.main,
            self.dbs.synclogs,
        ] + (
            self.dbs.form_processing.get_db_list() if self.dbs.form_processing else []
        ) + [self.dbs.ucr, self.dbs.formplayer] + self.dbs.custom + self.dbs.standby if _f]

    def _check_reporting_databases(self):
        """Assert every alias in REPORTING_DATABASES maps to a defined django alias."""
        referenced_django_aliases = set()
        defined_django_aliases = {
            db.django_alias for db in self.generate_postgresql_dbs()
            if db.django_alias is not None
        }
        for reporting_alias, value in self.REPORTING_DATABASES.items():
            if isinstance(value, six.string_types):
                referenced_django_aliases.add(value)
            else:
                # value is {WRITE: alias, READ: [(alias, weight)...]}
                referenced_django_aliases.add(value['WRITE'])
                for alias, _ in value['READ']:
                    referenced_django_aliases.add(alias)
        assert referenced_django_aliases - defined_django_aliases == set(), \
            ("REPORTING_DATABASES must refer only to defined django aliases: {} not in {}"
             .format(', '.join(sorted(referenced_django_aliases - defined_django_aliases)),
                     ', '.join(sorted(defined_django_aliases))))

    def _check_shards(self):
        """Validate shard layout of the form-processing partitions, if any."""
        if self.dbs.form_processing:
            validate_shards({
                name: db.shards
                for name, db in self.dbs.form_processing.partitions.items()
            })

    def _check_standbys(self):
        """Assert each standby references an existing master alias and that
        master and standby agree on the database name."""
        if self.dbs.standby:
            defined_django_aliases = {
                db.django_alias: db
                for db in self.generate_postgresql_dbs()
                if db.django_alias is not None
            }
            for db in self.dbs.standby:
                master_db = defined_django_aliases.get(db.master)
                assert master_db, \
                    'Standby databases reference missing masters: {}'.format(db.master)
                assert master_db.name == db.name, \
                    'Master and standby have different names: {}'.format(db.django_alias)

    def check(self):
        """Run all config validations; raises AssertionError on inconsistency."""
        self._check_reporting_databases()
        self._check_shards()
        self._check_standbys()
        assert (self.SEPARATE_SYNCLOGS_DB
                if self.dbs.synclogs is not None
                else not self.SEPARATE_SYNCLOGS_DB), \
            'synclogs should be None if and only if SEPARATE_SYNCLOGS_DB is False'
        assert (self.SEPARATE_FORM_PROCESSING_DBS
                if self.dbs.form_processing is not None
                else not self.SEPARATE_FORM_PROCESSING_DBS), \
            'form_processing should be None if and only if SEPARATE_FORM_PROCESSING_DBS is False'
class PostgresqlConfig(jsonobject.JsonObject):
    """Schema for an environment's postgresql.yml, plus host-resolution and
    validation logic (simpler variant: single ``override``, no pgbouncer)."""
    # strict schema: reject undeclared keys
    _allow_dynamic_properties = False
    SEPARATE_SYNCLOGS_DB = jsonobject.BooleanProperty(default=True)
    SEPARATE_FORM_PROCESSING_DBS = jsonobject.BooleanProperty(default=True)
    DEFAULT_POSTGRESQL_HOST = jsonobject.StringProperty(default=None)
    # default credentials are ansible/jinja templates resolved at render time
    DEFAULT_POSTGRESQL_USER = jsonobject.StringProperty(
        default="{{ secrets.POSTGRES_USERS.commcare.username }}")
    DEFAULT_POSTGRESQL_PASSWORD = jsonobject.StringProperty(
        default="{{ secrets.POSTGRES_USERS.commcare.password }}")
    # lambda default so each instance gets its own fresh dict
    REPORTING_DATABASES = jsonobject.DictProperty(
        default=lambda: {"ucr": "ucr"})
    # NOTE(review): plain {} default is shared unless jsonobject copies it — confirm
    LOAD_BALANCED_APPS = jsonobject.DictProperty(default={})
    dbs = jsonobject.ObjectProperty(lambda: SmartDBConfig)
    override = jsonobject.ObjectProperty(PostgresqlOverride)

    @classmethod
    def wrap(cls, data):
        """Deserialize, pre-validating the override block and applying the
        default postgres credentials to any db that lacks them."""
        # for better validation error message
        PostgresqlOverride.wrap(data.get('override', {}))
        self = super(PostgresqlConfig, cls).wrap(data)
        for db in self.generate_postgresql_dbs():
            if not db.user:
                db.user = self.DEFAULT_POSTGRESQL_USER
            if not db.password:
                db.password = self.DEFAULT_POSTGRESQL_PASSWORD
        return self

    def to_generated_variables(self):
        """Render this config as the ansible variable dict: dbs become a
        name-sorted ``postgresql_dbs`` list; override keys are flattened in."""
        data = self.to_json()
        del data['dbs']
        del data['override']
        data['postgresql_dbs'] = sorted(
            (db.to_json() for db in self.generate_postgresql_dbs()),
            key=lambda db: db['name'])
        data.update(self.override.to_json())
        return data

    def replace_hosts(self, environment):
        """Resolve host aliases in place: DEFAULT_POSTGRESQL_HOST and each
        db's host (loopback addresses are left untouched)."""
        if self.DEFAULT_POSTGRESQL_HOST is None:
            # default to the first host of the postgresql group
            self.DEFAULT_POSTGRESQL_HOST = environment.groups['postgresql'][0]
        elif self.DEFAULT_POSTGRESQL_HOST != '127.0.0.1':
            self.DEFAULT_POSTGRESQL_HOST = environment.translate_host(
                self.DEFAULT_POSTGRESQL_HOST, environment.paths.postgresql_yml)
        for db in self.generate_postgresql_dbs():
            if db.host is None:
                db.host = self.DEFAULT_POSTGRESQL_HOST
            elif db.host != '127.0.0.1':
                db.host = environment.translate_host(
                    db.host, environment.paths.postgresql_yml)

    def generate_postgresql_dbs(self):
        """Return all configured databases, skipping unset (falsy) entries.

        NOTE(review): under Python 3 ``filter`` returns a one-shot iterator;
        each call here builds a fresh one, so callers that iterate once are
        fine — confirm no caller iterates a single result twice.
        """
        return filter(None, [
            self.dbs.main,
            self.dbs.synclogs,
        ] + (self.dbs.form_processing.get_db_list() if self.dbs.form_processing else [])
            + [self.dbs.ucr, self.dbs.formplayer]
            + self.dbs.custom
            + self.dbs.standby)

    def _check_reporting_databases(self):
        """Assert every alias in REPORTING_DATABASES maps to a defined django alias."""
        referenced_django_aliases = set()
        defined_django_aliases = {
            db.django_alias for db in self.generate_postgresql_dbs()
            if db.django_alias is not None
        }
        for reporting_alias, value in self.REPORTING_DATABASES.items():
            if isinstance(value, six.string_types):
                referenced_django_aliases.add(value)
            else:
                # value is {WRITE: alias, READ: [(alias, weight)...]}
                referenced_django_aliases.add(value['WRITE'])
                for alias, _ in value['READ']:
                    referenced_django_aliases.add(alias)
        assert referenced_django_aliases - defined_django_aliases == set(), \
            ("REPORTING_DATABASES must refer only to defined django aliases: {} not in {}"
             .format(', '.join(sorted(referenced_django_aliases - defined_django_aliases)),
                     ', '.join(sorted(defined_django_aliases))))

    def _check_shards(self):
        """Validate shard layout of the form-processing partitions, if any."""
        if self.dbs.form_processing:
            validate_shards({
                name: db.shards
                for name, db in self.dbs.form_processing.partitions.items()
            })

    def check(self):
        """Run all config validations; raises AssertionError on inconsistency."""
        self._check_reporting_databases()
        self._check_shards()
        assert (self.SEPARATE_SYNCLOGS_DB
                if self.dbs.synclogs is not None
                else not self.SEPARATE_SYNCLOGS_DB), \
            'synclogs should be None if and only if SEPARATE_SYNCLOGS_DB is False'
        assert (self.SEPARATE_FORM_PROCESSING_DBS
                if self.dbs.form_processing is not None
                else not self.SEPARATE_FORM_PROCESSING_DBS), \
            'form_processing should be None if and only if SEPARATE_FORM_PROCESSING_DBS is False'