Example No. 1
class AppProcessesConfig(jsonobject.JsonObject):
    _allow_dynamic_properties = False
    environment = jsonobject.StringProperty()
    django_bind = IpAddressProperty()
    django_port = PortProperty()
    flower_port = PortProperty()
    gunicorn_workers_factor = jsonobject.IntegerProperty()
    gunicorn_workers_static_factor = jsonobject.IntegerProperty()
    jython_memory = MemorySpecProperty()
    formplayer_memory = MemorySpecProperty()
    http_proxy = IpAddressAndPortProperty()
    newrelic_javaagent = jsonobject.BooleanProperty()
    additional_no_proxy_hosts = CommaSeparatedStrings()

    service_blacklist = jsonobject.ListProperty(unicode)
    celery_processes = jsonobject.DictProperty(
        jsonobject.DictProperty(CeleryOptions))
    pillows = jsonobject.DictProperty(jsonobject.DictProperty())

    def check(self):
        validate_app_processes_config(self)

    def check_and_translate_hosts(self, environment):
        self.celery_processes = check_and_translate_hosts(
            environment, self.celery_processes)
        self.pillows = check_and_translate_hosts(environment, self.pillows)
Example No. 2
class AppProcessesConfig(jsonobject.JsonObject):
    _allow_dynamic_properties = False
    django_bind = IpAddressProperty()
    django_port = PortProperty()
    flower_port = PortProperty()
    gunicorn_workers_factor = jsonobject.IntegerProperty()
    gunicorn_workers_static_factor = jsonobject.IntegerProperty()
    formplayer_memory = MemorySpecProperty()
    http_proxy = IpAddressAndPortProperty()
    newrelic_djangoagent = jsonobject.BooleanProperty()
    newrelic_javaagent = jsonobject.BooleanProperty()
    django_command_prefix = jsonobject.StringProperty()
    celery_command_prefix = jsonobject.StringProperty()
    datadog_pythonagent = jsonobject.BooleanProperty()
    additional_no_proxy_hosts = CommaSeparatedStrings()

    service_blacklist = jsonobject.ListProperty(unicode)
    celery_processes = jsonobject.DictProperty(
        jsonobject.DictProperty(CeleryOptions))
    pillows = jsonobject.DictProperty(jsonobject.DictProperty())

    def check(self):
        validate_app_processes_config(self)

    def check_and_translate_hosts(self, environment):
        self.celery_processes = check_and_translate_hosts(
            environment, self.celery_processes)
        self.pillows = check_and_translate_hosts(environment, self.pillows)
        _validate_all_required_machines_mentioned(environment, self)

    def get_celery_heartbeat_thresholds(self):
        celery_queues = set()
        for host, celery_options in self.celery_processes.items():
            if host == 'None':
                continue
            for process_group in celery_options.keys():
                celery_queues.update(process_group.split(','))

        return {
            p.name: p.blockage_threshold
            for p in CELERY_PROCESSES if p.is_queue and p.name in celery_queues
        }

    def to_generated_variables(self):
        flower_host, = [
            machine
            for machine, queues_config in self.celery_processes.items()
            if 'flower' in queues_config
        ]
        return {
            'CELERY_FLOWER_URL':
            "http://{flower_host}:5555".format(flower_host=flower_host),
            'app_processes_config':
            self.to_json(),
            'celery_queues':
            CELERY_PROCESS_NAMES,
            'CELERY_HEARTBEAT_THRESHOLDS':
            self.get_celery_heartbeat_thresholds()
        }
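A standalone sketch of the celery_processes layout that get_celery_heartbeat_thresholds above iterates over; the host name, queue names, and options are purely hypothetical. It shows how comma-separated process-group keys are split into individual queue names:

# Hypothetical mapping: host -> {comma-separated queue names: options}.
celery_processes = {
    'celery0.example.com': {'reminder_queue,sms_queue': {}, 'flower': {}},
}

queues = set()
for host, options in celery_processes.items():
    for process_group in options:
        queues.update(process_group.split(','))

print(queues)  # {'reminder_queue', 'sms_queue', 'flower'}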
Example No. 3
class ExtraSchedulingOptions(jsonobject.JsonObject):
    active = jsonobject.BooleanProperty()
    include_descendant_locations = jsonobject.BooleanProperty()
    default_language_code = jsonobject.StringProperty()
    custom_metadata = jsonobject.DictProperty(six.text_type)
    use_utc_as_default_timezone = jsonobject.BooleanProperty()
    user_data_filter = jsonobject.DictProperty(jsonobject.ListProperty(six.text_type))
    stop_date_case_property_name = jsonobject.StringProperty()
Example No. 4
class SampleSchema(jsonobject.JsonObject):
    stringfield = jsonobject.StringProperty()
    intfield = jsonobject.IntegerProperty()
    dictfield = jsonobject.DictProperty()
    arrayfield = jsonobject.ListProperty()
    documentarrayfield = jsonobject.ListProperty(item_type=SubSchema)
    documentfield = jsonobject.DictProperty(item_type=SubSchema)
    datefield = jsonobject.DateProperty()
    datetimefield = jsonobject.DateTimeProperty()
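A minimal usage sketch, assuming SubSchema is a jsonobject.JsonObject subclass defined elsewhere with no required fields; dict and list properties declared with an item_type wrap their values into that type:

# Hypothetical data; undeclared keys would become dynamic properties,
# since SampleSchema does not set _allow_dynamic_properties = False.
sample = SampleSchema.wrap({
    'stringfield': 'hello',
    'intfield': 3,
    'documentfield': {'first': {}},
})
assert isinstance(sample.documentfield['first'], SubSchema)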
Example No. 5
class FormMetadata(JsonObject):
    user_id = jsonobject.StringProperty()
    received_on = jsonobject.DateTimeProperty()
    app_id = jsonobject.StringProperty()
    build_id = jsonobject.StringProperty()
    attachments = jsonobject.ListProperty(str)
    auth_context = jsonobject.DictProperty()
Example No. 6
class Recipe(Base):
    label = jsonobject.StringProperty(required=True)
    id = jsonobject.StringProperty(validators=_id_prefix_valid)
    options = jsonobject.ListProperty(unicode)
    custom_options = jsonobject.ListProperty(RecipeOption)
    match = jsonobject.DictProperty(validators=validate_filter)
    filters = jsonobject.ListProperty(dict, validators=validate_filters)
Example No. 7
class BranchConfig(jsonobject.JsonObject):
    trunk = jsonobject.StringProperty()
    name = jsonobject.StringProperty()
    branches = jsonobject.ListProperty(str)
    submodules = jsonobject.DictProperty(lambda: BranchConfig)
    pull_requests = jsonobject.ListProperty(str)

    def normalize(self):
        for submodule, subconfig in self.submodules.items():
            subconfig.trunk = subconfig.trunk or self.trunk
            subconfig.name = subconfig.name or self.name
            subconfig.normalize()

    def span_configs(self, path=('.', )):
        for submodule, subconfig in self.submodules.items():
            for item in subconfig.span_configs(path + (submodule, )):
                yield item
        yield os.path.join(*path), self

    def check_trunk_is_recent(self):
        # if it doesn't match our tag format
        if re.match(r'[\d-]+_[\d\.]+-\w+-deploy', self.trunk) is None:
            return True

        return self.trunk in git_recent_tags()
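A minimal usage sketch of normalize and span_configs above, assuming jsonobject, os, and re are imported in the module defining BranchConfig; the trunk, name, and submodule path are hypothetical:

config = BranchConfig.wrap({
    'trunk': 'master',
    'name': 'my-branch',
    'submodules': {'submodules/touchforms-src': {}},
})
config.normalize()  # children inherit trunk/name from the parent
assert config.submodules['submodules/touchforms-src'].trunk == 'master'
print([path for path, _ in config.span_configs()])
# ['./submodules/touchforms-src', '.']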
Example No. 8
class RdsInstanceConfig(jsonobject.JsonObject):
    _allow_dynamic_properties = False
    identifier = jsonobject.StringProperty(required=True)
    engine_version = jsonobject.StringProperty(default='9.6.6')
    instance_type = jsonobject.StringProperty(
        required=True)  # should start with 'db.'
    multi_az = jsonobject.BooleanProperty(default=False)
    storage = jsonobject.IntegerProperty(required=True)
    max_storage = jsonobject.IntegerProperty(default=0)
    create = jsonobject.BooleanProperty(default=True)
    username = "******"
    backup_window = "06:27-06:57"
    backup_retention = 30
    maintenance_window = "sat:08:27-sat:08:57"
    port = 5432
    params = jsonobject.DictProperty()

    _default_params = {
        'pg_stat_statements.track': 'all',
        'pg_stat_statements.max': 10000,
        'track_activity_query_size': 2048,
    }

    @classmethod
    def wrap(cls, data):
        if 'params' not in data:
            data['params'] = {}
        params = data['params']
        for name, value in cls._default_params.items():
            if name not in params:
                params[name] = value
        return super(RdsInstanceConfig, cls).wrap(data)
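A minimal usage sketch of the wrap hook above: defaults from _default_params are merged into params without overriding caller-supplied values (the identifier, instance type, and sizes are hypothetical):

rds = RdsInstanceConfig.wrap({
    'identifier': 'pg0',
    'instance_type': 'db.t3.medium',
    'storage': 100,
    'params': {'track_activity_query_size': 4096},
})
assert rds.params['pg_stat_statements.track'] == 'all'   # default filled in
assert rds.params['track_activity_query_size'] == 4096   # explicit value kept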
Example No. 9
class DBOptions(jsonobject.JsonObject):
    _allow_dynamic_properties = False

    name = jsonobject.StringProperty(required=True)
    host = jsonobject.StringProperty()
    pgbouncer_hosts = jsonobject.ListProperty(str)
    pgbouncer_endpoint = jsonobject.StringProperty(default=None)
    port = jsonobject.IntegerProperty(default=None)
    user = jsonobject.StringProperty()
    password = jsonobject.StringProperty()
    options = jsonobject.DictProperty(six.text_type)
    django_alias = jsonobject.StringProperty()
    django_migrate = jsonobject.BooleanProperty(default=True)
    query_stats = jsonobject.BooleanProperty(default=False)
    create = jsonobject.BooleanProperty(default=True)

    # config values to be set at the database level
    pg_config = jsonobject.ListProperty(lambda: PGConfigItem)

    @classmethod
    def wrap(cls, data):
        if 'pgbouncer_host' in data:
            assert 'pgbouncer_hosts' not in data and 'pgbouncer_endpoint' not in data
            pgbouncer_host = data.pop('pgbouncer_host')
            data['pgbouncer_hosts'] = [pgbouncer_host]
            data['pgbouncer_endpoint'] = pgbouncer_host

        return super(DBOptions, cls).wrap(data)
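A minimal usage sketch of the wrap hook above, assuming six, PGConfigItem, and the rest of the module are available; a legacy pgbouncer_host key is expanded into the newer pgbouncer_hosts/pgbouncer_endpoint pair:

db = DBOptions.wrap({'name': 'commcarehq', 'pgbouncer_host': 'pgbouncer0'})
assert db.pgbouncer_hosts == ['pgbouncer0']
assert db.pgbouncer_endpoint == 'pgbouncer0'
assert 'pgbouncer_host' not in db.to_json()  # the legacy key is consumed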
Example No. 10
class PrometheusConfig(jsonobject.JsonObject):
    prometheus_monitoring_enabled = jsonobject.BooleanProperty(required=True)
    grafana_security = jsonobject.DictProperty()

    def to_generated_variables(self):
        variables = self.to_json()
        return variables
Example No. 11
class SimInfo(jo.JsonObject):
    id = jo.StringProperty(required=True)
    name = jo.StringProperty(required=True)
    line_style = jo.DictProperty()
    # Mitigation group
    group = jo.StringProperty()
    # Ignored property
    sim = IgnoredProperty()
Example No. 12
class AwsConfig(StrictJsonObject):
    pem = jsonobject.StringProperty()
    ami = jsonobject.StringProperty()
    type = jsonobject.StringProperty()
    key_name = jsonobject.StringProperty()
    security_group_id = jsonobject.StringProperty()
    subnet = jsonobject.StringProperty()
    data_volume = jsonobject.DictProperty(exclude_if_none=True)
    boot_volume = jsonobject.DictProperty(exclude_if_none=True)

    @classmethod
    def wrap(cls, data):
        if 'boot_volume' in data:
            assert data['boot_volume']['DeviceName'] == '/dev/sda1', (
                "AWS EC2 instances always use /dev/sda1 as the boot volume, "
                "so please set your spec's boot_volume.DeviceName to '/dev/sda1'."
            )
        return super(AwsConfig, cls).wrap(data)
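A minimal usage sketch of the wrap check above (the AMI id and volume layout are hypothetical); a boot_volume spec must name /dev/sda1 as its device or the assertion fires:

aws = AwsConfig.wrap({
    'ami': 'ami-0123456789abcdef0',
    'type': 't3.medium',
    'boot_volume': {'DeviceName': '/dev/sda1', 'Ebs': {'VolumeSize': 80}},
})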
Example No. 13
class SimpleSMSDailyScheduleWithTime(jsonobject.JsonObject):
    schedule_type = SIMPLE_SMS_DAILY_SCHEDULE_WITH_TIME
    time = jsonobject.TimeProperty()
    message = jsonobject.DictProperty(six.text_type)
    total_iterations = jsonobject.IntegerProperty()
    start_offset = jsonobject.IntegerProperty()
    start_day_of_week = jsonobject.IntegerProperty()
    extra_options = jsonobject.ObjectProperty(ExtraSchedulingOptions)
    repeat_every = jsonobject.IntegerProperty()
Example No. 14
class BaseJsonCaseChange(jsonobject.JsonObject):
    case_name = jsonobject.StringProperty()
    case_type = jsonobject.StringProperty(name='@case_type')
    external_id = jsonobject.StringProperty()
    user_id = jsonobject.StringProperty(required=True)
    owner_id = jsonobject.StringProperty(name='@owner_id')
    properties = jsonobject.DictProperty(validators=[is_simple_dict], default={})
    indices = jsonobject.DictProperty(JsonIndex)
    _is_case_creation = False

    _allow_dynamic_properties = False

    class Meta(object):
        # prevent JsonObject from auto-converting dates etc.
        string_conversions = ()

    @classmethod
    def wrap(self, obj):
        for attr, _ in obj.items():
            if attr not in self._properties_by_key:
                # JsonObject will raise an exception here anyways, but we need
                # a user-friendly error message
                raise BadValueError(f"'{attr}' is not a valid field.")
        return super().wrap(obj)

    def get_caseblock(self):

        def _if_specified(value):
            return value if value is not None else CaseBlock.undefined

        return CaseBlock(
            case_id=self.get_case_id(),
            user_id=self.user_id,
            case_type=_if_specified(self.case_type),
            case_name=_if_specified(self.case_name),
            external_id=_if_specified(self.external_id),
            owner_id=_if_specified(self.owner_id),
            create=self._is_case_creation,
            update=dict(self.properties),
            index={
                name: IndexAttrs(index.case_type, index.case_id, index.relationship)
                for name, index in self.indices.items()
            },
        ).as_text()
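A minimal sketch of the wrap override above, assuming BadValueError is the exception jsonobject raises for invalid values; unknown keys get the friendlier message instead of jsonobject's generic one:

try:
    BaseJsonCaseChange.wrap({'user_id': 'abc123', 'case_nmae': 'typo'})
except BadValueError as err:
    print(err)  # 'case_nmae' is not a valid field.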
Example No. 15
class MetaConfig(jsonobject.JsonObject):
    _allow_dynamic_properties = False
    deploy_env = jsonobject.StringProperty(required=True)
    always_deploy_formplayer = jsonobject.BooleanProperty(default=False)
    env_monitoring_id = jsonobject.StringProperty(required=True)
    users = jsonobject.ListProperty(unicode, required=True)
    slack_alerts_channel = jsonobject.StringProperty()
    bare_non_cchq_environment = jsonobject.BooleanProperty(default=False)
    git_repositories = jsonobject.ListProperty(GitRepository)
    deploy_keys = jsonobject.DictProperty(unicode)
Example No. 16
class AppProcessesConfig(jsonobject.JsonObject):
    _allow_dynamic_properties = False
    django_bind = IpAddressProperty()
    django_port = PortProperty()
    flower_port = PortProperty()
    gunicorn_workers_factor = jsonobject.IntegerProperty()
    gunicorn_workers_static_factor = jsonobject.IntegerProperty()
    jython_memory = MemorySpecProperty()
    formplayer_memory = MemorySpecProperty()
    http_proxy = IpAddressAndPortProperty()
    newrelic_djangoagent = jsonobject.BooleanProperty()
    newrelic_javaagent = jsonobject.BooleanProperty()
    django_command_prefix = jsonobject.StringProperty()
    datadog_pythonagent = jsonobject.BooleanProperty()
    additional_no_proxy_hosts = CommaSeparatedStrings()

    service_blacklist = jsonobject.ListProperty(unicode)
    celery_processes = jsonobject.DictProperty(
        jsonobject.DictProperty(CeleryOptions))
    pillows = jsonobject.DictProperty(jsonobject.DictProperty())

    def check(self):
        validate_app_processes_config(self)

    def check_and_translate_hosts(self, environment):
        self.celery_processes = check_and_translate_hosts(
            environment, self.celery_processes)
        self.pillows = check_and_translate_hosts(environment, self.pillows)
        _validate_all_required_machines_mentioned(environment, self)

    def to_generated_variables(self):
        flower_host, = [
            machine
            for machine, queues_config in self.celery_processes.items()
            if 'flower' in queues_config
        ]
        return {
            'CELERY_FLOWER_URL':
            "http://{flower_host}:5555".format(flower_host=flower_host),
            'app_processes_config':
            self.to_json(),
        }
Example No. 17
class Inventory(StrictJsonObject):
    """
    This is an internal representation of the info we'll put in an ansible inventory file

    It's not structured the same way ansible inventory files are,
    because conceptually we treat host "groups" (just a way to name individual hosts)
    differently from "actual" groups (which we use to define roles).

    """
    all_hosts = jsonobject.ListProperty(lambda: Host)
    all_groups = jsonobject.DictProperty(lambda: Group)
Example No. 18
class DBOptions(jsonobject.JsonObject):
    _allow_dynamic_properties = False

    name = jsonobject.StringProperty(required=True)
    host = jsonobject.StringProperty()
    port = jsonobject.IntegerProperty(default=6432)
    user = jsonobject.StringProperty()
    password = jsonobject.StringProperty()
    options = jsonobject.DictProperty(unicode)
    django_alias = jsonobject.StringProperty()
    django_migrate = jsonobject.BooleanProperty(default=True)
    query_stats = jsonobject.BooleanProperty(default=False)
    create = jsonobject.BooleanProperty(default=True)
Example No. 19
def jsl_field_to_jsonobject_property(
        prop: jsl.BaseField) -> jsonobject.JsonProperty:
    if isinstance(prop, jsl.DateTimeField):
        return jsonobject.DateTimeProperty(name=prop.name,
                                           required=prop.required)
    if isinstance(prop, jsl.StringField):
        return jsonobject.StringProperty(name=prop.name,
                                         required=prop.required)
    if isinstance(prop, jsl.IntField):
        return jsonobject.IntegerProperty(name=prop.name,
                                          required=prop.required)
    if isinstance(prop, jsl.DictField):
        return jsonobject.DictProperty(name=prop.name, required=prop.required)
    if isinstance(prop, jsl.NumberField):
        return jsonobject.FloatProperty(name=prop.name, required=prop.required)
    if isinstance(prop, jsl.BooleanField):
        return jsonobject.BooleanProperty(name=prop.name,
                                          required=prop.required)
    if isinstance(prop, jsl.DocumentField):
        if prop.document_cls:
            subtype = jsl_to_jsonobject(prop.document_cls)
            return jsonobject.DictProperty(name=prop.name,
                                           item_type=subtype,
                                           required=prop.required)
        return jsonobject.DictProperty(name=prop.name, required=prop.required)
    if isinstance(prop, jsl.ArrayField):
        if prop.items:
            if isinstance(prop.items, jsl.DocumentField):
                subtype = jsl_to_jsonobject(prop.items.document_cls)
            elif isinstance(prop.items, jsl.BaseField):
                subtype = jsl_field_to_jsonobject_property(prop.items)
            else:
                raise KeyError(prop.items)
            return jsonobject.ListProperty(item_type=subtype,
                                           required=prop.required)
        return jsonobject.ListProperty(name=prop.name, required=prop.required)

    raise KeyError(prop)
Example No. 20
class ShardDetails(jsonobject.JsonObject):
    node = jsonobject.StringProperty()
    db_name = jsonobject.StringProperty()

    # shards/c0000000-dfffffff/commcarehq.1541009837
    shard_name = jsonobject.StringProperty()
    engine = jsonobject.StringProperty()
    doc_count = jsonobject.IntegerProperty()
    doc_del_count = jsonobject.IntegerProperty()
    purge_seq = jsonobject.IntegerProperty()
    compact_running = jsonobject.BooleanProperty()
    sizes = jsonobject.DictProperty()
    disk_size = jsonobject.IntegerProperty()
    data_size = jsonobject.IntegerProperty()
    other = jsonobject.DictProperty()
    instance_start_time = jsonobject.StringProperty()
    disk_format_version = jsonobject.IntegerProperty()
    committed_update_seq = jsonobject.IntegerProperty()
    compacted_seq = jsonobject.IntegerProperty()
    uuid = jsonobject.StringProperty()

    @property
    def shard_name_short(self):
        return self.shard_name.split('/')[1]
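A minimal usage sketch of shard_name_short above, using the shard path from the comment:

shard = ShardDetails(shard_name='shards/c0000000-dfffffff/commcarehq.1541009837')
print(shard.shard_name_short)  # c0000000-dfffffff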
Example No. 21
class FormProcessingConfig(jsonobject.JsonObject):
    _allow_dynamic_properties = False
    proxy = jsonobject.ObjectProperty(lambda: FormProcessingProxyDBOptions, required=True)
    partitions = jsonobject.DictProperty(lambda: StrictPartitionDBOptions, required=True)

    @classmethod
    def wrap(cls, data):
        for i, (django_alias, db) in enumerate(data['partitions'].items()):
            db['django_alias'] = db.get('django_alias', django_alias)
            db['name'] = db.get('name', 'commcarehq_{}'.format(db['django_alias']))
        self = super(FormProcessingConfig, cls).wrap(data)
        return self

    def get_db_list(self):
        return [self.proxy] + sorted(self.partitions.values(),
                                     key=lambda db: alphanum_key(db.django_alias))
Example No. 22
class DBOptions(jsonobject.JsonObject):
    _allow_dynamic_properties = False

    name = jsonobject.StringProperty(required=True)
    host = jsonobject.StringProperty()
    pgbouncer_host = jsonobject.StringProperty(default=None)
    port = jsonobject.IntegerProperty(default=None)
    user = jsonobject.StringProperty()
    password = jsonobject.StringProperty()
    options = jsonobject.DictProperty(unicode)
    django_alias = jsonobject.StringProperty()
    django_migrate = jsonobject.BooleanProperty(default=True)
    query_stats = jsonobject.BooleanProperty(default=False)
    create = jsonobject.BooleanProperty(default=True)

    # config values to be set at the database level
    pg_config = jsonobject.ListProperty(lambda: PGConfigItem)
Example No. 23
class BranchConfig(jsonobject.JsonObject):
    trunk = jsonobject.StringProperty()
    name = jsonobject.StringProperty()
    branches = jsonobject.ListProperty(unicode)
    submodules = jsonobject.DictProperty(lambda: BranchConfig)

    def normalize(self):
        for submodule, subconfig in self.submodules.items():
            subconfig.trunk = subconfig.trunk or self.trunk
            subconfig.name = subconfig.name or self.name
            subconfig.normalize()

    def span_configs(self, path=('.', )):
        for submodule, subconfig in self.submodules.items():
            for item in subconfig.span_configs(path + (submodule, )):
                yield item
        yield os.path.join(*path), self
Example No. 24
class MetaConfig(jsonobject.JsonObject):
    _allow_dynamic_properties = False
    deploy_env = jsonobject.StringProperty(required=True)
    always_deploy_formplayer = jsonobject.BooleanProperty(default=False)
    env_monitoring_id = jsonobject.StringProperty(required=True)
    users = jsonobject.ListProperty(six.text_type, required=True)
    slack_alerts_channel = jsonobject.StringProperty()
    bare_non_cchq_environment = jsonobject.BooleanProperty(default=False)
    git_repositories = jsonobject.ListProperty(GitRepository)
    deploy_keys = jsonobject.DictProperty(six.text_type)
    secrets_backend = jsonobject.StringProperty(
        choices=list(all_secrets_backends_by_name),
        default='ansible-vault',
    )

    def get_secrets_backend_class(self):
        # guaranteed to succeed because of the validation above on secrets_backend
        return all_secrets_backends_by_name[self.secrets_backend]
Example No. 25
class Spec(StrictJsonObject):
    """
    Parser for spec files

    These files declare how many machines should be allocated for each role.
    See specs/example_spec.yml for an example.

    """
    aws_config = jsonobject.ObjectProperty(lambda: AwsConfig)
    allocations = jsonobject.DictProperty(lambda: Allocation)

    @classmethod
    def wrap(cls, obj):
        allocations = {
            key: {'count': value} if isinstance(value, int) else value
            for key, value in obj.get('allocations', {}).items()
        }
        obj['allocations'] = allocations
        return super(Spec, cls).wrap(obj)
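A minimal usage sketch of the wrap normalization above; the role names are hypothetical and Allocation is assumed to expose a count property:

spec = Spec.wrap({'allocations': {'webworkers': 3, 'proxy': {'count': 1}}})
assert spec.allocations['webworkers'].count == 3  # bare int becomes {'count': 3}
assert spec.allocations['proxy'].count == 1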
Example No. 26
class FabSettingsConfig(jsonobject.JsonObject):
    _allow_dynamic_properties = False
    sudo_user = jsonobject.StringProperty()
    default_branch = jsonobject.StringProperty()
    home = jsonobject.StringProperty()
    project = jsonobject.StringProperty()
    code_repo = GitUriProperty()
    timing_log = jsonobject.StringProperty()
    keepalive = jsonobject.IntegerProperty()
    ignore_kafka_checkpoint_warning = jsonobject.BooleanProperty()
    acceptable_maintenance_window = jsonobject.ObjectProperty(
        lambda: AcceptableMaintenanceWindow)
    email_enabled = jsonobject.BooleanProperty()
    tag_deploy_commits = jsonobject.BooleanProperty(default=False)
    use_shared_dir_for_staticfiles = jsonobject.BooleanProperty(default=False)
    shared_dir_for_staticfiles = jsonobject.StringProperty(default=None)
    deploy_event_url = jsonobject.StringProperty(default=None)
    generate_deploy_diffs = jsonobject.BooleanProperty(default=True)
    custom_deploy_details = jsonobject.DictProperty()

    @classmethod
    def wrap(cls, data):
        for deprecated_property in ('py3_include_venv', 'py3_run_deploy'):
            if deprecated_property in data:
                print("{} {} {}".format(
                    color_notice("The property"),
                    color_code(deprecated_property),
                    color_notice("is deprecated and has no effect.")))
                print(
                    color_notice(
                        "Feel free to remove it from your fab-settings.yml."))
                del data[deprecated_property]

        obj = super(FabSettingsConfig, cls).wrap(data)
        if obj.use_shared_dir_for_staticfiles:
            assert obj.shared_dir_for_staticfiles, \
                "Cannot have use_shared_dir_for_staticfiles without shared_dir_for_staticfiles"
        return obj
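A minimal sketch of the deprecation handling in wrap above, assuming the color_notice/color_code helpers and the referenced property classes are importable; deprecated keys are announced and dropped before the object is built:

settings = FabSettingsConfig.wrap({
    'default_branch': 'master',
    'py3_run_deploy': True,  # deprecated: printed as a notice, then removed
})
assert 'py3_run_deploy' not in settings.to_json()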
Example No. 27
class SimpleSMSAlertSchedule(jsonobject.JsonObject):
    schedule_type = SIMPLE_SMS_ALERT_SCHEDULE
    message = jsonobject.DictProperty(six.text_type)
    extra_options = jsonobject.ObjectProperty(ExtraSchedulingOptions)
Example No. 28
class DocumentBase(DocumentSchema):

    _id = jsonobject.StringProperty(exclude_if_none=True)
    _rev = jsonobject.StringProperty(exclude_if_none=True)
    _attachments = jsonobject.DictProperty(exclude_if_none=True, default=None)

    _db = None

    # The rest of this class is mostly copied from couchdbkit 0.5.7

    @classmethod
    def set_db(cls, db):
        """ Set document db"""
        cls._db = db

    @classmethod
    def get_db(cls):
        """ get document db"""
        db = getattr(cls, '_db', None)
        if db is None:
            raise TypeError("doc database required to save document")
        return db

    def save(self, **params):
        """ Save document in database.

        @params db: couchdbkit.core.Database instance
        """
        self.validate()
        db = self.get_db()

        doc = self.to_json()
        db.save_doc(doc, **params)
        if '_id' in doc and '_rev' in doc:
            self._doc.update(doc)
        elif '_id' in doc:
            self._doc.update({'_id': doc['_id']})

    store = save

    @classmethod
    def save_docs(cls, docs, use_uuids=True):
        """ Save multiple documents in database.

        @params docs: list of couchdbkit.schema.Document instance
        @param use_uuids: add an _id to docs that don't have one already set.
        """
        db = cls.get_db()
        if any(doc._doc_type != cls._doc_type for doc in docs):
            raise ValueError(
                "one of your documents does not have the correct type")
        db.bulk_save(docs, use_uuids=use_uuids)

    bulk_save = save_docs

    @classmethod
    def delete_docs(cls, docs, empty_on_delete=False):
        """ Bulk delete documents in a database

        @params docs: list of couchdbkit.schema.Document instance
        @param empty_on_delete: default is False; set to True to make
        sure the doc is emptied and not stored as-is in Apache
        CouchDB.
        """
        db = cls.get_db()
        if any(doc._doc_type != cls._doc_type for doc in docs):
            raise ValueError(
                "one of your documents does not have the correct type")
        db.bulk_delete(docs, empty_on_delete=empty_on_delete)

    bulk_delete = delete_docs

    @classmethod
    def get(cls, docid, rev=None, db=None, dynamic_properties=True):
        """ get document with `docid`
        """
        if db is None:
            db = cls.get_db()
        cls._allow_dynamic_properties = dynamic_properties
        return db.get(docid, rev=rev, wrapper=cls.wrap)

    @classmethod
    def get_or_create(cls,
                      docid=None,
                      db=None,
                      dynamic_properties=True,
                      **params):
        """ get  or create document with `docid` """

        if db is not None:
            cls.set_db(db)
        cls._allow_dynamic_properties = dynamic_properties
        db = cls.get_db()

        if docid is None:
            obj = cls()
            obj.save(**params)
            return obj

        rev = params.pop('rev', None)

        try:
            return db.get(docid, rev=rev, wrapper=cls.wrap, **params)
        except ResourceNotFound:
            obj = cls()
            obj._id = docid
            obj.save(**params)
            return obj

    new_document = property(lambda self: self._doc.get('_rev') is None)

    def delete(self):
        """ Delete document from the database.
        @params db: couchdbkit.core.Database instance
        """
        if self.new_document:
            raise TypeError("the document is not saved")

        db = self.get_db()

        # delete doc
        db.delete_doc(self._id)

        # reinit document
        del self._doc['_id']
        del self._doc['_rev']
Example No. 29
class PostgresqlConfig(jsonobject.JsonObject):
    _allow_dynamic_properties = False

    SEPARATE_SYNCLOGS_DB = jsonobject.BooleanProperty(default=True)
    SEPARATE_FORM_PROCESSING_DBS = jsonobject.BooleanProperty(default=True)
    DEFAULT_POSTGRESQL_HOST = jsonobject.StringProperty(default=None)
    REPORTING_DATABASES = jsonobject.DictProperty(default=lambda: {"ucr": "ucr"})
    LOAD_BALANCED_APPS = jsonobject.DictProperty(default={})
    host_settings = jsonobject.DictProperty(lambda: HostSettings)
    dbs = jsonobject.ObjectProperty(lambda: SmartDBConfig)
    replications = jsonobject.ListProperty(lambda: LogicalReplicationOptions, required=False)

    postgres_override = jsonobject.ObjectProperty(PostgresqlOverride)
    pgbouncer_override = jsonobject.ObjectProperty(PgbouncerOverride)

    # Mapping of host to list of databases to run pg_repack on
    pg_repack = jsonobject.DictProperty()

    @classmethod
    def wrap(cls, data):
        # for better validation error message
        PostgresqlOverride.wrap(data.get('postgres_override', {}))
        PgbouncerOverride.wrap(data.get('pgbouncer_override', {}))
        [LogicalReplicationOptions(_data) for _data in data.get('replications', [])]
        self = super(PostgresqlConfig, cls).wrap(data)
        for db in self.generate_postgresql_dbs():
            if not db.user:
                db.user = DEFAULT_POSTGRESQL_USER
            if not db.password:
                db.password = DEFAULT_POSTGRESQL_PASSWORD
        return self

    def to_generated_variables(self, environment):
        data = self.to_json()
        del data['postgres_override']
        del data['pgbouncer_override']
        data['postgresql_dbs'] = data.pop('dbs')

        sorted_dbs = sorted(
            (db.to_json() for db in self.generate_postgresql_dbs()),
            key=lambda db: db['name']
        )
        data['postgresql_dbs']['all'] = sorted_dbs
        data.update(self.postgres_override.to_json())
        data.update(self.pgbouncer_override.to_json())

        # generate list of databases per host for use in pgbouncer and postgresql configuration
        all_pgbouncer_hosts = environment.groups.get('postgresql', [])
        if self.DEFAULT_POSTGRESQL_HOST not in all_pgbouncer_hosts:
            all_pgbouncer_hosts.append(self.DEFAULT_POSTGRESQL_HOST)
        all_pgbouncer_hosts.extend(environment.groups.get('citusdb_master', []))
        all_pgbouncer_hosts.extend(environment.groups.get('pgbouncer', []))

        dbs_by_host = defaultdict(list)
        for db in sorted_dbs:
            for pgbouncer_host in db['pgbouncer_hosts']:
                if pgbouncer_host in all_pgbouncer_hosts:
                    dbs_by_host[pgbouncer_host].append(db)

        for host in environment.groups.get('pg_standby', []):
            root_pg_host = self._get_root_pg_host(host, environment)
            dbs_by_host[host] = dbs_by_host[root_pg_host]

        for host in environment.groups.get('citusdb_worker', []):
            citusdb_masters = set(environment.groups.get('citusdb_master', []))
            pg_standbys = set(environment.groups.get('pg_standby', []))
            citusdb_masters = list(citusdb_masters - pg_standbys)
            if not citusdb_masters:
                raise PGConfigException('no hosts in the "citusdb_master" group (excluding standbys)')
            if len(citusdb_masters) > 1:
                raise PGConfigException('more than one citus master configured (excluding standbys)')

            citusdb_master = citusdb_masters[0]
            citus_dbs = []
            for db in sorted_dbs:
                if db['host'] == citusdb_master:
                    db_config = copy.deepcopy(db)
                    db_config['host'] = host
                    db_config['pgbouncer_hosts'] = [host]
                    db_config['pgbouncer_endpoint'] = host
                    citus_dbs.append(db_config)

            dbs_by_host[host] = citus_dbs

        data['postgresql_dbs']['by_pgbouncer_host'] = dict(dbs_by_host)
        return data

    def _get_root_pg_host(self, standby_host, env):
        standby_host = env.translate_host(standby_host, env.paths.inventory_source)
        vars = env.get_host_vars(standby_host)
        standby_master = vars.get('hot_standby_master')
        if not standby_master:
            raise PGConfigException('{} has no root PG host'.format(standby_host))
        standby_master = env.translate_host(standby_master, env.paths.inventory_source)
        potential_masters = env.groups['postgresql'] + env.groups.get('citusdb',[])
        if standby_master in potential_masters:
            return standby_master
        return self._get_root_pg_host(standby_master, env)

    def replace_hosts(self, environment):
        if self.DEFAULT_POSTGRESQL_HOST is None:
            self.DEFAULT_POSTGRESQL_HOST = environment.groups['postgresql'][0]
        elif self.DEFAULT_POSTGRESQL_HOST != '127.0.0.1':
            self.DEFAULT_POSTGRESQL_HOST = environment.translate_host(
                self.DEFAULT_POSTGRESQL_HOST, environment.paths.postgresql_yml)

        host_settings = {
            environment.translate_host(host, environment.paths.postgresql_yml): value
            for host, value in self.host_settings.items()
        }

        all_dbs = self.generate_postgresql_dbs()
        for db in all_dbs:
            if db.host is None:
                db.host = self.DEFAULT_POSTGRESQL_HOST
            elif db.host != '127.0.0.1':
                db.host = environment.translate_host(db.host, environment.paths.postgresql_yml)

            if not db.pgbouncer_hosts:
                db.pgbouncer_hosts = [db.host]
                db.pgbouncer_endpoint = db.host
            else:
                db.pgbouncer_hosts = [
                    environment.translate_host(pgbouncer_host, environment.paths.postgresql_yml)
                    for pgbouncer_host in db.pgbouncer_hosts
                ]
                db.pgbouncer_endpoint = environment.translate_host(
                    db.pgbouncer_endpoint, environment.paths.postgresql_yml)
            if db.port is None:
                if db.host in host_settings:
                    db.port = host_settings[db.host].port
                else:
                    db.port = DEFAULT_PORT

        pg_repack = {
            environment.translate_host(host, environment.paths.postgresql_yml): databases
            for host, databases in self.pg_repack.items()
        }
        self.pg_repack = pg_repack

        for replication in self.replications:
            replication.source_host = environment.translate_host(replication.source_host, environment.paths.postgresql_yml)
            replication.target_host = environment.translate_host(replication.target_host, environment.paths.postgresql_yml)

        for entry in self.postgres_override.postgresql_hba_entries:
            netmask = entry.get('netmask')
            if netmask and not re.match(r'(\d+\.?){4}', netmask):
                host, mask = netmask.split('/')
                host = environment.translate_host(host, environment.paths.postgresql_yml)
                entry['netmask'] = '{}/{}'.format(host, mask)

        all_dbs_by_alias = {db.django_alias: db for db in all_dbs}
        for db in self.dbs.standby:
            if not db.name and db.master in all_dbs_by_alias:
                db.name = all_dbs_by_alias[db.master].name

    def generate_postgresql_dbs(self):
        return [_f for _f in [
            self.dbs.main, self.dbs.synclogs,
        ] + (
            self.dbs.form_processing.get_db_list() if self.dbs.form_processing else []
        ) + [self.dbs.ucr, self.dbs.formplayer] + self.dbs.custom + self.dbs.standby if _f]

    def _check_reporting_databases(self):
        referenced_django_aliases = set()
        defined_django_aliases = {db.django_alias for db in self.generate_postgresql_dbs()
                                  if db.django_alias is not None}
        for reporting_alias, value in self.REPORTING_DATABASES.items():
            if isinstance(value, six.string_types):
                referenced_django_aliases.add(value)
            else:
                # value is {WRITE: alias, READ: [(alias, weight)...]}
                referenced_django_aliases.add(value['WRITE'])
                for alias, _ in value['READ']:
                    referenced_django_aliases.add(alias)
        assert referenced_django_aliases - defined_django_aliases == set(), \
            ("REPORTING_DATABASES must refer only to defined django aliases: {} not in {}"
             .format(', '.join(sorted(referenced_django_aliases - defined_django_aliases)),
                     ', '.join(sorted(defined_django_aliases))))

    def _check_shards(self):
        if self.dbs.form_processing:
            validate_shards({name: db.shards
                             for name, db in self.dbs.form_processing.partitions.items()})

    def _check_standbys(self):
        if self.dbs.standby:
            defined_django_aliases = {
                db.django_alias: db for db in self.generate_postgresql_dbs()
                if db.django_alias is not None
            }
            for db in self.dbs.standby:
                master_db = defined_django_aliases.get(db.master)
                assert master_db, \
                    'Standby databases reference missing masters: {}'.format(db.master)
                assert master_db.name == db.name, \
                    'Master and standby have different names: {}'.format(db.django_alias)

    def check(self):
        self._check_reporting_databases()
        self._check_shards()
        self._check_standbys()
        assert (self.SEPARATE_SYNCLOGS_DB if self.dbs.synclogs is not None
                else not self.SEPARATE_SYNCLOGS_DB), \
            'synclogs should be None if and only if SEPARATE_SYNCLOGS_DB is False'
        assert (self.SEPARATE_FORM_PROCESSING_DBS if self.dbs.form_processing is not None
                else not self.SEPARATE_FORM_PROCESSING_DBS), \
            'form_processing should be None if and only if SEPARATE_FORM_PROCESSING_DBS is False'
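A standalone illustration of the pg_hba netmask rewriting inside replace_hosts above; translate_host here is a stub standing in for environment.translate_host:

import re

def translate_host(host):
    # stub for environment.translate_host
    return {'pg0': '10.0.0.5'}.get(host, host)

entry = {'netmask': 'pg0/32'}  # hostname/mask rather than a dotted IP
netmask = entry.get('netmask')
if netmask and not re.match(r'(\d+\.?){4}', netmask):
    host, mask = netmask.split('/')
    entry['netmask'] = '{}/{}'.format(translate_host(host), mask)

print(entry)  # {'netmask': '10.0.0.5/32'}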
Example No. 30
class PostgresqlConfig(jsonobject.JsonObject):
    _allow_dynamic_properties = False

    SEPARATE_SYNCLOGS_DB = jsonobject.BooleanProperty(default=True)
    SEPARATE_FORM_PROCESSING_DBS = jsonobject.BooleanProperty(default=True)
    DEFAULT_POSTGRESQL_HOST = jsonobject.StringProperty(default=None)
    DEFAULT_CONN_MAX_AGE = jsonobject.IntegerProperty(default=None)
    REPORTING_DATABASES = jsonobject.DictProperty(
        default=lambda: {"ucr": "ucr"})
    LOAD_BALANCED_APPS = jsonobject.DictProperty(default={})
    host_settings = jsonobject.DictProperty(lambda: HostSettings)
    dbs = jsonobject.ObjectProperty(lambda: SmartDBConfig)

    postgres_override = jsonobject.ObjectProperty(PostgresqlOverride)
    pgbouncer_override = jsonobject.ObjectProperty(PgbouncerOverride)

    @classmethod
    def wrap(cls, data):
        # for better validation error message
        PostgresqlOverride.wrap(data.get('postgres_override', {}))
        PgbouncerOverride.wrap(data.get('pgbouncer_override', {}))
        self = super(PostgresqlConfig, cls).wrap(data)
        for db in self.generate_postgresql_dbs():
            if not db.user:
                db.user = DEFAULT_POSTGRESQL_USER
            if not db.password:
                db.password = DEFAULT_POSTGRESQL_PASSWORD
        return self

    def to_generated_variables(self, environment):
        data = self.to_json()
        del data['postgres_override']
        del data['pgbouncer_override']
        data['postgresql_dbs'] = data.pop('dbs')

        sorted_dbs = sorted(
            (db.to_json() for db in self.generate_postgresql_dbs()),
            key=lambda db: db['name'])
        data['postgresql_dbs']['all'] = sorted_dbs
        data.update(self.postgres_override.to_json())
        data.update(self.pgbouncer_override.to_json())

        # generate list of databases per host for use in pgbouncer and postgresql configuration
        postgresql_hosts = environment.groups.get('postgresql', [])
        if self.DEFAULT_POSTGRESQL_HOST not in postgresql_hosts:
            postgresql_hosts.append(self.DEFAULT_POSTGRESQL_HOST)

        dbs_by_host = defaultdict(list)
        for db in sorted_dbs:
            if db['pgbouncer_host'] in postgresql_hosts:
                dbs_by_host[db['pgbouncer_host']].append(db)

        for host in environment.groups.get('pg_standby', []):
            root_pg_host = self._get_root_pg_host(host, environment)
            dbs_by_host[host] = dbs_by_host[root_pg_host]

        data['postgresql_dbs']['by_host'] = dict(dbs_by_host)
        return data

    def _get_root_pg_host(self, standby_host, env):
        vars = env.get_host_vars(standby_host)
        standby_master = vars.get('hot_standby_master')
        if not standby_master:
            raise PGConfigException(
                '{} has no root pg host'.format(standby_host))
        if standby_master in env.groups['postgresql']:
            return standby_master
        return self._get_root_pg_host(standby_master, env)

    def replace_hosts(self, environment):
        if self.DEFAULT_POSTGRESQL_HOST is None:
            self.DEFAULT_POSTGRESQL_HOST = environment.groups['postgresql'][0]
        elif self.DEFAULT_POSTGRESQL_HOST != '127.0.0.1':
            self.DEFAULT_POSTGRESQL_HOST = environment.translate_host(
                self.DEFAULT_POSTGRESQL_HOST, environment.paths.postgresql_yml)

        host_settings = {
            environment.translate_host(host, environment.paths.postgresql_yml):
            value
            for host, value in self.host_settings.items()
        }

        for db in self.generate_postgresql_dbs():
            if db.host is None:
                db.host = self.DEFAULT_POSTGRESQL_HOST
            elif db.host != '127.0.0.1':
                db.host = environment.translate_host(
                    db.host, environment.paths.postgresql_yml)

            if db.pgbouncer_host is None:
                db.pgbouncer_host = db.host
            else:
                db.pgbouncer_host = environment.translate_host(
                    db.pgbouncer_host, environment.paths.postgresql_yml)
            if db.port is None:
                if db.host in host_settings:
                    db.port = host_settings[db.host].port
                else:
                    db.port = DEFAULT_PORT

            if db.conn_max_age is None:
                db.conn_max_age = self.DEFAULT_CONN_MAX_AGE

        for entry in self.postgres_override.postgresql_hba_entries:
            netmask = entry.get('netmask')
            if netmask and not re.match(r'(\d+\.?){4}', netmask):
                host, mask = netmask.split('/')
                host = environment.translate_host(
                    host, environment.paths.postgresql_yml)
                entry['netmask'] = '{}/{}'.format(host, mask)

    def generate_postgresql_dbs(self):
        return filter(None, [
            self.dbs.main,
            self.dbs.synclogs,
        ] + (self.dbs.form_processing.get_db_list() if self.dbs.form_processing
             else []) + [self.dbs.ucr, self.dbs.formplayer] + self.dbs.custom +
                      self.dbs.standby)

    def _check_reporting_databases(self):
        referenced_django_aliases = set()
        defined_django_aliases = {
            db.django_alias
            for db in self.generate_postgresql_dbs()
            if db.django_alias is not None
        }
        for reporting_alias, value in self.REPORTING_DATABASES.items():
            if isinstance(value, six.string_types):
                referenced_django_aliases.add(value)
            else:
                # value is {WRITE: alias, READ: [(alias, weight)...]}
                referenced_django_aliases.add(value['WRITE'])
                for alias, _ in value['READ']:
                    referenced_django_aliases.add(alias)
        assert referenced_django_aliases - defined_django_aliases == set(), \
            ("REPORTING_DATABASES must refer only to defined django aliases: {} not in {}"
             .format(', '.join(sorted(referenced_django_aliases - defined_django_aliases)),
                     ', '.join(sorted(defined_django_aliases))))

    def _check_shards(self):
        if self.dbs.form_processing:
            validate_shards({
                name: db.shards
                for name, db in self.dbs.form_processing.partitions.items()
            })

    def check(self):
        self._check_reporting_databases()
        self._check_shards()
        assert (self.SEPARATE_SYNCLOGS_DB if self.dbs.synclogs is not None
                else not self.SEPARATE_SYNCLOGS_DB), \
            'synclogs should be None if and only if SEPARATE_SYNCLOGS_DB is False'
        assert (self.SEPARATE_FORM_PROCESSING_DBS if self.dbs.form_processing is not None
                else not self.SEPARATE_FORM_PROCESSING_DBS), \
            'form_processing should be None if and only if SEPARATE_FORM_PROCESSING_DBS is False'
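A standalone illustration of the per-host grouping built in to_generated_variables above; the database dicts and host names are hypothetical:

from collections import defaultdict

sorted_dbs = [
    {'name': 'commcarehq', 'pgbouncer_host': 'pg0'},
    {'name': 'commcarehq_synclogs', 'pgbouncer_host': 'pg1'},
    {'name': 'commcarehq_ucr', 'pgbouncer_host': 'pg0'},
]
postgresql_hosts = ['pg0', 'pg1']

dbs_by_host = defaultdict(list)
for db in sorted_dbs:
    if db['pgbouncer_host'] in postgresql_hosts:
        dbs_by_host[db['pgbouncer_host']].append(db)

print({host: [db['name'] for db in dbs] for host, dbs in dbs_by_host.items()})
# {'pg0': ['commcarehq', 'commcarehq_ucr'], 'pg1': ['commcarehq_synclogs']}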