Example #1
    def read_jans_schema(self, others=[]):
        self.jans_attributes = []

        for schema_fn_ in ['jans_schema.json', 'custom_schema.json'] + others:
            schema_fn = schema_fn_ if schema_fn_.startswith('/') else os.path.join(Config.install_dir, 'schema', schema_fn_)
            schema = base.readJsonFile(schema_fn)
            self.jans_attributes += schema['attributeTypes']

        self.ldap_sql_data_type_mapping = base.readJsonFile(os.path.join(Config.static_rdbm_dir, 'ldap_sql_data_type_mapping.json'))
        self.sql_data_types = base.readJsonFile(os.path.join(Config.static_rdbm_dir, 'sql_data_types.json'))
        self.opendj_attributes_syntax = base.readJsonFile(os.path.join(Config.static_rdbm_dir, 'opendj_attributes_syntax.json'))

    def update_backend(self):
        oxtrust_config = base.readJsonFile(self.oxtrust_config_fn)
        self.dbUtils.set_oxTrustConfApplication(oxtrust_config)

        self.dbUtils.add_client2script('2DAF-F9A5', Config.scim_rp_client_id)
        self.dbUtils.add_client2script('2DAF-F995', Config.scim_rp_client_id)
        self.dbUtils.enable_service('gluuScimEnabled')
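
Every example on this page calls base.readJsonFile() to load a JSON document from disk. The helper itself is not listed here; as a rough, hypothetical sketch (assuming it is just a thin wrapper around json.load, with the optional ordered flag used by JettyInstaller further down), it could look like this:

import json
import collections

def readJsonFile(json_file_path, ordered=False):
    # Hypothetical helper: parse the JSON file and return the resulting
    # dict/list, preserving key order when ordered=True.
    hook = collections.OrderedDict if ordered else None
    with open(json_file_path) as f:
        return json.load(f, object_pairs_hook=hook)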
Example #3
    def create_subtables(self):
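        # Read the sub-table definitions for the active RDBM backend and, for
        # Spanner, create one interleaved child table per multivalued attribute
        # plus an index on the attribute column.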

        sub_tables_fn = os.path.join(Config.static_rdbm_dir, 'sub_tables.json')
        sub_tables = base.readJsonFile(sub_tables_fn)

        for subtable in sub_tables.get(Config.rdbm_type, {}):
            subtable_columns = []
            attrname, data_type = sub_tables['spanner'][subtable]
            sql_cmd = 'CREATE TABLE `{0}_{1}` (`doc_id` STRING(64) NOT NULL, `dict_doc_id` INT64, `{1}` {2}) PRIMARY KEY (`doc_id`, `dict_doc_id`), INTERLEAVE IN PARENT `{0}` ON DELETE CASCADE'.format(subtable, attrname, data_type)
            self.dbUtils.spanner.create_table(sql_cmd)
            sql_cmd_index = 'CREATE INDEX `{0}_{1}Idx` ON `{0}_{1}` (`{1}`)'.format(subtable, attrname)
            self.dbUtils.spanner.create_table(sql_cmd_index)
Example #4
    def __init__(self):
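        # Seed self.attribTypes from opendj_types.json and make sure every
        # syntax-mapped type plus 'json' has a (possibly empty) bucket before
        # processing the Jans schema.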
        opendjTypesFn = os.path.join(paths.INSTALL_DIR, 'schema/opendj_types.json')
        self.attribTypes = base.readJsonFile(opendjTypesFn)

        for v in syntaxType.values():
            if not v in self.attribTypes:
                self.attribTypes[v] = []

        if 'json' not in self.attribTypes:
            self.attribTypes['json'] = []

        self.processJansSchema()

    def update_backend(self):

        self.dbUtils.enable_service('gluuPassportEnabled')

        for inum in ['2FDB-CF02', 'D40C-1CA4', '2DAF-F9A5']:
            self.dbUtils.enable_script(inum)

        passport_oxtrust_config = base.readJsonFile(self.passport_oxtrust_config_fn)

        self.dbUtils.set_oxTrustConfApplication(passport_oxtrust_config)
        self.dbUtils.set_configuration('gluuPassportEnabled', 'true')
        self.dbUtils.add_client2script('2DAF-F9A5', Config.passport_rp_client_id)
        self.dbUtils.add_client2script('2DAF-F995', Config.passport_rp_client_id)

    def import_openbanking_certificate(self):
        self.logIt("Importing openbanking ssl certificate")
        oxauth_config_json = base.readJsonFile(self.oxauth_config_json)
        jwksUri = oxauth_config_json['jwksUri']
        o = urlparse(jwksUri)
        jwks_addr = o.netloc
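        # Fetch the certificate presented by the jwksUri host with openssl
        # s_client and save it to a temporary .crt file.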
        ssl_cmd = shutil.which('openssl')
        random_crt_fn = os.path.join(self.output_folder,
                                     '{}.crt'.format(os.urandom(3).hex()))
        cmd = "echo -n | {} s_client -connect {}:443 | sed -ne '/-BEGIN CERTIFICATE-/,/-END CERTIFICATE-/p' > {}".format(
            ssl_cmd, jwks_addr, random_crt_fn)
        self.run(cmd, shell=True)
        alias = jwks_addr.replace('.', '_')

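        # Import the fetched certificate into the default Java truststore under
        # an alias derived from the host name.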
        self.run([
            Config.cmd_keytool, '-import', '-trustcacerts', '-keystore',
            Config.defaultTrustStoreFN, '-storepass', 'changeit', '-noprompt',
            '-alias', alias, '-file', random_crt_fn
        ])
Example #7
    def prepare_opendj_schema(self):
        sys.path.append(os.path.join(Config.install_dir, 'schema'))
        import manager as schemaManager

        self.logIt("Creating OpenDJ schema")

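        # Generate an OpenDJ .ldif schema file under static/opendj for every
        # JSON schema definition that declares a 'schemaFile', then copy all
        # resulting .ldif files into the OpenDJ schema folder.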
        json_files =  glob.glob(os.path.join(Config.install_dir, 'schema/*.json'))
        for jsf in json_files:
            data = base.readJsonFile(jsf)
            if 'schemaFile' in data:
                out_file = os.path.join(Config.install_dir, 'static/opendj', data['schemaFile'])
                schemaManager.generate(jsf, 'opendj', out_file)

        opendj_schema_files = glob.glob(os.path.join(Config.install_dir, 'static/opendj/*.ldif'))
        for schema_file in opendj_schema_files:
            self.copyFile(schema_file, self.openDjSchemaFolder)

        self.run([paths.cmd_chmod, '-R', 'a+rX', Config.ldapBaseFolder])
        self.run([paths.cmd_chown, '-R', 'ldap:ldap', Config.ldapBaseFolder])

        self.logIt("Re-starting OpenDj after schema update")
        self.stop()
        self.start()
Example #8
    def processJansSchema(self):
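        # Bucket each attribute from jans_schema.json into self.attribTypes by
        # OpenDJ data type (json / syntax-mapped / string) and record every
        # multivalued attribute name in self.listAttributes.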

        jansSchemaFn = os.path.join(paths.INSTALL_DIR, 'schema/jans_schema.json')
        jansSchema = base.readJsonFile(jansSchemaFn)
        jansAtrribs = jansSchema['attributeTypes']

        for attrib in jansAtrribs:
            if attrib.get('json'):
                atype = 'json'
            elif  attrib['syntax'] in syntaxType:
                atype = syntaxType[attrib['syntax']]
            else:
                atype = 'string'
                
            for name in attrib['names']:
                self.attribTypes[atype].append(name)

        for obj_type in ['objectClasses', 'attributeTypes']:
            for obj in jansSchema[obj_type]:
                if obj.get('multivalued'):
                    for name in obj['names']:
                        if not name in self.listAttributes:
                            self.listAttributes.append(name)
Example #9
class JettyInstaller(BaseInstaller, SetupUtils):

    # let's borrow these variables from Config
    jetty_home = Config.jetty_home
    jetty_base = Config.jetty_base
    jetty_app_configuration = base.readJsonFile(os.path.join(paths.DATA_DIR, 'jetty_app_configuration.json'), ordered=True)

    def __init__(self):
        setattr(base.current_app, self.__class__.__name__, self)
        self.service_name = 'jetty'
        self.needdb = False # we don't need backend connection in this class
        self.install_var = 'installJetty'
        self.app_type = AppType.APPLICATION
        self.install_type = InstallOption.MONDATORY
        if not base.snap:
            self.register_progess()
        self.jetty_user_home = '/home/jetty'
        self.jetty_user_home_lib = os.path.join(self.jetty_user_home, 'lib')

        self.app_custom_changes = {
            'jetty' : {
                'name' : 'jetty',
                'files' : [
                    {
                        'path' : os.path.join(self.jetty_home, 'etc/webdefault.xml'),
                        'replace' : [
                            {
                                'pattern' : r'(\<param-name\>dirAllowed<\/param-name\>)(\s*)(\<param-value\>)true(\<\/param-value\>)',
                                'update' : r'\1\2\3false\4'
                            }
                        ]
                    },
                    {
                        'path' : os.path.join(self.jetty_home, 'etc/jetty.xml'),
                        'replace' : [
                            {
                                'pattern' : '<New id="DefaultHandler" class="org.eclipse.jetty.server.handler.DefaultHandler"/>',
                                'update' : '<New id="DefaultHandler" class="org.eclipse.jetty.server.handler.DefaultHandler">\n\t\t\t\t <Set name="showContexts">false</Set>\n\t\t\t </New>'
                            }
                        ]
                    }
                ]
            }
        }


    def install(self):

        self.createUser('jetty', self.jetty_user_home)
        self.addUserToGroup('jans', 'jetty')
        self.run([paths.cmd_mkdir, '-p', self.jetty_user_home_lib])

        jettyArchive, jetty_dist = self.get_jetty_info()

        jettyTemp = os.path.join(jetty_dist, 'temp')
        self.run([paths.cmd_mkdir, '-p', jettyTemp])
        self.run([paths.cmd_chown, '-R', 'jetty:jetty', jettyTemp])

        try:
            self.logIt("Extracting %s into /opt/jetty" % jettyArchive)
            self.run(['tar', '-xzf', jettyArchive, '-C', jetty_dist, '--no-xattrs', '--no-same-owner', '--no-same-permissions'])
        except:
            self.logIt("Error encountered while extracting archive %s" % jettyArchive)

        jettyDestinationPath = max(glob.glob(os.path.join(jetty_dist, 'jetty-distribution-*')))

        self.run([paths.cmd_ln, '-sf', jettyDestinationPath, self.jetty_home])
        self.run([paths.cmd_chmod, '-R', "755", "%s/bin/" % jettyDestinationPath])

        self.applyChangesInFiles(self.app_custom_changes['jetty'])

        self.run([paths.cmd_chown, '-R', 'jetty:jetty', jettyDestinationPath])
        self.run([paths.cmd_chown, '-h', 'jetty:jetty', self.jetty_home])

        self.run([paths.cmd_mkdir, '-p', self.jetty_base])
        self.run([paths.cmd_chown, '-R', 'jetty:jetty', self.jetty_base])

        jettyRunFolder = '/var/run/jetty'
        self.run([paths.cmd_mkdir, '-p', jettyRunFolder])
        self.run([paths.cmd_chmod, '-R', '775', jettyRunFolder])
        self.run([paths.cmd_chgrp, '-R', 'jetty', jettyRunFolder])

        self.run(['rm', '-rf', '/opt/jetty/bin/jetty.sh'])
        self.copyFile("%s/system/initd/jetty.sh" % Config.staticFolder, "%s/bin/jetty.sh" % self.jetty_home)
        self.run([paths.cmd_chown, '-R', 'jetty:jetty', "%s/bin/jetty.sh" % self.jetty_home])
        self.run([paths.cmd_chmod, '-R', '755', "%s/bin/jetty.sh" % self.jetty_home])

    def get_jetty_info(self):
        jetty_archive_list = glob.glob(os.path.join(Config.distAppFolder, 'jetty-distribution-*.tar.gz'))

        if not jetty_archive_list:
            self.logIt("Jetty archive not found in {}. Exiting...".format(Config.distAppFolder), True, True)

        jettyArchive = max(jetty_archive_list)

        jettyArchive_fn = os.path.basename(jettyArchive)
        jetty_regex = re.search(r'jetty-distribution-(\d*\.\d*)', jettyArchive_fn)

        if not jetty_regex:
            self.logIt("Can't determine Jetty version", True, True)

        jetty_dist = '/opt/jetty-' + jetty_regex.groups()[0]
        Config.templateRenderingDict['jetty_dist'] = jetty_dist

        return jettyArchive, jetty_dist


    def installJettyService(self, serviceConfiguration, supportCustomizations=False, supportOnlyPageCustomizations=False):
        serviceName = serviceConfiguration['name']

        self.logIt("Installing jetty service %s..." % serviceName)

        jettyServiceBase = '%s/%s' % (self.jetty_base, serviceName)
        jettyModules = serviceConfiguration['jetty']['modules']
        jettyModulesList = jettyModules.split(',')
        
        if base.snap:
            Config.templateRenderingDict['jetty_dist'] = self.jetty_base
        else:
            # we need this, because this method may be called externally
            jettyArchive, jetty_dist = self.get_jetty_info()

        self.logIt("Preparing %s service base folders" % serviceName)
        self.run([paths.cmd_mkdir, '-p', jettyServiceBase])

        # Create ./lib/ext folder for custom libraries, but only if the Jetty "ext" module is installed
        if "ext" in jettyModulesList:
            self.run([paths.cmd_mkdir, '-p', "%s/lib/ext" % jettyServiceBase])

        # Create ./custom/pages and ./custom/static folders for custom pages and static resources, only if application supports them
        if supportCustomizations:
            if not os.path.exists("%s/custom" % jettyServiceBase):
                self.run([paths.cmd_mkdir, '-p', "%s/custom" % jettyServiceBase])
            self.run([paths.cmd_mkdir, '-p', "%s/custom/pages" % jettyServiceBase])

            if not supportOnlyPageCustomizations:
                self.run([paths.cmd_mkdir, '-p', "%s/custom/i18n" % jettyServiceBase])
                self.run([paths.cmd_mkdir, '-p', "%s/custom/static" % jettyServiceBase])
                self.run([paths.cmd_mkdir, '-p', "%s/custom/libs" % jettyServiceBase])

        self.logIt("Preparing %s service base configuration" % serviceName)
        jettyEnv = os.environ.copy()
        jettyEnv['PATH'] = '%s/bin:' % Config.jre_home + jettyEnv['PATH']

        self.run([Config.cmd_java, '-jar', '%s/start.jar' % self.jetty_home, 'jetty.home=%s' % self.jetty_home, 'jetty.base=%s' % jettyServiceBase, '--add-to-start=%s' % jettyModules], None, jettyEnv)
        self.run([paths.cmd_chown, '-R', 'jetty:jetty', jettyServiceBase])

        # make variables of this class accessible from Config
        self.update_rendering_dict()

        try:
            self.renderTemplateInOut(serviceName, '%s/jetty' % Config.templateFolder, '%s/jetty' % Config.outputFolder)
        except:
            self.logIt("Error rendering service '%s' defaults" % serviceName, True)

        jettyServiceConfiguration = '%s/jetty/%s' % (Config.outputFolder, serviceName)
        self.copyFile(jettyServiceConfiguration, Config.osDefault)
        self.run([paths.cmd_chown, 'root:root', os.path.join(Config.osDefault, serviceName)])

        # Render web resources file
        try:
            web_resources = '%s_web_resources.xml' % serviceName
            if os.path.exists('%s/jetty/%s' % (Config.templateFolder, web_resources)):
                self.renderTemplateInOut(web_resources, '%s/jetty' % Config.templateFolder, '%s/jetty' % Config.outputFolder)
                self.copyFile('%s/jetty/%s' % (Config.outputFolder, web_resources), "%s/%s/webapps" % (self.jetty_base, serviceName))
        except:
            self.logIt("Error rendering service '%s' web_resources.xml" % serviceName, True)

        # Render web context file
        try:
            web_context = '%s.xml' % serviceName
            if os.path.exists('%s/jetty/%s' % (Config.templateFolder, web_context)):
                self.renderTemplateInOut(web_context, '%s/jetty' % Config.templateFolder, '%s/jetty' % Config.outputFolder)
                self.copyFile('%s/jetty/%s' % (Config.outputFolder, web_context), "%s/%s/webapps" % (self.jetty_base, serviceName))
        except:
            self.logIt("Error rendering service '%s' context xml" % serviceName, True)

        initscript_fn = os.path.join(self.jetty_home, 'bin/jetty.sh')
        self.fix_init_scripts(serviceName, initscript_fn)

        if not base.snap:
            tmpfiles_base = '/usr/lib/tmpfiles.d'
            if Config.os_initdaemon == 'systemd' and os.path.exists(tmpfiles_base):
                self.logIt("Creating 'jetty.conf' tmpfiles daemon file")
                jetty_tmpfiles_src = '%s/jetty.conf.tmpfiles.d' % Config.templateFolder
                jetty_tmpfiles_dst = '%s/jetty.conf' % tmpfiles_base
                self.copyFile(jetty_tmpfiles_src, jetty_tmpfiles_dst)
                self.run([paths.cmd_chown, 'root:root', jetty_tmpfiles_dst])
                self.run([paths.cmd_chmod, '644', jetty_tmpfiles_dst])
            
            self.copyFile(os.path.join(self.jetty_home, 'bin/jetty.sh'), os.path.join(Config.distAppFolder, serviceName))

        serviceConfiguration['installed'] = True

        # don't send the server version header
        self.set_jetty_param(serviceName, 'jetty.httpConfig.sendServerVersion', 'false')

        if base.snap:
            run_dir = os.path.join(jettyServiceBase, 'run')
            if not os.path.exists(run_dir):
                self.run([paths.cmd_mkdir, '-p', run_dir])

    def set_jetty_param(self, jettyServiceName, jetty_param, jetty_val):

        self.logIt("Seeting jetty parameter {0}={1} for service {2}".format(jetty_param, jetty_val, jettyServiceName))

        service_fn = os.path.join(self.jetty_base, jettyServiceName, 'start.ini')
        start_ini = self.readFile(service_fn)
        start_ini_list = start_ini.splitlines()
        param_ln = jetty_param + '=' + jetty_val

        for i, l in enumerate(start_ini_list[:]):
            if jetty_param in l and l[0]=='#':
                start_ini_list[i] = param_ln 
                break
            elif l.strip().startswith(jetty_param):
                start_ini_list[i] = param_ln
                break
        else:
            start_ini_list.append(param_ln)

        self.writeFile(service_fn, '\n'.join(start_ini_list))

    def calculate_aplications_memory(self, application_max_ram, jetty_app_configuration, installedComponents):
        self.logIt("Calculating memory setting for applications")
        allowedApplicationsMemory = {}
        application_max_ram = int(application_max_ram)
        application_max_ram -= len(installedComponents) * 128

        retVal = True
        usedRatio = 0.001
        for installedComponent in installedComponents:
            usedRatio += installedComponent['memory']['ratio']

        ratioMultiplier = 1.0 + (1.0 - usedRatio)/usedRatio

        for installedComponent in installedComponents:
            allowedRatio = installedComponent['memory']['ratio'] * ratioMultiplier
            allowedMemory = int(round(allowedRatio * int(application_max_ram)))

            if allowedMemory > installedComponent['memory']['max_allowed_mb']:
                allowedMemory = installedComponent['memory']['max_allowed_mb']

            allowedApplicationsMemory[installedComponent['name']] = allowedMemory

        # Iterate through all components in order to prepare all keys
        for applicationName, applicationConfiguration in jetty_app_configuration.items():
            if applicationName in allowedApplicationsMemory:
                applicationMemory = allowedApplicationsMemory.get(applicationName)
            else:
                # We use this dummy value so the template renders properly for applications that are not installed
                applicationMemory = 256

            Config.templateRenderingDict["%s_max_mem" % applicationName] = applicationMemory

            if 'jvm_heap_ration' in applicationConfiguration['memory']:
                jvmHeapRation = applicationConfiguration['memory']['jvm_heap_ration']

                minHeapMem = 256
                maxHeapMem = int(applicationMemory * jvmHeapRation)
                if maxHeapMem < minHeapMem:
                    minHeapMem = maxHeapMem

                Config.templateRenderingDict["%s_max_heap_mem" % applicationName] = maxHeapMem
                Config.templateRenderingDict["%s_min_heap_mem" % applicationName] = minHeapMem

                if maxHeapMem < 256 and applicationName in allowedApplicationsMemory:    
                    retVal = False

        return retVal

    def write_webapps_xml(self, jans_app_path=None, jans_apps=None):
        if not jans_app_path:
            jans_app_path = '/'+self.service_name
        if not jans_apps:
            jans_apps = self.service_name+'.war'

        web_apps_xml_fn = os.path.join(Config.templateFolder, 'jans-app.xml')
        web_apps_xml = self.readFile(web_apps_xml_fn)
        web_apps_xml = self.fomatWithDict(web_apps_xml, {'jans_app_path': jans_app_path, 'jans_apps': jans_apps})
        out_filename = os.path.join(self.jetty_base, self.service_name, 'webapps', self.service_name+'.xml')
        self.writeFile(out_filename, web_apps_xml)

    def calculate_selected_aplications_memory(self):
        Config.pbar.progress("jans", "Calculating application memory")

        installedComponents = []

        # Jetty apps
        for config_var, service in [('installOxAuth', 'jans-auth'),
                                    ('installScimServer', 'jans-scim'),
                                    ('installFido2', 'jans-fido2'),
                                    ('installConfigApi', 'jans-config-api'),
                                    ('installEleven', 'jans-eleven')]:

            if Config.get(config_var):
                installedComponents.append(self.jetty_app_configuration[service])

        return self.calculate_aplications_memory(Config.application_max_ram, self.jetty_app_configuration, installedComponents)

    def create_indexes(self):
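        # Create indexes for every table: for Spanner the table list comes from
        # the live database, otherwise from the SQLAlchemy automapped classes.
        # Columns listed under the table name or '__common__' in
        # <rdbm_type>_index.json get a plain index, JSON columns get the
        # expression indexes defined under '__common__'/'JSON', and entries
        # under 'custom' are created verbatim.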

        indexes = []

        sql_indexes_fn = os.path.join(Config.static_rdbm_dir,
                                      Config.rdbm_type + '_index.json')
        sql_indexes = base.readJsonFile(sql_indexes_fn)

        if Config.rdbm_type == 'spanner':
            tables = self.dbUtils.spanner.get_tables()
            for tblCls in tables:
                tbl_fields = sql_indexes.get(tblCls, {}).get(
                    'fields', []) + sql_indexes['__common__']['fields']

                tbl_data = self.dbUtils.spanner.exec_sql(
                    'SELECT * FROM {} LIMIT 1'.format(tblCls))

                for attr in tbl_data.get('fields', []):
                    if attr['name'] == 'doc_id':
                        continue
                    attr_name = attr['name']
                    ind_name = self.get_index_name(attr['name'])
                    data_type = attr['type']

                    if data_type == 'ARRAY':
                        #TODO: How to index for ARRAY types in spanner?
                        pass

                    elif attr_name in tbl_fields:
                        sql_cmd = 'CREATE INDEX `{1}_{0}Idx` ON `{1}` (`{2}`)'.format(
                            ind_name, tblCls, attr_name)
                        self.dbUtils.spanner.create_table(sql_cmd)

                for i, custom_index in enumerate(
                        sql_indexes.get(tblCls, {}).get('custom', [])):
                    sql_cmd = 'CREATE INDEX `{0}_CustomIdx{1}` ON {0} ({2})'.format(
                        tblCls, i + 1, custom_index)
                    self.dbUtils.spanner.create_table(sql_cmd)

        else:
            for tblCls in self.dbUtils.Base.classes.keys():
                tblObj = self.dbUtils.Base.classes[tblCls]()
                tbl_fields = sql_indexes.get(tblCls, {}).get(
                    'fields', []) + sql_indexes['__common__']['fields']

                for attr in tblObj.__table__.columns:
                    if attr.name == 'doc_id':
                        continue
                    ind_name = self.get_index_name(attr.name)
                    data_type = self.get_sql_col_type(attr, tblCls)
                    data_type = data_type.replace('VARCHAR', 'CHAR')

                    if isinstance(attr.type,
                                  self.dbUtils.json_dialects_instance):

                        if attr.name in tbl_fields:
                            for i, ind_str in enumerate(
                                    sql_indexes['__common__']['JSON']):
                                tmp_str = Template(ind_str)
                                if Config.rdbm_type == 'mysql':
                                    sql_cmd = 'ALTER TABLE {0}.{1} ADD INDEX `{2}_json_{3}`(({4}));'.format(
                                        Config.rdbm_db, tblCls, ind_name,
                                        i + 1,
                                        tmp_str.safe_substitute({
                                            'field':
                                            attr.name,
                                            'data_type':
                                            data_type
                                        }))
                                    self.dbUtils.exec_rdbm_query(sql_cmd)
                                elif Config.rdbm_type == 'pgsql':
                                    sql_cmd = 'CREATE INDEX ON "{}" (({}));'.format(
                                        tblCls,
                                        tmp_str.safe_substitute({
                                            'field':
                                            attr.name,
                                            'data_type':
                                            data_type
                                        }))
                                    self.dbUtils.exec_rdbm_query(sql_cmd)

                    elif attr.name in tbl_fields:
                        if Config.rdbm_type == 'mysql':
                            sql_cmd = 'ALTER TABLE {0}.{1} ADD INDEX `{1}_{2}` (`{3}`);'.format(
                                Config.rdbm_db, tblCls, ind_name, attr.name)
                            self.dbUtils.exec_rdbm_query(sql_cmd)
                        elif Config.rdbm_type == 'pgsql':
                            sql_cmd = 'CREATE INDEX ON "{}" ("{}");'.format(
                                tblCls, attr.name)
                            self.dbUtils.exec_rdbm_query(sql_cmd)

                for i, custom_index in enumerate(
                        sql_indexes.get(tblCls, {}).get('custom', [])):
                    if Config.rdbm_type == 'mysql':
                        sql_cmd = 'ALTER TABLE {0}.{1} ADD INDEX `{2}` ({3});'.format(
                            Config.rdbm_db, tblCls,
                            '{}_CustomIdx{}'.format(tblCls,
                                                    i + 1), custom_index)
                        self.dbUtils.exec_rdbm_query(sql_cmd)
                    elif Config.rdbm_type == 'pgsql':
                        sql_cmd = 'CREATE INDEX ON "{}" ("{}");'.format(
                            tblCls, custom_index)
                        self.dbUtils.exec_rdbm_query(sql_cmd)

    def create_tables(self, jans_schema_files):
        self.logIt("Creating tables for {}".format(jans_schema_files))
        qchar = '`' if Config.rdbm_type in ('mysql', 'spanner') else '"'
        tables = []
        all_schema = {}
        all_attribs = {}
        column_add = 'COLUMN ' if Config.rdbm_type == 'spanner' else ''
        alter_table_sql_cmd = 'ALTER TABLE %s{}%s ADD %s{};' % (qchar, qchar,
                                                                column_add)

        for jans_schema_fn in jans_schema_files:
            jans_schema = base.readJsonFile(jans_schema_fn)
            for obj in jans_schema['objectClasses']:
                all_schema[obj['names'][0]] = obj
            for attr in jans_schema['attributeTypes']:
                all_attribs[attr['names'][0]] = attr

        subtable_attrs = {}
        for stbl in self.dbUtils.sub_tables.get(Config.rdbm_type):
            subtable_attrs[stbl] = [
                scol[0]
                for scol in self.dbUtils.sub_tables[Config.rdbm_type][stbl]
            ]

        for obj_name in all_schema:
            obj = all_schema[obj_name]

            if obj.get('sql', {}).get('ignore'):
                continue

            sql_tbl_name = obj['names'][0]
            sql_tbl_cols = []

            attr_list = obj['may']
            if 'sql' in obj:
                attr_list += obj['sql'].get('include', [])
                if 'includeObjectClass' in obj['sql']:
                    for incobjcls in obj['sql']['includeObjectClass']:
                        attr_list += all_schema[incobjcls]['may']

            for s in obj['sup']:
                if s == 'top':
                    continue
                attr_list += all_schema[s]['may']

            cols_ = []
            for attrname in attr_list:
                if attrname in cols_:
                    continue

                if attrname in subtable_attrs.get(sql_tbl_name, []):
                    continue

                cols_.append(attrname)
                data_type = self.get_sql_col_type(attrname, sql_tbl_name)

                col_def = '{0}{1}{0} {2}'.format(qchar, attrname, data_type)
                sql_tbl_cols.append(col_def)

            if not self.dbUtils.table_exists(sql_tbl_name):
                doc_id_type = self.get_sql_col_type('doc_id', sql_tbl_name)
                if Config.rdbm_type == 'pgsql':
                    sql_cmd = 'CREATE TABLE "{}" (doc_id {} NOT NULL UNIQUE, "objectClass" VARCHAR(48), dn VARCHAR(128), {}, PRIMARY KEY (doc_id));'.format(
                        sql_tbl_name, doc_id_type, ', '.join(sql_tbl_cols))
                elif Config.rdbm_type == 'spanner':
                    sql_cmd = 'CREATE TABLE `{}` (`doc_id` {} NOT NULL, `objectClass` STRING(48), dn STRING(128), {}) PRIMARY KEY (`doc_id`)'.format(
                        sql_tbl_name, doc_id_type, ', '.join(sql_tbl_cols))
                else:
                    sql_cmd = 'CREATE TABLE `{}` (`doc_id` {} NOT NULL UNIQUE, `objectClass` VARCHAR(48), dn VARCHAR(128), {}, PRIMARY KEY (`doc_id`));'.format(
                        sql_tbl_name, doc_id_type, ', '.join(sql_tbl_cols))
                self.dbUtils.exec_rdbm_query(sql_cmd)
                tables.append(sql_cmd)

        for attrname in all_attribs:
            attr = all_attribs[attrname]
            if attr.get('sql', {}).get('add_table'):
                data_type = self.get_sql_col_type(attrname, sql_tbl_name)
                col_def = '{0}{1}{0} {2}'.format(qchar, attrname, data_type)
                sql_cmd = alter_table_sql_cmd.format(attr['sql']['add_table'],
                                                     col_def)

                if Config.rdbm_type == 'spanner':
                    req = self.dbUtils.spanner.create_table(sql_cmd.strip(';'))
                else:
                    self.dbUtils.exec_rdbm_query(sql_cmd)
                tables.append(sql_cmd)

        self.writeFile(os.path.join(self.output_dir, 'jans_tables.sql'),
                       '\n'.join(tables))

    def load_test_data(self):
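        # Load the jans-auth / SCIM test data: render the test property files
        # and test schema for the active backend, import the test LDIF files,
        # deploy the test client keys and apply the oxAuthConfDynamic test
        # settings.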
        self.logIt("Re-binding database")
        self.dbUtils.bind(force=True)

        self.logIt("Checking Internet conncetion")
        socket.setdefaulttimeout(3)
        try:
            socket.socket(socket.AF_INET, socket.SOCK_STREAM).connect(
                ("8.8.8.8", 443))
        except:
            self.logIt("Failed to connect 8.8.8.8:443.", True)
            print("Test data loader needs internet connection. Giving up ...")
            return

        if not base.current_app.ScimInstaller.installed():
            self.logIt("Scim was not installed. Installing")
            Config.installScimServer = True
            base.current_app.ScimInstaller.start_installation()

        self.encode_test_passwords()

        self.logIt("Rendering test templates")

        if Config.rdbm_type == 'spanner':
            Config.rdbm_password_enc = ''

        Config.templateRenderingDict[
            'config_oxauth_test_ldap'] = '# Not available'
        Config.templateRenderingDict[
            'config_oxauth_test_couchbase'] = '# Not available'

        config_oxauth_test_properties = self.fomatWithDict(
            'server.name=%(hostname)s\nconfig.oxauth.issuer=http://localhost:80\nconfig.oxauth.contextPath=http://localhost:80\nconfig.oxauth.salt=%(encode_salt)s\nconfig.persistence.type=%(persistence_type)s\n\n',
            self.merge_dicts(Config.__dict__, Config.templateRenderingDict))

        if self.getMappingType('ldap'):
            template_text = self.readFile(
                os.path.join(
                    self.template_base,
                    'jans-auth/server/config-oxauth-test-ldap.properties.nrnd')
            )
            rendered_text = self.fomatWithDict(
                template_text,
                self.merge_dicts(Config.__dict__,
                                 Config.templateRenderingDict))
            config_oxauth_test_properties += '#ldap\n' + rendered_text

        if self.getMappingType('couchbase'):
            couchbaseDict = base.current_app.CouchbaseInstaller.couchbaseDict()
            template_text = self.readFile(
                os.path.join(
                    self.template_base,
                    'jans-auth/server/config-oxauth-test-couchbase.properties.nrnd'
                ))
            rendered_text = self.fomatWithDict(
                template_text,
                self.merge_dicts(Config.__dict__, Config.templateRenderingDict,
                                 couchbaseDict))
            config_oxauth_test_properties += '\n#couchbase\n' + rendered_text

        if self.getMappingType('rdbm'):
            base.current_app.RDBMInstaller.server_time_zone()
            template_text = self.readFile(
                os.path.join(
                    self.template_base,
                    'jans-auth/server/config-oxauth-test-sql.properties.nrnd'))
            rendered_text = self.fomatWithDict(
                template_text,
                self.merge_dicts(Config.__dict__,
                                 Config.templateRenderingDict))
            config_oxauth_test_properties += '\n#sql\n' + rendered_text

            self.logIt("Adding custom attributs and indexes")

            schema2json(
                os.path.join(Config.templateFolder,
                             'test/jans-auth/schema/102-oxauth_test.ldif'),
                os.path.join(Config.outputFolder, 'test/jans-auth/schema/'))
            schema2json(
                os.path.join(Config.templateFolder,
                             'test/scim-client/schema/103-scim_test.ldif'),
                os.path.join(Config.outputFolder, 'test/scim-client/schema/'),
            )

            oxauth_json_schema_fn = os.path.join(
                Config.outputFolder,
                'test/jans-auth/schema/102-oxauth_test.json')
            scim_json_schema_fn = os.path.join(
                Config.outputFolder,
                'test/scim-client/schema/103-scim_test.json')
            jans_schema_json_files = [
                oxauth_json_schema_fn, scim_json_schema_fn
            ]

            scim_schema = base.readJsonFile(scim_json_schema_fn)
            may_list = []

            for attribute in scim_schema['attributeTypes']:
                may_list += attribute['names']

            jansPerson = {
                'kind': 'STRUCTURAL',
                'may': may_list,
                'must': ['objectclass'],
                'names': ['jansPerson'],
                'oid': 'jansObjClass',
                'sup': ['top'],
                'x_origin': 'Jans created objectclass'
            }
            scim_schema['objectClasses'].append(jansPerson)

            with open(scim_json_schema_fn, 'w') as w:
                json.dump(scim_schema, w, indent=2)

            self.dbUtils.read_jans_schema(others=jans_schema_json_files)

            base.current_app.RDBMInstaller.create_tables(
                jans_schema_json_files)
            if Config.rdbm_type != 'spanner':
                self.dbUtils.rdm_automapper()

        self.writeFile(
            os.path.join(
                Config.outputFolder,
                'test/jans-auth/server/config-oxauth-test.properties'),
            config_oxauth_test_properties)

        self.render_templates_folder(self.template_base)

        self.logIt("Loading test ldif files")

        ox_auth_test_ldif = os.path.join(
            Config.outputFolder, 'test/jans-auth/data/oxauth-test-data.ldif')
        ox_auth_test_user_ldif = os.path.join(
            Config.outputFolder,
            'test/jans-auth/data/oxauth-test-data-user.ldif')

        scim_test_ldif = os.path.join(
            Config.outputFolder, 'test/scim-client/data/scim-test-data.ldif')
        scim_test_user_ldif = os.path.join(
            Config.outputFolder,
            'test/scim-client/data/scim-test-data-user.ldif')

        ldif_files = (ox_auth_test_ldif, scim_test_ldif,
                      ox_auth_test_user_ldif, scim_test_user_ldif)
        self.dbUtils.import_ldif(ldif_files)

        apache_user = '******' if base.clone_type == 'deb' else 'apache'

        # Client keys deployment
        base.download(
            'https://raw.githubusercontent.com/JanssenProject/jans-auth-server/master/client/src/test/resources/jans_test_client_keys.zip',
            '/var/www/html/jans_test_client_keys.zip')
        self.run([
            paths.cmd_unzip, '-o', '/var/www/html/jans_test_client_keys.zip',
            '-d', '/var/www/html/'
        ])
        self.run([paths.cmd_rm, '-rf', 'jans_test_client_keys.zip'])
        self.run([
            paths.cmd_chown, '-R', 'root:' + apache_user,
            '/var/www/html/jans-auth-client'
        ])

        oxAuthConfDynamic_changes = {
            'dynamicRegistrationCustomObjectClass':
            'jansClntCustomAttributes',
            'dynamicRegistrationCustomAttributes': [
                "jansTrustedClnt", "myCustomAttr1", "myCustomAttr2",
                "jansInclClaimsInIdTkn"
            ],
            'dynamicRegistrationExpirationTime':
            86400,
            'dynamicGrantTypeDefault': [
                "authorization_code", "implicit", "password",
                "client_credentials", "refresh_token",
                "urn:ietf:params:oauth:grant-type:uma-ticket",
                "urn:openid:params:grant-type:ciba",
                "urn:ietf:params:oauth:grant-type:device_code"
            ],
            'legacyIdTokenClaims':
            True,
            'authenticationFiltersEnabled':
            True,
            'clientAuthenticationFiltersEnabled':
            True,
            'keyRegenerationEnabled':
            True,
            'openidScopeBackwardCompatibility':
            False,
            'forceOfflineAccessScopeToEnableRefreshToken':
            False,
            'dynamicRegistrationPasswordGrantTypeEnabled':
            True,
            'cibaEnabled':
            True,
            'backchannelAuthenticationRequestSigningAlgValuesSupported': [
                "RS256", "RS384", "RS512", "ES256", "ES384", "ES512", "PS256",
                "PS384", "PS512"
            ],
            'backchannelClientId':
            '123-123-123',
            'backchannelUserCodeParameterSupported':
            True,
            'tokenEndpointAuthSigningAlgValuesSupported': [
                'HS256', 'HS384', 'HS512', 'RS256', 'RS384', 'RS512', 'ES256',
                'ES384', 'ES512', 'PS256', 'PS384', 'PS512'
            ],
            'userInfoSigningAlgValuesSupported': [
                'none', 'HS256', 'HS384', 'HS512', 'RS256', 'RS384', 'RS512',
                'ES256', 'ES384', 'ES512', 'PS256', 'PS384', 'PS512'
            ],
            'consentGatheringScriptBackwardCompatibility':
            False,
            'claimsParameterSupported':
            True,
            'grantTypesSupported': [
                'urn:openid:params:grant-type:ciba', 'authorization_code',
                'urn:ietf:params:oauth:grant-type:uma-ticket',
                'urn:ietf:params:oauth:grant-type:device_code',
                'client_credentials', 'implicit', 'refresh_token', 'password'
            ],
            'idTokenSigningAlgValuesSupported': [
                'none', 'HS256', 'HS384', 'HS512', 'RS256', 'RS384', 'RS512',
                'ES256', 'ES384', 'ES512', 'PS256', 'PS384', 'PS512'
            ],
            'requestObjectSigningAlgValuesSupported': [
                'none', 'HS256', 'HS384', 'HS512', 'RS256', 'RS384', 'RS512',
                'ES256', 'ES384', 'ES512', 'PS256', 'PS384', 'PS512'
            ],
            'softwareStatementValidationClaimName':
            'jwks_uri',
            'softwareStatementValidationType':
            'jwks_uri',
            'umaGrantAccessIfNoPolicies':
            True,
            'rejectJwtWithNoneAlg':
            False,
            'removeRefreshTokensForClientOnLogout':
            True,
            'fapiCompatibility':
            False,
            'forceIdTokenHintPrecense':
            False,
            'introspectionScriptBackwardCompatibility':
            False,
            'spontaneousScopeLifetime':
            0,
            'tokenEndpointAuthMethodsSupported': [
                'client_secret_basic', 'client_secret_post',
                'client_secret_jwt', 'private_key_jwt', 'tls_client_auth',
                'self_signed_tls_client_auth', 'none'
            ],
            'sessionIdRequestParameterEnabled':
            True,
            'skipRefreshTokenDuringRefreshing':
            False,
            'enabledComponents': [
                'unknown', 'health_check', 'userinfo', 'clientinfo',
                'id_generation', 'registration', 'introspection',
                'revoke_token', 'revoke_session', 'end_session',
                'status_session', 'jans_configuration', 'ciba', 'uma', 'u2f',
                'device_authz', 'stat'
            ]
        }

        if Config.get('config_patch_creds'):
            data = None
            datajs = None
            patch_url = 'https://ox.gluu.org/protected/jans-auth/jans-auth-test-config-patch.json'
            req = urllib.request.Request(patch_url)
            credentials = Config.get('config_patch_creds')
            encoded_credentials = base64.b64encode(credentials.encode('ascii'))
            req.add_header('Authorization',
                           'Basic %s' % encoded_credentials.decode("ascii"))
            self.logIt("Retreiving auto test ciba patch from " + patch_url)

            try:
                resp = urllib.request.urlopen(req)
                data = resp.read()
                self.logIt("Auto test ciba patch retreived")
            except:
                self.logIt("Can't retreive auto test ciba patch", True)

            if data:
                try:
                    datajs = json.loads(data.decode())
                except:
                    self.logIt("Can't decode json for auto test ciba patch",
                               True)

            if datajs:
                oxAuthConfDynamic_changes.update(datajs)
                self.logIt(
                    "oxAuthConfDynamic was updated with auto test ciba patch")

        custom_scripts = ('2DAF-F995', '2DAF-F996', '4BBE-C6A8', 'A51E-76DA')

        self.dbUtils.set_oxAuthConfDynamic(oxAuthConfDynamic_changes)

        # Enable custom scripts
        for inum in custom_scripts:
            self.dbUtils.enable_script(inum)

        if self.dbUtils.moddb == static.BackendTypes.LDAP:
            # Update LDAP schema
            openDjSchemaFolder = os.path.join(Config.ldapBaseFolder,
                                              'config/schema/')
            self.copyFile(
                os.path.join(Config.outputFolder,
                             'test/jans-auth/schema/102-oxauth_test.ldif'),
                openDjSchemaFolder)
            self.copyFile(
                os.path.join(Config.outputFolder,
                             'test/scim-client/schema/103-scim_test.ldif'),
                openDjSchemaFolder)

            schema_fn = os.path.join(openDjSchemaFolder,
                                     '77-customAttributes.ldif')

            obcl_parser = myLdifParser(schema_fn)
            obcl_parser.parse()

            for i, o in enumerate(obcl_parser.entries[0][1]['objectClasses']):
                objcl = ObjectClass(o)
                if 'jansCustomPerson' in objcl.tokens['NAME']:
                    may_list = list(objcl.tokens['MAY'])
                    for a in ('scimCustomFirst', 'scimCustomSecond',
                              'scimCustomThird'):
                        if not a in may_list:
                            may_list.append(a)

                    objcl.tokens['MAY'] = tuple(may_list)
                    obcl_parser.entries[0][1]['objectClasses'][
                        i] = objcl.getstr()

            tmp_fn = '/tmp/77-customAttributes.ldif'
            with open(tmp_fn, 'wb') as w:
                ldif_writer = LDIFWriter(w)
                for dn, entry in obcl_parser.entries:
                    ldif_writer.unparse(dn, entry)

            self.copyFile(tmp_fn, openDjSchemaFolder)

            self.logIt("Making opndj listen all interfaces")
            ldap_operation_result = self.dbUtils.ldap_conn.modify(
                'cn=LDAPS Connection Handler,cn=Connection Handlers,cn=config',
                {'ds-cfg-listen-address': [ldap3.MODIFY_REPLACE, '0.0.0.0']})

            if not ldap_operation_result:
                self.logIt("Ldap modify operation failed {}".format(
                    str(self.ldap_conn.result)))
                self.logIt(
                    "Ldap modify operation failed {}".format(
                        str(self.ldap_conn.result)), True)

            self.dbUtils.ldap_conn.unbind()

            self.logIt("Re-starting opendj")
            self.restart('opendj')

            self.logIt("Re-binding opendj")
            # try 5 times to re-bind opendj
            for i in range(5):
                time.sleep(5)
                self.logIt("Try binding {} ...".format(i + 1))
                bind_result = self.dbUtils.ldap_conn.bind()
                if bind_result:
                    self.logIt("Binding to opendj was successful")
                    break
                self.logIt("Re-try in 5 seconds")
            else:
                self.logIt("Re-binding opendj FAILED")
                sys.exit("Re-binding opendj FAILED")

            for atr in ('myCustomAttr1', 'myCustomAttr2'):

                dn = 'ds-cfg-attribute={},cn=Index,ds-cfg-backend-id={},cn=Backends,cn=config'.format(
                    atr, 'userRoot')
                entry = {
                    'objectClass': ['top', 'ds-cfg-backend-index'],
                    'ds-cfg-attribute': [atr],
                    'ds-cfg-index-type': ['equality'],
                    'ds-cfg-index-entry-limit': ['4000']
                }
                self.logIt("Creating Index {}".format(dn))
                ldap_operation_result = self.dbUtils.ldap_conn.add(
                    dn, attributes=entry)
                if not ldap_operation_result:
                    self.logIt("Ldap modify operation failed {}".format(
                        str(self.dbUtils.ldap_conn.result)))
                    self.logIt(
                        "Ldap modify operation failed {}".format(
                            str(self.dbUtils.ldap_conn.result)), True)

        elif self.dbUtils.moddb in (static.BackendTypes.MYSQL,
                                    static.BackendTypes.PGSQL):
            pass

        elif self.dbUtils.moddb == static.BackendTypes.COUCHBASE:
            self.dbUtils.cbm.exec_query(
                'CREATE INDEX def_{0}_myCustomAttr1 ON `{0}`(myCustomAttr1) USING GSI WITH {{"defer_build":true}}'
                .format(Config.couchbase_bucket_prefix))
            self.dbUtils.cbm.exec_query(
                'CREATE INDEX def_{0}_myCustomAttr2 ON `{0}`(myCustomAttr2) USING GSI WITH {{"defer_build":true}}'
                .format(Config.couchbase_bucket_prefix))
            self.dbUtils.cbm.exec_query(
                'BUILD INDEX ON `{0}` (def_{0}_myCustomAttr1, def_{0}_myCustomAttr2)'
                .format(Config.couchbase_bucket_prefix))

        if self.dbUtils.moddb == static.BackendTypes.LDAP:
            self.dbUtils.ldap_conn.bind()

            result = self.dbUtils.search(
                'ou=configuration,o=jans',
                search_filter='(&(jansDbAuth=*)(objectClass=jansAppConf))',
                search_scope=ldap3.BASE)
            oxIDPAuthentication = json.loads(result['jansDbAuth'])
            oxIDPAuthentication['config']['servers'] = [
                '{0}:{1}'.format(Config.hostname, Config.ldaps_port)
            ]
            oxIDPAuthentication_js = json.dumps(oxIDPAuthentication, indent=2)
            self.dbUtils.set_configuration('jansDbAuth',
                                           oxIDPAuthentication_js)

        self.create_test_client_keystore()

        # Disable token binding module
        if base.os_name in ('ubuntu18', 'ubuntu20'):
            self.run(['a2dismod', 'mod_token_binding'])
            self.restart('apache2')

        self.restart('jans-auth')

        if Config.installEleven:
            eleven_tokens_package = os.path.join(
                Config.staticFolder, 'eleven/jans-eleven-tokens.tar.gz')
            target_dir = '/var/lib/softhsm/tokens/'
            if not os.path.exists(target_dir):
                os.makedirs(target_dir)
            self.run([
                paths.cmd_tar, '-zxf', eleven_tokens_package, '-C', target_dir
            ])
Example #13
    def update_backend(self):
        self.dbUtils.enable_service('gluuSamlEnabled')
        oxtrust_conf = base.readJsonFile(self.oxtrust_conf_fn)
        self.dbUtils.set_oxTrustConfApplication(oxtrust_conf)
Example #14
    def create_indexes(self):
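        # Like the RDBM index creation above, but also indexes every attribute
        # referenced by the Couchbase index definitions (cb_fields), except
        # objectClass.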

        indexes = []

        sql_indexes_fn = os.path.join(Config.static_rdbm_dir, 'sql_index.json')
        sql_indexes = base.readJsonFile(sql_indexes_fn)

        cb_indexes = base.readJsonFile(
            base.current_app.CouchbaseInstaller.couchbaseIndexJson)

        cb_fields = []

        for bucket in cb_indexes:
            bucket_indexes = cb_indexes[bucket]
            if 'attributes' in bucket_indexes:
                for atr_list in bucket_indexes['attributes']:
                    for field in atr_list:
                        if not field in cb_fields:
                            cb_fields.append(field)

            if 'static' in bucket_indexes:
                for atr_list in bucket_indexes['static']:
                    for field in atr_list[0]:
                        if not field in cb_fields and not '(' in field:
                            cb_fields.append(field)

        if 'objectClass' in cb_fields:
            cb_fields.remove('objectClass')

        for tblCls in self.dbUtils.Base.classes.keys():
            tblObj = self.dbUtils.Base.classes[tblCls]()
            tbl_fields = sql_indexes[Config.rdbm_type].get(tblCls, {}).get(
                'fields', []) + sql_indexes[
                    Config.rdbm_type]['__common__']['fields'] + cb_fields

            for attr in tblObj.__table__.columns:
                if attr.name == 'doc_id':
                    continue
                ind_name = re.sub(r'[^0-9a-zA-Z\s]+', '_', attr.name)
                data_type = self.get_sql_col_type(attr, tblCls)
                data_type = data_type.replace('VARCHAR', 'CHAR')

                if isinstance(attr.type, self.dbUtils.json_dialects_instance):

                    if attr.name in tbl_fields:
                        for i, ind_str in enumerate(sql_indexes[
                                Config.rdbm_type]['__common__']['JSON']):
                            tmp_str = Template(ind_str)
                            if Config.rdbm_type == 'mysql':
                                sql_cmd = 'ALTER TABLE {0}.{1} ADD INDEX `{2}_json_{3}`(({4}));'.format(
                                    Config.rdbm_db, tblCls, ind_name, i + 1,
                                    tmp_str.safe_substitute({
                                        'field':
                                        attr.name,
                                        'data_type':
                                        data_type
                                    }))
                            elif Config.rdbm_type == 'pgsql':
                                sql_cmd = 'CREATE INDEX ON "{}" (({}));'.format(
                                    tblCls,
                                    tmp_str.safe_substitute({
                                        'field':
                                        attr.name,
                                        'data_type':
                                        data_type
                                    }))
                            self.dbUtils.exec_rdbm_query(sql_cmd)

                elif attr.name in tbl_fields:
                    if Config.rdbm_type == 'mysql':
                        sql_cmd = 'ALTER TABLE {0}.{1} ADD INDEX `{1}_{2}` (`{3}`);'.format(
                            Config.rdbm_db, tblCls, ind_name, attr.name)
                    elif Config.rdbm_type == 'pgsql':
                        sql_cmd = 'CREATE INDEX ON "{}" ("{}");'.format(
                            tblCls, attr.name)

                    self.dbUtils.exec_rdbm_query(sql_cmd)

            for i, custom_index in enumerate(
                    sql_indexes[Config.rdbm_type]['__common__'].get(
                        tblCls, {}).get('custom', [])):
                if Config.rdbm_type == 'mysql':
                    sql_cmd = 'ALTER TABLE {0}.{1} ADD INDEX `{2}` (({3}));'.format(
                        Config.rdbm_db, tblCls, 'custom_{}'.format(i + 1),
                        custom_index)
                elif Config.rdbm_type == 'pgsql':
                    sql_cmd = 'CREATE INDEX ON "{}" ("{}");'.format(
                        tblCls, custom_index)

                self.dbUtils.exec_rdbm_query(sql_cmd)
Example #15
    def create_tables(self, jans_schema_files):
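        # Variant of create_tables without Spanner support: tables that already
        # exist are extended column-by-column with ALTER TABLE instead of being
        # skipped.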
        qchar = '`' if Config.rdbm_type == 'mysql' else '"'
        tables = []
        all_schema = {}
        all_attribs = {}
        alter_table_sql_cmd = 'ALTER TABLE %s{}%s ADD {};' % (qchar, qchar)

        for jans_schema_fn in jans_schema_files:
            jans_schema = base.readJsonFile(jans_schema_fn)
            for obj in jans_schema['objectClasses']:
                all_schema[obj['names'][0]] = obj
            for attr in jans_schema['attributeTypes']:
                all_attribs[attr['names'][0]] = attr

        for obj_name in all_schema:
            obj = all_schema[obj_name]

            if obj.get('sql', {}).get('ignore'):
                continue

            sql_tbl_name = obj['names'][0]
            sql_tbl_cols = []

            attr_list = obj['may']
            if 'sql' in obj:
                attr_list += obj['sql'].get('include', [])
                if 'includeObjectClass' in obj['sql']:
                    for incobjcls in obj['sql']['includeObjectClass']:
                        attr_list += all_schema[incobjcls]['may']
            cols_ = []
            for attrname in attr_list:
                if attrname in cols_:
                    continue

                cols_.append(attrname)
                data_type = self.get_sql_col_type(attrname, sql_tbl_name)

                col_def = '{0}{1}{0} {2}'.format(qchar, attrname, data_type)
                sql_tbl_cols.append(col_def)

            if self.dbUtils.table_exists(sql_tbl_name):
                for tbl_col in sql_tbl_cols:
                    sql_cmd = alter_table_sql_cmd.format(sql_tbl_name, tbl_col)
                    self.dbUtils.exec_rdbm_query(sql_cmd)
                    tables.append(sql_cmd)
            else:
                doc_id_type = self.get_sql_col_type('doc_id', sql_tbl_name)
                if Config.rdbm_type == 'pgsql':
                    sql_cmd = 'CREATE TABLE "{}" (doc_id {} NOT NULL UNIQUE, "objectClass" VARCHAR(48), dn VARCHAR(128), {}, PRIMARY KEY  (doc_id));'.format(
                        sql_tbl_name, doc_id_type, ', '.join(sql_tbl_cols))
                else:
                    sql_cmd = 'CREATE TABLE `{}` (`doc_id` {} NOT NULL UNIQUE, `objectClass` VARCHAR(48), dn VARCHAR(128), {}, PRIMARY KEY  (`doc_id`));'.format(
                        sql_tbl_name, doc_id_type, ', '.join(sql_tbl_cols))
                self.dbUtils.exec_rdbm_query(sql_cmd)
                tables.append(sql_cmd)

        for attrname in all_attribs:
            attr = all_attribs[attrname]
            if attr.get('sql', {}).get('add_table'):
                data_type = self.get_sql_col_type(attrname, sql_tbl_name)
                col_def = '{0}{1}{0} {2}'.format(qchar, attrname, data_type)
                sql_cmd = alter_table_sql_cmd.format(attr['sql']['add_table'],
                                                     col_def)
                self.dbUtils.exec_rdbm_query(sql_cmd)
                tables.append(sql_cmd)

        self.writeFile(os.path.join(self.output_dir, 'jans_tables.sql'),
                       '\n'.join(tables))