def test_rule_engine_2309(self):
    """Verify acSetNumThreads can read the $oprType session variable (issue #2309).

    Adds an oprType mapping to core.dvm, then for both iput (oprType 1) and
    iget (oprType 2) installs a logging acSetNumThreads rule and asserts the
    expected oprType value appears in the server log exactly once, with no
    RE_UNABLE_TO_READ_SESSION_VAR errors.
    """
    corefile = lib.get_core_re_dir() + "/core.re"
    coredvm = lib.get_core_re_dir() + "/core.dvm"
    with lib.file_backed_up(coredvm):
        # expose rei->doinp->oprType to the rule engine as $oprType
        lib.prepend_string_to_file('oprType||rei->doinp->oprType\n', coredvm)
        with lib.file_backed_up(corefile):
            initial_size_of_server_log = lib.get_log_size('server')
            rules_to_prepend = '''
acSetNumThreads() {
    writeLine("serverLog","test_rule_engine_2309: put: acSetNumThreads oprType [$oprType]");
}
'''
            time.sleep(1)  # remove once file hash fix is committed #2279
            lib.prepend_string_to_file(rules_to_prepend, corefile)
            time.sleep(1)  # remove once file hash fix is committed #2279
            # file must be large enough that acSetNumThreads fires (parallel transfer)
            trigger_file = 'file_to_trigger_acSetNumThreads'
            lib.make_file(trigger_file, 4 * pow(10, 7))
            self.admin.assert_icommand('iput {0}'.format(trigger_file))
            # oprType 1 == PUT_OPR
            assert 1 == lib.count_occurrences_of_string_in_log('server', 'writeLine: inString = test_rule_engine_2309: put: acSetNumThreads oprType [1]', start_index=initial_size_of_server_log)
            assert 0 == lib.count_occurrences_of_string_in_log('server', 'RE_UNABLE_TO_READ_SESSION_VAR', start_index=initial_size_of_server_log)
            os.unlink(trigger_file)

        with lib.file_backed_up(corefile):
            initial_size_of_server_log = lib.get_log_size('server')
            rules_to_prepend = '''
acSetNumThreads() {
    writeLine("serverLog","test_rule_engine_2309: get: acSetNumThreads oprType [$oprType]");
}
'''
            time.sleep(1)  # remove once file hash fix is committed #2279
            lib.prepend_string_to_file(rules_to_prepend, corefile)
            time.sleep(1)  # remove once file hash fix is committed #2279
            # iget re-downloads the object stored by the iput above
            self.admin.assert_icommand('iget {0}'.format(trigger_file), use_unsafe_shell=True)
            # oprType 2 == GET_OPR
            assert 1 == lib.count_occurrences_of_string_in_log('server', 'writeLine: inString = test_rule_engine_2309: get: acSetNumThreads oprType [2]', start_index=initial_size_of_server_log)
            assert 0 == lib.count_occurrences_of_string_in_log('server', 'RE_UNABLE_TO_READ_SESSION_VAR', start_index=initial_size_of_server_log)
            os.unlink(trigger_file)
def test_authentication_PAM_with_server_params(self):
    """PAM authentication over SSL with pam_* options set in server_config.json.

    Generates throwaway SSL material, points both the service account and the
    client session at it, forces CS_NEG_REQUIRE, sets PAM password options,
    restarts the server, and verifies the PAM user can iinit and list files.
    NOTE(review): this file contains a second definition of this method; in
    Python the later definition wins — confirm which copy is intended.
    """
    lib.run_command('openssl genrsa -out server.key')
    lib.run_command('openssl req -batch -new -key server.key -out server.csr')
    lib.run_command('openssl req -batch -new -x509 -key server.key -out chain.pem -days 365')
    # 1024-bit DH params: normally 2048, but smaller here for speed.
    # (Was 100, which is below OpenSSL's minimum and yields unusable params.)
    lib.run_command('openssl dhparam -2 -out dhparams.pem 1024')
    service_account_environment_file_path = os.path.expanduser('~/.irods/irods_environment.json')
    with lib.file_backed_up(service_account_environment_file_path):
        server_update = {
            'irods_ssl_certificate_chain_file': os.path.join(lib.get_irods_top_level_dir(), 'tests/pydevtest/chain.pem'),
            'irods_ssl_certificate_key_file': os.path.join(lib.get_irods_top_level_dir(), 'tests/pydevtest/server.key'),
            'irods_ssl_dh_params_file': os.path.join(lib.get_irods_top_level_dir(), 'tests/pydevtest/dhparams.pem'),
            'irods_ssl_verify_server': 'none',
        }
        lib.update_json_file_from_dict(service_account_environment_file_path, server_update)
        client_update = {
            'irods_ssl_certificate_chain_file': os.path.join(lib.get_irods_top_level_dir(), 'tests/pydevtest/chain.pem'),
            'irods_ssl_certificate_key_file': os.path.join(lib.get_irods_top_level_dir(), 'tests/pydevtest/server.key'),
            'irods_ssl_dh_params_file': os.path.join(lib.get_irods_top_level_dir(), 'tests/pydevtest/dhparams.pem'),
            'irods_ssl_verify_server': 'none',
            'irods_authentication_scheme': 'PaM',  # deliberately odd case: scheme matching is case-insensitive
            'irods_client_server_policy': 'CS_NEG_REQUIRE',
        }
        auth_session_env_backup = copy.deepcopy(self.auth_session.environment_file_contents)
        self.auth_session.environment_file_contents.update(client_update)
        server_config_filename = lib.get_irods_config_dir() + '/server_config.json'
        with lib.file_backed_up(server_config_filename):
            server_config_update = {
                'pam_password_length': 20,
                'pam_no_extend': False,
                'pam_password_min_time': 121,
                'pam_password_max_time': 1209600,
            }
            lib.update_json_file_from_dict(server_config_filename, server_config_update)
            lib.restart_irods_server()

            # the test
            self.auth_session.assert_icommand(['iinit', self.auth_session.password])
            self.auth_session.assert_icommand("icd")
            self.auth_session.assert_icommand("ils -L", 'STDOUT_SINGLELINE', "home")
    # restore client environment and clean up generated SSL files
    self.auth_session.environment_file_contents = auth_session_env_backup
    for file in ['tests/pydevtest/server.key', 'tests/pydevtest/chain.pem', 'tests/pydevtest/dhparams.pem']:
        os.unlink(os.path.join(lib.get_irods_top_level_dir(), file))
    lib.restart_irods_server()
def test_authentication_PAM_with_server_params(self):
    """PAM authentication over SSL with pam_* options set in server_config.json.

    Generates throwaway SSL material, points both the service account and the
    client session at it, forces CS_NEG_REQUIRE, sets PAM password options,
    restarts the server, and verifies the PAM user can iinit and list files.
    NOTE(review): a duplicate definition of this method exists earlier in this
    file; this later copy is the one Python keeps — confirm that is intended.
    """
    lib.run_command('openssl genrsa -out server.key')
    lib.run_command('openssl req -batch -new -key server.key -out server.csr')
    lib.run_command('openssl req -batch -new -x509 -key server.key -out chain.pem -days 365')
    lib.run_command('openssl dhparam -2 -out dhparams.pem 1024')  # normally 2048, but smaller size here for speed
    service_account_environment_file_path = os.path.expanduser('~/.irods/irods_environment.json')
    with lib.file_backed_up(service_account_environment_file_path):
        server_update = {
            'irods_ssl_certificate_chain_file': os.path.join(lib.get_irods_top_level_dir(), 'tests/pydevtest/chain.pem'),
            'irods_ssl_certificate_key_file': os.path.join(lib.get_irods_top_level_dir(), 'tests/pydevtest/server.key'),
            'irods_ssl_dh_params_file': os.path.join(lib.get_irods_top_level_dir(), 'tests/pydevtest/dhparams.pem'),
            'irods_ssl_verify_server': 'none',
        }
        lib.update_json_file_from_dict(service_account_environment_file_path, server_update)
        client_update = {
            'irods_ssl_certificate_chain_file': os.path.join(lib.get_irods_top_level_dir(), 'tests/pydevtest/chain.pem'),
            'irods_ssl_certificate_key_file': os.path.join(lib.get_irods_top_level_dir(), 'tests/pydevtest/server.key'),
            'irods_ssl_dh_params_file': os.path.join(lib.get_irods_top_level_dir(), 'tests/pydevtest/dhparams.pem'),
            'irods_ssl_verify_server': 'none',
            'irods_authentication_scheme': 'PaM',  # deliberately odd case: scheme matching is case-insensitive
            'irods_client_server_policy': 'CS_NEG_REQUIRE',
        }
        auth_session_env_backup = copy.deepcopy(self.auth_session.environment_file_contents)
        self.auth_session.environment_file_contents.update(client_update)
        server_config_filename = lib.get_irods_config_dir() + '/server_config.json'
        with lib.file_backed_up(server_config_filename):
            server_config_update = {
                'pam_password_length': 20,
                'pam_no_extend': False,
                'pam_password_min_time': 121,
                'pam_password_max_time': 1209600,
            }
            lib.update_json_file_from_dict(server_config_filename, server_config_update)
            lib.restart_irods_server()

            # the test
            self.auth_session.assert_icommand(['iinit', self.auth_session.password])
            self.auth_session.assert_icommand("icd")
            self.auth_session.assert_icommand("ils -L", 'STDOUT_SINGLELINE', "home")
    # restore client environment and clean up generated SSL files
    self.auth_session.environment_file_contents = auth_session_env_backup
    for file in ['tests/pydevtest/server.key', 'tests/pydevtest/chain.pem', 'tests/pydevtest/dhparams.pem']:
        os.unlink(os.path.join(lib.get_irods_top_level_dir(), file))
    lib.restart_irods_server()
def test_iquota__3044(self):
    """Exercise resource quotas for user and group, per-resource and total (issue #3044).

    Enables quota policy in core.re, then for each (suq/sgq) x (resc/total)
    combination: sets a high quota (not over), lowers it (over), verifies an
    iput fails with SYS_RESC_QUOTA_EXCEEDED, removes the quota, and verifies
    the iput succeeds again.
    """
    myfile = 'quotafile'
    corefile = lib.get_core_re_dir() + "/core.re"
    with lib.file_backed_up(corefile):
        rules_to_prepend = 'acRescQuotaPolicy {msiSetRescQuotaPolicy("on"); }\n'
        time.sleep(2)  # remove once file hash fix is committed #2279
        lib.prepend_string_to_file(rules_to_prepend, corefile)
        time.sleep(2)  # remove once file hash fix is committed #2279
        for quotatype in [['suq', self.admin.username], ['sgq', 'public']]:  # user and group
            for quotaresc in [self.testresc, 'total']:  # resc and total
                cmd = 'iadmin {0} {1} {2} 8000'.format(quotatype[0], quotatype[1], quotaresc)  # set high quota
                self.admin.assert_icommand(cmd.split())
                cmd = 'irepl -R {0} {1}'.format(self.testresc, self.testfile)
                self.admin.assert_icommand(cmd.split())
                cmd = 'iadmin cu'  # calculate, update db
                self.admin.assert_icommand(cmd.split())
                cmd = 'iquota'
                self.admin.assert_icommand(cmd.split(), 'STDOUT_SINGLELINE', 'Nearing quota')  # not over yet
                cmd = 'iadmin {0} {1} {2} 40'.format(quotatype[0], quotatype[1], quotaresc)  # set low quota
                self.admin.assert_icommand(cmd.split())
                cmd = 'iquota'
                self.admin.assert_icommand(cmd.split(), 'STDOUT_SINGLELINE', 'OVER QUOTA')  # confirm it's over
                lib.make_file(myfile, 30, contents='arbitrary')
                cmd = 'iput -R {0} {1}'.format(self.testresc, myfile)  # should fail
                self.admin.assert_icommand(cmd.split(), 'STDERR_SINGLELINE', 'SYS_RESC_QUOTA_EXCEEDED')
                cmd = 'iadmin {0} {1} {2} 0'.format(quotatype[0], quotatype[1], quotaresc)  # remove quota
                self.admin.assert_icommand(cmd.split())
                cmd = 'iadmin cu'  # update db
                self.admin.assert_icommand(cmd.split())
                cmd = 'iput -R {0} {1}'.format(self.testresc, myfile)  # should succeed again
                self.admin.assert_icommand(cmd.split())
                cmd = 'irm -rf {0}'.format(myfile)  # clean up
                self.admin.assert_icommand(cmd.split())
        time.sleep(2)  # remove once file hash fix is committed #2279
def test_ssl_iput_with_rods_env(self):
    """iput over an SSL connection configured via the client irods_environment.

    Generates throwaway SSL material, configures the service account with it,
    forces CS_NEG_REQUIRE on the admin client, and verifies an iput/ils round
    trip succeeds over the encrypted connection.
    """
    lib.run_command('openssl genrsa -out server.key')
    lib.run_command('openssl req -batch -new -key server.key -out server.csr')
    lib.run_command('openssl req -batch -new -x509 -key server.key -out chain.pem -days 365')
    # 1024-bit DH params: normally 2048, but smaller here for speed.
    # (Was 100, which is below OpenSSL's minimum and yields unusable params.)
    lib.run_command('openssl dhparam -2 -out dhparams.pem 1024')
    service_account_environment_file_path = os.path.expanduser('~/.irods/irods_environment.json')
    with lib.file_backed_up(service_account_environment_file_path):
        server_update = {
            'irods_ssl_certificate_chain_file': os.path.join(lib.get_irods_top_level_dir(), 'tests/pydevtest/chain.pem'),
            'irods_ssl_certificate_key_file': os.path.join(lib.get_irods_top_level_dir(), 'tests/pydevtest/server.key'),
            'irods_ssl_dh_params_file': os.path.join(lib.get_irods_top_level_dir(), 'tests/pydevtest/dhparams.pem'),
        }
        lib.update_json_file_from_dict(service_account_environment_file_path, server_update)
        client_update = {
            'irods_client_server_policy': 'CS_NEG_REQUIRE',
            'irods_ssl_verify_server': 'none',
        }
        session_env_backup = copy.deepcopy(self.admin.environment_file_contents)
        self.admin.environment_file_contents.update(client_update)
        filename = 'encryptedfile.txt'
        filepath = lib.create_local_testfile(filename)
        self.admin.assert_icommand(['iinit', self.admin.password])
        self.admin.assert_icommand(['iput', filename])
        self.admin.assert_icommand(['ils', '-L', filename], 'STDOUT', filename)
        # restore client environment
        self.admin.environment_file_contents = session_env_backup
    # clean up generated SSL files and restart without SSL settings
    for f in ['server.key', 'server.csr', 'chain.pem', 'dhparams.pem']:
        os.unlink(f)
    lib.restart_irods_server()
def test_acPostProcForPut_replicate_to_multiple_resources(self):
    """acPostProcForPut rule replicating a new object to two extra resources.

    Creates resources r1 and r2, installs an acPostProcForPut rule plus a
    replicateMultiple helper in core.re, puts a file, and asserts replicas
    exist on demoResc, r1, and r2.
    NOTE(review): a duplicate definition of this method exists later in this
    file; the later definition is the one Python keeps.
    """
    # create new resources
    hostname = socket.gethostname()
    self.admin.assert_icommand(
        "iadmin mkresc r1 unixfilesystem " + hostname + ":/tmp/irods/r1", 'STDOUT_SINGLELINE', "Creating")
    self.admin.assert_icommand(
        "iadmin mkresc r2 unixfilesystem " + hostname + ":/tmp/irods/r2", 'STDOUT_SINGLELINE', "Creating")
    corefile = os.path.join(lib.get_core_re_dir(), 'core.re')
    with lib.file_backed_up(corefile):
        time.sleep(2)  # remove once file hash fix is committed #2279
        lib.prepend_string_to_file(
            '\nacPostProcForPut { replicateMultiple( \"r1,r2\" ); }\n', corefile)
        time.sleep(2)  # remove once file hash fix is committed #2279
        # add new rule to end of core.re
        # NOTE(review): prepend_string_to_file is used below despite the
        # comment saying "end of core.re" — confirm placement is intended.
        newrule = """
# multiple replication rule
replicateMultiple(*destRgStr) {
    *destRgList = split(*destRgStr, ',');
    writeLine("serverLog", " acPostProcForPut multiple replicate $objPath $filePath -> *destRgStr");
    foreach (*destRg in *destRgList) {
        writeLine("serverLog", " acPostProcForPut replicate $objPath $filePath -> *destRg");
        *e = errorcode(msiSysReplDataObj(*destRg,"null"));
        if (*e != 0) {
            if(*e == -808000) {
                writeLine("serverLog", "$objPath cannot be found");
                $status = 0;
                succeed;
            } else {
                fail(*e);
            }
        }
    }
}
"""
        time.sleep(2)  # remove once file hash fix is committed #2279
        lib.prepend_string_to_file(newrule, corefile)
        time.sleep(2)  # remove once file hash fix is committed #2279
        # put data
        tfile = "rulebasetestfile"
        lib.touch(tfile)
        self.admin.assert_icommand(['iput', tfile])
        # check replicas
        self.admin.assert_icommand(['ils', '-L', tfile], 'STDOUT_MULTILINE', [' demoResc ', ' r1 ', ' r2 '])
        # clean up and remove new resources
        self.admin.assert_icommand("irm -rf " + tfile)
        self.admin.assert_icommand("iadmin rmresc r1")
        self.admin.assert_icommand("iadmin rmresc r2")
        time.sleep(2)  # remove once file hash fix is committed #2279
def test_configuration_schema_validation_from_file(self):
    """Validate the server's JSON configuration against locally deployed schemas.

    Clones the irods_schema_configuration repo (branch v3), deploys the
    schemas to a temp directory, points schema_validation_base_uri at them
    via file://, restarts the server, and checks the validation output for
    each config file. Skips gracefully when jsonschema is not installed.
    NOTE(review): a duplicate definition of this method exists later in this
    file; the later definition is the one Python keeps.
    """
    schemas_git_dir = tempfile.mkdtemp(prefix='irods-test_configuration_schema_validation_from_file-git')
    with lib.directory_deleter(schemas_git_dir):
        schemas_repo = 'https://github.com/irods/irods_schema_configuration'
        lib.run_command(['git', 'clone', schemas_repo, schemas_git_dir])
        schemas_branch = 'v3'
        lib.run_command(['git', 'checkout', schemas_branch], cwd=schemas_git_dir)
        schemas_deploy_dir = tempfile.mkdtemp(prefix='irods-test_configuration_schema_validation_from_file-schemas')
        with lib.directory_deleter(schemas_deploy_dir):
            lib.assert_command(['python', os.path.join(schemas_git_dir, 'deploy_schemas_locally.py'),
                                '--output_directory_base', schemas_deploy_dir])
            with lib.file_backed_up(os.path.join(lib.get_irods_config_dir(), 'server_config.json')) as server_config_filename:
                with open(server_config_filename) as f:
                    server_config = json.load(f)
                # serve schemas straight from the local deploy dir
                server_config['schema_validation_base_uri'] = 'file://' + schemas_deploy_dir
                lib.update_json_file_from_dict(server_config_filename, server_config)
                irodsctl_fullpath = os.path.join(lib.get_irods_top_level_dir(), 'iRODS', 'irodsctl')

                if lib.is_jsonschema_installed():
                    expected_lines = ['Validating [{0}]... Success'.format(os.path.expanduser('~/.irods/irods_environment.json')),
                                      'Validating [{0}/server_config.json]... Success'.format(lib.get_irods_config_dir()),
                                      'Validating [{0}/VERSION.json]... Success'.format(lib.get_irods_top_level_dir()),
                                      'Validating [{0}/hosts_config.json]... Success'.format(lib.get_irods_config_dir()),
                                      'Validating [{0}/host_access_control_config.json]... Success'.format(lib.get_irods_config_dir())]
                    # resource servers have no database_config.json
                    if not configuration.TOPOLOGY_FROM_RESOURCE_SERVER:
                        expected_lines.append('Validating [{0}/database_config.json]... Success'.format(lib.get_irods_config_dir()))
                    lib.assert_command([irodsctl_fullpath, 'restart'], 'STDOUT_MULTILINE', expected_lines)
                else:
                    lib.assert_command([irodsctl_fullpath, 'restart'], 'STDERR_SINGLELINE', 'jsonschema not installed', desired_rc=0)
def test_auth_pep(self):
    """Verify an *OUT value set in a resource pre-PEP is visible in the post-PEP.

    Installs pep_resource_resolve_hierarchy_pre/post rules where pre sets
    *OUT and post logs it, does an iput, and asserts the marker string
    appears exactly once in the server log.
    """
    rules_to_prepend = """
pep_resource_resolve_hierarchy_pre(*A,*B,*OUT,*E,*F,*G,*H){
    *OUT = "THIS IS AN OUT VARIABLE"
}
pep_resource_resolve_hierarchy_post(*A,*B,*OUT,*E,*F,*G,*H){
    writeLine( 'serverLog', '*OUT')
}
"""
    corefile = lib.get_core_re_dir() + "/core.re"
    with lib.file_backed_up(corefile):
        time.sleep(1)  # remove once file hash fix is committed #2279
        lib.prepend_string_to_file(rules_to_prepend, corefile)
        time.sleep(1)  # remove once file hash fix is committed #2279
        initial_size_of_server_log = lib.get_log_size('server')
        filename = "test_re_serialization.txt"
        lib.make_file(filename, 1000)
        self.admin.assert_icommand("iput -f --metadata ATTR;VALUE;UNIT " + filename)
        out_count = lib.count_occurrences_of_string_in_log(
            'server', 'THIS IS AN OUT VARIABLE', start_index=initial_size_of_server_log)
        # NOTE(review): `commands` is a Python 2-only module — confirm this
        # suite still targets Python 2.
        output = commands.getstatusoutput('rm ' + filename)
        print("counts: " + str(out_count))
        assert 1 == out_count
def test_api_plugin(self):
    """Verify pre/post rule PEPs fire around the hello-world API plugin.

    Installs pep_rs_hello_world_pre/post logging rules, runs iapitest, and
    asserts the pre log line, the plugin's HELLO WORLD output, and the post
    log line each appear exactly once in the server log.
    """
    rules_to_prepend = """
pep_rs_hello_world_pre(*INST,*OUT,*COMM,*HELLO_IN,*HELLO_OUT) {
    writeLine("serverLog", "pep_rs_hello_world_pre - *INST *OUT *HELLO_IN, *HELLO_OUT");
}
pep_rs_hello_world_post(*INST,*OUT,*COMM,*HELLO_IN,*HELLO_OUT) {
    writeLine("serverLog", "pep_rs_hello_world_post - *INST *OUT *HELLO_IN, *HELLO_OUT");
}
"""
    corefile = lib.get_core_re_dir() + "/core.re"
    with lib.file_backed_up(corefile):
        time.sleep(1)  # remove once file hash fix is committed #2279
        lib.prepend_string_to_file(rules_to_prepend, corefile)
        time.sleep(1)  # remove once file hash fix is committed #2279
        initial_size_of_server_log = lib.get_log_size('server')
        self.admin.assert_icommand("iapitest", 'STDOUT_SINGLELINE', 'this')
        # pre-PEP sees the input struct; output struct is still null
        pre_count = lib.count_occurrences_of_string_in_log(
            'server', 'pep_rs_hello_world_pre - api_instance <unconvertible> that=hello, world.++++this=42, null_value',
            start_index=initial_size_of_server_log)
        hello_count = lib.count_occurrences_of_string_in_log(
            'server', 'HELLO WORLD', start_index=initial_size_of_server_log)
        # post-PEP sees both input and populated output structs
        post_count = lib.count_occurrences_of_string_in_log(
            'server', 'pep_rs_hello_world_post - api_instance <unconvertible> that=hello, world.++++this=42, that=hello, world.++++this=42++++value=128',
            start_index=initial_size_of_server_log)
        assert 1 == pre_count
        assert 1 == hello_count
        assert 1 == post_count
def test_acPostProcForPut_replicate_to_multiple_resources(self):
    """acPostProcForPut rule replicating a new object to two extra resources.

    Creates resources r1 and r2, installs an acPostProcForPut rule plus a
    replicateMultiple helper in core.re, puts a file, and asserts replicas
    exist on demoResc, r1, and r2.
    NOTE(review): a duplicate definition of this method exists earlier in
    this file; this later copy is the one Python keeps.
    """
    # create new resources
    hostname = socket.gethostname()
    self.admin.assert_icommand("iadmin mkresc r1 unixfilesystem " + hostname + ":/tmp/irods/r1", 'STDOUT_SINGLELINE', "Creating")
    self.admin.assert_icommand("iadmin mkresc r2 unixfilesystem " + hostname + ":/tmp/irods/r2", 'STDOUT_SINGLELINE', "Creating")
    corefile = os.path.join(lib.get_core_re_dir(), 'core.re')
    with lib.file_backed_up(corefile):
        time.sleep(2)  # remove once file hash fix is committed #2279
        lib.prepend_string_to_file('\nacPostProcForPut { replicateMultiple( \"r1,r2\" ); }\n', corefile)
        time.sleep(2)  # remove once file hash fix is committed #2279
        # add new rule to end of core.re
        # NOTE(review): prepend_string_to_file is used below despite the
        # comment saying "end of core.re" — confirm placement is intended.
        newrule = """
# multiple replication rule
replicateMultiple(*destRgStr) {
    *destRgList = split(*destRgStr, ',');
    writeLine("serverLog", " acPostProcForPut multiple replicate $objPath $filePath -> *destRgStr");
    foreach (*destRg in *destRgList) {
        writeLine("serverLog", " acPostProcForPut replicate $objPath $filePath -> *destRg");
        *e = errorcode(msiSysReplDataObj(*destRg,"null"));
        if (*e != 0) {
            if(*e == -808000) {
                writeLine("serverLog", "$objPath cannot be found");
                $status = 0;
                succeed;
            } else {
                fail(*e);
            }
        }
    }
}
"""
        time.sleep(2)  # remove once file hash fix is committed #2279
        lib.prepend_string_to_file(newrule, corefile)
        time.sleep(2)  # remove once file hash fix is committed #2279
        # put data
        tfile = "rulebasetestfile"
        lib.touch(tfile)
        self.admin.assert_icommand(['iput', tfile])
        # check replicas
        self.admin.assert_icommand(['ils', '-L', tfile], 'STDOUT_MULTILINE', [' demoResc ', ' r1 ', ' r2 '])
        # clean up and remove new resources
        self.admin.assert_icommand("irm -rf " + tfile)
        self.admin.assert_icommand("iadmin rmresc r1")
        self.admin.assert_icommand("iadmin rmresc r2")
        time.sleep(2)  # remove once file hash fix is committed #2279
def test_rulebase_update__2585(self):
    """Verify the delay server picks up edits to an additional rulebase (issue #2585).

    Adds test.re to re_rulebase_set, fires a delayed rule that calls
    do_some_stuff(), checks the first marker string hits the re log, then
    repaves test.re with a second marker and checks the new version is used.
    NOTE(review): a duplicate definition of this method exists later in this
    file; the later definition is the one Python keeps.
    """
    rule_file = 'my_rule.r'
    test_re = os.path.join(lib.get_core_re_dir(), 'test.re')
    my_rule = """
my_rule {
    delay("<PLUSET>1s</PLUSET>") {
        do_some_stuff();
    }
}
INPUT null
OUTPUT ruleExecOut
"""
    with open(rule_file, 'w') as f:
        f.write(my_rule)

    server_config_filename = lib.get_irods_config_dir() + '/server_config.json'
    with lib.file_backed_up(server_config_filename):
        # write new rule file to config dir
        test_rule = 'do_some_stuff() { writeLine( "serverLog", "TEST_STRING_TO_FIND_1_2585" ); }'
        with open(test_re, 'w') as f:
            f.write(test_rule)

        # update server config with additional rule file
        server_config_update = {
            "re_rulebase_set": [{"filename": "test"}, {"filename": "core"}]
        }
        lib.update_json_file_from_dict(server_config_filename, server_config_update)

        time.sleep(35)  # wait for delay rule engine to wake

        # checkpoint log to know where to look for the string
        initial_log_size = lib.get_log_size('re')
        self.admin.assert_icommand('irule -F ' + rule_file)
        time.sleep(35)  # wait for test to fire
        assert lib.count_occurrences_of_string_in_log('re', 'TEST_STRING_TO_FIND_1_2585', start_index=initial_log_size)

        # repave rule with new string
        test_rule = 'do_some_stuff() { writeLine( "serverLog", "TEST_STRING_TO_FIND_2_2585" ); }'
        os.unlink(test_re)
        with open(test_re, 'w') as f:
            f.write(test_rule)

        time.sleep(35)  # wait for delay rule engine to wake

        # checkpoint log to know where to look for the string
        initial_log_size = lib.get_log_size('re')
        self.admin.assert_icommand('irule -F ' + rule_file)
        time.sleep(35)  # wait for test to fire
        assert lib.count_occurrences_of_string_in_log('re', 'TEST_STRING_TO_FIND_2_2585', start_index=initial_log_size)

    # cleanup
    os.unlink(test_re)
    os.unlink(rule_file)
def test_configuration_schema_validation_from_file(self):
    """Validate the server's JSON configuration against locally deployed schemas.

    Clones the irods_schema_configuration repo (branch v3), deploys the
    schemas to a temp directory, points schema_validation_base_uri at them
    via file://, restarts the server, and checks the validation output for
    each config file. Skips gracefully when jsonschema is not installed.
    Paths are derived from lib helpers (not hardcoded /etc/irods or
    /var/lib/irods) so the test works on non-default installs, matching the
    earlier duplicate definition of this method in this file.
    """
    schemas_git_dir = tempfile.mkdtemp(prefix='irods-test_configuration_schema_validation_from_file-git')
    with lib.directory_deleter(schemas_git_dir):
        schemas_repo = 'https://github.com/irods/irods_schema_configuration'
        lib.run_command(['git', 'clone', schemas_repo, schemas_git_dir])
        schemas_branch = 'v3'
        lib.run_command(['git', 'checkout', schemas_branch], cwd=schemas_git_dir)
        schemas_deploy_dir = tempfile.mkdtemp(prefix='irods-test_configuration_schema_validation_from_file-schemas')
        with lib.directory_deleter(schemas_deploy_dir):
            lib.assert_command(['python', os.path.join(schemas_git_dir, 'deploy_schemas_locally.py'),
                                '--output_directory_base', schemas_deploy_dir])
            with lib.file_backed_up(os.path.join(lib.get_irods_config_dir(), 'server_config.json')) as server_config_filename:
                with open(server_config_filename) as f:
                    server_config = json.load(f)
                # serve schemas straight from the local deploy dir
                server_config['schema_validation_base_uri'] = 'file://' + schemas_deploy_dir
                lib.update_json_file_from_dict(server_config_filename, server_config)
                irodsctl_fullpath = os.path.join(lib.get_irods_top_level_dir(), 'iRODS', 'irodsctl')

                if lib.is_jsonschema_installed():
                    expected_lines = ['Validating [{0}]... Success'.format(os.path.expanduser('~/.irods/irods_environment.json')),
                                      'Validating [{0}/server_config.json]... Success'.format(lib.get_irods_config_dir()),
                                      'Validating [{0}/VERSION.json]... Success'.format(lib.get_irods_top_level_dir()),
                                      'Validating [{0}/hosts_config.json]... Success'.format(lib.get_irods_config_dir()),
                                      'Validating [{0}/host_access_control_config.json]... Success'.format(lib.get_irods_config_dir())]
                    # resource servers have no database_config.json
                    if not configuration.TOPOLOGY_FROM_RESOURCE_SERVER:
                        expected_lines.append('Validating [{0}/database_config.json]... Success'.format(lib.get_irods_config_dir()))
                    lib.assert_command([irodsctl_fullpath, 'restart'], 'STDOUT_MULTILINE', expected_lines)
                else:
                    lib.assert_command([irodsctl_fullpath, 'restart'], 'STDERR_SINGLELINE', 'jsonschema not installed', desired_rc=0)
def test_client_server_negotiation__2564(self):
    """Mismatched negotiation policies must fail with CLIENT_NEGOTIATION_ERROR.

    The server is configured to require SSL (CS_NEG_REQUIRE via acPreConnect)
    while the client refuses it (CS_NEG_REFUSE), so any icommand must fail
    during connection negotiation (issue #2564).
    """
    core_re_path = lib.get_core_re_dir() + "/core.re"
    with lib.file_backed_up(core_re_path):
        # make the client refuse SSL, remembering its original environment
        env_backup = copy.deepcopy(self.admin.environment_file_contents)
        self.admin.environment_file_contents.update({'irods_client_server_policy': 'CS_NEG_REFUSE'})

        # make the server require SSL
        time.sleep(2)  # remove once file hash fix is committed #2279
        lib.prepend_string_to_file('\nacPreConnect(*OUT) { *OUT="CS_NEG_REQUIRE"; }\n', core_re_path)
        time.sleep(2)  # remove once file hash fix is committed #2279

        # any connection attempt should now fail to negotiate
        self.admin.assert_icommand('ils', 'STDERR_SINGLELINE', 'CLIENT_NEGOTIATION_ERROR')

    self.admin.environment_file_contents = env_backup
def test_authentication_PAM_without_negotiation(self):
    """PAM authentication over SSL without client/server policy negotiation.

    Generates throwaway SSL material, configures both the service account and
    the PAM user's client environment for SSL (no CS_NEG policy set on the
    client), restarts the server, and verifies the PAM user can iinit and
    list files.
    NOTE(review): a duplicate definition of this method exists later in this
    file; the later definition is the one Python keeps — confirm which copy
    is intended.
    """
    lib.run_command('openssl genrsa -out server.key')
    lib.run_command('openssl req -batch -new -key server.key -out server.csr')
    lib.run_command('openssl req -batch -new -x509 -key server.key -out chain.pem -days 365')
    lib.run_command('openssl dhparam -2 -out dhparams.pem 1024')  # normally 2048, but smaller size here for speed
    service_account_environment_file_path = os.path.expanduser('~/.irods/irods_environment.json')
    with lib.file_backed_up(service_account_environment_file_path):
        server_update = {
            'irods_ssl_certificate_chain_file': os.path.join(lib.get_irods_top_level_dir(), 'tests/pydevtest/chain.pem'),
            'irods_ssl_certificate_key_file': os.path.join(lib.get_irods_top_level_dir(), 'tests/pydevtest/server.key'),
            'irods_ssl_dh_params_file': os.path.join(lib.get_irods_top_level_dir(), 'tests/pydevtest/dhparams.pem'),
            'irods_ssl_verify_server': 'none',
        }
        lib.update_json_file_from_dict(service_account_environment_file_path, server_update)
        client_update = {
            'irods_ssl_certificate_chain_file': os.path.join(lib.get_irods_top_level_dir(), 'tests/pydevtest/chain.pem'),
            'irods_ssl_certificate_key_file': os.path.join(lib.get_irods_top_level_dir(), 'tests/pydevtest/server.key'),
            'irods_ssl_dh_params_file': os.path.join(lib.get_irods_top_level_dir(), 'tests/pydevtest/dhparams.pem'),
            'irods_ssl_verify_server': 'none',
            'irods_authentication_scheme': 'PaM',  # deliberately odd case: scheme matching is case-insensitive
        }
        # now the actual test
        auth_session_env_backup = copy.deepcopy(self.auth_session.environment_file_contents)
        self.auth_session.environment_file_contents.update(client_update)
        # server reboot to pick up new irodsEnv settings
        lib.restart_irods_server()
        # do the reauth
        self.auth_session.assert_icommand(['iinit', self.auth_session.password])
        # connect and list some files
        self.auth_session.assert_icommand('icd')
        self.auth_session.assert_icommand('ils -L', 'STDOUT_SINGLELINE', 'home')
    # reset client environment to original
    self.auth_session.environment_file_contents = auth_session_env_backup
    # clean up
    for file in ['tests/pydevtest/server.key', 'tests/pydevtest/chain.pem', 'tests/pydevtest/dhparams.pem']:
        os.unlink(os.path.join(lib.get_irods_top_level_dir(), file))
    # server reboot to pick up new irodsEnv and server settings
    lib.restart_irods_server()
def test_authentication_PAM_without_negotiation(self):
    """PAM authentication over SSL without client/server policy negotiation.

    Generates throwaway SSL material, configures both the service account and
    the PAM user's client environment for SSL (no CS_NEG policy set on the
    client), restarts the server, and verifies the PAM user can iinit and
    list files.
    NOTE(review): a duplicate definition of this method exists earlier in
    this file; this later copy is the one Python keeps — confirm which copy
    is intended.
    """
    lib.run_command('openssl genrsa -out server.key')
    lib.run_command('openssl req -batch -new -key server.key -out server.csr')
    lib.run_command('openssl req -batch -new -x509 -key server.key -out chain.pem -days 365')
    # 1024-bit DH params: normally 2048, but smaller here for speed.
    # (Was 100, which is below OpenSSL's minimum and yields unusable params.)
    lib.run_command('openssl dhparam -2 -out dhparams.pem 1024')
    service_account_environment_file_path = os.path.expanduser('~/.irods/irods_environment.json')
    with lib.file_backed_up(service_account_environment_file_path):
        server_update = {
            'irods_ssl_certificate_chain_file': os.path.join(lib.get_irods_top_level_dir(), 'tests/pydevtest/chain.pem'),
            'irods_ssl_certificate_key_file': os.path.join(lib.get_irods_top_level_dir(), 'tests/pydevtest/server.key'),
            'irods_ssl_dh_params_file': os.path.join(lib.get_irods_top_level_dir(), 'tests/pydevtest/dhparams.pem'),
            'irods_ssl_verify_server': 'none',
        }
        lib.update_json_file_from_dict(service_account_environment_file_path, server_update)
        client_update = {
            'irods_ssl_certificate_chain_file': os.path.join(lib.get_irods_top_level_dir(), 'tests/pydevtest/chain.pem'),
            'irods_ssl_certificate_key_file': os.path.join(lib.get_irods_top_level_dir(), 'tests/pydevtest/server.key'),
            'irods_ssl_dh_params_file': os.path.join(lib.get_irods_top_level_dir(), 'tests/pydevtest/dhparams.pem'),
            'irods_ssl_verify_server': 'none',
            'irods_authentication_scheme': 'PaM',  # deliberately odd case: scheme matching is case-insensitive
        }
        # now the actual test
        auth_session_env_backup = copy.deepcopy(self.auth_session.environment_file_contents)
        self.auth_session.environment_file_contents.update(client_update)
        # server reboot to pick up new irodsEnv settings
        lib.restart_irods_server()
        # do the reauth
        self.auth_session.assert_icommand(['iinit', self.auth_session.password])
        # connect and list some files
        self.auth_session.assert_icommand('icd')
        self.auth_session.assert_icommand('ils -L', 'STDOUT_SINGLELINE', 'home')
    # reset client environment to original
    self.auth_session.environment_file_contents = auth_session_env_backup
    # clean up
    for file in ['tests/pydevtest/server.key', 'tests/pydevtest/chain.pem', 'tests/pydevtest/dhparams.pem']:
        os.unlink(os.path.join(lib.get_irods_top_level_dir(), file))
    # server reboot to pick up new irodsEnv and server settings
    lib.restart_irods_server()
def test_re_serialization(self):
    """Verify session state is serialized into dynamic PEP arguments.

    Installs a pep_resource_resolve_hierarchy_pre rule that logs all of its
    arguments, does an iput with metadata, and asserts the auth flag, zone,
    user name, and metadata string each appear exactly once in the server log.
    """
    rules_to_prepend = """
pep_resource_resolve_hierarchy_pre(*A,*B,*OUT,*E,*F,*G,*H){
    writeLine("serverLog", "pep_resource_resolve_hierarchy_pre - [*A] [*B] [*OUT] [*E] [*F] [*G] [*H]");
}
"""
    corefile = lib.get_core_re_dir() + "/core.re"
    with lib.file_backed_up(corefile):
        time.sleep(1)  # remove once file hash fix is committed #2279
        lib.prepend_string_to_file(rules_to_prepend, corefile)
        time.sleep(1)  # remove once file hash fix is committed #2279
        initial_size_of_server_log = lib.get_log_size('server')
        filename = "test_re_serialization.txt"
        lib.make_file(filename, 1000)
        self.admin.assert_icommand("iput -f --metadata ATTR;VALUE;UNIT " + filename)
        # each serialized session attribute should be logged exactly once
        auth_count = lib.count_occurrences_of_string_in_log(
            'server', 'user_auth_info_auth_flag=5', start_index=initial_size_of_server_log)
        zone_count = lib.count_occurrences_of_string_in_log(
            'server', 'user_rods_zone=tempZone', start_index=initial_size_of_server_log)
        user_count = lib.count_occurrences_of_string_in_log(
            'server', 'user_user_name=otherrods', start_index=initial_size_of_server_log)
        mdata_count = lib.count_occurrences_of_string_in_log(
            'server', 'ATTR;VALUE;UNIT', start_index=initial_size_of_server_log)
        # NOTE(review): `commands` is a Python 2-only module — confirm this
        # suite still targets Python 2.
        output = commands.getstatusoutput('rm ' + filename)
        print("counts: " + str(auth_count) + " " + str(zone_count) + " " + str(user_count) + " " + str(mdata_count))
        assert 1 == auth_count
        assert 1 == zone_count
        assert 1 == user_count
        assert 1 == mdata_count
def test_iput_bulk_check_acpostprocforput__2841(self):
    """Bulk iput must trigger acPostProcForPut once per file (issue #2841).

    For a range of file sizes, enables the bulk-put post-processing policy
    with a logging acPostProcForPut rule, does a bulk recursive iput of a
    directory of files, and asserts one log line per file.
    """
    # prepare test directory
    number_of_files = 5
    dirname = self.admin.local_session_dir + '/files'
    corefile = lib.get_core_re_dir() + "/core.re"
    # files less than 4200000 were failing to trigger the writeLine
    for filesize in range(5000, 6000000, 500000):
        files = lib.make_large_local_tmp_dir(dirname, number_of_files, filesize)
        # manipulate core.re and check the server log
        with lib.file_backed_up(corefile):
            initial_size_of_server_log = lib.get_log_size('server')
            rules_to_prepend = '''
acBulkPutPostProcPolicy { msiSetBulkPutPostProcPolicy("on"); }
acPostProcForPut { writeLine("serverLog", "acPostProcForPut called for $objPath"); }
'''
            time.sleep(1)  # remove once file hash fix is committed #2279
            lib.prepend_string_to_file(rules_to_prepend, corefile)
            time.sleep(1)  # remove once file hash fix is committed #2279
            self.admin.assert_icommand(['iput', '-frb', dirname])
            # one acPostProcForPut log line per file in the bulk put
            assert number_of_files == lib.count_occurrences_of_string_in_log(
                'server', 'writeLine: inString = acPostProcForPut called for', start_index=initial_size_of_server_log)
        shutil.rmtree(dirname)
def test_rulebase_update__2585(self):
    """Verify the delay server picks up edits to an additional rulebase (issue #2585).

    Adds test.re to re_rulebase_set, fires a delayed rule that calls
    do_some_stuff(), checks the first marker string hits the re log, then
    repaves test.re with a second marker and checks the new version is used.
    NOTE(review): a duplicate definition of this method exists earlier in
    this file; this later copy is the one Python keeps.
    """
    rule_file = 'my_rule.r'
    test_re = os.path.join(lib.get_core_re_dir(), 'test.re')
    my_rule = """
my_rule {
    delay("<PLUSET>1s</PLUSET>") {
        do_some_stuff();
    }
}
INPUT null
OUTPUT ruleExecOut
"""
    with open(rule_file, 'w') as f:
        f.write(my_rule)

    server_config_filename = lib.get_irods_config_dir() + '/server_config.json'
    with lib.file_backed_up(server_config_filename):
        # write new rule file to config dir
        test_rule = 'do_some_stuff() { writeLine( "serverLog", "TEST_STRING_TO_FIND_1_2585" ); }'
        with open(test_re, 'w') as f:
            f.write(test_rule)

        # update server config with additional rule file
        server_config_update = {
            "re_rulebase_set": [{"filename": "test"}, {"filename": "core"}]
        }
        lib.update_json_file_from_dict(server_config_filename, server_config_update)

        time.sleep(35)  # wait for delay rule engine to wake

        # checkpoint log to know where to look for the string
        initial_log_size = lib.get_log_size('re')
        self.admin.assert_icommand('irule -F ' + rule_file)
        time.sleep(35)  # wait for test to fire
        assert lib.count_occurrences_of_string_in_log(
            're', 'TEST_STRING_TO_FIND_1_2585', start_index=initial_log_size)

        # repave rule with new string
        test_rule = 'do_some_stuff() { writeLine( "serverLog", "TEST_STRING_TO_FIND_2_2585" ); }'
        os.unlink(test_re)
        with open(test_re, 'w') as f:
            f.write(test_rule)

        time.sleep(35)  # wait for delay rule engine to wake

        # checkpoint log to know where to look for the string
        initial_log_size = lib.get_log_size('re')
        self.admin.assert_icommand('irule -F ' + rule_file)
        time.sleep(35)  # wait for test to fire
        assert lib.count_occurrences_of_string_in_log(
            're', 'TEST_STRING_TO_FIND_2_2585', start_index=initial_log_size)

    # cleanup
    os.unlink(test_re)
    os.unlink(rule_file)
def test_acPostProcForOpen__3024(self):
    """Verify resource session variables ($KVPairs.*) are populated in the
    acPostProcForOpen PEP when a data object is opened via msiDataObjOpen.

    Issue #3024: queries the resource's catalog properties up front, installs
    a test.re rule that writes each $KVPairs session variable to the server
    log, triggers the PEP with irule, then asserts every queried property
    value appears in the log.
    """
    test_re = os.path.join(lib.get_core_re_dir(), 'test.re')
    server_config_filename = lib.get_irods_config_dir() + '/server_config.json'

    # get PEP name from function name
    # ('test_acPostProcForOpen__3024'.split('_')[1] -> 'acPostProcForOpen')
    pep_name = inspect.stack()[0][3].split('_')[1]

    # user session
    sesh = self.user0
    testfile = self.testfile
    target_obj = os.path.join(sesh.session_collection, testfile)

    # prepare rule file
    # NOTE(review): the inner rule name says "ForCreate" — looks like a
    # copy-paste from a sibling test; harmless since irule -F executes the
    # file's rule regardless of its name, and it is part of the runtime
    # string, so it is left untouched here.
    rule_file = "test_rule_file.r"
    rule_string = '''
test_acPostProcForCreate__3024 {{
    msiDataObjOpen("{target_obj}",*FD);
    msiDataObjClose(*FD,*Status);
}}
INPUT null
OUTPUT ruleExecOut
'''.format(**locals())

    with open(rule_file, 'w') as f:
        f.write(rule_string)

    # query for resource properties
    columns = ('RESC_ZONE_NAME, '
               'RESC_FREE_SPACE, '
               'RESC_STATUS, '
               'RESC_ID, '
               'RESC_NAME, '
               'RESC_TYPE_NAME, '
               'RESC_LOC, '
               'RESC_CLASS_NAME, '
               'RESC_VAULT_PATH, '
               'RESC_INFO, '
               'RESC_COMMENT, '
               'RESC_CREATE_TIME, '
               'RESC_MODIFY_TIME')
    resource = sesh.default_resource
    query = '''iquest "SELECT {columns} WHERE RESC_NAME ='{resource}'"'''.format(**locals())
    result = sesh.run_icommand(query)[1]

    # last line is iquest default formatting separator
    resource_property_list = result.splitlines()[:-1]

    with lib.file_backed_up(server_config_filename):
        # prepare rule
        # rule will write PEP name as well as
        # resource related rule session vars to server log
        rule_body = 'writeLine("serverLog", "{pep_name}");'.format(**locals())
        rule_body += ('writeLine("serverLog", $KVPairs.zoneName);'
                      'writeLine("serverLog", $KVPairs.freeSpace);'
                      'writeLine("serverLog", $KVPairs.quotaLimit);'
                      'writeLine("serverLog", $KVPairs.rescStatus);'
                      'writeLine("serverLog", $KVPairs.rescId);'
                      'writeLine("serverLog", $KVPairs.rescName);'
                      'writeLine("serverLog", $KVPairs.rescType);'
                      'writeLine("serverLog", $KVPairs.rescLoc);'
                      'writeLine("serverLog", $KVPairs.rescClass);'
                      'writeLine("serverLog", $KVPairs.rescVaultPath);'
                      'writeLine("serverLog", $KVPairs.rescInfo);'
                      'writeLine("serverLog", $KVPairs.rescComments);'
                      'writeLine("serverLog", $KVPairs.rescCreate);'
                      'writeLine("serverLog", $KVPairs.rescModify);')
        test_rule = '{pep_name} {{ {rule_body} }}'.format(**locals())

        # write new rule file
        with open(test_re, 'w') as f:
            f.write(test_rule)

        # update server config with additional rule file
        server_config_update = {
            "re_rulebase_set": [{"filename": "test"}, {"filename": "core"}]
        }
        lib.update_json_file_from_dict(server_config_filename, server_config_update)

        # iput test file
        sesh.assert_icommand('iput -f {testfile}'.format(**locals()))

        # checkpoint log to know where to look for the string
        initial_log_size = lib.get_log_size('server')

        # invoke irule to trigger PEP
        sesh.assert_icommand('irule -F {rule_file}'.format(**locals()))

        # confirm that PEP was hit by looking for pep name in server log
        assert lib.count_occurrences_of_string_in_log('server', pep_name, start_index=initial_log_size)

        # check that resource session vars were written to the server log
        for line in resource_property_list:
            column = line.rsplit('=', 1)[0].strip()
            property = line.rsplit('=', 1)[1].strip()
            if property:
                if column != 'RESC_MODIFY_TIME':
                    assert lib.count_occurrences_of_string_in_log('server', property, start_index=initial_log_size)
                else:
                    # for resource modify time skip last 2 second digits
                    # (the open may bump the timestamp between query and PEP)
                    assert lib.count_occurrences_of_string_in_log('server', property[:-2], start_index=initial_log_size)

    # cleanup
    sesh.run_icommand('irm -f {target_obj}'.format(**locals()))
    os.unlink(rule_file)
    os.unlink(test_re)
def test_acPostProcForFilePathReg__3024(self):
    """Verify resource session variables ($KVPairs.*) are populated in the
    acPostProcForFilePathReg PEP when a physical file is registered (ireg).

    Issue #3024: queries the resource's catalog properties up front, installs
    a test.re rule that writes each $KVPairs session variable to the server
    log, triggers the PEP with ireg, then asserts every queried property
    value appears in the log.
    """
    test_re = os.path.join(lib.get_core_re_dir(), 'test.re')
    server_config_filename = lib.get_irods_config_dir() + '/server_config.json'

    # get PEP name from function name
    # ('test_acPostProcForFilePathReg__3024'.split('_')[1] -> 'acPostProcForFilePathReg')
    pep_name = inspect.stack()[0][3].split('_')[1]

    # user session
    # use admin to be allowed to register stuff
    sesh = self.admin

    # test file for ireg
    username = sesh.username
    resc_vault_path = lib.get_vault_path(sesh)
    testfile = '{resc_vault_path}/home/{username}/foo.txt'.format(**locals())
    open(testfile, 'a').close()

    # query for resource properties
    columns = ('RESC_ZONE_NAME, '
               'RESC_FREE_SPACE, '
               'RESC_STATUS, '
               'RESC_ID, '
               'RESC_NAME, '
               'RESC_TYPE_NAME, '
               'RESC_LOC, '
               'RESC_CLASS_NAME, '
               'RESC_VAULT_PATH, '
               'RESC_INFO, '
               'RESC_COMMENT, '
               'RESC_CREATE_TIME, '
               'RESC_MODIFY_TIME')
    resource = sesh.default_resource
    query = '''iquest "SELECT {columns} WHERE RESC_NAME ='{resource}'"'''.format(**locals())
    result = sesh.run_icommand(query)[1]

    # last line is iquest default formatting separator
    resource_property_list = result.splitlines()[:-1]

    with lib.file_backed_up(server_config_filename):
        # prepare rule
        # rule will write PEP name as well as
        # resource related rule session vars to server log
        rule_body = 'writeLine("serverLog", "{pep_name}");'.format(**locals())
        rule_body += ('writeLine("serverLog", $KVPairs.zoneName);'
                      'writeLine("serverLog", $KVPairs.freeSpace);'
                      'writeLine("serverLog", $KVPairs.quotaLimit);'
                      'writeLine("serverLog", $KVPairs.rescStatus);'
                      'writeLine("serverLog", $KVPairs.rescId);'
                      'writeLine("serverLog", $KVPairs.rescName);'
                      'writeLine("serverLog", $KVPairs.rescType);'
                      'writeLine("serverLog", $KVPairs.rescLoc);'
                      'writeLine("serverLog", $KVPairs.rescClass);'
                      'writeLine("serverLog", $KVPairs.rescVaultPath);'
                      'writeLine("serverLog", $KVPairs.rescInfo);'
                      'writeLine("serverLog", $KVPairs.rescComments);'
                      'writeLine("serverLog", $KVPairs.rescCreate);'
                      'writeLine("serverLog", $KVPairs.rescModify);')
        test_rule = '{pep_name} {{ {rule_body} }}'.format(**locals())

        # write new rule file
        with open(test_re, 'w') as f:
            f.write(test_rule)

        # update server config with additional rule file
        server_config_update = {
            "re_rulebase_set": [{"filename": "test"}, {"filename": "core"}]
        }
        lib.update_json_file_from_dict(server_config_filename, server_config_update)

        # checkpoint log to know where to look for the string
        initial_log_size = lib.get_log_size('server')

        # ireg test file to trigger PEP
        target_obj = os.path.join(sesh.home_collection, os.path.basename(testfile))
        sesh.assert_icommand('ireg {testfile} {target_obj}'.format(**locals()))

        # confirm that PEP was hit by looking for pep name in server log
        assert lib.count_occurrences_of_string_in_log('server', pep_name, start_index=initial_log_size)

        # check that resource session vars were written to the server log
        for line in resource_property_list:
            column = line.rsplit('=', 1)[0].strip()
            property = line.rsplit('=', 1)[1].strip()
            if property:
                if column != 'RESC_MODIFY_TIME':
                    assert lib.count_occurrences_of_string_in_log('server', property, start_index=initial_log_size)
                else:
                    # consistency fix: previously RESC_MODIFY_TIME was silently
                    # skipped here; check it like the sibling acPostProcForOpen
                    # test does, trimming the last 2 second digits since the
                    # registration may bump the timestamp
                    assert lib.count_occurrences_of_string_in_log('server', property[:-2], start_index=initial_log_size)

    # cleanup
    sesh.run_icommand('irm -f {target_obj}'.format(**locals()))
    os.unlink(test_re)