def manual_process(cbf_path=None):
    """Re-run stills processing for waylinCD images whose idx- output is
    missing, using a refined_experiments.json as reference geometry.

    :param cbf_path: directory holding the raw .cbf images.  Defaults to the
        nt14493-63 CuNIR/waylinCD visit directory when not given.
        (Fix: the original clobbered this argument unconditionally with the
        hard-coded path; it is now only a fallback default.)
    """
    # potential additions:
    # check to see if stills_process input = experiments.json works
    if cbf_path is None:
        cbf_path = '/dls/i24/data/2017/nt14493-63/CuNIR/waylinCD/'
    reference_path = '/dls/i24/data/2017/nt14493-63/processing/stills_process/waylinCD/'
    wd = '/dls/i24/data/2017/nt14493-63/processing/scripts/experiment_refinement_process_test_folder/waylinCD/'
    experiments_json_list = nest.find_experiments_json_starts('waylinCD', doses=20, before_limit=5)
    all_out_files = utils.list_directory(wd)
    for file_num in experiments_json_list:
        # Round down to the start of the 20-image dose series containing file_num.
        start_num = file_num - (file_num % 20)
        final_num = start_num + 20
        # Separate name for the zero-padded form; the original reused file_num
        # for both the int and its string rendering.
        file_num_str = '%.5d' % file_num
        file_list = ['waylinCD0077_%.5d' % num for num in range(start_num, final_num)]
        # e.g. idx-waylinCD0077_46664_refined_experiments.json
        ref_exp_json = '%sidx-waylinCD0077_%s_refined_experiments.json' % (reference_path, file_num_str)
        print(ref_exp_json)
        for image in file_list:
            num = image.split('_')[1]
            idx_file = utils.idx_file_check(all_out_files, num)
            # Only submit jobs for images with no idx- output yet.
            if idx_file == []:
                p_file_name = sg.write_manual_process_file(wd, cbf_path, image, ref_exp_json)
                # run manual process
                utils.run_cluster_job_with_log(p_file_name)
                utils.check_log(job_lim=20)  # default job limit = 50
                # move_cluster_logs()
        # (removed the original's 'if image == file_list[-1]: break' — a no-op,
        # since the loop ends on the last element anyway)
def test_strategy_find_uuid(app, caplog, code, oidc_provider, oidc_provider_jwkset,
                            login_url, login_callback_url, simple_user):
    """Check the STRATEGY_FIND_UUID provider strategy: an unknown OIDC `sub`
    must NOT create a user, while a `sub` equal to an existing user's UUID
    must log that user in without modifying their attributes.
    """
    get_providers.cache.clear()
    has_providers.cache.clear()
    # no mapping please — claim mappings would otherwise rewrite user
    # attributes on login and defeat the "user was not modified" checks below
    OIDCClaimMapping.objects.all().delete()
    oidc_provider.strategy = oidc_provider.STRATEGY_FIND_UUID
    oidc_provider.save()
    User = get_user_model()
    # exactly one pre-existing user (the simple_user fixture)
    assert User.objects.count() == 1
    response = app.get('/').maybe_follow()
    assert oidc_provider.name in response.content
    # follow the provider's login link and capture the authorize redirect
    response = response.click(oidc_provider.name)
    location = urlparse.urlparse(response.location)
    query = check_simple_qs(urlparse.parse_qs(location.query))
    # sub=john.doe, MUST not work (no user has that UUID, and the strategy
    # never creates users)
    with utils.check_log(caplog, 'cannot create user'):
        with oidc_provider_mock(oidc_provider, oidc_provider_jwkset, code):
            response = app.get(login_callback_url,
                               params={'code': code, 'state': query['state']})
    # sub=simple_user.uuid MUST work
    with utils.check_log(caplog, 'found user using UUID'):
        with oidc_provider_mock(oidc_provider, oidc_provider_jwkset, code,
                                sub=simple_user.uuid):
            response = app.get(login_callback_url,
                               params={'code': code, 'state': query['state']})
    assert urlparse.urlparse(response['Location']).path == '/'
    # still only one user: login found the existing one, created none
    assert User.objects.count() == 1
    user = User.objects.get()
    # verify user was not modified
    assert user.username == 'user'
    assert user.first_name == u'Jôhn'
    assert user.last_name == u'Dôe'
    assert user.email == '*****@*****.**'
    assert user.attributes.first_name == u'Jôhn'
    assert user.attributes.last_name == u'Dôe'
    response = app.get(reverse('account_management'))
    # logging out must revoke the token at the provider and redirect to the
    # provider's logout endpoint
    with utils.check_log(caplog, 'revoked token from OIDC'):
        with oidc_provider_mock(oidc_provider, oidc_provider_jwkset, code):
            response = response.click(href='logout')
    assert 'https://idp.example.com/logout' in response.content
def auto_process(visit_directory, protein_name, chip_name, chip_run,
                 output_directory, job_lim=20, iteration_lim=1000,
                 wait_time=10, log_name='03102017_out.txt'):
    """Submit stills_process cluster jobs for every .cbf image that does not
    yet have an idx- output file in the working directory.

    :param visit_directory: root of the beamline visit.
    :param protein_name: sub-directory of the visit holding the chip data.
    :param chip_name: chip directory containing the .cbf images.
    :param chip_run: run number (int), or None to take the chipname+run
        prefix from the first image file name.
    :param output_directory: name of the stills_process output directory.
    :param job_lim: maximum number of queued cluster jobs allowed.
    :param iteration_lim: polling iterations before check_log gives up.
    :param wait_time: seconds between check_log polls.
    :param log_name: cluster log file monitored by utils.check_log.
    """
    cbf_path = os.path.join(visit_directory, protein_name, chip_name)
    cbf_list = utils.list_directory_files_of_type(cbf_path, '.cbf')
    wd = os.path.join(visit_directory, 'processing/stills_process/', output_directory)
    all_out_files = utils.list_directory(wd)
    print('%s %s' % (cbf_path, wd))
    # Fix: normalise chip_run once, outside the loop.  The original re-applied
    # '%.4d' on the second batch to an already-formatted string (TypeError).
    if chip_run is None:
        chip_run = cbf_list[0].split('_')[0] if cbf_list else None  # chipname+run number
    else:
        chip_run = '%.4d' % chip_run
    list_to_append = []
    for image in cbf_list:
        idx_file = utils.idx_file_check(all_out_files, image.split('.')[0])
        if idx_file == []:
            list_to_append.append(image)
        # Flush whenever the batch is non-empty.  (Equivalent to the original
        # 'job_length >= 1 or file == cbf_path[-1]': the second clause compared
        # a filename to the last *character* of the path and was always false —
        # and had it ever fired on an empty batch it would have raised
        # IndexError on list_to_append[0].)
        if list_to_append:
            print(image)
            initial_num = list_to_append[0].split('.')[0].split('_')[1]
            final_num = list_to_append[-1].split('.')[0].split('_')[1]
            p_file_name = sg.write_multi_stills_file(
                wd, cbf_path, chip_run, initial_num, final_num)
            list_to_append = []
            # Fix: honour the keyword arguments instead of re-hard-coding the
            # same literals (the original ignored every tuning parameter).
            utils.check_log(log_file_name=log_name, job_lim=job_lim, i=0,
                            iteration_lim=iteration_lim, wait_time=wait_time)
            utils.run_cluster_job_with_log(p_file_name)
def jobSubmitter(sc, path):
    """Walk the directories recorded in num_per_file_dict.pickle and submit
    cluster processing jobs for images that have no idx- output yet, in
    batches of up to 100 images and at most 4 batches per directory.

    :param sc: unused in the current implementation — presumably a Spark or
        scheduler context; kept for interface compatibility.  TODO confirm.
    :param path: unused — the directory list now comes from the pickle (see
        the commented-out os.listdir code); kept for backward compatibility.
    """
    pickle_path = 'num_per_file_dict.pickle'
    # List of directories that have had file changes, produced by spysnail.
    # May be worth trying to combine into one document.
    # (This note was a misplaced string literal in the original — a no-op
    # statement after the first assignment, not a docstring.)
    # path = '/dls/i24/data/2017/nt14493-78/'
    # dir_paths_list = os.listdir(path)
    # dir_paths_list = [path + dir for dir in dir_paths_list]
    # dir_paths_list.remove('/dls/i24/data/2017/nt14493-65/adc/adrian78')
    # print(dir_paths_list)
    # dir_paths_list.append('/dls/i24/data/2017/nt14493-65/hewl/briony')
    # dir_paths_list.append('/dls/i24/data/2017/nt14493-65/hewl/carter')
    dir_paths_list = getDirListFromPickle(pickle_path)
    # dir_paths_list.reverse()
    # Fix: the loop variable was named 'path' in the original, silently
    # clobbering the function parameter.
    for dir_path in dir_paths_list:
        p_name, c_dir_name, dir_contents_list, idx_list = pathInitialiser(dir_path)
        list_to_append = []
        i = 0
        for entry in dir_contents_list:
            name, c_name = getChipName(entry)
            idx_file = utils.idx_file_check(idx_list, name)
            if idx_file == []:
                print(name)
                list_to_append.append(entry)
            run_no, firstnum, lastnum, job_length = getRunNum(entry, list_to_append)
            # Submit a batch every 100 pending images, or at the end of the list.
            if job_length >= 100 or entry == dir_contents_list[-1]:
                i += 100
                p_file_name = write_process_file(dir_path, firstnum, lastnum,
                                                 c_name, c_dir_name, run_no, p_name)
                list_to_append = []
                utils.check_log()
                utils.run_cluster_job(p_file_name)
                if i >= 400:  # cap at 4 batches per directory
                    break
        # NOTE(review): placement reconstructed from the flattened source —
        # assumed to run once per directory, after its batches; confirm
        # against the original layout.
        global_hawk.bulk_process()
def test_sso(app, caplog, code, oidc_provider, oidc_provider_jwkset, login_url,
             login_callback_url, hooks):
    """End-to-end OIDC SSO flow: authorize redirect parameters, rejection of
    every class of invalid id_token, first-login user provisioning with
    verified-attribute handling and OU mapping, idempotent re-login, and
    token revocation on logout.
    """
    OU = get_ou_model()
    cassis = OU.objects.create(name='Cassis', slug='cassis')
    OU.cached.cache.clear()
    # hitting a protected page must offer the OIDC provider as a login method
    response = app.get('/admin/').maybe_follow()
    assert oidc_provider.name in response.content
    response = response.click(oidc_provider.name)
    # the login link must redirect to the provider's authorization endpoint
    location = urlparse.urlparse(response.location)
    endpoint = urlparse.urlparse(oidc_provider.authorization_endpoint)
    assert location.scheme == endpoint.scheme
    assert location.netloc == endpoint.netloc
    assert location.path == endpoint.path
    # the authorize request must carry a well-formed OIDC code-flow query
    query = check_simple_qs(urlparse.parse_qs(location.query))
    assert query['state'] in app.session['auth_oidc']
    assert query['response_type'] == 'code'
    assert query['client_id'] == str(oidc_provider.client_id)
    assert query['scope'] == 'openid'
    assert query['redirect_uri'] == 'http://testserver' + reverse('oidc-login-callback')
    User = get_user_model()
    assert User.objects.count() == 0
    # each invalid-token scenario below must be rejected and logged;
    # none of them may create a user
    with utils.check_log(caplog, 'invalid token endpoint response'):
        with oidc_provider_mock(oidc_provider, oidc_provider_jwkset, code):
            # wrong authorization code
            response = app.get(login_callback_url,
                               params={'code': 'yyyy', 'state': query['state']})
    with utils.check_log(caplog, 'invalid id_token %r'):
        with oidc_provider_mock(oidc_provider, oidc_provider_jwkset, code,
                                extra_id_token={'iss': None}):
            # missing issuer claim
            response = app.get(login_callback_url,
                               params={'code': code, 'state': query['state']})
    with utils.check_log(caplog, 'invalid id_token %r'):
        with oidc_provider_mock(oidc_provider, oidc_provider_jwkset, code,
                                extra_id_token={'sub': None}):
            # missing subject claim
            response = app.get(login_callback_url,
                               params={'code': code, 'state': query['state']})
    with utils.check_log(caplog, 'authentication is too old'):
        with oidc_provider_mock(oidc_provider, oidc_provider_jwkset, code,
                                extra_id_token={'iat': 1}):
            # issued-at far in the past
            response = app.get(login_callback_url,
                               params={'code': code, 'state': query['state']})
    with utils.check_log(caplog, 'id_token expired'):
        with oidc_provider_mock(oidc_provider,
                                oidc_provider_jwkset, code,
                                extra_id_token={'exp': 1}):
            # expiry far in the past
            response = app.get(login_callback_url,
                               params={'code': code, 'state': query['state']})
    with utils.check_log(caplog, 'invalid id_token audience'):
        with oidc_provider_mock(oidc_provider, oidc_provider_jwkset, code,
                                extra_id_token={'aud': 'zz'}):
            # audience not this client
            response = app.get(login_callback_url,
                               params={'code': code, 'state': query['state']})
    # no modify_user hook fired during any failed attempt
    assert not hooks.auth_oidc_backend_modify_user
    # a fully valid token must provision the user on first login
    with utils.check_log(caplog, 'created user'):
        with oidc_provider_mock(oidc_provider, oidc_provider_jwkset, code):
            response = app.get(login_callback_url,
                               params={'code': code, 'state': query['state']})
    assert len(hooks.auth_oidc_backend_modify_user) == 1
    assert set(hooks.auth_oidc_backend_modify_user[0]['kwargs']) >= set(
        ['user', 'provider', 'user_info', 'id_token', 'access_token'])
    # back to the originally requested page
    assert urlparse.urlparse(response['Location']).path == '/admin/'
    assert User.objects.count() == 1
    user = User.objects.get()
    assert user.ou == get_default_ou()
    assert user.username == 'john.doe'
    assert user.first_name == 'John'
    assert user.last_name == 'Doe'
    assert user.email == '*****@*****.**'
    assert user.attributes.first_name == 'John'
    assert user.attributes.last_name == 'Doe'
    # given_name arrived verified, family_name did not
    assert AttributeValue.objects.filter(content='John', verified=True).count() == 1
    assert AttributeValue.objects.filter(content='Doe', verified=False).count() == 1
    # re-login with family_name now verified must upgrade the attribute
    with oidc_provider_mock(oidc_provider, oidc_provider_jwkset, code,
                            extra_user_info={'family_name_verified': True}):
        response = app.get(login_callback_url,
                           params={'code': code, 'state': query['state']})
    assert AttributeValue.objects.filter(content='Doe', verified=False).count() == 0
    assert AttributeValue.objects.filter(content='Doe', verified=True).count() == 1
    # an 'ou' claim must move the user to the matching organizational unit
    with oidc_provider_mock(oidc_provider, oidc_provider_jwkset, code,
                            extra_user_info={'ou': 'cassis'}):
        response = app.get(login_callback_url,
                           params={'code': code, 'state': query['state']})
    assert User.objects.count() == 1
    user = User.objects.get()
    assert user.ou == cassis
    # without the claim, the user falls back to the default OU
    with oidc_provider_mock(oidc_provider, oidc_provider_jwkset, code):
        response = app.get(login_callback_url,
                           params={'code': code, 'state': query['state']})
    assert User.objects.count() == 1
    user = User.objects.get()
    assert user.ou == get_default_ou()
    last_modified = user.modified
    # sleep so an unwanted save would produce a measurably newer timestamp
    time.sleep(0.1)
    # an identical re-login must not touch the user record at all
    with oidc_provider_mock(oidc_provider, oidc_provider_jwkset, code):
        response = app.get(login_callback_url,
                           params={'code': code, 'state': query['state']})
    assert User.objects.count() == 1
    user = User.objects.get()
    assert user.ou == get_default_ou()
    assert user.modified == last_modified
    response = app.get(reverse('account_management'))
    # logout must revoke the token and send the user to the provider's logout
    with utils.check_log(caplog, 'revoked token from OIDC'):
        with oidc_provider_mock(oidc_provider, oidc_provider_jwkset, code):
            response = response.click(href='logout')
    assert 'https://idp.example.com/logout' in response.content
# NOTE(review): this chunk begins mid-function — the enclosing 'def', its loop
# and the 'try:' matching the 'except' below all lie outside the visible
# source.  The indentation below is reconstructed and must be re-checked
# against the full file.
            # Probe for an existing integration pickle in the output folder;
            # on failure, copy it over from the stills directory instead.
            test_jar = pickle.load(open(os.path.join(out_path,fid),'r'))
            print 'found int pickle in output folder'
        except StandardError, e:
            print 'couldnt find int pickle in output folder, copying file'
            # NOTE(review): 'cp' is given "src dst" as ONE argv element here,
            # which cp treats as a single (nonexistent) path — likely a bug;
            # the intent was separate arguments.  Left unchanged.
            subprocess.Popen(['cp','%s %s'%(os.path.join(stills_dir_path,fid),out_path)],stderr=subprocess.PIPE)
            continue
    #'merged',args.protein_name, args.chip_name
    utils.check_for_dir(input_directory,'merged')
    # Run one merge job per dose, throttling via check_log every third dose.
    for i in range(1, doses+1):
        print 'dose'+ str(i)
        dose_dir_path, new_phil_file, data_path, run_no, title, process_file_name, process = set_variables(out_dir,i)
        write_new_phil(original_phil_file, new_phil_file, data_path, run_no,title)
        write_process_file(process_file_name,process)
        utils.run_cluster_job('%s%s'%(process_file_name,'.sh'))
        if i%3 == 0:
            utils.check_log(iteration_lim = 1000, wait_time = 10,log_file_name='merging_out.txt')
    pass

def stills_merge(input_directory, chip_name, protein_name, run_number,
                 output_directory, job_limit, iteration_limit, wait_time):
    # NOTE(review): appears to be an unfinished copy of the merge loop above:
    # 'out_dir' and 'i' are not defined in this scope, so calling this as
    # written raises NameError, and none of the parameters are used yet.
    dose_dir_path, new_phil_file, data_path, run_no, title, process_file_name, process = set_variables(out_dir,i)
    write_new_phil(original_phil_file, new_phil_file, data_path, run_no,title)
    write_process_file(process_file_name,process)
    utils.run_cluster_job('%s%s'%(process_file_name,'.sh'))
    pass

def contiguous_merge():
    # NOTE(review): body continues past the visible chunk; reads globals
    # 'args' and 'dir' (the latter shadows the builtin) — confirm in full file.
    doses=args.doses
    contiguous_limit = args.contiguous_limit
    chip_name = args.chip_name
    merged_dir_path = os.path.join(dir, chip_name)