def login_user(self):
    """Log the configured user onto the IdP node through the OpenID web form.

    Fails the test when the user does not exist, when the login page is not
    the expected ESGF form, or when the final page reports a login error.
    """
    does_user_exist = self.check_user_exists()
    err_msg = "User '{0}' doesn't exist for '{1}'".format(
        config.get(config.ACCOUNT_SECTION, config.USER_NAME_KEY),
        self.idp_server)
    assert (does_user_exist), err_msg
    # Alway start with this method so as to dodge side effects.
    self.reset_browser()
    URL = "https://{0}/login".format(self.idp_server)
    OpenID = "https://{0}/esgf-idp/openid/".format(self.idp_server)
    self.load_page(URL, (By.ID, 'openid_identifier'))
    # Consistency fix: guard the form lookup like the other web tests so a
    # broken page yields a clear failure instead of a raw selenium error.
    try:
        globals.browser.find_element_by_id('openid_identifier').send_keys(
            OpenID)
    except NoSuchElementException:
        assert (
            False
        ), "{0} is corrupted or not compliant with esgf-test-suite".format(
            URL)
    globals.browser.find_element_by_xpath("//input[@value='Login']").click()
    # Wait for either the password form or an error box.
    self.wait_loading('load the login page', (By.ID, 'username'),
                      (By.CLASS_NAME, 'error-box'))
    # After check_user_exists, the page is asking for the user's password.
    globals.browser.find_element_by_id('username')\
        .send_keys(config.get(config.ACCOUNT_SECTION, config.USER_NAME_KEY))
    globals.browser.find_element_by_id('password')\
        .send_keys(config.get(config.ACCOUNT_SECTION, config.USER_PASSWORD_KEY))
    globals.browser.find_element_by_xpath("//input[@value='SUBMIT']").click()
    msg = "login with user '{0}' for '{1}'"\
        .format(config.get(config.ACCOUNT_SECTION, config.USER_NAME_KEY),
                self.idp_server)
    #text = 'Invalid OpenID and/or Password combination'
    self.wait_loading(msg, not_expected_element=(By.ID, 'null.errors'))
def create_user(self):
    """Fill in and submit the web front-end user-creation form.

    Fails the test when the form cannot be found or when submitting it
    leaves an 'errorlist' element on the resulting page.
    """
    # Alway start with this method so as to dodge side effects.
    self.reset_browser()
    URL = "https://{0}/user/add".format(self.idp_server)
    self.load_page(URL, (By.NAME, 'first_name'))
    # Filling the form.  Consistency fix: guard each field lookup like the
    # sibling implementation so a broken page yields a clear failure.
    for element_name in self.elements:
        try:
            globals.browser.find_element_by_name(element_name).send_keys(
                self.elements[element_name])
        except NoSuchElementException:
            assert (
                False
            ), "{0} is corrupted or not compliant with esgf-test-suite".format(
                URL)
    globals.browser.find_element_by_xpath("//input[@value='Submit']").click()
    msg = "create user '{0}' in {1}. May be the captcha is on. "\
          "Check if USE_CAPTCHA in "\
          "/usr/local/cog/cog_config/cog_settings.cfg is set to False and "\
          "restart esg-node.".format(config.get(config.ACCOUNT_SECTION,
                                                config.USER_NAME_KEY),
                                     config.get(config.NODES_SECTION,
                                                config.IDP_NODE_KEY))
    self.wait_loading(msg, not_expected_element=(By.CLASS_NAME, 'errorlist'))
def __init__(self):
    """Cache the IdP server name and the user-creation form values."""
    account = config.ACCOUNT_SECTION
    self.idp_server = config.get(config.NODES_SECTION, config.IDP_NODE_KEY)
    # Map the configured account data onto the names of the web front-end
    # user-creation form fields.
    password = config.get(account, config.USER_PASSWORD_KEY)
    self.elements = dict(
        first_name=config.get(account, config.USER_FIRST_NAME_KEY),
        last_name=config.get(account, config.USER_LAST_NAME_KEY),
        email=config.get(account, config.USER_EMAIL_KEY),
        username=config.get(account, config.USER_NAME_KEY),
        password=password,
        confirm_password=password,
        institution=config.get(account, config.USER_INSTITUTION_KEY),
        city=config.get(account, config.USER_CITY_KEY),
        country=config.get(account, config.USER_COUNTRY_KEY),
    )
def create_user(self):
    """Fill in and submit the web front-end user-creation form."""
    # Always reset the browser first to dodge side effects.
    self.reset_browser()
    url = "https://{0}/user/add".format(self.idp_server)
    self.load_page(url, (By.NAME, 'first_name'))
    # Fill every form field with its configured value.
    for field, value in self.elements.items():
        try:
            globals.browser.find_element_by_name(field).send_keys(value)
        except NoSuchElementException:
            assert False, \
                "{0} is corrupted or not compliant with esgf-test-suite".format(url)
    globals.browser.find_element_by_xpath("//input[@value='Submit']").click()
    msg = ("create user '{0}' in {1}. May be the captcha is on. "
           "Check if USE_CAPTCHA in "
           "/usr/local/cog/cog_config/cog_settings.cfg is set to False and "
           "restart esg-node.").format(
               config.get(config.ACCOUNT_SECTION, config.USER_NAME_KEY),
               config.get(config.NODES_SECTION, config.IDP_NODE_KEY))
    # An 'errorlist' element on the next page means the creation failed.
    self.wait_loading(msg, not_expected_element=(By.CLASS_NAME, 'errorlist'))
def __init__(self):
    """Set up a browser-based test using the SLCS admin credentials."""
    AbstractBrowserBasedTest.__init__(self)
    slcs = config.SLCS_SECTION
    self.idp_node = config.get(config.NODES_SECTION, config.IDP_NODE_KEY)
    self.username = config.get(slcs, config.ADMIN_USERNAME_KEY)
    self.password = config.get(slcs, config.ADMIN_PASSWORD_KEY)
def __init__(self):
    """Initialise the browser and myproxy bases, then cache the Thredds
    endpoints and the node/account configuration values."""
    AbstractBrowserBasedTest.__init__(self)
    AbstractMyproxyBasedTest.__init__(self)
    self._tu = cat.ThreddsUtils()
    self._endpoints = self._tu.get_endpoints()
    nodes = config.NODES_SECTION
    account = config.ACCOUNT_SECTION
    self.data_node = config.get(nodes, config.DATA_NODE_KEY)
    self.idp_node = config.get(nodes, config.IDP_NODE_KEY)
    self.username = config.get(account, config.USER_NAME_KEY)
    self.password = config.get(account, config.USER_PASSWORD_KEY)
def __init__(self):
    """Initialise the browser and myproxy base classes and cache the
    endpoints plus the node/account configuration values."""
    AbstractBrowserBasedTest.__init__(self)
    AbstractMyproxyBasedTest.__init__(self)
    self._tu = cat.ThreddsUtils()
    # Endpoint computation happens once, up front.
    self._endpoints = self._tu.get_endpoints()
    for attr, section, key in (
            ('data_node', config.NODES_SECTION, config.DATA_NODE_KEY),
            ('idp_node', config.NODES_SECTION, config.IDP_NODE_KEY),
            ('username', config.ACCOUNT_SECTION, config.USER_NAME_KEY),
            ('password', config.ACCOUNT_SECTION, config.USER_PASSWORD_KEY)):
        setattr(self, attr, config.get(section, key))
def get_credentials(self):
    """Fetch the user's X509 credentials from the myproxy server.

    WORKAROUND for an SSL/MyProxyClient problem: shells out to the
    ``myproxy-logon`` command instead of using the MyProxyClient API, then
    parses the PEM file it wrote into
    ``self.credentials = (user_cert, rsa_key, ca_cert)``.
    """
    username = config.get(config.ACCOUNT_SECTION, config.USER_NAME_KEY)
    password = config.get(config.ACCOUNT_SECTION, config.USER_PASSWORD_KEY)
    # -S reads the password from stdin; -b bootstraps the trustroots.
    command = [
        'myproxy-logon', '-S', '-T', '-s', self.idp_addr, '-p', self.port,
        '-l', username, '-o', self.credsfile, '-b'
    ]
    process = subprocess.run(command,
                             input=password.encode(),
                             stdout=subprocess.PIPE,
                             stderr=subprocess.STDOUT,
                             timeout=60)
    # NOTE: process.stderr is always None because stderr is folded into
    # stdout above; the whole command output is in process.stdout.
    assert (
        process.returncode == 0
    ), "fail to get the credentials for {0} (stdout: {1} ; stderr: {2})".format(
        self.idp_addr, process.stdout, process.stderr)
    with open(self.credsfile, 'r') as file:
        file_content = file.read()
    certs = re.findall('(-+BEGIN CERTIFICATE-+.+?-+END CERTIFICATE-+\\s+)',
                       file_content, re.DOTALL)
    key = re.findall(
        '(-+BEGIN RSA PRIVATE KEY-+.+?-+END RSA PRIVATE KEY-+\\s+)',
        file_content, re.DOTALL)
    # (user certificate, private key, CA certificate)
    self.credentials = (certs[0], key[0], certs[1])
def test_dl_gridftp(self):
    """Download a file through GridFTP with globus-url-copy.

    Pings the GridFTP port first, then shells out to globus-url-copy with
    the proxy credentials exported in the environment.
    """
    self.gridftp_node = config.get(config.NODES_SECTION,
                                   config.GRIDFTP_NODE_KEY)
    gridftp_port = naming.GRIDFTP_PORT_NUMBER
    is_enable = networking.ping_tcp_port(self.gridftp_node, gridftp_port)
    err_msg = "gridftp server not found at {0} port {1} (reason: {2})"\
        .format(self.gridftp_node, gridftp_port, is_enable[2])
    assert (is_enable[0]), err_msg
    path = self._get_endpoint_path('GridFTP')
    url = "gsiftp://{0}:{1}//{2}".format(self.gridftp_node, gridftp_port,
                                         path)
    # Dropped the redundant 2to3 double parentheses around the argument.
    print("url downloaded: {0}".format(url))
    # globus-url-copy reads the proxy credentials from these variables.
    os.environ['X509_USER_PROXY'] = globals.myproxy_utils.credsfile
    os.environ['X509_CERT_DIR'] = globals.myproxy_utils.cacertdir
    command = [
        'globus-url-copy', '-b', url, TestDataDownload._DOWNLOADED_FILE_PATH
    ]
    process = subprocess.run(command,
                             stdout=subprocess.PIPE,
                             stderr=subprocess.STDOUT,
                             timeout=60)
    assert (
        process.returncode == 0
    ), "fail to download by GridFTP (stdout: {0} ; stderr: {1})".format(
        process.stdout, process.stderr)
def check_user_exists(self):
    """Return True when the configured user is known to the IdP."""
    # Always reset the browser first to dodge side effects.
    self.reset_browser()
    url = "https://{0}/login".format(self.idp_server)
    openid = "https://{0}/esgf-idp/openid/{1}".format(
        self.idp_server,
        config.get(config.ACCOUNT_SECTION, config.USER_NAME_KEY))
    self.load_page(url, (By.ID, 'openid_identifier'))
    try:
        globals.browser.find_element_by_id('openid_identifier')\
            .send_keys(openid)
    except NoSuchElementException:
        assert False, \
            "{0} is corrupted or not compliant with esgf-test-suite".format(url)
    globals.browser.find_element_by_xpath("//input[@value='Login']").click()
    self.wait_loading('load the login confirmation')
    # An 'error-box' element means the IdP rejected the OpenID (e.g.
    # 'OpenID Discovery Error: unrecognized by the Identity Provider').
    try:
        globals.browser.find_element_by_class_name('error-box')
        return False
    except NoSuchElementException:
        return True
def __init__(self):
    """Select the docker or classic front-end list and test the IdP node."""
    test_type = config.get(config.TEST_SECTION, config.TYPE_KEY).lower()
    front_ends = (TestWebFrontEnds._docker_front_ends
                  if test_type == config.DOCKER_TEST_SET_NAME
                  else TestWebFrontEnds._classic_front_ends)
    AbstractWebFrontEndTestClass.__init__(self, front_ends,
                                          config.IDP_NODE_KEY)
def __init__(self):
    """Select the docker or installer front-end list and test the index node."""
    test_type = config.get(config.TEST_SECTION, config.TYPE_KEY).lower()
    front_ends = (TestWebFrontEnds._front_ends_docker
                  if test_type == config.DOCKER_TEST_SET_NAME
                  else TestWebFrontEnds._front_ends_installer)
    AbstractWebFrontEndTestClass.__init__(self, front_ends,
                                          config.INDEX_NODE_KEY)
def test_cog_create_user(self):
    """Create the configured user, failing when it already exists."""
    if self.usr.check_user_exists():
        assert False, "user '{0}' already exists".format(
            config.get(config.ACCOUNT_SECTION, config.USER_NAME_KEY))
    # Create user
    self.usr.create_user()
def __get_arg(self):
    """Return the browser visibility flag derived from the configuration."""
    headless = config.get(config.BROWSER_SECTION,
                          config.BROWSER_IS_HEADLESS_KEY).lower()
    return '-headless' if headless == naming.TRUE else '-foreground'
def login_user(self):
    """Log the configured user onto the IdP node through the OpenID web form.

    Fails the test when the user does not exist, when the login page is not
    the expected ESGF form, or when the final page reports a login error.
    """
    does_user_exist = self.check_user_exists()
    err_msg = "User '{0}' doesn't exist for '{1}'".format(
        config.get(config.ACCOUNT_SECTION, config.USER_NAME_KEY),
        self.idp_server)
    assert (does_user_exist), err_msg
    # Alway start with this method so as to dodge side effects.
    self.reset_browser()
    URL = "https://{0}/login".format(self.idp_server)
    OpenID = "https://{0}/esgf-idp/openid/".format(self.idp_server)
    self.load_page(URL, (By.ID, 'openid_identifier'))
    try:
        globals.browser.find_element_by_id('openid_identifier').send_keys(
            OpenID)
    except NoSuchElementException:
        # The expected OpenID field is missing: wrong or broken page.
        assert (
            False
        ), "{0} is corrupted or not compliant with esgf-test-suite".format(
            URL)
    globals.browser.find_element_by_xpath(
        "//input[@value='Login']").click()
    # Wait for either the password form or an error box.
    self.wait_loading('load the login page', (By.ID, 'username'),
                      (By.CLASS_NAME, 'error-box'))
    # After check_user_exists, the page is asking for the user's password.
    globals.browser.find_element_by_id('username')\
        .send_keys(config.get(config.ACCOUNT_SECTION, config.USER_NAME_KEY))
    globals.browser.find_element_by_id('password')\
        .send_keys(config.get(config.ACCOUNT_SECTION, config.USER_PASSWORD_KEY))
    globals.browser.find_element_by_xpath(
        "//input[@value='SUBMIT']").click()
    msg = "login with user '{0}' for '{1}'"\
        .format(config.get(config.ACCOUNT_SECTION, config.USER_NAME_KEY),
                self.idp_server)
    #text = 'Invalid OpenID and/or Password combination'
    # A 'null.errors' element on the next page means the login failed.
    self.wait_loading(msg, not_expected_element=(By.ID, 'null.errors'))
def __get_arg(self):
    """Map the configured headless flag onto a command-line argument."""
    configured = config.get(config.BROWSER_SECTION,
                            config.BROWSER_IS_HEADLESS_KEY)
    if configured.lower() == naming.TRUE:
        result = '-headless'
    else:
        result = '-foreground'
    return result
def __init__(self):
    """Select the docker or classic front-end list and test the data node."""
    test_type = config.get(config.TEST_SECTION, config.TYPE_KEY).lower()
    if test_type == config.DOCKER_TEST_SET_NAME:
        selected = TestWebFrontEnds._docker_front_ends
    else:
        selected = TestWebFrontEnds._classic_front_ends
    AbstractWebFrontEndTestClass.__init__(self, selected,
                                          config.DATA_NODE_KEY)
def test_cog_root_login(self):
    """Log onto the CoG admin page of the index node with the admin account."""
    # Alway start with this method so as to dodge side effects.
    self.reset_browser()
    index_node = config.get(config.NODES_SECTION, config.INDEX_NODE_KEY)
    url = "https://{0}/login2".format(index_node)
    self.load_page(url)
    # Consistency fix: guard the form lookup like the sibling test so a
    # broken page yields a clear failure instead of a raw selenium error.
    try:
        globals.browser.find_element_by_id('id_username')\
            .send_keys(config.get(config.COG_SECTION, config.ADMIN_USERNAME_KEY))
    except NoSuchElementException:
        assert (False), \
            "{0} is corrupted or not compliant with esgf-test-suite".format(url)
    globals.browser.find_element_by_id('id_password')\
        .send_keys(config.get(config.COG_SECTION, config.ADMIN_PASSWORD_KEY))
    globals.browser.find_element_by_xpath("//input[@value='Login']").click()
    msg = "log onto the Cog admin page of '{0}'"\
        .format(index_node)
    # An 'errornote' element on the next page means the login failed.
    self.wait_loading(msg, not_expected_element=(By.CLASS_NAME, 'errornote'))
def __init__(self):
    """Store the IdP server name and the user-creation form values."""
    self.idp_server = config.get(config.NODES_SECTION, config.IDP_NODE_KEY)

    def account(key):
        # Shorthand for reading one value from the account section.
        return config.get(config.ACCOUNT_SECTION, key)

    # Keys are the names of the web front-end user-creation form fields.
    self.elements = {
        'first_name': account(config.USER_FIRST_NAME_KEY),
        'last_name': account(config.USER_LAST_NAME_KEY),
        'email': account(config.USER_EMAIL_KEY),
        'username': account(config.USER_NAME_KEY),
        'password': account(config.USER_PASSWORD_KEY),
        'confirm_password': account(config.USER_PASSWORD_KEY),
        'institution': account(config.USER_INSTITUTION_KEY),
        'city': account(config.USER_CITY_KEY),
        'country': account(config.USER_COUNTRY_KEY),
    }
def get_credentials(self):
    """Fetch the user's X509 credentials from the myproxy server.

    WORKAROUND for an SSL/MyProxyClient problem: shells out to the
    ``myproxy-logon`` command instead of using the MyProxyClient API, then
    parses the PEM file it wrote into
    ``self.credentials = (user_cert, rsa_key, ca_cert)``.
    """
    username = config.get(config.ACCOUNT_SECTION, config.USER_NAME_KEY)
    password = config.get(config.ACCOUNT_SECTION, config.USER_PASSWORD_KEY)
    # -S reads the password from stdin; -b bootstraps the trustroots.
    command = ['myproxy-logon', '-S', '-T', '-s', self.idp_addr,
               '-p', self.port, '-l', username, '-o', self.credsfile, '-b']
    process = subprocess.Popen(command,
                               stdin=subprocess.PIPE,
                               stdout=subprocess.PIPE,
                               stderr=subprocess.STDOUT,
                               shell=False)
    # BUG FIX: the stdin pipe is binary on Python 3, so the password must
    # be encoded before being written to myproxy-logon.
    stdout, stderr = process.communicate(password.encode())
    # communicate() already waits for termination; the redundant wait()
    # was dropped.  stderr is always None (redirected to stdout above).
    assert (process.returncode == 0), \
        "fail to get the credentials for {0} (stdout: {1} ; stderr: {2})".format(
            self.idp_addr, stdout, stderr)
    with open(self.credsfile, 'r') as file:
        file_content = file.read()
    certs = re.findall('(-+BEGIN CERTIFICATE-+.+?-+END CERTIFICATE-+\\s+)',
                       file_content, re.DOTALL)
    key = re.findall('(-+BEGIN RSA PRIVATE KEY-+.+?-+END RSA PRIVATE KEY-+\\s+)',
                     file_content, re.DOTALL)
    # (user certificate, private key, CA certificate)
    self.credentials = (certs[0], key[0], certs[1])
def test_cog_root_login(self):
    """Log onto the CoG admin page of the index node with the admin account."""
    # Alway start with this method so as to dodge side effects.
    self.reset_browser()
    index_node = config.get(config.NODES_SECTION, config.INDEX_NODE_KEY)
    url = "https://{0}/login2".format(index_node)
    self.load_page(url)
    try:
        globals.browser.find_element_by_id('id_username')\
            .send_keys(config.get(config.COG_SECTION, config.ADMIN_USERNAME_KEY))
    except NoSuchElementException:
        # The expected login form is missing: wrong or broken page.
        assert(False), "{0} is corrupted or not compliant with esgf-test-suite".format(url)
    globals.browser.find_element_by_id('id_password')\
        .send_keys(config.get(config.COG_SECTION, config.ADMIN_PASSWORD_KEY))
    globals.browser.find_element_by_xpath("//input[@value='Login']").click()
    msg = "log onto the Cog admin page of '{0}'".format(index_node)
    # An 'errornote' element on the next page means the login failed.
    self.wait_loading(msg, not_expected_element=(By.CLASS_NAME, 'errornote'))
def __init__(self):
    """Prepare the myproxy credential/trustroot file locations.

    Also deletes any credential/trustroot files left over from a previous
    run (for example, after a debug session).
    """
    self.cacertdir = os.path.expanduser(naming.CA_CERT_DIR_PATH)
    cert_parent_dir_path = os.path.dirname(self.cacertdir)
    # WORKAROUND SSL-MYPROXYCLIENT PROBLEM: ensure the parent directory of
    # the CA certificate dir exists (exist_ok replaces the `False ==
    # os.path.exists` check with the idiomatic, race-free form).
    os.makedirs(cert_parent_dir_path, exist_ok=True)
    self.credsfile = os.path.expanduser(naming.CREDENTIALS_FILE_PATH)
    # NOTE(review): encode() yields bytes on Python 3 — kept as-is for
    # compatibility, but confirm downstream users expect bytes here.
    self.idp_addr = config.get(config.NODES_SECTION,
                               config.IDP_NODE_KEY).encode('ascii', 'replace')
    # self.myproxy = MyProxyClient(hostname=self.idp_addr)  # WORKAROUND SSL-MYPROXYCLIENT PROBLEM
    # self.myproxy._setCACertDir(self.cacertdir)            # WORKAROUND SSL-MYPROXYCLIENT PROBLEM
    self.credentials = None
    self.trustRoots = None
    self.port = '7512'
    # Reset the eventually files (for example, after a debugg session).
    self.delete_credentials()
    self.delete_trustroots()
def test_myproxy_get_trustroots(self):
    """Check that the fetched trustroots parse as X509 CA certificates."""
    # Test output from get_trustroots
    if(globals.myproxy_utils.trustRoots):
        err_msg = "unsupported trusted root certificate format '{0}'".format(globals.myproxy_utils.trustRoots)
        assert(isinstance(globals.myproxy_utils.trustRoots, dict)), err_msg
        # Keys are file names; values are the corresponding file contents.
        for fileName, fileContents in globals.myproxy_utils.trustRoots.items():
            if fileName.endswith('.0'):
                # test parsing certificate ('.0' files are the CA certs)
                cert = crypto.load_certificate(crypto.FILETYPE_PEM, fileContents)
                assert(isinstance(cert, crypto.X509)), "unsupported certificate format '{0}'".format(cert)
                subj = cert.get_subject()
                err_msg = "fail to get the trusted root certificates for '{0}'".format(config.get(config.NODES_SECTION, config.IDP_NODE_KEY))
                assert(subj), err_msg
    else:
        # Nothing was fetched: skip instead of failing.
        raise SkipTest("unable to check the trustroots")
def test_myproxy_get_trustroots(self):
    """Check that the fetched trustroots parse as X509 CA certificates."""
    # Test output from get_trustroots
    if globals.myproxy_utils.trustRoots:
        err_msg = "unsupported trusted root certificate format '{0}'".format(
            globals.myproxy_utils.trustRoots)
        assert isinstance(globals.myproxy_utils.trustRoots, dict), err_msg
        # The list() wrapper around items() (a 2to3 artifact) was removed:
        # dict views iterate directly on Python 3.
        for fileName, fileContents in globals.myproxy_utils.trustRoots.items():
            if fileName.endswith('.0'):
                # test parsing certificate ('.0' files are the CA certs)
                cert = crypto.load_certificate(crypto.FILETYPE_PEM,
                                               fileContents)
                assert isinstance(cert, crypto.X509), \
                    "unsupported certificate format '{0}'".format(cert)
                subj = cert.get_subject()
                err_msg = "fail to get the trusted root certificates for '{0}'".format(
                    config.get(config.NODES_SECTION, config.IDP_NODE_KEY))
                assert subj, err_msg
    else:
        # Nothing was fetched: skip instead of failing.
        raise SkipTest("unable to check the trustroots")
def __init__(self):
    """Set up the myproxy file locations and clean up stale files."""
    self.cacertdir = os.path.expanduser(naming.CA_CERT_DIR_PATH)
    parent_dir = os.path.dirname(self.cacertdir)
    # WORKAROUND SSL-MYPROXYCLIENT PROBLEM
    if not os.path.exists(parent_dir):
        os.makedirs(parent_dir)
    self.credsfile = os.path.expanduser(naming.CREDENTIALS_FILE_PATH)
    idp_name = config.get(config.NODES_SECTION, config.IDP_NODE_KEY)
    self.idp_addr = idp_name.encode('ascii', 'replace')
    # self.myproxy = MyProxyClient(hostname=self.idp_addr)  # WORKAROUND SSL-MYPROXYCLIENT PROBLEM
    # self.myproxy._setCACertDir(self.cacertdir)            # WORKAROUND SSL-MYPROXYCLIENT PROBLEM
    self.credentials = None
    self.trustRoots = None
    self.port = '7512'
    # Remove leftover files (for example, after a debug session).
    self.delete_credentials()
    self.delete_trustroots()
def check_user_exists(self):
    """Return True when the configured user is known to the IdP."""
    # Alway start with this method so as to dodge side effects.
    self.reset_browser()
    URL = "https://{0}/login".format(self.idp_server)
    OpenID = "https://{0}/esgf-idp/openid/{1}".format(
        self.idp_server,
        config.get(config.ACCOUNT_SECTION, config.USER_NAME_KEY))
    self.load_page(URL, (By.ID, 'openid_identifier'))
    # Consistency fix: guard the form lookup like the sibling version so a
    # broken page yields a clear failure instead of a raw selenium error.
    try:
        globals.browser.find_element_by_id('openid_identifier').send_keys(
            OpenID)
    except NoSuchElementException:
        assert (
            False
        ), "{0} is corrupted or not compliant with esgf-test-suite".format(
            URL)
    globals.browser.find_element_by_xpath("//input[@value='Login']").click()
    self.wait_loading('load the login confirmation')
    #text = 'OpenID Discovery Error: unrecognized by the Identity Provider'
    # An 'error-box' element means the IdP did not recognise the OpenID.
    try:
        globals.browser.find_element_by_class_name('error-box')
        does_user_exist = False
    except NoSuchElementException:
        does_user_exist = True
    return does_user_exist
def __init__(self, front_ends, node_key_name):
    """Remember the front-end list and resolve the node name to test."""
    requests.packages.urllib3.disable_warnings()
    self._front_ends = front_ends
    self._node_name = config.get(config.NODES_SECTION, node_key_name)
def train(configuration):
    """Run the train/test loop described by *configuration*.

    Configuration keys used: 'model_path', 'enable_log', 'use_gpu',
    'network', 'phases', 'loaders', 'loss', 'learner', 'max_epoch_val',
    'acc_threshold'.

    Returns (train_loss, train_acc, test_loss, test_acc) with one entry
    per epoch, saving a checkpoint every 10 epochs and the best model
    (lowest test loss) under configuration['model_path'].
    """
    # Setup directories for checkpoints and tensorboard logs
    createdirectories(configuration.get('model_path'), exist_ok=True)
    createdirectories(pjoin(configuration.get('model_path'), 'logs'),
                      exist_ok=True)
    createdirectories(pjoin(configuration.get('model_path'), 'checkpoints'),
                      exist_ok=True)
    # BUG FIX: logger was only bound when 'enable_log' was set but was used
    # unconditionally below, raising NameError when logging was disabled.
    logger = None
    if configuration.get('enable_log'):
        logger = Logger(pjoin(configuration.get('model_path'), 'logs'))
    # Choose target architecture
    if check_device_availability() and configuration.get('use_gpu'):
        target_arch = Arch('cuda:0')
    else:
        target_arch = Arch('cpu')
    network = configuration.get('network').to(target_arch)
    phases = configuration.get('phases')
    loaders = configuration.get('loaders')
    func_loss = configuration.get('loss')
    learner = configuration.get('learner')
    best_model_wts = Copier(network.state_dict())
    best_loss_val = 1e5
    train_loss = []
    test_loss = []
    train_acc = []
    test_acc = []
    # Main Epoch Loop
    for epoch_num in range(configuration.get('max_epoch_val')):
        for phase in phases:
            if phase == 'train':
                network.train()
            else:
                network.eval()
            if not loaders.get(phase):
                print(f'Not able to find loaders for phase: {phase}')
                continue
            loss_val = 0.0
            corrects_val = 0
            items_val = 0
            for idx, (images, targets) in tqdm(enumerate(loaders[phase])):
                images = images.to(target_arch)
                targets = targets.to(target_arch)
                learner.zero_grad()
                # NOTE(review): disable_backprop is entered with True during
                # 'train' — presumably a set_grad_enabled-style wrapper;
                # confirm its semantics against its definition.
                with disable_backprop(phase == 'train'):
                    predictions = network(images)
                    loss_ = func_loss(predictions, targets)
                # Add an L2 penalty over all parameters to the loss.
                if configuration.get('use_gpu'):
                    l2_regularization = Tensor(0.).cuda()
                else:
                    l2_regularization = Tensor(0.)
                for param in network.parameters():
                    l2_regularization += Norm(param, 2)**2
                loss_ += 1e-3 * l2_regularization
                if phase == 'train':
                    loss_.backward()
                    learner.step()
                loss_val += loss_.item()
                items_val += images.size(0)
                _, class_preds = Max(predictions, 1)
                corrects_val += (targets == class_preds).sum().item()
            epoch_loss = loss_val / items_val
            epoch_acc = corrects_val / items_val
            if phase == 'train':
                train_loss.append(epoch_loss)
                train_acc.append(epoch_acc)
            else:
                test_loss.append(epoch_loss)
                test_acc.append(epoch_acc)
            # Tensorboard logging (only when a logger was created)
            if logger is not None:
                logger.add_scalar(phase + '_loss', epoch_loss,
                                  global_step=epoch_num)
                logger.add_scalar(phase + '_acc', epoch_acc,
                                  global_step=epoch_num)
            print(
                f'Epoch {epoch_num} [{phase}]: Loss = {epoch_loss}, Accuracy = {epoch_acc}'
            )
            # Retain best weights copy for final save based on test set
            # performance
            if phase == 'test':
                if epoch_loss < best_loss_val:
                    best_loss_val = epoch_loss
                    best_model_wts = Copier(network.state_dict())
                if epoch_acc > configuration.get('acc_threshold'):
                    # Early stop: bump epoch_num past the limit so the
                    # check after the phase loop triggers the outer break.
                    epoch_num = configuration.get('max_epoch_val') + 1
                    break
        # Save checkpoint every 10 epochs
        if (epoch_num + 1) % 10 == 0:
            Saver(
                network.state_dict(),
                pjoin(configuration.get('model_path'),
                      'checkpoints/model_' + str(epoch_num) + '.pth'))
        # Stop training if reached limit (set by the early stop above)
        if epoch_num > configuration.get('max_epoch_val'):
            break
    # Save final model
    network.load_state_dict(best_model_wts)
    Saver(
        network.state_dict(),
        pjoin(configuration.get('model_path'), 'checkpoints/model_final.pth'))
    return train_loss, train_acc, test_loss, test_acc
def problem1c(train_dataset, test_dataset, new_train_dataset,
              new_test_dataset):
    """Problem 1c: train a LeNet-5 on the first dataset pair and a custom
    classifier on the second pair, then dump and plot both result sets.

    Results are written to 'problem_1c.json' and plotted via drawGraphs1c.
    """
    # lr = 1e-3, decay = 0.001, batch size = 64
    configuration['batch_size'] = 64
    train_iterator = DataGenerator(
        train_dataset,
        shuffle=True,
        batch_size=configuration.get('batch_size'),
        num_workers=configuration.get('num_workers'))
    test_iterator = DataGenerator(test_dataset,
                                  shuffle=False,
                                  batch_size=configuration.get('batch_size'),
                                  num_workers=configuration.get('num_workers'))
    new_train_iterator = DataGenerator(
        new_train_dataset,
        shuffle=True,
        batch_size=configuration.get('batch_size'),
        num_workers=configuration.get('num_workers'))
    new_test_iterator = DataGenerator(
        new_test_dataset,
        shuffle=False,
        batch_size=configuration.get('batch_size'),
        num_workers=configuration.get('num_workers'))
    # Train LeNet on positives and test on negatives
    network = ClassifierLN5(
        in_channels=1,
        number_of_classes=configuration.get('number_of_classes'),
        init_method='uniform')
    model_configuration = {
        'enable_log': configuration.get('enable_log'),
        'model_path': pjoin(configuration.get('model_path'), 'mnist_w_neg'),
        'network': network,
        'use_gpu': configuration.get('use_gpu'),
        'phases': ['train', 'test'],
        'loaders': {
            'train': train_iterator,
            'test': test_iterator
        },
        'loss': CELoss(),
        'learner': AdamOptimizer(network.parameters(), lr=1e-3,
                                 weight_decay=0),
        'scheduler': None,
        'max_epoch_val': 50,
        'initialization_method': configuration.get('initialization_method'),
        'acc_threshold': 0.99,
    }
    train_loss, train_acc, test_loss, test_acc = train(model_configuration)
    # Keep the LeNet curves for the combined results dump below.
    lenet5 = [train_loss, train_acc, test_loss, test_acc]
    # Custom classifier training
    network = CustomClassifier()
    model_configuration = {
        'enable_log': configuration.get('enable_log'),
        'model_path': pjoin(configuration.get('model_path'), 'mnist_new',
                            'config3'),
        'network': network,
        'use_gpu': configuration.get('use_gpu'),
        'phases': ['train', 'test'],
        'loaders': {
            'train': new_train_iterator,
            'test': new_test_iterator
        },
        'loss': CELoss(),
        'learner': AdamOptimizer(network.parameters(), lr=1e-3,
                                 weight_decay=0.001),
        'scheduler': None,
        'max_epoch_val': configuration.get('max_epoch_val'),
        'initialization_method': configuration.get('initialization_method'),
        'acc_threshold': 0.995
    }
    # train() returns (train_loss, train_acc, test_loss, test_acc).
    graphs = train(model_configuration)
    results = {'custom_classifier': graphs, 'lenet': lenet5}
    # Persist the raw curves, then plot.
    with open('problem_1c.json', 'w') as f:
        f.write(json.dumps(results))
    drawGraphs1c(results)
def __init__(self):
    """Create the worker queues and resolve the data node name."""
    self.data_node = config.get(config.NODES_SECTION, config.DATA_NODE_KEY)
    self.in_queue = multiprocessing.JoinableQueue()
    self.out_queue = multiprocessing.Queue()
def problem1b(train_dataset, test_dataset, dataset_name, acc_threshold):
    """Problem 1b: train LeNet-5 under five hyper-parameter settings.

    Each setting is run five times; the per-run learning curves plus the
    mean/std of the best test accuracies are written to
    ``<dataset_name>_results.json`` and plotted with drawGraphs1b.

    BUG FIX: the original computed the config5 statistics from the config4
    accuracies (``test_acc_config5 = np.array(test_acc_config4)``); the
    five-fold copy/paste is also collapsed into helpers.
    """

    def _make_loaders(batch_size):
        # Side effect kept from the original: every setting updates the
        # shared configuration's batch size before building the loaders.
        configuration['batch_size'] = batch_size
        train_it = DataGenerator(train_dataset,
                                 shuffle=True,
                                 batch_size=configuration.get('batch_size'),
                                 num_workers=configuration.get('num_workers'))
        test_it = DataGenerator(test_dataset,
                                shuffle=False,
                                batch_size=configuration.get('batch_size'),
                                num_workers=configuration.get('num_workers'))
        return train_it, test_it

    def _run_setting(dir_name, label, batch_size, init_method, make_learner,
                     max_epoch_val):
        # Run one hyper-parameter setting five times; return the curves and
        # the mean/std of the best test accuracy across the runs.
        train_it, test_it = _make_loaders(batch_size)
        graphs = []
        best_accs = []
        for _ in range(5):
            network = ClassifierLN5(
                in_channels=1,
                number_of_classes=configuration.get('number_of_classes'),
                init_method=init_method)
            model_configuration = {
                'enable_log': configuration.get('enable_log'),
                'model_path': pjoin(configuration.get('model_path'),
                                    dataset_name, dir_name),
                'network': network,
                'use_gpu': configuration.get('use_gpu'),
                'phases': ['train', 'test'],
                'loaders': {
                    'train': train_it,
                    'test': test_it
                },
                'loss': CELoss(),
                'learner': make_learner(network),
                'scheduler': None,
                'max_epoch_val': max_epoch_val,
                'initialization_method':
                configuration.get('initialization_method'),
                'acc_threshold': acc_threshold,
            }
            train_loss, train_acc, test_loss, test_acc = train(
                model_configuration)
            graphs.append({
                'train_loss': train_loss,
                'train_acc': train_acc,
                'test_loss': test_loss,
                'test_acc': test_acc
            })
            max_test_acc = max(test_acc)
            print('Max test accuracy [{0}]:'.format(label), max_test_acc)
            best_accs.append(max_test_acc)
        best_accs = np.array(best_accs)
        print('Mean test accuracy [{0}]:'.format(label), np.mean(best_accs))
        print('Std dev test accuracy [{0}]:'.format(label), np.std(best_accs))
        return {
            'graphs': graphs,
            'test_acc_mean': np.mean(best_accs),
            'test_acc_std': np.std(best_accs),
        }

    results = {}
    # Setting 1: uniform init, batch 64, lr 1e-3, no decay, 50 epochs.
    # CIFAR uses RMSprop here; MNIST/Fashion-MNIST use Adam.
    if dataset_name == 'cifar':
        make_learner1 = lambda net: RMSOptimizer(
            net.parameters(), lr=1e-3, weight_decay=0)
    else:
        make_learner1 = lambda net: AdamOptimizer(
            net.parameters(), lr=1e-3, weight_decay=0)
    results['config1'] = _run_setting('config1', 'Config 1', 64, 'uniform',
                                      make_learner1, 50)
    # Setting 2: xavier_normal init, batch 256, Adam lr 1e-2, no decay.
    results['config2'] = _run_setting(
        'config2', 'Config 2', 256, 'xavier_normal',
        lambda net: AdamOptimizer(net.parameters(), lr=1e-2, weight_decay=0),
        configuration.get('max_epoch_val'))
    # Setting 3: he_normal init, batch 64, Adam lr 1e-3, decay 0.01.
    results['config3'] = _run_setting(
        'config3', 'Config 3', 64, 'he_normal',
        lambda net: AdamOptimizer(
            net.parameters(), lr=1e-3, weight_decay=0.01),
        configuration.get('max_epoch_val'))
    # Setting 4: he_normal init, batch 64, RMSprop lr 1e-3, decay 0.01.
    results['config4'] = _run_setting(
        'config4', 'Config 4', 64, 'he_normal',
        lambda net: RMSOptimizer(
            net.parameters(), lr=1e-3, weight_decay=0.01),
        configuration.get('max_epoch_val'))
    # Setting 5: he_uniform init, batch 128, SGD lr 1e-3, decay 0.001.
    results['config5'] = _run_setting(
        'config5', 'Config 5', 128, 'he_uniform',
        lambda net: SGDOptimizer(
            net.parameters(), lr=1e-3, weight_decay=0.001),
        configuration.get('max_epoch_val'))
    with open(dataset_name + '_results.json', 'w') as f:
        json.dump(results, f)
    drawGraphs1b(results, dataset_name)
import utils.naming as naming
import utils.configuration as config

# Global debug flag, read once at import time from the system section of the
# configuration. NOTE(review): assumes naming.TRUE is the lowercase string
# form of "true" — confirm against utils.naming.
is_debug = config.get(config.SYSTEM_SECTION, config.IS_DEBUG_KEY).lower() == naming.TRUE

# At the beginning of a run, nosetest instantiates the class of the tests. But it
# instantiates the same class as many times as this class has tests. Abstract
# classes, in the utils package, manage this problem. These global variables are
# centralized so as to optimize the test suite.

# Create only one instance of the browser so as to optimize the wall clock time.
# As the tests share the same browser instance, AbstractBrowserBasedTest class
# offers a convenient way to create a new session of the browser instance
# (see reset_browser).
browser = None

# Create only one instance of the myproxy utils so as to optimize the wall clock time.
myproxy_utils = None