def __create_condor_config__(self): """ This first updates the primary condor_config with either: a. the gwms condor_config file if a tarball install b. the config.d directory containing the gwms config files Then it creates the individual condor config files. """ #if len(self.colocated_services) > 0: # return # we've already updated this common.logit("... updating: %s" % self.condor_config()) common.logit(" to point to GWMS config files directory") cfg_data = """ ######################################################## # Using local configuration file directory below ######################################################## LOCAL_CONFIG_FILE = LOCAL_CONFIG_DIR = %s """ % (self.local_config_dir()) common.write_file("a",0644,self.condor_config(),cfg_data,SILENT=False) stdout = glideinwms.lib.subprocessSupport.iexe_cmd("tail -5 %s" % self.condor_config()) common.logit(stdout) common.logit("\nCreating GWMS condor_config files in:") common.logit("%s" % self.local_config_dir()) common.make_directory(self.local_config_dir(),self.username(),0755) types = self.condor_config_data.keys() types.sort() for type in types: filename = "%s/%s.config" % (self.local_config_dir(),type) common.logit(" %s" % os.path.basename(filename)) common.write_file("w",0644,filename,self.condor_config_data[type],SILENT=True) self.__create_secondary_schedd_dirs__()
def validate_web_location(self): dir = self.web_location() common.logit("... validating web_location: %s" % dir) common.make_directory(dir,self.username(),0755) for sdir_name in ("stage","monitor"): sdir_fullpath=os.path.join(self.web_location(),sdir_name) common.make_directory(sdir_fullpath,self.username(),0755)
def create_factory_client_dirs(self, owner, perm):
    """ Validate/create the factory client log and proxy directories.

        If a directory already exists and is non-empty, the operator is
        asked to confirm before its contents are removed.
        owner/perm are passed straight through to common.make_directory.
    """
    dirs = [self.factory.client_log_dir(), self.factory.client_proxy_dir()]
    # 'client_dir' instead of 'dir' (builtin shadowing); the two nested
    # ifs are merged and the len(...) > 0 test uses truthiness.
    for client_dir in dirs:
        common.logit("... checking factory client directory: %s" % client_dir)
        if os.path.isdir(client_dir) and os.listdir(client_dir):
            common.ask_continue("This directory must be empty. Can we delete the contents")
            common.remove_dir_contents(client_dir)
        common.make_directory(client_dir, owner, perm)
def validate_client_proxy_dir(self): common.logit("... validating client_proxy_dir: %s" % self.client_proxy_dir()) if self.wms.privilege_separation() == "y": #-- done in WMS collector install if privilege separation is used -- if not os.path.isdir(self.client_proxy_dir()): common.logerr("""Privilege separation is in effect. This should have been created by the WMS Collector installation or you did not start the service or you changed the ini file and did not reinstall that service.""") else: common.make_directory(self.client_proxy_dir(),self.username(),0755)
def install_vdt_package(self,packages): """ Installs specified VDT packages. """ self.install_pacman() common.logit("... validating vdt_location: %s" % self.vdt_location()) common.check_for_value("vdt_location",self.vdt_location()) common.make_directory(self.vdt_location(),self.username(),0755) #-- install vdt packages --- self.messagesDict["packages"] = packages common.logit("... installing VDT packages") common.run_script("export VDTSETUP_AGREE_TO_LICENSES=y; . %(pacman_location)s/setup.sh && cd %(vdt_location)s && pacman -trust-all-caches -get %(packages)s" % self.messagesDict) #--- vdt-post-install -- common.run_script(". %(vdt_location)s/setup.sh && vdt-post-install" % self.messagesDict)
def install_pacman(self):
    """ Installs pacman if not present.

    Steps: validate pacman_location from the ini, short-circuit if an
    install already exists, otherwise download the pacman tarball with
    wget, unpack it under pacman_parent(), and source its setup.sh.
    Aborts via common.logerr on any validation failure.
    """
    common.logit("... validating pacman_location: %s" % self.pacman_location())
    common.check_for_value("pacman_location", self.pacman_location())
    if self.pacman_is_installed():
        # brief pause so the operator can read the log output
        os.system("sleep 2")
        return  # -- no need to install pacman--
    common.ask_continue(
        """ Pacman is required and does not appear to be installed in: %(pacman_location)s ... continue with pacman installation""" % self.messagesDict
    )
    common.logit(
        """ ======== pacman install starting ========== """
    )
    # NOTE(review): this re-validates pacman_location already checked above
    common.check_for_value("pacman_location", self.pacman_location())
    if os.path.exists(self.pacman_location()):
        # Directory exists but pacman_is_installed() was false (no setup.sh):
        # likely a corrupt distribution or a wrong ini value.
        common.logerr(
            """The pacman_location for the pacman installation already exists and should not. This script was looking for a setup.sh in that directory and it did not exist. If a valid pacman distribution, it may be corrupt or the pacman_location is incorrect. Please verify."""
        )
    common.logit("... validating pacman_url: %s" % self.pacman_url())
    common.check_for_value("pacman_url", self.pacman_url())
    # Verify the tarball URL is fetchable before creating any directories
    if not common.wget_is_valid(self.pacman_urlfile()):
        common.logerr(
            """A pacman tarball of this name does not exist at: %(pacman_urlfile)s ... please verify.""" % self.messagesDict
        )
    os.system("sleep 2")
    common.make_directory(self.pacman_parent(), self.username(), 0755)
    # Download, unpack (preserving local ownership), then remove the tarball
    common.run_script(
        "cd %(pacman_parent)s && wget %(pacman_urlfile)s && tar --no-same-owner -xzf %(pacman_tarball)s && rm -f %(pacman_tarball)s" % self.messagesDict
    )
    if not self.pacman_is_installed():
        common.logerr("Pacman install failed. No setup.sh file exists in: %(pacman_location)s" % self.messagesDict)
    common.logit(
        """... 
pacman requires the setup script to be sourced to initialize some variables in it for subsequent use."""
    )
    common.run_script("cd %(pacman_location)s && source setup.sh" % self.messagesDict)
    common.logit("\nPacman successfully installed: %(pacman_location)s" % self.messagesDict)
    common.logit("======== pacman install complete ==========\n")
    os.system("sleep 2")
def __create_condor_mapfile__(self,users): """ Creates the condor mapfile for GSI authentication""" if self.client_only_install == True: common.logit( "... No Condor mapfile file needed. Client only install") return mapfile_entries = self.__condor_mapfile_entries__(users) filename = self.condor_mapfile() common.logit("... creating Condor mapfile") common.logit(" %s" % filename) common.make_directory(os.path.dirname(filename),pwd.getpwuid(os.getuid())[0],0755) mapfile_entries += """GSI (.*) anonymous FS (.*) \\1 """ common.write_file("w",0644,filename,mapfile_entries,SILENT=True) common.logit("\nCondor mapfile entries:") common.logit("%s" % mapfile_entries)
from tensorflow.keras.utils import to_categorical from tensorflow.keras.models import load_model from sklearn.metrics import accuracy_score # This project from common import make_directory, set_batches_per_epoch from network import init_model, init_adam, init_datagen from parameters import build_sherpa_augmentations_space from train import train if __name__ == "__main__": algorithm = 'gpyopt-augs-only' # Setup directory tree make_directory('{}/'.format(algorithm)) make_directory('{}/model/'.format(algorithm)) make_directory('{}/metrics/'.format(algorithm)) make_directory('{}/params/'.format(algorithm)) # Load dataset (X_train, Y_train), (X_test, Y_test) = cifar100.load_data(label_mode='fine') Y_train, Y_test = to_categorical(Y_train), to_categorical(Y_test) X_train, X_test = X_train / 255, X_test / 255 # Baseline setup params = build_sherpa_augmentations_space() baseline_params = { 'input_shape':X_train.shape[1:], 'output_shape':Y_train.shape[1], 'depth':3,
def __validate_condor_location__(self): common.logit("... validating condor_location: %s" % self.condor_location()) if self.install_type() == "tarball": common.make_directory(self.condor_location(),self.username(),0755)
def create_config(self,config_xml): common.logit("\nCreating configuration files") common.logit(" %s" % self.config_file()) common.make_directory(self.config_dir(),self.username(),0755) common.write_file("w",0644,self.config_file(),config_xml,SILENT=True)
def validate_logs_dir(self): common.logit("... validating logs_dir: %s" % self.logs_dir()) common.make_directory(self.logs_dir(),self.username(),0755)
def create_config(self): config_xml = self.config_data() common.logit("\nCreating configuration file: %s" % self.config_file()) common.make_directory(self.config_dir(),self.username(),0755) common.write_file("w",0644,self.config_file(),config_xml)
# Keras import from tensorflow.keras.datasets import cifar10, cifar100 from tensorflow.keras.utils import to_categorical from tensorflow.keras.models import load_model from sklearn.metrics import accuracy_score # This project from common import make_directory, set_batches_per_epoch from network import init_model, init_adam from train import train if __name__ == "__main__": # Setup directory tree make_directory('baseline/') make_directory('baseline/model/') make_directory('baseline/metrics/') make_directory('baseline/params/') # Determine trail index trial_id = 0 models = glob.glob('baseline/model/*.h5') for model in models: index = int(model.split('.')[1]) if index >= trial_id: trial_id = index + 1 print('Performing trial {}'.format(trial_id)) savename = 'baseline/model/model.{}.h5'.format(trial_id) # Load dataset