def _set_nexus(self, nexus_wwn=None):
    """
    Set the nexus initiator WWN, generating one when none is given.

    Raises an RTSLibError if the TPG does not use a nexus, or if its
    nexus initiator WWN has already been set.
    """
    self._check_self()
    if not self.has_feature('nexus'):
        raise RTSLibError("The TPG does not use a nexus.")
    if self._get_nexus():
        raise RTSLibError("The TPG's nexus initiator WWN is already set.")

    # The nexus wwn type must match the parent target's.
    target = self.parent_target
    if nexus_wwn:
        # Validate against the wwn regexp only — not the target wwn_list —
        # since the "initiator" side of the nexus is being set here.
        nexus_wwn = normalize_wwn((target.wwn_type,), nexus_wwn)[0]
    else:
        nexus_wwn = generate_wwn(target.wwn_type)
    fwrite("%s/nexus" % self.path,
           target.fabric_module.to_fabric_wwn(nexus_wwn))
def _set_write_protect(self, write_protect):
    """Write "1" (protected) or "0" to the write_protect attribute."""
    self._check_self()
    fwrite("%s/write_protect" % self.path,
           "1" if write_protect else "0")
def _set_discovery_password(self, password):
    """
    Set the discovery auth password.

    A blank password disables it: the "NULL" sentinel is written, matching
    the convention used by _set_discovery_userid.
    """
    self._check_self()
    self._assert_feature('discovery_auth')
    path = "%s/discovery_auth/password" % self.path
    # Fix: previously wrote the literal string "******" (a display
    # placeholder) as the password when blank; use the "NULL" disable
    # sentinel for consistency with the userid setter.
    if password.strip() == '':
        password = "NULL"
    fwrite(path, "%s" % password)
def _config_pr_aptpl(self):
    """
    LIO actually *writes* pr aptpl info to the filesystem, so we need
    to read it in and squirt it back into configfs when we configure
    the storage object. BLEH.
    """
    aptpl_dir = "/var/target/pr"

    try:
        lines = fread("%s/aptpl_%s" % (aptpl_dir, self.wwn)).split()
    except (IOError, OSError):
        # Fix: was a bare `except:`; only a missing/unreadable metadata
        # file means "nothing to restore" — don't swallow everything else.
        return

    # Fix: also guard against an empty file (lines[0] raised IndexError).
    if not lines or not lines[0].startswith("PR_REG_START:"):
        return

    # Each PR_REG_START/PR_REG_END pair delimits one reservation record.
    reservations = []
    for line in lines:
        if line.startswith("PR_REG_START:"):
            res_list = []
        elif line.startswith("PR_REG_END:"):
            reservations.append(res_list)
        else:
            res_list.append(line.strip())

    for res in reservations:
        fwrite(self.path + "/pr/res_aptpl_metadata", ",".join(res))
def _set_discovery_userid(self, userid):
    """Set the discovery auth userid; a blank value writes the "NULL"
    sentinel, disabling it."""
    self._check_self()
    self._assert_feature('discovery_auth')
    value = "NULL" if userid.strip() == '' else userid
    fwrite("%s/discovery_auth/userid" % self.path, "%s" % value)
def restore_pr_aptpl(self, src_path=None):
    '''
    Restores StorageObject persistent reservations read from src_path.
    If src_path is omitted, uses the default LIO PR APTPL system
    path if it exists. This only works if the StorageObject is not
    in use currently, else an IO error will occur.

    @param src_path: The PR metadata file path.
    @type src_path: string or None
    '''
    dst_path = "%s/pr/res_aptpl_metadata" % self.path

    if src_path is None:
        src_path = "%s/aptpl_%s" % (self.pr_aptpl_metadata_dir, self.wwn)

    if not os.path.isfile(src_path):
        return

    lines = fread(src_path).split()
    # Fix: guard against an empty metadata file — lines[0] on an empty
    # list raised IndexError.
    if not lines or not lines[0].startswith("PR_REG_START:"):
        return

    # Each PR_REG_START/PR_REG_END pair is written back as one record.
    for line in lines:
        if line.startswith("PR_REG_START:"):
            pr_lines = []
        elif line.startswith("PR_REG_END:"):
            fwrite(dst_path, ",".join(pr_lines))
        else:
            pr_lines.append(line.strip())
def _set_tcq_depth(self, depth):
    """Write the cmdsn_depth (TCQ depth) attribute.

    Raises RTSLibError when configfs rejects the value.
    """
    self._check_self()
    path = "%s/cmdsn_depth" % self.path
    try:
        fwrite(path, "%s" % depth)
    except IOError as e:
        # Fix: `except IOError, msg` is Python-2-only syntax, and the
        # old msg[1] tuple-indexing does not work on Python 3 exceptions.
        raise RTSLibError("Cannot set tcq_depth: %s" % e)
def save(self):
    """Serialize self.data as pretty-printed JSON next to this file.

    The "dev" data set is stored under the name "valid".
    """
    if self.data_set_type == "dev":
        tag = "valid"
    else:
        tag = self.data_set_type
    save_f = path.join(path.dirname(path.realpath(__file__)),
                       tag + ".json")
    fwrite(json.dumps(self.data, indent=4), save_f)
    print("[Info] Saved {} data into {}".format(len(self.data), save_f))
def _set_iser(self, boolean):
    """Enable/disable iSER on this portal.

    For backward compatibility a missing iser entry is tolerated
    silently; the error is only raised when the entry exists.
    """
    iser_path = "%s/iser" % self.path
    try:
        fwrite(iser_path, str(int(boolean)))
    except IOError:
        # b/w compat: older kernels have no iser entry at all.
        if os.path.isfile(iser_path):
            raise RTSLibError("Cannot change iser")
def _set_tcq_depth(self, depth):
    """Write the cmdsn_depth (TCQ depth) attribute.

    Raises RTSLibError when configfs rejects the value.
    """
    self._check_self()
    path = "%s/cmdsn_depth" % self.path
    try:
        fwrite(path, "%s" % depth)
    except IOError as e:
        # Fix: `msg = msg[1]` raised TypeError on Python 3 — exception
        # objects are not subscriptable. Report the exception directly.
        raise RTSLibError("Cannot set tcq_depth: %s" % e)
def _set_wwn(self, wwn):
    """Write the T10 WWN Unit Serial.

    Raises RTSLibError when the StorageObject is not yet configured.
    """
    self._check_self()
    if not self.is_configured():
        raise RTSLibError("Cannot write a T10 WWN Unit Serial to "
                          + "an unconfigured StorageObject")
    fwrite("%s/wwn/vpd_unit_serial" % self.path, "%s\n" % wwn)
def setCycle(self, dutyR, dutyB):
    """Launch the per-channel LED duty-cycle scripts and persist their PIDs.

    NOTE(review): the trailing '&' is passed as a literal argv element,
    not interpreted by a shell — confirm on.py ignores extra args.
    """
    self.cleanProcess()
    procR = subprocess.Popen([
        'nohup', 'python',
        utils.getContextPath() + '/bin/scripts/led/on.py',
        str(self.channel_0), str(dutyR), '&'
    ])
    procB = subprocess.Popen([
        'nohup', 'python',
        utils.getContextPath() + '/bin/scripts/led/on.py',
        str(self.channel_1), str(dutyB), '&'
    ])
    utils.fwrite(utils.getContextPath() + '/bin/scripts/led/pid_0',
                 procR.pid)
    utils.fwrite(utils.getContextPath() + '/bin/scripts/led/pid_1',
                 procB.pid)
    print("Red channel PID: " + str(procR.pid) +
          ", Blue channel PID: " + str(procB.pid))
    print("LedController cycle running")
    return
def _set_discovery_enable_auth(self, enable):
    """Enforce ("1") or relax ("0") discovery authentication."""
    self._check_self()
    self._assert_feature('discovery_auth')
    path = "%s/discovery_auth/enforce_discovery_auth" % self.path
    # int(enable) may raise ValueError for non-numeric input, as before.
    fwrite(path, "%s" % (1 if int(enable) else 0))
def _set_iser_attr(self, iser_attr):
    """Write the iser network portal attribute, failing if it is absent."""
    attr_path = "%s/iser" % self.path
    if not os.path.isfile(attr_path):
        raise RTSLibError("iser network portal attribute does not exist.")
    fwrite(attr_path, "1" if iser_attr else "0")
def delete(self):
    """Delete the NetworkPortal, clearing its iser flag first if set."""
    iser_path = "%s/iser" % self.path
    if os.path.isfile(iser_path) and fread(iser_path).strip() == "1":
        fwrite(iser_path, "0")
    super(NetworkPortal, self).delete()
def _set_enable(self, boolean):
    """
    Enable or disable the TPG. A no-op when the TPG has no enable
    attribute or is already in the requested state.
    """
    self._check_self()
    enable_path = "%s/enable" % self.path
    # _get_enable is only consulted when the attribute file exists.
    if not os.path.isfile(enable_path) or boolean == self._get_enable():
        return
    try:
        fwrite(enable_path, str(int(boolean)))
    except IOError as e:
        raise RTSLibError("Cannot change enable state: %s" % e)
def _set_enable(self, boolean):
    '''
    Enable or disable the TPG, doing nothing if the TPG lacks an
    enable attribute or is already in the requested state.
    '''
    self._check_self()
    enable_file = "%s/enable" % self.path
    if os.path.isfile(enable_file) and self._get_enable() != boolean:
        try:
            fwrite(enable_file, str(int(boolean)))
        except IOError as err:
            raise RTSLibError("Cannot change enable state: %s" % err)
def _set_enable(self, boolean):
    '''
    Enables or disables the TPG. Raises an error if trying to disable
    a TPG without an enable attribute (but enabling works in that
    case).
    '''
    self._check_self()
    path = "%s/enable" % self.path
    if os.path.isfile(path) and (boolean != self._get_enable()):
        try:
            fwrite(path, str(int(boolean)))
        except IOError as e:
            # Fix: `except IOError, e` is Python-2-only syntax.
            raise RTSLibError("Cannot change enable state: %s" % e)
def _set_enable(self, boolean):
    '''
    Enables or disables the TPG. Raises an error when asked to disable
    a TPG that has no enable attribute (enabling such a TPG is a no-op).
    '''
    self._check_self()
    enable_path = "%s/enable" % self.path
    if not os.path.isfile(enable_path):
        # No enable attribute: enabling is harmless, disabling impossible.
        if not boolean:
            raise RTSLibError("TPG cannot be disabled.")
        return
    currently_enabled = self._get_enable()
    if boolean and not currently_enabled:
        fwrite(enable_path, "1")
    elif not boolean and currently_enabled:
        fwrite(enable_path, "0")
def _set_nexus(self, nexus_wwn=None):
    '''
    Set the nexus initiator WWN, generating one when none is given.
    Raises RTSLibError if the TPG does not use a nexus, already has a
    nexus set, or the supplied WWN does not match the target wwn type.
    '''
    self._check_self()
    if not self.has_feature('nexus'):
        raise RTSLibError("The TPG does not use a nexus.")
    if self._get_nexus():
        raise RTSLibError("The TPG's nexus initiator WWN is already set.")
    wwn_type = self.parent_target.wwn_type
    if nexus_wwn is None:
        nexus_wwn = generate_wwn(wwn_type)
    elif not is_valid_wwn(wwn_type, nexus_wwn):
        raise RTSLibError("WWN '%s' is not of type '%s'."
                          % (nexus_wwn, wwn_type))
    fwrite("%s/nexus" % self.path, nexus_wwn)
def model_setup(proc_id, model, args):
    """Record the model's trainable-parameter count on args and, on the
    main process only, dump run metadata (argv, args, model repr) to the
    meta file. Returns the (mutated) args."""
    # Count only parameters that receive gradients.
    args.n_params = sum(p.numel() for p in model.parameters()
                        if p.requires_grad)
    if proc_id == 0:
        meta_parts = [
            " ".join(sys.argv[1:]).replace(' -', ' \ \n-'),
            json.dumps(args.__dict__, indent=4, sort_keys=True),
            repr(model),
        ]
        fwrite(('\n' * 3).join(meta_parts), args.save_meta_fname)
        print('[Info] Model has {} trainable parameters'.format(
            args.n_params))
    return args
def __init__(self, data_dir='./data', train_fname='train.csv',
             valid_fname='valid.csv', test_fname='test.csv',
             vocab_fname='vocab.json'):
    """
    Build torchtext fields, load train/valid/test TabularDatasets from
    csv files, build vocabularies (GloVe-initialized for the input
    field), and optionally dump the vocab maps to a JSON file.
    """
    # NOTE(review): stop_words is computed but never used below —
    # confirm whether it was meant to be wired into a Field.
    stop_words = get_stop_words()
    # Data is assumed pre-tokenized: tokenization is whitespace split.
    tokenize = lambda x: x.split()
    INPUT = Field(sequential=True, batch_first=True, tokenize=tokenize,
                  lower=True)
    ENT = Field(sequential=False, batch_first=True, lower=True)
    TGT = Field(sequential=True, batch_first=True)
    SHOW_INP = RawField()
    # Field order must match the csv column layout.
    fields = [('tgt', TGT), ('input', INPUT), ('show_inp', SHOW_INP),
              ('ent1', ENT), ('ent2', ENT)]
    # The file extension (e.g. "csv") doubles as the torchtext format.
    datasets = TabularDataset.splits(
        fields=fields,
        path=data_dir,
        format=train_fname.rsplit('.')[-1],
        train=train_fname,
        validation=valid_fname,
        test=test_fname,
        skip_header=True,
    )
    # Input vocab: capped size, 100-dim GloVe 6B vectors; OOV words get
    # normally-initialized embeddings.
    INPUT.build_vocab(*datasets,
                      max_size=100000,
                      vectors=GloVe(name='6B', dim=100),
                      unk_init=torch.Tensor.normal_,
                      )
    TGT.build_vocab(*datasets)
    ENT.build_vocab(*datasets)
    self.INPUT = INPUT
    self.ENT = ENT
    self.TGT = TGT
    self.train_ds, self.valid_ds, self.test_ds = datasets
    if vocab_fname:
        # Persist itos/stoi for every vocab so runs are reproducible.
        writeout = {
            'tgt_vocab': {
                'itos': TGT.vocab.itos,
                'stoi': TGT.vocab.stoi,
            },
            'input_vocab': {
                'itos': INPUT.vocab.itos,
                'stoi': INPUT.vocab.stoi,
            },
            'ent_vocab': {
                'itos': ENT.vocab.itos,
                'stoi': ENT.vocab.stoi,
            },
        }
        fwrite(json.dumps(writeout, indent=4), vocab_fname)
def set_parameter(self, parameter, value):
    '''
    Set the value of a named RFC-3720 parameter. The parameter must
    already exist in configFS.

    @param parameter: The RFC-3720 parameter's name (case-sensitive).
    @type parameter: string
    @param value: The parameter's value.
    @type value: string
    '''
    self._check_self()
    param_path = "%s/param/%s" % (self.path, str(parameter))
    if not os.path.isfile(param_path):
        raise RTSLibError("Cannot find parameter: %s" % parameter)
    try:
        fwrite(param_path, "%s\n" % str(value))
    except Exception as e:
        raise RTSLibError("Cannot set parameter %s: %s" % (parameter, e))
def set_parameter(self, parameter, value):
    '''
    Set the value of a named RFC-3720 parameter. The parameter must
    already exist in configFS.

    @param parameter: The RFC-3720 parameter's name (case-sensitive).
    @type parameter: string
    @param value: The parameter's value.
    @type value: string
    '''
    self._check_self()
    param_path = "%s/param/%s" % (self.path, str(parameter))
    if not os.path.isfile(param_path):
        raise RTSLibError("Cannot find parameter: %s." % str(parameter))
    try:
        fwrite(param_path, "%s\n" % str(value))
    except Exception as e:
        raise RTSLibError("Cannot set parameter %s: %s" % (parameter, e))
def set_attribute(self, attribute, value):
    '''
    Set the value of a named attribute. The attribute must already
    exist in configFS.

    @param attribute: The attribute's name (case-sensitive).
    @type attribute: string
    @param value: The attribute's value.
    @type value: string
    '''
    self._check_self()
    attr_path = "%s/attrib/%s" % (self.path, str(attribute))
    if not os.path.isfile(attr_path):
        raise RTSLibError("Cannot find attribute: %s." % str(attribute))
    try:
        fwrite(attr_path, "%s" % str(value))
    except Exception as e:
        raise RTSLibError("Cannot set attribute %s: %s" % (attribute, e))
def setCycleOn(self):
    """Start the LED 'on' script for both pins and persist their PIDs.

    NOTE(review): the trailing '&' is a literal argv element, not shell
    backgrounding — confirm on.py ignores extra args.
    """
    self.cleanProcess()
    # Launch order preserved: blue-pin process first, then red.
    blue_proc = subprocess.Popen([
        'nohup', 'python',
        utils.getContextPath() + '/bin/scripts/led/on.py',
        str(self.pin_blue), '&'
    ])
    red_proc = subprocess.Popen([
        'nohup', 'python',
        utils.getContextPath() + '/bin/scripts/led/on.py',
        str(self.pin_red), '&'
    ])
    utils.fwrite(utils.getContextPath() + '/bin/scripts/led/pid_red',
                 red_proc.pid)
    utils.fwrite(utils.getContextPath() + '/bin/scripts/led/pid_blue',
                 blue_proc.pid)
    print("Red pin PID: " + str(red_proc.pid) +
          ", Blue pin PID: " + str(blue_proc.pid))
    print("LedController is now ON")
    return
def _set_nexus(self, nexus_wwn=None):
    """
    Set the nexus initiator WWN, generating one that matches the parent
    target's wwn type when none is supplied.

    Raises RTSLibError if the TPG does not use a nexus, or if its nexus
    initiator WWN is already set.
    """
    self._check_self()
    if not self.has_feature("nexus"):
        raise RTSLibError("The TPG does not use a nexus.")
    if self._get_nexus():
        raise RTSLibError("The TPG's nexus initiator WWN is already set.")

    fabric = self.parent_target.fabric_module
    if not nexus_wwn:
        # Generated wwn must match the parent target's type.
        nexus_wwn = generate_wwn(self.parent_target.wwn_type)
    else:
        nexus_wwn = fabric.to_normalized_wwn(nexus_wwn)[0]
    fwrite("%s/nexus" % self.path, fabric.to_fabric_wwn(nexus_wwn))
def set_auth_attr(self, auth_attr, value):
    '''
    Sets the value of a named auth_attr. The auth_attr must exist in
    configFS.

    @param auth_attr: The auth_attr's name. It is case-sensitive.
    @type auth_attr: string
    @param value: The auth_attr's value.
    @type value: string
    '''
    self._check_self()
    path = "%s/auth/%s" % (self.path, str(auth_attr))
    if not os.path.isfile(path):
        raise RTSLibError("Cannot find auth attribute: %s."
                          % str(auth_attr))
    try:
        fwrite(path, "%s" % str(value))
    except IOError as e:
        # Fix: `except IOError, msg` is Python-2-only, and msg[1]
        # (the strerror tuple index) fails on Python 3; use e.strerror.
        raise RTSLibError("Cannot set auth attribute %s: %s"
                          % (str(auth_attr), e.strerror))
def _set_nexus(self, nexus_wwn=None):
    '''
    Sets the nexus initiator WWN. Raises an exception if the nexus is
    already set or if the TPG does not use a nexus.
    '''
    self._check_self()
    if not self.has_feature('nexus'):
        raise RTSLibError("The TPG does not use a nexus.")
    if self._get_nexus():
        raise RTSLibError("The TPG's nexus initiator WWN is already set.")
    fm = self.parent_target.fabric_module
    # Normalize a supplied wwn; otherwise generate one matching the
    # parent target's wwn type.
    normalized = (fm.to_normalized_wwn(nexus_wwn)[0] if nexus_wwn
                  else generate_wwn(self.parent_target.wwn_type))
    fwrite("%s/nexus" % self.path, fm.to_fabric_wwn(normalized))
def delete(self):
    '''
    Recursively deletes a Nexus object. This will delete all attached
    LUNs, and then the Nexus itself.
    '''
    self._check_self()
    for lun in self.luns:
        lun.delete()

    # TODO: check that ALUA MD removal works while removing Nexus
    if os.path.isdir(self.alua_metadata_path):
        shutil.rmtree(self.alua_metadata_path)

    nexus_path = self._path + "/nexus"
    if os.path.isfile(nexus_path):
        try:
            fwrite(nexus_path, "NULL")
        except IOError as e:
            # Fix: `except IOError, msg` is Python-2-only syntax.
            raise RTSLibError("Cannot delete Nexus initiator "
                              + "(>=4.0 style, %s): %s."
                              % (nexus_path, e))
def final_evaluate(self, model,
                   perl_fname='eval/semeval2010_task8_scorer-v1.2.pl'):
    """Run the model over the dataloader, dump predictions and gold
    labels to temp files, and score them with the SemEval perl scorer,
    saving the scorer output."""
    all_preds, all_golds = [], []
    for batch in self.dataloader:
        if self.with_ent is True:
            batch_pred = model.predict(batch.input, batch.ent1, batch.ent2)
        else:
            batch_pred = model.predict(batch.input)
        all_preds += batch_pred
        all_golds += batch.tgt.view(-1).detach().cpu().numpy().tolist()

    pred_fname = os.path.join(self.save_dir, 'tmp_pred.txt')
    truth_fname = os.path.join(self.save_dir, 'tmp_truth.txt')
    result_fname = os.path.join(self.save_dir, 'tmp_result.txt')

    def _dump(labels, fname):
        # One "<index>\t<label>" line per example, as the scorer expects.
        lines = ["{}\t{}\n".format(ix, self.label_itos[label])
                 for ix, label in enumerate(labels)]
        fwrite(''.join(lines), fname)

    _dump(all_preds, pred_fname)
    _dump(all_golds, truth_fname)

    cmd = 'perl {} {} {}'.format(perl_fname, pred_fname, truth_fname)
    stdout, _ = shell(cmd, stdout=True)
    fwrite(stdout, result_fname)
def select_data(save_dir='./tmp',
                data_dir='./data/wiki_person',
                train_fname='train.csv',
                data_sizes=None,
                skip_header=True,
                verbose=True):
    """
    Randomly subsample the train/valid/test files from data_dir into
    save_dir, keeping at most data_sizes[i] body lines per split.

    @param data_sizes: per-split caps [train, valid, test]; a None entry
        keeps all lines. Defaults to no cap for every split.
    Returns the per-split sample counts as a list [train, valid, test].
    """
    # Fix: the default was the mutable literal [None, None, None], which
    # is shared across calls; use None and build a fresh list here.
    if data_sizes is None:
        data_sizes = [None, None, None]

    files = ['train', 'valid', 'test']
    suffix = '.' + train_fname.split('.')[-1]
    n_lines = {}

    def _get_num_lines(file):
        # Count non-empty lines, excluding the header when present.
        with open(file) as f:
            data = [line.strip() for line in f if line]
        num_lines = len(data) if not skip_header else len(data) - 1
        return num_lines

    for file, data_size in zip(files, data_sizes):
        read_from = os.path.join(data_dir, train_fname.replace('train', file))
        save_to = os.path.join(save_dir, file + suffix)

        with open(read_from) as f:
            data = [line for line in f]
        if skip_header:
            header, body = data[:1], data[1:]
        else:
            header, body = [], data
        # Shuffle before truncating so the subsample is random.
        random.shuffle(body)
        data = header + body[:data_size]
        fwrite(''.join(data), save_to)

        n_lines[file] = _get_num_lines(save_to)

    if verbose:
        writeout = ['{}: {}'.format(*item) for item in n_lines.items()]
        writeout = ', '.join(writeout)
        print('[Info] #samples in', writeout)
    return list(n_lines.values())
def write_summary(self, epoch=0, summ=None):
    """Append a one-line JSON summary to the log file, echo it, and
    return the JSON string. When summ is None a default summary with
    loss/acc for this epoch is built."""
    def _fmt(value):
        # Floats to 4 decimals, ints zero-padded to width 2, rest as-is.
        if isinstance(value, float):
            return '{:.4f}'.format(value)
        if isinstance(value, int):
            return '{:02d}'.format(value)
        return '{}'.format(value)

    if summ is None:
        summ = {
            'Eval': '(e{:02d},{})'.format(epoch, self.valid_or_test),
            'loss': self.avg_loss,
            'acc': self.acc,
        }
    summ = {key: _fmt(val) for key, val in summ.items()}

    writeout = json.dumps(summ)
    fwrite(writeout + '\n', self.save_log_fname, mode='a')
    print('[Info] {}'.format(writeout))
    return writeout
def _create_in_cfs_ine(self, mode):
    """Create the Nexus in configfs, generating an initiator WWN for
    fresh objects and handling both the >=4.0 (nexus file) and <4.0
    (nexus directory) configfs styles."""
    super(Nexus, self)._create_in_cfs_ine(mode)
    if not os.path.isdir(self.alua_metadata_path):
        os.makedirs(self.alua_metadata_path)
    if self._fresh:
        initiator = generate_wwn('naa')
        nexus_path = self._path + "/nexus"
        if os.path.isfile(nexus_path):
            try:
                fwrite(nexus_path, initiator)
            except IOError as e:
                # Fix: `except IOError, msg` is Python-2-only syntax.
                raise RTSLibError("Cannot create Nexus initiator "
                                  + "(>=4.0 style, %s): %s."
                                  % (nexus_path, e))
        else:
            try:
                os.makedirs(nexus_path + "/" + initiator)
            except IOError as e:
                raise RTSLibError("Cannot create Nexus initiator."
                                  + "(<4.0 style, %s): %s."
                                  % (nexus_path, e))
def _enable(self):
    """Enable this object by writing "1" to its enable attribute."""
    self._check_self()
    fwrite("%s/enable" % self.path, "1\n")
def save_to_json(data, file):
    """Pretty-print `data` as JSON into `file` and report what was saved."""
    fwrite(json.dumps(data, indent=4), file)
    print('[Info] Saving {} data to {}'.format(len(data), file))
def _write_fd(self, contents):
    """Write the stripped string form of `contents` to the fd attribute."""
    self._check_self()
    fwrite("%s/fd" % self.path, "%s" % str(contents).strip())
def __init__(
        self,
        proc_id=0,
        data_dir='tmp/',
        train_fname='train.csv',
        preprocessed=True,
        lower=True,
        vocab_max_size=100000,
        emb_dim=100,
        save_vocab_fname='vocab.json',
        verbose=True,
):
    """
    Build torchtext fields, load train/valid/test TabularDatasets, and
    build vocabularies (GloVe-initialized for the input field). Only the
    main process (proc_id == 0) logs and saves the vocab file.
    """
    # Verbose output only on the main process.
    self.verbose = verbose and (proc_id == 0)
    # NOTE(review): the conditional binds inside the lambda body, so
    # tokenize is always a function and 'spacy' is never returned as the
    # Field tokenizer name — confirm whether
    # `(lambda x: x.split()) if preprocessed else 'spacy'` was intended.
    tokenize = lambda x: x.split() if preprocessed else 'spacy'
    INPUT = Field(
        sequential=True,
        batch_first=True,
        tokenize=tokenize,
        lower=lower,
        # include_lengths=True,
    )
    # TGT = Field(sequential=False, dtype=torch.long, batch_first=True,
    #             use_vocab=False)
    TGT = Field(sequential=True, batch_first=True)
    SHOW_INP = RawField()
    # Field order must match the csv column layout.
    fields = [
        ('tgt', TGT),
        ('input', INPUT),
        ('show_inp', SHOW_INP),
    ]
    if self.verbose:
        show_time("[Info] Start building TabularDataset from: {}{}".format(
            data_dir, 'train.csv'))
    # valid/test filenames are derived from the train filename; the file
    # extension doubles as the torchtext format name.
    datasets = TabularDataset.splits(
        fields=fields,
        path=data_dir,
        format=train_fname.rsplit('.')[-1],
        train=train_fname,
        validation=train_fname.replace('train', 'valid'),
        test=train_fname.replace('train', 'test'),
        skip_header=True,
    )
    # Input vocab: capped size, GloVe 6B vectors of emb_dim; OOV words
    # get normally-initialized embeddings.
    INPUT.build_vocab(
        *datasets,
        max_size=vocab_max_size,
        vectors=GloVe(name='6B', dim=emb_dim),
        unk_init=torch.Tensor.normal_,
    )
    # load_vocab(hard_dosk) like opennmt
    # emb_dim = {50, 100}
    # Elmo
    TGT.build_vocab(*datasets)
    self.INPUT = INPUT
    self.TGT = TGT
    self.train_ds, self.valid_ds, self.test_ds = datasets
    if save_vocab_fname and self.verbose:
        # Persist itos/stoi for both vocabularies (main process only).
        writeout = {
            'tgt_vocab': {
                'itos': TGT.vocab.itos,
                'stoi': TGT.vocab.stoi,
            },
            'input_vocab': {
                'itos': INPUT.vocab.itos,
                'stoi': INPUT.vocab.stoi,
            },
        }
        fwrite(json.dumps(writeout, indent=4), save_vocab_fname)
    if self.verbose:
        msg = "[Info] Finished building vocab: {} INPUT, {} TGT" \
            .format(len(INPUT.vocab), len(TGT.vocab))
        show_time(msg)
def _set_tag(self, tag_str):
    """Write the tag attribute, mapping None to the 'NULL' sentinel.

    IOError is deliberately suppressed (tag support may be absent).
    """
    with ignored(IOError):
        fwrite("%s/tag" % self.path,
               'NULL' if tag_str is None else tag_str)
def _set_udev_path(self, udev_path):
    """Write the udev_path configfs attribute."""
    self._check_self()
    fwrite("%s/udev_path" % self.path, "%s" % udev_path)
# Random run id so concurrent runs don't clobber each other's files.
seed = np.random.randint(10000)
logfilename = os.path.join(logdir, f'lin_eval_{seed}.csv')
save_path = os.path.join(logdir, f'lin_eval_{seed}.pth.tar')
init_logfile(logfilename, "epoch,time,lr,train loss,train acc,test loss,test acc")
# Linear-evaluation loop: train one epoch, evaluate, log a csv row,
# step the LR schedule, and checkpoint — for 100 epochs.
for epoch in range(100):
    print("Epoch {}".format(epoch))
    before = time.time()
    train_out = train(epoch, train_loader, model, optimizer, criterion)
    test_out = test_classifier(model, test_loader, ["loss", "error@1"])
    after = time.time()
    # Wall-clock duration of train + eval for this epoch.
    epoch_time = after - before
    # NOTE(review): fwrite here takes (path, row) — presumably appends a
    # csv line; confirm against this project's fwrite helper.
    # NOTE(review): scheduler.get_lr() is deprecated in newer PyTorch in
    # favor of get_last_lr() — confirm the installed version.
    fwrite(logfilename, "{},{:.8},{:.4},{:.4},{:.4},{:.4},{:.4}".format(
        epoch, epoch_time, scheduler.get_lr()[0], train_out['loss'],
        train_out['acc@1'], test_out['loss'], 100 - test_out['error@1']))
    print(' * [Loss %.3f] [Err@1 %.3f]' % (test_out['loss'],
                                           test_out['error@1']))
    # In PyTorch 1.1.0 and later, you should call `optimizer.step()` before `lr_scheduler.step()`.
    # See more details at https://pytorch.org/docs/stable/optim.html#how-to-adjust-learning-rate
    scheduler.step()
    # Overwrite the checkpoint (next-epoch counter + weights) each epoch.
    torch.save({
        'epoch': epoch + 1,
        'state_dict': model.state_dict(),
    }, save_path)
def _control(self, command):
    """Send a stripped control command string to the control attribute."""
    self._check_self()
    fwrite("%s/control" % self.path, "%s" % str(command).strip())
def _set_tag(self, tag_str):
    """Write the tag attribute; None maps to the "NULL" sentinel.

    IOError is deliberately suppressed (tag support may be absent).
    """
    with ignored(IOError):
        value = "NULL" if tag_str is None else tag_str
        fwrite("%s/tag" % self.path, value)
def _control(self, command):
    """Write the command, newline-terminated, to the control attribute."""
    self._check_self()
    fwrite("%s/control" % self.path, "%s\n" % str(command))