def loadData(self, data):
    if isinstance(data, dict):
        # If 'data' is preferences on users for training
        self.prefsOnUser = data
        self.prefs = tool.transposePrefs(self.prefsOnUser)
    elif isinstance(data, str):
        # If 'data' is a file path of training data
        self.prefsOnUser = tool.loadData(data)
        self.prefs = tool.transposePrefs(self.prefsOnUser)
    self.itemList = self.prefs.keys()
def loadData(self, data):
    if isinstance(data, dict):
        # If 'data' is preferences on users for training
        self.prefs = data
    elif isinstance(data, str):
        # If 'data' is a file path of training data
        self.prefs = tool.loadData(data)
    self.itemList = {}
    for user in self.prefs:
        for item in self.prefs[user]:
            self.itemList[item] = None
def _setUseRepr(self, inVal):
    """property which indicates whether to use __repr__ or __str__ method for debug"""
    if isinstance(inVal, bool):
        self._useRepr = inVal
    elif isinstance(inVal, str):
        assert inVal.upper() == "GLOBAL", \
            "use_repr must be value of global when set as a string"
        PyClassHelper._Global_Use_Repr = True
    else:
        assert False, "use_repr must be assigned a bool or a string \"global\""
def loadData(self, data):
    if isinstance(data, dict):
        self.prefs = data
    elif isinstance(data, str):
        # self.prefs = tool.loadData(data)
        pass
    self.itemList = {}
    for user in self.prefs:
        for item in self.prefs[user]:
            self.itemList[item] = None
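# Standalone sketch of the dict-as-ordered-set idiom used for itemList above
# (assumed sample data; dict keys are insertion-ordered in Python 3.7+).
prefs = {'u1': {'i1': 5, 'i2': 3}, 'u2': {'i2': 4, 'i3': 1}}
itemList = {}
for user in prefs:
    for item in prefs[user]:
        itemList[item] = None
assert list(itemList) == ['i1', 'i2', 'i3']  # de-duplicated, order preserved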
def _flatten_inputs(inputs):
    if not isinstance(inputs, (list, tuple)):
        return inputs
    res = []
    for inp in inputs:
        if isinstance(inp, (list, tuple)):
            res.extend(_flatten_inputs(inp))
        else:
            res.append(inp)
    return res
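# Minimal usage sketch of _flatten_inputs (no dependencies beyond the function
# above): nested lists/tuples are flattened depth-first into one flat list.
assert _flatten_inputs([1, (2, 3), [[4], 5]]) == [1, 2, 3, 4, 5]
assert _flatten_inputs(7) == 7  # non-sequence inputs pass through unchanged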
def do(ai, a):
    if not isinstance(ai, (Min, Max)):
        return ai
    cond = a in ai.args
    if not cond:
        return ai.func(*[do(i, a) for i in ai.args], evaluate=False)
    if isinstance(ai, cls):
        return ai.func(*[do(i, a) for i in ai.args if i != a], evaluate=False)
    return a
def forceWrite(this, key, val):
    ''' Always writes through to MBUS '''
    this.log.debug("forced-write: " + str(key) + ':' + str(val))
    addr = key[0]
    size = key[1]
    assert isinstance(addr, int)
    assert isinstance(val, int)
    this.mbus.write_mem(addr, val, size)
def write(self, obj):
    self.as_hdl_ast.name_scope = self.name_scope
    if not isinstance(obj, HdlConstraintList):
        hdl = self.as_hdl_ast.as_hdl(obj)
        ser = self.serializer_cls.TO_HDL()
        j = ser.visit_main_obj(hdl)
        if isinstance(hdl, HdlContext):
            self.ctx.extend(j)
        else:
            self.ctx.append(j)
def walkFlatten(self, offset: int = 0,
                shouldEnterFn=_default_shouldEnterFn
                ) -> Generator[Union[Tuple[Tuple[int, int], 'TransTmpl'],
                                     'OneOfTransaction'], None, None]:
    """
    Walk fields in instance of TransTmpl

    :param offset: optional offset for all children in this TransTmpl
    :param shouldEnterFn: function(transTmpl) which returns (shouldEnter, shouldUse),
        where shouldEnter is a flag that means the iterator should look inside
        this object and shouldUse means this field should be used
        (= the generator should yield it)
    :return: generator of tuples ((startBitAddress, endBitAddress), TransTmpl instance)
    """
    t = self.dtype
    base = self.bitAddr + offset
    end = self.bitAddrEnd + offset
    shouldEnter, shouldYield = shouldEnterFn(self)
    if shouldYield:
        yield ((base, end), self)
    if shouldEnter:
        if isinstance(t, Bits):
            pass
        elif isinstance(t, HStruct):
            for c in self.children:
                yield from c.walkFlatten(offset, shouldEnterFn)
        elif isinstance(t, (HArray, HStream)):
            itemSize = (self.bitAddrEnd - self.bitAddr) // self.itemCnt
            for i in range(self.itemCnt):
                if i == 0:
                    c = self.children
                else:
                    # spawn a new array item
                    c = deepcopy(self.children)
                    assert c.rel_field_path == (0, ), c.rel_field_path
                    # replace the index
                    c.rel_field_path = TypePath(i, )
                yield from c.walkFlatten(base + i * itemSize, shouldEnterFn)
        elif isinstance(t, HUnion):
            yield OneOfTransaction(self, offset, shouldEnterFn, self.children)
        else:
            raise TypeError(t)
def parse_single_datetime(date_str):
    if isinstance(date_str, datetime):
        return date_str
    elif isinstance(date_str, int) and date_str < 3600:
        # It is unlikely that we've chosen a unix time within the first hour
        # of 01/01/1970. It is probably a year value.
        return datetime(date_str, 1, 1, 0, 0)
    elif isinstance(date_str, str):
        return parser.parse(date_str, default=datetime(1970, 1, 1, 0, 0))
    else:
        return None
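# Hedged usage sketch for parse_single_datetime; assumes `datetime` from the
# standard library and `parser` from python-dateutil, as the function implies.
from datetime import datetime
from dateutil import parser

assert parse_single_datetime(1999) == datetime(1999, 1, 1, 0, 0)    # bare year
assert parse_single_datetime("2001-02-03") == datetime(2001, 2, 3)  # date string
assert parse_single_datetime(None) is None                          # unsupported type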
def forward(ctx: Any, inputs: Any, dummy_input: Any, model_instance: Any) -> Any:
    inputs = inputs if isinstance(inputs, tuple) else (inputs,)
    ctx.inputs = inputs
    ctx.model_instance = model_instance
    # TODO(anj-s): We might need to store this for each boundary activation.
    # Currently we assume all boundary activation inputs require grad.
    ctx.grad_requirements = tuple(x.requires_grad for x in inputs)
    ctx.fwd_rng_state = torch.get_rng_state()

    # List of input activations starting with the given input.
    model_instance._activations = [inputs]
    # Enumerate through layer shards and apply activations from the previous shard.
    for index, layer_shard in enumerate(model_instance.model_slices):
        with torch.autograd.profiler.record_function("fairscale.experimental.nn.offload:forward_load"):
            # Bring in the current activations onto the device.
            model_instance._activations[index] = tuple(
                [a.cuda() for a in list(model_instance._activations[index])])
            # Bring in the current layer shard onto the device.
            layer_shard.forward_load()

        # Apply the FP and store the activations on the CPU.
        inputs = model_instance._activations[index]
        with torch.autograd.profiler.record_function("fairscale.experimental.nn.offload:no_grad_forward_pass"):
            with torch.no_grad():
                output_list: List[Any] = []
                for given_input in inputs:
                    given_input_list = torch.chunk(given_input, model_instance._num_microbatches)
                    given_output_list = []
                    for inputs in given_input_list:
                        output = layer_shard(inputs)
                        given_output_list.append(output)
                    given_output = torch.cat(given_output_list).squeeze(-1)
                    output_list.append(given_output)
                output = tuple(output_list)

        output = output if isinstance(output, tuple) else (output,)
        with torch.autograd.profiler.record_function("fairscale.experimental.nn.offload:forward_drop"):
            # Move the activations used by the current shard back to the CPU.
            model_instance._activations[index] = tuple(
                [a.cpu() for a in list(model_instance._activations[index])])
            # The newly computed activations remain on the GPU ready for the next shard computation.
            model_instance._activations.append(output)
            # Move the layer shard back to the CPU.
            layer_shard.forward_drop()

    # The last instance will lose the gradient function if we move it to the CPU.
    # This is because all grad functions are present on the device that ran the FW pass.
    # The last activation remains on the GPU and is the return value of this function.
    # Note that this assumes that the target is also on the GPU which is required for
    # calculating the loss.
    result = model_instance._activations[-1]
    result = [r.cuda() for r in result]
    for r in result:
        r.requires_grad = True
    return result[0] if len(result) == 1 else result
def __eq__(self, obj):
    if obj is None:
        return False
    if isinstance(obj, base.Quantity):
        obj = obj.uval
    if isinstance(obj, self.__class__):
        self.check_units(obj._units)
        return self._value == obj._value
    else:
        raise EngineeringTools_uval_Error(
            'units do not match: {} != {}'.format(self, obj))
def codes_set_array(handle, key, values):
    # type: (cffi.FFI.CData, bytes, T.List[T.Any]) -> None
    if len(values) > 0:
        if isinstance(values[0], float):
            codes_set_double_array(handle, key, values)
        elif isinstance(values[0], int):
            codes_set_long_array(handle, key, values)
        else:
            raise TypeError("Unsupported value type: %r" % type(values[0]))
    else:
        raise ValueError("Cannot set an empty list.")
def get_transactions(self, card_id: str, start_date=None, end_date=None):
    # Default arguments are evaluated once at definition time, so a
    # date.today() default would go stale in a long-running process;
    # use None sentinels and resolve "today" at call time instead.
    if start_date is None:
        start_date = date.today()
    if end_date is None:
        end_date = date.today()
    if not isinstance(start_date, date):
        start_date = datetime.strptime(start_date, Constants.MYSQL_DATE_FORMAT)
    if not isinstance(end_date, date):
        end_date = datetime.strptime(end_date, Constants.MYSQL_DATE_FORMAT)
    return self._get_transactions(card_id, start_date, end_date)
def message_set(self, item, value):
    # type: (str, T.Any) -> None
    key = item.encode(self.encoding)
    set_array = isinstance(value, T.Sequence) and not isinstance(value, (str, bytes))
    if set_array:
        eccodes.codes_set_array(self.codes_id, key, value)
    else:
        if isinstance(value, str):
            value = value.encode(self.encoding)
        eccodes.codes_set(self.codes_id, key, value)
def _設定欄位(self, 內容, 資料表, 會使加新的):
    欄位名 = 資料表.__name__[:-1]
    內容資料 = 內容[欄位名]
    if isinstance(內容資料, int):
        setattr(self, 欄位名, 資料表.objects.get(pk=內容資料))
    elif isinstance(內容資料, str):
        if 會使加新的:
            setattr(self, 欄位名, 資料表.objects.get_or_create(**{欄位名: 內容資料})[0])
        else:
            setattr(self, 欄位名, 資料表.objects.get(**{欄位名: 內容資料}))
    else:
        setattr(self, 欄位名, 內容資料)
def forward(ctx: Any, inputs: Any, dummy_input: Any, model_instance: Any) -> Any:
    inputs = inputs if isinstance(inputs, tuple) else (inputs, )
    ctx.inputs = inputs
    ctx.model_instance = model_instance
    # TODO(anj-s): We might need to store this for each boundary activation.
    # Currently we assume all boundary activation inputs require grad.
    ctx.grad_requirements = tuple(x.requires_grad for x in inputs)
    ctx.fwd_rng_state = torch.get_rng_state()

    # List of input activations starting with the given input.
    model_instance._activations = [inputs]
    # Enumerate through layer shards and apply activations from the previous shard.
    for index, layer_shard in enumerate(model_instance.model_slices):
        # Bring in the current activations onto the device.
        model_instance._activations[index] = tuple(
            [a.cuda() for a in list(model_instance._activations[index])])
        # Bring in the current layer shard onto the device.
        layer_shard.forward_load()

        # Apply the FP and store the activations on the CPU.
        inputs = model_instance._activations[index]
        with torch.no_grad():
            output_list: List[Any] = []
            for given_input in inputs:
                given_input_list = torch.chunk(
                    given_input, model_instance._num_microbatches)
                given_output_list = []
                for inputs in given_input_list:
                    output = layer_shard(inputs)
                    given_output_list.append(output)
                given_output = torch.cat(given_output_list).squeeze(-1)
                output_list.append(given_output)
            output = tuple(output_list)

        output = output if isinstance(output, tuple) else (output, )
        # The last instance will lose the gradient function if we move it to the CPU.
        # This is because all grad functions are present on the device that ran the FW pass.
        if index == len(model_instance.model_slices) - 1:
            model_instance._activations.append(output)
        else:
            model_instance._activations.append(
                tuple([a.cpu() for a in list(output)]))
        # Move the layer shard back to the CPU.
        layer_shard.forward_drop()

    # TODO(anj-s): Check device of the result to make sure the outputs and targets match device.
    result = model_instance._activations[-1]
    for r in result:
        r.requires_grad = True
    return result[0] if len(result) == 1 else result
def upload_audit(model_id, dataset_id, measure, value, url=None):
    """Upload audits of the model

    Params
    ------
    model_id : str
        Name/id of the model
    dataset_id : str
        Name/id of the dataset
    measure : str, list
        name of the measure or list of the names of the measures used in audit
    value : float, array-like
        score or list of the scores in the same order as in the measure parameter
    url : str
        url to mogger

    Returns
    -------
    """
    if os.path.isfile('.mogger.config'):
        user_name, password, url = get_config()
    else:
        # NOTE: the original credential-prompt lines were redacted; this is a
        # plausible reconstruction based on the surviving md5/utf8 fragments.
        user_name = input('user: ')
        password = input('password: ')
        md5 = hashlib.md5(password.encode('utf8'))
        password = md5.hexdigest()
    url += "mogger/api/v1/audit"
    info = {
        "modelId": model_id,
        "datasetId": dataset_id,
        'measure': measure,
        'value': value
    }
    r = requests.post(url, json=info, headers={
        'content-type': 'application/json;charset=UTF8',
        'userName': user_name,
        'password': password
    })
    return r
def __sub__(self, obj):
    if isinstance(obj, UVal):
        pass
    elif isinstance(obj, float):
        obj = UVal(obj, {})
    elif isinstance(obj, base.Quantity):
        obj = obj.uval
    else:
        raise EngineeringTools_uval_Error('wrong type: %s - %s' % (self, obj))
    self.check_units(obj._units)
    return UVal(self._value - obj._value, self._units)
def normalize_rows_source(rows_source):
    if isinstance(rows_source, str):
        # Open the file with the csv reader
        with open(rows_source) as csvf:
            yield from normalize_rows_source(csvf)
        return
    elif isinstance(rows_source, io.TextIOBase):
        # This includes stdin
        rows_source = csv.reader(rows_source, delimiter="\t")
    yield from rows_source
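# Usage sketch: io.StringIO is a TextIOBase, so it takes the csv.reader branch
# (tab-separated content assumed, matching the delimiter above).
import csv, io

buf = io.StringIO("a\tb\nc\td\n")
assert list(normalize_rows_source(buf)) == [["a", "b"], ["c", "d"]]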
def create(self, buildParameter, environment):
    assert isinstance(buildParameter, WaeBuildParameter)
    assert isinstance(environment, WaeEnvironment)
    return WaeAgent(nX=environment.nX,
                    nZ=environment.nZ,
                    nH=buildParameter.nH,
                    nXi=buildParameter.nXi,
                    nLayer=buildParameter.nLayer,
                    cluster_interval=buildParameter.cluster_interval,
                    activation=buildParameter.activation)
def _defaultShouldEnterFn(root: HdlType, field_path: Tuple[Union[str, int]]):
    """
    Default method which resolves how the parts of input data type
    should be represented on interface level.
    """
    t = field_path_get_type(root, field_path)
    isNonPrimitiveArray = isinstance(t, HArray) and \
        not isinstance(t.element_t, Bits)
    shouldEnter = isinstance(t, HStruct) or isNonPrimitiveArray
    shouldUse = not shouldEnter
    return shouldEnter, shouldUse
def version_to_int(version):
    if isinstance(version, int):
        return version
    elif isinstance(version, str):
        version = version.replace('v', '').replace('.', '')
    else:
        return None
    try:
        version = int(version)
    except ValueError:
        version = None
    return version
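# Minimal check of version_to_int's normalization (no external deps):
assert version_to_int(42) == 42          # ints pass through
assert version_to_int('v1.2.3') == 123   # 'v' and dots stripped, then int()
assert version_to_int('abc') is None     # non-numeric remainder
assert version_to_int(1.5) is None       # unsupported type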
def save(self, agent, buildParameter, epoch):
    assert isinstance(agent, Agent)
    assert isinstance(buildParameter, BuildParameter)
    agentMemento = agent.createMemento()
    buildParameterMemento = buildParameter.createMemento()
    buildParameterKey = buildParameter.key
    buildParameterLabel = buildParameter.label
    storeField = StoreField(agentMemento, epoch, buildParameterMemento,
                            buildParameterKey, buildParameterLabel)
    self.store.append(storeField)
def __setitem__(this, key, val):
    ''' note: by default this only caches updates locally '''
    this.log.debug("MemWr: " + str(key) + ':' + str(val))
    addr = key[0]
    size = key[1]
    assert isinstance(addr, int)
    assert isinstance(val, int)
    if this.writeback:
        this.mbus.write_mem(addr, val, size)
    else:
        this.local[key] = val  # not the best, but ehh
def dump(node, p='', outfile=sys.stdout):
    if isinstance(node, (ast.expr, ast.stmt)):
        print(p + node.__class__.__name__, node.lineno, file=outfile)
    else:
        print(p + node.__class__.__name__, file=outfile)
    for key, value in ast.iter_fields(node):
        if key == 'body' \
                or (isinstance(value, list) and not value) \
                or (key in ('starargs', 'kwargs') and value is None):
            continue
        print(p + '+ {0}={1}'.format(key, value), file=outfile)
    for child in ast.iter_child_nodes(node):
        dump(child, p + '  ', outfile)
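# Usage sketch: dump the AST of a one-line module to stdout.
import ast

tree = ast.parse("x = 1 + 2")
dump(tree)  # prints Module, then an indented Assign subtree with line numbers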
def __init__(self, X, Y):
    if not isinstance(X, np.ndarray) or not isinstance(Y, np.ndarray):
        self.X = np.asarray(X)
        self.Y = np.asarray(Y)
    else:
        self.X = X
        self.Y = Y
    # Use self.Y here so this also works when Y was passed as a plain list.
    self.A = [self.Y.shape[0]]
    self.AVar = [self.Y.shape[0]]
    self.SSE = None
    self.SST = None
    self.SSR = None
def update(self, batchDataIn, batchDataOut):
    assert isinstance(batchDataIn, PoleBatchDataEnvironment)
    assert isinstance(batchDataOut, PoleBatchDataAgent)
    _Y = batchDataIn._Y          # (Nhrz+1, *, Ny)
    _Yhat = batchDataOut._Yhat   # (Nhrz+1, *, Ny)
    _loss = torch.mean((_Y - _Yhat) ** 2)
    self.optimizer.zero_grad()
    _loss.backward()
    self.optimizer.step()
def times(self, other):
    """Provides a numerical approximation of the multiplication between
    an FDataBasis object and another object.

    Args:
        other (int, list, FDataBasis): Object to multiply with the
            FDataBasis object.

            * int: Multiplies all samples with the value.
            * list: multiplies each value with the samples respectively.
              Length should match with FDataBasis samples.
            * FDataBasis: if there is one sample it multiplies this with
              all the samples in the object. If not, it multiplies each
              sample respectively. Samples should match.

    Returns:
        (FDataBasis): FDataBasis object containing the multiplication
    """
    if isinstance(other, FDataBasis):
        if not _same_domain(self.domain_range, other.domain_range):
            raise ValueError("The functions domains are different.")

        basisobj = self.basis.basis_of_product(other.basis)
        neval = max(constants.BASIS_MIN_FACTOR *
                    max(self.n_basis, other.n_basis) + 1,
                    constants.N_POINTS_COARSE_MESH)
        (left, right) = self.domain_range[0]
        evalarg = np.linspace(left, right, neval)

        first = self.copy(coefficients=(np.repeat(self.coefficients,
                                                  other.n_samples, axis=0)
                                        if (self.n_samples == 1 and
                                            other.n_samples > 1)
                                        else self.coefficients.copy()))
        second = other.copy(coefficients=(np.repeat(other.coefficients,
                                                    self.n_samples, axis=0)
                                          if (other.n_samples == 1 and
                                              self.n_samples > 1)
                                          else other.coefficients.copy()))

        fdarray = first.evaluate(evalarg) * second.evaluate(evalarg)
        return FDataBasis.from_data(fdarray, evalarg, basisobj)

    if isinstance(other, int):
        other = [other for _ in range(self.n_samples)]

    coefs = np.transpose(np.atleast_2d(other))
    return self.copy(coefficients=self.coefficients * coefs)
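# Standalone numpy sketch of the scalar branch above: an int is expanded to a
# per-sample column vector and multiplied into the coefficient matrix
# (assumed sample shapes; no scikit-fda dependency needed for the arithmetic).
import numpy as np

coefficients = np.array([[1.0, 2.0], [3.0, 4.0]])  # 2 samples, 2 basis functions
other = [5 for _ in range(2)]                       # int 5 expanded per sample
coefs = np.transpose(np.atleast_2d(other))          # shape (2, 1), broadcasts per row
assert np.array_equal(coefficients * coefs, np.array([[5.0, 10.0], [15.0, 20.0]]))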
def set_scatter_data_pg(widget: ScatterPlotWidget, data):
    def field_flags(name, ftype, values):
        if not issubclass(ftype, Number):
            return dict(mode='enum', values=list(values))
        return dict(values=list(set(values)))

    if isinstance(data, Sequence) and len(data) > 0 and isinstance(data[0], Mapping):
        ftypes = OrderedDict()
        fvalues = OrderedDict()

        def to_acceptable_value(v):
            if isinstance(v, Number):
                return v
            else:
                return str(v)

        for record in data:
            for k, v in flatten_dicts(record):
                acc_val = to_acceptable_value(v)
                ftypes[k] = type(acc_val)
                fvalues.setdefault(k, set()).add(acc_val)

        def ftype_to_numpy(ftype):
            if issubclass(ftype, str):
                return 'U1024'
            return ftype

        dtype = [(name, ftype_to_numpy(ftype)) for name, ftype in ftypes.items()]
        data_ar = np.empty(len(data), dtype=dtype)
        for i, record in enumerate(data):
            for k, v in flatten_dicts(record):
                data_ar[i][k] = to_acceptable_value(v)
        widget.setFields([(name, field_flags(name, ftypes[name], fvalues[name]))
                          for name in ftypes])
    elif isinstance(data, np.ndarray):
        widget.setFields([(name, field_flags(name,
                                             data.dtype.fields[name][0].type,
                                             data[name]))
                          for name in data.dtype.names])
        data_ar = data
    else:
        raise Exception(
            "data type not supported, you may want to add the support here")
    widget.setData(data_ar)
def produce_profile_option(option: str, value: str, origin: str = None) -> AnyEntry:
    """Instantiate option by name"""
    assert isinstance(option, str), f"need option string: {option}"
    assert isinstance(value, str), f"need value string: {value}"
    option_mapper = profile_option_mapper()
    if option in option_mapper:
        entry_class = option_mapper[option]
        return entry_class(value=value, origin=origin)
    else:
        raise TypeError(
            f"Invalid option: '{option}'. Supported list: {profile_option_list()}"
        )
def _揣來源資料(self, 內容資料, 會使加新的):
    if isinstance(內容資料, int):
        return 來源表.objects.get(pk=內容資料)
    if isinstance(內容資料, (str, dict)):
        來源物件 = self._內容轉物件(內容資料)
        try:
            return 來源表.揣來源(來源物件)
        except TypeError:
            raise ValueError('來源毋是有效字串!!')
        except ObjectDoesNotExist:
            if 會使加新的:
                return 來源表.加來源(來源物件)
            raise
    return 內容資料
def _split_params(
    self, params: Dict[str, Any]
) -> Tuple[Dict[str, Any], Dict[str, LazyWorkflowDataFrame]]:
    p: Dict[str, Any] = {}
    dfs: Dict[str, LazyWorkflowDataFrame] = {}
    for k, v in params.items():
        if isinstance(v, (int, str, float, bool)):
            p[k] = v
        elif isinstance(v, (DataFrame, Yielded)) or is_acceptable_raw_df(v):
            dfs[k] = LazyWorkflowDataFrame(k, v, self)
        else:
            p[k] = v
    return p, dfs
def 加資料(cls, 輸入內容):
    外語 = cls()
    內容 = 外語._內容轉物件(輸入內容)
    if isinstance(內容['外語語言'], int):
        外語.外語語言 = 語言腔口表.objects.get(pk=內容['外語語言'])
    elif isinstance(內容['外語語言'], str):
        外語.外語語言 = 語言腔口表.objects.get_or_create(語言腔口=內容['外語語言'])[0]
    else:
        外語.外語語言 = 內容['外語語言']
    if isinstance(內容['外語資料'], str):
        外語.外語資料 = 內容['外語資料']
    else:
        raise ValueError('外語資料必須愛是字串型態')
    外語._加基本內容而且儲存(內容)
    return 外語
def get_logo_button(self, index):
    """ Return the button of the station specified by its index

    :param index: button index in the playlist
    :return: current station button
    """
    try:
        self.button = self.buttons[str(index)]
    except KeyError:
        pass  # keep the previously selected button

    b = self.factory.create_station_button(self.button.state, self.bounding_box,
                                           self.switch_mode)
    b.components[1].content = self.button.state.icon_base
    img = b.components[1].content
    if isinstance(img, tuple):
        img = img[1]

    bb = self.bounding_box
    logo_height = int((200 * bb.h) / 228)
    img = self.util.scale_image(img, (logo_height, logo_height))
    b.components[1].content = img
    b.components[1].content_x = bb.x + bb.w / 2 - img.get_size()[0] / 2
    b.components[1].content_y = bb.y + bb.h / 2 - img.get_size()[1] / 2

    return b
def __setitem__(this,key,val): ''' note: by default this only caches updates locally ''' this.log.debug("RegWr: " + str(key) + ':' + hex(val)) if key in this.warn_names: this.log.warn('Writing: ' + str(key) + ' as 0') return 0 elif key in this.warn_trans_names: this.log.warning('Writing' + str(this.warn_trans_names[key]) + \ ' in place of ' + str(key)) key = this.warn_trans_names[key] elif key in this.trans_names: key = this.trans_names[key] assert( key in this.names) assert( isinstance(val, int)) if (this.writeback): assert(this.base_addr != None) mem_addr = this.base_addr + this.offsets[key] this.mbus.write_mem(mem_addr,val,32) else: this.local[key] = val
def transform_config(self, obj, config):
    """ transform config field in master model """
    # config = json.loads(config)
    if isinstance(config, dict):
        details = dict(
            event_return=config.get("event_return", 'Unknown'),
            worker_threads=config.get("worker_threads", 'Unknown'),
            pillar_version=config.get("pillar_version", 'Unknown'),
            mysql_db=config.get("mysql.db", 'Unknown'),
            mysql_host=config.get("mysql.host", 'Unknown'),
            postgresql_db=config.get("returner.pgjsonb.db", 'Unknown'),
            postgresql_host=config.get("returner.pgjsonb.host", 'Unknown'),
            rest_cherrypy=config.get("rest_cherrypy", 'Unknown'),
            file_roots=config.get("file_roots", 'Unknown'),
            transport=config.get("transport", 'Unknown'),
            key_logfile=config.get("key_logfile", 'Unknown'),
            renderer=config.get("renderer", 'Unknown'),
            max_event_size=config.get("max_event_size", 'Unknown'),
            loop_interval=config.get("loop_interval", 'Unknown'),
            master_job_cache=config.get("master_job_cache", 'Unknown'),
            log_file=config.get("log_file", 'Unknown'),
            interface=config.get("interface", 'Unknown'))
    else:
        details = dict()
    return details
def encryptedContent(self, virtualFile, offset, length):
    assert isinstance(virtualFile, VirtualFile)
    realDataOffset = offset - Encryption.FILE_KEYADDITION_LENGTH
    realDataLength = length
    keyAdditionOffset = 0
    keyAdditionLength = 0
    if 0 <= offset < Encryption.FILE_KEYADDITION_LENGTH:
        requiredlengthOfRealData = max(0, offset + length -
                                       Encryption.FILE_KEYADDITION_LENGTH)
        realDataOffset = 0
        realDataLength = max(requiredlengthOfRealData, 0)
        keyAdditionOffset = offset
        if offset + length > Encryption.FILE_KEYADDITION_LENGTH:
            keyAdditionLength = Encryption.FILE_KEYADDITION_LENGTH - offset
        else:
            keyAdditionLength = length

    realDataBlockedOffset = (realDataOffset // Encryption.BLOCKSIZE_BYTES) * \
        Encryption.BLOCKSIZE_BYTES
    diffOffset = realDataOffset - realDataBlockedOffset
    realDataBlockedLength = (ceil((realDataLength + diffOffset) /
                                  Encryption.BLOCKSIZE_BYTES)) * \
        Encryption.BLOCKSIZE_BYTES
    readData = virtualFile.read(realDataBlockedOffset, realDataBlockedLength)

    if "fileCipher" not in virtualFile.encryptionDict().keys():
        keyAddition = self.__getFileKeyAdditionFromPlainVirtualFile(virtualFile)
        cipher = self.__createCipher(keyAddition)
        virtualFile.encryptionDict()["fileCipher"] = cipher
        virtualFile.encryptionDict()["fileKeyAddition"] = keyAddition
    cipher = virtualFile.encryptionDict()["fileCipher"]
    keyAddition = virtualFile.encryptionDict()["fileKeyAddition"]

    padding = realDataBlockedOffset + realDataBlockedLength > virtualFile.size()
    encryptedData = self.__encrypt(cipher, readData, padding)
    return (keyAddition[keyAdditionOffset:keyAdditionOffset + keyAdditionLength] +
            encryptedData[diffOffset:diffOffset + length - keyAdditionLength])
def __set_ethernet_uris(self, ethernet_names, operation="add"):
    """Updates network uris."""
    if not isinstance(ethernet_names, list):
        ethernet_names = [ethernet_names]
    associated_enets = self.data.get('networkUris', [])
    ethernet_uris = []
    for enet in ethernet_names:
        enet_exists = self._ethernet_networks.get_by_name(enet)
        if enet_exists:
            ethernet_uris.append(enet_exists.data['uri'])
        else:
            raise HPOneViewResourceNotFound(
                "Ethernet: {} does not exist".format(enet))
    if operation == "remove":
        enets_to_update = sorted(list(set(associated_enets) - set(ethernet_uris)))
    elif operation == "add":
        enets_to_update = sorted(list(set(associated_enets).union(set(ethernet_uris))))
    else:
        raise ValueError("Value {} is not supported as operation. "
                         "The supported values are: ['add', 'remove']".format(operation))
    if set(enets_to_update) != set(associated_enets):
        updated_network = {'networkUris': enets_to_update}
        self.update(updated_network)
def __init__(this, mbus, writeback=False, log_level=logging.WARN):
    assert isinstance(mbus, MBusInterface)
    this.mbus = mbus
    this.writeback = writeback
    this.local = {}
    this.log = m3_logging.getLogger(type(this).__name__)
    this.log.setLevel(log_level)
def to_windows_path(path):
    # A Windows path can only be created from an absolute POSIX path
    if isinstance(path, pathlib.PosixPath) and path.is_absolute():
        # Create a wine windows path
        path_str = str(path)
        path_str = "Z:" + path_str
        path = pathlib.PureWindowsPath(path_str)
    return path
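# Usage sketch (POSIX host assumed, since PosixPath cannot be built on Windows):
import pathlib

win = to_windows_path(pathlib.PosixPath("/home/user/game.exe"))
assert str(win) == "Z:\\home\\user\\game.exe"
# Relative or pure paths fall through unchanged:
rel = pathlib.PurePosixPath("relative/path")
assert to_windows_path(rel) == rel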
def 加資料(cls, 輸入內容):
    文本 = cls()
    內容 = 文本._內容轉物件(輸入內容)
    if isinstance(內容['文本資料'], str):
        文本.文本資料 = 內容['文本資料']
    else:
        raise ValueError('文本資料必須愛是字串型態')
    文本._加基本內容而且儲存(內容)
    return 文本
def __init__(self, command, *args):
    if isinstance(command, bytes):
        self.command = command
    else:
        self.command = Decoder.encodeData(command)
    # Note: 'args' collected via *args is always a tuple, so the two
    # isinstance checks below are dead code unless the signature changes.
    if isinstance(args, bytes):
        self.args = args
        return
    if isinstance(args, str):
        self.args = Decoder.encodeData(args)
        return
    argsLen = len(args)
    self.args = [None] * argsLen
    for i in range(argsLen):
        if isinstance(args[i], bytes):
            self.args[i] = args[i]
        else:
            self.args[i] = Decoder.encodeData(str(args[i]))
def write_mem(this, addr, value, size):
    this.log.debug('MBUS Writing ' + hex(value) + ' to ' + hex(addr))
    assert isinstance(addr, int)
    assert isinstance(value, int)
    assert size in [32, 16, 8]
    align32 = this._align32(addr, size)
    if size == 32:
        write32 = value
    elif size == 16:
        byte_idx = addr & 0x02
        mask32 = (2 ** size - 1) << (8 * byte_idx)
        mask32n = 0xffffffff - mask32  # bitwise not hack
        orig32 = this.read_mem(align32, 32)
        value32 = value << size | value  # just duplicate it
        value32 = value32 & mask32       # and mask it
        write32 = orig32 & mask32n
        write32 = write32 | value32
    elif size == 8:
        byte_idx = addr & 0x3
        mask32 = (2 ** size - 1) << (8 * byte_idx)
        mask32n = 0xffffffff - mask32  # bitwise not hack
        orig32 = this.read_mem(align32, 32)
        value32 = value << (8 * byte_idx)
        value32 = value32 & mask32
        write32 = orig32 & mask32n
        write32 = write32 | value32
    this.log.debug("MBUS Writing " + hex(write32) + " @ " + hex(align32))
    prc_memwr = struct.pack(">I", (this.prc_addr << 4) | 0x2)
    memwr_addr = struct.pack(">I", align32)
    memwr_data = struct.pack(">I", write32)
    this.ice.mbus_send(prc_memwr, memwr_addr + memwr_data)
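# Standalone sketch of the 8-bit read-modify-write masking used above:
# writing 0xAB at byte offset 1 of an existing 32-bit word 0x11223344
# (assumed sample values, no hardware required).
orig32 = 0x11223344
value, byte_idx, size = 0xAB, 1, 8
mask32 = (2 ** size - 1) << (8 * byte_idx)  # 0x0000FF00 selects the target byte
mask32n = 0xffffffff - mask32               # clears the target byte
write32 = (orig32 & mask32n) | ((value << (8 * byte_idx)) & mask32)
assert write32 == 0x1122AB44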
def msetnx(self, keyword):
    if not isinstance(keyword, dict):
        raise Exception('parameter must be a dict of key/value pairs')
    value = []
    for k, v in keyword.items():
        value.append(k)
        value.append(v)
    self._conn.send_command(MSETNX, *value)
    recv = self._conn.recv()
    return self.parse_recv(recv)
def decryptedFileSize(self, virtualFile):
    assert isinstance(virtualFile, VirtualFile)
    absRootPathFileSize = virtualFile.size()
    if absRootPathFileSize % Encryption.BLOCKSIZE_BYTES != 0:
        raise MalformedInputException('The file ' + virtualFile.name() +
                                      ' is not properly encrypted.')
    lastBlock = virtualFile.read(absRootPathFileSize - Encryption.BLOCKSIZE_BYTES,
                                 Encryption.BLOCKSIZE_BYTES)
    keyAddition = self.__getKeyAdditionFromEncryptedVirtualFile(virtualFile)
    cipher = self.__createCipher(keyAddition)
    return (absRootPathFileSize - Encryption.FILE_KEYADDITION_LENGTH -
            Encryption.BLOCKSIZE_BYTES + len(self.__decrypt(cipher, lastBlock, True)))
def align_images(images):
    if not isinstance(images, list) or len(images) < 2:
        print("Input has to be a list of at least two images")
        return None
    size = images[0].shape
    for i in range(len(images)):
        if not images[i].shape == size:
            print("Input images have to be of the same size")
            return None

    # Convert images to grayscale
    gray_images = []
    for image in images:
        gray_images.append(cv2.cvtColor(image, cv2.COLOR_BGR2GRAY))
    model_image = gray_images[0]

    # Find size of images
    sz = model_image.shape

    # Define the motion model
    warp_mode = cv2.MOTION_TRANSLATION

    # Define 2x3 or 3x3 matrices and initialize the matrix to identity
    if warp_mode == cv2.MOTION_HOMOGRAPHY:
        warp_matrix = np.eye(3, 3, dtype=np.float32)
    else:
        warp_matrix = np.eye(2, 3, dtype=np.float32)

    # Specify the number of iterations.
    number_of_iterations = 5000

    # Specify the threshold of the increment in the correlation coefficient
    # between two iterations
    termination_eps = 1e-10

    # Define termination criteria
    criteria = (cv2.TERM_CRITERIA_EPS | cv2.TERM_CRITERIA_COUNT,
                number_of_iterations, termination_eps)

    # Run the ECC algorithm. The results are stored in warp_matrix.
    aligned_images = [images[0]]
    for i in range(1, len(images)):
        (cc, warp_matrix) = cv2.findTransformECC(model_image, gray_images[i],
                                                 warp_matrix, warp_mode, criteria)
        if warp_mode == cv2.MOTION_HOMOGRAPHY:
            # Use warpPerspective for Homography
            aligned_image = cv2.warpPerspective(images[i], warp_matrix, (sz[1], sz[0]),
                                                flags=cv2.INTER_LINEAR + cv2.WARP_INVERSE_MAP)
        else:
            # Use warpAffine for Translation, Euclidean and Affine
            aligned_image = cv2.warpAffine(images[i], warp_matrix, (sz[1], sz[0]),
                                           flags=cv2.INTER_LINEAR + cv2.WARP_INVERSE_MAP)
        aligned_images.append(aligned_image)
    return aligned_images
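# Hedged usage sketch for align_images: load two frames with OpenCV and align
# the second onto the first (the file names here are placeholders).
import cv2

frames = [cv2.imread("frame0.png"), cv2.imread("frame1.png")]
aligned = align_images(frames)
if aligned is not None:
    cv2.imwrite("frame1_aligned.png", aligned[1])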
def _create_all_timeline_stacks(timeline):
    all_stacks = []
    local_timeline = []
    for handle in timeline.heap:
        # An 'if' here, not 'while': nothing in the body changes the
        # condition, so a while-loop would never terminate.
        if handle.when is not None:
            local_timeline.append(ElementHandle(handle.when, handle.ix,
                                                handle.timeline,
                                                handle.is_scheduled,
                                                handle.element))
    for element_handle in sorted(local_timeline):
        if element_handle.element is not None:
            if isinstance(element_handle.element, AlarmElement):
                pass
            all_stacks.append(_create_stack(element_handle))
    return all_stacks
def hmset(self, key, field_value):
    if isinstance(field_value, dict):
        value = []
        for k, v in field_value.items():
            value.append(k)
            value.append(v)
        self._conn.send_command(HMSET, key, *value)
        recv = self._conn.recv()
        res = self.parse_recv(recv)
        return res
    else:
        return None
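# Standalone sketch of the dict-to-flat-argument flattening used by
# msetnx/hmset above (assumed sample data; dicts preserve insertion order).
field_value = {'name': 'ada', 'age': 36}
value = []
for k, v in field_value.items():
    value.append(k)
    value.append(v)
assert value == ['name', 'ada', 'age', 36]  # interleaved key/value arguments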
def 加資料(cls, 輸入內容):
    聽拍 = cls()
    內容 = 聽拍._內容轉物件(輸入內容)
    if isinstance(內容['規範'], int):
        聽拍.規範 = 聽拍規範表.objects.get(pk=內容['規範'])
    elif isinstance(內容['規範'], str):
        聽拍.規範 = 聽拍規範表.objects.get(規範名=內容['規範'])
    else:
        聽拍.規範 = 內容['規範']
        # raise TypeError('規範必須愛是字串抑是整數型態')
    聽拍資料內容 = 聽拍._內容轉物件(內容['聽拍資料'])
    try:
        for 一句 in 聽拍資料內容:
            if not isinstance(一句, dict):
                raise ValueError('聽拍資料內底應該是字典型態')
            if '內容' not in 一句:
                raise KeyError('逐句聽拍資料攏愛有「內容」欄位')
    except TypeError:
        raise ValueError('聽拍資料應該是字典型態')
    聽拍.聽拍資料 = json.dumps(聽拍資料內容)
    聽拍._加基本內容而且儲存(內容)
    return 聽拍
def 加資料(cls, 輸入內容):
    文本 = cls()
    內容 = 文本._內容轉物件(輸入內容)
    try:
        內容['屬性'] = 文本._內容轉物件(內容['屬性'])
    except Exception:
        pass
    if isinstance(內容['文本資料'], str):
        文本.文本資料 = 內容['文本資料']
    else:
        raise ValueError('文本資料必須愛是字串型態')
    文本._揣出內容的音標資料(內容)
    文本._加基本內容而且儲存(內容)
    return 文本
def _加資料(cls, 輸入內容):
    聽拍 = cls()
    內容 = 聽拍._內容轉物件(輸入內容)
    聽拍資料內容 = 聽拍._內容轉物件(內容['聽拍資料'])
    try:
        for 一句 in 聽拍資料內容:
            if not isinstance(一句, dict):
                raise ValueError('聽拍資料內底應該是字典型態')
            if '內容' not in 一句:
                raise KeyError('逐句聽拍資料攏愛有「內容」欄位')
    except TypeError:
        raise ValueError('聽拍資料應該是字典型態')
    聽拍.聽拍資料 = json.dumps(聽拍資料內容)
    聽拍._加基本內容而且儲存(內容)
    return 聽拍
def write(self, event, channel=None, data=None, **kw):
    msg = {'event': event}
    if channel:
        msg['channel'] = channel
    if kw:
        if data:
            data.update(kw)
        else:
            data = kw
    if data:
        if not isinstance(data, str):
            data = json.dumps(data)
        msg['data'] = data
    array = [json.dumps(msg)]
    self.transport.write('a%s' % json.dumps(array))
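# Minimal sketch with a stub transport (hypothetical _StubTransport/_Conn names)
# showing the SockJS-style 'a<json array>' frame that write() emits; the real
# transport is whatever the surrounding class wires in.
import json

class _StubTransport:
    def write(self, frame):
        print(frame)

class _Conn:
    def __init__(self):
        self.transport = _StubTransport()

write(_Conn(), 'tick', channel='t1', n=1)
# prints an 'a[...]' frame wrapping the JSON-encoded message, with the extra
# kwarg n=1 folded into the 'data' payload.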
def val(E):
    # print(E)
    # print("Program is ", Program)
    if isScalar(E):
        return E
    if isinstance(E, str) and (E, 0) in Program:
        return valDefined(E, [])
    (Op, X) = E
    if Op in {'and', '=>', 'some', 'all', 'setComp', 'Union', 'Sum', 'Prod', 'Nrsec'}:
        Args = X
    else:
        # print(E)
        Args = [val(e) for e in X]
    if Op == 'vector':
        return ('vector', Args)
    if Op == 'set':
        return ('set', Args)
    if Op == 'tuple':
        return ('tuple', Args)
    # if Op == 'some': return valSome(Args)
    # if Op == 'all':  return valAll(Args)
    if Op in builtIns:
        return valBuiltIn(Op, Args)
    if (Op, len(Args)) in Program:
        return valDefined(Op, Args)
def DefVal(fbody):
    if not isinstance(fbody, tuple):
        # function body is a user defined constant
        if (fbody, 0) in Program:
            return val(fbody)
        return fbody
    if not fbody[0] == 'cond':
        return val(fbody)
    # function body is a conditional
    for clause in fbody[1]:
        (op, Args) = clause
        if op == 'if':
            [guard, term] = Args
            guardValue = val(guard)
            if guardValue:
                return val(term)
        if op == 'ow':
            term = Args[0]
            return val(term)
def isGround(E):
    if not isinstance(E, tuple):
        if isScalar(E):
            return True
        if E in builtIns:
            return True
        if E in ['set', 'tuple', 'vector']:
            return True
        if (E, 0) in Program:
            return True
        else:
            return False
    (Op, Args) = E
    # special case for some/all var in term: sentence
    if Op in ['some', 'all']:
        return True
    if isGround(Op) or (Op, len(Args)) in Program:
        return all(isGround(i) for i in Args)
    return False
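# Tiny runnable sketch of isGround's shape; the Program/builtIns/isScalar
# bindings below are assumed stubs, standing in for the interpreter's real
# global state.
Program = {('pi', 0): 3.14159}
builtIns = {'+'}

def isScalar(E):
    return isinstance(E, (int, float, bool))

assert isGround(3)                    # scalar
assert isGround('pi')                 # 0-arity defined constant
assert isGround(('+', [1, 'pi']))     # built-in applied to ground args
assert not isGround(('+', [1, 'x']))  # 'x' is a free variable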