def __init__(self, verbose_name=None, name=None, max_digits=None,
             decimal_places=None, default=Decimal("0.0"),
             default_currency=DEFAULT_CURRENCY, **kwargs):
    """Django model field storing a monetary amount plus its currency.

    Raises Exception when max_digits or decimal_places is missing,
    since the underlying DecimalField requires both.
    """
    if isinstance(default, Money):
        # A Money default carries its own currency; honour it.  (Bug fix:
        # the original unconditionally overwrote self.default_currency
        # with the default_currency argument below, discarding this.)
        self.default_currency = default.currency
    else:
        self.default_currency = default_currency
    # Avoid giving the user hard-to-debug errors if they miss required attributes
    if max_digits is None:
        raise Exception(
            "You have to provide a max_digits attribute to Money fields.")
    if decimal_places is None:
        raise Exception(
            "You have to provide a decimal_places attribute to Money fields."
        )
    super(MoneyField, self).__init__(verbose_name, name, max_digits,
                                     decimal_places, default=default,
                                     **kwargs)
def on_cmd_branch(args, branch_functions):
    """Dispatch a branch sub-command ('start', 'finish', 'list', 'help').

    `branch_functions` maps command names to callables; 'start' and
    'finish' require a release name in args[1].  Errors are printed via
    console_utils and followed by the help text rather than propagated.
    """
    if len(args) < 1:
        cmd = "help"
    else:
        cmd = args[0]
    try:
        if cmd == "start":
            if len(args) < 2:
                raise Exception("Release name must be specified.")
            branch_functions["start"](args[1])
        elif cmd == "finish":
            if len(args) < 2:
                raise Exception("Release name must be specified.")
            branch_functions["finish"](args[1])
        elif cmd == "list":
            branch_functions["list"]()
        elif cmd == "help":
            branch_functions["help"]()
        else:
            raise Exception("Unknown command: " + cmd)
    except Exception as e:
        # `except Exception, e` was Python-2-only syntax; `as` works on 2.6+.
        console_utils.print_error(str(e))
        branch_functions["help"]()
def create_vm(self, lab_spec):
    """Ask the platform adapter (HTTP POST) to create a VM for `lab_spec`.

    Returns the state dict built by self.construct_state().  Raises
    Exception when the adapter reports an error or cannot be reached.
    """
    # vm_id allocation is delegated to the platform adapter.
    # (Fixed log format: the adjacent string literals previously joined
    # without separators, producing e.g. "Desciption=%sAdapterIP=%s".)
    logger.debug("VMPool: create_vm(); poolID=%s, Description=%s, "
                 "AdapterIP=%s, AdapterPort=%s, CreatePath=%s, "
                 "DestroyPath=%s" %
                 (self.vmpool_id, self.vm_description, self.adapter_ip,
                  self.adapter_port, self.create_path, self.destroy_path))
    adapter_url = "%s:%s%s" % (self.adapter_ip, self.adapter_port,
                               self.create_path)
    payload = {'lab_spec': json.dumps(lab_spec)}
    logger.debug("VMPool: create_vm(); adapter_url = %s, payload = %s" %
                 (adapter_url, str(payload)))
    try:
        result = requests.post(url=adapter_url, data=payload)
        logger.debug("VMPool: create_vm(): Response text from adapter: " +
                     result.text)
        if result.status_code == requests.codes.ok:
            # Decode the JSON body once instead of once per field.
            info = result.json()
            return self.construct_state(lab_spec, info["vm_id"],
                                        info["vm_ip"], info["vm_port"])
        else:
            raise Exception("VMPool: create_vm(): Error creating VM: " +
                            result.text)
    except Exception as e:
        # Fixed "withadapter" (missing space) in the error log message.
        logger.error("VMPool: create_vm(): Error communicating with "
                     "adapter: " + str(e))
        raise Exception("VMPool: create_vm(): Error creating VM: " + str(e))
def __init__(self, verbose_name=None, name=None, max_digits=None,
             decimal_places=None, default=Decimal("0.0"),
             default_currency=DEFAULT_CURRENCY, **kwargs):
    """Money model field: a DecimalField paired with a currency."""
    self.frozen_by_south = kwargs.pop("frozen_by_south", False)
    if isinstance(default, Money):
        # A Money default already knows its currency.
        self.default_currency = default.currency
    else:
        self.default_currency = default_currency
        # To ultimately pass default_currency on to widget
        default = Money(default, default_currency)
    # Avoid giving the user hard-to-debug errors if they miss required attributes
    if max_digits is None:
        raise Exception(
            "You have to provide a max_digits attribute to Money fields.")
    if decimal_places is None:
        raise Exception(
            "You have to provide a decimal_places attribute to Money fields."
        )
    super(MoneyField, self).__init__(verbose_name, name, max_digits,
                                     decimal_places, default=default,
                                     **kwargs)
def determine_jdk_directory(cluster): """ Return the directory where the JDK is installed. For example if the JDK is located in /usr/java/jdk1.8_91, then this method will return the string 'jdk1.8_91'. This method will throw an Exception if the number of JDKs matching the /usr/java/jdk* pattern is not equal to 1. :param cluster: cluster on which to search for the JDK directory """ # TODO: remove this Oracle JDK checks once we port all OS type images to JDK 11 zulu_jdk = cluster.exec_cmd_on_host( cluster.master, 'bash -c "ls -ld /usr/lib/jvm/zulu-11| wc -l"') try: if int(zulu_jdk) == 1: return ZULU_JDK raise Exception( 'The number of Zulu JDK directories matching /usr/lib/jvm/zulu-11 is not 1' ) except ValueError: number_of_jdks = cluster.exec_cmd_on_host( cluster.master, 'bash -c "ls -ld /usr/java/j*| wc -l"') if int(number_of_jdks) != 1: raise Exception( 'The number of JDK directories matching /usr/java/jdk* is not 1' ) output = cluster.exec_cmd_on_host(cluster.master, 'ls -d /usr/java/j*') return output.split(os.path.sep)[-1].strip('\n')
def test_exc(self):
    # Python 2 only: built-in exceptions are also exposed via the
    # `exceptions` module (removed in Python 3).
    from exceptions import Exception, BaseException
    assert issubclass(Exception, BaseException)
    assert isinstance(Exception(), Exception)
    assert isinstance(Exception(), BaseException)
    # repr() shows the constructor argument tuple.
    assert repr(Exception(3, "x")) == "Exception(3, 'x')"
    # EnvironmentError-style formatting of (errno, strerror) pairs.
    assert str(IOError("foo", "bar")) == "[Errno foo] bar"
def __init__(self, status, msg):
    """Memcached protocol error: numeric status plus optional detail text."""
    detail = 'Memcached error #' + repr(status)
    if msg:
        detail = detail + ": " + msg
    Exception.__init__(self, detail)
    self.status = status
    self.msg = msg
def __raise_if_dir_invalid(self, dir_path):
    """Raise when dir_path exists but is untracked, or is tracked but not a directory."""
    full_path = self.svn.full_path(dir_path)
    tracked = self.svn.is_tracked(dir_path)
    if os.path.exists(full_path) and not tracked:
        raise Exception("'" + dir_path + "' is not tracked by SVN.")
    if tracked and not os.path.isdir(full_path):
        raise Exception("'" + dir_path + "' is not a directory.")
def parse_dob(dob_string): """Tries to parse a string into some kind of date representation. Note that we don't use Date objects to store things away, because we want to accept limited precision dates, ie, just the year if necessary.""" # simple #### date.. ie, 1987 or 87 m3 = re.search("^(\d+)$", dob_string) if m3: value = m3.group(1) # two digit date, guess on the first digits based on size if len(value) == 2: if int(value) <= date.today().year % 100: value = "20%s" % value else: value = "19%s" % value # we have a four digit date, does it look reasonable? if len(value) == 4: return value # full date: DD.MM.YYYY m3 = re.search("^(\d+)\.(\d+)\.(\d+)$", dob_string) if m3: dd = m3.group(1) mm = m3.group(2) yyyy = m3.group(3) # print "%s = '%s' '%s' '%s'" % (dob_string, dd, mm, yyyy) # make sure we are in the right format if len(dd) > 2 or len(mm) > 2 or len(yyyy) != 4 or int(yyyy) < 2009: raise Exception( _("Invalid date format, must be in the form: DD.MM.YYYY")) # invalid month if int(mm) > 12 or int(mm) < 1: raise Exception( _("Invalid date format, must be in the form: DD.MM.YYYY")) # invalid day if int(dd) > 31 or int(dd) < 1: raise Exception( _("Invalid date format, must be in the form: DD.MM.YYYY")) # is the year in the future if int(yyyy) > int(date.today().year): raise Exception(_("Invalid date, cannot be in the future.")) #is the the date in future dob = "%02d.%02d.%04d" % (int(dd), int(mm), int(yyyy)) if datetime.strptime(dob, "%d.%m.%Y").date() > date.today(): raise Exception(_("Invalid date, cannot be in the future.")) # Otherwise, parse into our format return "%02d.%02d.%04d" % (int(dd), int(mm), int(yyyy)) return None
def __init__(self, error, status, code=400, headers=None):
    """API error carrying a list of messages, a code, a status and headers."""
    Exception.__init__(self)
    # Normalise a single error into a one-element list of strings.
    self.errors = error if isinstance(error, list) else [str(error)]
    self.code = code
    self.status = status
    self.headers = headers
def deleteRequest(self, url, data={}):
    """DELETE `url` with `data` JSON-encoded as the request body.

    Returns the decoded response body via self.toJson.  Raises Exception
    on 400 (deletion failed), 404 (cluster unknown) or any other
    non-success status.

    NOTE(review): `data={}` is a shared mutable default — safe only while
    no caller mutates it.  Also, status 300 passes the final check
    (`> 300`, not `>= 300`); confirm that is intended.
    """
    encodedData = json.dumps(data)
    retVal = requests.delete(url, data=encodedData)
    if retVal.status_code == 400:
        raise Exception("Error: Failed to execute cluster deletion process")
    elif retVal.status_code == 404:
        raise Exception("Error: Cluster not found in db")
    elif retVal.status_code < 200 or retVal.status_code > 300:
        raise Exception("Unknown error deleting environment: %s" %
                        (data['name']))
    return self.toJson(retVal.content)
def create_vm(self, lab_spec):
    """Ask the platform adapter (HTTP POST) to create a VM for `lab_spec`.

    Returns a state dict combining the lab spec, the new VM's info, the
    pool's info and empty status/history slots.  Raises Exception when
    the adapter reports an error or cannot be reached.
    """

    def construct_state(vm_id, vm_ip, vmm_port):
        # Full record for this VM; status/history fields are filled later.
        return {
            "lab_spec": lab_spec,
            "vm_info": {
                "vm_id": vm_id,
                "vm_ip": vm_ip,
                "vmm_port": vmm_port
            },
            "vmpool_info": {
                "vmpool_id": self.vmpool_id,
                "vm_description": self.vm_description,
                "adapter_ip": self.adapter_ip,
                "adapter_port": self.adapter_port
            },
            "vm_status": {
                "last_known_status": None,
                "last_successful_connection": None,
                "reconnect_attempts": None,
                "disk_usage": None,
                "mem_usage": None
            },
            "lab_history": {
                "released_by": None,
                "released_on": None,
                "destroyed_by": None,
                "destroyed_on": None
            }
        }

    # Fixed "Desciption" typo in the debug message.
    Logging.LOGGER.debug(
        "VMPool: create_vm(); poolID=%s, Description=%s, AdapterIP=%s, "
        "AdapterPort=%s, CreatePath=%s, DestroyPath=%s" %
        (self.vmpool_id, self.vm_description, self.adapter_ip,
         self.adapter_port, self.create_path, self.destroy_path))
    adapter_url = "%s:%s%s" % (self.adapter_ip, self.adapter_port,
                               self.create_path)
    payload = {'lab_spec': json.dumps(lab_spec)}
    Logging.LOGGER.debug(
        "VMPool: create_vm(); adapter_url = %s, payload = %s" %
        (adapter_url, str(payload)))
    try:
        result = requests.post(url=adapter_url, data=payload)
        Logging.LOGGER.debug(
            "VMPool: create_vm(): Response text from adapter: " + result.text)
        if result.status_code == requests.codes.ok:
            # Decode the JSON body once instead of once per field.
            info = result.json()
            return construct_state(info["vm_id"], info["vm_ip"],
                                   info["vmm_port"])
        else:
            raise Exception("VMPool: create_vm(): Error creating VM: " +
                            result.text)
    except Exception as e:
        # `except Exception, e` was Python-2-only syntax; `as` works on 2.6+.
        Logging.LOGGER.error(
            "VMPool: create_vm(): Error communicating with adapter: " + str(e))
        raise Exception("VMPool: create_vm(): Error creating VM: " + str(e))
def __init__(self, error_code, attach_value=''):
    """Look up `error_code` in t_sys_error_code and build the message.

    `attach_value`, when non-empty, is appended as a locating hint.  An
    unknown code yields a generic "unknown error" message and a `todo`
    reminding the maintainer to register the code.
    """
    attach_value = '' if attach_value == '' else ' (定位值:%s)' % attach_value
    obj = t_sys_error_code.objects.filter(error_code=error_code)
    if obj.count() > 0:
        err = obj[0].error_text + attach_value
        Exception.__init__(self, err)
        # Suggested remediation recorded alongside the known error code.
        self.todo = obj[0].possible_reason
    else:
        err = u'未知异常::>_<:: ' + attach_value
        Exception.__init__(self, err)
        self.todo = u'请维护error_code:%s' % error_code
def putRequest(self, url, data):
    """PUT `data` (JSON-encoded) to `url`.

    Returns the decoded response body, or None when the body cannot be
    decoded.  Raises Exception on a 400 response or any other
    non-success status.

    NOTE(review): status 300 passes the check below (`> 300`, not
    `>= 300`); confirm that is intended.
    """
    encodedData = json.dumps(data)
    retVal = requests.put(url, data=encodedData)
    if retVal.status_code == 400:
        raise Exception("Error: Invalid data supplied! %s" % (data))
    elif retVal.status_code < 200 or retVal.status_code > 300:
        raise Exception("Unknown error creating environment: %s" %
                        (data['name']))
    try:
        return self.toJson(retVal.content)
    except Exception:
        # Was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt; narrowed to Exception.
        return None
def __init__(self, message, traceback=None):
    """Create the exception.

    :type message: str
    :param message: explanation of the failure.
    :type traceback: str
    :param traceback: string form of the originating traceback, if any.
    """
    Exception.__init__(self, message)
    # Kept private; exposed via whatever accessor the class provides.
    self.__traceback = traceback
def get_link(session, channel):
    """Resolve the live HLS stream URL for the given atresplayer channel.

    Scrapes the landing page for the embedded SERVICES_HOST config, maps
    `channel` to its id, queries the live-player API for the master
    playlist and returns the variant selected by the module-level
    __QUALITY_OPTION index.  Raises Exception on any non-200 response or
    when the host list cannot be located.
    """
    # Impersonate a desktop Chrome browser for the whole session.
    session.headers.update({
        'user-agent':
        'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/66.0.3359.181 Safari/537.36',
        'pragma': 'no-cache',
        'accept-encoding': 'gzip',
        'accept-language': 'es-ES,es;q=0.9,ca;q=0.8,en;q=0.7',
    })
    request_headers = {}
    r = session.get('https://www.atresplayer.com/', headers=request_headers)
    if r.status_code != 200:
        raise Exception(
            r.status_code,
            "Error {}, message: {}".format(r.status_code,
                                           r.text.encode('utf-8')))
    # The page embeds a JS config object containing *_ID channel entries.
    m = re.findall('= ({.*SERVICES_HOST[^;]+)', r.text.encode('utf-8'))
    if not m:
        raise Exception(1, "Service host list not found")
    hosts = json.loads(m[0])
    channel_id = hosts[channel + '_ID']
    r = session.get('https://api.atresplayer.com/client/v1/player/live/' +
                    channel_id, headers=request_headers)
    if r.status_code != 200:
        raise Exception(
            r.status_code,
            "Error {}, message: {}".format(r.status_code,
                                           r.text.encode('utf-8')))
    channel_info = json.loads(r.text.encode('utf-8'))
    # First source is assumed to be the master (multi-variant) playlist —
    # TODO confirm against the API response shape.
    master = channel_info['sources'][0]['src']
    r = session.get(master, headers=request_headers)
    # Variant URLs are the non-comment lines of the playlist.
    options = re.findall('^[^#][^\n]+', r.text, re.MULTILINE)
    # Swap the master playlist filename for the chosen variant's.
    stream_link = re.sub('[^/]+$', options[__QUALITY_OPTION], master)
    return stream_link
def get_auth_tokens():
    """Log in to the logmet service and return its auth tokens.

    Returns a dict with 'logging_token', 'access_token' and 'space_id'.
    Raises Exception when the login request fails or is rejected.

    NOTE(review): token caching through the config file was previously
    commented out here; every call now performs a fresh login.
    """
    server = "logmet.ng.bluemix.net"
    user = "******"
    passwd = "passw0rd"
    space = "devel"
    organization = "*****@*****.**"
    data = {'user': user, 'passwd': passwd, 'space': space,
            'organization': organization}
    try:
        login_request = requests.post("https://" + server + "/login",
                                      data=data)
        login_status = login_request.status_code
    except Exception:
        # Was a bare `except:`; also dropped the dead `login_status = 401`
        # assignment since we re-raise immediately.
        e = sys.exc_info()[0]
        v = sys.exc_info()[1]
        raise Exception("Exception while requesting a token: %s %s"
                        % (str(e), str(v)))
    if login_status == 200:
        tokens_json = login_request.json()
        return {'logging_token': tokens_json['logging_token'],
                'access_token': tokens_json['access_token'],
                'space_id': tokens_json['space_id']}
    raise Exception("Errors while retrieving tokens. HTTP code %d"
                    % (login_status))
def get_share_list_for_height(self, index):
    """Map miner address -> {'blocks', 'payout_share'} for block height `index`.

    Raises Exception('no shares') when no shares exist at that height and
    NonMatchingDifficultyException when per-address difficulties do not
    sum to the total.
    """
    raw_shares = list(
        self.mongo.db.shares.find({'index': index}).sort([('index', 1)]))
    if not raw_shares:
        raise Exception('no shares')
    total_difficulty = self.get_difficulty([x['block'] for x in raw_shares])
    shares = {}
    for share in raw_shares:
        entry = shares.setdefault(share['address'], {'blocks': []})
        entry['blocks'].append(share['block'])
    add_up = 0
    for address, item in shares.iteritems():
        per_address_difficulty = self.get_difficulty(item['blocks'])
        item['payout_share'] = (float(per_address_difficulty) /
                                float(total_difficulty))
        add_up += per_address_difficulty
    if add_up == total_difficulty:
        return shares
    raise NonMatchingDifficultyException()
def Norm(self, col_number, type='L1'):
    """
    Calculates the Norm of column `col_number` against the time column
    (column 0).

    L1:  sum over t of ((|c1| + |c2|) / 2) * (t2 - t1)
    L2:  sum over t of ((c1**2 + c2**2) / 2) * (t2 - t1)

    Segments where the column changes sign (c[i] * c[i+1] <= 0) are
    skipped.  Raises Exception when `col_number` is out of range.
    """
    if col_number <= 0 or col_number > (self.getNbColumns() - 1):
        raise Exception("column number out of table, Please verify")
    values = self.getColumn(col_number)
    times = self.getColumn(0)
    res = 0
    for i in range(len(times) - 1):
        if values[i] * values[i + 1] <= 0:
            continue
        dt = times[i + 1] - times[i]
        if type.lower() == 'l1':
            res += ((abs(values[i]) + abs(values[i + 1])) / 2.) * dt
        elif type.lower() == 'l2':
            res += (values[i] * values[i] +
                    values[i + 1] * values[i + 1]) * 1. / 2 * dt
    return res
def addColumnValues(self, column):
    """Add a column of data to the table.

    An empty table accepts any length (the column becomes the first row
    of the underlying array); otherwise `column` must match the existing
    length or an Exception is raised.
    """
    existing_len = self.data.shape[1]
    new_len = len(column)
    if existing_len == 0:
        if new_len == 0:
            # Empty table, empty column: just normalise to shape (1, 0).
            self.data = numpy.reshape(self.data, (1, 0))
        else:
            # Empty table: the new column becomes the first data row.
            self.data = numpy.reshape(numpy.array(column), (1, new_len))
    else:
        if existing_len > 0 and new_len != existing_len:
            raise Exception(
                "New column must have the same length as existing ones %s %s"
                % (existing_len, new_len))
        new_row = numpy.reshape(numpy.array(column), (1, new_len))
        self.data = numpy.concatenate((self.data, new_row))
    return
def test_unicode_message(self):
    # Python 2 only: unicode() of an exception goes through __str__.
    assert unicode(Exception(u"\xe1")) == u"\xe1"
    class E(BaseException):
        def __str__(self):
            return u"\xe1"
    e = E()
    assert unicode(e) == u"\xe1"
def __init__(self, generic_error_msg, specific_msg=None):
    """Initializes this instance.

    :type generic_error_msg: str
    :param generic_error_msg: this object's generic error message.
    :type specific_msg: str
    :param specific_msg: optional additional detail, defaults to C{None}.
    """
    Exception.__init__(self)
    self.__generic_error_msg = generic_error_msg
    self.__specific_msg = specific_msg
    self._error_code = None
    # Compose the final message from the parts stored above.
    self._error_msg = self.get_error_message()
def prepend_changelog_entry(filename, package, version):
    """Prepend a new changelog entry for `package`/`version` to `filename`.

    Opens an editor pre-filled from CHANGELOG_TEMPLATE (with the previous
    entry quoted); raises Exception when the editor returns no text.  The
    file is rewritten by writing `<filename>.new` and renaming it over
    the original.
    """
    last_entry = ''.join('# %s' % l
                         for l in read_last_changelog_entry(filename))
    text = CHANGELOG_TEMPLATE % {
        'package': package,
        'version': version,
        'author': os.getenv('NOVABUILD_AUTHOR', DEFAULT_AUTHOR).decode('utf-8'),
        'email': os.getenv('NOVABUILD_EMAIL', DEFAULT_EMAIL).decode('utf-8'),
        'date': formatdate(localtime=True),
        'last_entry': last_entry,
    }
    text = run_editor(text)
    # We don't want to continue if there is no changelog!
    if not text:
        raise Exception("No changelog provided.")
    tmp_filename = filename + '.new'
    # `with` closes both handles even if a write fails (the original
    # leaked them on error).
    with open(tmp_filename, 'w') as fp:
        fp.write("%s (%s) stable; urgency=low\n\n" % (package, version))
        fp.write(text)
        fp.write('\n')
        # ... and put the old content in.
        with open(filename, 'r') as fp2:
            for line in fp2:
                fp.write(line)
    os.rename(tmp_filename, filename)
def __init__(self, spacefunction, timecoeff):
    """Combine a pure-space function with separate time coefficients."""
    # Reject space functions that already embed a time dependency.
    if spacefunction.getTimeCoefficient():
        raise Exception(
            "You initialize a SpaceAndTimeTabulatedFunction with a space "
            "function depending on time"
        )
    self.spacefunction = spacefunction
    Function.__init__(self, spacefunction.getCoefficients(), timecoeff)
def loss(self, x, y):
    """Computes loss and gradients of the model

    Args:
        x (np.ndarray): Tensor of input images, of shape (N, H*W).
            For MNIST data, it is (N, 784)
        y (np.ndarray): Tensor of labels in one-hot representation,
            of shape (N, 10)

    Returns:
        (loss, grad_weights, grad_biases, y_pred) where the gradient
        dicts are keyed by layer index.
    """
    if self.loss_func is None:
        raise Exception(
            "please provide a loss function using compile() before calling loss()"
        )
    # Forward pass: each affine layer followed by its activation.
    for i in range(len(self.layers)):
        w = self.weights[i]
        b = self.biases[i]
        x = self.layers[i].forward_prop(x, w, b)
        x = self.activations[i].forward_prop(x)
    y_pred = x
    loss = self.loss_func(y_pred, y)
    grad_weights, grad_biases = {}, {}
    # Backward pass in reverse layer order.
    # NOTE(review): the upstream gradient is seeded with `y` itself, which
    # assumes activations[-1].backprop folds in the loss derivative
    # (e.g. softmax + cross-entropy pairing) — confirm against those classes.
    grad_upstream = y
    for i in reversed(range(len(self.layers))):
        grad_upstream = self.activations[i].backprop(grad_upstream)
        grad_upstream, grad_w, grad_b = self.layers[i].backprop(
            grad_upstream)
        grad_weights[i] = grad_w
        grad_biases[i] = grad_b
    return loss, grad_weights, grad_biases, y_pred
def report_assign_responsible(sender, instance, **kwargs):
    """Signal handler: resolve the report's responsible entity/department.

    When the entity is missing it is taken from the creator's
    organisation (if that organisation is itself responsible), otherwise
    located geographically from the report's point.  The department is
    then resolved through the entity's DEPARTMENT associates by dispatch
    category.  Raises Exception when the point matches no entity surface.
    """
    if not instance.responsible_entity:
        # Detect who is the responsible Manager for the given type
        if (instance.created_by and hasattr(instance.created_by, 'fmsuser')
                and instance.created_by.fmsuser.organisation
                and instance.created_by.fmsuser.organisation.is_responsible()):
            # assign entity of the creator
            instance.responsible_entity = instance.created_by.fmsuser.organisation
        else:
            # Otherwise pick the single entity whose surface contains the point.
            reponsibles = OrganisationEntitySurface.objects.filter(
                geom__contains=instance.point)
            if len(reponsibles) == 1:
                instance.responsible_entity = reponsibles[0].owner
            else:
                raise Exception("point does not match any entity surface")
    if not instance.responsible_department:
        # Detect who is the responsible Manager for the given type
        # Search the right responsible for the current organisation.
        departements = instance.responsible_entity.associates.filter(
            type=OrganisationEntity.DEPARTMENT)
        # Get the responsible according to dispatching category
        instance.responsible_department = departements.get(
            dispatch_categories=instance.secondary_category)
def __init__(self, fonction, derivee=None, interval=None):
    """Wrap a function (or numeric constant) with an optional derivative
    and evaluation interval.

    `fonction` and `derivee` may each be a plain Python function or a
    float/int constant.  Raises Exception when `fonction` is neither.
    """
    if isinstance(fonction, types.FunctionType) or isinstance(
            fonction, float) or isinstance(fonction, int):
        self.fonction = fonction
        pass
    else:
        raise Exception(
            "Foncthd, fonction doit etre de type Function, Float ou Int")
    if derivee != None:
        # NOTE(review): this validation has no effect — nothing happens on
        # failure, and self.derivee is overwritten unconditionally below.
        if isinstance(derivee, types.FunctionType) or isinstance(
                derivee, float) or isinstance(derivee, int):
            self.derivee = derivee
            pass
        pass
    self.derivee = derivee
    if interval == None:
        # Warns (in French) that with no interval the functions are
        # evaluated at zero.
        print(
            "Attention interval nul, les fonctions seront evaluees en zero"
        )
        pass
    self.interval = interval
    #
    # Display name: the wrapped function's name, or 'const' for constants.
    if type(fonction) == types.FunctionType:
        self.nom = fonction.__name__
        pass
    else:
        self.nom = 'const'
        pass
def load_all(self):
    """The function to load all data and labels

    Give:
        data: the list of raw data, needs to be decompressed
              (e.g., raw JPEG string)
        labels: numpy array, with each element is a string
    """
    start = time.time()
    print("Start Loading Data from BCF {}".format(
        'MEMORY' if self._bcf_mode == 'MEM' else 'FILE'))
    self._labels = np.loadtxt(self._label_fn).astype(str)
    if self._bcf.size() != self._labels.shape[0]:
        # Fixed message: the two adjacent literals previously joined
        # without a space, producing "...in dataand labels...".
        raise Exception("Number of samples in data "
                        "and labels are not equal")
    # No `else` needed — the raise above already exits.
    for idx in range(self._bcf.size()):
        datum_str = self._bcf.get(idx)
        self._data.append(datum_str)
    end = time.time()
    print("Loading {} samples Done: Time cost {} seconds".format(
        len(self._data), end - start))
    return self._data, self._labels
def __init__(self, capacity):
    """Cache backed by a doubly linked list plus a key->node map.

    Raises Exception when `capacity` is not positive.
    """
    # Validate before initialising any state (the original assigned all
    # attributes first and only then raised).
    if capacity <= 0:
        raise Exception("Cache capacity must be greater than 0!")
    self.timeseries_head = None  # head of the DLink list
    self.timeseries_tail = None  # tail of the list
    self.keyval_map = {}  # {key -> listNode}
    self.cap = capacity
def __init__(self, param):
    """Read the data-layer config dict; a readable 'source' file is required."""
    self._source_fn = param.get('source')
    # NOTE: a missing 'source' key yields None here; os.path.isfile(None)
    # would then raise before our Exception — presumably acceptable.
    if not os.path.isfile(self._source_fn):
        raise Exception("Source file does not exist")
    # The labels file is optional.
    self._label_fn = param.get('labels', None)
    self._data = []
    self._labels = []
def find_tags_dir(svn_base_dir):
    """Return the path of the 'tags' directory under `svn_base_dir`.

    Raises Exception when no 'tags' entry exists there.
    """
    tags_dir = "tags"
    if tags_dir not in os.listdir(svn_base_dir):
        raise Exception("SVN base dir does not have 'tags' dir.")
    return os.path.join(svn_base_dir, tags_dir)
def __init__(self, timecoeff=None):
    """Tabulated function; the coefficient list is supplied later (None here).

    Raises Exception when no time coefficients are given.
    """
    if not timecoeff:
        # Dropped the redundant `from exceptions import Exception`:
        # Exception is a builtin (and the exceptions module is Py2-only).
        raise Exception(
            "For a tabulated function,\ngive the coefficients.")
    coefList = None
    Function.__init__(self, coefList, timecoeff)
def __init__(self, **kwargs):
    """
    DBS exception can be initialized in following ways:
    DBSException(args=exceptionString)
    DBSException(exception=exceptionObject)
    """
    args = kwargs.get("args", "")
    ex = kwargs.get("exception", None)
    if ex is not None and isinstance(ex, Exception):
        exArgs = "%s" % (ex,)
        # Combine the explicit message with the wrapped exception's text.
        args = exArgs if args == "" else "%s (%s)" % (args, exArgs)
    Exception.__init__(self, args)
def __init__(self, filename):
    """Raised when curly brackets are unbalanced in `filename`."""
    message = 'curly bracket not match "%s"' % filename
    Exception.__init__(self, message)
def __init__(self, path):
    """Raised when `path` cannot be found."""
    # Bug fix: `path` was passed as a second Exception argument instead of
    # being interpolated, so str(e) printed "('Path not found, %s', path)".
    Exception.__init__(self, 'Path not found, %s' % path)
def __init__(self):
    """Initialise the base Exception with no message."""
    Exception.__init__(self)
def __init__(self, filename):
    """Raised when square brackets are unbalanced in `filename`."""
    message = 'square bracket not match "%s"' % filename
    Exception.__init__(self, message)
def __init__(self, msg):
    """Wrap `msg` as the exception message."""
    Exception.__init__(self, msg)
def __init__(self, *args, **keywords):
    """Exception that also records arbitrary keyword context."""
    Exception.__init__(self, *args)
    # Extra keyword context, kept for later formatting.
    self.keywords = keywords
    # Lazily-built printable form of the exception.
    self._print_exc_str = None
def __init__(self, extent):
    """Raised when `extent` cannot be used as an extent."""
    Exception.__init__(self)
    self.message = "Could not use: {0} as extent".format(extent)
def __init__(self, mixture):
    """Error about the given mixture; keeps a reference for reporting."""
    self.mixture = mixture
    Exception.__init__(self)
def __init__(self, message):
    """Store `message` and initialise the base Exception."""
    self.message = message
    Exception.__init__(self)
def __init__(self, old_insee_code, new_insee_code):
    """Raised when a commune's INSEE code changed between datasets."""
    # Grammar fix in the user-visible message ("was change" -> "was changed").
    message = ("Insee code of this commune was changed from %s to %s"
               % (old_insee_code, new_insee_code))
    Exception.__init__(self, message)
def __init__(self, message):
    """Keep `message` on the instance; the base Exception gets no args."""
    Exception.__init__(self)
    self.message = message
def __init__(self, rv, *args):
    """Record the return value `rv` and forward the rest to Exc."""
    self.rv = rv
    Exc.__init__(self, *args)
def __init__(self, filename):
    """Raised when round brackets are unbalanced in `filename`."""
    message = 'round bracket not match, "%s"' % filename
    Exception.__init__(self, message)
def __init__(self, name, referer=None):
    """Raised when module `name` cannot be found, optionally noting `referer`."""
    if referer is None:
        Exception.__init__(self, 'Module not found "%s"' % name)
    else:
        Exception.__init__(
            self, 'Module not found "%s", referer "%s"' % (name, referer))
def __init__(self, layers, behavior, trim):
    """Raised when a spatial finder cannot be created from its inputs."""
    Exception.__init__(self)
    self.message = ("Could not create a spatial finder with layers: {0}, "
                    "behavior {1} and trim value {2}").format(
                        layers, behavior, trim)
def __init__(self, message, innerException=None):
    """Error with a message and an optional wrapped inner exception."""
    Exception.__init__(self)
    self.message = message
    # The underlying exception that caused this one, if any.
    self.innerException = innerException
def __init__(self, errStr=''):
    """Error carrying a free-form description string."""
    Exception.__init__(self)
    self.errStr = errStr
def __init__(self, value=None):
    """Error optionally carrying the offending value."""
    Exception.__init__(self)
    self.value = value
def __init__(self, insee_code):
    """Raised when the commune `insee_code` has not been imported yet."""
    message = ("This commune with insee code %s has not been imported yet"
               % (insee_code))
    Exception.__init__(self, message)
def __init__(self, msg):
    """Exception initialised with a message.

    Bug fix: the original signature was `__init__(msg)` with no `self`,
    so `msg` was bound to the instance and any real message argument
    raised a TypeError; `Exception.__init__(msg)` also never received
    the message.
    """
    Exception.__init__(self, msg)
def __init__(self, insee_code, year):
    """Raised when the commune `insee_code` was removed in `year`."""
    message = ("The commune with insee code %s was removed in %s"
               % (insee_code, year))
    Exception.__init__(self, message)
def __init__(self, sofar, numpreds, succs):
    """Topological-sort cycle error; keeps the partial ordering state as args."""
    Exception.__init__(self, "cycle in constraints", sofar, numpreds, succs)
    # Predecessor map may be attached later by the caller.
    self.preds = None
def __init__(self, msg, variableValue):
    """Error about a variable; records the offending value."""
    Exception.__init__(self, msg)
    self.variableValue = variableValue
def __init__(self, struct, varname):
    """Error about field `varname` of `struct`; keeps both for reporting."""
    self.struct = struct
    self.varname = varname
    Exception.__init__(self)
def __init__(self, message, processdict=None):
    """Spotfinder problem with optional processing parameters attached."""
    Exception.__init__(self, message)
    self.classname = "Spotfinder Problem"
    self.parameters = processdict
def __init__(self, msg, *args, **kwargs):
    """Keep `msg` separately while forwarding remaining args to Exception."""
    Exception.__init__(self, *args, **kwargs)
    self.msg = msg
def __init__(self, a_behavior):
    """Raised when behavior `a_behavior` cannot be instantiated."""
    Exception.__init__(self)
    self.message = "Could not instantiate behavior {0}".format(a_behavior)