def parsε(lexems, offset):
    """Dispatch parsing of the lexeme at *offset* to the handler registered
    for its type in the global ``type_map``.

    Reports a SyntaxError through error() when no handler is registered for
    the lexeme type or the offset is out of range.
    """
    global type_map
    try:
        return type_map[lexems[offset][0]](lexems, offset)
    except (KeyError, IndexError):
        # Narrowed from a bare `except:` so genuine bugs raised inside the
        # dispatched handler (NameError, TypeError, ...) are no longer
        # silently re-reported as syntax errors.
        error(SyntaxError, 'ZŁe nawjasy {}'.format(lexems[offset]))
def list_user_in_chan(channel2clean, token):
    """Print the user names of every member of a Slack channel.

    Looks the channel name up via conversations.list, asks for interactive
    confirmation, then resolves each member id through users.info.
    Failures are reported through error().
    """
    # NOTE(review): this prints the API token to stdout and passes it as a
    # URL query parameter — confirm that is acceptable for this tool.
    print('Your token is: {token}'.format(token=token))
    chan_to_find = channel2clean
    response = requests.get(
        'https://slack.com/api/conversations.list?limit=100&token={token}&types=public_channel,private_channel'
        .format(token=token)).json()
    if response['ok'] == False:
        error('Error: {error}'.format(error=response['error']))
    chan_id = False
    # Scan the (first page of) channels for a name match.
    for channel in response['channels']:
        if channel['name'] == chan_to_find:
            chan_id = channel['id']
            print('The channel "{chan}" have the id "{id}"'.format(
                chan=chan_to_find, id=chan_id))
    if chan_id == False:
        error('Error: Your channel doesn\'t exist !')
    confirmation = input(
        'Really want to print all users from this channel ? [y/n]\t')
    if confirmation == 'y':
        response = requests.get(
            'https://slack.com/api/conversations.members?channel={id}&token={token}'
            .format(id=chan_id, token=token)).json()
        if response['ok'] == False:
            error('Error: {error}'.format(error=response['error']))
        # Resolve each member id to a display name, one request per member.
        for member in response['members']:
            res = requests.post(
                'https://slack.com/api/users.info?user={member}&token={token}'.
                format(member=member, token=token)).json()
            if res['ok'] == False:
                error('Error: {error}'.format(error=res['error']))
            else:
                user = res['user']
                print(user['name'])
    else:
        error('Okay, you\'re not sure.')
def joins(self, friend):
    """Link this character and *friend* (another character object) as mutual
    friends; a character with no health left (health < 1) triggers error()."""
    if not self.health >= 1:
        error()
    for one, other in ((self, friend), (friend, self)):
        one.friends.append(other)
def aput(data, sel, sub):
    """Scatter *sub* into a copy of *data* at the positions addressed by the
    index rows of *sel* (a multi-dimensional analogue of numpy `put`).

    data, sel and sub must be arrays.  Each row of *sel* indexes the leading
    sel.shape[1] axes of *data*; *sub* supplies either one sub-array shared
    by every selected position, or one sub-array per row of *sel*.

    Returns a new array shaped like *data*; *data* itself is unchanged.
    """
    # check input dimensions
    if len(sel) == 0:
        return data.copy()
    if len(sel.ravel()) == 0:
        return data.copy()
    if sel.shape[1] > len(data.shape):
        raise error("number of dimensions of index array is higher than that of data array")
    asub = array(sub)
    if len(asub.shape) == len(data.shape) - sel.shape[1]:
        # One shared sub-array: replicate it once per selection row.
        if asub.shape != data.shape[sel.shape[1] :]:
            raise error("shape of subarray does not match selected data")
        asub = resize(asub, [sel.shape[0]] + list(data.shape[sel.shape[1] :]))
    elif len(asub.shape) == len(data.shape) - sel.shape[1] + 1:
        # One sub-array per selection row.
        if list(asub.shape) != [sel.shape[0]] + list(data.shape[sel.shape[1] :]):
            raise error("shape of subarray does not match selected data")
    # collapse and write data
    # Row-major weights that turn each sel row into one flat leading index.
    coffset = [int(product(data.shape[i : sel.shape[1]])) for i in range(1, 1 + sel.shape[1])]
    coffset[-1] = 1
    csel = add.reduce(sel * array(coffset), 1)
    # Expand each flat index into the run of element positions it covers.
    subsize = int(product(data.shape[sel.shape[1] :]))
    suboffset = resize(arange(subsize), [sel.shape[0], subsize])
    csel = (transpose(resize(csel * subsize, [subsize, sel.shape[0]])) + suboffset).ravel()
    cdata = data.copy().ravel()
    csub = asub.ravel()
    put(cdata, csel, csub)
    return cdata.reshape(data.shape)
def loses(self, thing):
    """Drop *thing* (a string) from this character's possessions.

    Removing an item whose index in items_list is below 5 also subtracts
    the matching stats_list bonus from self.damage.  A dead character
    (health <= 0) or a thing not owned triggers error() instead.
    """
    owned = thing in self.possesions
    if owned and self.health > 0:
        self.possesions.remove(thing)
        idx = items_list.index(thing)
        if idx < 5:
            self.damage -= stats_list[idx]
    else:
        error()
def parse(tokens):
    """Validate a token stream: every statement must satisfy
    check_statement(), and the stream's final token must be an End marker.
    Any violation is reported through error()."""
    for stmt in tokens:
        if not check_statement(stmt):
            error()
    final = tokens[-1][0]
    if not isinstance(final, End):
        error()
def main():
    """Entry point: read the puzzle from input, short-circuit when it is
    already solved or unsolvable, otherwise solve and print the answer."""
    parser = Parser()
    parser.get_input()
    algo = Algorithm(parser.puzzle, parser.size)
    if algo.is_already_solved():
        success(Success.ALREADY_SOLVED)
    if Algorithm.is_solvable(parser.puzzle) is False:
        error(Error.ERROR_NON_SOLVABLE_PUZZLE)
    print_answer(algo)
def symbol_search(symbol):
    """Resolve *symbol* by walking the scope stack (stos_zła) from the
    innermost frame outwards; reports a NameError through error() when the
    name is absent from every frame.
    """
    i = len(stos_zła) - 1
    while i >= 0:
        try:
            return stos_zła[i][symbol[1]]
        except KeyError:
            # Narrowed from a bare `except:` — only a missing key means "not
            # in this frame"; any other exception now surfaces to the caller.
            i -= 1
    error(NameError, 'NO SACH SYMBOL at line {} column {}'.format(symbol[2], symbol[3]))
def load(self, path):
    """Load the Android project rooted at *path*.

    Stores the resolved real path, creates the manifest wrapper, loads
    AndroidManifest.xml and re-reads project.properties.

    :raises error: when the project directory or its manifest is missing.
    """
    self.path = os.path.realpath(path)
    self.mf = manifest(self.cfg)
    if not os.path.exists(path):
        raise error("Project not found at '" + path + "'")
    mf_xml_path = path + '/AndroidManifest.xml'
    if not os.path.exists(mf_xml_path):
        raise error("AndroidManifest.xml not found at " + path)
    self.mf.load(mf_xml_path)
    self.reload_project_properties()
def func_eval(function, values):
    """Apply a user-defined function: bind evaluated arguments into a fresh
    scope frame, run the body expressions, pop the frame, return the result.

    Reports a TypeError through error() on an arity mismatch.
    """
    global stos_zła
    # function.token carries (.., (parameter nodes, body expressions), .., ..)
    _, (arguments, expressions), _, _ = function.token
    if not len(arguments) == len(values):
        error(TypeError, 'zła ARYJSKOŚĆ')
    # Push a new scope frame: parameter name -> evaluated argument value.
    stos_zła.append({a.token[1]: evaluate(v) for a, v in zip(arguments, values)})
    # Wrap the body expressions under a synthetic root node and run it.
    n = parse.Node(('', '', '', '',))
    for e in expressions:
        n.adopt(e)
    res = run(n)
    stos_zła.pop()
    return res
def validar_clase(tokens):
    """Scan *tokens* for a `class <ID> begin` header and return its formatted
    form; otherwise report 'Error en estructura de clase' via error().

    Fixes vs. the original:
    - `pos` was never incremented, so the loop either returned on the first
      three tokens or spun forever re-reporting the same error;
    - the bound `pos <= len(tokens)` overran the list (IndexError), since
      three tokens are read per iteration.
    """
    pos = 0
    while pos + 2 < len(tokens):
        lex, token = tokens[pos]
        id_tok = tokens[pos + 1][1]
        begin = tokens[pos + 2][0]
        print(lex, id_tok, begin)
        if lex == 'class' and id_tok == 'ID' and begin == 'begin':
            return "{lex} {id} {begin} \n\t".format(lex=lex, id=id_tok, begin=':')
        error('Error en estructura de clase')
        pos += 1
def load_release_settings(section, error = False):
    """Return the (option, value) items of *section* from the released-version
    config, or [] when the config or section is unavailable.

    The *error* parameter is overloaded:
      - False: ignore section-lookup failures;
      - True: escalate as a fatal config error;
      - a callable: invoked with the caught exception.
    """
    vc, v = _load_released_version_config()
    items = []
    if v is not None:
        try:
            items = v.items(section)
        except Exception as e:
            if not isinstance(error, bool):
                error(e)
            elif error:
                # NOTE(review): on this branch `error` is the bool True, so
                # `error.general` raises AttributeError instead of the
                # intended config error — the parameter presumably shadows a
                # module-level `error` module; confirm against the full file.
                raise error.general('Invalid config section: %s: %s: %s' % (vc, section, e))
    return items
def tokeniseLine(line):
    """Tokenise one source line.

    Splits on single spaces, rejects any word that is neither alphanumeric
    nor hyphenated (via error()), merges doubles, then keeps the first valid
    token candidate produced by each class in `classes`.
    """
    words = line.strip().split(' ')
    if any(not (w.isalnum() or '-' in w) for w in words):
        error()
    words = concat_doubles(words)
    candidate_rows = [[cls(w) for cls in classes] for w in words]
    result = []
    for candidates in candidate_rows:
        for tok in candidates:
            if tok.valid:
                result.append(tok)
                break
    return result
def load_release_setting(section, option, raw = False, error = False):
    """Return one *option* value from *section* of the released-version
    config, or None when unavailable.

    The *error* parameter is overloaded exactly as in load_release_settings:
    False ignores failures, True escalates, a callable receives the exception.
    """
    vc, v = _load_released_version_config()
    value = None
    if v is not None:
        try:
            value = v.get(section, option, raw = raw)
        except Exception as e:
            if not isinstance(error, bool):
                error(e)
            elif error:
                # NOTE(review): `error` is the bool True here, so
                # `error.general` raises AttributeError rather than the
                # intended config error (parameter shadows the `error`
                # module) — confirm against the full file.
                raise error.general('Invalid config section: %s: %s: %s.%s' % (vc, section, option, e))
    return value
def reload_project_properties(self):
    """Parse <project>/project.properties into self.projectprops.

    Every newline-terminated `key=value` line (not starting with '#' or '=')
    is stored verbatim, stripped.  A final line without a trailing newline is
    skipped (original behaviour, preserved).

    Fixes vs. the original:
    - the "not found" error referenced an undefined name `path` (NameError);
      it now reports the actual file path;
    - the file handle is closed via a context manager even on error.

    :raises error: when the file is missing or its first read is empty.
    """
    self.projectprops = []
    proj_props = self.path + '/project.properties'
    if not os.path.exists(proj_props):
        raise error("project.properties not found at " + proj_props)
    with open(proj_props, 'r') as file:
        line = file.readline()
        if len(line) <= 0:
            raise error("empty project.properties")
        while line.count("\n") > 0:
            line = line.replace("\n", "")
            matches = re.match("([^#=][^=]*)=([^=]+)", line)
            if matches:
                self.projectprops.append(line.strip())
            line = file.readline()
def set_value(self, name, val):
    """Store one `name: value` attribute parsed from SDK `target` output.

    Most names map straight onto an attribute; "Description" may override
    api_level, and "Skins" is split into a list with the default flagged.

    :raises error: for an unrecognised *name*.
    """
    val = val.strip()
    if name == "Name":
        self.name = val
    elif name == "Type":
        self.Type = val
    elif name == "Vendor":
        self.vendor = val
    elif name == "Revision":
        self.revision = val
    elif name == "API level":
        self.api_level = val
    elif name == "Description":
        self.description = val
        # Some descriptions embed "API level N"; prefer that explicit value.
        matches = re.search("API level ([0-9]+)", val)
        if matches:
            self.api_level = matches.group(1)
    elif name == "Libraries":
        self.libraries = val
    elif name == "Skins":
        self.skins = []
        self.skins_str = val
        sk = val.split(", ")
        for skin in sk:
            matches = re.search(' \(default\)', skin)
            if matches:
                # Record which skin is flagged "(default)" and store it bare.
                self.def_skin = skin.replace(" (default)", "")
                self.skins.append(self.def_skin)
            else:
                self.skins.append(skin)
    elif name == "ABIs ":
        # NOTE(review): the trailing space in "ABIs " looks deliberate (raw
        # tool output) — confirm against the parser that feeds this method.
        self.abis_str = val
    else:
        raise error("Unknown `target` paramerter name: '" + name + "'")
def aget(data, sel):
    """Gather elements of *data* addressed by the index rows in *sel*.

    Each row of *sel* indexes the leading sel.shape[1] axes of *data*;
    the remaining axes are returned whole.  Empty selections yield an
    empty array of the same dtype.
    """
    if len(sel) == 0 or len(sel.ravel()) == 0:
        return array([], dtype=data.dtype)
    n_idx = sel.shape[1]
    if n_idx > len(data.shape):
        raise error(
            'number of dimensions of index array is higher than that of data array'
        )
    # Flatten the indexed axes into a single leading axis.
    flat_len = int(prod(data.shape[0:n_idx]))
    flat = data.reshape([flat_len] + list(data.shape[n_idx:]))
    # Row-major weights for the indexed axes (last weight is 1).
    weights = [int(prod(data.shape[i:n_idx])) for i in range(1, 1 + n_idx)]
    weights[-1] = 1
    flat_sel = add.reduce(sel * array(weights), 1)
    return take(flat, flat_sel, axis=0)
def run_debug_activity(self, device):
    """Launch this project's launcher activity on *device* in debug mode;
    raises when the manifest declares no launcher activity."""
    activity = self.mf.get_launcher_activity()
    if not activity:
        raise error(self.path + ' is not a launcher application')
    pkg = self.mf.get_package()
    component = '{0}/{0}.{1}'.format(pkg, activity)
    return device.run_debug_activity(component)
def authenticate(self, os_version=None, local_ip=None): from sensorcloud import UserAgent #determine protocol from the auth server if self._authServer.startswith("http://"): PROTOCOL = "http://" else: PROTOCOL = "https://" url = self._authServer + "/SensorCloud/devices/" + self._deviceId + "/authenticate/" request = self._requests.url(url) if os_version: request.param("os_version", os_version) if local_ip: request.param("local_ip", local_ip) request = request.param("version", "1")\ .param("key", self._deviceKey)\ .accept("application/xdr")\ .header("User-Agent", UserAgent)\ .get() #check the response code for success if request.status_code != httplib.OK: response = SensorCloudRequests.Request(request) raise error(response, "authenticating") #Extract the authentication token and server from the response unpacker = xdrlib.Unpacker(request.raw) self._authToken = unpacker.unpack_string() self._apiServer = PROTOCOL + unpacker.unpack_string() if self._cache: self._cache.token = self._authToken self._cache.server = self._apiServer
def _get_histogram_info(self):
    """ get a histogram start and end from SensorCloud

    Returns a HistogramStreamInfo, or None when the channel has no histogram
    data (404 with errorcode 404-010).

    :raises error: for any other non-200 response.
    """
    response = self.url("/streams/histogram/")\
        .param("version", "1")\
        .accept("application/xdr")\
        .get()
    if response.status_code == httplib.NOT_FOUND:
        # Renamed from `error`: the original local shadowed the module-level
        # `error` exception class, so the `raise error(...)` calls below
        # raised "dict is not callable" instead of the intended API error.
        error_body = json.loads(response.text)
        # errorcode 404-010 just means "no histogram data" — not a failure.
        if error_body.get("errorcode") == "404-010":
            return None
    #if we don't get a 200 ok then we had an error
    if response.status_code != httplib.OK:
        raise error(response, "get histogram info")
    #unpacker for the xdr format
    unpacker = xdrlib.Unpacker(response.raw)
    datastructure_version = unpacker.unpack_int()
    assert datastructure_version == 1, "structure version should always be 1"
    start_nano = unpacker.unpack_uhyper()
    end_nano = unpacker.unpack_uhyper()
    s = HistogramStreamInfo(start_time=start_nano, end_time=end_nano)
    return s
def upload_csv_file(table_name):
    """Flask endpoint body (Python 2): save the uploaded CSV file, then
    bulk-insert its rows into *table_name* on the current user's connection.

    The first CSV row is the header and must have no empty cells; every data
    row must match the header width.  Rows are flushed in ~10k batches.

    SECURITY NOTE(review): the table and column names are interpolated
    directly into the INSERT statement (only values are parameterised) —
    confirm callers validate *table_name* and the CSV header.
    """
    try:
        print request.files
        f = request.files['file']
        # Timestamped filename avoids collisions in the upload folder.
        file_path = os.path.join(
            app.config['UPLOAD_FOLDER'],
            strftime("file_%Y%m%d_%H_%M_%S_", gmtime()) + f.filename)
        f.save(file_path)
        with get_connect(current_identity.dbstring) as c:
            with open(file_path, 'rb') as csvfile:
                reader = csv.reader(csvfile, skipinitialspace=True)
                headers = reader.next()
                if not all(headers):
                    raise Exception("Invalid CSV File!")
                headers_str = ','.join(headers)
                # One "%s" placeholder per column — values are parameterised.
                vheaders_str = ','.join(["%s" for i in headers])
                base_sql = "INSERT INTO {} ({}) VALUES ({})".format(
                    table_name, headers_str, vheaders_str)
                params = []
                for i in reader:
                    if len(i) != len(headers):
                        raise Exception("Invalid CSV File!")
                    params.append(tuple(i))
                    # Flush a batch once it grows past ~10k rows.
                    if len(params) > 9999:
                        r = c.execute(base_sql, params)
                        params = []
                # Flush the final partial batch.
                if len(params) > 0:
                    r = c.execute(base_sql, params)
        return jsonify(success())
    except Exception as e:
        print e
        traceback.print_exc()
        return jsonify(error("SQL_ERROR", remark=str(e)))
def _retrieve_timeseries_partitions(self):
    """Download and decode the timeseries partition list for this stream.

    Returns a dict mapping timeseries.descriptor(sample_rate) -> partition
    info dict with 'start_time'/'end_time' (nanoseconds) and 'sample_rate'.

    :raises error: on any non-200 response.
    """
    def unpackPartition(unpacker):
        # Decode a single partition record from the XDR stream.
        partition = {}
        partition['start_time'] = unpacker.unpack_uhyper()
        partition['end_time'] = unpacker.unpack_uhyper()
        unpacker.unpack_int()  # value discarded
        unpacker.unpack_int()  # value discarded
        sampleRateType = unpacker.unpack_uint()
        # Rate type 1 means hertz; anything else is seconds.
        partition['sample_rate'] = SampleRate.hertz(unpacker.unpack_uint()) if sampleRateType == 1 else \
            SampleRate.seconds(unpacker.unpack_uint())
        # Skip the trailing opaque blocks (12 bytes each).
        for _ in range(unpacker.unpack_uint()):
            popSize = unpacker.unpack_uint() * 12
            unpacker.unpack_fopaque(popSize)
        return timeseries.descriptor(partition['sample_rate']), partition
    response = self.url("/streams/timeseries/partitions/") \
        .param("version", "1") \
        .accept("application/xdr") \
        .get()
    if response.status_code != httplib.OK:
        raise error(response, "get timeseries partitions")
    partitions = []
    unpacker = xdrlib.Unpacker(response.raw)
    unpacker.unpack_int()  # structure version (unchecked)
    return dict(
        [unpackPartition(unpacker) for _ in range(unpacker.unpack_uint())])
def _histogram_submit_blob(self, sampleRate, bin_start, bin_size, num_bins, blob):
    """Upload a pre-packed blob of histograms for this stream.

    *blob* holds consecutive histograms, each 8 bytes of header plus 4 bytes
    per bin; the count is derived from len(blob).

    :raises error: when the server does not answer 201 Created.
    """
    hist_size = 8 + (4 * num_bins)
    # NOTE(review): `/` is integer division on Python 2 (this module uses
    # httplib) and would be float division on Python 3 — confirm runtime.
    hist_count = len(blob) / hist_size
    packer = xdrlib.Packer()
    VERSION = 1
    packer.pack_int(VERSION)
    packer.pack_fopaque(8, sampleRate.to_xdr())
    #pack histogram info
    packer.pack_float(bin_start)
    packer.pack_float(bin_size)
    packer.pack_uint(num_bins)
    #Writing an array in XDR. an array is always prefixed by the array length
    packer.pack_int(hist_count)
    data = packer.get_buffer() + blob
    response = self.url("/streams/histogram/data/")\
        .param("version", "1")\
        .content_type("application/xdr")\
        .data(data)\
        .post()
    # if response is 201 created then we know the data was successfully added
    if response.status_code != httplib.CREATED:
        raise error(response, "histogram upload")
def add_sensor(self, sensor_name, sensor_type="", sensor_label="", sensor_desc=""):
    """
    Add a sensor to the device. type, label, and description are optional.

    :returns: a sensor handle via self.sensor(sensor_name).
    :raises error: when the server does not answer 201 Created.
    """
    logger.debug("add_sensor(sensor_name='%s', sensor_type='%s', sensor_label='%s', sensor_desc='%s')",
                 sensor_name, sensor_type, sensor_label, sensor_desc)
    #addSensor allows you to set the sensor type label and description. All fileds are strings.
    #we need to pack these strings into an xdr structure
    packer = xdrlib.Packer()
    packer.pack_int(1)  #version 1
    packer.pack_string(sensor_type)
    packer.pack_string(sensor_label)
    packer.pack_string(sensor_desc)
    data = packer.get_buffer()
    response = self.url("/sensors/%s/"%sensor_name)\
        .param("version", "1")\
        .data(data)\
        .content_type("application/xdr").put()
    #if response is 201 created then we know the sensor was added
    if response.status_code != httplib.CREATED:
        raise error(response, "add sensor")
    return self.sensor(sensor_name)
def _retrieve_histogram_partitions(self):
    """Download and decode the histogram partition list for this stream.

    Returns a dict mapping histogram.descriptor(...) -> partition info with
    'start_time'/'end_time' (nanoseconds), 'sample_rate', 'num_bins',
    'bin_start' and 'bin_size'.

    :raises error: on any non-200 response.
    """
    def unpackPartition(unpacker):
        # Decode a single partition record from the XDR stream.
        partition = {}
        partition['start_time'] = unpacker.unpack_uhyper()
        partition['end_time'] = unpacker.unpack_uhyper()
        unpacker.unpack_int()  # value discarded
        unpacker.unpack_int()  # value discarded
        sampleRateType = unpacker.unpack_uint()
        # Rate type 1 means hertz; anything else is seconds.
        partition['sample_rate'] = SampleRate.hertz(unpacker.unpack_uint()) if sampleRateType == 1 else \
            SampleRate.seconds(unpacker.unpack_uint())
        partition['num_bins'] = unpacker.unpack_uint()
        partition['bin_start'] = unpacker.unpack_float()
        partition['bin_size'] = unpacker.unpack_float()
        return histogram.descriptor(partition['sample_rate'], partition['bin_start'],
                                    partition['bin_size'], partition['num_bins']), partition
    response = self.url("/streams/histogram/partitions/") \
        .param("version", "1") \
        .accept("application/xdr") \
        .get()
    if response.status_code != httplib.OK:
        raise error(response, "get histogram partitions")
    unpacker = xdrlib.Unpacker(response.raw)
    unpacker.unpack_int()  # version
    return dict(
        [unpackPartition(unpacker) for _ in range(unpacker.unpack_uint())])
def get_token(command : str) -> Union[token, error]:
    """Translate one command string into its token object.

    Tries, in order: operator, compare, storage, command, flow, numeric
    value, label definition (trailing ':'), bare label; otherwise returns an
    invalid-token error.

    Fix: an empty string previously fell into the numeric branch (`all()`
    over an empty sequence is True) and crashed in int(''); and `command[-1]`
    raised IndexError.  Empty input is now rejected up front.
    """
    if not command:
        return error(error_symbols.Invalid_token, command + ' is an invalid command')
    #operators
    if command in operator_tokens:
        return operator_token(operator_tokens[command])
    #compare
    elif command in compare_tokens:
        return compare_token(compare_tokens[command])
    #storage
    elif command in storage_tokens:
        return storage_token(storage_tokens[command])
    #commands
    elif command in command_tokens:
        return command_token(command_tokens[command])
    #flow
    elif command in flow_tokens:
        return flow_token(flow_tokens[command])
    #value
    elif all(map(is_number, command)):
        return value_token('VALUE', int(command))
    #label definition
    elif command[-1] == ':' and (not any(map(is_number, command))):
        return label_token(command[:-1])
    #label
    elif (not any(map(str.isdigit, command))):
        return label_token(command)
    #error
    return error(error_symbols.Invalid_token, command + ' is an invalid command')
def api_run_simple_sql(sql, max_rows=50, return_dict=True):
    """Execute *sql* on the current user's connection (Python 2 endpoint).

    Row-returning statements yield success(header=keys, rows=...) with rows
    as dicts or lists depending on *return_dict*; other statements yield a
    bare success().  Failures map to error("SQL_ERROR", ...).
    """
    try:
        with get_connect(current_identity.dbstring) as c:
            r = c.execute(sql)
            if r.returns_rows:
                ks = r.keys()
                cnt = 0
                rst = []
                if return_dict:
                    for i in r:
                        # Zip column names with the row values.
                        obj = {}
                        for idx in xrange(len(ks)):
                            obj[ks[idx]] = i[idx]
                        rst.append(obj)
                        cnt = cnt + 1
                        # NOTE(review): the limit check runs after appending,
                        # so up to max_rows + 1 rows are returned — confirm.
                        if cnt > max_rows:
                            break
                else:
                    for i in r:
                        rst.append(list(i))
                        cnt = cnt + 1
                        if cnt > max_rows:
                            break
                return success(header=ks, rows=rst)
            else:
                return success()
    except Exception as e:
        print e
        traceback.print_exc()
        return error("SQL_ERROR", remark=str(e))
def get_target_id(self):
    """Return the stripped value of the `target=` entry from the project's
    properties; raise when no such entry exists."""
    pattern = re.compile("target\=(.*)")
    for prop_line in self.projectprops:
        found = pattern.search(prop_line)
        if found:
            return found.group(1).strip()
    raise error("Broken Project: Target Id for project " + \
                self.path + " is not defined")
def apply_eval(node):
    """Evaluate an application node.

    The head is evaluated if parenthesised; a 'func' head dispatches to
    func_eval with unevaluated arguments.  Otherwise the head is looked up
    first as a macro (arguments passed unevaluated), then as a symbol
    (arguments evaluated eagerly), and applied.
    """
    if node.kids[0].token[0] == '(':
        node.kids[0] = evaluate(node.kids[0])
    if node.kids[0].token[0] == 'func':
        return func_eval(node.kids[0], node.kids[1:])
    try:
        s = macroos[node.kids[0].token[1]]
        args = node.kids[1:]
    except KeyError:
        # Narrowed from a bare `except:` — only "not a macro" should fall
        # back to the symbol lookup; other exceptions now surface.
        s = symbol_search(node.kids[0].token)
        args = [evaluate(e) for e in node.kids[1:]]
    try:
        return s(*args)
    except TypeError:
        error(TypeError, '{}: experiencing unexpected error, please try again'.format(node.kids))
def declaration_check(listy):
    """Check that every Variable is declared (Variable-ISA line) exactly once
    and before any line that uses it; violations are reported via error().

    `is_declared` is index-aligned with *listy*: slot i holds the label
    declared on line i (or "" when line i declares nothing), so a prefix
    slice gives "everything declared at or before" a given line.
    """
    is_declared=["" for line in listy]
    for line in listy:
        if isinstance(line[0], Variable) and isinstance(line[1], ISA):
            if line[0].label in is_declared:
                # Duplicate declaration of the same label.
                error()
            else:
                # NOTE(review): listy.index(line) returns the FIRST matching
                # line, so two identical declaration lines would record the
                # wrong slot — confirm inputs cannot contain duplicates.
                is_declared[listy.index(line)]=line[0].label
    for i,line in enumerate(listy):
        b=i+1
        for t in line:
            if isinstance(t, Variable):
                # A variable may only appear at or after its declaration.
                if t.label in is_declared[:b]:
                    pass
                else:
                    error()
def apt_delete_crowd_sql(sql_ids):
    """Delete the given crowd-SQL entries (Python 2 endpoint).

    Returns success(sql_ids=lastrowid) on success, otherwise
    error("SQL_ERROR", ...) with the exception text.
    """
    try:
        with get_crowd_connect() as c:
            r = remove_crowd_sqls(c, sql_ids)
            return success(sql_ids=r.lastrowid)
    except Exception as e:
        print e
        traceback.print_exc()
        return error("SQL_ERROR", remark=str(e))
def trapezium_rule(function, a, b, n):
    """
    Apply the trapezium rule to approximate ∫_a^b f(x) dx.

    ∫_a^b y dx ≈ 1/2 * h[(y_0+y_n) + 2(Σ_(i=1)^(n-1) y_i)],  h = (b - a) / n

    :param function: callable returning int, float or Decimal
    :param a: int, float, Decimal
    :param b: int, float, Decimal
    :param n: int (number of strips, must be non-zero)
    :return: Decimal

    Fix vs. the original: the sampling loop ran `range(0, n)` and therefore
    never evaluated f(b); the "last ordinate" it used was y_{n-1}, not y_n,
    so every result was wrong.  All n+1 ordinates are now used.
    """
    if n == 0:
        error('n cannot be 0')
        raise DivisionByZero
    # ∫_a^b = -∫_b^a : integrate over the ordered interval, negate at the end.
    invert = False
    if a > b:
        a, b = b, a
        invert = True
    h = Decimal(b - a) / n
    # n + 1 ordinates: y_0 = f(a) ... y_n = f(b).
    y_values = [function(a + i * h) for i in range(n + 1)]
    y_0 = y_values.pop(0) if y_values else 0
    y_n = y_values.pop(-1) if y_values else 0
    integral = y_0 + y_n
    integral += 2 * sum(y_values)
    integral *= h / 2
    if invert:
        return -integral
    else:
        return integral
def writes_story(self):
    """Print the character's 'story': each visited place found in the known
    Middle-earth list is encoded as the letter at its index (a..z); any other
    journey entry is kept verbatim.  A dead character triggers error()."""
    if self.health < 1:
        error()
    list_of_places = [
        "Iron Hills", "Mirkwood", "Misty Mountains", "Gondor", "Mordor",
        "Mount Doom", "Eriador", "Erebor", "Fangorn", "Helms Deep",
        "Isengard", "Kazad-dum", "Rivendell", "the Shire", "Arnor",
        "Weathertop", "Emyn Muil", "Minas Tirith", "Rohan", "Morannon",
        "Grey Havens", "Morthond Vale", "Ringlo Vale", "Bruinen", "Andvin",
        "Erid Luin"
    ]
    letters = [chr(code) for code in range(97, 123)]
    encoded = []
    for place in self.journey:
        if place in list_of_places:
            encoded.append(letters[list_of_places.index(place)])
        else:
            encoded.append(place)
    print(''.join(encoded))
def aatan2(y, x):
    """Elementwise math.atan2 over two same-shaped arrays (or two scalars);
    raises error() when the shapes differ."""
    if shape(x) != shape(y):
        raise error("x and y have different shapes")
    if len(shape(x)) == 0:
        # Scalar inputs: plain atan2.
        return atan2(y, x)
    flat_y = y.ravel()
    flat_x = x.ravel()
    flat_z = array([atan2(yv, xv) for yv, xv in zip(flat_y, flat_x)],
                   dtype=x.dtype)
    return flat_z.reshape(x.shape)
def api_register(username, password):
    """Register a new user (Python 2 endpoint): provision a per-user
    database, persist the User row, and seed sample data.

    Returns success(user=...) on success, error('USER_EXISTS') otherwise.
    """
    dbname = username + "_db"
    try:
        db_str = create_user_env(username, password, dbname)
        user = User(username=username,
                    password=password_hash(password),
                    dbname=dbname,
                    dbstring=db_str)
        # An empty connection string means the environment already existed.
        if not db_str:
            return error('USER_EXISTS')
        db.session.add(user)
        db.session.commit()
        with get_connect(db_str) as user_conn:
            with get_crowd_connect() as crowd_conn:
                create_sample(user_conn, crowd_conn, user.id, user.dbname)
        return success(user=user.to_json())
    except Exception as e:
        print e
        traceback.print_exc()
        # NOTE(review): every failure path reports USER_EXISTS, even
        # unrelated DB errors — confirm this is intended.
        return error('USER_EXISTS')
def aatan2(y, x):
    """Vectorised math.atan2: applies atan2 element-by-element to two arrays
    of identical shape, or directly to two scalars.  Shape mismatches raise
    through error()."""
    if shape(x) != shape(y):
        raise error('x and y have different shapes')
    if len(shape(x)) == 0:
        z = atan2(y, x)
    else:
        ys, xs = y.ravel(), x.ravel()
        vals = []
        for idx in range(len(xs)):
            vals.append(atan2(ys[idx], xs[idx]))
        z = array(vals, dtype=x.dtype).reshape(x.shape)
    return z
def __contains__(self, sensor_name):
    """Return True when this device has *sensor_name* on SensorCloud,
    False when the API answers 404; any other status raises an API error."""
    response = self.url("/sensors/%s/" % sensor_name)\
        .param("version", "1")\
        .accept("application/xdr").get()
    status = response.status_code
    if status == httplib.NOT_FOUND:
        return False
    if status == httplib.OK:
        return True
    raise error(response, "has sensor")
def __contains__(self, channel_name):
    """Membership test: True when the channel's attributes resource answers
    200 OK, False on 404; any other status raises an API error."""
    response = self.url("/channels/%s/attributes/"%channel_name)\
        .param("version", "1")\
        .accept("application/xdr").get()
    code = response.status_code
    if code not in (httplib.OK, httplib.NOT_FOUND):
        raise error(response, "channel contains")
    return code == httplib.OK
def __contains__(self, channel_name):
    """Return whether *channel_name* exists for this device on SensorCloud:
    200 OK -> True, 404 -> False, anything else raises an API error."""
    response = self.url("/channels/%s/attributes/"%channel_name)\
        .param("version", "1")\
        .accept("application/xdr").get()
    if response.status_code == httplib.OK:
        return True
    if response.status_code != httplib.NOT_FOUND:
        raise error(response, "channel contains")
    return False
def set_value(self, name, val):
    """Store one `name: value` attribute parsed from SDK `virtual` (AVD)
    output.  "Target" additionally derives target_id by cutting everything
    from the first run of two-or-more spaces.  Unknown names raise."""
    val = val.strip()
    # Names that map straight onto an attribute.
    plain = {"Path": "path", "ABI": "abi", "Skin": "skin", "Sdcard": "sdcard"}
    if name in plain:
        setattr(self, plain[name], val)
    elif name == "Target":
        self.target_str = val
        self.target_id = re.sub("[\x20]{2,}.*", "", val)
    else:
        raise error("Unknown `virtual` paramerter name: '" + name + "'")
def _downloadData(self, start, end):
    """Download timeseries points between *start* and *end* (nanosecond
    timestamps, coerced to int).

    Returns a list of Point(timestamp, converted_value); an empty list when
    the stream has no data in the window (404).

    :raises error: on any other non-200 response.
    """
    start = int(start)
    end = int(end)
    #url: /sensors/<sensor_name>/channels/<channel_name>/streams/timeseries/data/
    # params:
    #   start (required)
    #   end (required)
    #   showSampleRateBoundary (oiptional)
    #   samplerate (oiptional)
    response = self._channel.url_without_create("/streams/timeseries/data/")\
        .param("version", "1")\
        .param("starttime", start)\
        .param("endtime", end)\
        .accept("application/xdr")\
        .get()
    # check the response code for success
    if response.status_code == httplib.NOT_FOUND:
        #404 is an empty list
        return []
    elif response.status_code != httplib.OK:
        #all other errors are exceptions
        raise error(response, "download timeseris data")
    #unpacker for the xdr response payload
    xdrdata = xdrlib.Unpacker(response.raw)
    datapoints = []
    try:
        # timeseries/data always returns a relativly small chunk of data
        # (less than 50,000 points) so we can proccess it all at once; a
        # future enhancement could stream points as bytes become available.
        while True:
            timestamp = xdrdata.unpack_uhyper()
            value = self._convert(xdrdata.unpack_float(), timestamp)
            datapoints.append(Point(timestamp, value))
    except EOFError:
        # End of the XDR stream — every point has been consumed.
        pass
    return datapoints
def aget(data, sel):
    """Multi-dimensional gather: each row of *sel* indexes the leading
    sel.shape[1] axes of *data*; the trailing axes are returned intact.
    An empty selection yields an empty array of data's dtype."""
    if len(sel) == 0:
        return array([], dtype=data.dtype)
    if len(sel.ravel()) == 0:
        return array([], dtype=data.dtype)
    k = sel.shape[1]
    if k > len(data.shape):
        raise error("number of dimensions of index array is higher than that of data array")
    # View of data with the first k axes merged into one.
    tail_shape = list(data.shape[k:])
    collapsed = data.reshape([int(prod(data.shape[0:k]))] + tail_shape)
    # Convert each index row into a single linear index (row-major weights).
    weights = [int(prod(data.shape[i:k])) for i in range(1, k + 1)]
    weights[-1] = 1
    linear = add.reduce(sel * array(weights), 1)
    return take(collapsed, linear, axis=0)
def add_channel(self, channel_name, channel_label="", channel_desc=""):
    """ Add a channel to the sensor.  label and description are optional. """
    # XDR payload: version tag followed by the two optional strings.
    packer = xdrlib.Packer()
    packer.pack_int(1)  # structure version 1
    packer.pack_string(channel_label)
    packer.pack_string(channel_desc)
    payload = packer.get_buffer()
    response = self.url("/channels/%s/"%channel_name)\
        .param("version", "1")\
        .data(payload)\
        .content_type("application/xdr").put()
    # 201 Created signals success; anything else is an API error.
    if response.status_code != httplib.CREATED:
        raise error(response, "add channel")
    return self.channel(channel_name)
#!/usr/bin/python
# Sweep error() over every (interval-count, threshold) pair with a fixed t.
import error

l = []
t = 5
t_upla_intervalos = (100, 1000, 10000, 100000, 1000000)
# Fix: the last threshold was written `0,000001`, which the tuple literal
# parsed as TWO elements (0 and 000001) instead of the intended 1e-6.
t_upla_umbrales = (0.1, 0.01, 0.0001, 0.00001, 0.000001)
for i in t_upla_intervalos:
    for j in t_upla_umbrales:
        # NOTE(review): `error` is imported as a module and called directly —
        # presumably error.py makes this callable; verify.
        error(i, t, j,)
# AutoPi startup script (Python 2 / Tkinter): wires up the web server and the
# error helper, then shows the registration GUI when no saved config exists.
from Tkinter import *
import tkMessageBox
import requests
import json
from light import *
from register import *
from webServer import *
from camera import *
from alarm import *
from blinds import *
from gui import *
from error import *
import urllib2
web=webServer()
err = error()
#Check network connection
print 'err.testNetwork: '
print err.testNetwork()
# err.setNoNetworkError()
# NOTE(review): `os` is not imported by name here — it must come in via one
# of the star-imports above; confirm.
homepath = '/home/pi/'
print homepath
print os.path.exists(homepath + '/autopi.config')
if not os.path.exists(homepath + '/autopi.config'):
    print 'no user info'
    # No stored credentials: launch the login/registration window.
    root = Tk()
    root.wm_title('AutoPi Login')
    app = registerGUI(root,web)
    root.mainloop()
#!/usr/bin/python import error l=[] t_upla_intervalos=(10,100,1000,10000,100000) t_upla_umbrales=(0.1,0.01,0.001,0.0001,0.00001) for i in t_upla_intervalos: for j in t_upla_umbrales: error(i,j,5)