def get_model_accuracy(self, data_variables, validate_data): #TO-DO
    """Print and return predictions for *validate_data*, reporting accuracy.

    validate_data: DataFrame whose LAST column is the true label; all of its
    columns (label included) are passed to classify_forest per row —
    NOTE(review): confirm classify_forest ignores the trailing label column.
    Returns the numpy array of predictions.
    """
    # True labels come from the last column.
    labels = validate_data.iloc[:, -1].values
    predictions = []
    for i, row in enumerate(validate_data.values):
        pred = self.classify_forest(data_variables, row)
        predictions.append(pred)
        print("Y_pred= {}; Y= {}".format(pred, labels[i]))
    predictions = np.array(predictions)
    if len(labels) == len(predictions):
        if isinstance(predictions[0], str) and isinstance(labels[0], str):
            # Classification: percentage of exact label matches.
            acc = [1 if x == y else 0 for x, y in zip(labels, predictions)]
            acc_tot = sum(acc) / len(labels) * 100
            print("Accuratezza modello del {}".format(acc_tot))
        # NOTE(review): asymmetric check — is_numeric(predictions) on the
        # whole array vs is_numeric(labels[0]) on one element; confirm both
        # are intended. ESS/TSS equals R^2 only under OLS assumptions.
        if is_numeric(predictions) and is_numeric(labels[0]):
            E_SS = np.sum((predictions - labels.mean())**2)
            T_SS = np.sum((labels - labels.mean())**2)
            print("Accuratezza modello del {}".format(E_SS / T_SS))
    return predictions
def get_animalcontrol_find_simple(dbo, query = "", limit = 0):
    """
    Returns rows for simple animal control searches.
    query: The search criteria
    """
    # Neutralise single quotes so the criteria cannot break the LIKE literal.
    term = query.replace("'", "`")
    clauses = []
    if term == "":
        # No criteria given: open incidents from the last 30 days.
        clauses.append("ac.IncidentDateTime > %s AND ac.CompletedDate Is Null" % db.dd(subtract_days(now(dbo.timezone), 30)))
    else:
        if utils.is_numeric(term):
            clauses.append("ac.ID = " + str(utils.cint(term)))
        for f in ("co.OwnerName", "ti.IncidentName", "ac.DispatchAddress", "ac.DispatchPostcode",
                  "o1.OwnerName", "o2.OwnerName", "o3.OwnerName", "vo.OwnerName"):
            clauses.append(utils.where_text_filter(dbo, f, term))
        # Searchable additional fields attached to the incident.
        clauses.append(u"EXISTS(SELECT ad.Value FROM additional ad " \
            "INNER JOIN additionalfield af ON af.ID = ad.AdditionalFieldID AND af.Searchable = 1 " \
            "WHERE ad.LinkID=ac.ID AND ad.LinkType IN (%s) AND LOWER(ad.Value) LIKE '%%%s%%')" % (additional.INCIDENT_IN, term.lower()))
        if not dbo.is_large_db:
            # Large free-text columns are too slow to scan on big databases.
            clauses.append(utils.where_text_filter(dbo, "ac.CallNotes", term))
            clauses.append(utils.where_text_filter(dbo, "ac.AnimalDescription", term))
    sql = get_animalcontrol_query(dbo) + " WHERE " + " OR ".join(clauses)
    if limit > 0:
        sql += " LIMIT " + str(limit)
    return db.query(dbo, sql)
def is_valid(self):
    """
    Validate the data entered in the object

    The predefined conditions examine whether the data is appropriate
    for populating a database model focused on relative sugar content.
    """
    # Required fields must all be present.
    if self._soup is None or self.mass_sugar is None \
            or self.mass_total is None or self.product is None:
        return False
    # Mass fields must actually be numeric.
    return bool(utils.is_numeric(self.mass_sugar) and utils.is_numeric(self.mass_total))
def poll(self, filter_modeler_types=None):
    """
    Get metrics from the http server's /jmx page, and transform them into normalized tupes

    @return: array of tuples ([u'Context', u'Array'], u'metricName', value)
    """
    kept = []
    for bean in self.request().get('beans', []):
        bean_name = bean['name']
        if not bean_name or "name=" not in bean_name:
            continue
        if filter_modeler_types is not None and not self.filter_modeler_type(
                filter_modeler_types, bean["modelerType"]):
            continue
        # Split the bean name, de-duplicating while keeping first occurrence.
        parts = OrderedDict.fromkeys(bean_name.split("name=")[1].split(",sub=")).keys()
        # Normalise: lower case, underscores for spaces, and drop the
        # service/daemon names so they do not appear twice.
        ctx = [p.lower().replace(" ", "_") for p in parts]
        ctx = [p for p in ctx if p != self.service and p != self.daemon]
        # Keep only the numeric, non-excluded attributes of the bean.
        for key, value in bean.iteritems():
            if key in EXCLUDED_KEYS or not is_numeric(value):
                continue
            kept.append((ctx, key, value))
    return kept
def get_foundanimal_find_simple(dbo, query = "", limit = 0):
    """
    Returns rows for simple found animal searches.
    query: The search criteria
    """
    # Neutralise single quotes so the criteria cannot break the LIKE literal.
    term = query.replace("'", "`")
    clauses = []
    if term == "":
        # No criteria given: unreturned found animals from the last 30 days.
        clauses.append("a.DateFound > %s AND a.ReturnToOwnerDate Is Null" % db.dd(subtract_days(now(dbo.timezone), 30)))
    else:
        if utils.is_numeric(term):
            clauses.append("a.ID = " + str(utils.cint(term)))
        for f in ("o.OwnerName", "a.AreaFound", "a.AreaPostcode"):
            clauses.append(utils.where_text_filter(dbo, f, term))
        # Searchable additional fields attached to the found animal record.
        clauses.append(u"EXISTS(SELECT ad.Value FROM additional ad " \
            "INNER JOIN additionalfield af ON af.ID = ad.AdditionalFieldID AND af.Searchable = 1 " \
            "WHERE ad.LinkID=a.ID AND ad.LinkType IN (%s) AND LOWER(ad.Value) LIKE '%%%s%%')" % (additional.FOUNDANIMAL_IN, term.lower()))
        if not dbo.is_large_db:
            # Large free-text columns are too slow to scan on big databases.
            for f in ("x.Sex", "b.BreedName", "c.BaseColour", "s.SpeciesName",
                      "a.AgeGroup", "a.DistFeat", "a.Comments"):
                clauses.append(utils.where_text_filter(dbo, f, term))
    sql = get_foundanimal_query(dbo) + " WHERE " + " OR ".join(clauses)
    if limit > 0:
        sql += " LIMIT " + str(limit)
    return db.query(dbo, sql)
def __init__(self, x, y, max_features=None, max_depth=None, min_samples=None):
    """
    Constructor for classification / regression Decision Tree

    :param x: (DataFrame) Training feature data
    :param y: Training dependent variable data
    :param max_features: (int) Number of features to consider per split
    :param max_depth: (int) maximum number of subbranches
    :param min_samples: (int) Minimum number of samples per branch / leaf node
    """
    self.features = x.columns
    # Copy so adding the 'dependent' column below does not mutate the
    # caller's DataFrame (the original assigned x directly, which wrote
    # a new column into the caller's data as a side effect).
    self.data = x.copy()
    # Per-feature comparison operator (<= for numeric, == for categorical),
    # computed before the dependent column is appended.
    self.feature_comparators = self.data.apply(
        lambda feature: get_comparator(feature))
    self.data['dependent'] = y
    # Regression when the target is numeric, classification otherwise.
    self.is_classifier = not (utils.is_numeric(self.data.dependent, False))
    self.max_features = max_features
    # Default depth: twice max_features when that is given, else unlimited.
    self.max_depth = max_depth or (self.max_features and (2 * self.max_features))
    # Default minimum leaf size: 0.1% of the training set, at least 1.
    self.min_samples = min_samples or max(1, round(0.001 * len(self.data)))
    self.tree_dict = {}
    self.pruned = False
def get_lostanimal_find_simple(dbo, query="", limit=0, onlyindexed=False):
    """
    Returns rows for simple lost animal searches.
    query: The search criteria
    """
    # Neutralise single quotes so the criteria cannot break the LIKE literal.
    term = query.replace("'", "`")
    clauses = []
    if term == "":
        # No criteria given: still-lost animals from the last 30 days.
        clauses.append("a.DateLost > %s AND a.DateFound Is Null" % db.dd(subtract_days(now(dbo.timezone), 30)))
    else:
        if utils.is_numeric(term):
            clauses.append("a.ID = " + str(utils.cint(term)))
        fields = ["o.OwnerName", "a.AreaLost", "a.AreaPostcode"]
        if not onlyindexed:
            # Unindexed columns are only searched when the caller allows it.
            fields += ["x.Sex", "b.BreedName", "c.BaseColour", "s.SpeciesName",
                       "a.AgeGroup", "a.DistFeat", "a.Comments"]
        clauses.extend(utils.where_text_filter(dbo, f, term) for f in fields)
    sql = get_lostanimal_query(dbo) + " WHERE " + " OR ".join(clauses)
    if limit > 0:
        sql += " LIMIT " + str(limit)
    return db.query(dbo, sql)
def get_animalcontrol_find_simple(dbo, query="", limit=0, onlyindexed=False):
    """
    Returns rows for simple animal control searches.
    query: The search criteria
    """
    # Neutralise single quotes so the criteria cannot break the LIKE literal.
    term = query.replace("'", "`")
    clauses = []
    if term == "":
        # No criteria given: open incidents from the last 30 days.
        clauses.append("ac.IncidentDateTime > %s AND ac.CompletedDate Is Null" % db.dd(subtract_days(now(dbo.timezone), 30)))
    else:
        if utils.is_numeric(term):
            clauses.append("ac.ID = " + str(utils.cint(term)))
        fields = ["co.OwnerName", "ti.IncidentName", "ac.DispatchAddress", "ac.DispatchPostcode",
                  "o1.OwnerName", "o2.OwnerName", "o3.OwnerName", "vo.OwnerName"]
        if not onlyindexed:
            # Unindexed free-text columns only when the caller allows them.
            fields += ["ac.CallNotes", "ac.AnimalDescription"]
        clauses.extend(utils.where_text_filter(dbo, f, term) for f in fields)
    sql = get_animalcontrol_query(dbo) + " WHERE " + " OR ".join(clauses)
    if limit > 0:
        sql += " LIMIT " + str(limit)
    return db.query(dbo, sql)
def update_diary_from_form(dbo, username, data):
    """
    Updates a diary note from form data
    """
    l = dbo.locale
    diaryid = utils.df_ki(data, "diaryid")
    # Field validation - raise on the first problem found.
    if utils.df_ks(data, "diarydate") == "":
        raise utils.ASMValidationError(i18n._("Diary date cannot be blank", l))
    if utils.df_kd(data, "diarydate", l) is None:
        raise utils.ASMValidationError(i18n._("Diary date is not valid", l))
    if utils.df_ks(data, "subject") == "":
        raise utils.ASMValidationError(i18n._("Diary subject cannot be blank", l))
    if utils.df_ks(data, "note") == "":
        raise utils.ASMValidationError(i18n._("Diary note cannot be blank", l))
    diarytime = utils.df_ks(data, "diarytime").strip()
    # Optional time must look like HH:MM - a colon plus digits.
    if diarytime != "" and (diarytime.find(":") == -1 or not utils.is_numeric(diarytime.replace(":", ""))):
        raise utils.ASMValidationError(i18n._("Invalid time, times should be in HH:MM format", l))
    sql = db.make_update_user_sql(dbo, "diary", username, "ID=%d" % diaryid, (
        ("DiaryDateTime", utils.df_dt(data, "diarydate", "diarytime", l)),
        ("DiaryForName", utils.df_t(data, "diaryfor")),
        ("Subject", utils.df_t(data, "subject")),
        ("Note", utils.df_t(data, "note")),
        ("Comments", utils.df_t(data, "comments")),
        ("DateCompleted", utils.df_d(data, "completed", l))
    ))
    # Snapshot before and after so the audit trail records the exact diff.
    preaudit = db.query(dbo, "SELECT * FROM diary WHERE ID=%d" % diaryid)
    db.execute(dbo, sql)
    postaudit = db.query(dbo, "SELECT * FROM diary WHERE ID=%d" % diaryid)
    audit.edit(dbo, username, "diary", audit.map_diff(preaudit, postaudit))
def rps_game():
    """Interactive rock-paper-scissors loop against a random computer pick."""
    # Each key beats its value (rock beats scissor, etc.).
    elements = {'rock': 'scissor', 'paper': 'rock', 'scissor': 'paper'}
    while True:
        clear_term()
        # Random computer choice; assumes get_item_by_index returns a
        # (key, value) pair for the nth entry - TODO confirm.
        pc_choice = get_item_by_index(elements, int(random() * 3))
        print("""Which one?
1. rock
2. paper
3. scissor
(sth else = quit)""")
        your_choice = input('Enter the number of your choice: ')
        # Any non-numeric entry quits.
        if not is_numeric(your_choice):
            break
        your_choice = get_item_by_index(elements, int(your_choice) - 1)
        # NOTE(review): 'quit' is never a key of elements, so this check looks
        # dead; out-of-range numbers (e.g. 4+) depend entirely on what
        # get_item_by_index does with a bad index - confirm.
        if your_choice[0] == 'quit':
            break
        if your_choice[0] == pc_choice[0]:
            print(f'Equal choices: {your_choice[0]}')
        elif your_choice[1] == pc_choice[0]:
            # The player's choice beats the computer's.
            print(f'You won!!! -> {your_choice[0]} vs {pc_choice[0]}')
        elif pc_choice[1] == your_choice[0]:
            print(f'You lost... -> {your_choice[0]} vs {pc_choice[0]}')
        ans = input('Play again? (type "yes", otherwise anything else...): ')
        if not ans.lower() == 'yes':
            break
def update_diary_from_form(dbo, username, post):
    """
    Updates a diary note from form data
    """
    l = dbo.locale
    diaryid = post.integer("diaryid")
    # Field validation - raise on the first problem found.
    if post["diarydate"] == "":
        raise utils.ASMValidationError(i18n._("Diary date cannot be blank", l))
    if post.date("diarydate") is None:
        raise utils.ASMValidationError(i18n._("Diary date is not valid", l))
    if post["subject"] == "":
        raise utils.ASMValidationError(
            i18n._("Diary subject cannot be blank", l))
    if post["note"] == "":
        raise utils.ASMValidationError(i18n._("Diary note cannot be blank", l))
    diarytime = post["diarytime"].strip()
    # Optional time must look like HH:MM - a colon plus digits.
    if diarytime != "" and (diarytime.find(":") == -1 or not utils.is_numeric(diarytime.replace(":", ""))):
        raise utils.ASMValidationError(
            i18n._("Invalid time, times should be in HH:MM format", l))
    # NOTE(review): unlike sibling variants, Comments is not updated here -
    # confirm that is intentional.
    sql = db.make_update_user_sql(dbo, "diary", username, "ID=%d" % diaryid, (
        ("DiaryDateTime", post.db_datetime("diarydate", "diarytime")),
        ("DiaryForName", post.db_string("diaryfor")),
        ("Subject", post.db_string("subject")),
        ("Note", post.db_string("note")),
        ("DateCompleted", post.db_date("completed"))
    ))
    # Snapshot before and after so the audit trail records the exact diff.
    preaudit = db.query(dbo, "SELECT * FROM diary WHERE ID=%d" % diaryid)
    db.execute(dbo, sql)
    postaudit = db.query(dbo, "SELECT * FROM diary WHERE ID=%d" % diaryid)
    audit.edit(dbo, username, "diary", audit.map_diff(preaudit, postaudit))
def get_waitinglist_find_simple(dbo, query="", limit=0, siteid=0):
    """
    Returns rows for simple waiting list searches.
    query: The search criteria
    """
    ss = utils.SimpleSearchBuilder(dbo, query)
    sitefilter = "" if siteid == 0 else " AND (o.SiteID = 0 OR o.SiteID = %d)" % siteid
    # If no query has been given, do a current waitinglist search
    if query == "":
        return get_waitinglist(dbo)
    if utils.is_numeric(query):
        ss.add_field_value("a.ID", utils.cint(query))
    ss.add_field("o.OwnerName")
    # Searchable additional fields attached to the waiting list entry.
    ss.add_clause("EXISTS(SELECT ad.Value FROM additional ad " \
        "INNER JOIN additionalfield af ON af.ID = ad.AdditionalFieldID AND af.Searchable = 1 " \
        "WHERE ad.LinkID=a.ID AND ad.LinkType IN (%s) AND LOWER(ad.Value) LIKE ?)" % additional.WAITINGLIST_IN)
    ss.add_large_text_fields([ "a.AnimalDescription", "a.ReasonForWantingToPart", "a.ReasonForRemoval" ])
    sql = "%s WHERE a.ID > 0 %s AND (%s) ORDER BY a.ID" % (
        get_waitinglist_query(dbo), sitefilter, " OR ".join(ss.ors))
    return dbo.query(sql, ss.values, limit=limit, distincton="ID")
def get_lostanimal_find_simple(dbo, query = "", limit = 0, onlyindexed = False):
    """
    Returns rows for simple lost animal searches.
    query: The search criteria
    """
    # Neutralise single quotes so the criteria cannot break the LIKE literal.
    term = query.replace("'", "`")
    clauses = []
    if term == "":
        # No criteria given: still-lost animals from the last 30 days.
        clauses.append("a.DateLost > %s AND a.DateFound Is Null" % db.dd(subtract_days(now(dbo.timezone), 30)))
    else:
        if utils.is_numeric(term):
            clauses.append("a.ID = " + str(utils.cint(term)))
        fields = ["OwnerName", "AreaLost", "AreaPostcode"]
        if not onlyindexed:
            # Unindexed columns are only searched when the caller allows it.
            fields += ["SexName", "BreedName", "BaseColourName", "SpeciesName",
                       "AgeGroup", "DistFeat", "Comments"]
        clauses.extend(utils.where_text_filter(dbo, f, term) for f in fields)
    sql = get_lostanimal_query() + " WHERE " + " OR ".join(clauses)
    if limit > 0:
        sql += " LIMIT " + str(limit)
    return db.query(dbo, sql)
def update_diary_from_form(dbo, username, post):
    """
    Updates a diary note from form data
    """
    l = dbo.locale
    # Field validation - raise on the first problem found.
    if post["diarydate"] == "":
        raise utils.ASMValidationError(i18n._("Diary date cannot be blank", l))
    if post.date("diarydate") is None:
        raise utils.ASMValidationError(i18n._("Diary date is not valid", l))
    if post["subject"] == "":
        raise utils.ASMValidationError(
            i18n._("Diary subject cannot be blank", l))
    if post["note"] == "":
        raise utils.ASMValidationError(i18n._("Diary note cannot be blank", l))
    diarytime = post["diarytime"].strip()
    # Optional time must look like HH:MM - a colon plus digits.
    if diarytime != "" and (diarytime.find(":") == -1 or not utils.is_numeric(diarytime.replace(":", ""))):
        raise utils.ASMValidationError(
            i18n._("Invalid time, times should be in HH:MM format", l))
    dbo.update("diary", post.integer("diaryid"), {
        "DiaryDateTime": post.datetime("diarydate", "diarytime"),
        "DiaryForName": post["diaryfor"],
        "Subject": post["subject"],
        "Note": post["note"],
        "Comments": post["comments"],
        "DateCompleted": post.date("completed")
    }, username)
def get_foundanimal_find_simple(dbo, query = "", limit = 0, siteid = 0):
    """
    Returns rows for simple found animal searches.
    query: The search criteria
    """
    ss = utils.SimpleSearchBuilder(dbo, query)
    sitefilter = "" if siteid == 0 else " AND (o.SiteID = 0 OR o.SiteID = %d)" % siteid
    if query == "":
        # No criteria given: unreturned found animals from the last 30 days.
        ss.ors.append("a.DateFound > ? AND a.ReturnToOwnerDate Is Null %s" % sitefilter)
        ss.values.append(dbo.today(offset=-30))
    else:
        if utils.is_numeric(query):
            ss.add_field_value("a.ID", utils.cint(query))
        ss.add_fields([ "o.OwnerName", "a.AreaFound", "a.AreaPostcode" ])
        # Searchable additional fields attached to the found animal record.
        ss.add_clause("EXISTS(SELECT ad.Value FROM additional ad " \
            "INNER JOIN additionalfield af ON af.ID = ad.AdditionalFieldID AND af.Searchable = 1 " \
            "WHERE ad.LinkID=a.ID AND ad.LinkType IN (%s) AND LOWER(ad.Value) LIKE ?)" % additional.FOUNDANIMAL_IN)
        ss.add_large_text_fields([ "b.BreedName", "a.DistFeat", "a.Comments" ])
    sql = "%s WHERE a.ID > 0 %s AND (%s)" % (
        get_foundanimal_query(dbo), sitefilter, " OR ".join(ss.ors))
    return dbo.query(sql, ss.values, limit=limit, distincton="ID")
def __repr__(self):
    """To String for Question"""
    # Numeric values are threshold comparisons; everything else is equality.
    op = ">=" if is_numeric(self.value) else "=="
    # TODO: how to get the header value???
    # return f"{header[self.column]} {op} {str(self.value)}"
    return f"{op} {str(self.value)}"
def dereference(c):
    """Resolve '~label' definitions and '$label' references in a token stream.

    Two passes: the first records label offsets and strips structural markers,
    the second expands numbers and label references into PUSH sequences.
    """
    c = map(utils.tokenify, c)
    # NOTE(review): len() on a map object fails on Python 3 - this code
    # assumes Python 2, where map returns a list. Confirm target version.
    # Byte width needed to encode any label address in this code.
    label_length = utils.log256(len(c)*4)
    iq = [x for x in c]    # input queue
    mq = []                # tokens surviving the first pass
    pos = 0                # byte offset of the next emitted instruction
    labelmap = {}          # label name -> offset within enclosing #CODE block
    beginning_stack = [0]  # start offsets of nested #CODE_BEGIN blocks
    while len(iq):
        front = iq.pop(0)
        if not utils.is_numeric(front.val) and front.val[0] == '~':
            # '~label' definition: record its offset, emit nothing.
            labelmap[front.val[1:]] = pos - beginning_stack[-1]
        elif front.val == '#CODE_BEGIN':
            beginning_stack.append(pos)
        elif front.val == '#CODE_END':
            beginning_stack.pop()
        else:
            mq.append(front)
            # Advance pos by the encoded size of this token.
            if utils.is_numeric(front.val):
                pos += 1 + max(1, utils.log256(front.val))
            elif front.val[:1] == '$':
                pos += label_length + 1
            else:
                pos += 1
    oq = []
    for m in mq:
        oqplus = []
        if utils.is_numberlike(m.val):
            m.val = utils.numberize(m.val)
        if utils.is_numeric(m.val):
            # Literal number: PUSH<n> followed by its big-endian bytes.
            L = max(1, utils.log256(m.val))
            oqplus.append('PUSH' + str(L))
            oqplus.extend(utils.tobytearr(m.val, L))
        elif m.val[:1] == '$':
            vals = m.val[1:].split('.')
            if len(vals) == 1:
                # '$label': push the label's offset.
                oqplus.append('PUSH'+str(label_length))
                oqplus.extend(utils.tobytearr(labelmap[vals[0]], label_length))
            else:
                # '$a.b': push the distance between two labels.
                oqplus.append('PUSH'+str(label_length))
                value = labelmap[vals[1]] - labelmap[vals[0]]
                oqplus.extend(utils.tobytearr(value, label_length))
        else:
            oqplus.append(m)
        # Re-tokenify expansions so each keeps the source token's metadata.
        oq.extend(map(lambda x: utils.tokenify(x, *m.metadata), oqplus))
    return oq
def dereference(c):
    """Resolve '~label' definitions and '$label' references in a token stream.

    Two passes: the first records label offsets and strips structural markers,
    the second expands numbers and label references into PUSH sequences.
    """
    c = map(utils.tokenify, c)
    # NOTE(review): len() on a map object fails on Python 3 - this code
    # assumes Python 2, where map returns a list. Confirm target version.
    # Byte width needed to encode any label address in this code.
    label_length = utils.log256(len(c) * 4)
    iq = [x for x in c]    # input queue
    mq = []                # tokens surviving the first pass
    pos = 0                # byte offset of the next emitted instruction
    labelmap = {}          # label name -> offset within enclosing #CODE block
    beginning_stack = [0]  # start offsets of nested #CODE_BEGIN blocks
    while len(iq):
        front = iq.pop(0)
        if not utils.is_numeric(front.val) and front.val[0] == '~':
            # '~label' definition: record its offset, emit nothing.
            labelmap[front.val[1:]] = pos - beginning_stack[-1]
        elif front.val == '#CODE_BEGIN':
            beginning_stack.append(pos)
        elif front.val == '#CODE_END':
            beginning_stack.pop()
        else:
            mq.append(front)
            # Advance pos by the encoded size of this token.
            if utils.is_numeric(front.val):
                pos += 1 + max(1, utils.log256(front.val))
            elif front.val[:1] == '$':
                pos += label_length + 1
            else:
                pos += 1
    oq = []
    for m in mq:
        oqplus = []
        if utils.is_numberlike(m.val):
            m.val = utils.numberize(m.val)
        if utils.is_numeric(m.val):
            # Literal number: PUSH<n> followed by its big-endian bytes.
            L = max(1, utils.log256(m.val))
            oqplus.append('PUSH' + str(L))
            oqplus.extend(utils.tobytearr(m.val, L))
        elif m.val[:1] == '$':
            vals = m.val[1:].split('.')
            if len(vals) == 1:
                # '$label': push the label's offset.
                oqplus.append('PUSH' + str(label_length))
                oqplus.extend(utils.tobytearr(labelmap[vals[0]], label_length))
            else:
                # '$a.b': push the distance between two labels.
                oqplus.append('PUSH' + str(label_length))
                value = labelmap[vals[1]] - labelmap[vals[0]]
                oqplus.extend(utils.tobytearr(value, label_length))
        else:
            oqplus.append(m)
        # Re-tokenify expansions so each keeps the source token's metadata.
        oq.extend(map(lambda x: utils.tokenify(x, *m.metadata), oqplus))
    return oq
def plcNeutered(self, neutered):
    """Map a free-text or numeric neutered flag to 'y'/'n'/'u' (unknown)."""
    if utils.is_str(neutered):
        # Substring match catches "Spayed"/"spayed" and "Neutered"/"neutered"
        # regardless of leading capitalisation.
        if neutered.find("payed") != -1 or neutered.find("eutered") != -1:
            return "y"
        return "u"
    if utils.is_numeric(neutered):
        return "y" if neutered == 1 else "n"
    return "u"
def dereference(c):
    """Resolve '~label' definitions and '$label' references in a code list.

    Two passes: the first records label offsets and strips structural markers,
    the second expands numbers and label references into PUSH sequences.
    """
    # Byte width needed to encode any label address in this code.
    label_length = utils.log256(len(c)*4)
    iq = [x for x in c]    # input queue
    mq = []                # items surviving the first pass
    pos = 0                # byte offset of the next emitted instruction
    labelmap = {}          # label name -> offset within enclosing #CODE block
    beginning_stack = [0]  # start offsets of nested #CODE_BEGIN blocks
    while len(iq):
        front = iq.pop(0)
        if not utils.is_numeric(front) and front[0] == '~':
            # '~label' definition: record its offset, emit nothing.
            labelmap[front[1:]] = pos - beginning_stack[-1]
        elif front == '#CODE_BEGIN':
            beginning_stack.append(pos)
        elif front == '#CODE_END':
            beginning_stack.pop()
        else:
            mq.append(front)
            # Advance pos by the encoded size of this item.
            if utils.is_numeric(front):
                pos += 1 + max(1, utils.log256(front))
            elif front[:1] == '$':
                pos += label_length + 1
            else:
                pos += 1
    oq = []
    for m in mq:
        oqplus = []
        if utils.is_numeric(m):
            # Literal number: PUSH<n> followed by its big-endian bytes.
            L = max(1, utils.log256(m))
            oqplus.append('PUSH' + str(L))
            oqplus.extend(utils.tobytearr(m, L))
        elif m[:1] == '$':
            vals = m[1:].split('.')
            if len(vals) == 1:
                # '$label': push the label's offset.
                oqplus.append('PUSH'+str(label_length))
                oqplus.extend(utils.tobytearr(labelmap[vals[0]], label_length))
            else:
                # '$a.b': push the distance between two labels.
                oqplus.append('PUSH'+str(label_length))
                value = labelmap[vals[1]] - labelmap[vals[0]]
                oqplus.extend(utils.tobytearr(value, label_length))
        else:
            oqplus.append(m)
        oq.extend(oqplus)
    return oq
def get_comparator(var):
    """
    Returns appropriate comparator depending on variable type
    (<= for numeric, == for categorical)
    :param var: Obesrvations of single variable
    :type var: Union[array, Series]
    :return: (builtin_function_or_method) Python operator
    """
    return operator.le if utils.is_numeric(var) else operator.eq
def add_rain(image, slant=-1, drop_length=20, drop_width=1, drop_color=(200, 200, 200), rain_type='None'):  ## (200,200,200) a shade of gray
    """Overlay rain streaks on an image or a list of images.

    slant: -1 means pick a random slant; otherwise must be in [-20, 20].
    drop_width must be in [1, 5]; drop_length in [0, 100].
    Returns the processed image, or a list if a list was passed in.
    """
    ut.verify_image(image)
    slant_extreme = slant
    # Parameter validation: reject values outside the documented ranges.
    if not (ut.is_numeric(slant_extreme) and (slant_extreme >= -20 and slant_extreme <= 20) or slant_extreme == -1):
        raise Exception(err_rain_slant)
    if not (ut.is_numeric(drop_width) and drop_width >= 1 and drop_width <= 5):
        raise Exception(err_rain_width)
    if not (ut.is_numeric(drop_length) and drop_length >= 0 and drop_length <= 100):
        raise Exception(err_rain_length)
    if (ut.is_list(image)):
        image_RGB = []
        image_list = image
        imshape = image[0].shape
        if slant_extreme == -1:
            slant = np.random.randint(
                -10, 10)  ##generate random slant if no slant value is given
        rain_drops, drop_length = generate_random_lines(
            imshape, slant, drop_length, rain_type)
        for img in image_list:
            # NOTE(review): rain_process receives slant_extreme (possibly the
            # -1 sentinel) rather than the randomised slant - confirm intended.
            output = rain_process(img, slant_extreme, drop_length, drop_color,
                                  drop_width, rain_drops)
            image_RGB.append(output)
    else:
        imshape = image.shape
        if slant_extreme == -1:
            slant = np.random.randint(
                -10, 10)  ##generate random slant if no slant value is given
        rain_drops, drop_length = generate_random_lines(
            imshape, slant, drop_length, rain_type)
        # NOTE(review): same slant_extreme pass-through as above.
        output = rain_process(image, slant_extreme, drop_length, drop_color,
                              drop_width, rain_drops)
        image_RGB = output
    return image_RGB
def numberize(arg):
    """Convert a token to its numeric form: pass numbers through, map
    opcode mnemonics and PUSHn via the opcode table, parse decimal digits.
    Raises Exception for anything unserializable."""
    if utils.is_numeric(arg):
        return arg
    elif arg.upper() in reverse_opcodes:
        return reverse_opcodes[arg.upper()]
    elif arg[:4] == 'PUSH':
        # PUSH1..PUSH32 occupy opcodes 96..127 (0x60..0x7f).
        return 95 + int(arg[4:])
    elif re.match('^[0-9]+$', arg):
        # '+' rather than '*': the empty string must fall through to the
        # error below instead of raising ValueError from int('').
        return int(arg)
    else:
        # NOTE(review): 'source' is not defined in this function - confirm it
        # is a module-level name, otherwise this raise itself NameErrors.
        raise Exception("Cannot serialize: " + str(arg), source)
def partition(self, df):
    """Split *df* rows by this question's test on its column:
    >= threshold for numeric critical values, equality otherwise.
    Returns (rows where the test holds, rows where it fails)."""
    column = df[:, self.column]
    if is_numeric(self.critical_value):
        selected = column >= self.critical_value
    else:
        selected = column == self.critical_value
    return df[selected], df[~selected]
def numberize(arg):
    """Convert a token to its numeric form: pass numbers through, map
    opcode mnemonics and PUSHn via the opcode table, parse decimal digits.
    Raises Exception for anything unserializable."""
    if utils.is_numeric(arg):
        return arg
    elif arg in reverse_opcodes:
        return reverse_opcodes[arg]
    elif arg[:4] == 'PUSH':
        # PUSH1..PUSH32 occupy opcodes 96..127 (0x60..0x7f).
        return 95 + int(arg[4:])
    elif re.match('^[0-9]+$', arg):
        # '+' rather than '*': the empty string must fall through to the
        # error below instead of raising ValueError from int('').
        return int(arg)
    else:
        raise Exception("Cannot serialize: " + str(arg))
def enc(n):
    """Encode a value as a 32-byte big-endian word (Python 2 str semantics)."""
    if utils.is_numeric(n):
        # Number -> 32 raw bytes, big-endian.
        return ''.join(map(chr, utils.tobytearr(n, 32)))
    elif utils.is_string(n) and len(n) == 40:
        # 40 hex chars = a 20-byte address: left-pad to 32 bytes.
        # NOTE(review): .decode('hex') is Python-2-only.
        return '\x00' * 12 + n.decode('hex')
    elif utils.is_string(n):
        # Shorter string: left-pad with NULs to 32 bytes.
        return '\x00' * (32 - len(n)) + n
    elif n is True:
        # NOTE(review): returns int 1 here, not a 32-byte string like the
        # branches above - confirm callers expect this. Likely unreachable
        # if is_numeric() treats bools as numeric.
        return 1
    elif n is False or n is None:
        # NOTE(review): int 0, same inconsistency as above.
        return 0
def involved_features_from_rules(rules: Iterable[str]) -> Set[str]:
    """Extract the features involved in the rules"""
    # Operators and numeric literals are not feature names.
    operators = {"<=", "&", ">"}
    features: Set[str] = set()
    for rule in rules:
        features.update(
            token for token in rule.split(" ")
            if token not in operators and not is_numeric(token))
    return features
def execute(self, serv, canal, handle, message):
    """Send message-count statistics (day/week/month/all-time) to *handle*
    as IRC notices; if *message* is numeric, also list the top-N posters."""
    stats = OrderedDict()
    stats['day'] = {}
    stats['week'] = {}
    stats['month'] = {}
    stats['all'] = {}
    # Message title
    stats['day']['title'] = "Daily statistics"
    stats['week']['title'] = "Weekly statistics"
    stats['month']['title'] = "Monthly statistics"
    stats['all']['title'] = "All time statistics"
    # Mongo information
    stats['day']['mongo'] = self.day_collection.find({})
    stats['week']['mongo'] = self.week_collection.find({})
    stats['month']['mongo'] = self.month_collection.find({})
    stats['all']['mongo'] = self.all_collection.find({})
    # Array initialization
    for key, value in stats.items():
        stats[key]['detailed'] = []
    # Detailed information: per-handle counts, sorted descending.
    for key, value in stats.items():
        for current_handle in value['mongo']:
            stats[key]['detailed'].append(dict(handle=current_handle['handle'],
                                               messages=int(current_handle['messages'])))
        value.pop('mongo', None)
        stats[key]['detailed'] = sorted(stats[key]['detailed'],
                                        key=lambda k: k['messages'], reverse=True)
    # Total count per period via a Mongo $sum aggregate.
    stats['day']['total'] = self.day_collection.aggregate([{"$group": {"_id": "null", "total": {"$sum": "$messages"}}}])
    stats['day']['total'] = int(stats['day']['total']['result'][0]['total'])
    stats['week']['total'] = self.week_collection.aggregate([{"$group": {"_id": "null", "total": {"$sum": "$messages"}}}])
    stats['week']['total'] = int(stats['week']['total']['result'][0]['total'])
    stats['month']['total'] = self.month_collection.aggregate([{"$group": {"_id": "null", "total": {"$sum": "$messages"}}}])
    stats['month']['total'] = int(stats['month']['total']['result'][0]['total'])
    stats['all']['total'] = self.all_collection.aggregate([{"$group": {"_id": "null", "total": {"$sum": "$messages"}}}])
    stats['all']['total'] = int(stats['all']['total']['result'][0]['total'])
    serv.notice(handle, "\00303\002Messages count statistics")
    # One section per period; control codes are IRC colour/bold formatting.
    for key, value in stats.items():
        message_spell = "messages" if value['total'] > 1 else "message"
        serv.notice(handle, "\00302\002" + value['title'])
        serv.notice(handle, "\002Total :\017 " + str(value['total']) + " " + message_spell)
        # A numeric command argument means "show that many top posters".
        if utils.is_numeric(message):
            for num in range(0, min(int(message), len(value['detailed']))):
                message_spell = "messages" if value['detailed'][num]['messages'] > 1 else "message"
                serv.notice(handle, "\002" + str(num + 1) + ". " + value['detailed'][num]['handle'] + ": \017" + str(value['detailed'][num]['messages']) + " " + message_spell)
        # NOTE(review): separator placement reconstructed from collapsed
        # source - confirm it belongs inside the per-period loop.
        serv.notice(handle, "****************")
def __init__(self, covfunc, *args, **kwargs):
    """Covariance-function node: wraps numeric hyperparameters in *args*
    as constant nodes before initialising the base Node."""
    self.covfunc = covfunc
    params = list(args)
    for i in range(len(args)):
        # Check constant parameters
        if utils.is_numeric(args[i]):
            # Promote plain numbers to constant nodes of matching shape.
            params[i] = ef.NodeConstant([np.asanyarray(args[i])],
                                        dims=[np.shape(args[i])])
            # TODO: Parameters could be constant functions? :)
    # dims=(inf, inf): the covariance matrix size is unbounded/unknown here.
    ef.Node.__init__(self, *params, dims=[(np.inf, np.inf)], **kwargs)
def divide_on_question(self, rows):
    '''left list contains the true valued rows for the given question.
    Assuming rows is a pandas Data Frame
    '''
    column = rows.iloc[:, self.col_index]
    # Numeric columns split on a >= threshold, categoricals on equality.
    if helper.is_numeric(rows, self.col_index):
        selected = column >= self.wedge
    else:
        selected = column == self.wedge
    return rows[selected], rows[~selected]
def _determine_type(self):
    """Classify the operand string as MEM, CON or REG, stripping any
    leading size specifier; raises InvalidInstructionException otherwise."""
    tokens = self.str.split(' ')
    # An optional leading size specifier precedes the operand proper.
    if tokens[0] in operand_size_dict:
        self.str = tokens[1]
        self.size_specifier_str = tokens[0]
    if '[' in self.str:
        # Bracketed operand: must be a complete memory reference.
        if ']' not in self.str:
            raise InvalidInstructionException("Expected ']'")
        self.type = OperandType.MEM
    elif is_numeric(self.str):
        self.type = OperandType.CON
    elif self.str in register_dict:
        self.type = OperandType.REG
    else:
        raise InvalidInstructionException("Unknown operand: {}".format(
            self.str))
def get_waitinglist_find_simple(dbo, query="", limit=0, onlyindexed=False):
    """
    Returns rows for simple waiting list searches.
    query: The search criteria
    """
    # If no query has been given, do a current waitinglist search
    if query == "":
        return get_waitinglist(dbo)
    # Neutralise single quotes so the criteria cannot terminate the LIKE
    # literal below (the other *_find_simple functions do the same; this
    # one previously interpolated the raw query into the SQL).
    query = query.replace("'", "`")
    ors = []
    add = lambda f: "LOWER(%s) LIKE '%%%s%%'" % (f, query.lower())
    if utils.is_numeric(query):
        ors.append("a.ID = " + str(utils.cint(query)))
    ors.append(add("a.OwnerName"))
    ors.append(add("a.AnimalDescription"))
    if not onlyindexed:
        # Unindexed free-text columns only when the caller allows them.
        ors.append(add("a.ReasonForWantingToPart"))
        ors.append(add("a.ReasonForRemoval"))
    sql = get_waitinglist_query() + " WHERE " + " OR ".join(ors)
    if limit > 0:
        sql += " LIMIT " + str(limit)
    return db.query(dbo, sql)
def get_waitinglist_find_simple(dbo, query = "", limit = 0, onlyindexed = False):
    """
    Returns rows for simple waiting list searches.
    query: The search criteria
    """
    # If no query has been given, do a current waitinglist search
    if query == "":
        return get_waitinglist(dbo)
    # Neutralise single quotes so the criteria cannot terminate the LIKE
    # literal below (the other *_find_simple functions do the same; this
    # one previously interpolated the raw query into the SQL).
    query = query.replace("'", "`")
    ors = []
    add = lambda f: "LOWER(%s) LIKE '%%%s%%'" % (f, query.lower())
    if utils.is_numeric(query):
        ors.append("a.ID = " + str(utils.cint(query)))
    ors.append(add("a.OwnerName"))
    ors.append(add("a.AnimalDescription"))
    if not onlyindexed:
        # Unindexed free-text columns only when the caller allows them.
        ors.append(add("a.ReasonForWantingToPart"))
        ors.append(add("a.ReasonForRemoval"))
    sql = get_waitinglist_query() + " WHERE " + " OR ".join(ors)
    if limit > 0:
        sql += " LIMIT " + str(limit)
    return db.query(dbo, sql)
def insert_diary_from_form(dbo, username, linktypeid, linkid, post):
    """
    Creates a diary note from the form data
    username: User creating the diary
    linktypeid, linkid: The link
    post: A PostedData object
    """
    l = dbo.locale
    # Field validation - raise on the first problem found.
    if post["diarydate"] == "":
        raise utils.ASMValidationError(i18n._("Diary date cannot be blank", l))
    if post.date("diarydate") is None:
        raise utils.ASMValidationError(i18n._("Diary date is not valid", l))
    if post["subject"] == "":
        raise utils.ASMValidationError(
            i18n._("Diary subject cannot be blank", l))
    if post["note"] == "":
        raise utils.ASMValidationError(i18n._("Diary note cannot be blank", l))
    diarytime = post["diarytime"].strip()
    # Optional time must look like HH:MM - a colon plus digits.
    if diarytime != "" and (diarytime.find(":") == -1 or not utils.is_numeric(diarytime.replace(":", ""))):
        raise utils.ASMValidationError(
            i18n._("Invalid time, times should be in HH:MM format", l))
    linkinfo = get_link_info(dbo, linktypeid, linkid)
    return dbo.insert("diary", {
        "LinkID": linkid,
        "LinkType": linktypeid,
        "LinkInfo": linkinfo,
        "DiaryDateTime": post.datetime("diarydate", "diarytime"),
        "DiaryForName": post["diaryfor"],
        "Subject": post["subject"],
        "Note": post["note"],
        "Comments": post["comments"],
        "DateCompleted": post.date("completed")
    }, username)
def insert_diary_from_form(dbo, username, linktypeid, linkid, post):
    """
    Creates a diary note from the form data
    username: User creating the diary
    linktypeid, linkid: The link
    post: A PostedData object
    """
    l = dbo.locale
    # Field validation - raise on the first problem found.
    if post["diarydate"] == "":
        raise utils.ASMValidationError(i18n._("Diary date cannot be blank", l))
    if post.date("diarydate") is None:
        raise utils.ASMValidationError(i18n._("Diary date is not valid", l))
    if post["subject"] == "":
        raise utils.ASMValidationError(
            i18n._("Diary subject cannot be blank", l))
    if post["note"] == "":
        raise utils.ASMValidationError(i18n._("Diary note cannot be blank", l))
    diarytime = post["diarytime"].strip()
    # Optional time must look like HH:MM - a colon plus digits.
    if diarytime != "" and (diarytime.find(":") == -1 or not utils.is_numeric(diarytime.replace(":", ""))):
        raise utils.ASMValidationError(
            i18n._("Invalid time, times should be in HH:MM format", l))
    linkinfo = get_link_info(dbo, linktypeid, linkid)
    diaryid = db.get_id(dbo, "diary")
    sql = db.make_insert_user_sql(dbo, "diary", username, (
        ("ID", db.di(diaryid)),
        ("LinkID", db.di(linkid)),
        ("LinkType", db.di(linktypeid)),
        ("LinkInfo", db.ds(linkinfo)),
        ("DiaryDateTime", post.db_datetime("diarydate", "diarytime")),
        ("DiaryForName", post.db_string("diaryfor")),
        ("Subject", post.db_string("subject")),
        ("Note", post.db_string("note")),
        ("DateCompleted", post.db_date("completed"))
    ))
    db.execute(dbo, sql)
    audit.create(dbo, username, "diary", str(diaryid))
    return diaryid
def insert_diary_from_form(dbo, username, linktypeid, linkid, post):
    """
    Creates a diary note from the form data
    username: User creating the diary
    linktypeid, linkid: The link
    post: A PostedData object
    """
    l = dbo.locale
    # Field validation - raise on the first problem found.
    if post["diarydate"] == "":
        raise utils.ASMValidationError(i18n._("Diary date cannot be blank", l))
    if post.date("diarydate") is None:
        raise utils.ASMValidationError(i18n._("Diary date is not valid", l))
    if post["subject"] == "":
        raise utils.ASMValidationError(i18n._("Diary subject cannot be blank", l))
    if post["note"] == "":
        raise utils.ASMValidationError(i18n._("Diary note cannot be blank", l))
    diarytime = post["diarytime"].strip()
    # Optional time must look like HH:MM - a colon plus digits.
    if diarytime != "" and (diarytime.find(":") == -1 or not utils.is_numeric(diarytime.replace(":", ""))):
        raise utils.ASMValidationError(i18n._("Invalid time, times should be in HH:MM format", l))
    linkinfo = get_link_info(dbo, linktypeid, linkid)
    diaryid = db.get_id(dbo, "diary")
    sql = db.make_insert_user_sql(dbo, "diary", username, (
        ("ID", db.di(diaryid)),
        ("LinkID", db.di(linkid)),
        ("LinkType", db.di(linktypeid)),
        ("LinkInfo", db.ds(linkinfo)),
        ("DiaryDateTime", post.db_datetime("diarydate", "diarytime")),
        ("DiaryForName", post.db_string("diaryfor")),
        ("Subject", post.db_string("subject")),
        ("Note", post.db_string("note")),
        ("Comments", post.db_string("comments")),
        ("DateCompleted", post.db_date("completed"))
    ))
    db.execute(dbo, sql)
    audit.create(dbo, username, "diary", diaryid, audit.dump_row(dbo, "diary", diaryid))
    return diaryid
def insert_diary_from_form(dbo, username, linktypeid, linkid, data):
    """
    Create a diary note from web.py form data and return its new ID.
    username: User creating the diary
    linktypeid, linkid: The link
    data: The web.py form object
    Raises utils.ASMValidationError when a mandatory field is missing/invalid.
    """
    l = dbo.locale
    def bail(msg):
        # every validation failure surfaces as a localized ASMValidationError
        raise utils.ASMValidationError(i18n._(msg, l))
    if utils.df_ks(data, "diarydate") == "":
        bail("Diary date cannot be blank")
    if utils.df_kd(data, "diarydate", l) is None:
        bail("Diary date is not valid")
    if utils.df_ks(data, "subject") == "":
        bail("Diary subject cannot be blank")
    if utils.df_ks(data, "note") == "":
        bail("Diary note cannot be blank")
    diarytime = utils.df_ks(data, "diarytime").strip()
    # a non-empty time must contain a colon and be numeric once colons are removed
    if diarytime != "" and (":" not in diarytime or not utils.is_numeric(diarytime.replace(":", ""))):
        bail("Invalid time, times should be in HH:MM format")
    linkinfo = get_link_info(dbo, linktypeid, linkid)
    diaryid = db.get_id(dbo, "diary")
    fields = (
        ("ID", db.di(diaryid)),
        ("LinkID", db.di(linkid)),
        ("LinkType", db.di(linktypeid)),
        ("LinkInfo", db.ds(linkinfo)),
        ("DiaryDateTime", utils.df_dt(data, "diarydate", "diarytime", l)),
        ("DiaryForName", utils.df_t(data, "diaryfor")),
        ("Subject", utils.df_t(data, "subject")),
        ("Note", utils.df_t(data, "note")),
        ("DateCompleted", utils.df_d(data, "completed", l)))
    db.execute(dbo, db.make_insert_user_sql(dbo, "diary", username, fields))
    audit.create(dbo, username, "diary", str(diaryid))
    return diaryid
def get_animalcontrol_find_simple(dbo, query="", username="", limit=0, siteid=0):
    """
    Returns rows for simple animal control searches.
    query: The search criteria
    siteid: restrict to a site when non-zero (records with SiteID 0 always match)
    """
    ss = utils.SimpleSearchBuilder(dbo, query)
    sitefilter = " AND (ac.SiteID = 0 OR ac.SiteID = %d)" % siteid if siteid != 0 else ""
    if query == "":
        # No criteria given: show open incidents from the last 30 days
        ss.ors.append("ac.IncidentDateTime > %s AND ac.CompletedDate Is Null %s" % (dbo.sql_date(dbo.today(offset=-30)), sitefilter))
    else:
        # A purely numeric query can match an incident ID directly
        if utils.is_numeric(query):
            ss.add_field_value("ac.ID", utils.cint(query))
        ss.add_fields([ "co.OwnerName", "ti.IncidentName", "ac.DispatchAddress", "ac.DispatchPostcode",
            "o1.OwnerName", "o2.OwnerName", "o3.OwnerName", "vo.OwnerName" ])
        # Searchable additional fields attached to incidents
        adclause = u"EXISTS(SELECT ad.Value FROM additional ad " \
            "INNER JOIN additionalfield af ON af.ID = ad.AdditionalFieldID AND af.Searchable = 1 " \
            "WHERE ad.LinkID=ac.ID AND ad.LinkType IN (%s) AND LOWER(ad.Value) LIKE ?)" % (additional.INCIDENT_IN)
        ss.add_clause(adclause)
        ss.add_large_text_fields([ "ac.CallNotes", "ac.AnimalDescription" ])
    sql = "%s WHERE ac.ID > 0 %s AND (%s) ORDER BY ac.ID" % (get_animalcontrol_query(dbo), sitefilter, " OR ".join(ss.ors))
    rows = dbo.query(sql, ss.values, limit=limit, distincton="ID")
    return reduce_find_results(dbo, username, rows)
def decint(n, signed=False):
    """Decode *n* into a plain integer for ABI encoding.

    Accepts ints (range-checked against the 256-bit bounds), 40-char hex or
    <=32-byte big-endian byte strings (interpreted two's-complement when
    *signed*), booleans and None.
    Raises EncodingError for out-of-range numbers, overlong strings, or
    unsupported types.
    """
    if isinstance(n, str):
        n = utils.to_string(n)
    if is_numeric(n):
        # FIX: renamed locals so the builtins min()/max() are not shadowed
        # (and to match the sibling implementation that uses min_/max_).
        min_, max_ = (-TT255, TT255 - 1) if signed else (0, TT256 - 1)
        if n > max_ or n < min_:
            raise EncodingError("Number out of range: %r" % n)
        return n
    elif is_string(n):
        if len(n) == 40:
            # 40 hex chars -> 20 raw bytes (address form)
            n = decode_hex(n)
        if len(n) > 32:
            raise EncodingError("String too long: %r" % n)
        i = big_endian_to_int(n)
        # two's-complement adjustment for negative signed values
        return (i - TT256) if signed and i >= TT255 else i
    elif n is True:
        return 1
    elif n is False or n is None:
        return 0
    else:
        raise EncodingError("Cannot encode integer: %r" % n)
def decint(n, signed=False):
    """Decode *n* into a plain integer for ABI encoding.

    Ints are range-checked against the 256-bit bounds; byte/hex strings of
    up to 32 bytes are read big-endian (two's-complement when *signed*);
    booleans and None map to 1/0.  Raises EncodingError otherwise.
    """
    if isinstance(n, str):
        n = utils.to_string(n)
    # NB: branch order matters — bools satisfy is_numeric, so they are
    # handled by the numeric branch before the `n is True` checks.
    if is_numeric(n):
        if signed:
            lower, upper = -TT255, TT255 - 1
        else:
            lower, upper = 0, TT256 - 1
        if not (lower <= n <= upper):
            raise EncodingError("Number out of range: %r" % n)
        return n
    elif is_string(n):
        if len(n) == 40:
            # 40 hex chars -> 20 raw bytes (address form)
            n = decode_hex(n)
        if len(n) > 32:
            raise EncodingError("String too long: %r" % n)
        value = big_endian_to_int(n)
        if signed and value >= TT255:
            # two's-complement adjustment for negative signed values
            return value - TT256
        return value
    elif n is True:
        return 1
    elif n is False or n is None:
        return 0
    else:
        raise EncodingError("Cannot encode integer: %r" % n)
def fit(self, x, y, store_losses=False, verbose=False):
    """Train the network on inputs *x* and targets *y* by mini-batch
    gradient descent; returns self.

    x, y: array-likes of training inputs and targets (y must be 2-D:
        its shape[1] sizes the output layer on first fit).
    store_losses: keep per-epoch losses in self.losses.
    verbose: print the loss after every epoch.

    Training stops after self.max_epochs epochs or once the epoch-to-epoch
    loss change has been below self.tol for self.n_epochs_no_change epochs
    (the counter is cumulative — it is never reset on improvement).
    """
    self.input_vals, self.target_vals = x, y
    if self.fit_intercept:
        # prepend a bias column of ones
        self.input_vals = np.hstack((np.ones((len(x), 1)), x))
    self.hidden_layers[0].init_weights(self.input_vals.shape[1])
    # identity output for real-valued targets, logistic otherwise
    self.output_activation = self.output_activation or ((utils.is_numeric(y, int_as_numeric=False) and 'identity') or 'logistic')
    if hasattr(self, 'output_layer'):
        self.output_layer.init_weights(len(self.hidden_layers[-1].weights))
    else:
        self.add_output_layer(y.shape[1], self.output_activation)
    self.loss_fn = self.loss_fn or self.set_loss_fn()
    if store_losses:
        self.losses = []
    self._feed_forward(self.input_vals)
    loss = self._compute_loss()
    # removed unused local `last_diff` from the original
    epochs, n_epochs_no_change, n = 0, 0, self.input_vals.shape[0]
    while (n_epochs_no_change < self.n_epochs_no_change) and (epochs < self.max_epochs):
        self._shuffle_obs()
        i = 0
        while i < n:
            end = min(i + self.samples_per_cycle, n)
            self._feed_forward(self.input_vals[i:end])
            self._back_propagate(self.target_vals[i:end])
            self._update_weights(self.input_vals[i:end])
            i = end
        self._feed_forward(self.input_vals)
        next_loss = self._compute_loss()
        if abs(loss - next_loss) < self.tol:
            n_epochs_no_change += 1
        loss = next_loss
        if store_losses:
            self.losses.append(loss)
        if verbose:
            # BUG FIX: original referenced undefined name `epoch`
            # (NameError whenever verbose=True); the counter is `epochs`.
            print('epoch %d: loss = %f' % (epochs, loss))
        epochs += 1
    # free the training copies; the fitted weights live in the layers
    del self.input_vals, self.target_vals
    return self
def from_snapshot(cls, snapshot_data, env, executing_on_head=False):
    """Rebuild a State from a snapshot dict (the inverse of to_snapshot).

    snapshot_data must carry either a full "alloc" account dump or a
    "state_root" (which requires the original DB behind *env*).
    """
    state = State(env=env)
    if "alloc" in snapshot_data:
        for addr, account in snapshot_data["alloc"].items():
            if len(addr) == 40:
                # hex-encoded address -> raw 20 bytes
                addr = decode_hex(addr)
            assert len(addr) == 20
            if 'balance' in account:
                state.set_balance(addr, utils.bin_to_object(account['balance']))
            if 'nonce' in account:
                state.set_nonce(addr, parse_as_int(account['nonce']))
    elif "state_root" in snapshot_data:
        state.trie.root_hash = parse_as_bin(snapshot_data["state_root"])
    else:
        raise Exception(
            "Must specify either alloc or state root parameter")
    # Restore the non-state-root bookkeeping fields, falling back to defaults
    for k, default in STATE_DEFAULTS.items():
        default = copy.copy(default)
        present = k in snapshot_data
        v = snapshot_data[k] if present else None
        if is_numeric(default):
            setattr(state, k, parse_as_int(v) if present else default)
        elif is_string(default):
            setattr(state, k, parse_as_bin(v) if present else default)
        elif k == 'prev_headers':
            setattr(state, k, [dict_to_prev_header(h) for h in v] if present else default)
    if executing_on_head:
        state.executing_on_head = True
    state.commit()
    state.changed = {}
    return state
def to_snapshot(self, root_only=False, no_prevblocks=False):
    """Serialize this state to a jsonable snapshot dict.

    root_only: emit only the state root (smaller, but requires the
        original DB to re-initialize) instead of a full account dump.
    no_prevblocks: omit prev_headers / recent_uncles history.
    """
    snapshot = {}
    if root_only:
        # Compact form: just the trie root
        snapshot["state_root"] = '0x' + encode_hex(self.trie.root_hash)
    else:
        # Full form: complete account dump
        snapshot["alloc"] = self.to_dict()
    # Non-state-root bookkeeping fields, typed by their defaults
    for key, default in STATE_DEFAULTS.items():
        default = copy.copy(default)
        value = getattr(self, key)
        if is_numeric(default):
            snapshot[key] = str(value)
        elif isinstance(default, (str, bytes)):
            snapshot[key] = '0x' + encode_hex(value)
        elif key == 'prev_headers' and not no_prevblocks:
            depth = self.config['PREV_HEADER_DEPTH']
            snapshot[key] = [prev_header_to_dict(h) for h in value[:depth]]
        elif key == 'recent_uncles' and not no_prevblocks:
            snapshot[key] = dict((str(blknum), ['0x' + encode_hex(h) for h in hashes])
                                 for blknum, hashes in value.items())
    return snapshot
def is_rain3(day): return 1 if 'RA' in day['codesum'] and \ is_numeric(day['preciptotal']) and 1.96<float(day['preciptotal'])<=3.94 else 0 def is_rain4(day): return 1 if 'RA' in day['codesum'] and \
def is_temp3(day):
    """1 if the temperature departure is present, numeric, and more than
    20 degrees from normal (in either direction), else 0."""
    if is_temp0(day):
        return 0
    dep = day['depart']
    return 1 if is_numeric(dep) and abs(float(dep)) > 20 else 0
def is_TS(day):
    """1 if a thunderstorm code ('TS') appears in the day's codesum, else 0."""
    return 1 if 'TS' in day['codesum'] else 0
def is_temp2(day): return 1 if not is_temp0(day) and is_numeric(day['depart']) and \ 10<abs(float(day['depart']))<=20 else 0 def is_temp3(day): return 1 if not is_temp0(day) and is_numeric(day['depart']) and \
def is_rain2(day): return 1 if 'RA' in day['codesum'] and \ is_numeric(day['preciptotal']) and 0.99<float(day['preciptotal'])<=1.96 else 0 def is_rain3(day): return 1 if 'RA' in day['codesum'] and \
def is_rain4(day): return 1 if 'RA' in day['codesum'] and \ is_numeric(day['preciptotal']) and float(day['preciptotal'])>3.94 else 0 def is_snow0(day): return 1 if 'RA' in day['codesum'] and \
def is_snow4(day):
    """1 if the day recorded snow ('SN' in codesum) with a numeric
    snowfall above 9.8, else 0."""
    if 'SN' not in day['codesum']:
        return 0
    fall = day['snowfall']
    return 1 if is_numeric(fall) and float(fall) > 9.8 else 0
def is_temp0(day):
    """1 if the temperature departure field carries the missing marker 'M',
    else 0."""
    return 1 if 'M' in day['depart'] else 0
def is_snow3(day): return 1 if 'SN' in day['codesum'] and \ is_numeric(day['snowfall']) and 7.9<float(day['snowfall'])<=9.8 else 0 def is_snow4(day): return 1 if 'SN' in day['codesum'] and \
def is_snow1(day): return 1 if 'SN' in day['codesum'] and \ is_numeric(day['snowfall']) and 2<float(day['snowfall'])<=5.9 else 0 def is_snow2(day): return 1 if 'SN' in day['codesum'] and \