def send_email_to_supervisors(sender, instance, **kwargs):
    """Signal handler: email supervisor users when a non-supervisor
    creates or modifies an entry that requires approval.

    Does nothing when the modifying user is a superuser or a supervisor.
    Also flags each notified supervisor's profile with
    ``is_approval_required = True`` so the pending state is visible.

    :param sender: signal sender (model class), unused.
    :param instance: the saved model instance; must have ``modified_by``.
    """
    modifier = instance.modified_by
    # Supervisors and superusers approve their own entries — no email needed.
    if modifier.is_superuser or (
            hasattr(modifier, 'profile') and modifier.profile.is_supervisor):
        return

    ctype = ContentType.objects.get_for_model(instance)
    model = ctype.model
    if model == 'accountledger':
        # Frontend URL slug for ledger entries differs from the model name.
        model = 'credit-transfer'

    # BUG FIX: the original left `group` unbound when app_label was neither
    # 'accounts' nor 'plats', raising NameError on the filter below.
    group = []
    if ctype.app_label == 'accounts':
        group = ['Finance']
    elif ctype.app_label == 'plats':
        group = ['Planning']

    users = User.objects.filter(
        Q(profile__is_supervisor=True) & Q(groups__name__in=group))

    for user in users:
        profile = Profile.objects.filter(user=user).first()
        # hasattr(None, ...) is False, so a missing profile is skipped safely.
        if hasattr(profile, 'is_approval_required') and not profile.is_approval_required:
            profile.is_approval_required = True
            profile.save()

    to_emails = list(users.values_list('email', flat=True))

    html_template = get_template('emails/supervisor_email.html')
    text_template = get_template('emails/supervisor_email.txt')

    subject = 'LFUCG Exactions Activity: New Entry Pending Approval'
    from_email = settings.DEFAULT_FROM_EMAIL

    context = {
        'baseURL': settings.BASE_URL,
        'model': model,
        'staticURL': settings.STATIC_URL,
        'id': instance.id,
    }

    html_content = html_template.render(context)
    text_content = text_template.render(context)

    msg = EmailMultiAlternatives(subject, text_content, from_email, to_emails)
    msg.attach_alternative(html_content, "text/html")
    msg.send()
def __getitem__(self, name):
    """Reflective name lookup used by the english-script interpreter.

    Resolution order: interpreter params -> interpreter variables ->
    locals -> globals -> Python builtins -> special-cased 'reverse' ->
    interpreter methods -> fall back to returning the name itself.
    NOTE(review): `the`, `__builtin__` and `nodes` are module-level
    globals not visible here — semantics assumed from usage.
    """
    import english_parser
    if name == "__tracebackhide__":
        return False  # for py.test
    print(("Reflector __getitem__ %s" % str(name)))
    if name in the.params:
        # Evaluate the parameter's expression through the parser.
        the.result = english_parser.do_evaluate(the.params[name])
    elif name in the.variables:
        the.result = english_parser.do_evaluate(the.variables[name].value)
    elif name in locals():
        return locals()[name]
    elif name in globals():
        return globals()[name]
    elif __builtin__.hasattr(__builtin__, name):  # name in __builtin__:
        return __builtin__.getattr(__builtin__, name)
    elif name == 'reverse':
        # Special case: expose list.reverse under its bare name.
        return list.reverse
    elif name in the.methods:
        m = the.methods[name]
        if isinstance(m, nodes.FunctionDef):
            raise Exception("%s must be declared or imported before!" % m)
        m = m.body  # INJECT!
        return m
    else:
        # Unknown identifier: return it verbatim rather than raising.
        print(("UNKNOWN ITEM %s" % name))
        return name  # kast.name(name)
        # raise Exception("UNKNOWN ITEM %s" % name)
    return the.result
def get_f(self, field_name=field_name):
    """Return the cached backing attribute ``_<field_name>`` when present,
    otherwise ``None`` (no lazy fetch in this variant)."""
    backing_name = "_" + field_name
    if hasattr(self, backing_name):
        return getattr(self, backing_name)
    return None
def test_methodStaticVariables(self) -> None:
    """Static variables attached through StaticInitor inside a method must
    appear as attributes of the function object after the first call."""
    class Bar:
        def foo(self):
            static = StaticInitor(Bar.foo)
            static.Var1 = lambda: first_expected
            static.Var2 = lambda: second_expected

    first_expected = 'StaticVar_1'
    second_expected = 10
    Bar().foo()
    self.assertTrue(builtins.hasattr(Bar.foo, 'Var1'))
    self.assertTrue(builtins.hasattr(Bar.foo, 'Var2'))
    self.assertEqual(Bar.foo.Var1, first_expected)
    self.assertEqual(Bar.foo.Var2, second_expected)
def get_f(self, field_name=field_name, session=session, s_id=s_id):
    """Lazily fetch the object behind ``_<field_name>`` from *session*
    (by *s_id*), caching the result on the instance for later calls."""
    backing_name = "_" + field_name
    if hasattr(self, backing_name):
        return getattr(self, backing_name)
    fetched = session.get_object(s_id)
    setattr(self, backing_name, fetched)
    return fetched
def get_f(self, field_name=field_name):
    """Lazily fetch the attribute from the remote plames client (keyed by
    the camelCase field name) and cache it under ``_<field_name>``."""
    backing_name = "_" + field_name
    if hasattr(self, backing_name):
        return getattr(self, backing_name)
    fetched = plames_client.request_attr(self._s_id, to_camel_case(field_name))
    setattr(self, backing_name, fetched)
    return fetched
def get_ipv6(self, obj):
    """Return a comma-separated string of the VM's global IPv6 addresses,
    or "-" when no NIC exposes one."""
    addresses = [nic.ip6_global
                 for nic in obj.template.nics
                 if hasattr(nic, 'ip6_global')]
    return ', '.join(addresses) if addresses else '-'
def __init__(self, deviceID, report_id, dtstart, duration, uid, confidence,
             accuracy, dataQuality, value):
    """Store one report interval row; camelCase parameters are mapped to
    snake_case attributes.

    account_id / aggregator_id are read from the client certificate on the
    (framework-global) `request` object when present, else set to None.
    """
    self.device_id = deviceID
    self.report_id = report_id
    self.dtstart = dtstart
    self.duration = duration
    self.uid = uid
    self.confidence = confidence
    self.accuracy = accuracy
    self.data_quality = dataQuality
    self.value = value
    try:
        # X.509 subject fields: CN = account, O = aggregator (organization).
        # NOTE(review): `request` is not defined in this scope; the bare
        # except below also absorbs a NameError if it is missing entirely.
        self.account_id = request.cert['CN'] if hasattr(
            request, "cert") and 'CN' in request.cert else None
        self.aggregator_id = request.cert['O'] if hasattr(
            request, "cert") and 'O' in request.cert else None
    except:
        self.account_id = None
        self.aggregator_id = None
def test_functionStaticVariables(self) -> None:
    """A static variable registered via StaticInitor must become an
    attribute of the function object after its first invocation."""
    def foo() -> None:
        static = StaticInitor(foo)
        static.Var1 = lambda: expected_value

    expected_value = 'StaticVar_1'
    foo()
    self.assertTrue(builtins.hasattr(foo, 'Var1'))
    self.assertEqual(foo.Var1, expected_value)
def write_fields(output, _object, only_changes=False, session=None):
    """Serialize the fields of *_object* to *output*.

    When *only_changes* is true and the object tracks changed fields, only
    those are written; otherwise all non-transient fields are written.
    Field names are emitted in camelCase, each followed by its value.

    BUG FIX: the original called ``vars_names.remove(var_name)`` while
    iterating ``vars_names``, which skips the element following every
    removal (and mutates the shared ``__changed_vars`` list when aliased).
    Filtering into a new list is both correct and side-effect free.
    """
    # Lazily cache the per-class field-type mapping on the object.
    if not hasattr(_object, "__types"):
        _object.__types = class_type_utils.get_class_fields_types(_object.class_java_name)
    vars_types = _object.__types
    if only_changes and hasattr(_object, "__changed_vars"):
        changed_vars = _object.__changed_vars
        write_int(output, len(changed_vars))
        for var_name in changed_vars:
            # Changed fields are stored on their "_"-prefixed backing slot.
            var = getattr(_object, "_" + var_name)
            write_utf8(output, to_camel_case(var_name))
            write_data(output, var, session, vars_types[to_camel_case(var_name)])
    else:
        if hasattr(_object, "__changed_vars"):
            vars_names = _object.__changed_vars
        else:
            vars_names = list(_object.__dict__.keys())
        # Exclude transient fields without mutating during iteration.
        vars_names = [v for v in vars_names if v not in transient_fields]
        write_int(output, len(vars_names))
        for var_name in vars_names:
            if hasattr(_object, "__changed_vars"):
                var = getattr(_object, "_" + var_name)
            else:
                var = getattr(_object, var_name)
            write_utf8(output, to_camel_case(var_name))
            write_data(output, var, session, vars_types[to_camel_case(var_name)])
def write_object(output, _object, session=None):
    """Serialize *_object*: class name, server id (-1 when unassigned),
    then only its changed fields; clears the dirty flag afterwards."""
    write_utf8(output, _object.class_java_name)
    # getattr with a default collapses the original hasattr/else branch.
    write_int(output, getattr(_object, "_s_id", -1))
    write_fields(output, _object, True, session)
    _object._dirty = False
def get_group_permissions(self, obj=None):
    """
    Returns a list of permission strings that this user has through their
    groups. This method queries all available auth backends. If an object
    is passed in, only permissions matching this object are returned.
    """
    permissions = set()
    for backend in auth.get_backends():
        # Skip backends that do not implement group permissions.
        if not hasattr(backend, "get_group_permissions"):
            continue
        permissions.update(backend.get_group_permissions(self, obj))
    return permissions
def _user_has_module_perms(user, app_label):
    """
    A backend can raise `PermissionDenied` to short-circuit permission
    checking: the first backend that raises it denies access outright.
    """
    for backend in auth.get_backends():
        if hasattr(backend, 'has_module_perms'):
            try:
                if backend.has_module_perms(user, app_label):
                    return True
            except PermissionDenied:
                return False
    return False
def normalize_one_element(element):
    """:return: a ColumnProperties instance

    Normalize *element* — either a bare column-name string or an existing
    ColumnProperties — into a ColumnProperties tied to the decorated
    function.  NOTE(review): `fun` is a closure variable from the
    enclosing scope; assumed to be the function being decorated.
    """
    if isinstance(element, str):
        # Bare name: build fresh properties around the decorated function.
        cp = ColumnProperties(name=element, fun=fun)
    elif isinstance(element, ColumnProperties):
        cp = copy.copy(element)
        # Adopt `fun` only when the copied properties carry none of their own.
        cp.fun = fun if cp.fun is None else cp.fun
        if not hasattr(fun, "_redefines_column"):
            # Unless explicitly redefining the column, the bound function
            # must be the decorated one (or absent).
            assert cp.fun is None or cp.fun is fun, f"{cp.fun}, {fun}"
    else:
        raise TypeError(type(element))
    return cp
def sub_cb(topic, msg):
    """MQTT subscribe callback: dispatch the last segment of *topic* to a
    same-named method on the global `opt` object, passing the payload,
    then echo the payload back on ``<topic>State``."""
    global c
    print((topic, msg))
    try:
        # Extract the method name (last segment of the topic path).
        fun = topic.decode().split('/')[-1]
        print(fun)
        # Look up and invoke the handler method on `opt`.
        if hasattr(opt, fun):
            fun = getattr(opt, fun)
            fun(msg)
            # Acknowledge by publishing the payload on the "<topic>State" topic.
            c.publish(topic + b"State", msg)
    except Exception as e:
        print(e)
def get_ipv4(self, obj):
    """
    Get the IPv4s from the given VM
    :param obj: The VM in contention
    :return: Returns csv string of all IPv4s added to this VM otherwise
        returns "-" if no IPv4 is available
    """
    addresses = [nic.ip for nic in obj.template.nics if hasattr(nic, 'ip')]
    return ', '.join(addresses) if addresses else '-'
def __parse_cnc_server(self, message, line, report):
    """Build a 'c2server' event from a classic C&C message; raises
    ValueError for any other cnctype. Accuracy is lowered when the
    message type is unknown and the bot is configured for it."""
    if message['cnctype'] != 'classic_cnc':
        raise ValueError('Unable to create cnc event due to '
                         'unsupported cnctype %s.' % message['cnctype'])
    event = self.__new_event(message, line, report)
    cnc = message['cnc']
    event.add('classification.type', 'c2server')
    event.add('classification.taxonomy', 'malicious code')
    event.add('source.fqdn', cnc['domain'])
    event.add('source.ip', cnc['ip'])
    event.add('source.port', cnc['port'])
    unknown_type = message['messagetype'] != 'cnc_message'
    if unknown_type and hasattr(self.parameters, 'unknown_messagetype_accuracy'):
        event.add('feed.accuracy',
                  self.parameters.unknown_messagetype_accuracy,
                  overwrite=True)
    return event
def _packet_in_handler(self, ev):
    """OpenFlow packet-in handler: learn the source MAC -> switch-port
    mapping, then forward the packet to the learned port for the
    destination MAC, flooding when it is unknown.

    (Original Japanese comments translated to English.)
    """
    # Extract source/destination addresses and payload from the packet-in.
    msg = ev.msg
    dp = Dp_obj(msg)
    datapath = dp.datapath
    dpid = dp.dpid
    ofproto = dp.ofproto
    parser = dp.parser
    # Switch ingress port.
    in_port = dp.in_port
    # Record the mapping between source MAC and ingress switch port.
    self.mac_to_port.setdefault(dpid, {})
    pkt = packet.Packet(msg.data)
    if not pkt:
        self.logger.info("ERROR::{}".format(pkt))
    # if self.__DEBUG_MODE__:
    #     self.logger.info("packet-in {}".format(pkt))
    # BUG FIX: the original built the dict with (hasattr(p.protocol_name), p);
    # hasattr() requires two arguments, so that raised TypeError — the key
    # must be the protocol name itself.
    header_list = dict((p.protocol_name, p)
                       for p in pkt.protocols if type(p) != str)
    # Transport to dst
    self.logger.info("Header::{}".format(header_list))
    src_eth = header_list[ETHERNET].src
    dst_eth = header_list[ETHERNET].dst
    # Is the destination MAC already known on this switch?
    if dst_eth in self.mac_to_port[dpid]:
        # Use the learned output port.
        out_port = self.mac_to_port[dpid][dst_eth]
    else:
        # Unknown destination: flood.
        out_port = ofproto.OFPP_FLOOD
    actions = [parser.OFPActionOutput(out_port)]
    out = parser.OFPPacketOut(datapath=datapath, buffer_id=msg.buffer_id,
                              in_port=in_port, actions=actions, data=msg.data)
    datapath.send_msg(out)
    return
def set_approval(sender, instance, **kwargs):
    """Signal handler: auto-approve entries made by superusers or
    supervisors and clear the pending-approval flag on the relevant
    supervisors' profiles; otherwise mark the entry unapproved.

    :returns: the (mutated) instance.
    """
    modifier = instance.modified_by
    if modifier.is_superuser or (
            hasattr(modifier, 'profile') and modifier.profile.is_supervisor):
        instance.is_approved = True
        ctype = ContentType.objects.get_for_model(instance)
        # BUG FIX: the original left `group` unbound when app_label was
        # neither 'accounts' nor 'plats', raising NameError below.
        group = []
        if ctype.app_label == 'accounts':
            group = ['Finance']
        elif ctype.app_label == 'plats':
            group = ['Planning']
        users = User.objects.filter(
            Q(profile__is_supervisor=True) & Q(groups__name__in=group))
        for user in users:
            profile = Profile.objects.filter(user=user).first()
            # Guard against a user row without a matching profile.
            if profile is not None:
                profile.is_approval_required = False
                profile.save()
    else:
        instance.is_approved = False
    return instance
def __new__(cls, name, bases, dct):
    """Metaclass constructor for ATable subclasses.

    Deduplicates bases, merges inherited column definitions into the new
    subclass, warns when a subclass silently overrides a column function
    without re-decorating it, and finally registers all column functions
    that were declared (decorated or ``column_*``-named) before the
    subclass object existed.
    """
    if MetaTable in bases:
        raise SyntaxError(f"Use ATable, not MetaTable, for subclassing.")
    # Deduplicate bases, ensuring each has a column_to_properties mapping.
    unique_bases = []
    for base in bases:
        if base in unique_bases:
            continue
        # if any(issubclass(base, seen) for seen in unique_bases):
        #     continue
        try:
            _ = base.column_to_properties
        except AttributeError:
            base.column_to_properties = collections.OrderedDict()
        unique_bases.append(base)
    bases = tuple(unique_bases)
    dct.setdefault("column_to_properties", collections.OrderedDict())
    subclass = super().__new__(cls, name, bases, dct)
    for base in bases:
        try:
            # It is ok to update keys later decorated in the
            # subclasses. That happens after metacreation,
            # therefore overwrites the following updates
            subclass.column_to_properties.update(base.column_to_properties)
        except AttributeError:
            pass
    # Make sure that subclasses do not re-use a base class column
    # function name without it being decorated as column function
    # (unexpected behavior)
    for column, properties in subclass.column_to_properties.items():
        try:
            defining_class_name = get_class_that_defined_method(properties.fun).__name__
        except AttributeError:
            defining_class_name = None
        if defining_class_name != subclass.__name__:
            ctp_fun = properties.fun
            try:
                sc_fun = getattr(subclass, properties.fun.__name__)
            except AttributeError:
                # Not overwritten, nothing else to check here
                continue
            if ctp_fun != sc_fun:
                if get_defining_class_name(ctp_fun) != get_defining_class_name(sc_fun):
                    if hasattr(sc_fun, "_redefines_column"):
                        # Explicitly marked redefinition: rebind properties.
                        properties = copy.copy(properties)
                        properties.fun = ATable.build_column_name_wrapper(
                            fun=sc_fun, column_properties=properties)
                        subclass.column_to_properties[column] = properties
                    else:
                        print(f"[W]arning: {defining_class_name}'s subclass {subclass.__name__} "
                              f"overwrites method {properties.fun.__name__}, "
                              f"but it does not decorate it with @atable.column_function "
                              f"for column {column}. "
                              f"The method from class {defining_class_name} will be used to fill "
                              f"the table's column {column}. "
                              f"Consider decorating the function "
                              f"with the same @atable.column_function as the base class, "
                              f"or simply with @atable.redefines_column to maintain the same "
                              f"difinition")
    # Add pending decorated and column_* methods (declared as columns before subclass existed)
    inherited_classname_fun_columnproperties_kwargs = [
        t for t in cls.pendingdefs_classname_fun_columnproperties_kwargs
        if t[0] != subclass.__name__]
    decorated_classname_fun_columnproperties_kwargs = [
        t for t in cls.pendingdefs_classname_fun_columnproperties_kwargs
        if t[0] == subclass.__name__]
    for classname, fun, cp, kwargs in inherited_classname_fun_columnproperties_kwargs:
        ATable.add_column_function(cls=subclass, column_properties=cp, fun=fun, **kwargs)
    # Column-defining functions are added to a list while the class is being defined.
    # After that, the subclass' column_to_properties attribute is updated according
    # to the column definitions.
    funname_to_pending_entry = {t[1].__name__: t
                                for t in decorated_classname_fun_columnproperties_kwargs}
    for fun in (f for f in subclass.__dict__.values() if inspect.isfunction(f)):
        try:
            # Add decorated function
            classname, fun, cp, kwargs = funname_to_pending_entry[fun.__name__]
            ATable.add_column_function(cls=subclass, column_properties=cp, fun=fun, **kwargs)
            del funname_to_pending_entry[fun.__name__]
        except KeyError:
            # Not pending: it may still be an auto-column via its name.
            assert all(cp.fun is not fun for cp in subclass.column_to_properties.values())
            if not fun.__name__.startswith("column_"):
                continue
            column_name = fun.__name__[len("column_"):]
            if not column_name:
                raise SyntaxError(f"Function name '{fun.__name__}' not allowed in ATable subclasses")
            wrapper = get_auto_column_wrapper(fun=fun)
            cp = ColumnProperties(name=column_name, fun=wrapper)
            ATable.add_column_function(cls=subclass, column_properties=cp, fun=wrapper)
    assert len(funname_to_pending_entry) == 0, (subclass, funname_to_pending_entry)
    cls.pendingdefs_classname_fun_columnproperties_kwargs.clear()
    return subclass
def __new__(cls, name, bases, dct):
    """Metaclass constructor (simpler variant): merge inherited column
    definitions into the subclass, warn on silently overridden column
    functions, and register any column functions decorated before the
    subclass object existed."""
    assert MetaTable not in bases, f"Use ATable, not MetaTable, for subclassing"
    # Ensure every base carries a column_to_properties mapping.
    for base in bases:
        try:
            _ = base.column_to_properties
        except AttributeError:
            base.column_to_properties = collections.OrderedDict()
    dct.setdefault("column_to_properties", collections.OrderedDict())
    subclass = super().__new__(cls, name, bases, dct)
    for base in bases:
        try:
            # It is ok to update keys later decorated in the
            # subclasses. That happens after metacreation,
            # therefore overwrites the following updates
            subclass.column_to_properties.update(base.column_to_properties)
        except AttributeError:
            pass
    # Make sure that subclasses do not re-use a base class column
    # function name without it being decorated as column function
    # (unexpected behavior)
    for column, properties in subclass.column_to_properties.items():
        defining_class_name = get_class_that_defined_method(
            properties.fun).__name__
        if defining_class_name != subclass.__name__:
            ctp_fun = properties.fun
            sc_fun = getattr(subclass, properties.fun.__name__)
            if ctp_fun != sc_fun:
                if get_defining_class_name(
                        ctp_fun) != get_defining_class_name(sc_fun):
                    if hasattr(sc_fun, "_redefines_column"):
                        # Explicitly marked redefinition: rebind properties.
                        properties = copy.copy(properties)
                        properties.fun = ATable.build_column_name_wrapper(
                            fun=sc_fun, column_properties=properties)
                        subclass.column_to_properties[column] = properties
                    else:
                        print(
                            f"[W]arning: {defining_class_name}'s subclass {subclass.__name__} "
                            f"overwrites method {properties.fun.__name__}, "
                            f"but it does not decorate it with @atable.column_function "
                            f"for column {column}. "
                            f"The method from class {defining_class_name} will be used to fill "
                            f"the table's column {column}. "
                            f"Consider decorating the function "
                            f"with the same @atable.column_function as the base class, "
                            f"or simply with @atable.redefines_column to maintain the same "
                            f"difinition")
    # Add pending methods (declared as columns before subclass existed)
    for classname, fun, cp, kwargs in cls.pendingdefs_classname_fun_columnproperties_kwargs:
        if classname != name:
            raise SyntaxError(
                f"Not expected to find a decorated function {fun.__name__}, "
                f"classname={classname} when defining {name}.")
        ATable.add_column_function(cls=subclass, column_properties=cp, fun=fun, **kwargs)
    cls.pendingdefs_classname_fun_columnproperties_kwargs.clear()
    return subclass
return i int = r(_.int, lambda *a: _.int(*a) - 1) float = r(_.float, lambda v: _.float(v) + 0.001) str = r(_.str, lambda *a, **k: _.str(*a, **k)[::-1]) bool = r(_.bool, lambda v: not (_.bool(v))) len = r(_.len, lambda v: _.len(v) - 1) ord = r(_.ord, lambda v: _.ord(v.lower() if v.isupper() else v.upper())) abs = r(_.abs, lambda v: -_.abs(v)) pow = r(_.pow, lambda v, p, *a: _.pow(v, p + 1, *a)) min = r(_.min, lambda *a: _.max(*a)) max = r(_.max, lambda *a: _.min(*a)) sum = r(_.sum, lambda v, *a: reduce(op.__sub__, v)) hasattr = r(_.hasattr, lambda o, n: not (_.hasattr(o, n))) sorted = r(_.sorted, lambda *a, **k: list(_.reversed(*a, **k))) reversed = r(_.reversed, lambda v: _.sorted(v)) enumerate = r(_.enumerate, lambda v, *a: ((i + 1, _v) for i, _v in _.enumerate(v, *a))) globals = r(_.globals, locals) locals = r(_.locals, _.globals) id = r(_.id, lambda v: _.id(_.id)) help = r(_.help, lambda v: 'halp') exit = r(_.exit, print)
def hasattr(obj, name):
    '''Replacement for the built-in :func:`hasattr() <python:hasattr>`
    function; simply delegates to the original builtin.'''
    result = builtins.hasattr(obj, name)
    return result
def shape(self) -> Tuple[int, int]:
    """:return: (height, width)"""
    # Prefer the concrete tile grid when it has been materialized.
    if hasattr(self, 'tiles'):
        return self.tiles.shape
    # Otherwise derive the dimensions from the two hint lists.
    first_hints, second_hints = self.hints[0], self.hints[1]
    return len(first_hints), len(second_hints)
lambda *args, **kwargs: wrap(builtins.frozenset)(*args, **kwargs), builtins.frozenset) getattr = functools.update_wrapper( lambda *args, **kwargs: builtins.getattr(*args, **kwargs), builtins.getattr) getattr._ = functools.update_wrapper( lambda *args, **kwargs: wrap(builtins.getattr)(*args, **kwargs), builtins.getattr) globals = functools.update_wrapper( lambda *args, **kwargs: builtins.globals(*args, **kwargs), builtins.globals) globals._ = functools.update_wrapper( lambda *args, **kwargs: wrap(builtins.globals)(*args, **kwargs), builtins.globals) hasattr = functools.update_wrapper( lambda *args, **kwargs: builtins.hasattr(*args, **kwargs), builtins.hasattr) hasattr._ = functools.update_wrapper( lambda *args, **kwargs: wrap(builtins.hasattr)(*args, **kwargs), builtins.hasattr) hash = functools.update_wrapper( lambda *args, **kwargs: builtins.hash(*args, **kwargs), builtins.hash) hash._ = functools.update_wrapper( lambda *args, **kwargs: wrap(builtins.hash)(*args, **kwargs), builtins.hash) hex = functools.update_wrapper( lambda *args, **kwargs: builtins.hex(*args, **kwargs), builtins.hex) hex._ = functools.update_wrapper( lambda *args, **kwargs: wrap(builtins.hex)(*args, **kwargs), builtins.hex) id = functools.update_wrapper( lambda *args, **kwargs: builtins.id(*args, **kwargs), builtins.id)
def hasattr(obj, members):
    """Dotted-path variant of hasattr: True iff *obj* exposes the full
    attribute chain in *members* (e.g. ``"a.b.c"``)."""
    current = obj
    for part in members.split('.'):
        if not builtins.hasattr(current, part):
            return False
        current = getattr(current, part)
    return True
def parse(self, oadrReport):
    """Parse an OADR report XML element.

    Persists the report header, collects each interval's value, fans the
    values out to the Hypertech service on a background thread, updates
    device status documents for fresh readings, and bulk-inserts
    per-metric historics into Mongo.

    NOTE(review): relies on many module globals (`request`, `NAMESPACES`,
    `status_mapping`, model helpers); behavior documented from the visible
    code only. Reconstructed from whitespace-mangled source — the interval
    dict entries are kept active because the DataFrame code below reads
    `device_id`, `dtstart` and `value` columns.
    """
    report = get_report_models()
    # --- report-level header fields ---
    dt_start = oadrReport.find(".//xcal:dtstart", namespaces=NAMESPACES)
    duration = oadrReport.find(".//xcal:duration", namespaces=NAMESPACES)
    reportID_p = oadrReport.find(".//ei:eiReportID", namespaces=NAMESPACES).text
    reportRequestID_p = oadrReport.find(".//ei:reportRequestID", namespaces=NAMESPACES).text
    specifierID = oadrReport.find(".//ei:reportSpecifierID", namespaces=NAMESPACES).text
    createdDateTime = oadrReport.find(".//ei:createdDateTime", namespaces=NAMESPACES).text
    duration_p = duration.find(
        ".//xcal:date-time", namespaces=NAMESPACES).text if duration.find(
        ".//xcal:date-time", namespaces=NAMESPACES) is not None else ""
    dt_start_p = dt_start.find(
        ".//xcal:date-time", namespaces=NAMESPACES).text if dt_start.find(
        ".//xcal:date-time", namespaces=NAMESPACES) is not None else ""
    r = report(dt_start_p, duration_p, reportID_p, reportRequestID_p,
               specifierID, createdDateTime)
    print("report_created")
    r.save()
    report_id = r._id
    intervals = oadrReport.find(".//strm:intervals", namespaces=NAMESPACES)
    hypertech_data = []
    mongo_data = {}
    exception = None
    errors = []
    try:
        # X.509 subject fields: CN = account, O = aggregator.
        account_id = request.cert['CN'] if hasattr(
            request, "cert") and 'CN' in request.cert else None
        aggregator_id = request.cert['O'] if hasattr(
            request, "cert") and 'O' in request.cert else None
    except:
        account_id = None
        aggregator_id = None
    for interval in intervals.findall(".//ei:interval", namespaces=NAMESPACES):
        # We will only update the status to the Device endpoint
        dt_start = interval.find(".//xcal:dtstart", namespaces=NAMESPACES)
        duration = interval.find(".//xcal:duration", namespaces=NAMESPACES)
        uid = interval.find(".//xcal:uid", namespaces=NAMESPACES)
        rid_i = interval.find(".//ei:rID", namespaces=NAMESPACES).text
        confidence = interval.find(".//ei:confidence", namespaces=NAMESPACES)
        accuracy = interval.find(".//ei:accuracy", namespaces=NAMESPACES)
        dataQuality = interval.find(".//ei:dataQuality", namespaces=NAMESPACES)
        value_i = interval.find(".//oadr:oadrCurrent", namespaces=NAMESPACES).text
        duration_i = duration.find(
            ".//xcal:date-time", namespaces=NAMESPACES).text if duration.find(
            ".//xcal:date-time", namespaces=NAMESPACES) is not None else ""
        dt_start_i = dt_start.find(
            ".//xcal:date-time", namespaces=NAMESPACES).text if dt_start.find(
            ".//xcal:date-time", namespaces=NAMESPACES) is not None else ""
        # NOTE(review): the guard below tests dt_start rather than uid —
        # looks like a copy-paste slip; preserved in this doc-only pass.
        uid_i = uid.find(
            ".//xcal:text", namespaces=NAMESPACES).text if dt_start.find(
            ".//xcal:text", namespaces=NAMESPACES) is not None else ""
        confidence_i = confidence.text if confidence is not None else ""
        accuracy_i = accuracy.text if accuracy is not None else ""
        dataQuality_i = dataQuality.text if dataQuality is not None else ""
        hypertech_data.append({
            "rid": rid_i,
            "value": value_i,
            "dt": dt_start_i
        })
        phisical_device, pdn, groupID, spaces, load, ln, metric = parse_rid(
            rid_i)
        # Only metrics with a known status mapping are persisted.
        if metric not in status_mapping.keys():
            continue
        # NOTE: the name `json` shadows the stdlib module within this scope.
        json = {
            "report_id": report_id,
            "dtstart": dt_start_i,
            "duration": duration_i,
            "uid": uid_i,
            "confidence": confidence_i,
            "accuracy": accuracy_i,
            "data_quality": dataQuality_i,
            "value": value_i,
            "device_id": get_id_from_rid(rid_i),
            "account_id": account_id,
            "aggregator_id": aggregator_id,
            "_updated_at": datetime.utcnow(),
            "_created_at": datetime.utcnow()
        }
        try:
            mongo_data[metric].append(json)
        except:
            mongo_data[metric] = [json]
    # Ship the collected values to Hypertech without blocking the parse.
    send_thread = threading.Thread(target=hypertech_send,
                                   args=(hypertech_data, ))
    send_thread.start()
    for metric, data in mongo_data.items():
        df = pd.DataFrame.from_records(data)
        id_mappings = {}
        device_mappings = {}
        rids = df.device_id.unique()
        for rid in rids:
            mapping = map_rid_device_id.find_one(
                {map_rid_device_id.rid(): rid})
            if mapping:
                id_mappings[rid] = mapping.device_id
                device_mappings[mapping.device_id] = Device.find_one(
                    {Device.device_id(): mapping.device_id})
            else:
                # Unknown rid: remember it and map to None so the row is
                # dropped below.
                errors.append(rid)
                id_mappings[rid] = None
                device_mappings[rid] = None

        def get_anonimized_id(rid):
            try:
                return id_mappings[rid]
            except:
                return None
        df.device_id = df.device_id.apply(get_anonimized_id)
        df = df.dropna(subset=['device_id'])
        # calculate if some recent changes for the device
        now = datetime.utcnow() - timedelta(minutes=1)
        df2 = df[pd.to_datetime(df.dtstart) > now]
        app.logger.debug("*****************************")
        if not df2.empty:
            grouped = df2.groupby("device_id")
            for device_id, g in grouped:
                # Latest reading within the recent window wins.
                max_value = g.loc[g.dtstart == g.dtstart.max()]
                device = device_mappings[device_id]
                if device:
                    cvalue = max_value.value.values[0]
                    device.status[status_mapping[metric]].update(
                        {"value": cvalue})
                    device.save()
        # save all historics
        TMP = get_data_model(
            convert_snake_case("{}_{}".format("status", status_mapping[metric])))
        upload_data = df.to_dict(orient="records")
        TMP.__mongo__.insert_many(upload_data)
def create(self, request):
    """Create account-ledger entries.

    Three paths: a 'lot' payload creates a single ledger entry; a 'plat'
    payload splits the submitted credits evenly across the plat's lots and
    creates one entry per lot; anything else creates a plain entry.

    BUG FIX: the original computed each per-lot share with
    ``hasattr(data_set, 'roads')`` etc. — ``data_set`` is a dict, so
    hasattr never matched a key and every share was silently 0. The
    correct check is dict key membership.
    """
    data_set = request.data
    data_set['created_by'] = self.request.user.id
    data_set['modified_by'] = self.request.user.id

    if 'lot' in self.request.data:
        serializer = AccountLedgerSerializer(data=data_set)
        if serializer.is_valid(raise_exception=True):
            self.perform_create(serializer)
        return Response(serializer.data)
    elif 'plat' in self.request.data:
        chosen_plat = self.request.data['plat']
        plat_set = Plat.objects.filter(id=chosen_plat)

        non_sewer_credits_per_lot = 0
        sewer_credits_per_lot = 0
        roads_per_lot = 0
        parks_per_lot = 0
        storm_per_lot = 0
        open_space_per_lot = 0
        sewer_trans_per_lot = 0
        sewer_cap_per_lot = 0

        if plat_set.exists():
            buildable_lots = plat_set[0].buildable_lots
            if buildable_lots == 0:
                return Response(
                    'There are no buildable lots for assignment.',
                    status=status.HTTP_400_BAD_REQUEST)

            def _per_lot(key):
                # Even split of the submitted credit across buildable lots;
                # 0 when the field was not submitted at all.
                if key in data_set:
                    return round(float(data_set[key]) / buildable_lots, 2)
                return 0

            try:
                non_sewer_credits_per_lot = _per_lot('non_sewer_credits')
                sewer_credits_per_lot = _per_lot('sewer_credits')
                roads_per_lot = _per_lot('roads')
                parks_per_lot = _per_lot('parks')
                storm_per_lot = _per_lot('storm')
                open_space_per_lot = _per_lot('open_space')
                sewer_trans_per_lot = _per_lot('sewer_trans')
                sewer_cap_per_lot = _per_lot('sewer_cap')
            except Exception:
                # Non-numeric credit value submitted.
                return Response('Invalid credit entry',
                                status=status.HTTP_400_BAD_REQUEST)

        chosen_lots = Lot.objects.filter(plat=chosen_plat)
        if chosen_lots.exists():
            for lot in chosen_lots:
                data_set['lot'] = lot.id
                data_set['non_sewer_credits'] = non_sewer_credits_per_lot
                data_set['sewer_credits'] = sewer_credits_per_lot
                data_set['roads'] = roads_per_lot
                data_set['parks'] = parks_per_lot
                data_set['storm'] = storm_per_lot
                data_set['open_space'] = open_space_per_lot
                data_set['sewer_trans'] = sewer_trans_per_lot
                data_set['sewer_cap'] = sewer_cap_per_lot
                serializer = AccountLedgerSerializer(data=data_set)
                if serializer.is_valid(raise_exception=True):
                    self.perform_create(serializer)
        return Response('Success')
    else:
        serializer = AccountLedgerSerializer(data=data_set)
        if serializer.is_valid(raise_exception=True):
            self.perform_create(serializer)
        return Response(serializer.data)
from builtins import hasattr, any
from django.http import HttpResponseRedirect
from django.conf import settings
from re import compile

# URL patterns exempt from the login requirement: always the login page
# itself, plus any patterns listed in settings.LOGIN_EXEMPT_URLS.
EXEMPT_URLS = [compile(settings.LOGIN_URL.lstrip('/'))]
if hasattr(settings, 'LOGIN_EXEMPT_URLS'):
    EXEMPT_URLS += [compile(expr) for expr in settings.LOGIN_EXEMPT_URLS]


class LoginRequiredMiddleware:
    """Middleware that redirects anonymous users to the login page for
    every URL not matched by EXEMPT_URLS."""

    def __init__(self, get_response):
        self.get_response = get_response
        # One-time configuration and initialization.

    def __call__(self, request):
        # Code to be executed for each request before
        # the view (and later middleware) are called.
        # NOTE(review): the view runs before the auth check here, so the
        # redirect only replaces the response afterwards — confirm intent.
        response = self.get_response(request)
        # NOTE(review): is_authenticated is a property (not callable) on
        # Django >= 1.10; this call form only works on older versions.
        if not request.user.is_authenticated():
            path = request.path_info.lstrip('/')
            if not any(m.match(path) for m in EXEMPT_URLS):
                return HttpResponseRedirect(settings.LOGIN_URL)
        return response
return inner int = rroulette(_.int, lambda v: _.int(v) - 1) float = rroulette(_.float, lambda v: _.float(v) + 0.001) str = rroulette(_.str, lambda v: _.str(v)[::-1]) bool = rroulette(_.bool, lambda v: not (_.bool(v))) len = rroulette(_.len, lambda v: _.len(v) - 1) ord = rroulette(_.ord, lambda v: _.ord(v.lower() if v.isupper() else v.upper())) abs = rroulette(_.abs, lambda v: -_.abs(v)) pow = rroulette(_.pow, lambda v, p: _.pow(v, p + 1)) min = rroulette(_.min, lambda *v: _.max(*v)) max = rroulette(_.max, lambda *v: _.min(*v)) sum = rroulette(_.sum, lambda v: reduce(op.__sub__, v)) hasattr = rroulette(_.hasattr, lambda o, n: not (_.hasattr(o, n))) sorted = rroulette(_.sorted, lambda v: _.reversed(v)) reversed = rroulette(_.reversed, lambda v: _.sorted(v)) enumerate = rroulette(_.enumerate, lambda v: ((i + 1, _v) for i, _v in _.enumerate(v))) globals = rroulette(_.globals, locals) locals = rroulette(_.locals, _.globals) id = rroulette(_.id, lambda v: _.id(_.id)) help = rroulette(_.help, lambda v: 'halp') exit = rroulette(_.exit, print)
def resource_path(relative_path):
    """Resolve *relative_path* against the PyInstaller extraction dir
    (``sys._MEIPASS``) when running from a bundle, otherwise against the
    current working directory."""
    base_dir = getattr(sys, '_MEIPASS', os.path.abspath("."))
    return os.path.join(base_dir, relative_path)