def _send_mail(
    scans: typing.Dict,
    threshold: int,
    email_recipients,
    routes: checkmarx.client.CheckmarxRoutes,
):
    """Assemble a checkmarx vulnerability report mail and send it.

    Failures while resolving config or sending are logged as warnings and
    never propagated to the caller (best-effort notification).
    """
    mail_body = checkmarx.util.assemble_mail_body(
        scans_above_threshold=scans.get(scans_above_threshold_const),
        scans_below_threshold=scans.get(scans_below_threshold_const),
        failed_components=scans.get(failed_components_const),
        threshold=threshold,
        routes=routes,
    )
    try:
        # resolve the standard config set to obtain the e-mail configuration
        cfg_factory = ci.util.ctx().cfg_factory()
        cfg_set = cfg_factory.cfg_set(ci.util.current_config_set_name())

        mailutil._send_mail(
            email_cfg=cfg_set.email(),
            recipients=email_recipients,
            mail_template=mail_body,
            subject='[Action Required] checkmarx vulnerability report',
            mimetype='html',
        )
        ci.util.info('sent notification emails to: ' + ','.join(email_recipients))
    except Exception:
        traceback.print_exc()
        ci.util.warning('error whilst trying to send notification-mail')
def validate_options(self, raw_options: DictType) -> bool:
    """
    At this point, raw_options is basically taken directly from the Command kwargs

    raw_options *can* be modified in place

    Raise errors/warnings based on the initial kwarg values; implement in each class
    """
    if raw_options.get("color_list") and raw_options.get("color_scheme"):
        raise ConflictingArgs(
            "--color-list and --color-scheme cannot both be specified.")

    if raw_options.get("color_scheme"):
        if not raw_options.get("colorvar"):
            # a color scheme is meaningless without a column to color by;
            # (fix: plain string — previous f-string had no placeholders)
            self.warnings.append(
                "--colorvar was not specified, so --color-scheme is ignored."
            )
        else:
            cs = raw_options.get("color_scheme")
            if not self.validate_color_scheme(cs):
                self.warnings.append(
                    f"Using default color scheme because --color-scheme argument '{cs}' does not seem to be a valid color scheme. Run `csvviz info colorschemes` to get a list of valid color schemes."
                )
                # drop the invalid scheme so downstream code uses the default
                raw_options.pop("color_scheme")
    return True
def is_data_consistent(cls, data: typing.Dict, raspberry_pi: RaspberryPi, ec2_instance: EC2Instance) -> bool:
    'Check if cache data is consistent against current DB state'
    # mandatory fields: a malformed cache entry raises KeyError, by design
    wrong_password = data['wrong_password']
    lead_status = data['lead_status']

    # guard clauses: any disagreement with the DB means the cache is stale
    if not data.get('created'):
        return False
    if data.get('restart_required', False) != raspberry_pi.restart_required:
        return False
    if data.get('new_config_required', False) != raspberry_pi.new_config_required:
        return False
    if raspberry_pi.is_proxy_tunnel and data.get('reported_hostname') != raspberry_pi.proxy_hostname:
        return False
    if not cls.is_ec2_instance_data_consistent(data, ec2_instance):
        return False

    lead = raspberry_pi.get_lead()
    if lead and (lead_status != lead.status or wrong_password != lead.is_wrong_password()):
        return False
    return True
def barChart(size: typing.Tuple[float, float, int], data: typing.Dict, output: typing.BinaryIO) -> None:
    """Render a stacked bar chart as a transparent PNG into ``output``."""
    xs = data['x']
    series = data['y']
    indices = np.arange(len(xs))
    bar_width = 0.60

    fig: typing.Any = Figure(figsize=(size[0], size[1]), dpi=size[2])  # type: ignore
    FigureCanvas(fig)  # Stores canvas on fig.canvas
    axis = fig.add_subplot(111)
    axis.grid(color='r', linestyle='dotted', linewidth=0.1, alpha=0.5)

    # stack each series on top of the running total of the previous ones
    running_total = np.zeros(len(series[0]['data']))
    for serie in series:
        axis.bar(indices, serie['data'], bar_width,
                 bottom=running_total, label=serie.get('label'))
        running_total += np.array(serie['data'])

    axis.set_title(data.get('title', ''))
    axis.set_xlabel(data['xlabel'])
    axis.set_ylabel(data['ylabel'])

    if data.get('allTicks', True) is True:
        axis.set_xticks(indices)
    if 'xtickFnc' in data:
        axis.set_xticklabels([data['xtickFnc'](v) for v in axis.get_xticks()])

    axis.legend()
    fig.savefig(output, format='png', transparent=True)
def lineChart(size: typing.Tuple[float, float, int], data: typing.Dict, output: typing.BinaryIO) -> None:
    """Render a filled line chart as a transparent PNG into ``output``."""
    xs = data['x']
    series = data['y']

    fig: typing.Any = Figure(figsize=(size[0], size[1]), dpi=size[2])  # type: ignore
    FigureCanvas(fig)  # Stores canvas on fig.canvas
    axis = fig.add_subplot(111)
    axis.grid(color='r', linestyle='dotted', linewidth=0.1, alpha=0.5)

    # one orange line per series, filled down to the x axis
    for serie in series:
        values = serie['data']
        axis.plot(xs, values, label=serie.get('label'), marker='.', color='orange')
        axis.fill_between(xs, values, 0)

    axis.set_title(data.get('title', ''))
    axis.set_xlabel(data['xlabel'])
    axis.set_ylabel(data['ylabel'])

    if data.get('allTicks', True) is True:
        axis.set_xticks(xs)
    if 'xtickFnc' in data:
        axis.set_xticklabels([data['xtickFnc'](v) for v in axis.get_xticks()])

    axis.legend()
    fig.savefig(output, format='png', transparent=True)
def post(self, data: typing.Dict):
    """Log a user in.

    On success the response carries a ``Set-Cookie`` header that
    establishes the session cookie.
    ---
    :param data: login payload (username or email, password, remember_me)
    :return: the authenticated user with HTTP 200, or aborts with 401
    """
    if current_user.is_authenticated:
        return current_user, 200

    # captcha check (currently disabled)
    # key = current_config.CAPTCHA_FORMAT.format(data['captcha_key'])
    # if redis.connection.get(key) != data['captcha_code']:
    #     abort(400, message="captcha error, please print the wright captcha code!")

    # look the user up by username or e-mail
    if data.get("username"):
        user = MainUser.query.filter_by(username=data['username']).first()
    elif data.get("email"):
        # FIX: previously filtered on the ``username`` column with the e-mail
        # value, which could never match; filter on ``email`` instead.
        user = MainUser.query.filter_by(email=data['email']).first()
    else:
        abort(401, message='required either username or email!!!')
        return  # unreachable: abort() raises; kept to satisfy type-checkers

    if user is not None \
            and user.check_password(data['password']):
        login_user(user, remember=data['remember_me'])
        return user
    else:
        abort(401, message='error username or password')
def info(self, kvpairs: typing.Dict) -> None:
    """Derive ``self.folder_name`` from run metadata plus a UTC timestamp.

    Layout: ``<env_name>_<NN type>/<algo>/<YYYY_MM_DDTHH_MM_SS>``.
    """
    timestamp = time.strftime("%Y_%m_%dT%H_%M_%S", time.gmtime())
    env_and_net = "_".join([kvpairs.get("env_name"), kvpairs.get("NN type")])
    self.folder_name = osp.join(env_and_net, kvpairs.get("algo"), str(timestamp))
def parse_tokens(regex_result: typing.Dict) -> typing.Dict:
    """Normalize a regex match-group dict, preferring the ``*2`` variants."""
    var_name = regex_result.get('var_name2') or regex_result.get('var_name')
    comment = regex_result.get('comment2') or regex_result.get('comment')
    return {
        'var_name': var_name,
        'comment': comment,
        'default': regex_result.get('default_val'),
    }
def get_gost_remote_ip(config: t.Dict) -> str:
    """Best-effort extraction of the remote IP a gost config points at.

    Chain nodes take priority; otherwise the first tcp serve node's target
    is used. Falls back to "ANYWHERE" when nothing can be determined.
    """
    chain_nodes = config.get("ChainNodes", [])
    if chain_nodes:
        # host part of e.g. scheme://user@host:port
        host = urlparse(chain_nodes[0]).netloc.split("@")[-1].split(":")[0]
        if not host:
            return "127.0.0.1"
        return host if is_ip(host) else dns_query(host)
    if config.get("ServeNodes", []):
        tcp_nodes = [node for node in config["ServeNodes"] if node.startswith("tcp")]
        if tcp_nodes:
            parsed = urlparse(tcp_nodes[0])
            if parsed.path:
                host = parsed.path[1:].split(":")[0]
                return host if is_ip(host) else dns_query(host)
    return "ANYWHERE"
def from_schema(column: typing.Dict) -> "Aircolumn":
    """Build an ``Aircolumn`` from its raw airtable column schema dict."""
    type_options = TypeOptions.from_obj(column.get("typeOptions"))
    return Aircolumn(
        id=column.get("id"),
        name=column.get("name"),
        type=column.get("type"),
        type_options=type_options,
    )
def __init__(self, key: bytes, result_dict: typing.Dict):
    """Unpack a lookup response for ``key`` into result attributes."""
    self.key = key
    # b'token' is mandatory — a missing token raises KeyError by design
    self.token = result_dict[b'token']
    contacts: typing.List[typing.Tuple[bytes, bytes, int]] = result_dict.get(b'contacts', [])
    self.close_triples = contacts
    # addresses are stored under the looked-up key itself, if any were found
    self.found_compact_addresses = result_dict.get(key, [])
def patch(self: Resource, args: typing.Dict, model_id: int,
          model_evaluate_id: int) -> typing.Tuple[typing.Dict, int]:
    """Update a single model-evaluate record from the supplied args."""
    update_params = {}
    if args.get("model_evaluate_state"):
        # translate the human-readable state into its stored integer code
        update_params["evaluate_task_status"] = \
            status_str2int_mapper()[args["model_evaluate_state"]]
    if args.get("model_evaluate_result"):
        update_params["evaluate_task_result"] = args["model_evaluate_result"]
    if args.get("model_evaluate_name"):
        update_params["evaluate_task_name"] = args["model_evaluate_name"]
    if args.get("model_evaluate_desc"):
        update_params["evaluate_task_desc"] = args["model_evaluate_desc"]

    evaluate_task = ModelEvaluateService().update_evaluate_task_by_id(
        evaluate_task_id=model_evaluate_id, args=update_params)
    result = EvaluateTaskSchema().dump(evaluate_task)
    return {
        "message": "更新成功",
        "result": result,
    }, 200
def from_dict(cls, init_dict: tg.Dict) -> 'Root':
    """Alternate constructor: build a ``Root`` from its dict representation."""
    sections = [Section.from_dict(d) for d in init_dict['section_list']]
    return cls(
        sections,
        *init_dict.get('init_args', []),
        **init_dict.get('init_kwargs', {}),
    )
def __init__(self, config: typing.Dict, hass: Optional[HomeAssistantType] = None):
    """Initialize an input number."""
    super().__init__(config)
    self._entities = get_entity_ids(config)
    # NOTE(review): hass is stored only when a set_value script is configured —
    # presumably so templates only render for writable entities; confirm intent.
    self.hass = hass if config.get(
        CONF_SET_VALUE_SCRIPT) is not None else None

    # template
    self._value_template = config.get(CONF_VALUE_TEMPLATE)
    if self._value_template is not None:
        self._value_template.hass = self.hass

    # icon template
    self._icon_template = config.get(CONF_ICON_TEMPLATE)
    if self._icon_template is not None:
        self._icon_template.hass = self.hass

    # set_value_script
    if config.get(CONF_SET_VALUE_SCRIPT) is not None:
        self._set_value_script = Script(hass, config[CONF_SET_VALUE_SCRIPT])
    else:
        self._set_value_script = None

    # value_changed_script
    if config.get(CONF_VALUE_CHANGED_SCRIPT) is not None:
        self._value_changed_script = Script(
            hass, config[CONF_VALUE_CHANGED_SCRIPT])
    else:
        self._value_changed_script = None
def __encode_query(self, token_object: typing.Dict, request_method: typing.AnyStr,
                   db_name: typing.AnyStr, db_src: typing.AnyStr,
                   query: typing.AnyStr, props: typing.Dict[str, str] = None,
                   db_cache=''):
    """Build the field payload for a query request.

    Example payload:
    {Conf: "acme", Src: "*master", Cache: "req_id", Sql: "UU0...",
     IDS: "594...", User: "******", Rights: "", Login: ""}
    """
    fields = {
        'id': '0',
        'Conf': db_name,
        'Src': db_src,
        'Login': '',
        'Pwd': '',
        # cache key and SQL text travel base64-encoded
        'Cache': base_64_encode(db_cache),
        'Sql': base_64_encode(query),
        'IDS': token_object.get('IDS', ''),
        'User': token_object.get('User', ''),
    }
    if props:
        fields.update(props)
    return self.__encode_fields(fields, request_method)
def _serialize_widget_state(self, state: t.Dict) -> t.Dict[str, t.Any]:
    """Serialize a widget state, following format in @jupyter-widgets/schema."""
    # lift the private _model_* fields to their public names
    serialized = {
        field[1:]: state.get(field)
        for field in ('_model_name', '_model_module', '_model_module_version')
    }
    serialized['state'] = state
    return serialized
async def process_alert(self, data: t.Dict) -> None:
    """Handle an incoming alert payload once the client is connected.

    Only events of type 'playing' that carry a PlaySessionStateNotification
    are processed; everything else is ignored.
    """
    # block until the connection handshake has completed
    await self.connected.wait()
    if not data.get('type') == 'playing':
        return
    if not (session_data := data.get('PlaySessionStateNotification')):
        return
    # NOTE(review): session_data is unused from here on — the body appears
    # truncated in this view; confirm against the full source.
def _get_resource_descriptor_for_document_resource(
        self,
        raw_resource: typing.Dict,
        harvestable_resource: harvesting_models.HarvestableResource
) -> resourcedescriptor.RecordDescription:
    """Map a raw remote document payload onto a ``RecordDescription``."""
    raw_date_stamp = raw_resource.get("uploaddate")
    date_stamp = dateutil.parser.parse(
        raw_date_stamp) if raw_date_stamp is not None else None
    country = raw_resource.get("country")
    point_of_contact = resourcedescriptor.RecordDescriptionContact(
        role="pointOfContact",
        name=raw_resource.get("authors"),
        organization=raw_resource.get("corporateauthor"),
        position=raw_resource.get("publisher"),
        address_country=country,
    )
    # the author record is the point of contact with only the role changed
    author = copy.deepcopy(point_of_contact)
    author.role = "author"
    download_uri = raw_resource.get("filename")
    if download_uri is not None:
        download_url = f"{self.remote_url}/doc/{download_uri}"
        # overview image sits next to the document: same basename, .png suffix
        overview_uri = download_uri.rpartition(".")[0] + ".png"
        graphic_overview_url = f"{self.remote_url}/doc/{overview_uri}"
    else:
        download_url = None
        graphic_overview_url = None
    # NOTE: PDN documents do not have a UUID. As such we generate one when
    # first importing the resource and reuse it when updating it
    if harvestable_resource.geonode_resource is not None:
        uuid_ = uuid.UUID(harvestable_resource.geonode_resource.uuid)
    else:
        uuid_ = uuid.uuid4()
    return resourcedescriptor.RecordDescription(
        uuid=uuid_,
        point_of_contact=point_of_contact,
        author=author,
        date_stamp=date_stamp,
        identification=resourcedescriptor.RecordIdentification(
            name=raw_resource.get("title"),
            title=raw_resource.get("title"),
            date=date_stamp,
            date_type="upload",
            abstract=raw_resource.get("description", ""),
            # NOTE(review): "targetaudicent" looks like a typo for
            # "targetaudience" — confirm against the remote API schema.
            purpose=raw_resource.get("targetaudicent"),
            originator=author,
            graphic_overview_uri=graphic_overview_url,
            place_keywords=[country] if country is not None else [],
            other_keywords=tuple(),
            license=[],
            # NOTE(review): the last three f-strings concatenate without
            # trailing "\n", so General Note/ISBN/ISSN run together on one
            # line — possibly unintended; confirm before changing.
            supplemental_information=
            (f"Cataloging source: {raw_resource.get('catalougingsource', '')}\n"
             f"General Note: {raw_resource.get('generalnote', '')}"
             f"ISBN: {raw_resource.get('isbn', '')}"
             f"ISSN: {raw_resource.get('issn', '')}")),
        distribution=resourcedescriptor.RecordDistribution(
            link_url=f"{self.remote_url}/document/{raw_resource['id']}",
            thumbnail_url=graphic_overview_url,
            original_format_url=download_url,
        ),
    )
def store_template(cls, es_kwargs: typing.Dict = None):
    """Register the mustache query script with an elasticsearch node."""
    import requests  # local import keeps the dependency optional

    kwargs = es_kwargs or {}
    endpoint = "http://{host}:{port}/_scripts/query".format(
        host=kwargs.get("host", "localhost"),
        port=kwargs.get("port", "9200"),
    )
    payload = {"script": {"lang": "mustache", "source": QUERY_SCRIPT}}
    return requests.post(endpoint, json=payload).text
def _get_start_step(select_dict: typing.Dict) -> typing.Tuple[int, int]:
    """Return the starting row and step size for a query.

    :param select_dict: query dict; honours optional 'start' and 'step' keys
    :return: (start, step), defaulting to (0, 1000)
    """
    return select_dict.get('start', 0), select_dict.get('step', 1000)
def update_rule_error(server_id: int, port_id: int, facts: t.Dict):
    """Persist ansible error output onto a forward rule's config.

    Joins the plain "error" fact with the message part of every systemd
    journal line matching "unit[pid]: message", then commits the rule.
    """
    # compile once and match once per line — the original ran the same
    # re.search twice for every line (filter + extract)
    pattern = re.compile(r"\w+\[[0-9]+\]: (.*)$")
    with db_session() as db:
        db_rule = get_forward_rule(db, server_id, port_id)
        systemd_messages = [
            match.group(1)
            for line in facts.get("systemd_error", "").split("\n")
            if (match := pattern.search(line))
        ]
        db_rule.config["error"] = "\n".join(
            [facts.get("error", "")] + systemd_messages
        ).strip()
        db.add(db_rule)
        db.commit()
def load(self, data: typing.Dict):
    """
    imports user's cards

    :param data: cards to import, json formatted
    """
    cards = data.get("collection", [])
    CardInCollection.bulk_insert(cards, user_id=self.user_id)
    for deck_data in data.get("decks", []):
        self.decks.load(deck_data)
def from_schema(schema: typing.Dict) -> "Airtable":
    """Build an ``Airtable`` from its raw table schema dict.

    Tolerates a missing "columns" entry (treated as an empty column list)
    instead of raising ``TypeError`` when iterating ``None``.
    """
    return Airtable(
        id=schema.get("id"),
        name=schema.get("name"),
        columns=[
            Aircolumn.from_schema(column)
            for column in schema.get("columns", [])
        ],
        primary_column_name=schema.get("primaryColumnName"),
    )
def from_mapfile(mapfile: typing.Dict) -> 'MapRequest':
    """Build a mapper from a parsed mapfile dict.

    NOTE(review): annotated as returning 'MapRequest' but constructs a
    CSVMapper — confirm which name is intended.
    """
    raw_resources = mapfile.get('resources', {})
    loaded_resources = [
        resource.Resource.from_spec(res_id, raw_resources[res_id])
        for res_id in raw_resources
    ]
    value_maps = [
        ValueMap.from_spec(spec, loaded_resources)
        for spec in mapfile.get('maps', [])
    ]
    return CSVMapper(value_maps)
def _get_resource_type(
    raw_dataset: typing.Dict,
) -> typing.Optional[models.GeonodeResourceType]:
    """Map a dataset's store type onto a ``GeonodeResourceType``.

    Accepts either the camelCase ("storeType") or snake_case ("store_type")
    key; unrecognized store types map to UNKNOWN.
    """
    store_type = raw_dataset.get("storeType", raw_dataset.get("store_type"))
    known_types = {
        "dataStore": models.GeonodeResourceType.VECTOR_LAYER,
        "coverageStore": models.GeonodeResourceType.RASTER_LAYER,
    }
    return known_types.get(store_type, models.GeonodeResourceType.UNKNOWN)
def from_token_response(
    cls,
    token: typing.Dict,
    token_uri: URLTypes,
):
    """Alternate constructor from an OAuth token-endpoint response dict."""
    return cls(
        token=token["access_token"],  # mandatory in a token response
        refresh_token=token.get("refresh_token"),
        token_uri=token_uri,
        scopes=token.get("scope"),
        expiry=_parse_expiry(token),
    )
def from_dict(cls, init_dict: tg.Dict) -> 'Text':
    """Alternate constructor: build a ``Text`` from its dict representation."""
    # only 'text' nodes carry content; everything else becomes empty text
    content = init_dict['content'] if init_dict['type'] == 'text' else ''
    return cls(
        content,
        *init_dict.get('init_args', []),
        **init_dict.get('init_kwargs', {}),
    )
def from_dict(cls, init_dict: tg.Dict) -> 'Paragraph':
    """Alternate constructor: build a ``Paragraph`` from its dict representation."""
    # inline items are tags or plain text, dispatched on their "type" field
    inlines = [
        Tag.from_dict(d) if d["type"] == "tag" else Text.from_dict(d)
        for d in init_dict['inline_list']
    ]
    return cls(
        inlines,
        *init_dict.get('init_args', []),
        **init_dict.get('init_kwargs', {}),
    )
def get(self, args: typing.Dict, doc_type_id: int) -> typing.Tuple[typing.Dict, int]:
    """Return every relation of a doc type (no pagination)."""
    service = DocTypeService()
    result, count = service.get_relation_list(
        doc_type_id,
        args.get("offset"),
        args.get("limit"),
        doc_relation_ids=args.get("doc_relation_ids"),
    )
    return {
        "message": "请求成功",
        "result": result,
        "count": count,
    }, 200
def from_config(cls, cfg: typing.Dict) -> SlackBot:  # noqa: F821
    """Alternate constructor: merge CLI options with the toml config file."""
    with open(cfg['config']) as config_file:
        config = toml.load(config_file)

    # an explicit CLI token wins over the config file
    if cfg.get('token', None) is not None:
        token = cfg['token']
    else:
        token = config['slackmgmt']['token']

    # debug if enabled in either source; stored on the class, not the instance
    cls.debug = cfg.get('debug', False) or config['slackmgmt'].get('debug', False)
    return cls(
        token=token,
        config=config,
        events_api=cfg.get('events_api', False),
    )
def get(self, key):
    """Return the value for ``key`` via the plain ``Dict.get`` implementation.

    NOTE(review): delegates explicitly to ``Dict.get`` rather than
    ``super().get`` — presumably to bypass overridden lookup behaviour
    elsewhere in the hierarchy; confirm intent.
    """
    return Dict.get(self, key)