def update_image_keywords(self, image_key, keywords):
    """Replace the keyword list on an image via the REST API.

    Args:
        image_key: API URI fragment identifying the image (used as-is).
        keywords: list of keyword strings to set on the image.

    Returns:
        True when the API answers 200, False otherwise (the failing
        response is logged in full).
    """
    uri = image_key
    payload = {
        "KeywordArray": keywords,
    }
    headers = {
        "Accept": "application/json",
        "content-type": "application/json"
    }
    r = self.r.patch(
        API_ORIGIN + uri,
        data=json.dumps(payload),
        headers=headers,
    )
    if DEBUG:
        logger.debug("headers %s, code %s, reason %s, content: %s",
                     r.headers, r.status_code, r.reason, r.content)
    if r.status_code == 200:
        return True
    # The old code raised an Exception only to catch and log it one line
    # later, then fell through to an implicit None. Log the failure
    # directly and return an explicit (still falsy) False.
    logger.error("headers %s, code %s, reason %s, content: %s",
                 r.headers, r.status_code, r.reason, r.content)
    return False
def get_acls(self, node_name, interface_name):
    """Fetch the ACLs configured on *node_name*/*interface_name*.

    Builds an ACL parser for the configured model and delegates to
    Topology().get_acls(). On success returns whatever the topology layer
    returns; on any failure logs the exception and returns the
    (False, message) pair consumed by the HTTP handler.

    NOTE(review): uses ``e.message`` (Python 2 only), and the success and
    failure return shapes differ -- callers must handle both.
    """
    try:
        acl_parser = ControllerOperations._get_acl_parser(ACL_MODEL, AclOps.GET)
        # TODO: add support for other models (how?)
        return Topology().get_acls(node_name, interface_name, acl_parser)
    except Exception as e:
        logger.exception(e.message)
        return False, e.message
def handler(event, context):
    """Handle check category SQS event.

    For each SQS record: resolve the domain, open one remote Chrome
    session, run every registered category-check proxy against the
    domain, then persist the updated ``is_category_submitted`` flags.

    Bug fixed: ``driver.quit()`` used to run inside the proxy loop (on
    both success and failure), so every iteration after the first used an
    already-terminated session. The driver is now closed exactly once per
    record, in a ``finally`` block.
    """
    chrome_options = webdriver.ChromeOptions()
    chrome_options.add_argument("--no-sandbox")
    chrome_options.add_argument("--headless")
    for record in event["Records"]:
        payload = json.loads(record["body"])
        print(f"{payload=}")
        domain_name = payload["domain"]
        domain = domain_manager.get(filter_data={"name": domain_name})
        driver = webdriver.Remote(
            command_executor=f"http://{BROWSERLESS_ENDPOINT}/webdriver",
            desired_capabilities=chrome_options.to_capabilities(),
        )
        driver.set_page_load_timeout(60)
        try:
            for k, v in get_check_proxies().items():
                try:
                    resp = v(driver, domain_name)
                    update_submission(k, domain["is_category_submitted"], resp)
                except Exception as e:
                    # One failed proxy must not stop the others.
                    logger.exception(e)
        finally:
            # Close the browser session once per record.
            driver.quit()
        print(f"Updating {domain_name} with {domain['is_category_submitted']}")
        domain_manager.update(
            document_id=domain["_id"],
            data={"is_category_submitted": domain["is_category_submitted"]},
        )
def fetch_acl(node, default=None):
    """Fetch the ACL document for *node*, persisting it to disk.

    Returns *default* when the controller request fails with HTTPError
    (the error is logged).
    """
    url = AbstractAclTopologyParser.url_dispatcher.GET_ACL_URL.format(node)
    cache_file = os.path.join(acl_path, node + "_acl.json")
    try:
        return fetch_and_persist(url, cache_file, default=default)
    except HTTPError as e:
        logger.exception(e.message)
        return default
def unlaunch_domain(domain):
    """Unlaunch domain."""
    try:
        # Mark the domain busy so users cannot act on it mid-teardown.
        domain_manager.update(
            document_id=domain["_id"],
            data={
                "is_available": False,
                "is_delaunching": True,
            },
        )
        # Tear down the hosted site, then flip the domain back to an
        # inactive-but-available state.
        delete_site(domain)
        domain_manager.update(
            document_id=domain["_id"],
            data={
                "is_active": False,
                "is_available": True,
                "is_delaunching": False,
            },
        )
        # Drop the AWS resource references (presumably the ACM cert and
        # CloudFront distribution ids -- confirm against delete_site).
        domain_manager.remove(
            document_id=domain["_id"],
            data={"acm": "", "cloudfront": ""},
        )
    except Exception as e:
        # Best-effort rollback: log and release the domain so it is not
        # left stuck in the "delaunching" state.
        logger.exception(e)
        domain_manager.update(
            document_id=domain["_id"],
            data={
                "is_available": True,
                "is_delaunching": False,
            },
        )
def get(self, *args, **kwargs):
    """HTTP GET: return the ACLs for a node/interface pair as JSON.

    Expects the node in the URL path and the interface in the ``if``
    query argument. Responds 400 when either is missing, 404 when no
    data is found, and 500 on a ValueError from the controller layer.
    """
    node = self._get_node(kwargs.get('path', ''))
    interface = self.get_argument("if")
    status, error_message = 200, None
    if not node or not interface:
        status = BAD_REQUEST
        error_message = "No node or interface specified"
        # NOTE(review): logger.exception outside an except block logs a
        # bogus "NoneType: None" traceback; logger.error may be intended.
        logger.exception(error_message)
    else:
        try:
            response = ControllerOperations().get_acls(node, interface)
            # NOTE(review): get_acls returns (False, message) on failure,
            # which is a truthy tuple and so takes this branch -- verify
            # that failures really are meant to be written out as JSON.
            if response:
                logger.debug(response)
                self.set_header("Content-Type", "application/json")
                self.write(json.dumps(response))
                # self.finish()
            else:
                status = NOT_FOUND
                error_message = "No data found"
        except ValueError as e:
            status = INTERNAL_SERVER_ERROR
            error_message = e.message
    if error_message:
        logger.exception(error_message)
        if DEBUG:
            # Expose the message in the status line only in debug builds.
            self.set_status(status, error_message)
        else:
            self.set_status(status)
    else:
        self.set_status(status)
def post(self, domain_name):
    """Categorize an external domain.

    Resolves the requested category from CATEGORIES, then submits it to
    every registered categorization proxy, collecting each proxy's
    response into the JSON reply.

    Bug fixed: on failure the old code called ``self.driver().quit()``,
    which spun up a brand-new driver and quit *that*, leaking the session
    that actually failed. Each iteration's driver is now closed in a
    ``finally`` block.
    """
    category = [
        category
        for category in CATEGORIES
        if category["name"] == request.json.get("category", "").title()
    ][0]
    resp = []
    for k, v in get_categorize_proxies().items():
        # Each proxy has its own name for the category.
        proxy_category = "".join(
            detail.get(k) for detail in category.get("proxies") if k in detail
        )
        driver = self.driver()
        driver.set_page_load_timeout(60)
        try:
            categorize = v(
                driver=driver,
                domain=domain_name,
                category=proxy_category,
                two_captcha_api_key=TWO_CAPTCHA_API_KEY,
            )
            resp.append(categorize)
        except Exception as e:
            logger.exception(e)
        finally:
            driver.quit()
    return jsonify(resp)
def launch_domain(domain):
    """Launch Domain."""
    try:
        # Mark the domain busy so users cannot act on it mid-launch.
        domain_manager.update(
            document_id=domain["_id"],
            data={
                "is_available": False,
                "is_launching": True,
            },
        )
        # launch_site returns extra fields (presumably AWS resource ids)
        # that are merged into the final state update -- confirm shape.
        resp = launch_site(domain)
        data = {
            "is_active": True,
            "is_available": True,
            "is_launching": False,
        }
        data.update(resp)
        domain_manager.update(
            document_id=domain["_id"],
            data=data,
        )
    except Exception as e:
        # Roll the flags back so the domain is not stuck "launching".
        logger.exception(e)
        domain_manager.update(
            document_id=domain["_id"],
            data={
                "is_available": True,
                "is_launching": False,
            },
        )
def get_existing_bug_link(previous_results):
    """Return the Launchpad bug target linked from previous test results.

    Scans *previous_results* newest-first for entries carrying a
    ``custom_launchpad_bug`` link, resolves each link to its canonical
    (non-duplicate) bug, and returns the first target dict matching the
    configured project and milestone that is still open, with the
    originating link attached as ``bug_link``. Returns None implicitly
    when nothing matches.
    """
    results_with_bug = [result for result in previous_results
                        if result["custom_launchpad_bug"] is not None]
    if not results_with_bug:
        return
    for result in sorted(results_with_bug,
                         key=lambda k: k['created_on'],
                         reverse=True):
        try:
            # The bug id is the last path segment of the link URL.
            bug_id = int(result["custom_launchpad_bug"].strip('/').split(
                '/')[-1])
        except ValueError:
            logger.warning('Link "{0}" doesn\'t contain bug id.'.format(
                result["custom_launchpad_bug"]))
            continue
        try:
            # Follow duplicates to the canonical bug.
            bug = LaunchpadBug(bug_id).get_duplicate_of()
        except KeyError:
            logger.warning("Bug with id '{bug_id}' is private or \
doesn't exist.".format(bug_id=bug_id))
            continue
        except Exception:
            logger.exception("Strange situation with '{bug_id}' \
issue".format(bug_id=bug_id))
            continue
        for target in bug.targets:
            if target['project'] == LaunchpadSettings.project and\
                    target['milestone'] == LaunchpadSettings.milestone and\
                    target['status'] not in LaunchpadSettings.closed_statuses:
                target['bug_link'] = result["custom_launchpad_bug"]
                return target
def put(self, username):
    """Disable or re-enable the user.

    Admin-only: flips the Cognito enabled state for *username*, mirrors
    the new state into the user document, and reports it back as JSON.
    Any failure is logged and answered with HTTP 400.
    """
    if not g.is_admin:
        abort(HTTPStatus.FORBIDDEN.value)
    try:
        dm_user = user_manager.get(filter_data={"Username": username})
        # Toggle: enabled users get disabled, disabled users get enabled.
        currently_enabled = dm_user["Enabled"]
        toggle = (
            cognito.admin_disable_user
            if currently_enabled
            else cognito.admin_enable_user
        )
        toggle(UserPoolId=COGNTIO_USER_POOL_ID, Username=username)
        new_status = not currently_enabled
        dm_user["Enabled"] = new_status
        user_manager.update(document_id=dm_user["_id"], data=dm_user)
        return jsonify(
            {
                "success": f"{username} enabled status - {new_status}",
                "status": new_status,
            }
        )
    except Exception as e:
        logger.exception(e)
        return (
            jsonify({"error": f"Failed to disable/enable user - {username}"}),
            HTTPStatus.BAD_REQUEST.value,
        )
def get_existing_bug_link(previous_results):
    """Return the Launchpad bug target linked from previous test results.

    Scans *previous_results* newest-first for entries carrying a
    ``custom_launchpad_bug`` link, resolves each link to its canonical
    (non-duplicate) bug, and returns the first target dict matching the
    configured project and milestone that is still open, with the
    originating link attached as ``bug_link``. Returns None implicitly
    when nothing matches.
    """
    results_with_bug = [
        result for result in previous_results
        if result["custom_launchpad_bug"] is not None
    ]
    if not results_with_bug:
        return
    for result in sorted(results_with_bug,
                         key=lambda k: k['created_on'],
                         reverse=True):
        try:
            # The bug id is the last path segment of the link URL.
            bug_id = int(
                result["custom_launchpad_bug"].strip('/').split('/')[-1])
        except ValueError:
            logger.warning('Link "{0}" doesn\'t contain bug id.'.format(
                result["custom_launchpad_bug"]))
            continue
        try:
            # Follow duplicates to the canonical bug.
            bug = LaunchpadBug(bug_id).get_duplicate_of()
        except KeyError:
            logger.warning("Bug with id '{bug_id}' is private or \
doesn't exist.".format(bug_id=bug_id))
            continue
        except Exception:
            logger.exception("Strange situation with '{bug_id}' \
issue".format(bug_id=bug_id))
            continue
        for target in bug.targets:
            if target['project'] == LaunchpadSettings.project and\
                    target['milestone'] == LaunchpadSettings.milestone and\
                    target['status'] not in LaunchpadSettings.closed_statuses:
                target['bug_link'] = result["custom_launchpad_bug"]
                return target
def parse_nodes(self):
    """Build the node list from the controller topology.

    For each topology node: collect its advertised prefixes, derive a
    display name (explicit name attribute, reverse lookup of the router
    id, or the router field of the decomposed node id), record the
    loopback/prefixes/id, and attach its parsed interfaces. Nodes whose
    interface fetch fails are logged and skipped.
    """
    node_list = []
    for controller_node in self.topology[self.NODE]:
        node = {}
        prefix_array = []
        # Decompose the raw node id string into its named components.
        node_dict = html_style(controller_node[self.NODE_ID])
        attributes = controller_node[self.NODE_ATTRIBUTES]
        if self.PREFIX in attributes:
            for prefix in attributes[self.PREFIX]:
                prefix_array.append(prefix[self.PREFIX])
        if self.ROUTER_ID in attributes:
            if self.NAME in attributes:
                node[self.NAME] = attributes[self.NAME]
            else:
                # Try to resolve a friendly name from the router id;
                # fall back to the router field of the node id.
                success, name = name_check(attributes[self.ROUTER_ID][0])
                node[self.NAME] = name if success else node_dict[self.ROUTER]
            node[self.LOOPBACK] = attributes[self.ROUTER_ID][0]
        else:
            # No router id advertised: use placeholder loopback.
            node[self.NAME] = node_dict[self.ROUTER]
            node[self.LOOPBACK] = "0.0.0.0"
        node[self.PREFIX] = prefix_array
        node[self.ID] = controller_node[self.NODE_ID]
        try:
            node[self.INTERFACE] = self.parse_interfaces(
                self.fetch_interfaces(node[self.NAME]))
            node_list.append(node)
        except HttpClientException as e:
            logger.error("Fetch interfaces for node '{}' failed.".format(
                node[self.NAME]))
            logger.exception(e.message)
    return node_list
def register():
    """Handle the registration form (POST creates the user, GET renders)."""
    if request.method == "POST":
        user_name = request.form.get("username")
        # Lightweight email shape check only; real verification happens
        # via the activation email sent after signup.
        if not re.match(r"[^@]+@[^@]+\.[^@]+", user_name):
            return render_template("register.html",
                                   error="Invalid email address!")
        password = request.form.get("password")
        password2 = request.form.get("password2")
        if password != password2:
            return render_template("register.html",
                                   error="Password Confirmation Error!")
        if not validate_password(password):
            return render_template(
                "register.html",
                error="Password length should be between 8 and 30 characters.")
        next_page = request.form.get("next_page", url_for("home"))
        captcha_response = request.form.get("g-recaptcha-response")
        if not validate_captcha(captcha_response):
            return render_template("register.html", error="Recaptcha Fail")
        # Rate-limit registrations per source IP.
        if is_abuse_check(request.remote_addr, prefix="register",
                          threshold=10):
            return render_template("register.html",
                                   error="Abuse detected by your IP address.")
        try:
            User.add_user(email=user_name, password=password)
        except Exception as ex:
            logger.exception(ex)
            return render_template("register.html", error="Register Fail!")
        # Log the newly created user in immediately.
        user = web_login(user_name, password)
        if not user:
            return render_template("register.html", error="Register Fail!")
        flash('Check your email to activate account.')
        return redirect(next_page)
    else:
        return render_template("register.html", error=None)
def monitor_remote_performance_index(self, b_ssh_run, interval=1):
    """Poll remote cpu/memory/disk/network stats while the flag is set.

    Spawns one greenlet per metric each cycle and appends the samples to
    the four data files configured in DATA_PATH. Stops when
    ``b_ssh_run.value`` becomes falsy, or returns early when the SSH
    session could not be established.

    Fixes: Python-2-only ``except Exception, e`` syntax replaced, and
    the four data files are now closed in a ``finally`` block so they
    are not leaked when a cycle raises.
    """
    self._remote_login()
    if not self.ssh:
        return
    f_cpu = f_memory = f_disk = f_network = None
    try:
        f_cpu = open(DATA_PATH['cpu'], "w")
        f_memory = open(DATA_PATH['memory'], "w")
        f_disk = open(DATA_PATH['disk'], "w")
        f_network = open(DATA_PATH['network'], "w")
        while b_ssh_run.value:
            # this is for test:
            # self.get_remote_disk_info(self.ssh, f_disk)
            # b_ssh_run.value = 0
            gevent.joinall([
                gevent.spawn(self._get_remote_cpu_info, f_cpu),
                gevent.spawn(self._get_remote_mem_info, f_memory),
                gevent.spawn(self._get_remote_disk_info, f_disk),
                gevent.spawn(self._get_remote_network_info, f_network)
            ])
    except Exception:
        # "failed to monitor remote info"
        logger.exception("监控远程信息失败!")
        return
    finally:
        # Close whichever files were successfully opened.
        for fh in (f_cpu, f_memory, f_disk, f_network):
            if fh is not None:
                fh.close()
def get_data(info):
    """Decode a single masked WebSocket frame into its text payload.

    Args:
        info: raw frame bytes as received from the client socket.

    Returns:
        The unmasked payload decoded as UTF-8, or "" when the payload is
        not valid UTF-8 (the decode error is logged).

    Fix: the old first line read ``self.acc_to_display`` inside this
    plain function (which has no ``self`` parameter), raising NameError
    on every call; that leftover debug line is removed.
    """
    # Low 7 bits of the second byte select the payload-length encoding
    # (RFC 6455): 126 -> 2 extra length bytes, 127 -> 8 extra bytes.
    payload_len = info[1] & 127
    if payload_len == 126:
        extend_payload_len = info[2:4]
        mask = info[4:8]
        decoded = info[8:]
    elif payload_len == 127:
        extend_payload_len = info[2:10]
        mask = info[10:14]
        decoded = info[14:]
    else:
        extend_payload_len = None
        mask = info[2:6]
        decoded = info[6:]
    # Collect the unmasked bytes first, then decode once, so multi-byte
    # UTF-8 sequences (e.g. Chinese text) are not corrupted.
    bytes_list = bytearray()
    for i in range(len(decoded)):
        chunk = decoded[i] ^ mask[i % 4]  # per-byte XOR unmasking
        bytes_list.append(chunk)
    try:
        body = str(bytes_list, encoding='utf-8')
    except UnicodeDecodeError as err:
        logger.exception(err)
        return ""
    return body
def fetch_interfaces(node, default=None):
    """Fetch the interface list for *node*, persisting it to disk.

    Args:
        node: node name used to format the controller URL and the cache
            file name.
        default: value returned when the fetch fails with HTTPError.

    Returns:
        The fetched/persisted data, or *default* on HTTPError.

    Fix: unlike the sibling fetch_acl, *default* was not forwarded to
    fetch_and_persist; it now is, for consistent fallback behavior.
    """
    try:
        return fetch_and_persist(
            AbstractAclTopologyParser.url_dispatcher.GET_INTERFACES_URL.format(node),
            os.path.join(interfaces_path, node + "_interfaces.json"),
            default=default)
    except HTTPError as e:
        logger.exception(e.message)
        return default
def fetch_interfaces(node, default=None):
    """Fetch the interface list for *node*, persisting it to disk.

    Args:
        node: node name used to format the controller URL and the cache
            file name.
        default: value returned when the fetch fails with HTTPError.

    Returns:
        The fetched/persisted data, or *default* on HTTPError.

    Fix: unlike the sibling fetch_acl, *default* was not forwarded to
    fetch_and_persist; it now is, for consistent fallback behavior.
    """
    try:
        return fetch_and_persist(
            AbstractAclTopologyParser.url_dispatcher.GET_INTERFACES_URL.
            format(node),
            os.path.join(interfaces_path, node + "_interfaces.json"),
            default=default)
    except HTTPError as e:
        logger.exception(e.message)
        return default
def _get_disk_data(self, dpath):
    """Read the disk-stats file at *dpath* and return its data lines.

    NOTE(review): as visible here only the error path returns (four
    Nones); the success path falls off the end with an implicit None,
    yet the caller unpacks four values -- this block looks truncated,
    confirm against the full source before changing anything.
    """
    fileData = ""
    try:
        f = open(dpath, "r")
        # Drop the trailing empty string produced by the final newline.
        fileData = f.read().split("\n")[:-1]
        f.close()
    except Exception, e:
        # "failed to read file"
        logger.exception("读取文件失败!")
        return None, None, None, None
def get_acl_parser(self, model=ACL_MODEL, operation=None):
    """Return (lazily creating and caching) the ACL parser for *model*.

    Args:
        model: ACL model identifier; defaults to the configured ACL_MODEL.
        operation: AclOps value forwarded to the parser factory.

    Returns:
        The cached parser instance.

    Raises:
        ValidationError: when no parser is defined for *model*.

    Fix: logger.exception() outside an except block logs a bogus
    "NoneType: None" traceback; logger.error() is used before raising.
    """
    if not self.acl_parser:
        parser = ControllerOperations._get_acl_parser(model, operation)
        if not parser:
            error_message = "No parser defined for this acl model"
            logger.error(error_message)
            raise ValidationError(error_message)
        else:
            self.acl_parser = parser
    return self.acl_parser
def set_ipn_status(self, address, status):
    """Set the IPN row for *address* to *status*.

    Commits on success; any database error is logged and swallowed
    (best-effort update, matching the other DB helpers here).

    Fix: the cursor is now closed in a ``finally`` block so it is not
    leaked when execute()/commit() raises.
    """
    cursor = None
    try:
        query = ("UPDATE ipn "
                 "SET status=%s "
                 "WHERE address=%s ")
        data = (status, address)
        cursor = self.conn.cursor()
        cursor.execute(query, data)
        self.conn.commit()
    except Exception as ex:
        logger.exception(ex)
    finally:
        if cursor is not None:
            cursor.close()
def _get_remote_network_info(self, f):
    """Sample remote network throughput once and append it to file *f*.

    Writes a tab-separated header on the first call, then one
    "timestamp<TAB>KBps" row per call. Errors are logged and swallowed
    so one failed sample does not stop the monitor loop.
    """
    try:
        KBps, t_network = get_network_bps(self.ssh)
        if self.net_first_write:
            self.net_first_write = False
            f.write("time\tKBps\n")
        f.write(t_network + "\t" + str(KBps) + "\t")
        f.write("\n")
    except Exception, e:
        # "failed to get remote network info"
        logger.exception("获取远程network信息失败!")
def fetch_acl(node, default=None):
    """Fetch and cache the ACL document for *node*.

    Returns *default* when the controller request fails with HTTPError
    (the error is logged).
    """
    acl_url = AbstractAclTopologyParser.url_dispatcher.GET_ACL_URL
    target = os.path.join(acl_path, node + "_acl.json")
    try:
        return fetch_and_persist(acl_url.format(node), target,
                                 default=default)
    except HTTPError as e:
        logger.exception(e.message)
        return default
def get_received(address):
    """
    Main service function, that returns the total received balance.

    :returns a tuple with total received (confirmed, unconfirmed)
    balance from address
    """
    try:
        database = db.DataBase()
        totals = database.get_total_received(address)
        return tuple(totals)
    except Exception as ex:
        # Logged and swallowed; callers receive an implicit None.
        logger.exception(ex)
def post(self, domain_id):
    """Create website."""
    category = request.args.get("category")
    domain = domain_manager.get(document_id=domain_id)
    # Switch instance to unavailable to prevent user actions
    domain_manager.update(
        document_id=domain_id,
        data={
            "is_available": False,
            "is_generating_template": True,
        },
    )
    try:
        domain_name = domain["name"]
        post_data = request.json
        post_data["domain"] = domain_name
        # Generate website content from a template
        resp = requests.post(
            f"{STATIC_GEN_URL}/generate/?category={category}&domain={domain_name}",
            json=post_data,
        )
        # remove temp files
        shutil.rmtree("tmp/", ignore_errors=True)
        resp.raise_for_status()
        # Success: record where the generated site lives and free the
        # domain for user actions again.
        domain_manager.update(
            document_id=domain_id,
            data={
                "s3_url": f"https://{WEBSITE_BUCKET}.s3.amazonaws.com/{domain_name}/",
                "category": category,
                "is_available": True,
                "is_generating_template": False,
            },
        )
        return jsonify({
            "message": f"{domain_name} static site has been created from the {category} template."
        })
    except Exception as e:
        # Roll the flags back so the domain is not stuck "generating".
        logger.exception(e)
        domain_manager.update(
            document_id=domain_id,
            data={
                "is_available": True,
                "is_generating_template": False,
            },
        )
        return jsonify({"error": "Error generating from template."}), 400
def parse_links(self):
    """Map controller topology links to {source, target, metric} dicts.

    Returns the mapped links on success, or an empty list when the
    topology is malformed (the error is logged).

    NOTE(review): under Python 3 ``map`` is lazy, so errors inside the
    lambda would escape this try/except -- this code appears to assume
    Python 2's eager map; confirm the interpreter version.
    """
    link_list = []
    try:
        return map(lambda link: {
            self.SOURCE: link[self.SOURCE][self.SOURCE_NODE],
            self.TARGET: link[self.DESTINATION][self.DEST_NODE],
            self.METRIC: link[self.LINK_ATTRIBUTES][self.METRIC]
        }, self.topology[self.LINK])
    except Exception as ex:
        logger.exception("ACL parseLinks error: %s" % ex)
        return link_list
def _remote_login(self):
    """Open a key-based SSH session to ``self.remote_host``.

    Returns:
        The connected paramiko.SSHClient (also stored on ``self.ssh``),
        or None when the connection fails (``self.ssh`` is cleared).

    Fixes: the old code built and connected the client but never stored
    or returned it, so the caller's ``if not self.ssh`` check could not
    see the fresh connection; also replaces the Python-2-only
    ``except Exception, e`` syntax.
    """
    try:
        pkey = paramiko.RSAKey.from_private_key_file(privateKeyPath)
        ssh = paramiko.SSHClient()
        ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        ssh.connect(hostname=self.remote_host,
                    port=22,
                    username=self.username,
                    pkey=pkey)
        self.ssh = ssh
        return ssh
    except Exception:
        # "failed to log in to <host>"
        logger.exception("登录%s出错!" % (self.remote_host, ))
        self.ssh = None
        return None
def get_user_groups():
    """Get the groups a user belongs to."""
    try:
        user = user_manager.get(filter_data={"Username": g.username})
        # Users without a "Groups" field simply belong to no groups.
        return user.get("Groups", [])
    except Exception as e:
        logger.exception(e)
        return []
def parse_links(self):
    """Map controller topology links to {source, target, metric} dicts.

    Returns the mapped links on success, or an empty list when the
    topology is malformed (the error is logged).

    NOTE(review): under Python 3 ``map`` is lazy, so errors inside the
    lambda would escape this try/except -- this code appears to assume
    Python 2's eager map; confirm the interpreter version.
    """
    link_list = []
    try:
        return map(
            lambda link: {
                self.SOURCE: link[self.SOURCE][self.SOURCE_NODE],
                self.TARGET: link[self.DESTINATION][self.DEST_NODE],
                self.METRIC: link[self.LINK_ATTRIBUTES][self.METRIC]
            }, self.topology[self.LINK])
    except Exception as ex:
        logger.exception("ACL parseLinks error: %s" % ex)
        return link_list
def get_ipns_fails(self):
    """Return distinct (address, max_confirms, url) rows with status 'fail'.

    Returns:
        The fetched rows, or None when the query raises (the error is
        logged and swallowed).

    Fix: the cursor is now closed in a ``finally`` block so it is not
    leaked when execute()/fetchall() raises.
    """
    cursor = None
    try:
        query = ("SELECT DISTINCT(address), max_confirms, url "
                 "FROM ipn "
                 "WHERE status='fail' ")
        cursor = self.conn.cursor()
        cursor.execute(query)
        data = cursor.fetchall()
        return data
    except Exception as ex:
        logger.exception(ex)
    finally:
        if cursor is not None:
            cursor.close()
def add_unique(self, info):
    """Insert *info* into ``self.table_name``, ignoring duplicates.

    Uses INSERT IGNORE so an already-existing row is a silent no-op.
    Errors are logged and swallowed.

    Fixes: the cursor is now closed in a ``finally`` block, and the
    local no longer shadows the sibling ``add_output`` method name.

    NOTE(review): ``self.table_name`` is interpolated into the SQL --
    safe only while table names come from trusted code, never users.
    """
    cursor = None
    try:
        cursor = self.conn.cursor()
        statement = ("INSERT IGNORE INTO {} "
                     "(info) "
                     "VALUES (%s)".format(self.table_name))
        cursor.execute(statement, (info, ))
        self.conn.commit()
    except Exception as ex:
        logger.exception(ex)
    finally:
        if cursor is not None:
            cursor.close()
def add_output(self, block, address, value, hash):
    """Insert one address-output row (block, address, value, block_hash).

    Errors are logged and swallowed (best-effort insert).

    Fixes: the local statement variable no longer shadows this method's
    own name, and the cursor is closed in a ``finally`` block so it is
    not leaked when execute()/commit() raises.
    """
    cursor = None
    try:
        cursor = self.conn.cursor()
        statement = ("INSERT INTO address_outputs "
                     "(block, address, value, block_hash) "
                     "VALUES (%s, %s, %s, %s)")
        cursor.execute(statement, (block, address, value, hash))
        self.conn.commit()
    except Exception as ex:
        logger.exception(ex)
    finally:
        if cursor is not None:
            cursor.close()
def get_ipn_status(self, address):
    """Return the IPN status stored for *address*, or 0 when absent/empty.

    Returns:
        The stored status value, 0 when no row (or a falsy status) is
        found, or None when the query itself fails (logged, swallowed).

    Fix: the cursor is now closed in a ``finally`` block so it is not
    leaked when execute()/fetchone() raises.
    """
    cursor = None
    try:
        query = ("SELECT status FROM ipn "
                 "WHERE address=%s ")
        cursor = self.conn.cursor()
        cursor.execute(query, (address, ))
        row = cursor.fetchone()
        result = 0
        if row and row[0]:
            result = row[0]
        return result
    except Exception as ex:
        logger.exception(ex)
    finally:
        if cursor is not None:
            cursor.close()
def draw_disk_figure(self):
    '''
    Draw the disk figure separately, because the disk rbytes/rcount/rtime
    series use different units. Three subplots:
    read_counts/write_counts, rbytes/wbytes, rtime/wtime.
    (Translated from the original Chinese docstring.)
    '''
    params = self._get_disk_data(DATA_PATH['disk'])
    # _get_disk_data signals failure with a leading None.
    if params[0] is None:
        return
    try:
        self._draw_disk_figure(*params)
    except Exception, e:
        # "failed to draw the disk figure"
        logger.exception("绘制disk图形失败!")
def get_users_group_ids():
    """Get applications a user belongs to."""
    try:
        # One application id per group membership.
        return [group["Application_Id"] for group in get_user_groups()]
    except Exception as e:
        logger.exception(e)
        return []
def job_runner(job_id, input_notebook, output_notebook, output_dir,
               parameters, **papermill_args):
    """
    Task to execute notebooks.

    Parameters
    ----------
    job_id: str, uuid4
        the job id
    input_notebook: str
        location of input notebook to run
    output_notebook: str
        location to put output_notebook
    parameters: dict
        notebook parameters
    papermill_args: **kwargs
        extra parameters to pass to papermill execution
    """
    log_context = {
        "parameters": parameters,
        "input_notebook": input_notebook,
        "output_notebook": output_notebook,
        "output_dir": output_dir,
        "papermill_args": papermill_args,
    }
    logger.info('notebooks.executing.started', extra=log_context)
    try:
        papermill.execute_notebook(
            input_notebook,
            output_notebook,
            parameters=parameters,
            **papermill_args
        )
    except PapermillExecutionError as e:
        # Record the failure in the structured log, then propagate.
        job_status = jobs.JobStatus.FAILED
        log_context["job_status"] = job_status
        logger.exception('notebooks.executing.error', extra=log_context)
        raise e
    job_status = jobs.JobStatus.SUCCESS
    log_context["job_status"] = job_status
    logger.info('notebooks.executing.finished', extra=log_context)
    return {
        "job_status": job_status,
        "output_notebook": output_notebook
    }
def delete_acl(self, payload):
    """Delete the ACLs described by *payload* via the DELETE parser.

    Returns (True, 'ok') on success, or (False, error) where error is
    the controller response body when available, otherwise the exception
    message (Python-2 ``e.message``).
    """
    try:
        self.get_acl_parser(operation=AclOps.DELETE).upload_payload(payload)
        try:
            self.get_acl_parser().delete_acl()
        except Exception as e:
            # Prefer the controller's response body for diagnostics.
            error = e.response.body if hasattr(e, 'response') else e.message
            logger.exception(error)
            return False, error
        return True, 'ok'
    except Exception as e:
        logger.exception(e.message)
        return False, e.message
def delete_acl(self, payload_string):
    """Delete the ACLs described by the JSON string *payload_string*.

    Returns (True, 'ok') on success. On a controller error returns
    (False, parsed-response-body) when the exception carries a response,
    otherwise (False, e.message).

    NOTE(review): unlike the sibling delete_acl/apply_acls, this always
    logs e.message even when the exception carries a response body --
    confirm whether that divergence is intentional.
    """
    try:
        payload = json.loads(payload_string)
        self.get_acl_parser(operation=AclOps.DELETE).upload_payload(payload)
        try:
            self.get_acl_parser().delete_acl()
        except Exception as e:
            logger.exception(e.message)
            # The ternary applies only to the second tuple element.
            return False, json.loads(e.response.body) if hasattr(e, 'response') else e.message
        return True, 'ok'
    except Exception as e:
        logger.exception(e.message)
        return False, e.message
def apply_acls(self, payload_string):
    """Apply the ACLs described by the JSON string *payload_string*.

    Returns (True, 'ok') on success, or (False, error) where error is
    the controller response body when available, otherwise the exception
    message (Python-2 ``e.message``).
    """
    try:
        payload = json.loads(payload_string)
        self.get_acl_parser(operation=AclOps.APPLY).upload_payload(payload)
        try:
            self.get_acl_parser().apply_acls()
        except Exception as e:
            # Prefer the controller's response body for diagnostics.
            error = e.response.body if hasattr(e, 'response') else e.message
            logger.exception(error)
            return False, error
        return True, 'ok'
    except Exception as e:
        logger.exception(e.message)
        return False, e.message
def parse_controller_topology(self, controller_topology):
    """Parse a raw controller topology into this object's node/link model.

    Returns (True, serialized_json) on success, or
    (False, original_topology) when validation or parsing fails (the
    error is logged).
    """
    try:
        self.set_topology_parser(controller_topology)
    except ValidationError as e:
        logger.exception("Topology validation error: " + e.message)
        return False, controller_topology
    try:
        self.upload_topology(controller_topology)
        self.id = self.get_topology_id()
        self.nodes = self.parse_nodes()
        # Controller topologies may report each link twice (once per
        # direction); deduplicate before storing.
        self.links = remove_dup_links(self.parse_links())
    except Exception as ex:
        logger.exception("ACL parse topology error: %s" % ex.message)
        return False, controller_topology
    return True, json.dumps(self, cls=self.get_topology_encoder())
def fetch_acl(node, default=None):
    """Fetch and cache the ACL document for *node*.

    Returns *default* when the request fails with HTTPError (the error
    is logged).
    """
    target_file = os.path.join(acl_path, node + "_acl.json")
    try:
        return fetch_and_persist(acl_url.format(node), target_file,
                                 default=default)
    except HTTPError as e:
        logger.exception(e.message)
        return default
def fetch_interfaces(node, default=None):
    """Fetch the interface list for *node*, persisting it to disk.

    Args:
        node: node name used to format the controller URL and the cache
            file name.
        default: value returned when the fetch fails with HTTPError.

    Returns:
        The fetched/persisted data, or *default* on HTTPError.

    Fix: unlike the sibling fetch_acl, *default* was not forwarded to
    fetch_and_persist; it now is, for consistent fallback behavior.
    """
    try:
        return fetch_and_persist(
            interfaces_url.format(node),
            os.path.join(interfaces_path, node + "_interfaces.json"),
            default=default)
    except HTTPError as e:
        logger.exception(e.message)
        return default