def read_recursively(self, parent, data, name):
    """Recursively convert JSON *data* into Node children of *parent*.

    Dicts and lists become container nodes whose members are visited in
    turn; strings and numbers become leaf nodes that carry their value in
    the "value" attribute.  Other JSON types are silently ignored.
    """
    if isinstance(data, dict):
        obj_node = Node(str(uuid.uuid4()), name, "JSON Object")
        parent.children.append(obj_node)
        # Visit every field of the object under the new container node.
        for key, value in data.items():
            self.read_recursively(obj_node, value, key)
    elif isinstance(data, list):
        arr_node = Node(str(uuid.uuid4()), name, "JSON Array")
        parent.children.append(arr_node)
        # Array members have no key of their own, so use a fixed label.
        for element in data:
            self.read_recursively(arr_node, element, "Array element")
    elif isinstance(data, str):
        # Simple type: copy the value into the attribute field.
        leaf = Node(str(uuid.uuid4()), name, "JSON String")
        leaf.attributes["value"] = data
        parent.children.append(leaf)
    elif isinstance(data, (int, float)):
        # For demo purposes only string and number leaves are implemented.
        leaf = Node(str(uuid.uuid4()), name, "JSON Number")
        leaf.attributes["value"] = data
        parent.children.append(leaf)
def create_graph(self, file_path):
    """Walk *file_path* and build a directory/file graph; return [root].

    Returns [None] when file_path is empty or is not an existing
    directory.  Traversal stops early once roughly NODE_LIMIT nodes
    have been created.
    """
    num = 0     # rough node counter used to enforce NODE_LIMIT
    nodes = {}  # maps a directory path -> its already-created Node
    if file_path == "" or not os.path.isdir(file_path):
        return [None]
    for path, dirs, files in os.walk(file_path):
        # Stop expanding the graph once the node budget is exhausted.
        if num > NODE_LIMIT:
            break
        num += 1
        # Reuse the node created when this path was seen as a child dir.
        if path in nodes:
            node = nodes[path]
        else:
            dir_name = os.path.basename(path)
            node = Node(path, "Directory", dir_name)
        self.set_dir_attributes(path, node)
        for file_name in files:
            num += 1
            full_path = os.path.join(path, file_name)
            new_node = Node(full_path, "File", file_name)
            node.neighbours.append(new_node)
            self.set_file_attributes(full_path, new_node)
        for dir_name in dirs:
            full_path = os.path.join(path, dir_name)
            new_node = Node(full_path, "Directory", dir_name)
            node.neighbours.append(new_node)
            # Remember the child so the walk can reuse it when it is
            # visited as a directory of its own later on.
            nodes[full_path] = new_node
        # os.walk (top-down default) yields the starting directory first;
        # that iteration's node is the root of the whole graph.
        if path == file_path:
            self.root = node
    return [self.root]
def start(self, node: models.Node):
    """Place a SONM market bid for *node* and record the bid on it."""
    benchmarks = Benchmarks(
        ram_size=1024 * 1024 * 500,  # 500MB
        storage_size=1024 * 1024 * 1024 * 10,  # 10GB
        cpu_cores=1,
        cpu_sysbench_multi=500,
        cpu_sysbench_single=500,
        net_download=1024 * 1024 * settings.NODE_DOWNLOAD,
        net_upload=1024 * 1024 * settings.NODE_UPLOAD,
    )
    network = NetworkParams(incoming=True, outbound=True, overlay=True)
    bid_params = BidParams(
        duration=0,
        price='0.01 USD/h',
        counterparty=settings.COUNTERPARTY,
        identity=sonm.consts.IDENTITY_ANONYMOUS,
        tag='sonm-cdn-node',
        benchmarks=benchmarks,
        network=network,
    )
    created_order = self.sonm.order.create(bid=bid_params)
    node.external_id = created_order['id']
    node.throughput = settings.NODE_UPLOAD
    node.save()
    # Track the bid in the local database as well.
    models.SonmBid.objects.create(node=node)
def handle_data(self, data):
    """Attach a text node for non-blank character data to the current tag.

    Bug fix: the previous code called ``data.replace("\\n", "")`` which
    removes the literal two-character sequence backslash+'n'.
    ``HTMLParser.handle_data`` delivers raw text containing real newline
    characters, so those are what must be dropped.
    """
    if self.current is None:
        return
    text = data.replace("\n", "").strip()
    # After strip(), a non-empty string cannot be pure whitespace, so a
    # simple truthiness check covers both original conditions.
    if text:
        text_node = Node(str(uuid.uuid4()), "text", "text")
        text_node.attributes["content"] = text
        self.current.neighbours.append(text_node)
def destroy(self, node: Node):
    """Mark *node* as stopped and destroy its backing DigitalOcean droplet.

    Raises Exception when no droplet matches the node's external id.
    """
    node.stopped = now()
    node.save()
    # Find the first droplet whose id matches the node's external id.
    match = next(
        (d for d in self.get_droplets() if str(d.id) == node.external_id),
        None,
    )
    if match is None:
        raise Exception('Not found node with external ID "%s"' % node.external_id)
    match.destroy()
def read(self, file_path):
    """Parse an HTML file into a Node tree and return it as [root].

    Raises TypeError for non-.html paths or documents without an <html>
    element, and FileNotFoundError when the file is missing.

    Fixes: removed the unreachable ``return`` statements that followed
    the ``raise`` statements (dead code) and replaced
    ``model_list.__len__()`` with the idiomatic ``len(model_list)``.
    """
    if ".html" not in file_path:
        raise TypeError("Must be .html file")
    try:
        with open(file_path, encoding='utf8') as fp:
            soup = BeautifulSoup(fp, "html.parser")
    except FileNotFoundError:
        raise FileNotFoundError("HTML file " + file_path + " not found!")
    if soup.html is None:
        raise TypeError("Invalid html document.")

    # Flatten the document, dropping bare newline strings.
    descendants_list = list(soup.html.descendants)
    my_list = [x for x in descendants_list if x != '\n']

    model_list = []
    html_node = Node(str(uuid.uuid4()), 'html', 'tag')
    model_list.append(html_node)
    for child in my_list:
        if str(child).strip() == "":
            # skip whitespace
            continue
        node_name = child.name if child.name is not None else child
        node_type = 'tag' if child.name is not None else 'text'
        node = Node(str(uuid.uuid4()), node_name, node_type)
        if isinstance(child, Tag):
            node.attributes = child.attrs
        else:
            node.name = str(child).strip()
        if node.name != ' ' and node.name is not None and node.name != "\n" and node.name != '\t':
            model_list.append(node)
            # Link the new node to the most recently seen node whose name
            # matches its parent tag, searching from the end of the list
            # and skipping the node that was just appended.
            for i, v in zip(range(len(model_list) - 1, -1, -1), reversed(model_list)):
                if i == len(model_list) - 1:
                    continue
                if v.name == child.parent.name:
                    model_list[i].children.append(node)
                    break
    return [model_list[0]]
def _register_node():
    """Create a simple, empty node to register content to.

    All rows should have a corresponding node created first so that a
    piece of content can be attached to it.  Anonymous visitors get
    user_id 0.

    Fix: the original had two adjacent triple-quoted strings; only the
    first was the docstring, the second was a no-op statement.  They are
    now merged into a single docstring.
    """
    user_id = current_user._id if current_user.is_authenticated else 0
    node = Node(_version=1, _timestamp=datetime.utcnow(), user_id=user_id)
    db.session.add(node)
    db.session.commit()
    return node
def create(self):
    """Build and persist a Node from the synchronizer args, then post-process.

    Raises Exception when the required "site_deployment" arg is missing.
    """
    xos_args = self.get_xos_args()
    if not xos_args.get("site_deployment"):
        raise Exception("Deployment is a required field of Node")
    node = Node(**xos_args)
    node.caller = self.user
    node.save()
    self.postprocess(node)
    details = (str(node), str(node.site_deployment.site), str(node.site_deployment.deployment))
    self.info("Created Node '%s' on Site '%s' Deployment '%s'" % details)
def handle_starttag(self, tag, attrs):
    """Create a Node for an opening tag and hook it into the tree.

    The first tag ever seen becomes the root.  Later tags are attached
    as neighbours of the current node; non-void tags also become the
    new current node and are pushed on the open-element stack.
    """
    node = Node(str(uuid.uuid4()), "tag", tag)
    for key, value in attrs:
        node.attributes[key] = value
    if self.root is None:
        # First element: it is both the root and the current node.
        self.root = node
        self.current = node
        self.stack.append(node)
    else:
        self.current.neighbours.append(node)
        # Void elements cannot contain children, so they never become
        # the current node and are never pushed on the stack.
        if tag not in self.void_elements:
            self.current = node
            self.stack.append(node)
def create(self):
    """Build and persist a Node from the synchronizer args, then post-process.

    Raises Exception when either required arg ("site" or
    "site_deployment") is missing.
    """
    xos_args = self.get_xos_args()
    if not xos_args.get("site"):
        raise Exception("Site is a required field of Node")
    if not xos_args.get("site_deployment"):
        raise Exception("Deployment is a required field of Node")
    node = Node(**xos_args)
    node.caller = self.user
    node.save()
    self.postprocess(node)
    details = (str(node), str(node.site), str(node.site_deployment.deployment))
    self.info("Created Node '%s' on Site '%s' Deployment '%s'" % details)
def read(self, file_path):
    """Load JSON from a URL or a local path and build a Node tree.

    Returns a one-element list containing the root node.
    Raises FileNotFoundError when the source cannot be read or parsed.

    Fix: removed the unreachable ``return [None]`` that followed the
    ``raise`` statement (dead code).
    """
    # Remote sources are detected heuristically from the path; anything
    # else is treated as a local file.
    try:
        if "https://" in file_path or "http://" in file_path or ".com" in file_path:
            response = urllib.request.urlopen(file_path)
            data = json.loads(response.read().decode('utf-8'))
        else:
            with open(file_path, encoding="utf8") as data_file:
                data = json.load(data_file)
    except Exception:
        traceback.print_exc()
        raise FileNotFoundError("JSON file " + file_path + " not found!")

    # NOTE(review): the sibling read_recursively builds Node(id, name, type),
    # but the root here is Node(id, "JSON Object", "Root") — the last two
    # arguments may be swapped.  Left unchanged to preserve behavior;
    # confirm against Node.__init__.
    root_node = Node(str(uuid.uuid4()), "JSON Object", "Root")
    if isinstance(data, dict):
        # Traverse the root object's fields with the root as top parent.
        for key in data.keys():
            self.read_recursively(root_node, data[key], key)
    else:
        # Non-object top level (array, scalar, possibly empty document).
        self.read_recursively(root_node, data, "Root")
    return [root_node]
def check_node(self, node: models.Node):
    """Poll the node's /bytes_sent endpoint and roll its traffic counters.

    On success the previous reading is shifted into the prev_* fields
    and the fresh byte count is stored; failures are printed, never
    raised.
    """
    node_address = node.get_address()
    try:
        response = requests.get(f'http://{node_address}/bytes_sent')
        if response.status_code >= 300:
            print(f'Node check load error! '
                  f'NAME: {node.id} '
                  f'ADDRESS: {node_address}.'
                  f'CODE: {response.status_code}'
                  f'RESPONSE: {response.content}\n')
            return
        # Shift the previous reading before storing the fresh one so
        # get_load() can compute a rate from the two samples.
        node.prev_sent_bytes = node.last_sent_bytes
        node.prev_sent_bytes_dt = node.last_sent_bytes_dt
        node.last_sent_bytes = int(response.content)
        node.last_sent_bytes_dt = now()
        node.save()
        if self.verbosity:
            print(f'Node {node.name}: {node.get_load()} Mb/sec')
    except Exception as e:
        print(f'Node check load error! '
              f'NAME: {node.id} '
              f'ADDRESS: {node_address}.'
              f'EXCEPTION: \n{e}.\n')
def start(self, node: Node):
    """Provision a DigitalOcean droplet for *node* and record its id."""
    droplet = digitalocean.Droplet(
        token=settings.DO_TOKEN,
        name=node.name,
        # Pick one concrete DO region for the node's logical region.
        region=random.choice(REGIONS_MAP[node.region]),
        image=self.get_image(),
        size_slug='s-1vcpu-1gb',
        ssh_keys=self.get_ssh_keys(),
        tags=[TAG_NAME],
        backups=False,
    )
    droplet.create()
    node.external_id = droplet.id
    # let's consider that maximum throughput 100 Mb/sec
    node.throughput = 100
    node.save()
def __iter__(self):
    """Iterate the select-tuples of every node object, sorted by label.

    Fix: the manual nested append loop is replaced by the idiomatic
    list comprehension; behavior is unchanged.
    """
    choices = [
        node_object.select_tuple
        for model in Node.all_child_classes()
        for node_object in model.objects.all()
    ]
    # Sort on the human-readable second element of each tuple.
    return iter(sorted(choices, key=lambda choice: choice[1]))
def init_regions(self) -> List[Node]:
    """Network initialization: start one node in every known region."""
    started = []
    for region in Node.REGIONS:
        # Each region's first node is named "<region>1".
        regional_node = Node(name='%s1' % region, region=region)
        self.start(regional_node)
        started.append(regional_node)
    return started
def create_graph_recursively(self, parent, data, name):
    """Recursively mirror JSON *data* as Node neighbours under *parent*.

    Dicts and lists become container nodes whose members are visited in
    turn; strings and numbers become leaves with the value stored in
    the "value" attribute.  Other JSON types are ignored.
    """
    if isinstance(data, dict):
        container = Node(str(uuid.uuid4()), "JSON Object", name)
        parent.neighbours.append(container)
        for key, value in data.items():
            self.create_graph_recursively(container, value, key)
    elif isinstance(data, list):
        container = Node(str(uuid.uuid4()), "JSON Array", name)
        parent.neighbours.append(container)
        # Array members have no key of their own, so use a fixed label.
        for element in data:
            self.create_graph_recursively(container, element, "Array element")
    elif isinstance(data, str):
        leaf = Node(str(uuid.uuid4()), "JSON String", name)
        leaf.attributes["value"] = data
        parent.neighbours.append(leaf)
    elif isinstance(data, (int, float)):
        leaf = Node(str(uuid.uuid4()), "JSON Number", name)
        leaf.attributes["value"] = data
        parent.neighbours.append(leaf)
def __xos_save_base(self, *args, **kwds):
    """
    Hack to allow the creation of NodeLabel objects from outside core
    until the ORM is extended with support for ManyToMany relations.
    """
    # Names of the form "<label>###<id>,<id>,..." encode the node ids
    # to associate; anything else is left untouched.
    if not self.name or '###' not in self.name:
        return
    from core.models import Node
    self.name, node_id_str = self.name.split('###')
    for node_id in map(int, node_id_str.split(',')):
        self.node.add(Node.get(node_id))
def save(self, *args, **kwargs):
    """
    Hack to allow the creation of NodeLabel objects from outside core
    until the ORM is extended with support for ManyToMany relations.
    """
    # Names of the form "<label>###<id>,<id>,..." encode the node ids
    # to associate before the normal save runs.
    if self.name and "###" in self.name:
        from core.models import Node
        self.name, node_id_str = self.name.split("###")
        for node_id in map(int, node_id_str.split(",")):
            self.node.add(Node.get(node_id))
    super(NodeLabel, self).save(*args, **kwargs)
def create_graph(self, file_path):
    """Load JSON from a URL or a local file and return [root] of the graph.

    Returns [None] (after printing the traceback) when the source
    cannot be read or parsed.
    """
    looks_remote = ("http://" in file_path or "https://" in file_path
                    or ".com" in file_path)
    try:
        if looks_remote:
            raw = urllib.request.urlopen(file_path).read().decode('utf-8')
            data = json.loads(raw)
        else:
            with open(file_path, encoding="utf8") as data_file:
                data = json.load(data_file)
    except Exception:
        traceback.print_exc()
        return [None]
    root_node = Node(str(uuid.uuid4()), "JSON Object", "Root")
    if isinstance(data, dict):
        for key, value in data.items():
            self.create_graph_recursively(root_node, value, key)
    else:
        # Non-object top level (array or scalar).
        self.create_graph_recursively(root_node, data, "Unnamed")
    return [root_node]
def node_classifier(self, request):
    """Serve a YAML classifier attachment for the node matching the query.

    Unknown certnames answer with an empty dummy node; a bad master id
    or invalid parameters produce a 400 JSON error response.
    """
    certname = request.query_params.get("certname", "")
    master_id = request.query_params.get("master_id", "")
    error = ""
    try:
        node = Node.objects.get(certname=certname, master_zone__id=master_id)
    except Node.DoesNotExist:
        # Instantiate a dummy node to produce an empty answer
        node = Node()
    except MasterZone.DoesNotExist:
        error = 'Master with id "%s" not found' % master_id
    except ValidationError:
        error = "Invalid certname or master_id parameters"
    if error:
        return JsonResponse({"error": error}, status=status.HTTP_400_BAD_REQUEST)
    serializer = self.get_serializer(node)
    response = Response(serializer.data, content_type="text/yaml")
    response["Content-Disposition"] = (
        'attachment; filename="%s_classifier.yml"' % node.certname
    )
    return response
def handle_startendtag(self, tag, attrs):
    """Attach a self-closing tag as a leaf neighbour of the current node."""
    leaf = Node(str(uuid.uuid4()), "tag", tag)
    for key, value in attrs:
        leaf.attributes[key] = value
    self.current.neighbours.append(leaf)
def __init__(self):
    """Initialize empty collections and a fresh root graph node."""
    self.links = []       # accumulated links (semantics set by the crawler)
    self.found = []       # accumulated matches
    self.row_chars = 30   # characters per rendered row
    self.graph = Node()   # root node of the graph being built
def get_load(self, instance: models.Node):
    """Return the node's current load by delegating to instance.get_load().

    NOTE(review): the get_<field>(self, instance) shape suggests a DRF
    SerializerMethodField accessor — confirm against the serializer class.
    """
    return instance.get_load()