def main():
    """Refresh the on-disk word and count dictionaries from newly gathered documents."""
    sentences = get_strings(gather_data())  # every sentence (string) from the raw data
    # "NYT/count.dict" carries a special key "totNumDocs": total documents accounted for.
    counts = ExternalDict("NYT/count.dict")
    words = ExternalDict("NYT/word.dict")
    words = add_new_words(sentences, words)
    counts = update_count_dict(sentences, counts, words)
    # Persist both stores once the update pass is complete.
    for store in (words, counts):
        store.save()
def multi_request(self, providers):
    """Gather data for several providers in one call.

    Provider names containing '/' are skipped (path-traversal guard), and
    providers whose lookup raises are silently omitted — the response is
    deliberately best-effort.

    Returns the zlib-compressed JSON payload with the 2-byte header and
    4-byte checksum stripped (i.e. a raw DEFLATE stream).
    """
    ret = {}
    for provider in providers:
        if '/' in provider:
            # Reject names that could escape the provider directory.
            continue
        try:
            ret[provider] = gather_data(self._get_provider_dir(provider), env)
        except Exception:
            # Best effort: a failing provider is simply left out.
            # Narrowed from a bare `except:` so KeyboardInterrupt and
            # SystemExit still propagate instead of being swallowed.
            pass
    return compress(str.encode(json.dumps(ret)))[2:-4]
def multi_request(self, providers):
    """Gather data for several providers in one call.

    Provider names containing '/' are skipped (path-traversal guard), and
    providers whose lookup raises are silently omitted — the response is
    deliberately best-effort.

    Returns the zlib-compressed JSON payload with the 2-byte header and
    4-byte checksum stripped (i.e. a raw DEFLATE stream).
    """
    ret = {}
    for provider in providers:
        if '/' in provider:
            # Reject names that could escape the provider directory.
            continue
        try:
            ret[provider] = gather_data(self._get_provider_dir(provider), env)
        except Exception:
            # Best effort: a failing provider is simply left out.
            # Narrowed from a bare `except:` so KeyboardInterrupt and
            # SystemExit still propagate instead of being swallowed.
            pass
    return compress(str.encode(json.dumps(ret)))[2:-4]
def handle(self):
    """Answer one UDP datagram: a multi-provider "GET ..." or a single provider name."""
    payload, sock = self.request  # (datagram bytes, sending socket)
    data = payload.decode('UTF-8').strip()
    if data.startswith("GET "):
        # Batched request: "GET name1 name2 ..." -> pre-compressed blob.
        response = self.multi_request(data.split(" ")[1:])
    elif '/' in data:
        # Names containing '/' are rejected (path-traversal guard).
        response = None
    else:
        answer = gather_data(self._get_provider_dir(data), env)
        response = str.encode(json.dumps(answer)) if answer else None
    if response:
        sock.sendto(response, self.client_address)
def handle(self):
    """Answer one UDP datagram: a multi-provider "GET ..." or a single provider name."""
    payload, sock = self.request  # (datagram bytes, sending socket)
    data = payload.decode('UTF-8').strip()
    if data.startswith("GET "):
        # Batched request: "GET name1 name2 ..." -> pre-compressed blob.
        response = self.multi_request(data.split(" ")[1:])
    elif '/' in data:
        # Names containing '/' are rejected (path-traversal guard).
        response = None
    else:
        answer = gather_data(self._get_provider_dir(data), env)
        response = str.encode(json.dumps(answer)) if answer else None
    if response:
        sock.sendto(response, self.client_address)
def main():
    # Emit a Graphviz "digraph" of the AWS topology recorded in data.json:
    # each subnet becomes a cluster containing its instances, ELBs and RDS
    # nodes, with edges out to network ACLs and security groups.
    # NOTE(review): Python 2 print statements — this chunk predates the
    # Python 3 scripts elsewhere in this source.
    gather_data()  # side effect: (re)writes data.json before we read it
    with open('data.json', 'r') as jsonfile:
        aws = json.load(jsonfile)
    print "digraph G {"
    #print "overlap = false;"
    #print "splines = true;"
    print 'node [shape="plaintext"]'
    # Global layout tuning for neato-style engines.
    print """ splines=true; sep="+25,25"; overlap=scalexy; nodesep=0.6; node [fontsize=11]; """
    already_printed = set([])  # node ids already declared, to avoid duplicates
    sgs = []  # security groups seen on instances; declared once after all subnets
    for sn_id, sn in aws['subnets'].items():
        print "//", sn_id
        # The "cluster_" prefix makes Graphviz draw the subgraph as a box.
        sn_cluster = "cluster_%s" % sn_id
        print 'subgraph "%s" {' % sn_cluster
        print 'label = "%s"' % sn_id
        # Declare the subnet's member nodes inside the cluster.
        for instance_id, instance in sn['instances'].items():
            if instance_id in already_printed:
                continue
            print '"%s" [label="%s"]' % (instance_id, instance['tag_Name'])
            already_printed.add(instance_id)
        for elb_id, elb in sn['elb'].items():
            if elb_id in already_printed:
                continue
            print '"%s"' % elb_id
            already_printed.add(elb_id)
        for rds_id, rds in sn['rds'].items():
            if rds_id in already_printed:
                continue
            print '"%s"' % rds_id
            already_printed.add(rds_id)
        print "}"
        # Network ACLs live outside the cluster, linked to the whole subnet box.
        for acl_id, acl in sn['nacl'].items():
            print '"%s" [label=<%s>];' % (acl_id, acl_rules(acl))
            link(sn_cluster, acl_id)
        # Edges from each member to its security groups (and ELB -> instance).
        for instance_id, instance in sn['instances'].items():
            for sg_id, sg in instance['SecurityGroups'].items():
                link(instance_id, sg_id)
                sgs.append(sg)
        for elb_id, elb in sn['elb'].items():
            for sg_id in elb['SecurityGroups'].keys():
                link(elb_id, sg_id)
            for instance_id, instance in elb['Instances'].items():
                link(elb_id, instance_id)
        for rds_id, rds in sn['rds'].items():
            for sg_id in rds['SecurityGroups'].keys():
                link(rds_id, sg_id)
    # Security-group nodes (HTML-like labels) are declared once, after all
    # subnet clusters — presumably sg ids may repeat in sgs; verify against
    # the data if duplicate node declarations matter.
    for sg in sgs:
        sg_id = sg['GroupId']
        print '"%s" [label=<%s>];' % (sg_id, sg_rules(sg))
    print "}"
#!/usr/bin/env python3
"""CLI: gather node data for a structure directory and dump it as JSON."""
import json
import argparse

from gather import gather_data

parser = argparse.ArgumentParser()
parser.add_argument('-d', '--directory', action='store',
                    help='structure directory', required=True)
parser.add_argument('-b', '--batman', action='store',
                    help='batman-adv device', default='bat0')
args = parser.parse_args()

# Build the environment for the gatherer, then print the result as JSON.
environment = {'batadv_dev': args.batman}
print(json.dumps(gather_data(args.directory, environment)))
def main():
    """Refresh data.json via gather_data(), then run the layout checks on it."""
    gather_data()  # side effect: (re)writes data.json
    with open('data.json', 'r') as jsonfile:
        test_layout(json.load(jsonfile))