Example no. 1
def explain(filename=None, plan=""):
    jsonParser = JsonParser(filename)
    try:
        root = jsonParser.get_tree(plan)
    except Exception as err:
        print(err)
        return "The query plan you entered is not valid!"
    return get_explain_string(root)
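
A minimal usage sketch, assuming explain() and JsonParser behave as above; the file name and plan text are placeholders.

message = explain(filename="plan.json", plan='{"Plan": {"Node Type": "Seq Scan"}}')
print(message)  # either the explain string or the validation error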
Example no. 2
def part_2():
    print("Verifying {} exists in {}".format(TEST_VALUE, TEST_FILE))
    with open("einat_world_bank.json", "rb") as json_file:
        for line in json_file.readlines():
            json = JsonParser(line)
            if json.has_value(TEST_VALUE):
                return
        raise Exception("Can't find '{}' in JSON".format(TEST_VALUE))
Example no. 3
    def __init__(self, uname, passwd, cpi_ipv4_addr, log):

        self.username = uname
        self.password = passwd
        self.cpi_ipv4_address = cpi_ipv4_addr
        self.logger = log
        self.parse_json = JsonParser()

        urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
Example no. 4
    def test_parse(m):
        c1 = json.dumps(m)
        d0 = json.loads(c1)

        d1 = JsonParser.loads(c1)

        c2 = json.dumps(m, indent=4)
        d2 = JsonParser.loads(c2)

        assert d0 == d1
        assert d0 == d2
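
A hedged usage example, assuming test_parse is called directly with a plain Python object:

test_parse({"a": [1, 2, 3], "b": {"nested": True}, "c": None})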
Example no. 5
    def apply_opt(self):
        # dataset
        if self._opt.dataset == "MNIST":
            train_data, test_data = utils.get_mnist()
            self._train_set = torch.utils.data.DataLoader(
                train_data,
                batch_size=self._opt.batch_size,
                shuffle=True,
                num_workers=self._opt.num_workers)
            self._test_set = torch.utils.data.DataLoader(
                test_data,
                batch_size=self._opt.batch_size,
                shuffle=True,
                num_workers=self._opt.num_workers)
            self._initialize_model(dims=self._opt.layer_dims)
            print("MNIST experiment")

        elif self._opt.dataset == "IBNet":
            train_data = utils.CustomDataset('2017_12_21_16_51_3_275766',
                                             train=True)
            test_data = utils.CustomDataset('2017_12_21_16_51_3_275766',
                                            train=False)
            self._train_set = torch.utils.data.DataLoader(
                train_data,
                batch_size=self._opt.batch_size,
                shuffle=True,
                num_workers=self._opt.num_workers)
            self._test_set = torch.utils.data.DataLoader(
                test_data,
                batch_size=self._opt.batch_size,
                shuffle=True,
                num_workers=self._opt.num_workers)
            self._initialize_model(dims=self._opt.layer_dims)
            print("IBnet experiment")
        else:
            raise RuntimeError(
                'Dataset {name} is not available; please use an existing dataset'
                .format(name=self._opt.dataset))

        # construct saving directory
        save_root_dir = self._opt.save_root_dir
        dataset = self._opt.dataset
        time = datetime.datetime.today().strftime('%m_%d_%H_%M')
        model = ''.join(
            list(map(lambda x: str(x) + '_', self._model.layer_dims)))
        folder_name = dataset + '_' + self._opt.experiment_name + '_Time_' + time + '_Model_' + model
        self._path_to_dir = save_root_dir + '/' + folder_name + '/'
        print(self._path_to_dir)
        if not os.path.exists(self._path_to_dir):
            os.makedirs(self._path_to_dir)

        self._logger = Logger(opt=self._opt, plot_name=folder_name)
        self._json = JsonParser()
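
The directory name above is built by string concatenation; a sketch of the same construction with os.path.join (placeholder values, assuming save_root_dir has no trailing slash):

import datetime
import os

time_tag = datetime.datetime.today().strftime('%m_%d_%H_%M')
model_tag = ''.join('{}_'.format(d) for d in [12, 12, 10])  # placeholder layer dims
folder_name = '{}_{}_Time_{}_Model_{}'.format('IBNet', 'test', time_tag, model_tag)
path_to_dir = os.path.join('save_root', folder_name)  # placeholder root
os.makedirs(path_to_dir, exist_ok=True)  # replaces the explicit exists() check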
Example no. 6
    def __init__(self, config, log):

        self.username = config.username
        self.password = config.password
        self.cpi_ipv4_address = config.cpi_ipv4_address
        # self.username = uname
        # self.password = passwd
        # self.cpi_ipv4_address = cpi_ipv4_addr
        self.logger = log
        self.parse_json = JsonParser()
        self.parse_var = VarParser()

        urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
Example no. 7
class TestApi:
    def __init__(self):

        self.parse_json = JsonParser()

    def test_method(self, values_dict, cpi_username, cpi_password,
                    cpi_ipv4_address, logger):

        api_call = Client(cpi_username, cpi_password, cpi_ipv4_address, logger)
        sw_api_call = Switch(cpi_username, cpi_password, cpi_ipv4_address,
                             logger)

        dev_id = sw_api_call.id_by_ip("172.30.28.246")
        result = sw_api_call.json_detailed(dev_id)

        key_list = [
            'queryResponse', 'entity', 0, 'inventoryDetailsDTO',
            'ethernetInterfaces', 'ethernetInterface'
        ]
        interface_list_of_dicts = self.parse_json.value(
            result, key_list, logger)

        for interface_dict in interface_list_of_dicts:
            for key in interface_dict:  # iterating over dict's return keys only
                if interface_dict[key] == 'GigabitEthernet1/0/1':
                    print(json.dumps(interface_dict, indent=4))
Example no. 8
    def __init__(self):

        self.parse_json = JsonParser()
        self.sw_list = []
        self.filename = str(sys.argv[2]) + str(
            datetime.now().strftime("%d-%m-%Y_%H%M%S"))
        self.cdp_neighbors = []
Example no. 9
def postJsonHandler():
    db.create_all()
    content = request.get_json()
    with open('temp.json', 'w') as f:
        json.dump(content, f)
    json_obj = JsonParser("temp.json")
    username = content['username']
    email = content['email']
    
    new_user = User(username, email)

    db.session.add(new_user)
    db.session.commit()
    json_obj.validate_json_data_type(content)
    json_data = json_obj.parse_json_data()

    return json_data
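
A minimal sketch of the Flask wiring assumed around this handler (app setup and route path are hypothetical):

from flask import Flask, request

app = Flask(__name__)

@app.route('/users', methods=['POST'])  # hypothetical route
def post_json_handler():
    content = request.get_json()
    return {'username': content['username']}, 201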
Example no. 10
    def __init__(self):
        self.progress_bar = 0
        self._device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu") # device setup
        load_config = JsonParser() # training args
        self.model_name = 'IBNet_test_save_Time_05_27_20_09_Model_12_12_10_7_5_4_3_2_2_'
        self.path = os.path.join('./results', self.model_name)  # info plane dir
        self._opt = load_config.read_json_as_argparse(self.path) # load training args

        # force the batch size to 1 for calculation convenience
        self._opt.batch_size = 1
        # dataset
        if self._opt.dataset == "MNIST":
            train_data, test_data = utils.get_mnist()

            if not self._opt.full_mi:
                # self._train_set = torch.utils.data.DataLoader(train_data, batch_size=1, shuffle=False, num_workers=0)
                self._test_set = torch.utils.data.DataLoader(test_data, batch_size=1, shuffle=False, num_workers=0)
            else:
                dataset = torch.utils.data.ConcatDataset([train_data, test_data])
                self._test_set = torch.utils.data.DataLoader(dataset, batch_size=1, shuffle=False, num_workers=0)
            print("MNIST experiment")

        elif self._opt.dataset == "IBNet":
            train_data = utils.CustomDataset('2017_12_21_16_51_3_275766', train=True)
            test_data = utils.CustomDataset('2017_12_21_16_51_3_275766', train=False)
            # self._train_set = torch.utils.data.DataLoader(train_data, batch_size=1, shuffle=False, num_workers=0)
            if not self._opt.full_mi:
                self._test_set = torch.utils.data.DataLoader(test_data, batch_size=1, shuffle=False, num_workers=0)
            else:
                dataset = torch.utils.data.ConcatDataset([train_data, test_data])
                self._test_set = torch.utils.data.DataLoader(dataset, batch_size=1, shuffle=False, num_workers=0)
            print("IBnet experiment")
        else:
            raise RuntimeError('Dataset {name} is not available; please use an existing dataset'.format(name=self._opt.dataset))

        # get model
        self._model = Model(activation=self._opt.activation, dims=self._opt.layer_dims, train=False)
        
        # get measure
        # self.measure = measure.kde()
        self.measure = measure.EVKL() # our new measure
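
read_json_as_argparse is project-specific; a plausible stdlib sketch of what it does (the stored file name is an assumption):

import argparse
import json
import os

def read_json_as_argparse(path, filename='opt.json'):
    """Load a saved options JSON back into an argparse.Namespace."""
    with open(os.path.join(path, filename)) as f:
        return argparse.Namespace(**json.load(f))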
Example no. 11
 def test_dumps(self):
     print('{line}TESTING dumps{line}'.format(line="-" * 50))
     for k, v in inputo.items():
         try:
             val = JsonParser.dumps(v)
             if k in specific_dumps_values.keys():
                 # print(val.__repr__())
                 self.assertEqual(specific_dumps_values[k], val)
             else:
                 self.assertEqual(inputs[k], val)
             print(
                 f'SUCCESS Tested value - {v} RESULT VALUE - {val.__repr__()}'
             )
         except SyntaxError as e:
             print(f'FAILED with value - {v}', f'WITH ERROR - {e.msg}')
Example no. 12
 def test_loads(self):
     print('{line}TESTING loads{line}'.format(line="-" * 50))
     argv.append('-s')
     for k, v in inputs.items():
         if len(argv) == 2:
             argv.append(v)
         else:
             argv[2] = v
         try:
             val = JsonParser.loads(v)
             if k in specific_loads_values.keys():
                 self.assertEqual(specific_loads_values[k], val)
             else:
                 self.assertEqual(inputo[k], val)
             print(f'SUCCESS Tested value - {v}')
         except SyntaxError as e:
             print(f'FAILED with value - {v}', f'WITH ERROR - {e.msg}')
Example no. 13
    def post():
        content = request.get_json()
        with open('temp.json', 'w') as f:
            json.dump(content, f)
        json_obj = JsonParser("temp.json")

        json_obj.validate_json_data_type(content)
        json_obj.parse_json_data()

        ml_uuid = uuidutils.generate_uuid()
        url = content['DataURL']
        ml_lib = content['variables']['mlLib']
        is_form_cluster = content['variables']['isFormCluster']
        storage_name = content['variables']['storageAccountName']
        storage_type = content['variables']['storageAccountType']

        api.mldata_create(ml_uuid, url, ml_lib, is_form_cluster, storage_name,
                          storage_type)

        resources = content['resource']
        for res in resources:
            res_uuid = uuidutils.generate_uuid()
            resource_type = res.get('InfrastrctureType')
            provider = res.get('provider')
            endpoint = None  # avoid an unbound name if neither endpoint key is present
            if res.get('APIEndpoint'):
                endpoint = res.get('APIEndpoint')
            if res.get('PublicEndpoint'):
                endpoint = res.get('PublicEndpoint')
            username = res.get('username')
            password = res.get('password')
            token = res.get('token')
            availability_zone = res.get('availabilityZone')
            region = res.get('Region')

            api.resource_create(res_uuid, resource_type, provider, endpoint,
                                username, password, token, availability_zone,
                                region)

            if res['NodeList']:
                for node in res['NodeList']:
                    uuid = uuidutils.generate_uuid()
                    resource_id = res_uuid
                    node_ip = node.get('NodeIP')
                    username = node.get('Username')
                    password = node.get('Password')
                    api.node_create(uuid, resource_id, node_ip, username,
                                    password)

        return json.dumps({'ContentType': 'application/json'}), 201
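
uuidutils.generate_uuid here presumably comes from oslo.utils; a stdlib equivalent for reference:

import uuid

def generate_uuid():
    return str(uuid.uuid4())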
Example no. 14
 def __init__(self):
     self.find = Find()
     self.parse_json = JsonParser()
Example no. 15
class Tools:

    def __init__(self):
        self.find = Find()
        self.parse_json = JsonParser()

    def checkAlarms(self, args, config, logger):
        # email_string = ""
        num_successful = 0
        num_failed = 0
        alarm_api_call = Alarms(config, logger)
        device_api_call = AccessPoint(config, logger)
        crit_list = alarm_api_call.get_critical_alarm_ids()

        for alarm_id in crit_list:
            time.sleep(0.3)
            mac = alarm_api_call.devname_by_alarm_id(alarm_id)
            dev_id = device_api_call.id_by_alarm_mac_detailed(mac)
            dev_result = device_api_call.json_detailed(dev_id)

            #logger.info("------- Matching Switch #{}--------".format(dev_id_list.index(curr_id) + 1))
            dev_dict = {}
            key_list = ['queryResponse', 'entity', 0, 'accessPointDetailsDTO', 'name']
            dev_dict['name'] = self.parse_json.value(dev_result, key_list, logger)
            key_list = ['queryResponse', 'entity', 0, 'accessPointDetailsDTO', 'model']
            dev_dict['model'] = self.parse_json.value(dev_result, key_list, logger)
            key_list = ['queryResponse', 'entity', 0, 'accessPointDetailsDTO', 'reachabilityStatus']
            dev_dict['status'] = self.parse_json.value(dev_result, key_list, logger)
            key_list = ['queryResponse', 'entity', 0, 'accessPointDetailsDTO', 'cdpNeighbors', 'cdpNeighbor', 0,
                        'neighborName']
            dev_dict['nb_name'] = self.parse_json.value(dev_result, key_list, logger)
            key_list = ['queryResponse', 'entity', 0, 'accessPointDetailsDTO', 'cdpNeighbors', 'cdpNeighbor', 0,
                        'neighborPort']
            dev_dict['nb_port'] = self.parse_json.value(dev_result, key_list, logger)
            key_list = ['queryResponse', 'entity', 0, 'accessPointDetailsDTO', 'cdpNeighbors', 'cdpNeighbor', 0,
                        'neighborIpAddress']
            dev_dict['nb_ip'] = self.parse_json.value(dev_result, key_list, logger)
            logger.info("Processing Alarm {} / {} ID: {} ".format(crit_list.index(alarm_id)+1,len(crit_list),alarm_id))
            logger.info("AP: {} Model:{} Status:{}".format(dev_dict['name'], dev_dict['model'], dev_dict['status']))
            logger.info("Neighbor:{}({}):{}".format(dev_dict['nb_name'], dev_dict['nb_ip'], dev_dict['nb_port']))
            time.sleep(1)  # don't test for sync status too soon (CPI delay and all that)
            if args.toggle:
                ret = self.ap_reload(args, dev_dict["nb_ip"], dev_dict["nb_port"])
                success_string = "Shut/No Shut on {}({}): {}".format(dev_dict['nb_name'], dev_dict['nb_ip'],
                                                                     dev_dict['nb_port'])

                if ret.returncode == 0:
                    success_string += " Successful"
                    num_successful += 1
                    alarm_api_call.acknowledge_by_alarm_id(alarm_id) #acknowledge alarm
                else:
                    success_string += " FAILED"
                    num_failed += 1
                logger.info(success_string)
                # email_string += success_string + "\n"
        #logger.debug(email_string)

        logger.info("Total {} Alarms".format(len(crit_list)))
        logger.info("{} ports successfully reloaded ".format(num_successful))
        logger.info("{} ports failed to reload".format(num_failed))

    def ap_reload(self, args, ip, port):
        time.sleep(1)  # don't test for sync status too soon (CPI delay and all that)

        arg_run_list = "dnmt tools ap_poke {} {} ".format(ip, port)
        if args.batch:
            arg_run_list += "-s"

        ret_val = subprocess.run(arg_run_list, shell=True)  # <TODO> EXTERNAL CALL to DNMT

        return ret_val


    def un_ack_alarms(self, args, config, logger):

        alarm_api_call = Alarms(config, logger)
        crit_list = alarm_api_call.get_acked_critical_alarm_ids()

        for alarm_id in crit_list:
            alarm_api_call.unacknowledge_by_alarm_id(alarm_id)  # unacknowledge alarm


    def slow_aps(self, args, config, logger):

        api_call = AccessPoint(config, logger)
        device_api_call = AccessPoint(config, logger)
        dev_id_list = device_api_call.get_slow_ports()

        if len(dev_id_list) < 1:
            sys.exit(1)
        for curr_id in dev_id_list:
            result = api_call.json_detailed(curr_id)
            logger.info("------- Occurrence #{}--------\n".format(dev_id_list.index(curr_id) + 1))

            key_list = ['queryResponse', 'entity', 0, 'accessPointDetailsDTO', 'cdpNeighbors', 'cdpNeighbor',0,'neighborName']
            neigh_name = self.parse_json.value(result, key_list, logger)
            key_list = ['queryResponse', 'entity', 0, 'accessPointDetailsDTO', 'cdpNeighbors', 'cdpNeighbor',0,'neighborIpAddress']
            neigh_ip = self.parse_json.value(result, key_list, logger)
            key_list = ['queryResponse', 'entity', 0, 'accessPointDetailsDTO', 'cdpNeighbors', 'cdpNeighbor',0,'neighborPort']
            interface = self.parse_json.value(result, key_list, logger)
            key_list = ['queryResponse', 'entity', 0, 'accessPointDetailsDTO', 'cdpNeighbors', 'cdpNeighbor',0,'interfaceSpeed']
            speed = self.parse_json.value(result, key_list, logger)
            key_list = ['queryResponse', 'entity', 0, 'accessPointDetailsDTO', 'name']
            name = self.parse_json.value(result, key_list, logger)
            key_list = ['queryResponse', 'entity', 0, 'accessPointDetailsDTO', 'model']
            model = self.parse_json.value(result, key_list, logger)
            key_list = ['queryResponse', 'entity', 0, 'accessPointDetailsDTO', 'locationHierarchy']
            map_location = self.parse_json.value(result, key_list, logger)
            #
            logger.info("ap name :{}".format(name))
            logger.info("ap model :{}".format(model))
            logger.info("switch name :{}".format(neigh_name))
            logger.info("switch ip   :{}".format(neigh_ip))
            logger.info("interface   :{}".format(interface))
            logger.info("speed :{}".format(speed))
            logger.info("map location :{}".format(map_location))

            if args.toggle:
                ret = self.ap_reload(args, neigh_ip, interface)
                success_string = "Shut/No Shut on {}({}): {}".format(neigh_name, neigh_ip, interface)

                if ret.returncode == 0:
                    success_string += " Successful"
                else:
                    success_string += " FAILED"
                logger.info(success_string)

                #<TODO move this and previous into a function to reuse, add a sync before>
                # time.sleep(60)  # Give the AP some time to start up
                # result = api_call.json_detailed(curr_id)
                # logger.info("------- Occurrence #{} POST RELOAD--------\n".format(dev_id_list.index(curr_id) + 1))
                #
                # key_list = ['queryResponse', 'entity', 0, 'accessPointDetailsDTO', 'cdpNeighbors', 'cdpNeighbor', 0,
                #             'neighborName']
                # neigh_name = self.parse_json.value(result, key_list, logger)
                # key_list = ['queryResponse', 'entity', 0, 'accessPointDetailsDTO', 'cdpNeighbors', 'cdpNeighbor', 0,
                #             'neighborIpAddress']
                # neigh_ip = self.parse_json.value(result, key_list, logger)
                # key_list = ['queryResponse', 'entity', 0, 'accessPointDetailsDTO', 'cdpNeighbors', 'cdpNeighbor', 0,
                #             'neighborPort']
                # interface = self.parse_json.value(result, key_list, logger)
                # key_list = ['queryResponse', 'entity', 0, 'accessPointDetailsDTO', 'cdpNeighbors', 'cdpNeighbor', 0,
                #             'interfaceSpeed']
                # speed = self.parse_json.value(result, key_list, logger)
                # key_list = ['queryResponse', 'entity', 0, 'accessPointDetailsDTO', 'name']
                # name = self.parse_json.value(result, key_list, logger)
                # key_list = ['queryResponse', 'entity', 0, 'accessPointDetailsDTO', 'model']
                # model = self.parse_json.value(result, key_list, logger)
                # key_list = ['queryResponse', 'entity', 0, 'accessPointDetailsDTO', 'locationHierarchy']
                # map_location = self.parse_json.value(result, key_list, logger)
                # #
                # logger.info("ap name :{}".format(name))
                # logger.info("ap model :{}".format(model))
                # logger.info("switch name :{}".format(neigh_name))
                # logger.info("switch ip   :{}".format(neigh_ip))
                # logger.info("interface   :{}".format(interface))
                # logger.info("speed :{}".format(speed))
                # logger.info("map location :{}".format(map_location))
                # End reload
            else:
                time.sleep(1)  # sleep at the end of each to prevent overruns when running without toggle
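
The repeated key_list lookups above all walk a nested JSON structure; a plain-Python sketch of what parse_json.value is assumed to do:

def get_path(obj, keys, default=None):
    """Walk nested dicts/lists along keys, returning default on any miss."""
    for key in keys:
        try:
            obj = obj[key]
        except (KeyError, IndexError, TypeError):
            return default
    return obj

result_dict = {'queryResponse': {'entity': [{'accessPointDetailsDTO': {'name': 'AP1'}}]}}
base = ['queryResponse', 'entity', 0, 'accessPointDetailsDTO']
print(get_path(result_dict, base + ['name']))  # -> AP1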
Example no. 16
from json_parser import JsonParser
from video_processor import VideoProcessor
from feedback import FeedbackSystem

j = JsonParser()
video = j.parse("flare3", 200, "json/learn", "front", [0,0])
vp = VideoProcessor(video)
angles = vp.compute_left_elbow_angle(0.4)
fs = FeedbackSystem()
out = fs.min_max(angles)
print(out)

Example no. 17
class Connector:
    def __init__(self, uname, passwd, cpi_ipv4_addr, log):

        self.username = uname
        self.password = passwd
        self.cpi_ipv4_address = cpi_ipv4_addr
        self.logger = log
        self.parse_json = JsonParser()

        urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)

    # --- Prime job execution and handling

    def job_complete(self, job_id):

        url = "https://{}/webacs/api/v3/op/jobService/runhistory.json?jobName=\"{}\"".format(
            self.cpi_ipv4_address, job_id)
        result = self.error_handling(requests.get, 5, url, False,
                                     self.username, self.password)
        key_list = [
            'mgmtResponse', 'job', 0, 'runInstances', 'runInstance', 0,
            'runStatus'
        ]
        status = self.parse_json.value(result.json(), key_list, self.logger)

        if status == "COMPLETED":  # job complete
            return True
        elif status == "RUNNING":  # job running
            return False
        else:
            # ADD CRITICAL LOGGING HERE
            print('critical, job not run correctly')
            sys.exit(1)

    def job_successful(self, job_id):

        url = "https://{}/webacs/api/v3/op/jobService/runhistory.json?jobName=\"{}\"".format(
            self.cpi_ipv4_address, job_id)
        result = self.error_handling(requests.get, 5, url, False,
                                     self.username, self.password)
        key_list = [
            'mgmtResponse', 'job', 0, 'runInstances', 'runInstance', 0,
            'resultStatus'
        ]
        status = self.parse_json.value(result.json(), key_list, self.logger)
        if status == "SUCCESS":
            return True
        elif status == "FAILURE":
            return False

    # --- end Prime job handling

    # a decorator-like method for error handling
    def error_handling(self, api_call_method, base_case, *args):

        try:
            if api_call_method == requests.get:  # a GET api call
                req = api_call_method(args[0],
                                      verify=args[1],
                                      auth=(args[2], args[3]))
            elif api_call_method == requests.post:  # a POST api call
                req = api_call_method(args[0],
                                      verify=args[1],
                                      auth=(args[2], args[3]),
                                      json=args[4])
            elif api_call_method == requests.put:  # a PUT api call
                req = api_call_method(args[0],
                                      verify=args[1],
                                      auth=(args[2], args[3]),
                                      json=args[4])
            else:
                pass
            req.raise_for_status()
        except requests.exceptions.HTTPError as error:
            if req.status_code == 503 or req.status_code == 403:
                # too many requests at once
                self.logger.warning(
                    "Too many API calls at once. Slight delay before retrying. Status code: {}"
                    .format(req.status_code))
                time.sleep(random.uniform(1.0, 5.0))

                base_case -= 1  # decrement base_case by one
                if base_case == 0:
                    self.logger.critical(
                        "Recursive error handler's base case reached. Abandoning API calls."
                    )
                    self.logger.critical(api_call_method)
                    self.logger.critical(args[0])
                    sys.exit(1)

                self.logger.debug(
                    "Base case just before recursive call: {}".format(
                        base_case))
                if api_call_method == requests.get:
                    req = self.error_handling(api_call_method, base_case,
                                              args[0], args[1], args[2],
                                              args[3])
                elif api_call_method == requests.post or api_call_method == requests.put:
                    req = self.error_handling(api_call_method, base_case,
                                              args[0], args[1], args[2],
                                              args[3], args[4])
                else:
                    self.logger.critical("Recursive API call failed")
                    sys.exit(1)
            elif req.status_code == 401:
                self.logger.critical("Bad authentication. Check credentials.")
                self.logger.critical(error)
                sys.exit(1)
            else:
                self.logger.critical("API call failed.")
                self.logger.critical(error)
                sys.exit(1)
        except requests.exceptions.RequestException as e:
            # catch-all failure, exit program
            self.logger.critical(e)
            sys.exit(1)
        return req  # returned outside a finally block so the sys.exit() calls above are not swallowed

    # --- print API calls, mainly for testing

    def test(self):
        url = "https://{}/webacs/api/v3/data/AccessPoints.json?name=\"ESQ_4-430_5e3c\""
        result = self.error_handling(requests.get, 5, url, False,
                                     self.username, self.password)
        print(json.dumps(result.json(), indent=4))
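
The class comment above calls error_handling "decorator-like"; a simplified sketch of the same throttle-retry idea as an actual decorator (not the author's implementation):

import functools
import random
import time

import requests

def retry_on_throttle(max_tries=5):
    def decorator(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            for _ in range(max_tries):
                resp = func(*args, **kwargs)
                if resp.status_code not in (403, 503):
                    return resp
                time.sleep(random.uniform(1.0, 5.0))  # back off before retrying
            return resp  # still throttled after max_tries
        return wrapper
    return decorator

@retry_on_throttle(max_tries=5)
def get_url(url, auth):
    return requests.get(url, verify=False, auth=auth)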
Example no. 18
def main(args):

    parser = argparse.ArgumentParser()
    parser.add_argument("-a",
                        nargs=1,
                        action="store",
                        dest="json_file_name",
                        default=["empty"])
    parser.add_argument("-o",
                        nargs=1,
                        action="store",
                        dest="image_name",
                        default=["empty"])
    parser.add_argument("--output",
                        nargs=1,
                        action="store",
                        dest="image_name",
                        default=["empty"])
    parsed_args = parser.parse_args(args)

    image_name = parsed_args.image_name[0]
    json_file_name = parsed_args.json_file_name[0]

    if json_file_name == "empty":
        print(
            "\n Welcome to my shape drawer :)\n"
            " you can use following options:\n\n"
            "  -a <json-file-name> : path to json file describing the shape or shapes\n"
            "  -o | --output <image-name.png> : allows to save your drawing\n\n"
            " working examples:\n\n"
            " python main.py -a data.json\n"
            " python main.py -a image.json -o star.png\n")
        exit(0)

    json_parser = JsonParser()
    screen, palette, figures = json_parser.parse_json(json_file_name)
    database = DataBase()

    for figure in figures:

        figure_name = figure.get('type')

        if figure_name in database.figures:

            if figure_name == 'circle':

                shape = CircleDrawer(screen, palette, figure, image_name)
                shape.draw_circle()

            elif figure_name in ['square', 'rectangle']:

                shape = RectangleDrawer(screen, palette, figure, image_name)
                shape.draw_rectangle()

            elif figure_name == 'point':

                shape = PointDrawer(screen, palette, figure, image_name)
                shape.draw_point()

            elif figure_name == 'polygon':

                shape = PolygonDrawer(screen, palette, figure, image_name)
                shape.draw_polygon()

        else:
            print("Unrecognized figure: ", figure_name)
"""
This code demonstrates simple learning and feedback process for wrong push-up posture.  
For the intermediate presentations use only. 
"""
from json_parser import JsonParser
from video_processor import VideoProcessor
from feedback import FeedbackSystem
from pathlib import Path
import subprocess
import os, re

openpose_demo_path = "D:\\OneDrive\\OneDrive - postech.ac.kr\\2019 Fall\\창의설계4\\openpose-1.5.1-binaries-win64-only_cpu-python-flir-3d\\openpose-1.5.1-binaries-win64-only_cpu-python-flir-3d\\openpose\\bin\\OpenPoseDemo.exe"
camera_offset = 1
video_name = "flare1"
json_dir = "D:\\OneDrive\\OneDrive - postech.ac.kr\\2019 Fall\\창의설계4\\code\\json\\" + video_name
model_dir = "D:\\OneDrive\\OneDrive - postech.ac.kr\\2019 Fall\\창의설계4\\openpose-1.5.1-binaries-win64-only_cpu-python-flir-3d\\openpose-1.5.1-binaries-win64-only_cpu-python-flir-3d\\openpose\\models"

tys = ["elbow", "arm", "shoulder"]
for ty in tys:
    fds = FeedbackSystem()
    fds.load("demo_front_" + ty + "_model", "front")

    # 2. Run Openpose Webcam Mode

    # 3. Give feedback
    j = JsonParser()
    count = len(os.listdir(json_dir))
    video = j.parse(video_name, count, json_dir, "front", None)
    result = fds.feedback_kmeans(video, ty)
    print(result)

Example no. 20
def setup_tables(connection):
    SQL_CREATE_USER_USAGE_TABLE = """CREATE TABLE IF NOT EXISTS user_usage (
                                        id integer PRIMARY KEY,
                                        predicted_usage text NOT NULL,
                                        actual_usage text NOT NULL,
                                        salesforce_id integer NOT NULL,
                                        FOREIGN KEY (salesforce_id) REFERENCES users (id)
                                    ); """

    SQL_CREATE_USERS_TABLE = """CREATE TABLE IF NOT EXISTS users (
                                        id integer PRIMARY KEY, 
                                        country text NOT NULL,
                                        name text NOT NULL,
                                        owner text NOT NULL,
                                        manager text NOT NULL
                                );"""
    if connection is not None:
        create_table(connection, SQL_CREATE_USER_USAGE_TABLE)
        create_table(connection, SQL_CREATE_USERS_TABLE)


if __name__ == "__main__":
    connection = DBProvider().connection

    with connection:
        setup_tables(connection)
        insert_users(connection, JsonParser().domainY())
        insert_users_usage(connection, JsonParser().user_usage())
Example no. 21
    def __init__(self):

        self.parse_json = JsonParser()
Example no. 22
from json_parser import JsonParser
import sys

if __name__ == '__main__':
    j = JsonParser(sys.argv[1])
    j.read_all()

    print('Total number of records: {}'.format(j.records_counter))
    print('Actions counter:')
    for k, v in j.actions.items():
        print('\t{}: {}'.format(k, v))
    print('Success percent: {:.2f}%'.format(float(j.success_counter) / j.logs_counter * 100))
json_dir = "../json/output"
model_dir = "models"

for f in os.listdir(json_dir):
    os.remove(os.path.join(json_dir, f))

# 2. Run Openpose Webcam Mode
handler = subprocess.Popen([
    openpose_demo_path, "--disable_blending=false", "--camera=" +
    str(camera_offset), "--net_resolution=128x128", "--write_json=" + json_dir,
    "--model_folder=" + model_dir, "--number_people_max=1"
],
                           shell=False)

print("Start 3 push-up")
tys = ["elbow", "arm", "shoulder"]
for ty in tys:
    fds = FeedbackSystem()
    fds.load("demo_front_" + ty + "_model", "front")

    # 3. Give feedback
    #try:
    j = JsonParser()
    video = j.parse(None, 60, json_dir, "front", None)
    result = fds.feedback_kmeans(video, ty)
    print(result)
    handler.terminate()
    #except:
    #    print("Exception Occured")
    #    handler.terminate()
"""
This code demonstrates simple learning and feedback process for wrong push-up posture.  
For the intermediate presentations use only. 
"""
from json_parser import JsonParser
from feedback import FeedbackSystem
from pathlib import Path
import os

# 1. learning FeedbackSystem with pre-labelled push-up data
fds = FeedbackSystem()
j = JsonParser()

#label format [partial range or not, elbow flare or not, wide or not]
videos_with_label = [("r0e0ns1", [0, 0, 0]), ("r0e0ns2", [0, 0, 0]),
                     ("r0e0ns3", [0, 0, 0]), ("r0e0ws1", [0, 0, 1]),
                     ("r0e0ws2", [0, 0, 1]), ("r0e0ws3", [0, 0, 1]),
                     ("r0e1ns1", [0, 1, 0]), ("r0e1ns2", [0, 1, 0]),
                     ("r0e1ws1", [0, 1, 1]), ("r0e1ws2", [0, 1, 1]),
                     ("r0e1ws3", [0, 1, 1]), ("r1e0ns1", [1, 0, 0]),
                     ("r1e0ns2", [1, 0, 0]), ("r1e0ns3", [1, 0, 0]),
                     ("r1e0ns4", [1, 0, 0]), ("r1e0ws1", [1, 0, 1]),
                     ("r1e0ws2", [1, 0, 1]), ("r1e0ws3", [1, 0, 1]),
                     ("r1e1ns1", [1, 1, 0]), ("r1e1ns2", [1, 1, 0]),
                     ("r1e1ns3", [1, 1, 0]), ("r1e1ws1", [1, 1, 1]),
                     ("r1e1ws2", [1, 1, 1]), ("r1e1ws3", [1, 1, 1])]

tys = ["elbow", "shoulder", "arm"]
for ty in tys:
    for video_with_label in videos_with_label:
        path = Path("../json/" + video_with_label[0])
Example no. 25
class SaveActivations:
    def __init__(self):
        self._opt = BaseOption().parse()
        # check device
        self._device = torch.device("cpu")  # device setup (forced to CPU)
        print("device: ", self._device)

    def apply_opt(self):
        # dataset
        if self._opt.dataset == "MNIST":
            train_data, test_data = utils.get_mnist()
            self._train_set = torch.utils.data.DataLoader(
                train_data,
                batch_size=self._opt.batch_size,
                shuffle=True,
                num_workers=self._opt.num_workers)
            self._test_set = torch.utils.data.DataLoader(
                test_data,
                batch_size=self._opt.batch_size,
                shuffle=True,
                num_workers=self._opt.num_workers)
            self._initialize_model(dims=self._opt.layer_dims)
            print("MNIST experiment")

        elif self._opt.dataset == "IBNet":
            train_data = utils.CustomDataset('2017_12_21_16_51_3_275766',
                                             train=True)
            test_data = utils.CustomDataset('2017_12_21_16_51_3_275766',
                                            train=False)
            self._train_set = torch.utils.data.DataLoader(
                train_data,
                batch_size=self._opt.batch_size,
                shuffle=True,
                num_workers=self._opt.num_workers)
            self._test_set = torch.utils.data.DataLoader(
                test_data,
                batch_size=self._opt.batch_size,
                shuffle=True,
                num_workers=self._opt.num_workers)
            self._initialize_model(dims=self._opt.layer_dims)
            print("IBnet experiment")
        else:
            raise RuntimeError(
                'Dataset {name} is not available; please use an existing dataset'
                .format(name=self._opt.dataset))

        # construct saving directory
        save_root_dir = self._opt.save_root_dir
        dataset = self._opt.dataset
        time = datetime.datetime.today().strftime('%m_%d_%H_%M')
        model = ''.join(
            list(map(lambda x: str(x) + '_', self._model.layer_dims)))
        folder_name = dataset + '_' + self._opt.experiment_name + '_Time_' + time + '_Model_' + model
        self._path_to_dir = save_root_dir + '/' + folder_name + '/'
        print(self._path_to_dir)
        if not os.path.exists(self._path_to_dir):
            os.makedirs(self._path_to_dir)

        self._logger = Logger(opt=self._opt, plot_name=folder_name)
        self._json = JsonParser()

    def _update_opt(self, other):
        for key, val in other.items():
            setattr(self._opt, key, val)

    def _initialize_model(self, dims):
        # weight initialization
        def weights_init(m):
            if isinstance(m, nn.Linear):
                nn.init.xavier_normal_(m.weight.data)
                nn.init.constant_(m.bias.data, 0)

        # model construction
        self._model = Model(activation=self._opt.activation,
                            dims=dims,
                            train=True)
        self._model.apply(weights_init)
        # optimizer
        # self._optimizer = optim.Adam(self._model.parameters(), lr=self._opt.lr)
        self._optimizer = optim.SGD(self._model.parameters(),
                                    lr=self._opt.lr,
                                    momentum=self._opt.momentum)
        # loss
        self._criterion = nn.CrossEntropyLoss()  # loss

    def training_model(self):

        self.apply_opt()
        self._json.dump_json(opt=self._opt, path=self._path_to_dir)
        print('Begin training...')
        self._model = self._model.to(self._device)

        save_step = 100

        eta = 1.
        running_loss = 0.0
        running_acc = 0.0
        t_begin = time.time()

        # main loop for training
        for i_epoch in range(self._opt.max_epoch):
            # set to train
            self._model.train()

            # train batch
            if ((i_epoch + 1) % save_step == 0) or (i_epoch == 0):
                print('\n{}'.format(11 * '------'))

            running_loss = 0.0
            running_acc = 0.0
            for i_batch, (inputs, labels) in enumerate(self._train_set):
                inputs = inputs.to(self._device)
                labels = labels.to(self._device)
                bsize = inputs.shape[0]
                # set to learnable
                with torch.set_grad_enabled(True):
                    #forward
                    self._optimizer.zero_grad()
                    outputs = self._model(inputs)
                    loss = self._criterion(outputs, labels)
                    _, preds = torch.max(outputs, 1)
                    corrects = torch.sum(preds == labels.data).double()

                    # backprop
                    loss.backward()
                    self._optimizer.step()

                self._logger.log(
                    self._model)  # log the model each time the weights are updated

                # monitor the running loss & running accuracy
                # eta = eta / (1. + bsize*eta)
                # running_loss = (1. - bsize*eta)*running_loss + eta*loss.detach()
                # running_acc = (1. - bsize*eta)*running_acc + eta*corrects.detach()
                running_acc = float(corrects.detach() / bsize)
                running_loss = float(loss.detach())
                if ((i_epoch + 1) % save_step == 0) or (i_epoch == 0):
                    output_format = "\repoch:{epoch} batch:{batch:2d} " +\
                                    "Loss:{loss:.5e} Acc:{acc:.5f}% " +\
                                    "numacc:{num:.0f}/{tnum:.0f}"
                    print(
                        output_format.format(batch=i_batch + 1,
                                             epoch=i_epoch + 1,
                                             loss=running_loss,
                                             acc=running_acc * 100.,
                                             num=corrects,
                                             tnum=bsize))

            self._model.eval()
            for i_batch, (inputs, labels) in enumerate(self._test_set):
                inputs = inputs.to(self._device)
                labels = labels.to(self._device)
                bsize = inputs.shape[0]
                #forward
                outputs = self._model(inputs)
                _, preds = torch.max(outputs, 1)
                corrects = torch.sum(preds == labels.data).double()

                # monitor the running loss & running accuracy
                # eta = eta / (1. + bsize*eta)
                # running_loss = (1. - bsize*eta)*running_loss + eta*loss.detach()
                # running_acc = (1. - bsize*eta)*running_acc + eta*corrects.detach()
                running_acc = float(corrects.detach() / bsize)
                if ((i_epoch + 1) % save_step == 0) or (i_epoch == 0):
                    output_format = "\repoch:{epoch} batch:{batch:2d} " +\
                                    "Acc:{acc:.5f}% " +\
                                    "numacc:{num:.0f}/{tnum:.0f}"
                    print(
                        output_format.format(batch=i_batch + 1,
                                             epoch=i_epoch + 1,
                                             acc=running_acc * 100.,
                                             num=corrects,
                                             tnum=bsize))

            self._logger.update(i_epoch)  # to calculate std and mean

            if ((i_epoch + 1) % save_step == 0) or (i_epoch == 0):
                print('{}'.format(11 * '------'))
                t_end = time.time()
                print('time cost for this output period: {:.3f}(s)'.format(
                    t_end - t_begin))
                t_begin = time.time()

            # saving model for each epoch
            self.save_model(i_epoch)

        self._logger.plot_figures()
        print(
            '-------------------------training end--------------------------')

    def save_model(self, epoch):
        save_full_path = self.generate_save_fullpath(epoch + 1)
        torch.save(
            {
                'epoch': epoch,
                'model_state_dict': self._model.state_dict(),
                'optimizer_state_dict': self._optimizer.state_dict(),
            }, save_full_path)

    def generate_save_fullpath(self, epoch):
        suffix = '.pth'
        fullpath = self._path_to_dir + 'model_epoch_' + str(epoch) + suffix
        return fullpath
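
save_model writes the epoch plus model and optimizer state dicts; a matching restore sketch (the path, model and optimizer are placeholders built as above):

import torch

checkpoint = torch.load('model_epoch_1.pth', map_location='cpu')  # placeholder path
model.load_state_dict(checkpoint['model_state_dict'])  # model built as above
optimizer.load_state_dict(checkpoint['optimizer_state_dict'])
start_epoch = checkpoint['epoch'] + 1  # resume from the next epoch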
Example no. 26
class TaskManager():
    """
        Static helper class that runs the individual tasks
    """
    p = JsonParser()
    """
        gets all user ids from the data supplied
    """
    @staticmethod
    def get_all_users(data):
        users = []
        for i in data:
            if i.get("visitor_uuid") not in users:
                users.append(i.get("visitor_uuid"))
        return users

    """
        gets all document ids from the data supplied
    """

    @staticmethod
    def get_all_documents(data):
        documents = []
        for i in data:
            if i.get("subject_doc_id") not in documents:
                documents.append(i.get("subject_doc_id"))
        return documents

    """
        gets all the document ids from the data supplied which has been visited by a certain user
    """

    @staticmethod
    def get_all_documents_by_user(user_id, data):
        data = TaskManager.filter_data(data, "visitor_uuid", user_id)
        return TaskManager.get_all_documents(data)

    """
        gets all the user ids from the data supplied who have visited a certain document
    """

    @staticmethod
    def get_all_users_by_doc(doc_id, data):
        data = TaskManager.filter_data(data, "subject_doc_id", doc_id)
        return TaskManager.get_all_users(data)

    """
        loads, reads and parses in a file, using the supplied filename
    """

    @staticmethod
    def load_file(file):
        if file is not None:
            r = Reader(file)
        else:
            r = Reader("sample_100k_lines.json")
        while True:
            try:
                TaskManager.p.add(r.read_line())
            except JSONDecodeError:
                print("Completed Parsing File")
                break
        return TaskManager.p.get_all()

    """
        handles all the tasks by calling the corresponding functions which fulfill the tasks objectives
    """

    @staticmethod
    def task_handler(doc_id, user_id, task_id, data, g, cmd):
        if g is not None:
            if g.canvas is not None:
                g.canvas.get_tk_widget().destroy()
            if g.toolbar is not None:
                g.toolbar.destroy()
                g.toolbar = None
            if g.listbox is not None:
                g.listbox.destroy()
        if task_id == "2a":
            if (cmd and doc_id not in TaskManager.get_all_documents(
                    data)) or doc_id is None:
                print("Please Provide a Valid Document ID")
            else:
                histogram = Histograms(
                    TaskManager.get_countries(
                        doc_id,
                        TaskManager.filter_data(data, "subject_doc_id",
                                                doc_id)), "Task 2A", cmd)
                if not cmd:
                    TaskManager.plot_figure_gui(g, histogram)
        elif task_id == "2b":
            if (cmd and doc_id not in TaskManager.get_all_documents(
                    data)) or doc_id is None:
                print("Please Provide a Valid Document ID")
            else:
                histogram = Histograms(
                    TaskManager.get_continents(
                        doc_id,
                        TaskManager.filter_data(data, "subject_doc_id",
                                                doc_id)), "Task 2B", cmd)
                if not cmd:
                    TaskManager.plot_figure_gui(g, histogram)
        elif task_id == "3a":
            histogram = Histograms(TaskManager.simple_get_all_browser(data),
                                   "Task 3A", cmd)
            if not cmd:
                TaskManager.plot_figure_gui(g, histogram)
        elif task_id == "3b":
            histogram = Histograms(TaskManager.get_all_browser(data),
                                   "Task 3B", cmd)
            if not cmd:
                TaskManager.plot_figure_gui(g, histogram)
        elif task_id == "4":
            top10 = TaskManager.get_top_10(data)
            if cmd:
                print(top10)
            else:
                TaskManager.load_list(g, top10)
        elif task_id == "5a":
            users = TaskManager.get_all_users_by_doc(doc_id, data)
            if cmd:
                print(users)
            else:
                TaskManager.load_list(g, users)
        elif task_id == "5b":
            docs = TaskManager.get_all_documents_by_user(user_id, data)
            if cmd:
                print(docs)
            else:
                TaskManager.load_list(g, docs)
        elif task_id == "5c":
            also_likes = TaskManager.task5(data, doc_id, user_id, None)
            if cmd:
                print(also_likes)
            else:
                TaskManager.load_list(g, also_likes)
        elif task_id == "5d":
            also_likes = TaskManager.task5(data, doc_id, user_id,
                                           TaskManager.sort_by_readership)
            if cmd:
                print(also_likes)
            else:
                TaskManager.load_list(g, also_likes)
        elif task_id == "5e":
            also_likes = TaskManager.task5(data, doc_id, user_id,
                                           TaskManager.sort_by_number)
            if cmd:
                print(also_likes)
            else:
                TaskManager.load_list(g, also_likes)
        else:
            if cmd:
                print("Invalid Task")

    """
        filters data based on a specific key satisfying a specific value and return the filter list
    """

    @staticmethod
    def filter_data(data, filter_key, value):
        results = []
        for i in data:
            if i.get(filter_key) == value:
                results.append(i)
        return results

    """
        filters data based on a specific key not satisfying a specific value and return the filter list
    """

    @staticmethod
    def inverse_filter_data(data, filter_key, value):
        results = []
        for i in data:
            if not i.get(filter_key) == value:
                results.append(i)
        return results

    """
        gets the top 10 users who spend the most time reading in a descending order
    """

    @staticmethod
    def get_top_10(data):
        count = dict()
        users = TaskManager.get_all_users(data)
        for i in users:
            count.update({i: 0})
        for j in data:
            if not j.get("event_readtime") is None:
                count[j["visitor_uuid"]] += j.get("event_readtime")
        results = sorted(count, key=count.get, reverse=True)
        results = results[:10]
        return results

    """
        gets how frequently each browser has been used to visit the application, this does distinguish versions
        of browsers
    """

    @staticmethod
    def simple_get_all_browser(data):
        browsers = {}
        for i in data:
            b = httpagentparser.simple_detect(i["visitor_useragent"])[1]
            if b not in browsers:
                browsers.update({b: 1})
            else:
                browsers[b] += 1
        return browsers

    """
        gets how frequently each browser has been used to visit the application, this does not distinguish versions
        of browsers
    """

    @staticmethod
    def get_all_browser(data):
        results = {}
        browsers = TaskManager.simple_get_all_browser(data)
        for i in browsers.keys():
            r = re.findall('.+ [0-9]', i)
            for j in r:
                if j[:-2] not in results:
                    results.update({j[:-2]: browsers[i]})
                else:
                    results[j[:-2]] += browsers[i]
        return results

    """
        gets how frequently users have visited a specific document by their country
    """

    @staticmethod
    def get_countries(doc_id, data):
        countries = dict()
        for k in data:
            if k.get("subject_doc_id") == doc_id:
                if k.get("visitor_country") in countries.keys():
                    countries[k["visitor_country"]] += 1
                else:
                    countries.update({k.get("visitor_country"): 1})
        return countries

    """
        gets how frequently users have visited a specific document by their continents
    """

    @staticmethod
    def get_continents(doc_id, data):
        continents = {"AF": 0, "EU": 0, "OC": 0, "NA": 0, "SA": 0, "AS": 0}
        data = TaskManager.get_countries(doc_id, data)
        if data is None:
            return
        for i in data.keys():
            if TaskManager.cntry_to_cont[i] == "AF":
                continents["AF"] += data[i]
            elif TaskManager.cntry_to_cont[i] == "EU":
                continents["EU"] += data[i]
            elif TaskManager.cntry_to_cont[i] == "OC":
                continents["OC"] += data[i]
            elif TaskManager.cntry_to_cont[i] == "NA":
                continents["NA"] += data[i]
            elif TaskManager.cntry_to_cont[i] == "SA":
                continents["SA"] += data[i]
            elif TaskManager.cntry_to_cont[i] == "AS":
                continents["AS"] += data[i]
        return continents

    """
        gets all the documents other users have read based on a document the supplied user has read
    """

    @staticmethod
    def task5(data, doc_id, user, sorting):
        users_read = []
        if doc_id is not None:
            d = TaskManager.filter_data(data, "subject_doc_id", doc_id)
            u = TaskManager.get_all_users(d)
            if user in u:
                for i in u:
                    if i != user:
                        u2 = TaskManager.filter_data(data, "visitor_uuid", i)
                        users_read.append(
                            TaskManager.get_all_documents(
                                TaskManager.filter_data(
                                    TaskManager.inverse_filter_data(
                                        u2, "subject_doc_id", doc_id),
                                    "event_type", "read")))
                docs = dict()
                for i in users_read:
                    for j in i:
                        if j is not None:
                            if j not in docs.keys():
                                docs.update({j: 1})
                            else:
                                docs[j] += 1
                if sorting is not None:
                    result = sorting(docs)
                else:
                    result = docs
            else:
                result = []
                print("Please Enter a Valid User ID")
        else:
            result = []
            print("Please Enter a Valid Document ID")
        return result

    """
        sorts the documents by the time spent reading them and returns them in a descending order
    """

    @staticmethod
    def sort_by_readership(data):
        result = dict()
        for i in data.keys():
            temp = TaskManager.filter_data(TaskManager.p.get_all(),
                                           "subject_doc_id", i)
            for j in temp:
                if j.get("event_readtime") is not None:
                    if i not in result:
                        result.update({i: j.get("event_readtime")})
                    else:
                        result[i] += j.get("event_readtime")
        print(result)
        return TaskManager.sort_by_number(result)

    """
        sorts the documents by the number of users who read them and returns them in a descending order
    """

    @staticmethod
    def sort_by_number(data):
        if len(data) < 11:
            return sorted(data.keys(), reverse=True, key=data.__getitem__)
        else:
            return sorted(data.keys(), reverse=True, key=data.__getitem__)[:10]

    """
        embeds a chart to the GUI
    """

    @staticmethod
    def plot_figure_gui(g, histogram):
        g.canvas = FigureCanvasTkAgg(histogram.figure, g.main)
        g.canvas.show()
        g.canvas.get_tk_widget().pack(expand=1, side=RIGHT)
        g.toolbar = NavigationToolbar2TkAgg(g.canvas, g.main)
        g.toolbar.update()
        g.main.mainloop()

    """
        embeds a listbox with the supplied data items to the GUI
    """

    @staticmethod
    def load_list(g, data):
        g.listbox = Listbox(width=60)
        g.listbox.pack(expand=True, side=BOTTOM)
        for i in data:
            g.listbox.insert(END, i)
        g.main.mainloop()

    # dictionary used to assign countries to continents
    cntry_to_cont = {
        'AP': 'AS',
        'AF': 'AS',
        'AX': 'EU',
        'AL': 'EU',
        'DZ': 'AF',
        'AS': 'OC',
        'AD': 'EU',
        'AO': 'AF',
        'AI': 'NA',
        'AQ': 'AN',
        'AG': 'NA',
        'AR': 'SA',
        'AM': 'AS',
        'AW': 'NA',
        'AU': 'OC',
        'AT': 'EU',
        'AZ': 'AS',
        'BS': 'NA',
        'BH': 'AS',
        'BD': 'AS',
        'BB': 'NA',
        'BY': 'EU',
        'BE': 'EU',
        'BZ': 'NA',
        'BJ': 'AF',
        'BM': 'NA',
        'BT': 'AS',
        'BO': 'SA',
        'BQ': 'NA',
        'BA': 'EU',
        'BW': 'AF',
        'BV': 'AN',
        'BR': 'SA',
        'IO': 'AS',
        'VG': 'NA',
        'BN': 'AS',
        'BG': 'EU',
        'BF': 'AF',
        'BI': 'AF',
        'KH': 'AS',
        'CM': 'AF',
        'CA': 'NA',
        'CV': 'AF',
        'KY': 'NA',
        'CF': 'AF',
        'TD': 'AF',
        'CL': 'SA',
        'CN': 'AS',
        'CX': 'AS',
        'CC': 'AS',
        'CO': 'SA',
        'KM': 'AF',
        'CD': 'AF',
        'CG': 'AF',
        'CK': 'OC',
        'CR': 'NA',
        'CI': 'AF',
        'HR': 'EU',
        'CU': 'NA',
        'CW': 'NA',
        'CY': 'AS',
        'CZ': 'EU',
        'DK': 'EU',
        'DJ': 'AF',
        'DM': 'NA',
        'DO': 'NA',
        'EC': 'SA',
        'EG': 'AF',
        'SV': 'NA',
        'GQ': 'AF',
        'ER': 'AF',
        'EE': 'EU',
        'ET': 'AF',
        'FO': 'EU',
        'FK': 'SA',
        'FJ': 'OC',
        'FI': 'EU',
        'FR': 'EU',
        'GF': 'SA',
        'PF': 'OC',
        'TF': 'AN',
        'GA': 'AF',
        'GM': 'AF',
        'GE': 'AS',
        'DE': 'EU',
        'GH': 'AF',
        'GI': 'EU',
        'GR': 'EU',
        'GL': 'NA',
        'GD': 'NA',
        'GP': 'NA',
        'GU': 'OC',
        'GT': 'NA',
        'GG': 'EU',
        'GN': 'AF',
        'GW': 'AF',
        'GY': 'SA',
        'HT': 'NA',
        'HM': 'AN',
        'VA': 'EU',
        'HN': 'NA',
        'HK': 'AS',
        'HU': 'EU',
        'IS': 'EU',
        'IN': 'AS',
        'ID': 'AS',
        'IR': 'AS',
        'IQ': 'AS',
        'IE': 'EU',
        'IM': 'EU',
        'IL': 'AS',
        'IT': 'EU',
        'JM': 'NA',
        'JP': 'AS',
        'JE': 'EU',
        'JO': 'AS',
        'KZ': 'AS',
        'KE': 'AF',
        'KI': 'OC',
        'KP': 'AS',
        'KR': 'AS',
        'KW': 'AS',
        'KG': 'AS',
        'LA': 'AS',
        'LV': 'EU',
        'LB': 'AS',
        'LS': 'AF',
        'LR': 'AF',
        'LY': 'AF',
        'LI': 'EU',
        'LT': 'EU',
        'LU': 'EU',
        'MO': 'AS',
        'MK': 'EU',
        'MG': 'AF',
        'MW': 'AF',
        'MY': 'AS',
        'MV': 'AS',
        'ML': 'AF',
        'MT': 'EU',
        'MH': 'OC',
        'MQ': 'NA',
        'MR': 'AF',
        'MU': 'AF',
        'YT': 'AF',
        'MX': 'NA',
        'FM': 'OC',
        'MD': 'EU',
        'MC': 'EU',
        'MN': 'AS',
        'ME': 'EU',
        'MS': 'NA',
        'MA': 'AF',
        'MZ': 'AF',
        'MM': 'AS',
        'NA': 'AF',
        'NR': 'OC',
        'NP': 'AS',
        'NL': 'EU',
        'NC': 'OC',
        'NZ': 'OC',
        'NI': 'NA',
        'NE': 'AF',
        'NG': 'AF',
        'NU': 'OC',
        'NF': 'OC',
        'MP': 'OC',
        'NO': 'EU',
        'OM': 'AS',
        'PK': 'AS',
        'PW': 'OC',
        'PS': 'AS',
        'PA': 'NA',
        'PG': 'OC',
        'PY': 'SA',
        'PE': 'SA',
        'PH': 'AS',
        'PN': 'OC',
        'PL': 'EU',
        'PT': 'EU',
        'PR': 'NA',
        'QA': 'AS',
        'RE': 'AF',
        'RO': 'EU',
        'RU': 'EU',
        'RW': 'AF',
        'BL': 'NA',
        'SH': 'AF',
        'KN': 'NA',
        'LC': 'NA',
        'MF': 'NA',
        'PM': 'NA',
        'VC': 'NA',
        'WS': 'OC',
        'SM': 'EU',
        'ST': 'AF',
        'SA': 'AS',
        'SN': 'AF',
        'RS': 'EU',
        'SC': 'AF',
        'SL': 'AF',
        'SG': 'AS',
        'SX': 'NA',
        'SK': 'EU',
        'SI': 'EU',
        'SB': 'OC',
        'SO': 'AF',
        'ZA': 'AF',
        'GS': 'AN',
        'SS': 'AF',
        'ES': 'EU',
        'LK': 'AS',
        'SD': 'AF',
        'SR': 'SA',
        'SJ': 'EU',
        'SZ': 'AF',
        'SE': 'EU',
        'CH': 'EU',
        'SY': 'AS',
        'TW': 'AS',
        'TJ': 'AS',
        'TZ': 'AF',
        'TH': 'AS',
        'TL': 'AS',
        'TG': 'AF',
        'TK': 'OC',
        'TO': 'OC',
        'TT': 'NA',
        'TN': 'AF',
        'TR': 'AS',
        'TM': 'AS',
        'TC': 'NA',
        'TV': 'OC',
        'UG': 'AF',
        'UA': 'EU',
        'AE': 'AS',
        'GB': 'EU',
        'US': 'NA',
        'UM': 'OC',
        'VI': 'NA',
        'UY': 'SA',
        'UZ': 'AS',
        'VU': 'OC',
        'VE': 'SA',
        'VN': 'AS',
        'WF': 'OC',
        'EH': 'AF',
        'YE': 'AS',
        'ZM': 'AF',
        'ZW': 'AF',
        'ZZ': 'Unknown',
        'EU': 'Unknown'
    }
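
    # Editor's usage sketch (not original code): resolve an ISO 3166-1 alpha-2
    # country code to a continent code with an explicit fallback, e.g.
    #   TaskManager.cntry_to_cont.get('DE', 'Unknown')  # -> 'EU'
    #   TaskManager.cntry_to_cont.get('XX', 'Unknown')  # -> 'Unknown'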
Example n. 27
0
class Find:
    def __init__(self):

        self.parse_json = JsonParser()

    def ip_client(self, values_dict, cpi_username, cpi_password,
                  cpi_ipv4_address, logger):

        api_call = Client(cpi_username, cpi_password, cpi_ipv4_address, logger)
        dev_id = api_call.id_by_ip(values_dict['address'])
        result = api_call.json_detailed(dev_id)

        key_list = [
            'queryResponse', 'entity', 0, 'clientDetailsDTO', 'deviceName'
        ]
        neigh_name = self.parse_json.value(result, key_list, logger)
        key_list = [
            'queryResponse', 'entity', 0, 'clientDetailsDTO',
            'deviceIpAddress', 'address'
        ]
        tmp = self.parse_json.value(result, key_list, logger)
        neigh_ip = socket.gethostbyname(
            tmp)  # resolve fqdn to IP. Prime resolves IP if possible
        key_list = [
            'queryResponse', 'entity', 0, 'clientDetailsDTO', 'clientInterface'
        ]
        interface = self.parse_json.value(result, key_list, logger)
        key_list = [
            'queryResponse', 'entity', 0, 'clientDetailsDTO', 'ifDescr'
        ]
        description = self.parse_json.value(result, key_list, logger)
        key_list = ['queryResponse', 'entity', 0, 'clientDetailsDTO', 'vlan']
        vlan = self.parse_json.value(result, key_list, logger)
        key_list = [
            'queryResponse', 'entity', 0, 'clientDetailsDTO', 'vlanName'
        ]
        vlan_name = self.parse_json.value(result, key_list, logger)
        key_list = [
            'queryResponse', 'entity', 0, 'clientDetailsDTO', 'macAddress'
        ]
        mac_addr = self.parse_json.value(result, key_list, logger)

        logger.info("switch name :{}".format(neigh_name))
        logger.info("switch ip   :{}".format(neigh_ip))
        logger.info("interface   :{}".format(interface))
        logger.info("description :{}".format(description))
        logger.info("vlan        :{};{}".format(vlan, vlan_name))
        logger.info("mac addr    :{}".format(mac_addr))
        return neigh_name, neigh_ip, interface, description, vlan, vlan_name, mac_addr

    def ip_ap(self, values_dict, cpi_username, cpi_password, cpi_ipv4_address,
              logger):

        ap_api_call = AccessPoint(cpi_username, cpi_password, cpi_ipv4_address,
                                  logger)
        client_api_call = Client(cpi_username, cpi_password, cpi_ipv4_address,
                                 logger)
        client_id = client_api_call.id_by_ip(values_dict['address'])
        ap_id = ap_api_call.id_by_ip(values_dict['address'])
        ap_result = ap_api_call.json_detailed(ap_id)
        client_result = client_api_call.json_detailed(client_id)

        key_list = [
            'queryResponse', 'entity', 0, 'accessPointDetailsDTO',
            'cdpNeighbors', 'cdpNeighbor', 0, 'neighborName'
        ]
        neigh_name = self.parse_json.value(ap_result, key_list, logger)
        key_list = [
            'queryResponse', 'entity', 0, 'accessPointDetailsDTO',
            'cdpNeighbors', 'cdpNeighbor', 0, 'neighborIpAddress'
        ]
        tmp = self.parse_json.value(ap_result, key_list, logger)
        neigh_ip = socket.gethostbyname(
            tmp)  # resolve fqdn to IP. Prime resolves IP if possible
        key_list = [
            'queryResponse', 'entity', 0, 'accessPointDetailsDTO',
            'cdpNeighbors', 'cdpNeighbor', 0, 'neighborPort'
        ]
        interface = self.parse_json.value(ap_result, key_list, logger)
        key_list = ['queryResponse', 'entity', 0, 'clientDetailsDTO', 'vlan']
        vlan = self.parse_json.value(client_result, key_list, logger)
        key_list = [
            'queryResponse', 'entity', 0, 'clientDetailsDTO', 'vlanName'
        ]
        vlan_name = self.parse_json.value(client_result, key_list, logger)
        key_list = [
            'queryResponse', 'entity', 0, 'accessPointDetailsDTO',
            'ethernetMac'
        ]
        mac_addr = self.parse_json.value(ap_result, key_list, logger)

        logger.info("switch name :{}".format(neigh_name))
        logger.info("switch ip   :{}".format(neigh_ip))
        logger.info("interface   :{}".format(interface))
        logger.info("vlan        :{};{}".format(vlan, vlan_name))
        logger.info("ap mac addr :{}".format(mac_addr))
        return neigh_name, neigh_ip, interface, vlan, vlan_name, mac_addr

    def ip_phone(self, values_dict, cpi_username, cpi_password,
                 cpi_ipv4_address, logger):

        api_call = Client(cpi_username, cpi_password, cpi_ipv4_address, logger)
        dev_id = api_call.id_by_ip(values_dict['address'])
        result = api_call.json_detailed(dev_id)

        key_list = [
            'queryResponse', 'entity', 0, 'clientDetailsDTO', 'deviceName'
        ]
        neigh_name = self.parse_json.value(result, key_list, logger)
        key_list = [
            'queryResponse', 'entity', 0, 'clientDetailsDTO',
            'deviceIpAddress', 'address'
        ]
        tmp = self.parse_json.value(result, key_list, logger)
        neigh_ip = socket.gethostbyname(
            tmp)  # resolve fqdn to IP. Prime resolves IP if possible
        key_list = [
            'queryResponse', 'entity', 0, 'clientDetailsDTO', 'clientInterface'
        ]
        interface = self.parse_json.value(result, key_list, logger)
        key_list = [
            'queryResponse', 'entity', 0, 'clientDetailsDTO', 'ifDescr'
        ]
        description = self.parse_json.value(result, key_list, logger)
        key_list = ['queryResponse', 'entity', 0, 'clientDetailsDTO', 'vlan']
        vlan = self.parse_json.value(result, key_list, logger)
        key_list = [
            'queryResponse', 'entity', 0, 'clientDetailsDTO', 'vlanName'
        ]
        vlan_name = self.parse_json.value(result, key_list, logger)
        key_list = [
            'queryResponse', 'entity', 0, 'clientDetailsDTO', 'macAddress'
        ]
        mac_addr = self.parse_json.value(result, key_list, logger)

        logger.info("switch name :{}".format(neigh_name))
        logger.info("switch ip   :{}".format(neigh_ip))
        logger.info("interface   :{}".format(interface))
        logger.info("description :{}".format(description))
        logger.info("vlan        :{};{}".format(vlan, vlan_name))
        logger.info("mac addr    :{}".format(mac_addr))
        return neigh_name, neigh_ip, interface, description, vlan, vlan_name, mac_addr

    def mac_client(self, values_dict, cpi_username, cpi_password,
                   cpi_ipv4_address, logger):

        api_call = Client(cpi_username, cpi_password, cpi_ipv4_address, logger)
        dev_id = api_call.id_by_mac(values_dict['address'])
        result = api_call.json_detailed(dev_id)

        key_list = [
            'queryResponse', 'entity', 0, 'clientDetailsDTO', 'deviceName'
        ]
        neigh_name = self.parse_json.value(result, key_list, logger)
        key_list = [
            'queryResponse', 'entity', 0, 'clientDetailsDTO',
            'deviceIpAddress', 'address'
        ]
        tmp = self.parse_json.value(result, key_list, logger)
        neigh_ip = socket.gethostbyname(
            tmp)  # resolve fqdn to IP. Prime resolves IP if possible
        key_list = [
            'queryResponse', 'entity', 0, 'clientDetailsDTO', 'clientInterface'
        ]
        interface = self.parse_json.value(result, key_list, logger)
        key_list = [
            'queryResponse', 'entity', 0, 'clientDetailsDTO', 'ifDescr'
        ]
        description = self.parse_json.value(result, key_list, logger)
        key_list = ['queryResponse', 'entity', 0, 'clientDetailsDTO', 'vlan']
        vlan = self.parse_json.value(result, key_list, logger)
        key_list = [
            'queryResponse', 'entity', 0, 'clientDetailsDTO', 'vlanName'
        ]
        vlan_name = self.parse_json.value(result, key_list, logger)
        key_list = [
            'queryResponse', 'entity', 0, 'clientDetailsDTO', 'ipAddress',
            'address'
        ]
        ip_addr = self.parse_json.value(result, key_list, logger)

        logger.info("switch name :{}".format(neigh_name))
        logger.info("switch ip   :{}".format(neigh_ip))
        logger.info("interface   :{}".format(interface))
        logger.info("description :{}".format(description))
        logger.info("vlan        :{};{}".format(vlan, vlan_name))
        logger.info("ip addr     :{}".format(ip_addr))
        return neigh_name, neigh_ip, interface, description, vlan, vlan_name, ip_addr

    def mac_ap(self, values_dict, cpi_username, cpi_password, cpi_ipv4_address,
               logger):

        ap_api_call = AccessPoint(cpi_username, cpi_password, cpi_ipv4_address,
                                  logger)
        client_api_call = Client(cpi_username, cpi_password, cpi_ipv4_address,
                                 logger)
        client_id = client_api_call.id_by_mac(values_dict['address'])
        ap_id = ap_api_call.id_by_mac(values_dict['address'])
        ap_result = ap_api_call.json_detailed(ap_id)
        client_result = client_api_call.json_detailed(client_id)

        key_list = [
            'queryResponse', 'entity', 0, 'accessPointDetailsDTO',
            'cdpNeighbors', 'cdpNeighbor', 0, 'neighborName'
        ]
        neigh_name = self.parse_json.value(ap_result, key_list, logger)
        key_list = [
            'queryResponse', 'entity', 0, 'accessPointDetailsDTO',
            'cdpNeighbors', 'cdpNeighbor', 0, 'neighborIpAddress'
        ]
        tmp = self.parse_json.value(ap_result, key_list, logger)
        neigh_ip = socket.gethostbyname(
            tmp)  # resolve fqdn to IP. Prime resolves IP if possible
        key_list = [
            'queryResponse', 'entity', 0, 'accessPointDetailsDTO',
            'cdpNeighbors', 'cdpNeighbor', 0, 'neighborPort'
        ]
        interface = self.parse_json.value(ap_result, key_list, logger)
        key_list = ['queryResponse', 'entity', 0, 'clientDetailsDTO', 'vlan']
        vlan = self.parse_json.value(client_result, key_list, logger)
        key_list = [
            'queryResponse', 'entity', 0, 'clientDetailsDTO', 'vlanName'
        ]
        vlan_name = self.parse_json.value(client_result, key_list, logger)
        key_list = [
            'queryResponse', 'entity', 0, 'accessPointDetailsDTO', 'ipAddress'
        ]
        ip_addr = self.parse_json.value(ap_result, key_list, logger)

        logger.info("switch name :{}".format(neigh_name))
        logger.info("switch ip   :{}".format(neigh_ip))
        logger.info("interface   :{}".format(interface))
        logger.info("vlan        :{};{}".format(vlan, vlan_name))
        logger.info("ap ip addr :{}".format(ip_addr))
        return neigh_name, neigh_ip, interface, vlan, vlan_name, ip_addr

    def mac_phone(self, values_dict, cpi_username, cpi_password,
                  cpi_ipv4_address, logger):

        api_call = Client(cpi_username, cpi_password, cpi_ipv4_address, logger)
        dev_id = api_call.id_by_mac(values_dict['address'])
        result = api_call.json_detailed(dev_id)

        key_list = [
            'queryResponse', 'entity', 0, 'clientDetailsDTO', 'deviceName'
        ]
        neigh_name = self.parse_json.value(result, key_list, logger)
        key_list = [
            'queryResponse', 'entity', 0, 'clientDetailsDTO',
            'deviceIpAddress', 'address'
        ]
        tmp = self.parse_json.value(result, key_list, logger)
        neigh_ip = socket.gethostbyname(
            tmp)  # resolve fqdn to IP. Prime resolves IP if possible
        key_list = [
            'queryResponse', 'entity', 0, 'clientDetailsDTO', 'clientInterface'
        ]
        interface = self.parse_json.value(result, key_list, logger)
        key_list = [
            'queryResponse', 'entity', 0, 'clientDetailsDTO', 'ifDescr'
        ]
        description = self.parse_json.value(result, key_list, logger)
        key_list = ['queryResponse', 'entity', 0, 'clientDetailsDTO', 'vlan']
        vlan = self.parse_json.value(result, key_list, logger)
        key_list = [
            'queryResponse', 'entity', 0, 'clientDetailsDTO', 'vlanName'
        ]
        vlan_name = self.parse_json.value(result, key_list, logger)
        key_list = [
            'queryResponse', 'entity', 0, 'clientDetailsDTO', 'ipAddress',
            'address'
        ]
        ip_addr = self.parse_json.value(result, key_list, logger)

        logger.info("switch name :{}".format(neigh_name))
        logger.info("switch ip   :{}".format(neigh_ip))
        logger.info("interface   :{}".format(interface))
        logger.info("description :{}".format(description))
        logger.info("vlan        :{};{}".format(vlan, vlan_name))
        logger.info("ip addr    :{}".format(ip_addr))
        return neigh_name, neigh_ip, interface, description, vlan, vlan_name, ip_addr
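
# Editor's sketch, not part of the original class: most lookups in the client
# and phone methods above walk ['queryResponse', 'entity', 0,
# 'clientDetailsDTO', <field>], so a small helper could collapse the
# duplication. The name client_field is hypothetical, introduced here only
# for illustration.
def client_field(parse_json, result, field, logger):
    """Fetch one clientDetailsDTO field from a Prime detailed query response."""
    key_list = ['queryResponse', 'entity', 0, 'clientDetailsDTO', field]
    return parse_json.value(result, key_list, logger)

# e.g., inside Find: vlan = client_field(self.parse_json, result, 'vlan', logger)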
Example n. 28
0
"""
This code demonstrates simple learning and feedback process for wrong push-up posture.  
For the intermediate presentations use only. 
"""
from json_parser import JsonParser
from feedback import FeedbackSystem
from pathlib import Path

# 1. train the FeedbackSystem on pre-labelled squat data (label 1 = incorrect form, 0 = correct form)
fds = FeedbackSystem()
j = JsonParser()
#front_videos_with_label = [("correct1", 1), ("correct2", 1), ("correct3", 0), ("flare1", 1), ("flare2", 0), ("flare3", 0)]
videos_with_label = [("incorrect_squat", 1), ("correct_squat", 0)]

for name, label in videos_with_label:
    path = Path("../json/" + name)
    print(path)
    video = j.parse(name, 200, path, "squat", label)
    fds.learn(video, threshold=0.5)

fds.save("demo_squat_model", "squat")
Example n. 29
0
        description="Build your own neural network. Use JSON file.")
    parser.add_argument(
        "-f",
        "--file",
        help="submit JSON file; default file architecture.json",
        type=str)
    parser.add_argument("-w",
                        "--weights",
                        help="print weights during learning",
                        action="store_true")
    args = parser.parse_args()
    if args.weights:
        print_weights = True
    if args.file:
        json_file = args.file
    json_parser = JsonParser(json_file)
    json_parser.parse_json()
    train_df = pd.read_csv(json_parser.input_train_file_path)
    test_df = pd.read_csv(json_parser.input_test_file_path)

    type_of_assignment = json_parser.type
    p_train = None
    p_test = None
    output_layer = None
    if type_of_assignment == "regression":
        p_train = RegressionProvider(train_df,
                                     batch_size=json_parser.batch_size)
        p_test = RegressionProvider(test_df, batch_size=json_parser.batch_size)
        output_layer = "linear"
    elif type_of_assignment == "classification":
        p_train = ClassifierProvider(train_df,
                                     batch_size=json_parser.batch_size)
        # the source snippet breaks off here; the call above is closed by
        # analogy with the regression branch
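
# Editor's sketch of what architecture.json might contain; the key names are
# assumptions inferred from the json_parser attributes used above, not a
# confirmed schema:
# {
#     "type": "regression",
#     "batch_size": 32,
#     "input_train_file_path": "data/train.csv",
#     "input_test_file_path": "data/test.csv"
# }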
Example n. 30
0
from json_parser import JsonParser

with open("sample.json") as f:
    json_string = f.read()

print(JsonParser().parse(json_string)[0])
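
# Editor's inference, not confirmed by the parser source: indexing with [0]
# suggests parse() returns the decoded value as the first element of a tuple,
# e.g. (value, characters_consumed).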