Example #1
File: reports.py Project: tmanfree/capt
    def __init__(self):

        self.parse_json = JsonParser()
        self.sw_list = []
        self.filename = str(sys.argv[2]) + str(
            datetime.now().strftime("%d-%m-%Y_%H%M%S"))
        self.cdp_neighbors = []
Example #2
def part_2():
    print("Verifying {} exists in {}".format(TEST_VALUE, TEST_FILE))
    with open("einat_world_bank.json", "rb") as json_file:
        for line in json_file.readlines():
            json = JsonParser(line)
            if json.has_value(TEST_VALUE):
                return
        raise Exception("Can't find '{}' in JSON".format(TEST_VALUE))
Example #3
def explain(filename=None, plan=""):
    jsonParser = JsonParser(filename)
    try:
        root = jsonParser.get_tree(plan)
    except Exception as err:
        print(err)
        return "The query plan you entered is not valid!"
    return get_explain_string(root)
Example #4
    def __init__(self, uname, passwd, cpi_ipv4_addr, log):

        self.username = uname
        self.password = passwd
        self.cpi_ipv4_address = cpi_ipv4_addr
        self.logger = log
        self.parse_json = JsonParser()

        urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
Example #5
    def apply_opt(self):
        # dataset
        if self._opt.dataset == "MNIST":
            train_data, test_data = utils.get_mnist()
            self._train_set = torch.utils.data.DataLoader(
                train_data,
                batch_size=self._opt.batch_size,
                shuffle=True,
                num_workers=self._opt.num_workers)
            self._test_set = torch.utils.data.DataLoader(
                test_data,
                batch_size=self._opt.batch_size,
                shuffle=True,
                num_workers=self._opt.num_workers)
            self._initialize_model(dims=self._opt.layer_dims)
            print("MNIST experiment")

        elif self._opt.dataset == "IBNet":
            train_data = utils.CustomDataset('2017_12_21_16_51_3_275766',
                                             train=True)
            test_data = utils.CustomDataset('2017_12_21_16_51_3_275766',
                                            train=False)
            self._train_set = torch.utils.data.DataLoader(
                train_data,
                batch_size=self._opt.batch_size,
                shuffle=True,
                num_workers=self._opt.num_workers)
            self._test_set = torch.utils.data.DataLoader(
                test_data,
                batch_size=self._opt.batch_size,
                shuffle=True,
                num_workers=self._opt.num_workers)
            self._initialize_model(dims=self._opt.layer_dims)
            print("IBnet experiment")
        else:
            raise RuntimeError(
                'Dataset {name} is not available; please use an existing dataset'
                .format(name=self._opt.dataset))

        # construct saving directory
        save_root_dir = self._opt.save_root_dir
        dataset = self._opt.dataset
        time = datetime.datetime.today().strftime('%m_%d_%H_%M')
        model = ''.join(
            list(map(lambda x: str(x) + '_', self._model.layer_dims)))
        folder_name = dataset + '_' + self._opt.experiment_name + '_Time_' + time + '_Model_' + model
        self._path_to_dir = save_root_dir + '/' + folder_name + '/'
        print(self._path_to_dir)
        if not os.path.exists(self._path_to_dir):
            os.makedirs(self._path_to_dir)

        self._logger = Logger(opt=self._opt, plot_name=folder_name)
        self._json = JsonParser()
Example #6
    def __init__(self, config, log):

        self.username = config.username
        self.password = config.password
        self.cpi_ipv4_address = config.cpi_ipv4_address
        # self.username = uname
        # self.password = passwd
        # self.cpi_ipv4_address = cpi_ipv4_addr
        self.logger = log
        self.parse_json = JsonParser()
        self.parse_var = VarParser()

        urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
Example #7
    def post():
        content = request.get_json()
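        # persist the request body to a temp file so JsonParser can re-read and validate it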
        with open('temp.json', 'w') as f:
            json.dump(content, f)
        json_obj = JsonParser("temp.json")

        json_obj.validate_json_data_type(content)
        json_obj.parse_json_data()

        ml_uuid = uuidutils.generate_uuid()
        url = content['DataURL']
        ml_lib = content['variables']['mlLib']
        is_form_cluster = content['variables']['isFormCluster']
        storage_name = content['variables']['storageAccountName']
        storage_type = content['variables']['storageAccountType']

        api.mldata_create(ml_uuid, url, ml_lib, is_form_cluster, storage_name,
                          storage_type)
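        # create one resource record per entry, then one node record per item in its NodeList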

        resources = content['resource']
        for res in resources:
            res_uuid = uuidutils.generate_uuid()
            resource_type = res.get('InfrastrctureType')
            provider = res.get('provider')
            if res.get('APIEndpoint'):
                endpoint = res.get('APIEndpoint')
            if res.get('PublicEndpoint'):
                endpoint = res.get('PublicEndpoint')
            username = res.get('username')
            password = res.get('password')
            token = res.get('token')
            availability_zone = res.get('availabilityZone')
            region = res.get('Region')

            api.resource_create(res_uuid, resource_type, provider, endpoint,
                                username, password, token, availability_zone,
                                region)

            if res['NodeList']:
                for node in res['NodeList']:
                    uuid = uuidutils.generate_uuid()
                    resource_id = res_uuid
                    node_ip = node.get('NodeIP')
                    username = node.get('Username')
                    password = node.get('Password')
                    api.node_create(uuid, resource_id, node_ip, username,
                                    password)

        return json.dumps(201, {'ContentType': 'application/json'})
Example #8
def postJsonHandler():
    db.create_all()
    content = request.get_json()
    with open('temp.json', 'w') as f:
        json.dump(content, f)
    json_obj = JsonParser("temp.json")
    username = content['username']
    email = content['email']
    
    new_user = User(username, email)

    db.session.add(new_user)
    db.session.commit()
    json_obj.validate_json_data_type(content)
    json_data = json_obj.parse_json_data()

    return json_data
Example #9
    def __init__(self):
        self.progress_bar = 0
        self._device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu") # device setup
        load_config = JsonParser() # training args
        self.model_name = 'IBNet_test_save_Time_05_27_20_09_Model_12_12_10_7_5_4_3_2_2_'
        self.path = os.path.join('./results', self.model_name)  # info plane dir
        self._opt = load_config.read_json_as_argparse(self.path) # load training args

        # force the batch size to 1 for calculation convenience
        self._opt.batch_size = 1
        # dataset
        if self._opt.dataset == "MNIST":
            train_data, test_data = utils.get_mnist()

            if not self._opt.full_mi:
                # self._train_set = torch.utils.data.DataLoader(train_data, batch_size=1, shuffle=False, num_workers=0)
                self._test_set = torch.utils.data.DataLoader(test_data, batch_size=1, shuffle=False, num_workers=0)
            else:
                dataset = torch.utils.data.ConcatDataset([train_data, test_data])
                self._test_set = torch.utils.data.DataLoader(dataset, batch_size=1, shuffle=False, num_workers=0)
            print("MNIST experiment")

        elif self._opt.dataset == "IBNet":
            train_data = utils.CustomDataset('2017_12_21_16_51_3_275766', train=True)
            test_data = utils.CustomDataset('2017_12_21_16_51_3_275766', train=False)
            # self._train_set = torch.utils.data.DataLoader(train_data, batch_size=1, shuffle=False, num_workers=0)
            if not self._opt.full_mi:
                self._test_set = torch.utils.data.DataLoader(test_data, batch_size=1, shuffle=False, num_workers=0)
            else:
                dataset = torch.utils.data.ConcatDataset([train_data, test_data])
                self._test_set = torch.utils.data.DataLoader(dataset, batch_size=1, shuffle=False, num_workers=0)
            print("IBnet experiment")
        else:
            raise RuntimeError('Dataset {name} is not available; please use an existing dataset'.format(name=self._opt.dataset))

        # get model
        self._model = Model(activation=self._opt.activation, dims=self._opt.layer_dims, train=False)
        
        # get measure
        # self.measure = measure.kde()
        self.measure = measure.EVKL() # our new measure
Example #10
    parser = argparse.ArgumentParser(
        description="Build your own neural network. Use JSON file.")
    parser.add_argument(
        "-f",
        "--file",
        help="submit JSON file; default file architecture.json",
        type=str)
    parser.add_argument("-w",
                        "--weights",
                        help="print weights during learning",
                        action="store_true")
    args = parser.parse_args()
    if args.weights:
        print_weights = True
    if args.file:
        json_file = args.file
    json_parser = JsonParser(json_file)
    json_parser.parse_json()
    train_df = pd.read_csv(json_parser.input_train_file_path)
    test_df = pd.read_csv(json_parser.input_test_file_path)
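    # choose the data provider and output layer according to the task type from the JSON config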

    type_of_assigment = json_parser.type
    p_train = None
    p_test = None
    output_layer = None
    if type_of_assigment == "regression":
        p_train = RegressionProvider(train_df,
                                     batch_size=json_parser.batch_size)
        p_test = RegressionProvider(test_df, batch_size=json_parser.batch_size)
        output_layer = "linear"
    elif type_of_assigment == "classification":
        p_train = ClassifierProvider(train_df,
Example #11
json_dir = "../json/output"
model_dir = "models"

for f in os.listdir(json_dir):
    os.remove(os.path.join(json_dir, f))

# 2. Run Openpose Webcam Mode
handler = subprocess.Popen([
    openpose_demo_path, "--disable_blending=false", "--camera=" +
    str(camera_offset), "--net_resolution=128x128", "--write_json=" + json_dir,
    "--model_folder=" + model_dir, "--number_people_max=1"
],
                           shell=False)

print("Start 3 push-up")
tys = ["elbow", "arm", "shoulder"]
for ty in tys:
    fds = FeedbackSystem()
    fds.load("demo_front_" + ty + "_model", "front")

    # 3. Give feedback
    #try:
    j = JsonParser()
    video = j.parse(None, 60, json_dir, "front", None)
    result = fds.feedback_kmeans(video, ty)
    print(result)
    handler.terminate()
    #except:
    #    print("Exception Occured")
    #    handler.terminate()
Example #12
    def __init__(self):

        self.parse_json = JsonParser()
Example #13
File: tools.py Project: tmanfree/capt
    def __init__(self):
        self.find = Find()
        self.parse_json = JsonParser()
Example #14
def main():
    parser = JsonParser()
    parser.extract_from_json()
Example #15
def main(args):

    parser = argparse.ArgumentParser()
    parser.add_argument("-a",
                        nargs=1,
                        action="store",
                        dest="json_file_name",
                        default=["empty"])
    parser.add_argument("-o",
                        nargs=1,
                        action="store",
                        dest="image_name",
                        default=["empty"])
    parser.add_argument("--output",
                        nargs=1,
                        action="store",
                        dest="image_name",
                        default=["empty"])
    parsed_args = parser.parse_args(args)

    image_name = parsed_args.image_name[0]
    json_file_name = parsed_args.json_file_name[0]

    if json_file_name == "empty":
        print(
            "\n Welcome to my shape drawer :)\n"
            " you can use following options:\n\n"
            "  -a <json-file-name> : path to json file describing the shape or shapes\n"
            "  -o | --output <image-name.png> : allows to save your drawing\n\n"
            " working examples:\n\n"
            " python main.py -a data.json\n"
            " python main.py -a image.json -o star.png\n")
        exit(0)

    json_parser = JsonParser()
    screen, palette, figures = json_parser.parse_json(json_file_name)
    database = DataBase()
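    # dispatch each parsed figure to its matching drawer class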

    for figure in figures:

        figure_name = figure.get('type')

        if figure_name in database.figures:

            if figure_name == 'circle':

                shape = CircleDrawer(screen, palette, figure, image_name)
                shape.draw_circle()

            elif figure_name in ['square', 'rectangle']:

                shape = RectangleDrawer(screen, palette, figure, image_name)
                shape.draw_rectangle()

            elif figure_name == 'point':

                shape = PointDrawer(screen, palette, figure, image_name)
                shape.draw_point()

            elif figure_name == 'polygon':

                shape = PolygonDrawer(screen, palette, figure, image_name)
                shape.draw_polygon()

        else:
            print("Unrecognized figure: ", figure_name)
Example #16
from json_parser import JsonParser
import sys

if __name__ == '__main__':
    j = JsonParser(sys.argv[1])
    j.read_all()

    print('Total records number {}'.format(j.records_counter))
    print('Actions counter :')
    for k, v in j.actions.items():
        print('\t {} : {}'.format(k, v))
    print('Success percent {:.2f}%'.format(float(j.success_counter) / j.logs_counter * 100))
Example #17
from json_parser import JsonParser

json_string = ''

with open("sample.json") as f:
    for line in f.readlines():
        json_string += line

print(JsonParser().parse(json_string)[0])
Example #18
    def start_feedback(self):
        #time.sleep(5)
        #collect data 
        
        
        print("feedback start")
        print("GET READY")
        time.sleep(3)
        print("START")

        #for i in reversed(range(self.sub2_layout.count())):
        #    self.sub2_layout.itemAt(i).widget().setParent(None)
 
        #go_img = QLabel("GO")
        #go_img.setPixmap(QPixmap("../pictures/go.JPG").scaledToWidth(320))
        #go_img.setAlignment(Qt.AlignCenter)
        #self.sub2_layout.addWidget(go_img)




        start_point = len(os.listdir(json_dir))
        j = JsonParser(start_point=start_point)
   
        # incremental try
        frame_no_list = [i*10 for i in range(4,10)]
        err = 0
        
        tys = ["elbow", "arm", "shoulder"]
        result_dict = {} 
        

        for frame_no in frame_no_list:  
            print(str(frame_no) + " frame test")
            video = j.parse(None, frame_no , json_dir, "front", None)
            result_dict = {}
            err = 0 
            for ty in tys:
                print("doing " + ty)
                fds = FeedbackSystem()
                fds.load("demo_front_" + ty + "_model", "front")
                result, div_zero = fds.feedback_kmeans(video, ty, threshold=0.3)
                if div_zero:
                    err = 1
                else:
                    result_dict[ty] = result 

            if err == 0:
                break
            
        if err == 1:
            self.stop_op_screen("Posture is not detected. Please adjust webcam position") 
            return
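        # turn the per-joint k-means results into user-facing feedback messages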
         
        fdm = FeedbackMsg(result_dict)
        msg = fdm.get_feedback_msg()
        #self.op_handler.terminate()


        # now print out feedback msg
        #self.stop_op_screen("Result")
              
        need_cor = msg[0]
        cor_msg = msg[1:]

        #top_layout = QVBoxLayout() 
        #bottom_layout = QVBoxLayout()

        """ 
        for m in cor_msg:  
            op_tmp = QLabel(m)
            op_tmp.setAlignment(Qt.AlignCenter)
            self.op_layout.addWidget(op_tmp)
        """
        
        for i in reversed(range(self.sub2_layout.count())):
            self.sub2_layout.itemAt(i).widget().setParent(None)
       
        if need_cor:
            bad_img = QLabel()
            bad_img.setPixmap(QPixmap("../pictures/bad.JPG").scaledToWidth(260))
            bad_img.setAlignment(Qt.AlignCenter)
            self.sub2_layout.addWidget(bad_img)
        else:
            nice_img = QLabel()
            nice_img.setPixmap(QPixmap("../pictures/nice.JPG").scaledToWidth(260))
            nice_img.setAlignment(Qt.AlignCenter)
            self.sub2_layout.addWidget(nice_img)

        feedback_msg = ""
        for m in cor_msg:  
            feedback_msg += m + "\n"

        op_tmp = QLabel(feedback_msg)
        op_tmp.setAlignment(Qt.AlignCenter)
        op_tmp.setSizePolicy(QSizePolicy(QSizePolicy.Fixed, QSizePolicy.Fixed))
        self.sub2_layout.addWidget(op_tmp)
Example #19
        """
        'Content-Type':
        'application/json; charset=UTF-8',
        'Host':
        'rejestrzlobkow.mpips.gov.pl:8443',
        'Origin':
        'https://rejestrzlobkow.mpips.gov.pl:8443',
        'RequestResponseContentType':
        'application/json',
        'Sec-Fetch-Dest':
        'empty',
        'Sec-Fetch-Mode':
        'cors',
        'Sec-Fetch-Site':
        'same-origin',
        'Referer':
        'https://rejestrzlobkow.mpips.gov.pl:8443/lista/zk?&',
        'User-Agent':
        'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/85.0.4183.102 Safari/537.36'
    })

json_parser = JsonParser(response.text)
aa = json_parser.get_parsed_nursery_json()
print(aa)

# TODO: some lightweight database to load this data into

# TODO: some way to load everything in
# sqlite!

#print (response.text)
#print(response)

Example #20
def setup_tables(connection):
    SQL_CREATE_USER_USAGE_TABLE = """CREATE TABLE IF NOT EXISTS user_usage (
                                        id integer PRIMARY KEY,
                                        predicted_usage text NOT NULL,
                                        actual_usage text NOT NULL,
                                        salesforce_id integer NOT NULL,
                                        FOREIGN KEY (salesforce_id) REFERENCES users (id)
                                    ); """

    SQL_CREATE_USERS_TABLE = """CREATE TABLE IF NOT EXISTS users (
                                        id integer PRIMARY KEY, 
                                        country text NOT NULL,
                                        name text NOT NULL,
                                        owner text NOT NULL,
                                        manager text NOT NULL
                                );"""
    if connection is not None:
        create_table(connection, SQL_CREATE_USER_USAGE_TABLE)
        create_table(connection, SQL_CREATE_USERS_TABLE)


if __name__ == "__main__":
    connection = DBProvider().connection
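    # create the tables, then load users and user-usage rows parsed by JsonParser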

    with connection:
        setup_tables(connection)
        insert_users(connection, JsonParser().domainY())
        insert_users_usage(connection, JsonParser().user_usage())
Example #21
class TaskManager():
    """
        Static class used to call functions
    """
    p = JsonParser()
    """
        gets all user ids from the data supplied
    """
    @staticmethod
    def get_all_users(data):
        users = []
        for i in data:
            if i.get("visitor_uuid") not in users:
                users.append(i.get("visitor_uuid"))
        return users

    """
        gets all document ids from the data supplied
    """

    @staticmethod
    def get_all_documents(data):
        documents = []
        for i in data:
            if i.get("subject_doc_id") not in documents:
                documents.append(i.get("subject_doc_id"))
        return documents

    """
        gets all the document ids from the data supplied which have been visited by a certain user
    """

    @staticmethod
    def get_all_documents_by_user(user_id, data):
        data = TaskManager.filter_data(data, "visitor_uuid", user_id)
        return TaskManager.get_all_documents(data)

    """
        gets all the user ids from the data supplied who have visited a certain document
    """

    @staticmethod
    def get_all_users_by_doc(doc_id, data):
        data = TaskManager.filter_data(data, "subject_doc_id", doc_id)
        return TaskManager.get_all_users(data)

    """
        loads, reads and parses in a file, using the supplied filename
    """

    @staticmethod
    def load_file(file):
        if file is not None:
            r = Reader(file)
        else:
            r = Reader("sample_100k_lines.json")
        while True:
            try:
                TaskManager.p.add(r.read_line())
            except JSONDecodeError:
                print("Completed Parsing File")
                break
        return TaskManager.p.get_all()

    """
        handles all the tasks by calling the corresponding functions which fulfill the tasks' objectives
    """

    @staticmethod
    def task_handler(doc_id, user_id, task_id, data, g, cmd):
        if g is not None:
            if g.canvas is not None:
                g.canvas.get_tk_widget().destroy()
            if g.toolbar is not None:
                g.toolbar.destroy()
                g.toolbar = None
            if g.listbox is not None:
                g.listbox.destroy()
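        # dispatch to the branch that handles the selected task id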
        if task_id == "2a":
            if cmd and doc_id not in TaskManager.get_all_documents(
                    data) or doc_id is None:
                print("Please Provide a Valid Document ID")
            else:
                histogram = Histograms(
                    TaskManager.get_countries(
                        doc_id,
                        TaskManager.filter_data(data, "subject_doc_id",
                                                doc_id)), "Task 2A", cmd)
                if not cmd:
                    TaskManager.plot_figure_gui(g, histogram)
        elif task_id == "2b":
            if cmd and doc_id not in TaskManager.get_all_documents(
                    data) or doc_id is None:
                print("Please Provide a Valid Document ID")
            else:
                histogram = Histograms(
                    TaskManager.get_continents(
                        doc_id,
                        TaskManager.filter_data(data, "subject_doc_id",
                                                doc_id)), "Task 2B", cmd)
                if not cmd:
                    TaskManager.plot_figure_gui(g, histogram)
        elif task_id == "3a":
            histogram = Histograms(TaskManager.simple_get_all_browser(data),
                                   "Task 3A", cmd)
            if not cmd:
                TaskManager.plot_figure_gui(g, histogram)
        elif task_id == "3b":
            histogram = Histograms(TaskManager.get_all_browser(data),
                                   "Task 3B", cmd)
            if not cmd:
                TaskManager.plot_figure_gui(g, histogram)
        elif task_id == "4":
            top10 = TaskManager.get_top_10(data)
            if cmd:
                print(top10)
            else:
                TaskManager.load_list(g, top10)
        elif task_id == "5a":
            users = TaskManager.get_all_users_by_doc(doc_id, data)
            if cmd:
                print(users)
            else:
                TaskManager.load_list(g, users)
        elif task_id == "5b":
            docs = TaskManager.get_all_documents_by_user(user_id, data)
            if cmd:
                print(docs)
            else:
                TaskManager.load_list(g, docs)
        elif task_id == "5c":
            also_likes = TaskManager.task5(data, doc_id, user_id, None)
            if cmd:
                print(also_likes)
            else:
                TaskManager.load_list(g, also_likes)
        elif task_id == "5d":
            also_likes = TaskManager.task5(data, doc_id, user_id,
                                           TaskManager.sort_by_readership)
            if cmd:
                print(also_likes)
            else:
                TaskManager.load_list(g, also_likes)
        elif task_id == "5e":
            also_likes = TaskManager.task5(data, doc_id, user_id,
                                           TaskManager.sort_by_number)
            if cmd:
                print(also_likes)
            else:
                TaskManager.load_list(g, also_likes)
        else:
            if cmd:
                print("Invalid Task")

    """
        filters data based on a specific key satisfying a specific value and returns the filtered list
    """

    @staticmethod
    def filter_data(data, filter_key, value):
        results = []
        for i in data:
            if i.get(filter_key) == value:
                results.append(i)
        return results

    """
        filters data based on a specific key not satisfying a specific value and returns the filtered list
    """

    @staticmethod
    def inverse_filter_data(data, filter_key, value):
        results = []
        for i in data:
            if not i.get(filter_key) == value:
                results.append(i)
        return results

    """
        gets the top 10 users who spend the most time reading in a descending order
    """

    @staticmethod
    def get_top_10(data):
        count = dict()
        users = TaskManager.get_all_users(data)
        for i in users:
            count.update({i: 0})
        for j in data:
            if not j.get("event_readtime") is None:
                count[j["visitor_uuid"]] += j.get("event_readtime")
        results = sorted(count, key=count.get, reverse=True)
        results = results[:10]
        return results

    """
        gets how frequently each browser has been used to visit the application; this distinguishes browser versions
    """

    @staticmethod
    def simple_get_all_browser(data):
        browsers = {}
        for i in data:
            b = httpagentparser.simple_detect(i["visitor_useragent"])[1]
            if b not in browsers:
                browsers.update({b: 1})
            else:
                browsers[b] += 1
        return browsers

    """
        gets how frequently each browser has been used to visit the application; this does not distinguish browser versions
    """

    @staticmethod
    def get_all_browser(data):
        results = {}
        browsers = TaskManager.simple_get_all_browser(data)
        for i in browsers.keys():
            r = re.findall('.+ [0-9]', i)
            for j in r:
                if j[:-2] not in results:
                    results.update({j[:-2]: browsers[i]})
                else:
                    results[j[:-2]] += browsers[i]
        return results

    """
        gets how frequently users have visited a specific document by their country
    """

    @staticmethod
    def get_countries(doc_id, data):
        countries = dict()
        for k in data:
            if k.get("subject_doc_id") == doc_id:
                if k.get("visitor_country") in countries.keys():
                    countries[k["visitor_country"]] += 1
                else:
                    countries.update({k.get("visitor_country"): 1})
        return countries

    """
        gets how frequently users have visited a specific document by their continents
    """

    @staticmethod
    def get_continents(doc_id, data):
        continents = {"AF": 0, "EU": 0, "OC": 0, "NA": 0, "SA": 0, "AS": 0}
        data = TaskManager.get_countries(doc_id, data)
        if data is None:
            return
        for i in data.keys():
            if TaskManager.cntry_to_cont[i] == "AF":
                continents["AF"] += data[i]
            elif TaskManager.cntry_to_cont[i] == "EU":
                continents["EU"] += data[i]
            elif TaskManager.cntry_to_cont[i] == "OC":
                continents["OC"] += data[i]
            elif TaskManager.cntry_to_cont[i] == "NA":
                continents["NA"] += data[i]
            elif TaskManager.cntry_to_cont[i] == "SA":
                continents["SA"] += data[i]
            elif TaskManager.cntry_to_cont[i] == "AS":
                continents["AS"] += data[i]
        return continents

    """
        gets all the documents other users have read based on a document the supplied user has read
    """

    @staticmethod
    def task5(data, doc_id, user, sorting):
        users_read = []
        if doc_id is not None:
            d = TaskManager.filter_data(data, "subject_doc_id", doc_id)
            u = TaskManager.get_all_users(d)
            if user in u:
                for i in u:
                    if i != user:
                        u2 = TaskManager.filter_data(data, "visitor_uuid", i)
                        users_read.append(
                            TaskManager.get_all_documents(
                                TaskManager.filter_data(
                                    TaskManager.inverse_filter_data(
                                        u2, "subject_doc_id", doc_id),
                                    "event_type", "read")))
                docs = dict()
                for i in users_read:
                    for j in i:
                        if j is not None:
                            if j not in docs.keys():
                                docs.update({j: 1})
                            else:
                                docs[j] += 1
                if sorting is not None:
                    result = sorting(docs)
                else:
                    result = docs
            else:
                result = []
                print("Please Enter a Valid User ID")
        else:
            result = []
            print("Please Enter a Valid Document ID")
        return result

    """
        sorts the documents by the time spent reading them and returns them in a descending order
    """

    @staticmethod
    def sort_by_readership(data):
        result = dict()
        for i in data.keys():
            temp = TaskManager.filter_data(TaskManager.p.get_all(),
                                           "subject_doc_id", i)
            for j in temp:
                if j.get("event_readtime") is not None:
                    if i not in result:
                        result.update({i: j.get("event_readtime")})
                    else:
                        result[i] += j.get("event_readtime")
        print(result)
        return TaskManager.sort_by_number(result)

    """
        sorts the documents by the number of users who read them and returns them in a descending order
    """

    @staticmethod
    def sort_by_number(data):
        if len(data) < 11:
            return sorted(data.keys(), reverse=True, key=data.__getitem__)
        else:
            return sorted(data.keys(), reverse=True, key=data.__getitem__)[:10]

    """
        embeds a chart to the GUI
    """

    @staticmethod
    def plot_figure_gui(g, histogram):
        g.canvas = FigureCanvasTkAgg(histogram.figure, g.main)
        g.canvas.show()
        g.canvas.get_tk_widget().pack(expand=1, side=RIGHT)
        g.toolbar = NavigationToolbar2TkAgg(g.canvas, g.main)
        g.toolbar.update()
        g.main.mainloop()

    """
        embeds a listbox with the supplied data items to the GUI
    """

    @staticmethod
    def load_list(g, data):
        g.listbox = Listbox(width=60)
        g.listbox.pack(expand=True, side=BOTTOM)
        for i in data:
            g.listbox.insert(END, i)
        g.main.mainloop()

    # dictionary used to assign countries to continents
    cntry_to_cont = {
        'AP': 'AS',
        'AF': 'AS',
        'AX': 'EU',
        'AL': 'EU',
        'DZ': 'AF',
        'AS': 'OC',
        'AD': 'EU',
        'AO': 'AF',
        'AI': 'NA',
        'AQ': 'AN',
        'AG': 'NA',
        'AR': 'SA',
        'AM': 'AS',
        'AW': 'NA',
        'AU': 'OC',
        'AT': 'EU',
        'AZ': 'AS',
        'BS': 'NA',
        'BH': 'AS',
        'BD': 'AS',
        'BB': 'NA',
        'BY': 'EU',
        'BE': 'EU',
        'BZ': 'NA',
        'BJ': 'AF',
        'BM': 'NA',
        'BT': 'AS',
        'BO': 'SA',
        'BQ': 'NA',
        'BA': 'EU',
        'BW': 'AF',
        'BV': 'AN',
        'BR': 'SA',
        'IO': 'AS',
        'VG': 'NA',
        'BN': 'AS',
        'BG': 'EU',
        'BF': 'AF',
        'BI': 'AF',
        'KH': 'AS',
        'CM': 'AF',
        'CA': 'NA',
        'CV': 'AF',
        'KY': 'NA',
        'CF': 'AF',
        'TD': 'AF',
        'CL': 'SA',
        'CN': 'AS',
        'CX': 'AS',
        'CC': 'AS',
        'CO': 'SA',
        'KM': 'AF',
        'CD': 'AF',
        'CG': 'AF',
        'CK': 'OC',
        'CR': 'NA',
        'CI': 'AF',
        'HR': 'EU',
        'CU': 'NA',
        'CW': 'NA',
        'CY': 'AS',
        'CZ': 'EU',
        'DK': 'EU',
        'DJ': 'AF',
        'DM': 'NA',
        'DO': 'NA',
        'EC': 'SA',
        'EG': 'AF',
        'SV': 'NA',
        'GQ': 'AF',
        'ER': 'AF',
        'EE': 'EU',
        'ET': 'AF',
        'FO': 'EU',
        'FK': 'SA',
        'FJ': 'OC',
        'FI': 'EU',
        'FR': 'EU',
        'GF': 'SA',
        'PF': 'OC',
        'TF': 'AN',
        'GA': 'AF',
        'GM': 'AF',
        'GE': 'AS',
        'DE': 'EU',
        'GH': 'AF',
        'GI': 'EU',
        'GR': 'EU',
        'GL': 'NA',
        'GD': 'NA',
        'GP': 'NA',
        'GU': 'OC',
        'GT': 'NA',
        'GG': 'EU',
        'GN': 'AF',
        'GW': 'AF',
        'GY': 'SA',
        'HT': 'NA',
        'HM': 'AN',
        'VA': 'EU',
        'HN': 'NA',
        'HK': 'AS',
        'HU': 'EU',
        'IS': 'EU',
        'IN': 'AS',
        'ID': 'AS',
        'IR': 'AS',
        'IQ': 'AS',
        'IE': 'EU',
        'IM': 'EU',
        'IL': 'AS',
        'IT': 'EU',
        'JM': 'NA',
        'JP': 'AS',
        'JE': 'EU',
        'JO': 'AS',
        'KZ': 'AS',
        'KE': 'AF',
        'KI': 'OC',
        'KP': 'AS',
        'KR': 'AS',
        'KW': 'AS',
        'KG': 'AS',
        'LA': 'AS',
        'LV': 'EU',
        'LB': 'AS',
        'LS': 'AF',
        'LR': 'AF',
        'LY': 'AF',
        'LI': 'EU',
        'LT': 'EU',
        'LU': 'EU',
        'MO': 'AS',
        'MK': 'EU',
        'MG': 'AF',
        'MW': 'AF',
        'MY': 'AS',
        'MV': 'AS',
        'ML': 'AF',
        'MT': 'EU',
        'MH': 'OC',
        'MQ': 'NA',
        'MR': 'AF',
        'MU': 'AF',
        'YT': 'AF',
        'MX': 'NA',
        'FM': 'OC',
        'MD': 'EU',
        'MC': 'EU',
        'MN': 'AS',
        'ME': 'EU',
        'MS': 'NA',
        'MA': 'AF',
        'MZ': 'AF',
        'MM': 'AS',
        'NA': 'AF',
        'NR': 'OC',
        'NP': 'AS',
        'NL': 'EU',
        'NC': 'OC',
        'NZ': 'OC',
        'NI': 'NA',
        'NE': 'AF',
        'NG': 'AF',
        'NU': 'OC',
        'NF': 'OC',
        'MP': 'OC',
        'NO': 'EU',
        'OM': 'AS',
        'PK': 'AS',
        'PW': 'OC',
        'PS': 'AS',
        'PA': 'NA',
        'PG': 'OC',
        'PY': 'SA',
        'PE': 'SA',
        'PH': 'AS',
        'PN': 'OC',
        'PL': 'EU',
        'PT': 'EU',
        'PR': 'NA',
        'QA': 'AS',
        'RE': 'AF',
        'RO': 'EU',
        'RU': 'EU',
        'RW': 'AF',
        'BL': 'NA',
        'SH': 'AF',
        'KN': 'NA',
        'LC': 'NA',
        'MF': 'NA',
        'PM': 'NA',
        'VC': 'NA',
        'WS': 'OC',
        'SM': 'EU',
        'ST': 'AF',
        'SA': 'AS',
        'SN': 'AF',
        'RS': 'EU',
        'SC': 'AF',
        'SL': 'AF',
        'SG': 'AS',
        'SX': 'NA',
        'SK': 'EU',
        'SI': 'EU',
        'SB': 'OC',
        'SO': 'AF',
        'ZA': 'AF',
        'GS': 'AN',
        'SS': 'AF',
        'ES': 'EU',
        'LK': 'AS',
        'SD': 'AF',
        'SR': 'SA',
        'SJ': 'EU',
        'SZ': 'AF',
        'SE': 'EU',
        'CH': 'EU',
        'SY': 'AS',
        'TW': 'AS',
        'TJ': 'AS',
        'TZ': 'AF',
        'TH': 'AS',
        'TL': 'AS',
        'TG': 'AF',
        'TK': 'OC',
        'TO': 'OC',
        'TT': 'NA',
        'TN': 'AF',
        'TR': 'AS',
        'TM': 'AS',
        'TC': 'NA',
        'TV': 'OC',
        'UG': 'AF',
        'UA': 'EU',
        'AE': 'AS',
        'GB': 'EU',
        'US': 'NA',
        'UM': 'OC',
        'VI': 'NA',
        'UY': 'SA',
        'UZ': 'AS',
        'VU': 'OC',
        'VE': 'SA',
        'VN': 'AS',
        'WF': 'OC',
        'EH': 'AF',
        'YE': 'AS',
        'ZM': 'AF',
        'ZW': 'AF',
        'ZZ': 'Unknown',
        'EU': 'Unknown'
    }