Example #1
0
    def __init__(self, name, address, cuisine_type, sections=None, id=DEFAULT_TEST_UUID if DEMO_MODE else None):
        # Restaurant-style resource: stores its fields in self._json (set up
        # by the superclass) and, in demo mode, POSTs a sample object graph.
        # NOTE(review): `id` shadows the builtin, and its default is evaluated
        # once at definition time — changing DEMO_MODE after import has no
        # effect on the default. Confirm this is intentional.
        super().__init__(True, id)
        self._json['name'] = name
        self._json['address'] = address
        self._json['cuisineType'] = cuisine_type

        # Explicitly supplied sections win over demo seeding.
        if sections is not None:
            self._json['sections'] = sections
        elif DEMO_MODE:
            # Seed the backend: POST this restaurant, then one section with a
            # table, a reservation, an account, a shift, a visit and two
            # orders, printing each response for manual inspection.
            print(self.post())
            section = Section('booths', id)
            print(section.post())
            table = Table(3, 25, 25, 0, section.section_id)
            print(table.post())
            reservation = Reservation(table.table_id, id)
            print(reservation.post())
            account = Account()
            print(account.post())
            # NOTE(review): `section.section_id` is used above but
            # `section.id` here — confirm both attributes exist on Section.
            shift = Shift(None, section.id)
            print(shift.post())
            visit = Visit(shift.id, id)
            print(visit.post())
            order = Order(Status.COOK, 10, account.id,
                          'Steak was undercooked last time.', shift.id, visit.id)
            print(order.post())
            order_two = Order(Status.PREP, 30, account.id,
                              'Less pie flavor please.', shift.id, visit.id)
            print(order_two.post())
Example #2
0
    def crawl(self, num_batches=cm.NUM_BATCHES, num_instances=cm.NUM_INSTANCES, start_line=0):
        """Visit every URL `num_instances` times in each of `num_batches` batches.

        :param num_batches: complete passes to make over self.urls
        :param num_instances: visits to pay to each site within a batch
        :param start_line: offset used when numbering per-site directories
        """
        wl_log.info('Crawl configuration: batches %s, instances: %s, tbb_version %s, no of URLs: %s, crawl dir: %s, XVFB: %s, screenshot: %s'
                    % (num_batches, num_instances, self.tbb_version, len(self.urls), self.crawl_dir, self.xvfb, self.capture_screen))

        # for each batch
        for batch_num in range(num_batches):
            wl_log.info('********** Starting batch %s **********' % batch_num)
            site_num = start_line
            bg_site = None  # this variant never loads a background site
            batch_dir = ut.create_dir(
                os.path.join(self.crawl_dir, str(batch_num)))

            # init/reset tor process to have a different circuit.
            # make sure that we're not using the same guard node again
            wl_log.info('********** Restarting Tor Before Batch **********')
            self.tor_controller.restart_tor()
            sites_crawled_with_same_proc = 0

            # for each site
            for page_url in self.urls:
                sites_crawled_with_same_proc += 1
                # recycle the Tor process periodically so circuits don't age
                if sites_crawled_with_same_proc > cm.MAX_SITES_PER_TOR_PROCESS:
                    wl_log.info('********** Restarting Tor Process **********')
                    self.tor_controller.restart_tor()
                    sites_crawled_with_same_proc = 0

                wl_log.info('********** Crawling %s **********' % page_url)
                # NOTE(review): this truncates the URL itself (not just the
                # derived filename) to MAX_FNAME_LENGTH — confirm intended.
                page_url = page_url[:cm.MAX_FNAME_LENGTH]
                site_dir = ut.create_dir(os.path.join(
                    batch_dir, ut.get_filename_from_url(page_url, site_num)))

                for instance_num in range(num_instances):
                    wl_log.info('********** Visit #%s to %s **********' %
                                (instance_num, page_url))
                    self.visit = None
                    try:
                        self.visit = Visit(batch_num, site_num, instance_num, page_url, site_dir,
                                           self.tbb_version, self.tor_controller, bg_site, self.xvfb, self.capture_screen)
                        self.visit.get()
                    except KeyboardInterrupt:  # CTRL + C
                        raise KeyboardInterrupt
                    except (ut.TimeExceededError, TimeoutException) as exc:
                        # timeouts are expected occasionally; clean up so the
                        # next visit starts from a fresh state
                        wl_log.critical('Visit to %s timed out! %s %s' % (
                            page_url, exc, type(exc)))
                        if self.visit:
                            self.visit.cleanup_visit()
                    except Exception:
                        # any other failure: log with traceback, keep crawling
                        wl_log.critical('Exception crawling %s' %
                                        page_url, exc_info=True)
                        if self.visit:
                            self.visit.cleanup_visit()

                # END - for each visit
                site_num += 1
                time.sleep(cm.PAUSE_BETWEEN_SITES)
    def __visit_neighbours(self, node_name: str):
        """
        Visits all the neighbours of given node, relaxing their path costs.
        :param node_name: Name of the node to visit neighbours of
        """
        # Get the actual node and its neighbours from the graph
        node = self.__graph.get_node_by_name(node_name)
        node_neighbours = self.__graph.get_neighbours(node_name)

        # Loop over the neighbours and visit them all
        for neighbour in node_neighbours:
            # Get the edge (with weight) between node and neighbour
            edge = node.get_edge(neighbour.name)

            # Record that the neighbour has been reached from this node
            neighbour.visit(node_name)

            # Check if the neighbour has already been visited
            if self.__check_node_is_already_visited(neighbour.name):
                # Get the visited node record
                visited_node = self.__get_visited_node(neighbour.name)

                # Cost of reaching the neighbour *through the current node*.
                # FIX: the comparison previously used the cost of the
                # neighbour's old predecessor plus this edge's weight, which
                # is not the cost of the candidate path — the candidate path
                # runs through node_name.
                candidate_cost = self.__get_visited_node(node_name).cost + edge.weight

                # Standard relaxation: adopt the shorter path if one is found
                if visited_node.cost > candidate_cost:
                    visited_node.visited_by = node_name
                    visited_node.cost = candidate_cost
                    self.__update_visited_node(visited_node)
            else:
                # First time this neighbour is reached: queue a new visit
                visit = Visit(neighbour.name, node_name, self.__get_visited_node(node_name).cost + edge.weight)
                self.__priority_queue.append(visit)
    def solve(self, fr: str, to: str):
        """
        Runs the shortest-path search between two named nodes.
        :param fr: The name of the node to start from
        :param to: The name of the node to end at
        """
        self.__fr, self.__to = fr, to

        # Seed the queue with a zero-cost visit of the start node by itself
        self.__priority_queue.append(Visit(self.__fr, self.__fr, 0))

        # Begin at the starting node
        current = fr

        # Iterate until the queue empties or the destination is fully visited
        while not self.__is_solved:
            self.__visit_neighbours(current)
            self.__update_node_fully_visited(current)
            self.__update_priority_queue()

            # Finished when nothing is queued or the target is fully visited;
            # otherwise move on to the next candidate node
            if not self.__priority_queue or self.__is_to_value_fully_visited:
                self.__is_solved = True
            else:
                current = self.__get_next_visit_node()
Example #5
0
 def process_night_vote(self, votes):
     """Resolve the night vote into a save action, or nothing at all."""
     counts, target = vote(votes)
     # A tie resolves to VOTE_NL ("no lynch") — both mean no action
     if target == VOTE_NL:
         return []
     return [Visit(self.player.player_number, target, self.save,
                   VisitPriority.Save)]
Example #6
0
 def process_day_vote(self,
                      votes):
     """Resolve the day vote (shared by all roles) into a vote action."""
     counts, target = vote(votes)
     # A tie resolves to VOTE_NL ("no lynch") — both mean no action
     if target == VOTE_NL:
         return []
     return [Visit(self.player.player_number, target, self.dayvote,
                   VisitPriority.Vote)]
Example #7
0
 def addVisit(self, id, lat, lng, name=None, start=None, end=None,
              duration=None, load=None, type=None, priority=None):
     """Create a Visit from the given fields and register it under *id*."""
     new_visit = Visit(lat, lng, name, start, end, duration, load,
                       type, priority)
     self.visits[id] = new_visit
Example #8
0
    def set_visit(self, id_: str, params: dict) -> None:
        """
            Sets a visit with the specified id and parameters:
                location (required): Object representing the location of the visit.
                    lat: Latitude of this location
                    lng: Longitude of this location
                    name: (optional) Name of the location
                start: the earliest time for this visit. Default value is 00:00, if not specified.
                end: the latest time for this visit. Default value is 23:59, if not specified.
                duration: the length of this visit in minutes
                demand: the capacity that this visit requires
                priority: higher priority visits are more likely to be served
                type: restrict the vehicle that can serve this visit
                time_windows: specify different time-windows for serving the visit. It should be an array of dicts: [ { "start": "08:00", "end": "12:00" } ]
        """
        if id_ in self._visits:
            # Fix: this warning previously said "route" — apparently copied
            # from a sibling setter; this method manages visits.
            warnings.warn(
                "ID for this visit already exists; overwriting previous definition"
            )

        self._visits[id_] = Visit(params)
Example #9
0
    #g.add_node('Place_'+str(row['PlaceId']))

#Reading the person details and creating an object for each person and adding to the dictionary
person_df = pd.read_csv('person.csv')
person_dict = dict()
for index, row in person_df.iterrows():
    t = Person(row['PersonId'], row['PersonName'], row['HealthStatus'],
               row['ConfirmedTime'], row['AddressLat'], row['AddressLong'])
    person_dict[row['PersonId']] = t
    #g.add_node('Person_'+str(row['PersonId']))

#Reading the visit details of each person and creating the object for each visit and adding to the dictionary
visit_df = pd.read_csv('visit_place.csv')
visit_dict = dict()
for index, row in visit_df.iterrows():
    v = Visit(row['VisitId'], row['PersonId'], row['PlaceId'],
              row['StartTime'], row['EndTime'])
    # FIX: visits were previously stored into person_dict under VisitId,
    # which left visit_dict empty and risked clobbering person entries.
    visit_dict[row['VisitId']] = v

labels = dict()
#Adding the nodes of persons based on the visit file, simultaneously adding their names to the labels
for node in visit_df['PersonId']:
    g.add_node(node)
    labels[node] = person_dict[node].get_person_name()

#Adding the nodes of places based on the visit file, simultaneously adding their names to the labels
for node in visit_df['PlaceId']:
    g.add_node(node)
    labels[node] = place_dict[node].get_place_name()

#Fetching the start and end time of each visit and add it as weight to the edge
#Conditions if person to person 15 mins
class Crawler(object):
    """Provides methods to collect traffic traces."""

    def __init__(self, url_list, torrc_dict,
                 experiment=cm.EXP_TYPE_WANG_AND_GOLDBERG, xvfb=False,
                 capture_screen=True, output=cm.RESULTS_DIR):
        """Prepare crawl/log directories, logging and the Tor controller.

        :param url_list: URLs to visit
        :param torrc_dict: torrc options passed to the Tor controller
        :param experiment: experiment type (e.g. single-tab vs. multitab)
        :param xvfb: run the browser under a virtual framebuffer
        :param capture_screen: save screenshots of visited pages
        :param output: base directory for crawl results
        """
        # Create instance of Tor controller and sniffer used for the crawler
        self.crawl_dir = None
        self.crawl_logs_dir = None
        self.visit = None
        self.output = abspath(output)
        self.urls = url_list  # keep list of urls we'll visit
        self.init_crawl_dirs()  # initializes crawl_dir
        self.tor_log = os.path.join(self.crawl_logs_dir, "tor.log")
        linkname = os.path.join(self.output, 'latest_tor_log')
        add_symlink(linkname, self.tor_log)
        self.tbb_version = cm.RECOMMENDED_TBB_VERSION
        self.experiment = experiment
        self.tor_controller = TorController(tbb_path=cm.TBB_PATH,
                                            torrc_dict=torrc_dict,
                                            tor_log=self.tor_log)
        self.tor_process = None
        self.tb_driver = None
        self.capture_screen = capture_screen
        self.xvfb = xvfb
        add_log_file_handler(wl_log, self.log_file)
        linkname = os.path.join(self.output, 'latest_crawl_log')
        add_symlink(linkname, self.log_file)  # add a symbolic link

    def crawl(self, num_batches=cm.NUM_BATCHES,
              num_instances=cm.NUM_INSTANCES, start_line=0):
        """Visit every URL num_instances times in each of num_batches batches.

        :param num_batches: complete passes over self.urls
        :param num_instances: visits per site within a batch
        :param start_line: offset used when numbering per-site directories
        """
        wl_log.info("Crawl configuration: batches: %s, instances: %s,"
                    " tbb_version: %s, experiment: %s, no of URLs: %s, "
                    "crawl dir: %s, XVFB: %s, screenshot: %s"
                    % (num_batches, num_instances, self.tbb_version,
                       self.experiment, len(self.urls), self.crawl_dir,
                       self.xvfb, self.capture_screen))
        # for each batch
        # NOTE(review): xrange is Python 2 only — use range if porting to 3.
        for batch_num in xrange(num_batches):
            wl_log.info("********** Starting batch %s **********" % batch_num)
            site_num = start_line
            bg_site = None
            batch_dir = ut.create_dir(os.path.join(self.crawl_dir,
                                                   str(batch_num)))
            # init/reset tor process to have a different circuit.
            # make sure that we're not using the same guard node again
            wl_log.info("********** Restarting Tor Before Batch **********")
            self.tor_controller.restart_tor()
            sites_crawled_with_same_proc = 0

            # for each site
            for page_url in self.urls:
                sites_crawled_with_same_proc += 1
                # recycle the Tor process periodically so circuits don't age
                if sites_crawled_with_same_proc > cm.MAX_SITES_PER_TOR_PROCESS:
                    wl_log.info("********** Restarting Tor Process **********")
                    self.tor_controller.restart_tor()
                    sites_crawled_with_same_proc = 0

                wl_log.info("********** Crawling %s **********" % page_url)
                # NOTE(review): truncates the URL itself (not just the derived
                # filename) to MAX_FNAME_LENGTH — confirm this is intended.
                page_url = page_url[:cm.MAX_FNAME_LENGTH]
                site_dir = ut.create_dir(os.path.join(
                    batch_dir, ut.get_filename_from_url(page_url, site_num)))

                # multitab experiments also load a random background site
                if self.experiment == cm.EXP_TYPE_MULTITAB_ALEXA:
                    bg_site = choice(self.urls)
                # for each visit
                for instance_num in range(num_instances):
                    wl_log.info("********** Visit #%s to %s **********" %
                                (instance_num, page_url))
                    self.visit = None
                    try:
                        self.visit = Visit(batch_num, site_num, instance_num, page_url, site_dir, self.tor_controller,
                                           bg_site, self.experiment, self.xvfb, self.capture_screen)

                        self.visit.get()
                    except KeyboardInterrupt:  # CTRL + C
                        raise KeyboardInterrupt
                    except (ut.TimeExceededError, TimeoutException) as exc:
                        # timeouts are expected occasionally; clean up and move on
                        wl_log.critical("Visit to %s timed out! %s %s" %
                                        (page_url, exc, type(exc)))
                        if self.visit:
                            self.visit.cleanup_visit()
                    except Exception:
                        # any other failure: log with traceback, keep crawling
                        wl_log.critical("Exception crawling %s" % page_url,
                                        exc_info=True)
                        if self.visit:
                            self.visit.cleanup_visit()
                # END - for each visit
                site_num += 1
                time.sleep(cm.PAUSE_BETWEEN_SITES)

    def init_crawl_dirs(self):
        """Creates results and logs directories for this crawl."""
        self.crawl_dir, self.crawl_logs_dir = self.create_crawl_dir()
        sym_link = os.path.join(self.output, 'latest')
        add_symlink(sym_link, self.crawl_dir)  # add a symbolic link
        # Create crawl log
        self.log_file = os.path.join(self.crawl_logs_dir, "crawl.log")

    def init_logger(self):
        """Configure logging for crawler."""
        add_log_file_handler(wl_log, self.log_file)

    def stop_crawl(self, pack_results=True):
        """ Cleans up crawl and kills tor process in case it's running."""
        wl_log.info("Stopping crawl...")
        if self.visit:
            self.visit.cleanup_visit()
        self.tor_controller.kill_tor_proc()
        if pack_results:
            ut.pack_crawl_data(self.crawl_dir)

    def create_crawl_dir(self):
        """Create a timestamped crawl."""
        ut.create_dir(self.output)  # ensure that we've a results dir
        crawl_dir_wo_ts = os.path.join(self.output, 'crawl')
        crawl_dir = ut.create_dir(ut.append_timestamp(crawl_dir_wo_ts))
        crawl_logs_dir = os.path.join(crawl_dir, 'logs')
        ut.create_dir(crawl_logs_dir)
        return crawl_dir, crawl_logs_dir
    path = list(reversed(path))

    return path, isPerfect


if __name__ == "__main__":
    # Entry point: parse CLI options, solve the labyrinth, replay the path.
    from labyrinth import *
    from visit import Visit
    import argparse

    parser = argparse.ArgumentParser("Visit a labyrinth with shortest path")
    parser.add_argument("file", type=argparse.FileType('r'), help="input file")
    parser.add_argument("-q", dest="display", action="store_false",
                        help="Turn off displaying")
    parser.add_argument("-d", dest="delay", type=float, default=0.05,
                        help="Time delay between two displays")
    parser.add_argument("-i", dest="interval", type=int, default=1,
                        help="Number of step between two displays")
    parser.add_argument("--draw", default=None,
                        help="Draw the labyrinth and the path in a file")

    args = parser.parse_args()
    lab = Labyrinth2DFromFile(args.file)
    path, isPerfect = shortestPath(lab)
    if not isPerfect: print("Imperfect labyrinth !")
    visit = Visit(lab, display=args.display, sleepTime=args.delay,
                  displayFrequency=args.interval)
    # Replay the computed path step by step.
    for p in path:
        visit.moveTo(p)
    # Fix: `assert` is stripped under `python -O`; fail loudly instead.
    if not visit.isFinish():
        raise RuntimeError("visit did not reach the end of the labyrinth")
    if args.draw:
        lab.draw(args.draw, path=path)
Example #12
0
def db_insertvisit(d: Visit):
    """Insert *d* into the Visits table and store the new row id on d.id."""
    values = (d.get_strin(), d.get_strout(),
              d.get_strpaid(), d.get_strisFirstVisit())
    cur.execute('INSERT INTO Visits (indate, outdate, paid, firstvisit) \
      VALUES (?, ?, ?, ?)', values)
    # Fetch the autogenerated primary key and attach it to the object.
    cur.execute('SELECT last_insert_rowid()')
    d.id = cur.fetchone()[0]
Example #13
0
 def getVisits(self):
     """Build the list of visits, print each one, and return the list."""
     visits = []
     visits.append(Visit(1, 2))
     for visit in visits:
         print(visit)
     # Fix: the method is named getVisits but previously returned None.
     # Returning the list is backward compatible for callers ignoring it.
     return visits
Example #14
0
class Crawler(object):
    """ Provides methods to collect traffic traces. """
    def __init__(self,
                 torrc_dit,
                 url_list,
                 tbb_version,
                 xvfb=False,
                 capture_screen=True):
        """Prepare crawl/log directories, logging and the Tor controller.

        :param torrc_dit: torrc options for TorController.
            NOTE(review): likely a typo for ``torrc_dict``; kept as-is
            because it is part of the public signature.
        :param url_list: URLs to visit
        :param tbb_version: Tor Browser Bundle version to drive
        :param xvfb: run the browser under a virtual framebuffer
        :param capture_screen: save screenshots of visited pages
        """
        self.crawl_dir = None
        self.crawl_logs_dir = None
        self.visit = None
        self.urls = url_list
        self.init_crawl_dirs()
        self.tor_log = os.path.join(self.crawl_logs_dir, 'tor.log')
        linkname = os.path.join(cm.RESULTS_DIR, 'latest_tor_log')
        add_symlink(linkname, self.tor_log)
        self.tbb_version = tbb_version
        self.tor_controller = TorController(torrc_dit, tbb_version,
                                            self.tor_log)
        self.tor_process = None
        self.tb_driver = None
        self.capture_screen = capture_screen
        self.xvfb = xvfb
        add_log_file_handler(wl_log, self.log_file)
        linkname = os.path.join(cm.RESULTS_DIR, 'latest_crawl_log')
        add_symlink(linkname, self.log_file)

    def crawl(self,
              num_batches=cm.NUM_BATCHES,
              num_instances=cm.NUM_INSTANCES,
              start_line=0):
        """Visit every URL num_instances times in each of num_batches batches.

        :param num_batches: complete passes over self.urls
        :param num_instances: visits per site within a batch
        :param start_line: offset used when numbering per-site directories
        """
        wl_log.info(
            'Crawl configuration: batches %s, instances: %s, tbb_version %s, no of URLs: %s, crawl dir: %s, XVFB: %s, screenshot: %s'
            % (num_batches, num_instances, self.tbb_version, len(
                self.urls), self.crawl_dir, self.xvfb, self.capture_screen))

        # for each batch
        for batch_num in range(num_batches):
            wl_log.info('********** Starting batch %s **********' % batch_num)
            site_num = start_line
            bg_site = None  # this variant never loads a background site
            batch_dir = ut.create_dir(
                os.path.join(self.crawl_dir, str(batch_num)))

            # init/reset tor process to have a different circuit.
            # make sure that we're not using the same guard node again
            wl_log.info('********** Restarting Tor Before Batch **********')
            self.tor_controller.restart_tor()
            sites_crawled_with_same_proc = 0

            # for each site
            for page_url in self.urls:
                sites_crawled_with_same_proc += 1
                # recycle the Tor process periodically so circuits don't age
                if sites_crawled_with_same_proc > cm.MAX_SITES_PER_TOR_PROCESS:
                    wl_log.info('********** Restarting Tor Process **********')
                    self.tor_controller.restart_tor()
                    sites_crawled_with_same_proc = 0

                wl_log.info('********** Crawling %s **********' % page_url)
                # NOTE(review): truncates the URL itself (not just the derived
                # filename) to MAX_FNAME_LENGTH — confirm this is intended.
                page_url = page_url[:cm.MAX_FNAME_LENGTH]
                site_dir = ut.create_dir(
                    os.path.join(batch_dir,
                                 ut.get_filename_from_url(page_url, site_num)))

                for instance_num in range(num_instances):
                    wl_log.info('********** Visit #%s to %s **********' %
                                (instance_num, page_url))
                    self.visit = None
                    try:
                        self.visit = Visit(batch_num, site_num, instance_num,
                                           page_url, site_dir,
                                           self.tbb_version,
                                           self.tor_controller, bg_site,
                                           self.xvfb, self.capture_screen)
                        self.visit.get()
                    except KeyboardInterrupt:  # CTRL + C
                        raise KeyboardInterrupt
                    except (ut.TimeExceededError, TimeoutException) as exc:
                        # timeouts are expected occasionally; clean up and move on
                        wl_log.critical('Visit to %s timed out! %s %s' %
                                        (page_url, exc, type(exc)))
                        if self.visit:
                            self.visit.cleanup_visit()
                    except Exception:
                        # any other failure: log with traceback, keep crawling
                        wl_log.critical('Exception crawling %s' % page_url,
                                        exc_info=True)
                        if self.visit:
                            self.visit.cleanup_visit()

                # END - for each visit
                site_num += 1
                time.sleep(cm.PAUSE_BETWEEN_SITES)

    def init_crawl_dirs(self):
        """ Create results and logs directories for this crawl. """
        self.crawl_dir, self.crawl_logs_dir = self.create_crawl_dir()
        sym_link = os.path.join(cm.RESULTS_DIR, 'latest')
        add_symlink(sym_link, self.crawl_dir)  # add a symbolic link
        # Create crawl log
        self.log_file = os.path.join(self.crawl_logs_dir, 'crawl.log')

    def init_logger(self):
        """ Configure logging for crawler. """
        add_log_file_handler(wl_log, self.log_file)

    def stop_crawl(self):
        """ Cleans up crawl and kills tor process in case it's running. """
        wl_log.info('Stopping crawl...')
        if self.visit:
            self.visit.cleanup_visit()
        self.tor_controller.kill_tor_proc()

    def create_crawl_dir(self):
        """ Create a timestamped crawl. """
        ut.create_dir(cm.RESULTS_DIR)  # ensure the base results dir exists
        crawl_dir_wo_ts = os.path.join(cm.RESULTS_DIR, 'crawl')
        crawl_dir = ut.create_dir(ut.append_timestamp(crawl_dir_wo_ts))
        crawl_logs_dir = os.path.join(crawl_dir, 'logs')
        ut.create_dir(crawl_logs_dir)
        return crawl_dir, crawl_logs_dir
Example #15
0
 def __init__(self, id, visit):
     """Store the id and snapshot the current Visit count."""
     # NOTE: the `visit` argument is unused; the count comes from the class.
     self.visit = Visit.how_many()
     self.id = id
def get_doctors_visit():
    """Demo flow: build a visit for two specialists and exercise its API."""
    neurologist = Neurologist()
    cardiologist = Cardiologist()

    # Book a neurology visit and shuffle two patients between terms.
    booking = Visit("FRI1203", neurologist)
    booking.allocate_patient("Jan M.", "2B")
    booking.allocate_patient("Alina Z.", "3E")
    booking.relocate_patient("2B", "2E")
    booking.relocate_patient("3E", "2D")

    # Dump the visit's state and print its details via the printer callback.
    pprint(booking.numbers)
    print(booking.get_empty_terms())
    printer("Jan Kowal", "6C", "Cardiologist", "FRI1203")
    booking.print_visit_details(printer)
    def crawl(self, num_batches=cm.NUM_BATCHES,
              num_instances=cm.NUM_INSTANCES, start_line=0):
        """Visit every URL num_instances times in each of num_batches batches.

        :param num_batches: complete passes over self.urls
        :param num_instances: visits per site within a batch
        :param start_line: offset used when numbering per-site directories
        """
        wl_log.info("Crawl configuration: batches: %s, instances: %s,"
                    " tbb_version: %s, experiment: %s, no of URLs: %s, "
                    "crawl dir: %s, XVFB: %s, screenshot: %s"
                    % (num_batches, num_instances, self.tbb_version,
                       self.experiment, len(self.urls), self.crawl_dir,
                       self.xvfb, self.capture_screen))
        # for each batch
        # NOTE(review): xrange is Python 2 only — use range if porting to 3.
        for batch_num in xrange(num_batches):
            wl_log.info("********** Starting batch %s **********" % batch_num)
            site_num = start_line
            bg_site = None
            batch_dir = ut.create_dir(os.path.join(self.crawl_dir,
                                                   str(batch_num)))
            # init/reset tor process to have a different circuit.
            # make sure that we're not using the same guard node again
            wl_log.info("********** Restarting Tor Before Batch **********")
            self.tor_controller.restart_tor()
            sites_crawled_with_same_proc = 0

            # for each site
            for page_url in self.urls:
                sites_crawled_with_same_proc += 1
                # recycle the Tor process periodically so circuits don't age
                if sites_crawled_with_same_proc > cm.MAX_SITES_PER_TOR_PROCESS:
                    wl_log.info("********** Restarting Tor Process **********")
                    self.tor_controller.restart_tor()
                    sites_crawled_with_same_proc = 0

                wl_log.info("********** Crawling %s **********" % page_url)
                # NOTE(review): truncates the URL itself (not just the derived
                # filename) to MAX_FNAME_LENGTH — confirm this is intended.
                page_url = page_url[:cm.MAX_FNAME_LENGTH]
                site_dir = ut.create_dir(os.path.join(
                    batch_dir, ut.get_filename_from_url(page_url, site_num)))

                # multitab experiments also load a random background site
                if self.experiment == cm.EXP_TYPE_MULTITAB_ALEXA:
                    bg_site = choice(self.urls)
                # for each visit
                for instance_num in range(num_instances):
                    wl_log.info("********** Visit #%s to %s **********" %
                                (instance_num, page_url))
                    self.visit = None
                    try:
                        self.visit = Visit(batch_num, site_num, instance_num, page_url, site_dir, self.tor_controller,
                                           bg_site, self.experiment, self.xvfb, self.capture_screen)

                        self.visit.get()
                    except KeyboardInterrupt:  # CTRL + C
                        raise KeyboardInterrupt
                    except (ut.TimeExceededError, TimeoutException) as exc:
                        # timeouts are expected occasionally; clean up and move on
                        wl_log.critical("Visit to %s timed out! %s %s" %
                                        (page_url, exc, type(exc)))
                        if self.visit:
                            self.visit.cleanup_visit()
                    except Exception:
                        # any other failure: log with traceback, keep crawling
                        wl_log.critical("Exception crawling %s" % page_url,
                                        exc_info=True)
                        if self.visit:
                            self.visit.cleanup_visit()
                # END - for each visit
                site_num += 1
                time.sleep(cm.PAUSE_BETWEEN_SITES)
Example #18
0
    cur.execute('SELECT last_insert_rowid()')
    v.id = cur.fetchone()[0]


def db_updatevisitor(v: Visitor):
    """Update the Visitors row matching v.id with the object's current fields."""
    params = (v.fname, v.lname, v.gender, v.get_strdob(),
              v.mobile, v.email, v.id)
    cur.execute('UPDATE Visitors SET \
      fname=?, lname=?, gender=?, dob=?, mobile=?, email=? \
      WHERE id = ?', params)


def db_insertvisit(d: Visit):
    """Insert the visit *d* into Visits and set d.id from the new row."""
    row = (d.get_strin(), d.get_strout(), d.get_strpaid(),
           d.get_strisFirstVisit())
    cur.execute('INSERT INTO Visits (indate, outdate, paid, firstvisit) \
      VALUES (?, ?, ?, ?)', row)
    # Pull the autogenerated primary key back onto the object.
    cur.execute('SELECT last_insert_rowid()')
    d.id = cur.fetchone()[0]


db_init()

# Create one visitor together with a same-day visit record.
new_visitor = Visitor('Anishka', 'Patel', datetime.today(), 'M', '87459712351',
                      '*****@*****.**')
new_visit = Visit(datetime.today(), datetime.today())

# Persist both rows, then commit and release the connection.
db_insertvisitor(new_visitor)
db_insertvisit(new_visit)
conn.commit()
conn.close()

# Echo the saved records (ids were filled in by the insert helpers).
new_visitor.toConsole()
new_visit.toConsole()
    def body(self, master, candidate):
        """Creates the body of 'datawindow'.  param candidate is the candidate.uuid"""
        # NOTE(review): Python 2 syntax (print statements, iteritems) — this
        # block is not valid Python 3 as written.
        try:
            data = dict(DataManagement.read_candidate_data())  # TODO better way to do this
            candidate = data.get(candidate)  # swap the uuid for the candidate object
        except Exception as e:
            # NOTE(review): on failure `candidate` stays a uuid string, so the
            # attribute accesses below would raise — confirm handling.
            print "datawindow.body ", str(e)  # TODO manage exceptions
        # Candidate section
        self.candidate_pane = Labelframe(self, text=MultiLanguage.candidate_pane, width=250, height=350, borderwidth=10)
        self.candidate_pane.pack(side=TOP, expand=YES, fill=BOTH, padx=5, pady=5)
        # object unique id - does not appear on gui but needed to keep track of this candidate
        self.candidate_uid = candidate.uid
        # PSCID
        self.label_pscid = Label(self.candidate_pane, text=MultiLanguage.candidate_pscid)
        self.label_pscid.grid(column=0, row=0, padx=10, pady=5, sticky=N+S+E+W)
        self.text_pscid_var = StringVar()
        self.text_pscid_var.set(candidate.pscid)
        self.text_pscid = Entry(self.candidate_pane, textvariable=self.text_pscid_var)
        self.text_pscid.grid(column=0, row=1, padx=10, pady=5, sticky=N+S+E+W)
        # status
        self.label_status = Label(self.candidate_pane, text=MultiLanguage.candidate_status)
        self.label_status.grid(column=1, row=0, padx=10, pady=5, sticky=N+S+E+W)
        self.text_status_var = StringVar()
        self.text_status_var.set(candidate.status)
        self.text_status = Entry(self.candidate_pane, textvariable=self.text_status_var)
        self.text_status.grid(column=1, row=1, padx=10, pady=5, sticky=N+S+E+W)
        # firstname
        self.label_firstname = Label(self.candidate_pane, text=MultiLanguage.candidate_firstname)
        self.label_firstname.grid(column=0, row=2, padx=10, pady=5, sticky=N+S+E+W)
        self.text_firstname_var = StringVar()
        self.text_firstname_var.set(candidate.firstname)
        self.text_firstname = Entry(self.candidate_pane, textvariable=self.text_firstname_var)
        self.text_firstname.grid(column=0, row=3, padx=10, pady=5, sticky=N+S+E+W)
        # lastname
        self.label_lastname = Label(self.candidate_pane, text=MultiLanguage.candidate_lastname)
        self.label_lastname.grid(column=1, row=2, padx=10, pady=5, sticky=N+S+E+W)
        self.text_lastname_var = StringVar()
        self.text_lastname_var.set(candidate.lastname)
        self.text_lastname = Entry(self.candidate_pane, textvariable=self.text_lastname_var)
        self.text_lastname.grid(column=1, row=3, padx=10, pady=5, sticky=N+S+E+W)
        # phone number
        self.label_phone = Label(self.candidate_pane, text=MultiLanguage.candidate_phone)
        self.label_phone.grid(column=2, row=2, padx=10, pady=5, sticky=N+S+E+W)
        self.text_phone_var = StringVar()
        self.text_phone_var.set(candidate.phone)
        self.text_phone = Entry(self.candidate_pane, textvariable=self.text_phone_var)
        self.text_phone.grid(column=2, row=3, padx=10, pady=5, sticky=N+S+E+W)

        # Schedule Section - displayed as a table
        self.schedule_pane = Labelframe(self, text=MultiLanguage.schedule_pane, width=250, height=350, borderwidth=10)
        self.schedule_pane.pack(side=TOP, expand=YES, fill=BOTH, padx=5, pady=5)
        # top row (header)
        self.label_visit_rank = Label(self.schedule_pane, text=MultiLanguage.schedule_visit_rank)
        self.label_visit_rank.grid(column=0, row=0, padx=5, pady=5, sticky=N+S+E+W)
        self.label_visit_label = Label(self.schedule_pane, text=MultiLanguage.col_visitlabel)
        self.label_visit_label.grid(column=1, row=0, padx=5, pady=5, sticky=N+S+E+W)
        self.label_visit_when = Label(self.schedule_pane, text=MultiLanguage.col_when)
        self.label_visit_when.grid(column=2, row=0, padx=5, pady=5, sticky=NSEW)
        # NOTE(review): `self.label_visit_status` is rebound three times below
        # (where / withwhom / status); only the last binding survives.
        self.label_visit_status = Label(self.schedule_pane, text=MultiLanguage.col_where)
        self.label_visit_status.grid(column=3, row=0, padx=5, pady=5, sticky=N+S+E+W)
        self.label_visit_status = Label(self.schedule_pane, text=MultiLanguage.col_withwhom)
        self.label_visit_status.grid(column=4, row=0, padx=5, pady=5, sticky=N+S+E+W)
        self.label_visit_status = Label(self.schedule_pane, text=MultiLanguage.col_status)
        self.label_visit_status.grid(column=5, row=0, padx=5, pady=5, sticky=N+S+E+W)

        """
        PSEUDOCODE
        1. Get candidate.visitset
        2. Parse into a sorted list (sorted on visit.rank)
        3. Print data on screen


        visit_set = candidate.visitset
        for key, value in study_setup.iteritems():
            visit_list.append(study_setup[key])
        visit_list = sorted(visit_list, key=lambda visit: visit.rank)

        for key, value in visit_list.iteritems():

        """
        # TODO add logic "foreach" to create a table showing each visit
        import lib.utilities as Utilities # TODO delete when done
        # 1- Get candidate visitset and parse into a list
        visit_list = []
        visitset = candidate.visitset
        if visitset is None:
            print 'no visit yet'
        else:
            for key, value in visitset.iteritems():
                visit_list.append(visitset[key])
            # 2- Sort list on visit.rank
            visit_list = sorted(visit_list, key=lambda visit: visit.rank)
            # 3- 'print' values on ui
            # NOTE(review): `x = 0` is redundant — the for loop rebinds x.
            x = 0
            for x in range(len(visit_list)):
                # rank
                label_visit_rank = Label(self.schedule_pane, text=visit_list[x].rank)
                label_visit_rank.grid(column=0, row=x+1, padx=5, pady=5, sticky=N+S+E+W)
                # visitlabel
                label_visit_label = Label(self.schedule_pane, text=visit_list[x].visitlabel)
                label_visit_label.grid(column=1, row=x+1, padx=5, pady=5, sticky=N+S+E+W)
                # when (fall back to the visit's allowed date range if unset)
                # NOTE(review): `is None` would be the idiomatic comparison.
                if visit_list[x].when == None:
                    visit = visit_list[x]
                    date_range = Visit.visit_date_range(visit)
                    label_visit_when = Label(self.schedule_pane, text=date_range)
                    label_visit_when.grid(column=2, row=x+1, padx=5, pady=5, sticky=N+S+E+W)
                else:
                    label_visit_when = Label(self.schedule_pane, text=visit_list[x].when)
                    label_visit_when.grid(column=2, row=x+1, padx=5, pady=5, sticky=N+S+E+W)
                # where
                label_visit_where = Label(self.schedule_pane, text=visit_list[x].where)
                label_visit_where.grid(column=3, row=x+1, padx=5, pady=5, sticky=N+S+E+W)
                # withwhom
                label_visit_where = Label(self.schedule_pane, text=visit_list[x].withwhom)
                label_visit_where.grid(column=4, row=x+1, padx=5, pady=5, sticky=N+S+E+W)
                # status
                label_visit_where = Label(self.schedule_pane, text=visit_list[x].status)
                label_visit_where.grid(column=5, row=x+1, padx=5, pady=5, sticky=N+S+E+W)