Exemplo n.º 1
0
    def newGame(self, fn, root):
        """Begin a fresh game: set up the save file, the active map,
        the player, and optionally run the new-game script."""

        # fresh save file for this playthrough
        self.savegame = save.Save(fn)

        # locate the <newgame> element in the game xml file
        new_game = data.getChild(root, "newgame")

        # build the active map and wire up its connections
        map_file = data.getAttr(new_game, "map", data.D_STRING)
        self.map = tilemap.Tilemap(
            os.path.join(settings.path, "data", map_file))
        self.map.loadConnections()

        # spawn the player at the configured position and level
        player_node = data.getChild(root, "player")
        start_pos = data.getAttr(new_game, "position", data.D_INT2LIST)
        start_level = data.getAttr(new_game, "level", data.D_INT)
        self.player = player.Player(player_node, self.map, start_pos,
                                    start_level)

        # run the optional new-game script, if one is declared
        script = data.getOptionalChild(new_game, "script")
        if script is not None:
            self.scriptEngine.run(script_engine.Script(script))
Exemplo n.º 2
0
    def add_save_job(self, username, title):
        """Save job *title* for *username*.

        Returns False (with a message) when the user has already applied
        to the job or already saved it; otherwise records the save both
        in memory and in the save-job CSV file and returns True.
        """
        # titles of jobs this user has already applied to
        applied_titles = []
        with open(FILENAME_APP, "r") as file:
            for row in csv.reader(file):
                if row and row[0] == username:
                    applied_titles.append(row[1])

        # a job the user applied to does not need saving
        if title in applied_titles:
            print(
                "You have already applied to this job! Don't need to save the job!"
            )
            return False

        # refuse duplicate saves of the same (user, title) pair
        if any(element.get_username() == username
               and element.get_title() == title
               for element in self.__list_save_job):
            print(
                "This job existed in your data. Please choose another job!"
            )
            return False

        self.__list_save_job.append(sa.Save(username, title))

        # append the new record so it survives restarts
        with open(FILENAME_SAVE_JOB, "a") as file:
            csv.writer(file).writerow((username, title))
        print("The job is saved!")

        return True
Exemplo n.º 3
0
    def delete_job(self, name, title):
        """Delete the job posted by *name* with the given *title*.

        Rewrites the job CSV without the matching row and removes every
        saved-job record that references the deleted title.
        """
        # reload every job except the one being deleted
        self.__list_job.clear()
        with open(FILENAME_JOB, "r") as file:
            for row in csv.reader(file):
                # keep rows unless BOTH poster and title match
                if row and (row[5] != name or row[0] != title):
                    self.__list_job.append(
                        j.Job(row[0], row[1], row[2], row[3], row[4], row[5]))

        # persist the filtered job list
        with open(FILENAME_JOB, "w") as file:
            writer_csv = csv.writer(file)
            for element in self.__list_job:
                writer_csv.writerow(
                    (element.get_title(), element.get_description(),
                     element.get_employer(), element.get_location(),
                     element.get_salary(), element.get_post_name()))

        # drop saved-job rows that reference the deleted job
        # (leftover debug print of the title removed)
        # NOTE(review): rows are matched on title alone, so saves of a
        # same-titled job from another poster are removed too — confirm.
        self.__list_save_job.clear()
        with open(FILENAME_SAVE_JOB, "r") as file:
            for row in csv.reader(file):
                if row and row[1] != title:
                    self.__list_save_job.append(sa.Save(row[0], row[1]))

        with open(FILENAME_SAVE_JOB, "w") as file:
            writer_csv = csv.writer(file)
            for element in self.__list_save_job:
                writer_csv.writerow(
                    (element.get_username(), element.get_title()))
Exemplo n.º 4
0
def main():
    """Run the main game loop until the window is closed."""
    player = Player()
    globals.PLAYER = player
    globals.NAME = globals.NAME.get_value()
    globals.SCORE = score.Score(globals.NAME)
    save = s.Save()
    globals.NB_MORTS = globals.SCORE.get()
    if globals.LOGS:
        print("NB_morts", globals.NB_MORTS)

    # resume from the player's last saved level
    globals.NUM_LVL = save.get_lvl(globals.NAME)
    lvl = globals.NUM_LVL
    run = True
    load_lvl(lvl)
    fonts.font_init()
    while run:
        globals.LT = clock.tick(60)  # cap the frame rate at 60 FPS
        gameIcon()

        if globals.LVL_CHANGED:
            load_lvl(globals.NUM_LVL)

        # --- draw phase ---
        globals.WIN.fill(colors.GREY)  # background
        level.show(globals.MAP)  # tiles
        for m in mobs.mobs:
            m.drawMob()
        globals.PLAYER.draw()  # player
        if not globals.Jour:  # night: darken the screen
            # blindPoints is module-level state — presumably the visibility
            # polygon around the player; TODO confirm
            pygame.draw.polygon(globals.WIN, colors.BLACK, blindPoints)
        # the energy bar was previously drawn twice during the day
        # (inside the day branch AND unconditionally); once is enough
        hud.draw_bar(globals.PLAYER.energie)
        hud.draw_lvl()
        hud.draw_deaths()

        # story panels for the first three levels
        if globals.NUM_LVL in (1, 2, 3):
            scenario.draw(globals.NUM_LVL - 1)

        pygame.display.flip()  # show

        # --- input & update phase ---
        for event in pygame.event.get():
            if event.type == pygame.QUIT:
                run = False
                # persist score and progress before quitting
                globals.SCORE.add(globals.NB_MORTS)
                save.add(globals.NAME, globals.NUM_LVL)
        if not player.moving:
            player.move()
        else:
            player.moveAnim()
        player.checkState()
        for m in mobs.mobs:
            m.act()

    pygame.quit()
Exemplo n.º 5
0
    def __init__(self, level):
        """Place the player in the starting room of *level* and reset state."""
        self.room_x = 0
        self.room_y = 0

        # spawn at the room's configured position, or the screen centre
        # when the starting room defines none
        try:
            self.x = level.room(self.room_x, self.room_y).player_x
            self.y = level.room(self.room_x, self.room_y).player_y
        except KeyError:
            self.x = 0.5 * helpers.SCREEN_WIDTH
            self.y = 0.5 * helpers.SCREEN_HEIGHT

        sprite_paths = ['player_legs', 'player_body']
        super().__init__(self.x, self.y, WIDTH, HEIGHT, sprite_paths,
                         gameobject.CollisionGroup.player)

        # run statistics
        self.deaths = 0
        self.time = 0

        # movement / posture state flags
        self.alive = True
        self.moving = False
        self.looking_up = False
        self.sliding = False
        self.crouched = False
        self.climbing_ladder = False
        self.hugging_wall = False
        self.show_map = False
        self.submerged = False

        # jumping and wall movement
        self.jump_buffer = False
        self.jump_count = 0
        self.speed_wall = 0.25 * helpers.SCALE

        # weapon state: no mods enabled at the start
        self.attack_buffer = True
        self.spread = 10
        self.cooldown = WEAPON_COOLDOWN
        self.weapon_mods = {mod: False for mod in WeaponMod}
        self.bullets = []

        # abilities: everything locked except running
        self.abilities = {ability: False for ability in Ability}
        self.abilities[Ability.run] = True

        # checkpoint snapshot (self.direction is presumably set by the
        # gameobject base class — TODO confirm)
        self.save = save.Save(self.x, self.y, self.room_x, self.room_y,
                              self.direction, self.abilities, self.weapon_mods)

        self.txtbox = textbox.Textbox('', 0.5 * helpers.SCREEN_WIDTH,
                                      4 * helpers.SCALE)
        self.modifying_weapon = False

        self.level_over = False
        self.reseted = False

        self.ground_timer = 0
        self.wall_timer = 0
Exemplo n.º 6
0
    def delete_job(self, name, title):
        """Delete the job posted by *name* with the given *title*.

        Rewrites the job file without the matching row, removes saved-job
        records for the title, and moves that title's applications to the
        deleted-job file so the applicants can be notified.
        """
        # reload every job except the one being deleted
        self.__list_job.clear()
        with open(FILENAME_JOB, "r") as file:
            for row in csv.reader(file):
                # keep rows unless BOTH poster and title match
                if row and (row[5] != name or row[0] != title):
                    self.__list_job.append(
                        j.Job(row[0], row[1], row[2], row[3], row[4], row[5]))

        # persist the filtered job list
        with open(FILENAME_JOB, "w") as file:
            writer_csv = csv.writer(file)
            for element in self.__list_job:
                writer_csv.writerow(
                    (element.get_title(), element.get_description(),
                     element.get_employer(), element.get_location(),
                     element.get_salary(), element.get_post_name()))

        # drop saved-job rows that reference the deleted job
        # (leftover debug print of the title removed)
        # NOTE(review): rows are matched on title alone, so saves of a
        # same-titled job from another poster are removed too — confirm.
        self.__list_save_job.clear()
        with open(FILENAME_SAVE_JOB, "r") as file:
            for row in csv.reader(file):
                if row and row[1] != title:
                    self.__list_save_job.append(sa.Save(row[0], row[1]))

        with open(FILENAME_SAVE_JOB, "w") as file:
            writer_csv = csv.writer(file)
            for element in self.__list_save_job:
                writer_csv.writerow(
                    (element.get_username(), element.get_title()))

        # split applications into those for the deleted job (to notify)
        # and those to keep; local lists are used instead of clobbering
        # self.__list_job with raw application rows as the original did
        remaining_apps = []
        # applicants who must be told their job was deleted
        notify_applicants = []
        with open(FILENAME_APP, "r") as file:
            for row in csv.reader(file):
                if row and row[1] == title:
                    notify_applicants.append(row)
                elif row:
                    remaining_apps.append(row)

        with open(FILENAME_APP, "w") as file:
            writer_csv = csv.writer(file)
            for element in remaining_apps:
                writer_csv.writerow(element)

        with open(FILENAME_DEL_JOB, "a") as file:
            writer_csv = csv.writer(file)
            for element in notify_applicants:
                writer_csv.writerow(element)

        write_jobs()
Exemplo n.º 7
0
    def delete_save_job(self, name, title):
        """Remove the saved job (*name*, *title*) and rewrite the CSV."""
        # rebuild the in-memory list from disk, skipping the exact pair
        self.__list_save_job.clear()
        with open(FILENAME_SAVE_JOB, "r") as file:
            for row in csv.reader(file):
                # keep every record unless BOTH user and title match
                if row != [] and (row[0] != name or row[1] != title):
                    self.__list_save_job.append(sa.Save(row[0], row[1]))

        # write the surviving records back out
        with open(FILENAME_SAVE_JOB, "w") as file:
            writer = csv.writer(file)
            for record in self.__list_save_job:
                writer.writerow((record.get_username(), record.get_title()))
Exemplo n.º 8
0
    def __init__(self):
        """Build the tester: UI handles, config, URL source, result sinks."""
        # add the current directory to the import/search path
        path.add_path()

        # UI control handles, created later when the interface is built
        self.root = None
        self.text_frame = None
        self.list_frame = None
        self.next_botton = None
        self.scrollbar = None
        self.score_entry = None
        self.listbox = None
        self.label = None
        self.save_button = None
        self.prefix = "   "

        # configuration
        self.configer = config.Config()

        # URL source
        self.url = url.URL(self.configer.url_dir, self.configer.url_dir_cn,
                           self.configer.class_num, self.configer.url_num,
                           self.configer.url_num_cn)

        # index of the URL currently under test
        self.index = 0
        # number of URLs still to test
        self.all_url_num = 0
        # the URL currently under test
        self.cur_url = None

        # result sinks: local file and database
        self.saver = save.Save(self.configer.result_file)
        self.db_saver = db.DBSaver(self.configer.host, self.configer.port,
                                   self.configer.user, self.configer.passwd,
                                   self.configer.db)

        # queue for communicating with the worker thread
        self.queue = Queue.Queue()

        # run / stop flags
        self.running = False
        self.stop_flag = False

        # main question window
        self.question_win = None
        # user identifier
        self.user_id = 0
Exemplo n.º 9
0
 def __init__(self):
     """Initialise UI handles, config, URL source and result saver."""
     # Tk widget handles, created later when the UI is built
     self.root = None
     self.text_frame = None
     self.list_frame = None
     self.next_botton = None
     self.save_button = None
     self.scrollbar = None
     self.score_entry = None
     self.listbox = None
     self.label = None
     # logging / configuration
     self.logger = log.get_logger()
     self.confger = config.Config()
     # URL source and result sink, both driven by the config file
     self.url = url.URL(self.confger.url_file)
     self.saver = save.Save(self.confger.result_file)
     # progress tracking
     self.index = 0
     self.cur_url = None
     # worker-thread communication and state
     self.queue = Queue.Queue()
     self.running = False
     self.prefix = "..."
Exemplo n.º 10
0
    def __init__(self):
        """Initialise widget handles, config, result sinks and worker state."""
        # make bundled modules importable
        path.add_path()
        # Tk widget handles, filled in when the UI is built
        self.root = None
        self.text_frame = None
        self.list_frame = None
        self.scrollbar = None
        self.listbox = None
        # logging and configuration
        self.logger = log.get_logger()
        self.confger = config.Config()
        # result sinks: local file and database
        self.saver = save.Save(self.confger.result_file)
        self.db_saver = db.DBSaver(self.confger.host, self.confger.user,
                                   self.confger.passwd, self.confger.db)
        # current work item and control flags
        self.cur_url = None
        self.kill_flag = False
        self.queue = Queue.Queue()
        self.running = False
        self.pausing = False
        self.prefix = "   "
Exemplo n.º 11
0
def end_screen():
  """Show the end screen with the leaderboard until the window is closed."""
  s = save.Save()
  s.add(globals.NAME, 1)
  globals.WIN = pygame.display.set_mode((globals.WIDTH, globals.HEIGHT))
  pygame.display.flip()
  alive = True
  globals.SCORE.add(globals.NB_MORTS)
  if globals.LOGS:
    print(globals.SCORE.score)
  while alive:
    for event in pygame.event.get():
      if event.type == pygame.QUIT:
        alive = False
    # draw once per frame, not once per pending event: the old code nested
    # the drawing inside the event loop, so the window stayed blank
    # whenever no events were queued
    globals.WIN.fill((255, 255, 255))
    globals.WIN.blit(sprites.sl["end_screen"], (0, 0))
    leaderboard.show_leaderboard()
    pygame.display.flip()

  pygame.quit()
Exemplo n.º 12
0
def main():
    """Fetch recommendations for the playlists named on the command line.

    argv[1] is the Spotify user name, argv[2:] the playlist names.
    """
    user = sys.argv[1]
    playlist_list = list(sys.argv[2:])
    # (leftover debug print of argv[2] removed)

    # read/write access to the user's private playlists
    scopes = 'playlist-read-private,playlist-modify-private'
    token = spotipy.util.prompt_for_user_token("*****@*****.**",
                                               scopes)
    sp = spotipy.Spotify(auth=token)

    g = get.Get(user, sp)
    s = save.Save()
    l = load.Load()
    a = actions.Actions(g, s, l, user, sp)

    #TODO add switch for what mode the user wants
    # collect all frames and lists in a class or something
    # mode switching

    a.get_user_playlists()
    for name in playlist_list:
        a.get_recommendations(name)
Exemplo n.º 13
0
 def test_save_job(self):  #tests the saving of jobs
     """Each (user, title) pair round-trips through Save and Manage."""
     # start from an empty save-job file
     filename = "save_job.csv"
     with open(filename, "w+"):
         pass
     manage = ma.Manage()
     # (username, title) pairs to save; all distinct, so every add succeeds
     cases = [
         ("YQ", "Tenth"),
         ("TP1", "Porter"),
         ("TP2", "Developer"),
         ("TP1", "Sixth"),
         ("TP2", "Seventh"),
         ("YQ", "SCRUMMaster"),
     ]
     for username, title in cases:
         record = s.Save(username, title)
         manage.add_save_job(username, title)
         assert record.get_username() == username
Exemplo n.º 14
0
def Show(role):  # Laman Utama [Ini tidak sesuai spesifikasi yang diinginkan]
    """Main menu: read one command from the user and dispatch on it.

    *role* is 'admin' or 'user' and gates the privileged commands.
    Handled commands eventually recurse back into Show (matching the
    original flow); 'exit' terminates the program.
    """

    def denied():
        # the current role lacks permission for the requested command
        print('Anda tidak memiliki ijin akses')
        Show(role)

    def repeat(action, prompt):
        # run *action* repeatedly until the user answers 'n', then return
        # to the menu (mirrors the original loop-plus-recursion flow)
        while True:
            action()
            pil = input(prompt)
            if pil == 'n':
                Show(role)

    inUser = input(">>> ")
    # dispatch with an elif chain so exactly one branch runs: the original
    # used independent ifs with the final else attached only to the 'exit'
    # check, so every handled non-exit command also reached the error
    # branch once its recursion unwound
    if inUser == 'caritahun':
        repeat(gadget.caritahun, "Apa masih mencari caritahun (y/n)?:\n>>> ")
    elif inUser == 'carirarity':
        repeat(gadget.carirarity,
               "Apa masih mencari carirarity (y/n)?:\n>>> ")
    elif inUser == 'tambahitem':
        if role == 'admin':
            repeat(modifItem.tambahitem,
                   "Apa masih ingin menambah item (y/n)?:\n>>> ")
        else:
            denied()
    elif inUser == 'hapusitem':
        if role == 'admin':
            repeat(modifItem.hapusitem,
                   "Apa masih ingin menghapus item (y/n)?:\n>>> ")
        else:
            denied()
    elif inUser == 'ubahjumlah':
        if role == 'admin':
            repeat(modifItem.ubahjumlah,
                   "Apa masih ingin mengubah jumlah item (y/n)?:\n>>> ")
        else:
            denied()
    elif inUser == 'register':
        if role == 'admin':
            repeat(user.register,
                   "Apa masih ingin me-register lagi (y/n)?:\n>>> ")
        else:
            denied()
    elif inUser == 'pinjam':
        if role == 'user':
            repeat(gadget.PinjamGadget,
                   "Apa masih ingin meminjam lagi (y/n)?:\n>>> ")
        else:
            denied()
    elif inUser == 'kembalikan':
        if role == 'user':
            repeat(gadget.KembalikanGadget,
                   "Apa masih ingin mengembalikan lagi (y/n)?:\n>>> ")
        else:
            denied()
    elif inUser == 'minta':
        if role == 'user':
            repeat(consumable.minta,
                   "Apa masih ingin meminta lagi (y/n)?:\n>>> ")
        else:
            denied()
    elif inUser == 'riwayatambil':
        if role == 'admin':
            consumable.riwayatambil()
            Show(role)
        else:
            denied()
    elif inUser == 'riwayatpinjam':
        if role == 'admin':
            gadget.riwayatpinjam()
            Show(role)
        else:
            denied()
    elif inUser == 'riwayatkembali':
        if role == 'admin':
            gadget.riwayatkembali()
            Show(role)
        else:
            denied()
    elif inUser == 'help':
        Help(role)
        Show(role)
    elif inUser == 'save':
        save.Save()
        Show(role)
    elif inUser == 'exit':
        pil = input("Apa Anda ingin untuk menyimpan perubahan (y/n)?\n>>> ")
        if pil == 'y':
            save.Save()
        exit()
    else:
        print(
            'Terdapat kesalahan input, ketik \'help\' agar melihat list command'
        )
        Show(role)
    return
Exemplo n.º 15
0
    def __init__(self):
        """Load all persisted CSV data into in-memory object lists.

        Missing data files are created empty on first run so the reads
        below always succeed.
        """
        # in-memory object caches, one per CSV file
        self.__list_student = []
        self.__list_job = []
        self.__list_settings = []
        self.__list_profiles = []
        self.__list_save_job = []

        def ensure_exists(filename):
            # create an empty file on first run so later opens succeed
            if not os.path.isfile(filename):
                with open(filename, "w") as file:
                    csv.writer(file)

        # saved jobs: (User_Name, Title)
        ensure_exists(FILENAME_SAVE_JOB)
        with open(FILENAME_SAVE_JOB, "r") as file:
            for row in csv.reader(file):
                if row:
                    self.__list_save_job.append(sa.Save(row[0], row[1]))

        # job applications
        ensure_exists(FILENAME_APP)

        # students: (User_Name, Password, First_Name, Last_Name, ...)
        ensure_exists(FILENAME)
        with open(FILENAME, "r") as file:
            for row in csv.reader(file):
                if row:
                    self.__list_student.append(
                        s.Student(row[0], row[1], row[2], row[3]))

        # jobs: (Title, Description, Employer, Location, Salary, Post_Name)
        ensure_exists(FILENAME_JOB)
        with open(FILENAME_JOB, "r") as file:
            for row in csv.reader(file):
                if row:
                    self.__list_job.append(
                        j.Job(row[0], row[1], row[2], row[3], row[4], row[5]))

        # settings: (user, email_notif, sms_notif, targeted_ads, language)
        ensure_exists(FILENAME_STG)
        with open(FILENAME_STG, "r") as file:
            for row in csv.reader(file):
                if row:
                    self.__list_settings.append(
                        stg.Settings(row[0], row[1], row[2], row[3], row[4]))

        # profiles: (user, title, major, university, bio, experience,
        # education)
        ensure_exists(FILENAME_PRO)
        with open(FILENAME_PRO, "r") as file:
            for row in csv.reader(file):
                if row:
                    self.__list_profiles.append(
                        pro.Profiles(row[0], row[1], row[2], row[3], row[4],
                                     row[5], row[6]))

        # pending messages — BUG FIX: the original tested
        # os.path.isfile(FILENAME_PRO) here, so pending_messages.csv was
        # never created once profiles.csv existed
        ensure_exists(FILENAME_MES)

        # new jobs: (jobTitle, list of users who have NOT seen it)
        ensure_exists(FILENAME_NEW_JOB)

        # deleted-job notifications
        ensure_exists(FILENAME_DEL_JOB)
Exemplo n.º 16
0
def placeholder2(screen):
    """Fade out the music and hand *screen* to the save routine."""
    pygame.mixer.music.fadeout(1500)  # fade over 1.5 seconds
    save.Save(screen)
Exemplo n.º 17
0
def run():
    # initialize VGG Model and PCA
    iset = init.Init()
    # initialize neural network model
    model = networks.Network()
    model.init_model()
    # initialize global instance
    uset = users.Users()

    # store special features in memory
    # dset_special = dataset.Dataset(set.PATH_TO_SPECIAL)
    dset_special = None
    print "Dataset Loaded."
    # set normal features in memory to false
    is_normal_loaded = True
    tset_name = None
    is_reloaded = False
    m_checkpoints = 0

    while True:

        queue = db.lrange(set.REQUEST_QUEUE, set.REQUEST_START, set.REQUEST_END)
        q_uid = None
        # initialize local instance
        select = selectonly.Select()
        finalize = save.Save()
        viewer = view.View()
        retrain_v = retrainView.retrainView()
        retrain_h = retrainHeatmap.retrainHeatmap()
        heat = heatmap.Heatmap()
        t_train = train.Train()
        report_label = label.label()
        report_count = count.count()
        report_map = mapping.map()

        for q in queue:

            q = json.loads(q.decode("utf-8"))
            q_uid = q["uid"]
            target = q["target"]
            session_uid = q["uid"]
            dataSetPath = set.DATASET_DIR + q["dataset"]
            pcaPath = set.DATASET_DIR + q["pca"]
            # if specific features then set m_loaded to true
            is_normal_loaded = False if dataSetPath == set.PATH_TO_SPECIAL else True

            if target == "label":
                report_label.setData(q)

            if target == "count":
                report_count.setData(q)

            if target == "map":
                report_map.setData(q)

            if target == 'selectonly':
                select.setData(q)

            if target == 'save':
                finalize.setData(q)

            if target == 'view':
                viewer.setData(q)

            if target == 'retrainView':
                retrain_v.setData(q)

            if target == 'retrainHeatmap':
                retrain_h.setData(q)

            if target == 'heatmapAll':
                heatmaps = q["viewJSONs"]

            if target == 'heatmap':
                heat.setData(q)

            if target == 'train':
                t_train.setData(q)

            if target == 'reload':
                t_path = set.TRAININGSET_DIR + q["trainingSetName"]
                is_reloaded = True

            if target == 'reviewSave':
                q_samples = json.loads(q["samples"])

        if q_uid is not None:

            print target, " Session Start ....."

            no_uid = True
            uidx = 0

            # find current user Index
            for i in range(len(uset.users)):
                if uset.users[i]['uid'] == session_uid:
                    uidx = i
                    no_uid = False

            if no_uid:
                # set users data
                uset.addUser(session_uid)

            if is_normal_loaded:
                dset = dataset.Dataset(dataSetPath)
            else:
                dset = dset_special

            PCA = joblib.load(pcaPath)

            if target == 'selectonly':
                uset.setIter(uidx, select.iter)
                print "Predict Start ... "
                t0 = time()
                scores = model.predict_prob(dset.features)
                t1 = time()
                print "Predict took ", t1 - t0
                # Find uncertain samples
                data = select.getData(scores, dset.slideIdx, dset.slides, dset.x_centroid, dset.y_centroid)
                db.set(q_uid, json.dumps(data))
                db.ltrim(set.REQUEST_QUEUE, len(q_uid), -1)

            if target == 'view':
                slide_idx = dset.getSlideIdx(viewer.slide)
                object_num = dset.getObjNum(slide_idx)
                data_idx = dset.getDataIdx(slide_idx)
                feature_set = dset.getFeatureSet(data_idx, object_num)
                x_centroid_set = dset.getXcentroidSet(data_idx, object_num)
                y_centroid_set = dset.getYcentroidSet(data_idx, object_num)

                print "Predict Start ... "
                t0 = time()
                predictions = model.predict(feature_set)
                t1 = time()
                print "Predict took ", t1 - t0
                object_idx = load(
                    viewer.left, viewer.right, viewer.top, viewer.bottom, x_centroid_set.astype(np.float), y_centroid_set.astype(np.float)
                )
                data = {}

                for i in object_idx:
                    data[str(x_centroid_set[i][0])+'_'+str(y_centroid_set[i][0])] = str(predictions[i])

                db.set(q_uid, json.dumps(data))
                db.ltrim(set.REQUEST_QUEUE, len(q_uid), -1)

            if target == 'heatmap':
                slide_idx = dset.getSlideIdx(heat.slide)
                object_num = dset.getObjNum(slide_idx)
                data_idx = dset.getDataIdx(slide_idx)
                feature_set = dset.getFeatureSet(data_idx, object_num)
                x_centroid_set = dset.getXcentroidSet(data_idx, object_num)
                y_centroid_set = dset.getYcentroidSet(data_idx, object_num)

                print "Predict Start ... "
                t0 = time()
                if set.IS_HEATMAP == False:
                    scores = model.predict_prob(feature_set)
                t1 = time()
                print "Predict took ", t1 - t0
                # set x and y maps
                heat.setXandYmap()
                # write heatmaps
                heat.setHeatMap(x_centroid_set, y_centroid_set, scores)
                # get heatmap data
                data = heat.getData(0)

                db.set(q_uid, json.dumps(data))
                db.ltrim(set.REQUEST_QUEUE, len(q_uid), -1)

            if target == 'heatmapAll':
                data = []
                index = 0

                t0 = time()
                scores = model.predict_prob(dset.features)
                t1 = time()
                print "Predict took ", t1 - t0

                for h in heatmaps:

                    h['uid'] = session_uid
                    heat.setData(h)

                    slide_idx = dset.getSlideIdx(heat.slide)
                    object_num = dset.getObjNum(slide_idx)
                    data_idx = dset.getDataIdx(slide_idx)
                    # feature_set = dset.getFeatureSet(data_idx, object_num)
                    x_centroid_set = dset.getXcentroidSet(data_idx, object_num)
                    y_centroid_set = dset.getYcentroidSet(data_idx, object_num)
                    score_set = scores[data_idx: data_idx+object_num]
                    # set x and y maps
                    heat.setXandYmap()
                    # write heatmaps
                    heat.setHeatMap(x_centroid_set, y_centroid_set, score_set)
                    # get heatmap data
                    data_k = heat.getData(index)
                    data.append(data_k)
                    index += 1

                # print data
                db.set(q_uid, json.dumps(data))
                db.ltrim(set.REQUEST_QUEUE, len(q_uid), -1)

            if target == 'reload':
                # initialize augment
                agen = augments.Augments()
                # set user train samples
                # uset.setReloadedData(uidx, t_path, dset.slides)
                uset.setReloadedData(uidx, t_path)

                sample_size = len(uset.users[uidx]['samples'])

                m_checkpoints = uset.users[uidx]['samples'][sample_size-1]['checkpoints']

                sample_batch_size = agen.AUG_BATCH_SIZE * sample_size
                train_size = sample_size + sample_batch_size

                train_features = np.zeros((train_size, set.FEATURE_DIM))
                train_labels = np.zeros((train_size, ))

                for i in range(sample_size):
                    train_features[i] = uset.users[uidx]['samples'][i]['feature']
                    train_labels[i] = uset.users[uidx]['samples'][i]['label']
                    train_features[i+sample_size:i+sample_size+agen.AUG_BATCH_SIZE] = uset.users[uidx]['augments'][i]['feature']
                    train_labels[i+sample_size:i+sample_size+agen.AUG_BATCH_SIZE] = uset.users[uidx]['augments'][i]['label']

                tset_path = t_path.split('/')[-1]
                tset_name = tset_path.split('.')[0]

                print "Training ... ", len(train_labels)
                t0 = time()
                model.train_model(train_features, train_labels, tset_name)
                t1 = time()
                print "Training took ", t1 - t0

                data = {"success": 'pass'}
                db.set(q_uid, json.dumps(data))
                db.ltrim(set.REQUEST_QUEUE, len(q_uid), -1)

            if target == 'label':
                # Train on the user's current samples, classify every object on
                # report_label.slide, and rasterize the boundaries of objects
                # inside the requested region into a label image (inFile).
                # initialize augment
                agen = augments.Augments()
                # set user train samples
                uset.setReloadedData(uidx, report_label.trainSet)

                sample_size = len(uset.users[uidx]['samples'])
                sample_batch_size = agen.AUG_BATCH_SIZE * sample_size
                train_size = sample_size + sample_batch_size

                train_features = np.zeros((train_size, set.FEATURE_DIM))
                train_labels = np.zeros((train_size, ))

                # NOTE(review): augment slices overlap between consecutive i
                # when AUG_BATCH_SIZE > 1 - confirm intended indexing
                for i in range(sample_size):
                    train_features[i] = uset.users[uidx]['samples'][i]['feature']
                    train_labels[i] = uset.users[uidx]['samples'][i]['label']
                    train_features[i+sample_size:i+sample_size+agen.AUG_BATCH_SIZE] = uset.users[uidx]['augments'][i]['feature']
                    train_labels[i+sample_size:i+sample_size+agen.AUG_BATCH_SIZE] = uset.users[uidx]['augments'][i]['label']

                print "Training ... ", len(train_labels)
                t0 = time()
                model.train_model(train_features, train_labels, report_label.classifier)
                t1 = time()
                print "Training took ", t1 - t0

                # fetch all features/centroids for the target slide and predict
                slide_idx = dset.getSlideIdx(report_label.slide)
                object_num = dset.getObjNum(slide_idx)
                data_idx = dset.getDataIdx(slide_idx)
                test_features = dset.getFeatureSet(data_idx, object_num)
                x_centroid_set = dset.getXcentroidSet(data_idx, object_num)
                y_centroid_set = dset.getYcentroidSet(data_idx, object_num)
                print "Testing Start ... "
                t0 = time()
                predicts = model.predict(test_features)
                t1 = time()
                print "Predict took ", t1 - t0

                inputImageFile = '/datasets/tif/'+ report_label.slide + '.svs.dzi.tif'

                # pad the region so polygons that spill over the requested
                # bounds can still be drawn; the padding is cropped off below
                bold = 512
                bold_left = report_label.left - bold
                bold_top = report_label.top - bold
                bold_bottom = report_label.bottom + bold
                bold_right = report_label.right + bold
                bold_width = report_label.width + 2*bold
                bold_height = report_label.height + 2*bold

                ts = large_image.getTileSource(inputImageFile)

                region = dict(
                    left=report_label.left, top=report_label.top,
                    width=report_label.width, height=report_label.height,
                )

                # NOTE(review): im_region is never used afterwards - dead read?
                im_region = ts.getRegion(
                    region=region, format=large_image.tilesource.TILE_FORMAT_NUMPY
                )[0]

                # credentials appear scrubbed / hard-coded here
                mydb = mysql.connector.connect(
                  host=set.MYSQL_HOST,
                  user="******",
                  passwd="guest",
                  database="nuclei",
                  charset='utf8',
                  use_unicode=True
                )

                boundaryTablename = 'sregionboundaries'

                runcursor = mydb.cursor()

                # NOTE(review): query is built by string concatenation from the
                # slide name - SQL-injection-prone; prefer a parameterized
                # cursor.execute(query, params)
                query = 'SELECT centroid_x, centroid_y, boundary from ' + boundaryTablename + ' where slide="' +  report_label.slide + \
                '" AND centroid_x BETWEEN ' + str(report_label.left) + ' AND ' + str(report_label.right) + \
                ' AND centroid_y BETWEEN ' + str(report_label.top) + ' AND ' + str(report_label.bottom)

                runcursor.execute(query)

                boundarySet = runcursor.fetchall()

                # find region index from hdf5
                object_idx = load(
                    report_label.left, report_label.right, report_label.top, report_label.bottom, x_centroid_set.astype(np.float), y_centroid_set.astype(np.float)
                )

                # set an array for boundary points in a region to zero
                im_bold = np.zeros((bold_height, bold_width), dtype=np.uint8)

                # match each in-region object to its DB boundary by centroid and
                # fill the polygon: 255 = predicted positive, 128 = negative.
                # NOTE(review): O(objects x boundaries) scan - a dict keyed on
                # (x, y) would remove the inner loop; 'object' shadows the
                # builtin; map() returns a list here only because this is Py2.
                for i in object_idx:
                    for j in range(len(boundarySet)):
                      x = int(boundarySet[j][0])
                      y = int(boundarySet[j][1])
                      boundaryPoints = []
                      if x == int(x_centroid_set[i, 0]) and y == int(y_centroid_set[i, 0]):
                          object = boundarySet[j][2].encode('utf-8').split(' ')
                          object_points = []
                          # translate slide coords into padded-image coords
                          for p in range(len(object)-1):
                              intP = map(int, object[p].split(','))
                              intP[0] = intP[0] - report_label.left + bold
                              intP[1] = intP[1] - report_label.top + bold
                              object_points.append(intP)
                          boundaryPoints.append(np.asarray(object_points))
                          cv2.fillPoly(im_bold, boundaryPoints, 255 if predicts[i] > 0 else 128)

                # crop the padding back off to the requested region size
                im_out = im_bold[bold:bold+report_label.height, bold:bold+report_label.width]

                imsave(report_label.inFile, im_out)

                runcursor.close()
                mydb.close()

                print ("label success ", report_label.inFile)
                data = {"success": report_label.outFile}
                db.set(q_uid, json.dumps(data))
                db.ltrim(set.REQUEST_QUEUE, len(q_uid), -1)

                # reset per-user state and the model for the next session
                uset.users = []
                uset.u_size = 0

                model = networks.Network()
                model.init_model()
                print ("label done")

            if target == 'count':
                # Train on the user's samples, predict across ALL slides, and
                # write a per-slide positive/negative count table to inFile.
                # initialize augment
                agen = augments.Augments()
                # set user train samples
                uset.setReloadedData(uidx, report_count.trainSet)

                sample_size = len(uset.users[uidx]['samples'])
                sample_batch_size = agen.AUG_BATCH_SIZE * sample_size
                train_size = sample_size + sample_batch_size

                train_features = np.zeros((train_size, set.FEATURE_DIM))
                train_labels = np.zeros((train_size, ))

                # NOTE(review): augment slices overlap between consecutive i
                # when AUG_BATCH_SIZE > 1 - confirm intended indexing
                for i in range(sample_size):
                    train_features[i] = uset.users[uidx]['samples'][i]['feature']
                    train_labels[i] = uset.users[uidx]['samples'][i]['label']
                    train_features[i+sample_size:i+sample_size+agen.AUG_BATCH_SIZE] = uset.users[uidx]['augments'][i]['feature']
                    train_labels[i+sample_size:i+sample_size+agen.AUG_BATCH_SIZE] = uset.users[uidx]['augments'][i]['label']

                print "Training ... ", len(train_labels)
                t0 = time()
                model.train_model(train_features, train_labels, report_count.classifier)
                t1 = time()
                print "Training took ", t1 - t0

                print "Testing Start ... "
                t0 = time()
                predicts = model.predict(dset.features)
                t1 = time()
                print "Predict took ", t1 - t0

                # find positive and negative numbers for each slide
                pos_num = []
                neg_num = []

                # dataIdx[i, 0] is the first object index of slide i; the last
                # slide's slice runs to the end of the prediction vector
                # (assumes len(dataIdx) == n_slides)
                for i in range(dset.n_slides):
                    if i == len(dset.dataIdx) - 1:
                        predict = predicts[dset.dataIdx[i, 0]:]
                    else:
                        predict = predicts[dset.dataIdx[i, 0]: dset.dataIdx[i+1, 0]]
                    pos = len(predict[predict>0])
                    neg = len(predict) - pos
                    pos_num.append(pos)
                    neg_num.append(neg)

                print('>> Writing count file')
                # NOTE(review): prefer a with-block so the file closes on error
                out_file = open(report_count.inFile, 'w')

                # tab-separated header + one row per slide
                out_file.write("Slide\t")
                out_file.write("Predicted positive (superpixels)\t")
                out_file.write("Predicted negative (superpixels)\t")
                out_file.write("\n")

                for i in range(len(dset.slides)):
                    out_file.write("%s\t" % dset.slides[i])
                    out_file.write("%d\t" % pos_num[i])
                    out_file.write("%d\t" % neg_num[i])
                    out_file.write("\n")

                out_file.close()
                print ("count success ", report_count.inFile)
                data = {"success": report_count.outFile}
                db.set(q_uid, json.dumps(data))
                db.ltrim(set.REQUEST_QUEUE, len(q_uid), -1)

                # reset per-user state and the model for the next session
                uset.users = []
                uset.u_size = 0

                model = networks.Network()
                model.init_model()
                print ("count done")

            if target == 'map':
                # Train on the user's samples, predict the requested slide, and
                # write features/predictions/centroids to an HDF5 file (inFile).
                # initialize augment
                agen = augments.Augments()
                # set user train samples
                uset.setReloadedData(uidx, report_map.trainSet)

                sample_size = len(uset.users[uidx]['samples'])
                sample_batch_size = agen.AUG_BATCH_SIZE * sample_size
                train_size = sample_size + sample_batch_size

                train_features = np.zeros((train_size, set.FEATURE_DIM))
                train_labels = np.zeros((train_size, ))

                # NOTE(review): augment slices overlap between consecutive i
                # when AUG_BATCH_SIZE > 1 - confirm intended indexing
                for i in range(sample_size):
                    train_features[i] = uset.users[uidx]['samples'][i]['feature']
                    train_labels[i] = uset.users[uidx]['samples'][i]['label']
                    train_features[i+sample_size:i+sample_size+agen.AUG_BATCH_SIZE] = uset.users[uidx]['augments'][i]['feature']
                    train_labels[i+sample_size:i+sample_size+agen.AUG_BATCH_SIZE] = uset.users[uidx]['augments'][i]['label']

                print "Training ... ", len(train_labels)
                t0 = time()
                model.train_model(train_features, train_labels, report_map.classifier)
                t1 = time()
                print "Training took ", t1 - t0

                # fetch all features/centroids for the target slide and predict
                slide_idx = dset.getSlideIdx(report_map.slide)
                object_num = dset.getObjNum(slide_idx)
                data_idx = dset.getDataIdx(slide_idx)
                test_features = dset.getFeatureSet(data_idx, object_num)
                x_centroid_set = dset.getXcentroidSet(data_idx, object_num)
                y_centroid_set = dset.getYcentroidSet(data_idx, object_num)

                print "Testing Start ... "
                t0 = time()
                predicts = model.predict(test_features)
                t1 = time()
                print "Predict took ", t1 - t0

                # persist everything needed to re-render the prediction map
                output = h5py.File(report_map.inFile, 'w')
                output.create_dataset('features', data=test_features)
                output.create_dataset('predicts', data=predicts)
                output.create_dataset('x_centroid', data=x_centroid_set)
                output.create_dataset('y_centroid', data=y_centroid_set)
                output.create_dataset('slides', data=[report_map.slide])
                output.close()

                print ("map success ", report_map.inFile)
                data = {"success": report_map.outFile}
                db.set(q_uid, json.dumps(data))
                db.ltrim(set.REQUEST_QUEUE, len(q_uid), -1)

                # reset per-user state and the model for the next session
                uset.users = []
                uset.u_size = 0

                model = networks.Network()
                model.init_model()
                print ("map done")

            if target == 'save':
                # Serialize the user's session via the finalize handler and
                # return it to the client through Redis.
                data = finalize.getData(uset.users[uidx])
                db.set(q_uid, json.dumps(data))
                db.ltrim(set.REQUEST_QUEUE, len(q_uid), -1)

            if target == 'review':
                # Return the user's labeled samples for the review tab.
                # Labels are reported as +1/-1 (internally stored as 1/0).
                data = {}
                data['review'] = []

                for sample in uset.users[uidx]['samples']:
                    sample_data = {}
                    sample_data['id'] = str(sample['id'])
                    sample_data['label'] = 1 if sample['label'] == 1 else -1
                    sample_data['iteration'] = int(sample['iteration'])
                    sample_data['slide'] = str(sample['slide'])
                    sample_data['centX'] = str(sample['centX'])
                    sample_data['centY'] = str(sample['centY'])
                    # placeholders - not populated here
                    sample_data['boundary'] = ""
                    sample_data['maxX'] = 0
                    sample_data['maxY'] = 0

                    data['review'].append(sample_data)

                db.set(q_uid, json.dumps(data))
                db.ltrim(set.REQUEST_QUEUE, len(q_uid), -1)

            if target == 'train':
                # Incorporate newly labeled samples from the client, generate
                # augmented features for each, and (re)train the classifier.
                # increase checkpoint by 1
                m_checkpoints += 1
                # initialize augment
                agen = augments.Augments()
                uset.setIter(uidx, t_train.iter)

                for sample in t_train.samples:
                    # init sample and augment
                    init_sample = dict(
                        id=0, f_idx=0, checkpoints=0,
                        aurl=None, feature=None, label=0,
                        iteration=0, centX=0, centY=0,
                        slideIdx=0, slide=None
                    )
                    init_augment = dict(
                        id=[], checkpoints=[], feature=[], label=[]
                    )

                    # check db_id in users samples
                    remove_idx = []
                    for u in range(len(uset.users[uidx]['samples'])):
                        if uset.users[uidx]['samples'][u]['id'] == sample['id']:
                            remove_idx.append(u)

                    # NOTE(review): popping ascending indices shifts the later
                    # ones; iterate reversed(remove_idx) if more than one match
                    # is possible - confirm ids are unique per user
                    for r in remove_idx:
                        uset.users[uidx]['samples'].pop(r)
                        uset.users[uidx]['augments'].pop(r)

                    # add feature
                    init_sample['id'] = sample['id']
                    init_sample['aurl'] = str(sample['aurl'])
                    init_sample['slide'] = str(sample['slide'])

                    slide_idx = dset.getSlideIdx(init_sample['slide'])
                    object_num = dset.getObjNum(slide_idx)
                    data_idx = dset.getDataIdx(slide_idx)
                    feature_set = dset.getFeatureSet(data_idx, object_num)
                    x_centroid_set = dset.getXcentroidSet(data_idx, object_num)
                    y_centroid_set = dset.getYcentroidSet(data_idx, object_num)
                    slideIdx_set = dset.getSlideIdxSet(data_idx, object_num)

                    # locate the object on the slide by its centroid
                    c_idx = getIdx(
                        x_centroid_set.astype(np.float), y_centroid_set.astype(np.float), slideIdx_set.astype(np.int), np.float32(sample['centX']), np.float32(sample['centY']), slide_idx
                    )

                    f_idx = data_idx + c_idx

                    init_sample['f_idx'] =  f_idx
                    init_sample['feature'] = feature_set[c_idx]
                    init_sample['label'] = 1 if sample['label'] == 1 else 0
                    init_sample['iteration'] = t_train.iter
                    init_sample['centX'] = sample['centX']
                    init_sample['centY'] = sample['centY']
                    init_sample['checkpoints'] = m_checkpoints

                    # add augment features
                    slide_idx = dset.getSlideIdx(init_sample['slide'])
                    slide_mean = dset.getWSI_Mean(slide_idx)
                    slide_std = dset.getWSI_Std(slide_idx)

                    # augment the patch, embed with the CNN, project via PCA
                    a_imgs = agen.prepare_image(init_sample['aurl'], slide_mean, slide_std)
                    a_featureSet = iset.FC1_MODEL.predict(a_imgs)
                    a_featureSet = PCA.transform(a_featureSet)
                    a_labelSet = np.zeros((agen.AUG_BATCH_SIZE, )).astype(np.uint8)
                    a_idSet = []
                    a_checkpointSet = []
                    for i in range(agen.AUG_BATCH_SIZE):
                        a_idSet.append(init_sample['id'])
                        a_checkpointSet.append(init_sample['checkpoints'])
                    # augments inherit the parent sample's binary label
                    if init_sample['label'] > 0:
                        a_labelSet.fill(1)

                    init_augment['id'] = a_idSet
                    init_augment['feature'] = a_featureSet
                    init_augment['label'] = a_labelSet
                    init_augment['checkpoints'] = a_checkpointSet

                    uset.setAugmentData(uidx, init_augment)
                    uset.setTrainSampleData(uidx, init_sample)

                sample_size = len(uset.users[uidx]['samples'])
                sample_batch_size = agen.AUG_BATCH_SIZE * sample_size
                train_size = sample_size + sample_batch_size

                train_features = np.zeros((train_size, set.FEATURE_DIM))
                train_labels = np.zeros((train_size, ))

                # NOTE(review): augment slices overlap between consecutive i
                # when AUG_BATCH_SIZE > 1 - confirm intended indexing
                for i in range(sample_size):
                    train_features[i] = uset.users[uidx]['samples'][i]['feature']
                    train_labels[i] = uset.users[uidx]['samples'][i]['label']
                    train_features[i+sample_size:i+sample_size+agen.AUG_BATCH_SIZE] = uset.users[uidx]['augments'][i]['feature']
                    train_labels[i+sample_size:i+sample_size+agen.AUG_BATCH_SIZE] = uset.users[uidx]['augments'][i]['label']

                # train_labels = to_categorical(train_labels, num_classes=2)
                # keep the reloaded classifier name when one is active
                if tset_name is None:
                    tset_name = t_train.classifier

                print "Training ... ", len(train_labels)
                t0 = time()
                model.train_model(train_features, train_labels, tset_name)
                t1 = time()
                print "Training took ", t1 - t0

                data = {"success": 'pass'}
                db.set(q_uid, json.dumps(data))
                db.ltrim(set.REQUEST_QUEUE, len(q_uid), -1)

            if target == 'retrainView':
                # Like 'train', but after retraining also predicts every object
                # on the current slide and returns the predictions for objects
                # inside the client's viewport.

                m_checkpoints += 1
                # initialize augment
                agen = augments.Augments()

                uset.setIter(uidx, retrain_v.iter)

                print "Augment ... ", len(retrain_v.samples)
                t0 = time()
                for sample in retrain_v.samples:
                    # init sample and augment
                    init_sample = dict(
                        id=0, f_idx=0, checkpoints=0,
                        aurl=None, feature=None, label=0,
                        iteration=0, centX=0, centY=0,
                        slideIdx=0, slide=None
                    )
                    init_augment = dict(
                        id=[], checkpoints=[], feature=[], label=[]
                    )

                    # remove samples stored if it already exists
                    # NOTE(review): popping ascending indices shifts the later
                    # ones; iterate reversed(remove_idx) if multiple matches can
                    # occur - confirm ids are unique per user
                    remove_idx = []
                    for u in range(len(uset.users[uidx]['samples'])):
                        if uset.users[uidx]['samples'][u]['id'] == sample['id']:
                            remove_idx.append(u)

                    for r in remove_idx:
                        uset.users[uidx]['samples'].pop(r)
                        uset.users[uidx]['augments'].pop(r)

                    # add feature
                    init_sample['id'] = sample['id']
                    init_sample['aurl'] = str(sample['aurl'])
                    init_sample['slide'] = str(sample['slide'])

                    slide_idx = dset.getSlideIdx(init_sample['slide'])
                    object_num = dset.getObjNum(slide_idx)
                    data_idx = dset.getDataIdx(slide_idx)
                    feature_set = dset.getFeatureSet(data_idx, object_num)
                    x_centroid_set = dset.getXcentroidSet(data_idx, object_num)
                    y_centroid_set = dset.getYcentroidSet(data_idx, object_num)
                    slideIdx_set = dset.getSlideIdxSet(data_idx, object_num)

                    # locate the object on the slide by its centroid
                    c_idx = getIdx(
                        x_centroid_set.astype(np.float), y_centroid_set.astype(np.float), slideIdx_set.astype(np.int), np.float32(sample['centX']), np.float32(sample['centY']), slide_idx
                    )

                    f_idx = data_idx + c_idx

                    init_sample['f_idx'] =  f_idx
                    init_sample['feature'] = feature_set[c_idx]
                    init_sample['label'] = 1 if sample['label'] == 1 else 0
                    init_sample['iteration'] = retrain_v.iter
                    init_sample['centX'] = sample['centX']
                    init_sample['centY'] = sample['centY']
                    init_sample['checkpoints'] = m_checkpoints

                    # add augment features
                    slide_idx = dset.getSlideIdx(init_sample['slide'])
                    slide_mean = dset.getWSI_Mean(slide_idx)
                    slide_std = dset.getWSI_Std(slide_idx)

                    # augment the patch, embed with the CNN, project via PCA
                    a_imgs = agen.prepare_image(init_sample['aurl'], slide_mean, slide_std)
                    a_featureSet = iset.FC1_MODEL.predict(a_imgs)
                    a_featureSet = PCA.transform(a_featureSet)
                    a_labelSet = np.zeros((agen.AUG_BATCH_SIZE, )).astype(np.uint8)
                    a_idSet = []
                    a_checkpointSet = []
                    for i in range(agen.AUG_BATCH_SIZE):
                        a_idSet.append(init_sample['id'])
                        a_checkpointSet.append(init_sample['checkpoints'])
                    # augments inherit the parent sample's binary label
                    if init_sample['label'] > 0:
                        a_labelSet.fill(1)

                    init_augment['id'] = a_idSet
                    init_augment['feature'] = a_featureSet
                    init_augment['label'] = a_labelSet
                    init_augment['checkpoints'] = a_checkpointSet

                    uset.setAugmentData(uidx, init_augment)
                    uset.setTrainSampleData(uidx, init_sample)

                t1 = time()
                print "Augmentation took ", t1 - t0
                sample_size = len(uset.users[uidx]['samples'])
                sample_batch_size = agen.AUG_BATCH_SIZE * sample_size
                train_size = sample_size + sample_batch_size

                train_features = np.zeros((train_size, set.FEATURE_DIM))
                train_labels = np.zeros((train_size, ))

                # NOTE(review): augment slices overlap between consecutive i
                # when AUG_BATCH_SIZE > 1 - confirm intended indexing
                for i in range(sample_size):
                    train_features[i] = uset.users[uidx]['samples'][i]['feature']
                    train_labels[i] = uset.users[uidx]['samples'][i]['label']
                    train_features[i+sample_size:i+sample_size+agen.AUG_BATCH_SIZE] = uset.users[uidx]['augments'][i]['feature']
                    train_labels[i+sample_size:i+sample_size+agen.AUG_BATCH_SIZE] = uset.users[uidx]['augments'][i]['label']

                # train_labels = to_categorical(train_labels, num_classes=2)
                # keep the reloaded classifier name when one is active
                if tset_name is None:
                    tset_name = retrain_v.classifier

                t0 = time()
                model.train_model(train_features, train_labels, tset_name)
                t1 = time()
                print "Training took ", t1 - t0, " ", len(train_labels), "Samples"

                # predict the whole slide, then filter to the viewport
                slide_idx = dset.getSlideIdx(retrain_v.slide)
                object_num = dset.getObjNum(slide_idx)
                data_idx = dset.getDataIdx(slide_idx)
                feature_set = dset.getFeatureSet(data_idx, object_num)
                x_centroid_set = dset.getXcentroidSet(data_idx, object_num)
                y_centroid_set = dset.getYcentroidSet(data_idx, object_num)

                print "Testing Start ... "
                t0 = time()
                predictions = model.predict(feature_set)
                t1 = time()
                print "Predict took ", t1 - t0

                object_idx = load(
                    retrain_v.left, retrain_v.right, retrain_v.top, retrain_v.bottom, x_centroid_set.astype(np.float), y_centroid_set.astype(np.float)
                )
                # response maps "x_y" centroid keys to prediction strings
                data = {}
                for i in object_idx:
                    data[str(x_centroid_set[i][0])+'_'+str(y_centroid_set[i][0])] = str(predictions[i])

                db.set(q_uid, json.dumps(data))
                db.ltrim(set.REQUEST_QUEUE, len(q_uid), -1)

            if target == 'retrainHeatmap':
                # Like 'train', but after retraining computes class
                # probabilities for the whole slide and returns heatmap data.
                m_checkpoints += 1
                # initialize augment
                agen = augments.Augments()

                uset.setIter(uidx, retrain_h.iter)

                for sample in retrain_h.samples:
                    # init sample and augment
                    init_sample = dict(
                        id=0, f_idx=0, checkpoints=0,
                        aurl=None, feature=None, label=0,
                        iteration=0, centX=0, centY=0,
                        slideIdx=0, slide=None
                    )
                    init_augment = dict(
                        id=[], checkpoints=[], feature=[], label=[]
                    )

                    # remove samples stored if it already exists
                    # NOTE(review): popping ascending indices shifts the later
                    # ones; iterate reversed(remove_idx) if multiple matches can
                    # occur - confirm ids are unique per user
                    remove_idx = []
                    for u in range(len(uset.users[uidx]['samples'])):
                        if uset.users[uidx]['samples'][u]['id'] == sample['id']:
                            remove_idx.append(u)

                    for r in remove_idx:
                        uset.users[uidx]['samples'].pop(r)
                        uset.users[uidx]['augments'].pop(r)

                    # add feature
                    init_sample['id'] = sample['id']
                    init_sample['aurl'] = str(sample['aurl'])
                    init_sample['slide'] = str(sample['slide'])

                    slide_idx = dset.getSlideIdx(init_sample['slide'])
                    object_num = dset.getObjNum(slide_idx)
                    data_idx = dset.getDataIdx(slide_idx)
                    feature_set = dset.getFeatureSet(data_idx, object_num)
                    x_centroid_set = dset.getXcentroidSet(data_idx, object_num)
                    y_centroid_set = dset.getYcentroidSet(data_idx, object_num)
                    slideIdx_set = dset.getSlideIdxSet(data_idx, object_num)

                    # locate the object on the slide by its centroid
                    c_idx = getIdx(
                        x_centroid_set.astype(np.float), y_centroid_set.astype(np.float), slideIdx_set.astype(np.int), np.float32(sample['centX']), np.float32(sample['centY']), slide_idx
                    )

                    f_idx = data_idx + c_idx

                    init_sample['f_idx'] =  f_idx
                    init_sample['feature'] = feature_set[c_idx]
                    init_sample['label'] = 1 if sample['label'] == 1 else 0
                    init_sample['iteration'] = retrain_h.iter
                    init_sample['centX'] = sample['centX']
                    init_sample['centY'] = sample['centY']
                    init_sample['checkpoints'] = m_checkpoints

                    # add augment features
                    slide_idx = dset.getSlideIdx(init_sample['slide'])
                    slide_mean = dset.getWSI_Mean(slide_idx)
                    slide_std = dset.getWSI_Std(slide_idx)

                    # augment the patch, embed with the CNN, project via PCA
                    a_imgs = agen.prepare_image(init_sample['aurl'], slide_mean, slide_std)
                    a_featureSet = iset.FC1_MODEL.predict(a_imgs)
                    a_featureSet = PCA.transform(a_featureSet)
                    a_labelSet = np.zeros((agen.AUG_BATCH_SIZE, )).astype(np.uint8)
                    a_idSet = []
                    a_checkpointSet = []
                    for i in range(agen.AUG_BATCH_SIZE):
                        a_idSet.append(init_sample['id'])
                        a_checkpointSet.append(init_sample['checkpoints'])
                    # augments inherit the parent sample's binary label
                    if init_sample['label'] > 0:
                        a_labelSet.fill(1)

                    init_augment['id'] = a_idSet
                    init_augment['feature'] = a_featureSet
                    init_augment['label'] = a_labelSet
                    init_augment['checkpoints'] = a_checkpointSet

                    uset.setAugmentData(uidx, init_augment)
                    uset.setTrainSampleData(uidx, init_sample)

                sample_size = len(uset.users[uidx]['samples'])
                sample_batch_size = agen.AUG_BATCH_SIZE * sample_size
                train_size = sample_size + sample_batch_size

                train_features = np.zeros((train_size, set.FEATURE_DIM))
                train_labels = np.zeros((train_size, ))

                # NOTE(review): augment slices overlap between consecutive i
                # when AUG_BATCH_SIZE > 1 - confirm intended indexing
                for i in range(sample_size):
                    train_features[i] = uset.users[uidx]['samples'][i]['feature']
                    train_labels[i] = uset.users[uidx]['samples'][i]['label']
                    train_features[i+sample_size:i+sample_size+agen.AUG_BATCH_SIZE] = uset.users[uidx]['augments'][i]['feature']
                    train_labels[i+sample_size:i+sample_size+agen.AUG_BATCH_SIZE] = uset.users[uidx]['augments'][i]['label']

                # keep the reloaded classifier name when one is active
                if tset_name is None:
                    tset_name = retrain_h.classifier

                t0 = time()
                model.train_model(train_features, train_labels, tset_name)
                t1 = time()
                print "Training took ", t1 - t0, " ", len(train_labels), "Samples"

                slide_idx = dset.getSlideIdx(retrain_h.slide)
                object_num = dset.getObjNum(slide_idx)
                data_idx = dset.getDataIdx(slide_idx)
                feature_set = dset.getFeatureSet(data_idx, object_num)
                x_centroid_set = dset.getXcentroidSet(data_idx, object_num)
                y_centroid_set = dset.getYcentroidSet(data_idx, object_num)

                print "Testing Start ... "
                t0 = time()
                # NOTE(review): scores is only assigned when IS_HEATMAP is
                # False; setHeatMap below raises NameError otherwise - confirm
                # the flag invariant for this branch
                if set.IS_HEATMAP == False:
                    scores = model.predict_prob(feature_set)
                t1 = time()
                print "Predict took ", t1 - t0
                # set x and y maps
                retrain_h.setXandYmap()
                # write heatmaps
                retrain_h.setHeatMap(x_centroid_set, y_centroid_set, scores)
                # get heatmap data
                data = retrain_h.getData(0)

                db.set(q_uid, json.dumps(data))
                db.ltrim(set.REQUEST_QUEUE, len(q_uid), -1)

            if target == 'cancel':
                # Reset all per-session state and rebuild a fresh model.

                uset.users = []
                uset.u_size = 0
                is_normal_loaded = True
                tset_name = None
                is_reloaded = False
                m_checkpoints = 0

                # NOTE(review): these names may only be bound after their
                # request types have been seen; del raises NameError for an
                # unbound name - confirm cancel can't arrive first
                del select
                del finalize
                del viewer
                del retrain_v
                del retrain_h
                del heat
                del t_train
                del report_label

                model = networks.Network()
                model.init_model()
                # dset = dataset.Dataset(set.PATH_TO_SPECIAL)

                data = {"success": 'pass'}
                db.set(q_uid, json.dumps(data))
                db.ltrim(set.REQUEST_QUEUE, len(q_uid), -1)

            if target == 'reviewSave':
                # modify labels if they are changed on review tab
                # (augments share their parent sample's id, so relabel the
                # whole augment batch in place as well)
                for q_sample in q_samples:
                    for sample in uset.users[uidx]['samples']:
                        if sample['id'] == q_sample['id']:
                            sample['label'] = 1 if q_sample['label'] == 1 else 0

                    for sample in uset.users[uidx]['augments']:
                        if sample['id'][0] == q_sample['id']:
                            sample['label'][:] = 1 if q_sample['label'] == 1 else 0

                data = {"success": 'pass'}
                db.set(q_uid, json.dumps(data))
                db.ltrim(set.REQUEST_QUEUE, len(q_uid), -1)
Exemplo n.º 18
0
	def __init__(self, shader, player, texture_manager, options):
		"""Build the world: parse block definitions, create the shared index
		buffer, then load, light and mesh the saved chunks.

		shader / player / texture_manager / options are project objects; they
		are stored as-is and used throughout the class.
		"""
		self.options = options
		self.shader = shader
		self.player = player
		self.texture_manager = texture_manager
		self.block_types = [None]

		self.shader_daylight_location = shader.find_uniform(b"u_Daylight")
		self.daylight = 1800
		self.incrementer = 0
		self.time = 0
		self.c = 0

		# Compat
		self.get_chunk_position = get_chunk_position
		self.get_local_position = get_local_position

		# parse block type data file; context manager guarantees the file is
		# closed even if parsing below raises
		with open("data/blocks.mcpy") as blocks_data_file:
			blocks_data = blocks_data_file.readlines()

		logging.info("Loading block models")
		for block in blocks_data:
			if block[0] in ('\n', '#'): # skip if empty line or comment
				continue

			number, props = block.split(':', 1)
			number = int(number)

			# defaults for a block entry

			name = "Unknown"
			model = models.cube
			texture = {"all": "unknown"}

			# read properties

			for prop in props.split(','):
				prop = prop.strip()
				prop = list(filter(None, prop.split(' ', 1)))

				if prop[0] == "sameas":
					# inherit name/texture/model from an earlier block entry
					sameas_number = int(prop[1])

					name = self.block_types[sameas_number].name
					texture = self.block_types[sameas_number].block_face_textures
					model = self.block_types[sameas_number].model

				elif prop[0] == "name":
					# NOTE(review): eval on the data file — only safe for trusted, bundled data
					name = eval(prop[1])

				elif prop[0][:7] == "texture":
					_, side = prop[0].split('.')
					texture[side] = prop[1].strip()

				elif prop[0] == "model":
					# NOTE(review): eval on the data file — only safe for trusted, bundled data
					model = eval(prop[1])

			# add block type, either replacing an existing slot or appending

			_block_type = block_type.Block_type(self.texture_manager, name, texture, model)

			if number < len(self.block_types):
				self.block_types[number] = _block_type
			else:
				self.block_types.append(_block_type)

		self.light_blocks = [10, 11, 50, 51, 62, 75]

		self.texture_manager.generate_mipmaps()

		# shared index buffer: two triangles (6 indices) per quad
		indices = []
		for nquad in range(chunk.CHUNK_WIDTH * chunk.CHUNK_HEIGHT * chunk.CHUNK_LENGTH * 8):
			indices.extend((
				4 * nquad + 0, 4 * nquad + 1, 4 * nquad + 2,
				4 * nquad + 2, 4 * nquad + 3, 4 * nquad + 0,
			))

		self.ibo = gl.GLuint(0)
		gl.glGenBuffers(1, self.ibo)
		gl.glBindBuffer(gl.GL_ELEMENT_ARRAY_BUFFER, self.ibo)
		gl.glBufferData(
			gl.GL_ELEMENT_ARRAY_BUFFER,
			ctypes.sizeof(gl.GLuint * len(indices)),
			(gl.GLuint * len(indices))(*indices),
			gl.GL_STATIC_DRAW)
		gl.glBindBuffer(gl.GL_ELEMENT_ARRAY_BUFFER, 0)

		logging.debug("Created Shared Index Buffer")

		# load the world

		self.save = save.Save(self)

		self.chunks = {}
		self.sorted_chunks = []

		# light update queues; each node is (world position, light)

		self.light_increase_queue = deque()
		self.light_decrease_queue = deque()
		self.skylight_increase_queue = deque()
		self.skylight_decrease_queue = deque()
		self.chunk_building_queue = deque()

		self.save.load()

		logging.info("Lighting chunks")
		for world_chunk in self.chunks.values():
			self.init_skylight(world_chunk)

		logging.info("Generating chunks")
		for world_chunk in self.chunks.values():
			world_chunk.update_subchunk_meshes()

		# free the (potentially multi-million entry) temporary index list
		del indices
		self.visible_chunks = []

		# Debug variables

		self.pending_chunk_update_count = 0
		self.chunk_update_counter = 0
Exemplo n.º 19
0
    def __init__(self):
        """Parse block definitions, load the saved world and build the mesh
        of every loaded chunk (headless variant: no GL index buffer here).
        """
        self.texture_manager = texture_manager.Texture_manager(16, 16, 256)
        self.block_types = [None]

        # parse block type data file; context manager guarantees the file is
        # closed even if parsing below raises
        with open("data/blocks.mcpy") as blocks_data_file:
            blocks_data = blocks_data_file.readlines()

        for block in blocks_data:
            if block[0] in ('\n', '#'):  # skip if empty line or comment
                continue

            number, props = block.split(':', 1)
            number = int(number)

            # defaults for a block entry

            name = "Unknown"
            model = models.cube
            texture = {"all": "unknown"}

            # read properties

            for prop in props.split(','):
                prop = prop.strip()
                prop = list(filter(None, prop.split(' ', 1)))

                if prop[0] == "sameas":
                    # inherit name/texture/model from an earlier block entry
                    sameas_number = int(prop[1])

                    name = self.block_types[sameas_number].name
                    texture = self.block_types[
                        sameas_number].block_face_textures
                    model = self.block_types[sameas_number].model

                elif prop[0] == "name":
                    # NOTE(review): eval on the data file — only safe for
                    # trusted, bundled data
                    name = eval(prop[1])

                elif prop[0][:7] == "texture":
                    _, side = prop[0].split('.')
                    texture[side] = prop[1].strip()

                elif prop[0] == "model":
                    # NOTE(review): eval on the data file — only safe for
                    # trusted, bundled data
                    model = eval(prop[1])

            # add block type, either replacing an existing slot or appending

            _block_type = block_type.Block_type(self.texture_manager, name,
                                                texture, model)

            if number < len(self.block_types):
                self.block_types[number] = _block_type
            else:
                self.block_types.append(_block_type)

        self.texture_manager.generate_mipmaps()

        # load the world

        self.save = save.Save(self)

        self.chunks = {}
        self.save.load()

        # build meshes for every loaded chunk (iterate values directly
        # instead of key-then-index lookups)
        for world_chunk in self.chunks.values():
            world_chunk.update_subchunk_meshes()
            world_chunk.update_mesh()
Exemplo n.º 20
0
    tasks = []
    for path in config.training_tasks:
        with open(path, 'r') as file:
            task_loaded = json.load(file)
            task_loaded["name"] = path
            tasks.append(task_loaded)

    if args.filter:
        tasks = taskfilter.filter_tasks_by_max_board_area(
            tasks, config.max_board_area)
        tasks = taskfilter.filter_tasks_by_number_of_colors(
            tasks, config.min_colors, config.max_colors,
            config.must_have_black)

    results = save.Save()
    passed_tests = 0
    all_tests = 0

    for task in tasks:
        i += 1
        print(task["name"])
        print(f'Task {i}: ', end='')

        result_boards = process_task("path", task, results,
                                     operations.OPERATIONS,
                                     splitting.SPLITTING_TYPES,
                                     strategies.STRATEGY)
        passed_tests_current, all_tests_current =\
            show_results(task, result_boards, args.visualize)
        results.add_results(result_boards, task["name"])
Exemplo n.º 21
0
#import queue
#import threading

import traceback

import game
#import radio
import save

# Disabled background radio playback — kept for reference:
#q = queue.Queue()
#music_thread = threading.Thread(target=radio.run_radio, args=[q])
#music_thread.daemon = True
#music_thread.start()

# Guard the entry point so importing this module does not launch the game.
if __name__ == "__main__":
    s = save.Save()
    g = game.Game(s)
    g.run()
    #q.put("exit")
Exemplo n.º 22
0
    def do_download(self, pic_size: dict, page, max_page, pic_type, path):
        """Crawl yande.re listing pages and download pictures that match the
        given size/orientation filters.

        pic_size: dict with 'min' and 'max' sub-dicts holding 'width',
            'height' and 'proportion' limits (a falsy 'max' entry disables
            that particular limit).
        page: listing page number to start crawling from.
        max_page: last page to crawl; 0 means crawl until the picture saved
            from the previous run (flag_id) is reached.
        pic_type: 0 = any, 1 = landscape only, 2 = portrait only,
            3 = square only.
        path: download directory passed to save.Save.
        """
        # default: 'C:/Yandere/'
        wr = save.Save(path)
        yande = yande_re.Yande()
        lg = log.Log()
        resp = phttp.Http()

        wr.create_folder()

        flag_id = int(wr.get('flag_id.data'))  # ID of the first picture of the previous crawl; reaching it ends this crawl
        i = 0  # running picture counter
        end = False  # has the crawl finished?

        while True:

            # only crawl while max_page is 0 (unbounded) or the end page has not been passed
            if max_page == 0 or page <= max_page:
                # fetch the listing page content
                lg.add('正在读取第' + str(page) + '页……')
                html = yande.get_html(page)
                # process every <li> entry on the page
                for li in yande.get_li(html):
                    i += 1
                    info = yande.get_info(li)[
                        0]  # (id, img_url, width, height)
                    width = int(info[2])
                    height = int(info[3])

                    # persist last_start_id
                    if i == 1:
                        if len(info) == 4:
                            wr.clog('flag_id.data', info[0], True)
                        else:
                            # the very first <li> is already malformed, so
                            # last_start_id cannot be stored
                            # NOTE(review): bare exit() aborts the whole program here
                            exit()

                    # malformed entry?
                    if len(info) != 4:
                        lg.add(str(i) + ' 错误,跳过')
                        continue

                    # reached where the previous crawl started and max_page is 0: this crawl is done
                    if int(info[0]) == flag_id and max_page == 0:
                        end = True
                        break

                    download = False  # download this picture?
                    # check orientation type (kept as a plain if-chain)
                    if pic_type == 0:
                        download = True
                    elif pic_type == 1 and width > height:
                        download = True
                    elif pic_type == 2 and width < height:
                        download = True
                    elif pic_type == 3 and width == height:
                        download = True
                    else:
                        lg.add('图片类型不符,跳过')
                        continue
                    # check picture dimensions
                    if width >= pic_size['min'][
                            'width'] and height >= pic_size['min']['height']:
                        if pic_size['max'][
                                'width'] and width > pic_size['max']['width']:
                            download = False
                        if pic_size['max']['height'] and height > pic_size[
                                'max']['height']:
                            download = False
                    else:
                        download = False
                    # check aspect ratio
                    proportion = width / height
                    if proportion < pic_size['min']['proportion'] or (
                            pic_size['max']['proportion']
                            and proportion > pic_size['max']['proportion']):
                        download = False
                    if not download:
                        lg.add('图片尺寸不符,跳过')
                        continue

                    if download:
                        # derive the file name
                        # no URL-decoding here: some file names contain '*' and similar characters
                        file_name = info[1].split('/')[-1]
                        # a timestamp naming scheme is available below, but it could break the exists() check
                        # file_name = datetime.datetime.now().strftime('%Y%m%d') + "-" + info[0] + "-" + file_name
                        # already downloaded?
                        if wr.exists(file_name):
                            lg.add(info[0] + ' 已存在,跳过')
                            continue

                        lg.add(str(i) + ' - ' + info[0] + ' 开始下载……')
                        ts = time.time()
                        img = resp.get(
                            info[1], {
                                'Host': 'files.yande.re',
                                'Referer':
                                'https://yande.re/post/show/' + info[0]
                            }).content
                        lg.add('下载完毕。耗时:' + str(int(time.time() - ts)) + 's')

                        wr.write(file_name, img)

                if end:
                    break
            else:
                break

            page += 1

        lg.add('爬取结束')
        wr.clog('log_' + str(int(time.time())) + '.txt', lg.get())
        # NOTE(review): exit(200) kills the interpreter; sys.exit would be cleaner outside a REPL
        exit(200)