Example #1
def update_curriculum_check(uid):
    """Update the curriculum_check record by uid\n
    Return: `bool`"""
    # if not exist
    res = db.session.execute(
        text('SELECT id FROM Course.curriculum_check WHERE uid=:uid'),
        {'uid': uid})
    if not res.rowcount:
        res = db.session.execute(
            text(
                'INSERT INTO Course.curriculum_check (uid, time) VALUES (:uid, :time)'
            ), {
                'uid': uid,
                'time': utils.time_now()
            })
        db.session.commit()
        return res.rowcount
    # Record exists: refresh its timestamp
    res = db.session.execute(
        text('''
        UPDATE Course.curriculum_check SET time=:new_time WHERE uid=:uid
    '''), {
            'new_time': utils.time_now(),
            'uid': uid
        })
    db.session.commit()
    return res.rowcount
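
Note: the SELECT-then-INSERT/UPDATE sequence above is not atomic; two concurrent calls can both miss the SELECT and insert the row twice. Assuming a MySQL/MariaDB backend and a unique index on uid (neither is shown in the listing), the same effect can be sketched as a single upsert:

    # Hedged alternative sketch: assumes the MySQL dialect and a UNIQUE index on uid.
    res = db.session.execute(
        text('INSERT INTO Course.curriculum_check (uid, time) VALUES (:uid, :time) '
             'ON DUPLICATE KEY UPDATE time = :time'),
        {'uid': uid, 'time': utils.time_now()})
    db.session.commit()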
Example #2
    def compute_and_save_features(self, loaders):
        print("Time:{}.  Start to compute features".format(time_now()))
        # compute features
        features_meter, pids_meter, cids_meter = CatMeter(), CatMeter(), CatMeter()
        self.set_eval()
        with torch.no_grad():
            for i, data in enumerate(loaders.rgb_all_loader):
                # load data
                images, pids, cids, _ = data
                # forward
                images = images.to(self.device)
                feat_pool, feat_fc = self.model(images, images, 1)
                # meter
                features_meter.update(feat_fc.data)
                pids_meter.update(pids.data)
                cids_meter.update(cids.data)

            for i, data in enumerate(loaders.ir_all_loader):
                # load data
                images, pids, cids, _ = data
                # forward
                images = images.to(self.device)
                feat_pool, feat_fc = self.model(images, images, 2)
                # meter
                features_meter.update(feat_fc.data)
                pids_meter.update(pids.data)
                cids_meter.update(cids.data)

        features = features_meter.get_val_numpy()
        pids = pids_meter.get_val_numpy()
        cids = cids_meter.get_val_numpy()

        print("Time: {}.  Note: Start to save features as .mat file".format(time_now()))
        # save features as .mat file
        results = {1: XX(), 2: XX(), 3: XX(), 4: XX(), 5: XX(), 6: XX()}
        for i in range(features.shape[0]):
            feature = features[i, :]
            feature = np.resize(feature, [1, feature.shape[0]])
            cid, pid = cids[i], pids[i]
            results[cid].update(pid, feature)

        pid_num_of_cids = [333, 333, 533, 533, 533, 333]
        cids = [1, 2, 3, 4, 5, 6]
        for cid in cids:
            a_result = results[cid]
            xx = []
            for pid in range(1, 1 + pid_num_of_cids[cid - 1]):
                xx.append([a_result.get_val(pid).astype(np.double)])
            xx = np.array(xx)
            os.makedirs(os.path.join(self.results_dir, "test_features"), exist_ok=True)
            sio.savemat(
                os.path.join(
                    self.results_dir, "test_features", "feature_cam{}.mat".format(cid)
                ),
                {"feature": xx},
            )
        print("Time: {}. end to save features as .mat file".format(time_now()))
Example #3
def insert_image(id):
    name = "image_" + str(time_now()) + ".jpg"
    copyfile(HOME_PATH + '/out.jpg', HOME_PATH + '/images/' + name)
    con = lite.connect(HOME_PATH + "/static/samples.db")
    cur = con.cursor()
    # Parameterized query avoids manual quoting of values
    cur.execute("UPDATE Samples SET image=? WHERE id=?", (name, int(id)))
    con.commit()
    con.close()
Example #4
    def resume_model(self, resume_epoch):
        for i, _ in enumerate(self.model_list):
            self.model_list[i].load_state_dict(
                torch.load(
                    os.path.join(
                        self.config.save_models_path +
                        'models_{}'.format(resume_epoch),
                        'model-{}_{}.pkl'.format(i, resume_epoch))))
        print('Time: {}, successfully resumed model from epoch {}'.format(
            time_now(), resume_epoch))
Example #5
    def __init__(self, domain, period, hyperparameters, domain_limits):
        self.LAT_S, self.LAT_N, self.LON_W, self.LON_E = domain
        self.domain = domain
        d_str = '_'.join(str(i) for i in self.domain)
        self.dir_str = d_str
        self.iterations, self.gridsize, self.training_mode, self.sigma, self.learning_rate, self.random_seed = hyperparameters
        self.dir_hp_str = f'{d_str}_{self.iterations}iter_{self.gridsize}x{self.gridsize}_{self.sigma}sig_{self.learning_rate}lr'
        self.period = period
        self.hyperparameters = hyperparameters
        self.RUN_datetime = utils.datetime_now()
        self.RUN_time = utils.time_now()
        self.domain_limits = domain_limits
        self.domain_limits_str = '_'.join(str(i) for i in domain_limits)
        # evaluation params
        self.ALPHAs = None  # id of the PSI split dataset, distributed automatically
Example #6
    def train(self, auto_resume=True, eval_freq=0):
        """
        Args:
            auto_resume(boolean): automatically resume latest model from self.result_dir/model_{latest_epoch}.pth if True.
            eval_freq(int): if type is int, evaluate every eval_freq. default is 0.
        """

        # automatically resume from the latest model
        start_epoch = 0
        if auto_resume:
            start_epoch = self.resume_latest_model()
            start_epoch = 0 if start_epoch is None else start_epoch
        # train loop
        best_epoch = start_epoch
        best_rank1 = -100
        best_rank1_map = -1
        for curr_epoch in range(start_epoch, self.optimizer.max_epochs):
            # save model
            self.save_model(curr_epoch)
            # periodically evaluate the current model
            if eval_freq > 0 and curr_epoch % eval_freq == 0 and curr_epoch > 0:
                cmc, mAP = self.eval(self.test_dataset)
                # self.eval2(self.test_dataset)
                if cmc[0] > best_rank1:
                    best_epoch = curr_epoch
                    best_rank1 = cmc[0]
                    best_rank1_map = mAP
                print(
                    "best rank1: ",
                    best_rank1,
                    "mAP:",
                    best_rank1_map,
                    "epoch: ",
                    best_epoch,
                )
            # train
            results = self.train_an_epoch(curr_epoch)
            # logging
            self.logging(EPOCH=curr_epoch, TIME=time_now(), RESULTS=results)
        # save final model
        self.save_model(self.optimizer.max_epochs)
        # evaluate final model
        self.eval(self.test_dataset)
Example #7
    def resume_latest_model(self):
        """
        Resume from the latest model in path self.results_dir.
        """
        root, _, files = os_walk(self.results_dir)
        pth_files = [
            file for file in files if ".pth" in file and file != "final_model.pth.tar"
        ]
        if len(pth_files) != 0:
            pth_epochs = [
                int(pth_file.replace(".pth", "").split("_")[1])
                for pth_file in pth_files
            ]
            max_epoch = max(pth_epochs)
            model_path = os.path.join(root, "model_{}.pth".format(max_epoch))
            self.model.load_state_dict(torch.load(model_path), strict=True)
            self.logging(time_now(), "restore from {}".format(model_path))
            return max_epoch
        else:
            return None
Example #8
    def __update(self, year, month):
        ret_dict = dict()
        # self.cache[(year, month)]
        for k in ROOM_MAP:
            try:
                req = urllib2.urlopen(ROOM_INFO_URL_FMT % (year, month - 1, k))
            except urllib2.URLError:
                continue
            month_dict = dict()

            d = req.read().decode("utf-8").replace("\t", "").replace("  ", "")

            avtime = []
            th = d.split("<th")[3:]
            for t in th:
                avtime.append(int(t.split(">")[1].split("<")[0]))

            month_dict['avtime'] = avtime

            for low in d.split("<tr>")[3:]:

                date = low.split("<td")[1].split("<")[0].split(">")[-1]
                date = int(date.split(" ")[0])

                month_dict[date] = list()

                for col in low.split("<td")[2:]:
                    try:
                        month_dict[date].append(
                            int(col.split("<")[0].split(">")[-1]))
                    except (ValueError, IndexError):
                        # skip cells that do not contain a number
                        pass
            ret_dict[k] = month_dict
        if len(ret_dict) > 0:
            ret_dict['time'] = utils.time_now()
        else:
            ret_dict = None
        return ret_dict
Example #9
def prepare_download_tar():
    export = tarfile.open(HOME_PATH + '/static/data.tar', mode='w:gz')
    export.add(HOME_PATH + '/static/samples.db', arcname='/samples.db')
    time = time_now()
    export.add(HOME_PATH + '/images', arcname=('/images_' + str(time)))
    export.close()
Example #11
def now():
    return utils.time_now()
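
None of the snippets show utils.time_now itself; the projects above use its result both as a numeric timestamp and as something printable in log lines. Purely as an assumption for orientation, a helper with this name is often just a thin wrapper over the standard library:

    import time

    def time_now():
        # Hypothetical implementation, not taken from any project above:
        # current Unix time in seconds as a float.
        return time.time()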
Example #12
    def get(self, stuID, weekday, timeperiod):
        idx = ['', '2', '3']
        period = ['D0', 'D1', 'D2', 'D3', 'D4', 'DN', 'D5',
                    'D6', 'D7', 'D8', 'E0', 'E1', 'E2', 'E3', 'E4']
        day = ['Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun']
        day_num2eng = ['', 'Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun']
        timelist = [[0]*15 for i in range(7)]

        res = db.session.execute(text('SELECT uid FROM `user` WHERE username=:user'), {'user': stuID})
        uid = res.fetchone()[0]
        space_time = Auto_course_insert.get_free_period(uid, timelist)

        # CoursePeriod
        ALL_PERIOD = ['D0', 'D1', 'D2', 'D3', 'D4', 'DN', 'D5', 'D6', 'D7', 'D8', 'E0', 'E1', 'E2', 'E3', 'E4']
        PERIOD2IDX = {'D0' : 0, 'D1' : 1, 'D2' : 2, 'D3' : 3, 'D4' : 4, 'DN' : 5, 'D5' : 6, 'D6' : 7, 'D7' : 8, 'D8' : 9, 'E0' : 10, 'E1' : 11, 'E2' : 12, 'E3' : 13, 'E4' : 14}

        data = Auto_course_insert.load_schedule_data_if_not_expired(uid)
        if not data:
            data = {
                'Mon': {},
                'Tue': {},
                'Wed': {},
                'Thu': {},
                'Fri': {},
                'Sat': {},
                'Sun': {}
            }
            print('[*] debug: Collecting pickable courses')
            for i in range(0, 7):
                for j in space_time[i]:
                    sql = 'SELECT * FROM fju_course WHERE '
                    sql += "day='{0}' OR day2='{0}' OR day3='{0}'".format(day[i])

                    res = db.session.execute(text(sql))
                    # print(res.rowcount)
                    time = j.split('-')
                    time = CoursePeriod(*time)
                    # print(time)

                    candi = []
                    for row in res:
                        # Check period of a course if necessary
                        p1 = make_CoursePeriod(row['period']) if row['period'] else None
                        p2 = make_CoursePeriod(row['period2']) if row['period2'] else None
                        p3 = make_CoursePeriod(row['period3']) if row['period3'] else None
                        plist = [p1, p2, p3]
                        succ = True
                        for p in plist:
                            if p and p not in time:
                                # print(p)
                                succ = False
                        if succ:
                            candi.append({
                                'course_code': check_null(row['course_code']),
                                'name'       : check_null(row['name']),
                                'teacher'    : check_null(row['teacher']),
                                'department' : check_null(row['department']),
                                'score'      : check_null(row['score']),
                                'kind'       : check_null(row['kind']),
                                'times'      : check_null(row['times']),
                                'day'        : check_null(row['day']),
                                'week'       : check_null(row['week']),
                                'period'     : check_null(row['period']),
                                'classroom'  : check_null(row['classroom']),
                                'day2'       : check_null(row['day2']),
                                'week2'      : check_null(row['week2']),
                                'period2'    : check_null(row['period2']),
                                'classroom2' : check_null(row['classroom2']),
                                'day3'       : check_null(row['day3']),
                                'week3'      : check_null(row['week3']),
                                'period3'    : check_null(row['period3']),
                                'classroom3' : check_null(row['classroom3'])
                            })
                    data[day[i]].update({j: candi})
                    # end for row
                # end for j
            # end for i

            # Persist the freshly built schedule data for this user
            db.session.execute(text('''
                UPDATE user SET schedule_data=:schedule_data, schedule_data_time=:schedule_data_time WHERE uid=:uid
            '''), {'schedule_data': json.dumps(data), 'schedule_data_time': utils.time_now(), 'uid': uid})
            db.session.commit()
        else:
            print('[*] debug: deserialize schedule_data')
        weekday = day_num2eng[weekday]
        tlist = []
        for i in data[weekday]: # Turn period into CoursePeriod
            tlist.append(CoursePeriod(*(i.split('-'))))
        cur_time = CoursePeriod(timeperiod, timeperiod) # e.g. D4 = D4-D4
        res_list = None
        # Check whether any CoursePeriod in the list includes cur_time
        for t in tlist:
            if cur_time in t:
                k = str(t)
                res_list = data[weekday][k]
                break
        return res_list, 200
Example #13
    def normalize_config(self):
        """
        Some data may be missing. Verify a working set
        of parameters.

        If OK, return 0, else 1
        """
        # All values must be string!
        for key in self.log_filters.keys():
            if not isinstance(self.log_filters[key], str):
                sys.stderr.write(
                    'Configuration: Key %s must be a string! Not %s\n' %
                    (key, str(self.log_filters[key])))
                return 1

        # Verify input file.
        in_file = self.log_filters.get('in_file', None)
        if in_file is None:
            self.log_filters['in_file_handle'] = sys.stdin
            self.log_filters['in_file'] = 'sys.stdin'
        else:
            try:
                file_handle = open(in_file, 'r')
            except IOError as err:
                sys.stderr.write('--in-file="%s": %s\n' % (in_file, str(err)))
                return 1
            self.log_filters['in_file_handle'] = file_handle

        # Verify output file.
        out_file = self.log_filters.get('out_file', None)
        if out_file is None:
            self.log_filters['out_file_handle'] = sys.stdout
            self.log_filters['out_file'] = '<sys.stdout>'
        else:
            try:
                file_handle = open(out_file, 'w')
            except IOError as err:
                sys.stderr.write('--out-file="%s": %s\n' %
                                 (out_file, str(err)))
                return 1
            self.log_filters['out_file_handle'] = file_handle

        if 'start' not in self.log_filters:
            # No start date. Use start of epoch.
            self.log_filters['start'] = '1970-01-01T00:00:00.000'
        start_date = utils.ISO8601_to_seconds(self.log_filters['start'])
        if start_date is None:
            sys.stderr.write('--start="%s" is not a valid ISO8601 date\n' %
                             self.log_filters['start'])
            return 1
        self.log_filters['start_secs'] = start_date

        if 'end' not in self.log_filters:
            # No end time specified. Assume now.
            now_secs = utils.time_now()
            self.log_filters['end_secs'] = now_secs
            self.log_filters['end'] = utils.seconds_to_ISO8601(now_secs)
        else:
            end_secs = utils.ISO8601_to_seconds(self.log_filters['end'])
            if end_secs is None:
                sys.stderr.write('--end="%s" is not a valid ISO8601 date\n' %
                                 self.log_filters['end'])
                return 1
            self.log_filters['end_secs'] = end_secs

        if self.log_filters['end_secs'] <= self.log_filters['start_secs']:
            sys.stderr.write(
                'end time <= start time. start=%s, end=%s\n' %
                (self.log_filters['start'], self.log_filters['end']))
            return 1

        if 'sep_char' not in self.log_filters.keys():
            self.log_filters['sep_char'] = utils.SEPARATION_CHAR
        if 'key_val_sep' not in self.log_filters.keys():
            self.log_filters['key_val_sep'] = \
                    utils.KEY_VALUE_SEPARATOR
        if 'payload_connector' not in self.log_filters.keys():
            self.log_filters['payload_connector'] = \
                    utils.PAYLOAD_CONNECTOR

        if 'level' not in self.log_filters.keys():
            self.log_filters['level'] = 'DEBUG'  # Pass all logs
        self.filter_dict = \
                utils.filter_priority(self.log_filters['level'])

        if 'line_number' not in self.log_filters.keys():
            self.log_filters['line_number'] = 0
        self.line_number = self.log_filters['line_number']

        return 0
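
To show how the 0/1 return convention is meant to be consumed, a hypothetical caller is sketched below; the class name LogFilterTool and the construction pattern are illustrative assumptions, not taken from the source:

    # Hypothetical usage sketch; LogFilterTool is an illustrative name only.
    tool = LogFilterTool(log_filters={'in_file': 'app.log', 'level': 'INFO'})
    if tool.normalize_config() != 0:
        sys.exit(1)
    in_handle = tool.log_filters['in_file_handle']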
Example #14
from PIL import ImageGrab
import cv2
import numpy as np
import os.path
from utils import test_filename, training_filename, timer, time_now
from image_processing import process_image
filename = test_filename

if os.path.isfile(filename):
    training_data = list(np.load(filename))
else:
    training_data = []

timer(4)

last_time = time_now()
frame_count = 0
while True:
    frame = ImageGrab.grab(bbox=(0, 40, 800, 600))
    frame = np.array(frame)
    processed_frame = process_image(frame)

    cv2.imshow('image', processed_frame)
    # cv2.imshow only refreshes the window when waitKey pumps the GUI event loop
    key = cv2.waitKey(1) & 0xFF

    processed_frame = cv2.resize(processed_frame, (80, 64))
    training_data.append(processed_frame)

    frame_count += 1
    if frame_count == 20:
        avg_time_elapsed = (time_now() - last_time) / 20
        print('FPS: ' + str(int(1 / avg_time_elapsed)))
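
    # A minimal sketch, not taken from the original script: persist the collected
    # frames and exit when 'q' is pressed, reusing the same np.save/np.load
    # round-trip the script starts with; the hotkey choice is illustrative.
    if key == ord('q'):
        np.save(filename, np.array(training_data))
        cv2.destroyAllWindows()
        break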