Example #1
def installCertificates(session):
	if not os_exists(CERT_FILE) \
			or not os_exists(KEY_FILE):
		print("[Webinterface].installCertificates :: Generating SSL key pair and CACert")
		# create a key pair
		k = crypto.PKey()
		k.generate_key(crypto.TYPE_RSA, 1024)

		# create a self-signed cert
		cert = crypto.X509()
		cert.get_subject().C = "DE"
		cert.get_subject().ST = "Home"
		cert.get_subject().L = "Home"
		cert.get_subject().O = "Dreambox"
		cert.get_subject().OU = "STB"
		cert.get_subject().CN = socket_gethostname()
		cert.set_serial_number(random.randint(1000000, 1000000000))
		cert.set_notBefore(b"20120101000000Z")
		cert.set_notAfter(b"20301231235900Z")
		cert.set_issuer(cert.get_subject())
		cert.set_pubkey(k)
		print("[Webinterface].installCertificates :: Signing SSL key pair with new CACert")
		cert.sign(k, 'sha1')

		try:
			print("[Webinterface].installCertificates ::  Installing newly generated certificate and key pair")
			saveFile(CERT_FILE, crypto.dump_certificate(crypto.FILETYPE_PEM, cert))
			saveFile(KEY_FILE, crypto.dump_privatekey(crypto.FILETYPE_PEM, k))
		except IOError as e:
			#Disable https
			config.plugins.Webinterface.https.enabled.value = False
			config.plugins.Webinterface.https.enabled.save()
			#Inform the user
			session.open(MessageBox, "Couldn't install generated SSL certificates for HTTPS access\nHTTPS access is disabled!", MessageBox.TYPE_ERROR)
Example #2
def installCertificates(session):
	if not os_exists(CERT_FILE) \
			or not os_exists(KEY_FILE):
		print "[Webinterface].installCertificates :: Generating SSL key pair and CACert"
		# create a key pair
		k = crypto.PKey()
		k.generate_key(crypto.TYPE_RSA, 1024)

		# create a self-signed cert
		cert = crypto.X509()
		cert.get_subject().C = "DE"
		cert.get_subject().ST = "Home"
		cert.get_subject().L = "Home"
		cert.get_subject().O = "Dreambox"
		cert.get_subject().OU = "STB"
		cert.get_subject().CN = socket_gethostname()
		cert.set_serial_number(random.randint(1000000,1000000000))
		cert.set_notBefore("20120101000000Z")
		cert.set_notAfter("20301231235900Z")
		cert.set_issuer(cert.get_subject())
		cert.set_pubkey(k)
		print "[Webinterface].installCertificates :: Signing SSL key pair with new CACert"
		cert.sign(k, 'sha1')

		try:
			print "[Webinterface].installCertificates ::  Installing newly generated certificate and key pair"
			saveFile(CERT_FILE, crypto.dump_certificate(crypto.FILETYPE_PEM, cert))
			saveFile(KEY_FILE, crypto.dump_privatekey(crypto.FILETYPE_PEM, k))
		except IOError, e:
			#Disable https
			config.plugins.Webinterface.https.enabled.value = False
			config.plugins.Webinterface.https.enabled.save()
			#Inform the user
			session.open(MessageBox, "Couldn't install generated SSL certificates for HTTPS access\nHTTPS access is disabled!", MessageBox.TYPE_ERROR)
Example #3
 def check_config(self):
     if not os_exists(self.cfg_dir):
         try:
             mkdir(self.cfg_dir, 0o750)
         except Exception as e:
             print("Error: {}".format(str(e)))
         # the config file cannot exist yet in a freshly created directory
         return False
     return os_exists(self.cfg_file)
Example #4
    def test_save_calibrations():
        """
        Test save_calibrations.

        Loading calibs was already tested above, so can rely on it.

        CSV FORMAT
            - delimiter: ', '
            - no floats, round and convert to int using known precision
            - strings in double quotes

            e.g. "varname", var[0], var[1], ...\\n
        """
        fn_cp = calibsdir + '/camera_params.csv'
        if not os_exists(fn_cp):
            raise RuntimeError('\'%s\' could not be found' % fn_cp)
        # keep calibpath pointed away from tests/ (`load_calibrations` will change it)
        #   don't want to overwrite test data
        calib1 = CalibratePSEye()
        calib1.init_chessboard(p, o)
        try:
            # Load known calibs and then save
            cp = calib1.calibpath
            debug('%s' % calib1.calibpath)
            calib1.load_calibrations(fn_cp)
            calib1.calibpath = cp
            calib1.save_calibrations()
            if not os_exists(calib1.calibpath):
                raise RuntimeError('Failed to create calib path \'%s\''
                                   % calib1.calibpath)
            # Compare saving
            with open(fn_cp, 'r') as f:
                f1 = f.read()
            with open(cp+'/camera_params.csv', 'r') as f:
                f2 = f.read()
            if f1 != f2:
                raise RuntimeError('Camera parameter csvs did not match')

            # Compare loading
            calib2 = CalibratePSEye()
            calib2.load_calibrations(calib1.calibpath+'/camera_params.csv')
            paramlist = ('cameraMatrix', 'distCoeffs')
            for k in paramlist:
                k1 = getattr(calib1, k)
                k2 = getattr(calib2, k)
                if not array_equal(k1, k2):
                    raise RuntimeError(
                        'param \'%s\' does not match between calib1 and calib2' % k
                    )
                debug('\'%s\' matched' % k)
        finally:
            calib1.removepath()
Example #5
 def test_init_chessboard_str():
     """
     Test initializing based off of filepaths.
     """
     pstr = calibsdir + '/processing_params.csv'
     ostr = calibsdir + '/objp.csv'
     if not os_exists(pstr) or not os_exists(ostr):
         raise RuntimeError('Bad test files \'%s\' and \'%s\'' % (pstr, ostr))
     # Read String Files
     with open(ostr, 'r') as f:
         text = f.read()
         data = text.split(', ')
         shape = [int(v) for v in data[0].replace('\"', '').split('x')]
         objp = reshape([int(v) for v in data[1:]], shape).astype('float32')
     with open(pstr, 'r') as f:
         lines = f.read().splitlines()
         param_text = [line.split(', ') for line in lines]
         params = dict()
     for p in param_text:
         name = p[0].replace('\"', '')
         if name not in ('boardsize', 'zerozone', 'winsize', 'criteria'):
             continue
         if name == 'boardsize':
             params[name] = (int(p[1]), int(p[2]))
         elif name == 'zerozone':
             params[name] = (int(p[1]), int(p[2]))
         elif name == 'winsize':
             params[name] = (int(p[1]), int(p[2]))
         elif name == 'criteria':
             # first value should be single-digit number
             #   use that to determine decimal place shift
             powshift = -int(log10(int(p[1])))
             params[name] = (
                 int(int(p[1]) * 10**powshift),  # mode (int)
                  int(int(p[2]) * 10**powshift),  # max iteration count (int)
                 int(p[3]) * 10**powshift        # EPS termination (float)
             )
     # Test
     calib = CalibratePSEye()
     calib.init_chessboard(pstr, ostr)
     try:
         if calib.calibpath is None:
             raise RuntimeError('_calib_path wasn\'t created')
         for k in params.keys():
             if not getattr(calib, k) == params[k]:
                 raise RuntimeError('\'%s\' param was loaded incorrectly' % k)
             debug('\'%s\' param matched' % k)
         if not array_equal(calib.objp, objp):
             raise RuntimeError('Failed to load objp correctly')
     finally:
         calib.removepath()
Example #6
    def test_load_calib_imgs_paths():
        """
        Test load_calib_imgs path creation/checking.

        All `calib_imgs_paths` tests are basically the same
        """
        global p, o
        if not os_exists(testdir+'/raw'):
            raise RuntimeError('test \'raw\' directory could not be found')
        # Setup
        calib = CalibratePSEye()
        calib.init_chessboard(p, o)
        try:
            calib.load_calib_imgs(testdir+'/raw')

            # Make sure everything was created properly
            for sub in ('/raw', '/corners'):
                if not isdir(calib.calibpath + sub):
                    raise RuntimeError('path \'%s\' wasn\'t created'
                                       % (calib.calibpath + sub))

            # Make sure raw images were copied correctly
            for fn in listdir(testdir + '/raw'):
                f1 = calib.calibpath + '/raw/' + fn
                f2 = testdir + '/raw/' + fn
                g1 = cv_imread(f1)
                i2 = cv_imread(f2)
                g2 = cvtColor(cvtColor(i2, COLOR_RGB2GRAY), COLOR_GRAY2RGB)
                if not array_equal(g1, g2):
                    raise RuntimeError('frame \'%s\' did not match' % fn)
                debug('\'%s\' matched' % fn)
        finally:
            calib.removepath()
Example #7
    def load_dictionary(self, corpus, term_index, count_index, encoding=None):
        """Load multiple dictionary entries from a file of
        word/frequency count pairs. Merges with any dictionary data
        already loaded.

        **Args**:

        * corpus (str): The path+filename of the file.
        * term_index (int): The column position of the word.
        * count_index (int): The column position of the frequency\
            count.
        * encoding (str): Text encoding of the dictionary file

        **Returns**:
        True if file loaded, or False if file not found.
        """
        if not os_exists(corpus):
            return False
        with open(corpus, "r", encoding=encoding) as infile:
            for line in infile:
                line_parts = line.rstrip().split(" ")
                if len(line_parts) >= 2:
                    key = line_parts[term_index]
                    count = helpers.try_parse_int64(line_parts[count_index])
                    if count is not None:
                        self.create_dictionary_entry(key, count)
        return True
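
A minimal usage sketch for the loader above, assuming a SymSpell-style object and a hypothetical frequency file with one "word count" pair per line:

# Hypothetical usage; the object name, file name and column positions are assumptions.
speller = SymSpell()
loaded = speller.load_dictionary("frequency_dictionary_en.txt",
                                 term_index=0, count_index=1, encoding="utf-8")
if not loaded:
    print("dictionary file not found")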
Example #8
def setSettings(settings, alternative_settings_path=None):
    if alternative_settings_path is not None and os_exists(
            alternative_settings_path):
        with open(alternative_settings_path, 'r',
                  encoding="utf-8") as settings_file:
            settings.update(json_load(settings_file))

    counted = {}

    for key, value in settings.items():
        if not isinstance(value, str) or not value.startswith('$'):
            if isinstance(value, float) and value % 1 == 0:
                settings[key] = int(value)
            counted[key] = value

    while True:
        for key, value in settings.items():
            if key not in counted:
                try:
                    settings[key] = eval(value[1:], None, counted)
                except NameError:
                    continue
                if isinstance(settings[key], float) and settings[key] % 1 == 0:
                    settings[key] = int(settings[key])
                counted[key] = settings[key]
                break
        else:
            break
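
A small illustration of the resolution pass above: values prefixed with '$' are Python expressions that are evaluated against the keys already resolved (the setting names here are made up):

# Illustration only; key names are hypothetical.
settings = {
    "width": 640.0,               # whole float, coerced to int
    "height": "$width * 3 / 4",   # resolved on the first pass of the while loop
    "area": "$width * height",    # resolved once 'height' is known
}
setSettings(settings)
# settings is now {"width": 640, "height": 480, "area": 307200}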
Example #9
    def correct_and_save(self, imgpath):
        """
        Correct images and then saves them with the calibration timestamp.

        INPUTS
            imgpath -- str -- path to images to correct

        OUTPUTS
            undistorted_imgs -- uint8 N x H x W x BYTES

        NOTE
            Only corrects filenames ending in '.jpg'

        ALGORITHM
            Saves corrected images at the same level as the 'imgpath' directory,
            in a timestamped folder that references the calibration
            dataset/params used: `<YYMMDD-HHmmss>_undistorted/`

        EXCEPTIONS
            Raises RuntimeError If calibration path isn't set. Likely results
                from failing to load/compute calibration matrices.
        """
        if type(imgpath) != str:
            raise TypeError('imgpath must be str')
        if self.calibpath is None:
            raise RuntimeError(
                'Be sure to set self.calibpath (did you compute/load calibrations?)'
            )

        # Find images to correct
        potential_imgs = listdir(imgpath)
        fn_imgs = [f for f in potential_imgs
                   if f[-4:].lower() == '.jpg']  # select jpegs
        img_list = [cv_imread(imgpath + '/' + fn)
                    for fn in fn_imgs]  # read all jpegs

        # Make Save Directories
        savedir = realpath(imgpath + '/..')
        timestamp = self.get_timestamp()
        copyfile(self.calibpath + '/camera_params.csv',
                 savedir + '/' + timestamp + '_camera_params.csv')
        ud_path = savedir + '/' + timestamp + '_undistorted'
        if not isdir(ud_path):
            if os_exists(ud_path):
                os_remove(ud_path)
            mkdir(ud_path)

        # Correct & Save Frames
        ud = self.correct(img_list)
        for i in range(len(img_list)):
            fnud = ud_path + ('/f%s' % str(i + 1).zfill(5)) + '.jpg'
            cv_imwrite(fnud, ud[i, ...], (IMWRITE_JPEG_QUALITY, 100))

        # Return in case we want to use them later
        return ud
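
A hedged usage sketch for the method above; the calibration folder and image directory names are placeholders, not paths from the original project:

# Hypothetical usage; paths are placeholders.
calib = CalibratePSEye()
calib.load_calibrations('calibration_20200522-120000/camera_params.csv')
undistorted = calib.correct_and_save('session_01/raw')
# corrected JPEGs are written next to 'session_01/raw' in '<timestamp>_undistorted/'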
Example #10
    def test_load_calibrations_csv():
        """
        Test load_calibrations parsing CSV.

        WARNING
            Call `test_load_calibrations_str` before
            `test_load_calibrations_dict` and before
            `test_compute_calibrations` so that the dict can be loaded
            rather than manually inputted.

        CSV FORMAT
            - delimiter: ', '
            - no floats, round and convert to int using known precision
            - strings in double quotes

            e.g. "varname", var[0], var[1], ...\\n
        """
        testcalibpath = calibsdir + '/camera_params.csv'
        if not os_exists(testcalibpath):
            raise RuntimeError('Can\'t find \'%s\'' % testcalibpath)
        global cpdict  # global for use in other functions
        cpdict = {}

        # Load params from test
        with open(testcalibpath, 'r') as f:
            lines = f.read().splitlines()
        entries = [line.split(', ') for line in lines]
        for c in entries:
            name = c[0].replace('\"', '')
            if name not in ('cameraMatrix', 'distCoeffs', 'w', 'h'):
                warning(
                    'variable name: \'%s\' not valid calib name' % name
                )
                continue
            if name in ('w', 'h'):
                cpdict[name] = int(c[1])
                continue

            shape = [int(v) for v in c[1].replace('\"', '').split('x')]
            data = asarray([int(v) for v in c[2:]]) / 10**4
            if name == 'cameraMatrix':
                cpdict[name] = reshape(data, shape).astype('float64')
            elif name == 'distCoeffs':
                cpdict[name] = reshape(data, shape).astype('float64')
            else:
                raise RuntimeError('Unreachable state!?')

        # Test calib loading
        calib = CalibratePSEye()
        calib.load_calibrations(calibsdir + '/camera_params.csv')
        for k in cpdict.keys():
            if not array_equal(getattr(calib, k), cpdict[k]):
                raise RuntimeError('\'%s\' did not match' % k)
            debug('\'%s\' matched' % k)
Example #11
    def __init__(self, config_dct):
        super(ICJudge, self).__init__(config_dct)
        self._ic_lag = self._config_dct["ic_lag"]
        self._tdf = pd_DataFrame(
            data=None,
            columns=["ticker", "y_predict", "y_test", "date", "score"])
        ICJudge.TEST_COUNT += 1

        self._this_path = p_join(ICJudge.REPORT_PATH, str(ICJudge.TEST_COUNT))
        if not os_exists(self._this_path):
            os_makedirs(self._this_path)
Example #12
def parse_emails_in_folder(folder_path):
    parsed_emails_list = []
    parser = Parser()
    if os_exists(folder_path):
        for email_file_name in listdir(folder_path):
            filename = os_join(folder_path, email_file_name)
            if isdir(filename):
                continue
            with open(filename, "r") as raw_email_file:
                raw_email_text = raw_email_file.read()
                parsed_email = parser.parsestr(raw_email_text)
                parsed_emails_list.append(parsed_email)
    return parsed_emails_list
Example #13
 def _create_calib_path(self):
     """
     Create timestamped directory for storing new calibration data.
     """
     # time_str not member var b/c load_calibrations might have weird interaction
     time_str = strftime('%Y%m%d-%H%M%S', localtime())
     file_path = dirname(dirname(realpath(__file__)))
     self.calibpath = self.basepath + '/calibration_' + time_str
     if not isdir(self.calibpath):
         if os_exists(self.calibpath):
             os_remove(self.calibpath)
         mkdir(self.calibpath)
     logging.info('set up calibration directory at \'%s\'' % self.calibpath)
Example #14
def parse_emails_in_folder(folder_path):
    parsed_emails_list = []
    email_files = []
    parser = Parser()
    if os_exists(folder_path):
        for email_file_name in listdir(folder_path):
            filename = os_join(folder_path, email_file_name)
            if isdir(filename):
                continue
            with codecs.open(filename, "r", encoding='utf-8',
                             errors='ignore') as raw_email_file:
                raw_email_text = raw_email_file.read()
                email_files.append(filename)
                raw_email_text = removing_special_words(raw_email_text)
                parsed_email = parser.parsestr(raw_email_text)
                parsed_emails_list.append(parsed_email)
    return parsed_emails_list, email_files
Example #15
    def create_dictionary(self, corpus, encoding=None):
        """Load multiple dictionary words from a file containing plain
        text. Merges with any dictionary data already loaded.

        **Args**:

        * corpus (str): The path+filename of the file.
        * encoding (str): Text encoding of the corpus file

        **Returns**:
        True if file loaded, or False if file not found.
        """
        if not os_exists(corpus):
            return False
        with open(corpus, "r", encoding=encoding) as infile:
            for line in infile:
                for key in self._parse_words(line):
                    self.create_dictionary_entry(key, 1)
        return True
Example #16
    def _prepare(self):

        self.dem_coord_sys = get_ras_props(self.raw_dem_path)[8]
        self.gage_shp_coord_sys = get_vec_props(self.gage_shp_path, 0)[4]

        assert 'PROJCS' in self.dem_coord_sys, (
            'DEM coordinate system not projected!')

        assert 'PROJCS' in self.gage_shp_coord_sys, (
            'Gage shapefile coordinate system not projected!')

        print('\n', 'Raw DEM coord. sys.:\n', self.dem_coord_sys, sep='')

        print('\n\n',
              'Gage shp. coord. sys.:\n',
              self.gage_shp_coord_sys,
              '\n\n',
              sep='')

        try:
            in_shp_vec = ogr.Open(self.gage_shp_path)
            in_shp_lyr = in_shp_vec.GetLayer(0)
            in_lyr_defn = in_shp_lyr.GetLayerDefn()
            in_field_cnt = in_lyr_defn.GetFieldCount()
            id_in_fields = False

            for i in range(in_field_cnt):
                if str(in_lyr_defn.GetFieldDefn(i).name) == str('id'):
                    id_in_fields = True
                    break

            assert id_in_fields

        except AssertionError:
            raise AssertionError('Field \'id\' does not exist in gage_shp!')

        finally:
            in_shp_vec.Destroy()

        if not os_exists(self.outputs_dir):
            mkdir(self.outputs_dir)
        return
Example #17
    def __call__(self):

        self._prepare()

        out_shps = [self.gage_shp_moved, self.dem_net, self.watersheds_shp]

        for shp in out_shps:
            if not os_exists(shp):
                continue

            in_vec = ogr.Open(shp)
            out_driver = in_vec.GetDriver()
            in_vec.Destroy()
            out_driver.DeleteDataSource(shp)

        self._grid = Grid.from_raster(self.raw_dem_path, data_name='dem')

        ftns = [
            self._deps,
            self._flats,
            self._fdrs,
            self._slopes,
            self._facs,
            self._threshs,
            self._snaps,
            self._streams_net,
            # 			self._streams,
        ]

        for ftn in ftns:
            beg_time = timeit.default_timer()

            ftn()

            end_time = timeit.default_timer()

            if self.verbose:
                print(f'Took {end_time - beg_time:0.2f} seconds.\n')

        return
Example #18
def build_vocab(filepath, vocabpath, tokenizer, min_freq=5, max_size=50000):
    '''
    filepath: str, corpus data path, with label, separated by \t
    vocabpath: str, vocabulary path, pkl format
    tokenizer: callable, used to split text into words
    min_freq: int, minimum word frequency
    max_size: int, maximum vocabulary size
    return: vocab_dict: dict, {word: index} format
    '''
    if os_exists(vocabpath):
        with open(vocabpath, "rb") as f:
            vocab_dict = pkl.load(f)
        return vocab_dict
    vocab_dict = {}
    with open(filepath, "r", encoding="utf8") as f:
        for line in tqdm(f):
            if not line:
                continue

            content = line.split("\t")[0]
            for word in tokenizer(content):
                vocab_dict[word] = vocab_dict.get(word, 0) + 1
        sorted_vocab_lst = sorted(
            [_ for _ in vocab_dict.items() if _[1] > min_freq],
            key=lambda x: x[1],
            reverse=True)
        if len(sorted_vocab_lst) >= max_size:
            sorted_vocab_lst = sorted_vocab_lst[:max_size]
        vocab_dict = {
            vocab_word[0]: idx
            for idx, vocab_word in enumerate(sorted_vocab_lst)
        }
        vocab_dict.update({
            public_config.UNK: len(vocab_dict),
            public_config.PAD: len(vocab_dict) + 1
        })

    with open(vocabpath, "wb") as f:
        pkl.dump(vocab_dict, f)
    return vocab_dict
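
A minimal usage sketch for build_vocab(); the file paths and the character-level tokenizer are assumptions:

# Hypothetical usage; paths and tokenizer are placeholders.
def char_tokenizer(text):
    return list(text)  # split the text field into single characters

vocab = build_vocab("data/train.txt", "data/vocab.pkl",
                    tokenizer=char_tokenizer, min_freq=5, max_size=50000)
print("vocabulary size:", len(vocab))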
Example #19
    def __init__(self, raw_dem_path, gage_shp_path, outputs_dir, n_cpus=1):

        self.raw_dem_path = abspath(raw_dem_path)
        self.gage_shp_path = abspath(gage_shp_path)
        self.outputs_dir = abspath(outputs_dir)
        self.n_cpus = n_cpus

        assert n_cpus > 0

        if not os_exists(self.raw_dem_path):
            raise IOError('raw_dem file does not exist at the given location!')

        if not os_exists(self.gage_shp_path):
            raise IOError(
                'gage_shp (%s) file does not exist at the given location!' %
                (self.gage_shp_path))

        self.fil = os_join(self.outputs_dir, 'fil.tif')
        self.fdr = os_join(self.outputs_dir, 'fdr.tif')
        self.sd8 = os_join(self.outputs_dir, 'sd8.tif')
        self.fac = os_join(self.outputs_dir, 'fac.tif')
        self.strm = os_join(self.outputs_dir, 'strm.tif')
        self.strm_dist = os_join(self.outputs_dir, 'strm_dist.tif')

        crnt_time = datetime.now().strftime('%Y%m%d%H%M%S')
        self.log_file = os_join(self.outputs_dir, f'log_file_{crnt_time}.txt')

        self.watersheds = os_join(self.outputs_dir, 'watersheds.tif')
        self.watersheds_all = os_join(self.outputs_dir, 'watersheds_all.tif')
        self.watersheds_shp = os_join(self.outputs_dir, 'watersheds.shp')
        self.watersheds_ids = os_join(self.outputs_dir, 'watersheds_id.txt')

        _ = basename(self.gage_shp_path).rsplit('.', 1)[0]
        self.gage_shp_moved = os_join(self.outputs_dir, _ + '_moved.shp')

        self.dem_ord = os_join(self.outputs_dir, 'dem_ord.tif')
        self.dem_tree = os_join(self.outputs_dir, 'dem_tree.dat')
        self.dem_coords = os_join(self.outputs_dir, 'dem_coords.dat')
        self.dem_net = os_join(self.outputs_dir, 'dem_net.shp')

        self.watersheds_flag = True
        self.area_flag = False
        self.strm_dists_flag = False
        self.verbose = True

        self.run_type = 'before'  # can be 'before' or 'after'
        self.strm_orign_thresh = 1000
        self.max_cell_move = 15  # grid cells

        _bitness = architecture()[0]
        if _bitness == '32bit':
            raise NotImplementedError('To be downloaded!')
            self.exes_dir = os_join(dirname(abspath(__file__)),
                                    'TauDEM537exeWin32')

        elif _bitness == '64bit':
            self.exes_dir = os_join(dirname(abspath(__file__)),
                                    'TauDEM535exeWin64')

        else:
            raise RuntimeError('Could not get the bitness of the system!')

        self.polygonize_file = os_join(dirname(abspath(__file__)),
                                       'gdal_polygonize.py')

        self.fil_exe = os_join(self.exes_dir, 'PitRemove')
        self.fdr_exe = os_join(self.exes_dir, 'D8FlowDir')
        self.fac_exe = os_join(self.exes_dir, 'AreaD8')
        self.thresh_exe = os_join(self.exes_dir, 'Threshold')
        self.snap_pp_exe = os_join(self.exes_dir, 'MoveOutletsToStreams')
        self.strm_net_exe = os_join(self.exes_dir, 'StreamNet')
        self.gage_watershed_exe = os_join(self.exes_dir, 'GageWatershed')
        self.strm_dist_exe = os_join(self.exes_dir, 'D8HDistToStrm')
        return
Example #20
    def __call__(self):

        self._prepare()

        fil_cmd = '"%s" -z "%s" -fel "%s"' % (self.fil_exe, self.raw_dem_path,
                                              self.fil)

        fdr_cmd = '"%s" -fel "%s" -p "%s" -sd8 "%s"' % (self.fdr_exe, self.fil,
                                                        self.fdr, self.sd8)

        if self.area_flag:
            # if gage_shp is the original one, then stream net might be wrong.
            # Run once without the area_flag. Rename gage_shp_moved to
            # gage_shp_path
            fac_cmd = '"%s" -p "%s" -ad8 "%s" -o "%s"' % (
                self.fac_exe, self.fdr, self.fac, self.gage_shp_path)

        else:
            fac_cmd = '"%s" -p "%s" -ad8 "%s"' % (self.fac_exe, self.fdr,
                                                  self.fac)

        thresh_cmd = (
            '"%s" -ssa "%s" -src "%s" -thresh %d' %
            (self.thresh_exe, self.fac, self.strm, self.strm_orign_thresh))

        snap_pp_cmd = (
            '"%s" -p "%s" -src "%s" -o "%s" -om "%s" -md %d' %
            (self.snap_pp_exe, self.fdr, self.strm, self.gage_shp_path,
             self.gage_shp_moved, self.max_cell_move))

        # The -sw flag seems to have been responsible for the incorrect
        # catchment ids.
        # TODO: Fix dem_net layer name.
        streamnet_cmd = (
            ('"%s" -fel "%s" -p "%s" -ad8 "%s" -src "%s" -ord "%s" '
             '-tree "%s" -coord "%s" -net "%s" -netlyr dem_net -w "%s" '
             '-o "%s"') %
            (self.strm_net_exe, self.fil, self.fdr, self.fac, self.strm,
             self.dem_ord, self.dem_tree, self.dem_coords, self.dem_net,
             self.watersheds_all, self.gage_shp_moved))

        if self.n_cpus > 1:
            fil_cmd = 'mpiexec -n %d %s' % (self.n_cpus, fil_cmd)
            fdr_cmd = 'mpiexec -n %d %s' % (self.n_cpus, fdr_cmd)
            fac_cmd = 'mpiexec -n %d %s' % (self.n_cpus, fac_cmd)
            thresh_cmd = 'mpiexec -n %d %s' % (self.n_cpus, thresh_cmd)
            snap_pp_cmd = 'mpiexec -n %d %s' % (self.n_cpus, snap_pp_cmd)
            streamnet_cmd = 'mpiexec -n %d %s' % (self.n_cpus, streamnet_cmd)

        out_shps = [self.gage_shp_moved, self.dem_net, self.watersheds_shp]

        for shp in out_shps:
            if not os_exists(shp):
                continue

            in_vec = ogr.Open(shp)
            out_driver = in_vec.GetDriver()
            in_vec.Destroy()
            out_driver.DeleteDataSource(shp)

        if self.run_type == 'before':
            cmd_list = [
                fil_cmd, fdr_cmd, fac_cmd, thresh_cmd, snap_pp_cmd,
                streamnet_cmd
            ]

        elif self.run_type == 'after':
            cmd_list = [fac_cmd, thresh_cmd, snap_pp_cmd, streamnet_cmd]

            assert os_exists(self.fil), '%s does not exist!' % self.fil
            assert os_exists(self.fdr), '%s does not exist!' % self.fdr
            assert os_exists(self.sd8), '%s does not exist!' % self.sd8

        else:
            raise NameError('\'RUN_TYPE\' can only be \'before\' or \'after\'!')

        if self.watersheds_flag:
            gage_watershed_cmd = (
                '"%s" -p "%s" -o "%s" -gw "%s" -id "%s"' %
                (self.gage_watershed_exe, self.fdr, self.gage_shp_moved,
                 self.watersheds, self.watersheds_ids))

            if self.n_cpus > 1:
                gage_watershed_cmd = ('mpiexec -n %d %s' %
                                      (self.n_cpus, gage_watershed_cmd))

            cmd_list.append(gage_watershed_cmd)

        if self.verbose:
            for cmd in cmd_list:
                print('\nExecuting: %s' % cmd)
                proc = subprocess.Popen(cmd, shell=False, env=subproc_env)
                proc.wait()

        else:
            log_file_cur = open(self.log_file, 'w')
            for cmd in cmd_list:
                # print activities to LOG_FILE
                print('\nExecuting: %s' % cmd)
                proc = subprocess.Popen(cmd,
                                        shell=False,
                                        stdout=log_file_cur,
                                        stderr=log_file_cur,
                                        env=subproc_env)

                proc.wait()

            log_file_cur.close()

        if self.watersheds_flag:
            assert os_exists(
                self.watersheds), ('watersheds file does not exist!')

            temp_shp = dirname(self.watersheds_shp)
            temp_shp = os_join(temp_shp, 'temp_')
            temp_shp += basename(self.watersheds_shp)

            fmt = 'ESRI Shapefile'
            cmd = 'python "%s" -8 "%s" -f "%s" "%s"' % (
                self.polygonize_file, self.watersheds, fmt, temp_shp)

            print('\nExecuting: %s' % cmd)

            if self.verbose:
                proc = subprocess.Popen(cmd, shell=False, env=subproc_env)
                proc.wait()

            else:
                log_file_cur = open(self.log_file, 'a')
                proc = subprocess.Popen(cmd,
                                        shell=False,
                                        stdout=log_file_cur,
                                        stderr=log_file_cur,
                                        env=subproc_env)

                proc.wait()
                log_file_cur.close()

            merge_same_id_shp_poly(temp_shp, self.watersheds_shp, field='DN')

            driver = ogr.GetDriverByName(fmt)
            if os_exists(temp_shp):
                driver.DeleteDataSource(temp_shp)

        if self.strm_dists_flag:

            # Distance to closest stream.
            # 			strm_dist_cmd = (
            # 				'"%s" -p "%s" -src "%s" -dist "%s" -thresh "%d"' % (
            # 					self.strm_dist_exe,
            # 					self.fdr,
            # 					self.fac,
            # 					self.strm_dist,
            # 					self.strm_orign_thresh))

            # Distance to the main outlet.
            in_ds = gdal.Open(self.fac)

            if in_ds is None:
                raise RuntimeError('Could not open %s for reading!' % self.fac)

            in_band = in_ds.GetRasterBand(1)
            fac_arr = np.array(in_band.ReadAsArray())

            max_fac = np.nanmax(fac_arr)

            in_ds = None
            fac_arr = None

            strm_dist_cmd = ('"%s" -p "%s" -src "%s" -dist "%s" -thresh "%d"' %
                             (self.strm_dist_exe, self.fdr, self.fac,
                              self.strm_dist, max_fac))

            if self.n_cpus > 1:
                strm_dist_cmd = ('mpiexec -n %d %s' %
                                 (self.n_cpus, strm_dist_cmd))

            print('\nExecuting: %s' % strm_dist_cmd)

            if self.verbose:
                proc = subprocess.Popen(strm_dist_cmd,
                                        shell=False,
                                        env=subproc_env)

                proc.wait()

            else:
                log_file_cur = open(self.log_file, 'a')
                proc = subprocess.Popen(strm_dist_cmd,
                                        shell=False,
                                        stdout=log_file_cur,
                                        stderr=log_file_cur,
                                        env=subproc_env)

                proc.wait()
                log_file_cur.close()
        return
Example #21
    def run(self, config_dct):
        t_md5 = md5()
        data_info_dct = {
            "feature_generators":
            config_dct["feature_generators"],
            "iterator__predict_period":
            config_dct["iterator"]["config"]["predict_period"]
        }
        t_md5.update(str(data_info_dct).encode("utf-8"))
        cache_file = p_join(ModelSelector.CACHE_FOLDER,
                            t_md5.hexdigest() + ".hdf")

        # 1. read cache or generate df from raw_df
        if os_exists(cache_file):
            save_cache = False
            print("Using cached DataFrame...")
            raw_df = read_hdf(cache_file)

        else:
            save_cache = True
            print("Generating feature DataFrame...")
            raw_df = read_hdf(
                p_join(ModelSelector.DATA_FOLDER,
                       config_dct["others"]["raw_data_file"]))
            fg_dct = config_dct["feature_generators"]

            # generate customized features
            for fg in tqdm(fg_dct.values()):
                this_fg = eval(fg["type"])(config_dct=fg["config"])
                raw_df = this_fg.generate_all(raw_df)

            raw_df.to_hdf(cache_file, "data")

        # 2. get iterator of data, create training target
        dt_dct = config_dct["iterator"]
        #data_iter = eval(dt_dct["type"])(raw_df, config_dct=dt_dct["config"], generate_target=save_cache)
        data_iter = eval(dt_dct["type"])(raw_df,
                                         config_dct=dt_dct["config"],
                                         generate_target=True)

        if save_cache:
            data_iter.get_data_df_with_y().to_hdf(cache_file, "data")

        # 3. get judge and learning algorithms; train, predict and evaluate
        jg_dct = config_dct["judge"]

        judge = eval(jg_dct["type"])(config_dct=jg_dct["config"])

        in_queue = MPModelSelector.Queue(maxsize=self._q_size)
        out_queue = MPModelSelector.Queue(maxsize=self._q_size)
        close_event = MPModelSelector.Event()
        # cur_event_lst = [MPModelSelector.Event() for _ in range(self._p_count)]
        p_lst = [
            MPModelSelector.Process(target=MPModelSelector._work_func,
                                    args=(in_queue, out_queue, close_event))
            for i in range(self._p_count)
        ]
        [p.start() for p in p_lst]

        lm_dct = config_dct["learning_model"]
        arg_lst = ((idx, *ctt, lm_dct, idx in LEARNING_CURVE_LST)
                   for idx, ctt in enumerate(data_iter))

        total_len = 0
        last_start = MPModelSelector.time()
        for arg in tqdm(arg_lst):
            try:
                obj = out_queue.get_nowait()
                if isinstance(obj, ValueError):
                    total_len -= 1
                else:
                    idx, y_predict, y_test, tkr_name, curve_lst = obj
                    judge.add_score(y_predict, y_test, tkr_name, idx,
                                    curve_lst)
                    total_len -= 1
                """
                idx, y_predict, y_test, tkr_name, curve_lst = obj
                judge.add_score(y_predict, y_test, tkr_name, idx, curve_lst)
                total_len -= 1
                """
            except MPModelSelector.mpq.Empty:
                pass

            while 1:
                try:
                    in_queue.put_nowait(arg)
                    last_start = MPModelSelector.time()
                    total_len += 1
                    break
                except MPModelSelector.mpq.Full:
                    # if MPModelSelector.reduce(lambda x, y: x or y.is_set(), cur_event_lst):
                    #    last_start = MPModelSelector.time()
                    # else:
                    if MPModelSelector.time(
                    ) - last_start > SINGLE_FIT_MAX_TIME:
                        [p.terminate() for p in p_lst]
                        print("Too slow, terminate: ", config_dct)
                        return -MPModelSelector.np_inf

        close_event.set()

        while total_len > 0:
            try:
                obj = out_queue.get_nowait()

                if isinstance(obj, ValueError):
                    total_len -= 1
                else:
                    idx, y_predict, y_test, tkr_name, curve_lst = obj
                    judge.add_score(y_predict, y_test, tkr_name, idx,
                                    curve_lst)
                    total_len -= 1
                """
                idx, y_predict, y_test, tkr_name, curve_lst = obj
                judge.add_score(y_predict, y_test, tkr_name, idx, curve_lst)
                total_len -= 1
                """
            except MPModelSelector.mpq.Empty:
                pass

        [p.join() for p in p_lst]

        judge.save_result(config_dct)
        return judge.get_result()
Example #22
def run(args):
    # load config
    conf = util.load_conf(args.config)

    # configure variables
    global stagedir, tempdir
    tempdir = Path(conf["temp_dir"])
    stagedir = Path(conf["stage_dir"])
    PICKLE_FILE = conf["youtube_pickle_path"]
    CLIENT_SECRET_FILE = conf["youtube_client_path"]
    API_NAME = 'youtube'
    API_VERSION = 'v3'
    SCOPES = [  # Only force-ssl is required, but both makes it explicit.
        "https://www.googleapis.com/auth/youtube.upload",  # Videos.insert
        "https://www.googleapis.com/auth/youtube.force-ssl"  # Captions.insert
    ]

    # handle logout
    if args.id == "logout":
        try:
            os_remove(PICKLE_FILE)
            cprint("#dLogged out of Google API session#r")
        except:
            util.exit_prog(
                11, "Failed to remove credentials for YouTube account.")

        return

    # load stages, but don't upload
    # Handle id/all
    stagedata = None
    stagedatas = None
    if args.id == "all":
        cprint("#dLoading stages...", end=" ")
        # create a list of all the hashes and sort by date streamed, upload chronologically
        stagedatas = StageData.load_all_stages(stagedir)
        stagedatas.sort(key=sort_stagedata)
    else:
        cprint("#dLoading stage...", end=" ")
        # check if stage exists, and prep it for upload
        stagedata = StageData.load_from_id(stagedir, args.id)
        cprint(f"About to upload stage {stagedata.id}.#r")

    # authenticate youtube service
    if not os_exists(CLIENT_SECRET_FILE):
        util.exit_prog(19, "Missing YouTube Client ID/Secret file.")

    cprint("Authenticating with Google...", end=" ")

    service = None
    credentials = None

    if os_exists(PICKLE_FILE):
        with open(PICKLE_FILE, "rb") as f:
            credentials = pickle.load(f)

    if not credentials or credentials.expired:
        try:
            if credentials and credentials.expired and credentials.refresh_token:
                credentials.refresh(Request())
            else:
                flow = InstalledAppFlow.from_client_secrets_file(
                    CLIENT_SECRET_FILE, SCOPES)
                credentials = flow.run_console()
        except RefreshError:
            flow = InstalledAppFlow.from_client_secrets_file(
                CLIENT_SECRET_FILE, SCOPES)
            credentials = flow.run_console()

        with open(PICKLE_FILE, "wb") as f:
            pickle.dump(credentials, f)

    try:
        service = build(API_NAME, API_VERSION, credentials=credentials)
    except Exception as err:
        util.exit_prog(50, f"Failed to connect to YouTube API, \"{err}\"")

    cprint("Authenticated.", end=" ")

    # Handle id/all
    if args.id == "all":
        # begin to upload
        cprint(f"About to upload {len(stagedatas)} stages.#r")
        for stage in stagedatas:
            video_id = upload_video(conf, service, stage)
            if video_id is not None:
                chat_success = True
                if conf["chat_upload"]:
                    chat_success = upload_captions(conf, service, stage,
                                                   video_id)

                if conf["stage_upload_delete"] and chat_success:
                    try:
                        os_remove(str(stagedir / f"{stage.id}.stage"))
                    except:
                        util.exit_prog(
                            90,
                            f"Failed to remove stage `{stage.id}` after upload."
                        )
            print()
    else:
        # upload stage
        cprint(f"About to upload stage {stagedata.id}.#r")
        video_id = upload_video(conf, service, stagedata)
        if video_id is not None:
            chat_success = True
            if conf["chat_upload"]:
                chat_success = upload_captions(conf, service, stagedata,
                                               video_id)

            if conf["stage_upload_delete"] and chat_success:
                try:
                    os_remove(str(stagedir / f"{stagedata.id}.stage"))
                except:
                    util.exit_prog(
                        90,
                        f"Failed to remove stage `{stagedata.id}` after upload."
                    )
Example #23
def ensure_dir(file_path):
	if '/' in file_path:
		directory = os_dirname(file_path)
		if not os_exists(directory):
			os_makedirs(directory)
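
Usage sketch (hypothetical path): the directory part is only created when the argument actually contains a '/':

# Hypothetical usage; creates 'logs/run1/' if it does not exist yet.
ensure_dir('logs/run1/output.txt')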
Example #24
 def exists(self):
     return os_exists(self.filename)
Example #25
    def load_calib_imgs(self, img_path, clean=False):
        """
        Load calibration JPGs from directory & get chessboard. Be sure to
        call init_chessboard() first.

        INPUTS
            img_path -- str -- path to calibration jpegs
            (optional)
            clean -- bool -- Whether or not to go through sanitization; False

        EXCEPTIONS
            raises RuntimeError: when chessboard hasn't been properly
                initialized by constructor or `init_chessboard`.

        ALGORITHM
            Finds all files ending in '.jpg' and loads them. Objp should have
            been handled in init_chessboard()
        """
        if type(img_path) != str:
            raise TypeError('img_path must be str')
        if not (type(clean) == bool or clean in (0, 1)):
            raise TypeError('clean must be bool')
        if self.img_arr is not None or self.calibpath is None:
            raise RuntimeError('Did you call init_chessboard() first?')

        # Find calibration images and process
        potential_files = listdir(img_path)
        fn_imgs = [
            img_path + '/' + f for f in potential_files
            if f[-4:].lower() == '.jpg'
        ]
        imageshape = cv_imread(fn_imgs[0]).shape
        self.h = imageshape[0]
        self.w = imageshape[1]

        # Save images in calib path
        rawpath = self.calibpath + '/raw'  # copy to current calibration dir
        if rawpath != img_path:
            if not isdir(rawpath):
                if os_exists(rawpath):
                    os_remove(rawpath)
                mkdir(rawpath)
            for f in fn_imgs:
                copyfile(f, rawpath + '/' + basename(f))
        # corners frames for debugging
        cpath = self.calibpath + '/corners'  # save drawn corners for debug
        mkdir(cpath)
        logging.info('saving raw frames to \'%s\'' % rawpath)
        logging.info('saving corners frames to \'%s\'' % cpath)

        # Load images
        self.img_arr = zeros((self.h, self.w, 1, len(fn_imgs)), uint8)
        for i in range(len(fn_imgs)):
            f = fn_imgs[i]
            if imageshape[-1] == 3:
                self.img_arr[..., i] = cvtColor(cv_imread(f),
                                                COLOR_RGB2GRAY)[..., newaxis]
            else:
                self.img_arr[..., i] = cv_imread(f)

        # Chessboard computations
        logging.debug('finding chessboards...')
        for i in range(self.img_arr.shape[-1]):
            gray = self.img_arr[..., i].copy()
            corners = self._find_chessboard(gray)
            if corners is None:
                logging.error('Failed to find chessboard at frame \'%s\'' %
                              str(i + 1).zfill(5))
                continue
            self.corners_arr.append(corners)
            self.objpoints.append(self.objp)  # 3d position (same for all?)

            # save chessboard images for debugging
            #   cvt to rgb for color chessboard
            fn_c = cpath + ('/f%s' % str(i + 1).zfill(5)) + '.jpg'
            gray_color = cvtColor(gray, COLOR_GRAY2RGB)
            img_corners = drawChessboardCorners(gray_color, self.boardsize,
                                                corners, 1)
            cv_imwrite(fn_c, img_corners, (IMWRITE_JPEG_QUALITY, 100))

        # Go through chessboards to make sure okay
        if clean:
            basepath = dirname(cpath)
            self.clean_calib_imgs(basepath=basepath)
        logging.debug('load_calib_imgs() done!')
Example #26
    def test_compute_calibrations():
        """
        Test computation to the 4th decimal place on known calibration data.
        """
        # Test Asserts
        calib = CalibratePSEye()
        calib.corners_arr = [1, 2]
        calib.objpoints = [1, 2]
        calib.w = 320
        calib.h = 240

        calib.corners_arr = []
        try:
            calib.compute_calibrations()
        except RuntimeError:
            pass
        else:
            raise RuntimeError('Failed to catch len(corners_arr)==0')

        calib.corners_arr = [1, 2]
        calib.objpoints = []
        try:
            calib.compute_calibrations()
        except RuntimeError:
            pass
        else:
            raise RuntimeError('Failed to catch len(objpoints)==0')

        calib.objpoints = [1, 2]
        calib.w = None
        try:
            calib.compute_calibrations()
        except RuntimeError:
            pass
        else:
            raise RuntimeError('Failed to catch _w is None')

        calib.h = None
        calib.w = 320
        try:
            calib.compute_calibrations()
        except RuntimeError:
            pass
        else:
            raise RuntimeError('Failed to catch h is None')

        # Test Math
        global cpdict
        imgpath = testdir + '/raw'
        if not os_exists(imgpath):
            raise RuntimeError('Could not find imgpath')
        calib = CalibratePSEye()
        calib.init_chessboard(p, o)
        calib.load_calib_imgs(imgpath)
        try:
            calib.compute_calibrations()
            calib.save_calibrations()
            for k in cpdict.keys():
                k1 = cpdict[k]  # already rounded b/c loaded from file
                k2 = around(getattr(calib, k), decimals=4)
                if not array_equal(k1, k2):
                    raise RuntimeError('\'%s\' did not match' % k)
                debug('\'%s\' matched' % k)
                # print(getattr(calib, k))
        finally:
            calib.removepath()
Example #27
    def run(self, silent=True):
        if silent is False:
            print("Checking...")
        _checking_ok = False
        if not self.check_config():
            self.new_config()
        if self.load_config():
            if self.syntax_config():
                _checking_ok = True
        if not _checking_ok:
            if silent is False:
                print("Error: Syntax")
            self.stop_app()
        else:
            if silent is False:
                print("Syntax: OK")
                print("Database: Checking... ")

            db.init_db(self.config['database'])
            if silent is False:
                print("Database: OK")

            if not db.session.query(exists().where(db.TimerTable.name == unicode("fav"))).scalar():
                t = db.TimerTable()
                t.name = unicode("fav")
                t.time = time()
                t.limit = 75
                t.count = t.limit
                db.session.add(t)
                db.session.commit()
                self.timer_fav['time'] = t.time
                self.timer_fav['count'] = t.count
                self.timer_fav['limit'] = t.limit
            else:
                row = db.session.query(db.TimerTable).filter(db.TimerTable.name == unicode("fav")).first()
                self.timer_fav['time'] = row.time
                self.timer_fav['count'] = row.count
                self.timer_fav['limit'] = row.limit

            if not db.session.query(exists().where(db.TimerTable.name == unicode("fri"))).scalar():
                t = db.TimerTable()
                t.name = unicode("fri")
                t.time = time()
                t.limit = 15
                t.count = t.limit
                db.session.add(t)
                db.session.commit()
                self.timer_fri['time'] = t.time
                self.timer_fri['count'] = t.count
                self.timer_fri['limit'] = t.limit
            else:
                row = db.session.query(db.TimerTable).filter(db.TimerTable.name == unicode("fri")).first()
                self.timer_fri['time'] = row.time
                self.timer_fri['count'] = row.count
                self.timer_fri['limit'] = row.limit

            if silent is False:
                print("Timers: OK")

            if not os_exists(self.config['download_path']):
                mkdir(self.config['download_path'])
            if silent is False:
                print("Download: OK")
Example #28
def writable(dir_nm, fl_nm):
    if not os_exists(dir_nm):
        os_makedir(dir_nm)
    return os_join(dir_nm, fl_nm)
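
Usage sketch (hypothetical names): the directory is created on first use and the joined path is returned:

# Hypothetical usage; 'results/' is created if missing, then the full file path is returned.
report_path = writable('results', 'report.csv')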
Example #29
    def record_calib_imgs(self, **kwargs):
        """
        Provides photobooth-esque countdown interface. Saves frames to calib
        path in subdirectories `raw/` and `corners/`. Be sure to
        initialize the chessboard first.

        INPUTS
            (optional)
            cam       -- str/int -- camera descriptor for VideoCapture; '/dev/psEye'
            nframes   -- int     -- number of frames to record for calibration; 15
            w         -- int     -- width (px) to set camera frame; 320
            h         -- int     -- height (px) to set camera frame; 240
            fps       -- int     -- frames per second to set camera; 100
            countdown -- int     -- seconds to countdown before recording frame; 3

        EXCEPTIONS
            raises RuntimeError when chessboard hasn't been properly initialized
                by constructor or `init_chessboard`.
        """
        cam = kwargs.get('cam', '/dev/psEye')
        nframes = kwargs.get('nframes', 15)
        w = kwargs.get('w', 320)
        h = kwargs.get('h', 240)
        countdown = kwargs.get('countdown', 3)
        fps = kwargs.get('fps', 100)
        if type(nframes) != int:
            raise TypeError('nframes must be integer')
        if type(countdown) != int:
            raise TypeError('countdown must be integer')
        if self.img_arr is not None or self.calibpath is None:
            raise RuntimeError('Did you call init_chessboard() first?')
        cap = open_camera(cam, w, h, fps)  # this handles asserts
        self.w = int(cap.get(CAP_PROP_FRAME_WIDTH))
        self.h = int(cap.get(CAP_PROP_FRAME_HEIGHT))
        self.img_arr = zeros((self.h, self.w, 1, nframes),
                             dtype=uint8)  # raw frames
        clist = []  # corners frames
        self.corners_arr = []
        self.objpoints = []

        # Recording
        sc = 0  # "sample count"
        timer_ref = now()
        timer = lambda: 1 + int(countdown + timer_ref - now()
                                )  # countDOWN 3,2,1
        try:  # try/except to make sure camera device gets released
            img = zeros(self.img_arr.shape[:-1])  # for immediate cv_imshow
            while sc < nframes:
                # Display at top so can always exit
                cv_imshow('capture', img)
                press = waitKey(20)
                if press in (113, 81, 27):  # q, Q, esc:
                    logging.debug('quitting record')
                    break

                # Find chessboard, if possible
                ret, raw = cap.read()
                if not ret:
                    logging.error('Failed to access frame')
                    timer_ref = now()  # reset timer when things go wrong
                    continue
                gray = cvtColor(raw, COLOR_RGB2GRAY)
                corners = self._find_chessboard(gray)

                # Compute visual feedback
                if corners is None:  # alert to unfindable chessboard
                    img = raw.copy()
                    cv_putText(img,
                               'NO CHESSBOARD', (5, 15),
                               1,
                               1, (0, 0, 255),
                               thickness=2)
                    timer_ref = now(
                    )  # reset timer when chessboard isn't viable
                else:  # show countdown and progess
                    board1 = drawChessboardCorners(raw, self.boardsize,
                                                   corners, ret)
                    img = board1.copy()
                    cv_putText(img,
                               'T-%ds' % timer(), (5, 15),
                               1,
                               1, (0, 0, 255),
                               thickness=2)
                cv_putText(img,
                           '%d/%d' % (sc + 1, nframes), (5, 30),
                           1,
                           1, (0, 0, 255),
                           thickness=2)

                # Capture image
                if timer() <= 0:
                    # image saving
                    self.img_arr[..., sc] = gray.copy()[..., newaxis]
                    clist.append(board1)

                    # for camera calibration
                    self.corners_arr.append(corners)
                    self.objpoints.append(self.objp)

                    # program progess/display
                    img = zeros(raw.shape, dtype=uint8) + 255  # "flash" camera
                    sc += 1
                    timer_ref = now()

            # Save images to file
            if self.calibpath is None:
                self._create_calib_path()
            # Create save directories
            rawpath = self.calibpath + '/raw'
            cpath = self.calibpath + '/corners'
            for p in (rawpath, cpath):
                if not isdir(p):
                    if os_exists(p):
                        logging.warning(
                            '\'%s\' exists, but is not directory. Overwriting.'
                            % p)
                        os_remove(p)
                    mkdir(p)
            for i in range(nframes):
                fn_raw = rawpath + ('/f%s' % str(i + 1).zfill(5)) + '.jpg'
                fn_c = cpath + ('/f%s' % str(i + 1).zfill(5)) + '.jpg'
                cv_imwrite(fn_raw, self.img_arr[..., i],
                           (IMWRITE_JPEG_QUALITY, 100))
                cv_imwrite(fn_c, clist[i], (IMWRITE_JPEG_QUALITY, 100))

        # Close Capture
        except Exception as e:
            logging.error(e)
        finally:
            cap.release()
            destroyAllWindows()
            logging.debug('released \'%s\'' % cam)
Example #30
 def exists(self):
     return os_exists(self.filename)
Example #31
    def test_internal_correct_and_save():
        """
        Test internal correction saving method.
        """
        calib = CalibratePSEye()
        fn_c = calibsdir + '/camera_params.csv'
        # Asserts
        for t in (int, float, complex, list, tuple, range, dict, set,
                  frozenset, bool, bytes, bytearray, memoryview):
            try:
                calib.correct_and_save(t)
            except TypeError:
                pass
            else:
                raise RuntimeError('Failed to catch %s imgpath' % t.__name__)
        calib.load_calibrations(fn_c)
        cp = calib.calibpath
        calib.calibpath = None
        try:
            calib.correct_and_save('file-that-does-not-exist')
        except RuntimeError:
            pass
        else:
            raise RuntimeError('Failed to catch _calib_path is None')

        # Saving
        calib.calibpath = cp
        imgpath = testdir + '/raw'
        storeddir = testdir + '/00000000-000000_undistorted'
        storedcp = testdir + '/00000000-000000_camera_params.csv'
        if os_exists(storeddir):
            rmtree(storeddir)
        if os_exists(storedcp):
            os_remove(storedcp)
        ud1 = calib.correct_and_save(imgpath)
        try:
            # Proper saving
            if not os_exists(storeddir) or not os_exists(storedcp):
                raise RuntimeError('Error creating corrected directories')
            imgcount1 = len([f for f in listdir(imgpath) if f[-4:].lower() == '.jpg'])
            imgcount2 = len([f for f in listdir(storeddir) if f[-4:].lower() == '.jpg'])
            if imgcount1 != imgcount2:
                raise RuntimeError('Not all images were saved')

            # Correct calibration
            #   Check pre-save equality
            imglist = [f for f in listdir(imgpath) if f[-4:].lower() == '.jpg']
            rawimg = [cv_imread(imgpath + '/' + f) for f in imglist]
            ud2 = calib.correct(rawimg)  # will know if `correct` works
            if not array_equal(ud1, ud2):
                raise RuntimeError('Failed pre-save equality check')

            #   Check post-save equality
            for i in range(len(imglist)):
                fnud = storeddir + ('/_f%s' % str(i+1).zfill(5)) + '.jpg'
                cv_imwrite(fnud, ud2[i,...], (IMWRITE_JPEG_QUALITY, 100))
            ud1list = [cv_imread(storeddir + '/' + f) for f in imglist]
            ud2list = [cv_imread(storeddir + '/_' + f) for f in imglist]

            ud1reload = asarray(ud1list, dtype='uint8')
            ud2reload = asarray(ud2list, dtype='uint8')
            if not array_equal(ud1reload, ud2reload):
                raise RuntimeError('Failed reload equality check')
        finally:
            os_remove(storedcp)
            rmtree(storeddir)
            try:
                if os_exists(storedcp):
                    raise RuntimeError('failed to delete camera_params csv')
                if os_exists(storeddir):
                    raise RuntimeError('failed to remove undistorted img dir')
            except AssertionError:
                raise RuntimeError('Exception during test cleanup')
Example #32
if __name__ == '__main__':
    args = parser.parse_args()
    level=log_CRITICAL
    if args.loglvl == 'ERROR':
        level=log_ERROR
    elif args.loglvl == 'WARNING':
        level=log_WARNING
    elif args.loglvl == 'INFO':
        level=log_INFO
    elif args.loglvl == 'DEBUG':
        level=log_DEBUG
    basicConfig(level=level, format='\t%(levelname)s:%(funcName)s:%(message)s')
    fdir = realpath(dirname(__file__))
    testdir = realpath(fdir + '/tests')
    calibsdir = testdir + '/calibs_00000000-000000'
    if not os_exists(fdir + '/data'):
        mkdir(fdir + '/data')
        warning('data/ directory did not exist; created it')
    # params for chessboard on marand's HP monitor
    #   last updated: 2020-05-22
    #  points of corners on chessboard
    boardsize = (3, 4)
    sq_len = 63  # mm
    o = zeros((boardsize[0]*boardsize[1],3), float32)
    o[:,:2] = mgrid[0:boardsize[0], 0:boardsize[1]].T.reshape(-1,2) * sq_len
    # chessboard detection rules
    p = {
        'boardsize': boardsize,
        'winsize': (11, 11),
        'zerozone': (-1, -1),
        'criteria': (