Exemplo n.º 1
0
def process_line(line, jitterer):
    """Render ``N_jitters`` jittered crops for the chip named on one input line.

    ``line`` holds the chip image path and its coefficient file path separated
    by a comma.  Outputs are written to
    ``msceleb1m_jitter_dir/<id>/<chipID>_jitter_<i>.jpg``.  Chips whose first
    jitter already exists are skipped unless the module-level ``force_rewrite``
    flag is set.  Returns None in all cases.
    """
    # Expected format: "<chip_path>,<coeff_path>"
    parts = line.split(',')
    chip_path = parts[0]
    coeff_path = parts[1]
    if not os.path.isfile(coeff_path):
        # No coefficient file -> nothing to jitter for this chip.
        return
    id = category_index(coeff_path)
    output_dir = os.path.join(msceleb1m_jitter_dir, id)
    if not os.path.isdir(output_dir):
        os.mkdir(output_dir)  # fixed: os.makdir does not exist (AttributeError)
    chipID = chip_id(chip_path)
    output_f = os.path.join(output_dir, "%s_jitter_0.jpg" % chipID)
    if os.path.isfile(output_f) and not force_rewrite:
        # First jitter already on disk and rewrites are disabled.
        return
    image = np.array(Image.open(chip_path))
    coeffs = face3d.subject_perspective_sighting_coefficients(coeff_path)
    ims = jitterer.multiple_random_jitters([image], coeffs, N_jitters)
    for i, im in enumerate(ims):
        output_f = os.path.join(output_dir,
                                "%s_jitter_%s.jpg" % (chipID, i))
        Image.fromarray(im).save(output_f)
Exemplo n.º 2
0
def test__compile_correction_potential():
    """ test varecof_io.writer.corr_potentials.compile_correction_pot
    """

    # fixed: os.makdir does not exist. makedirs(..., exist_ok=True) also
    # keeps a leftover TMP_DIR from an earlier run from aborting the test.
    os.makedirs(TMP_DIR, exist_ok=True)

    varecof_io.writer.corr_potentials.compile_corr_pot(TMP_DIR)
Exemplo n.º 3
0
    def __init__(self):
        """Create the result/log directories and configure the root logger.

        Builds ``<proDir>/result/<YYYYmmddHHMMSS>/output.log`` and attaches a
        FileHandler for it to the root logger at INFO level.  The paths are
        published through the module-level globals ``logPath``, ``resultPath``
        and ``proDir``.
        """
        global logPath, resultPath, proDir
        proDir = readConfig.proDir
        resultPath = os.path.join(proDir, "result")
        # create result directory if it doesn't exist
        if not os.path.exists(resultPath):
            os.mkdir(resultPath)
        # per-run result directory named by the current local time
        # fixed: os.pathjoin -> os.path.join
        logPath = os.path.join(resultPath, str(datetime.now().strftime("%Y%m%d%H%M%S")))
        if not os.path.exists(logPath):
            os.mkdir(logPath)  # fixed: os.makdir does not exist
        self.logger = logging.getLogger()
        # fixed: Logger has no "setevel" method -> setLevel
        self.logger.setLevel(logging.INFO)

        # route records into <logPath>/output.log with a timestamped format
        handler = logging.FileHandler(os.path.join(logPath, "output.log"))
        formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
        handler.setFormatter(formatter)
        self.logger.addHandler(handler)
Exemplo n.º 4
0
def _upload(file):
    """Persist an uploaded file into ``uploadDirectory`` beside this package.

    Returns the full path of the saved file, or None when ``file`` is falsy.
    The stored name is prefixed with a "%m_%d_%H_%M_%S_" timestamp to reduce
    collisions.  ``file`` is expected to expose ``.name`` and ``.chunks()``
    (e.g. a Django UploadedFile) -- TODO confirm against callers.
    """
    if not file:
        return None
    upload_dir = os.path.join(
        os.path.dirname(os.path.dirname(os.path.abspath(__file__))),
        'uploadDirectory')
    if not os.path.isdir(upload_dir):
        os.mkdir(upload_dir)  # fixed: os.makdir does not exist
    file_name = os.path.join(
        upload_dir, '' + time.strftime("%m_%d_%H_%M_%S_") + file.name)
    # the with-block closes the handle; the explicit close() was redundant
    with open(file_name, 'wb+') as destination:
        for chunk in file.chunks():
            destination.write(chunk)
    return file_name
Exemplo n.º 5
0
    def save(self):
        """Serialize notices, assignments and files to database.db as JSON.

        Creates DATABASE_PATH on demand and chdirs into it (side effect kept
        from the original implementation) before writing.
        """
        # make sure the database directory exists before chdir'ing into it
        if not os.path.exists(DATABASE_PATH):
            os.mkdir(DATABASE_PATH)  # fixed: os.makdir does not exist
        os.chdir(DATABASE_PATH)

        obj = {"notice": self.notice_singles, "assignments": self.assignment_singles, "files": self.file_singles}

        # with-statement guarantees the handle is closed even on error
        with open("database.db", "w") as fout:
            fout.write(json.dumps(obj))
        return
 def __init__(self):
     """Set up canvas bounds, the picture output directory and draw params."""
     self.maxX, self.maxY = 0, 0
     self.savePath = './simulatePictures'
     if not os.path.exists(self.savePath):
         os.mkdir(self.savePath)  # fixed: os.makdir does not exist
     # ** cross param **#
     self.crossRadius = 14
     self.crossDistance = 150
     self.crossColor = [25,200,0]
     # ** road param **#
     self.roadColor = [0,0,0] #black
     self.roadLineType = 4
     self.channelWidth = 5
     self.channelDistance = 3
     self.lineWidth = 2
     self.time = 0
Exemplo n.º 7
0
def _upload(file):
    """Save an uploaded file under ``uploadDirectory`` and return its path.

    Returns None when ``file`` is falsy.  The stored file name carries a
    "%m_%d_%H_%M_%S_" timestamp prefix.  ``file`` must expose ``.name`` and
    ``.chunks()`` (e.g. a Django UploadedFile) -- TODO confirm with callers.
    """
    if not file:
        return None
    upload_dir = os.path.join(
        os.path.dirname(os.path.dirname(os.path.abspath(__file__))),
        'uploadDirectory')
    if not os.path.isdir(upload_dir):
        os.mkdir(upload_dir)  # fixed: os.makdir does not exist
    file_name = os.path.join(
        upload_dir, '' + time.strftime("%m_%d_%H_%M_%S_") + file.name)
    # the with-block closes the handle; the explicit close() was redundant
    with open(file_name, 'wb+') as destination:
        for chunk in file.chunks():
            destination.write(chunk)
    return file_name
Exemplo n.º 8
0
    def save(self):
        """Serialize notices, assignments and files to database.db as JSON.

        Creates DATABASE_PATH on demand and chdirs into it (side effect kept
        from the original implementation) before writing.
        """
        # make sure the database directory exists before chdir'ing into it
        if not os.path.exists(DATABASE_PATH):
            os.mkdir(DATABASE_PATH)  # fixed: os.makdir does not exist
        os.chdir(DATABASE_PATH)

        obj = {
            "notice": self.notice_singles,
            "assignments": self.assignment_singles,
            "files": self.file_singles
        }

        # with-statement guarantees the handle is closed even on error
        with open("database.db", "w") as fout:
            fout.write(json.dumps(obj))
        return
Exemplo n.º 9
0
 def __init__(self, name, logfile_name=None, level=logging.DEBUG):
     """Wire up a named logger writing to stderr or to a dated log file.

     When ``logfile_name`` is given, its directory is created on demand and
     a ".YYYYMMDD" suffix is appended to the file name; otherwise records go
     to a StreamHandler.
     """
     self.logger = logging.getLogger(name)
     self.logger.setLevel(level)
     formatter = logging.Formatter(
         "%(asctime)s [%(levelname)s] %(name)s - %(message)s")
     if logfile_name is None:
         ch = logging.StreamHandler()
     else:
         logDir = os.path.dirname(logfile_name)
         if logDir != "" and not os.path.exists(logDir):
             # fixed: os.makdir does not exist; makedirs also covers a
             # nested directory component in logfile_name
             os.makedirs(logDir)
         now = time.localtime()
         suffix = '.%d%02d%02d' % (now.tm_year, now.tm_mon, now.tm_mday)
         ch = logging.FileHandler(logfile_name + suffix)
     ch.setLevel(logging.DEBUG)
     ch.setFormatter(formatter)
     self.logger.addHandler(ch)
# Load, augment and split the Dogs-vs-Cats dataset.
data = DOGSCATS()
data.data_augmentation(augment_size=5000)
data.data_preprocessing(preprocess_mode='MinMax')
x_train_splitted, x_val, y_train_splitted, y_val = data.get_splitted_train_validation_set()
x_train, y_train = data.get_train_set()
x_test, y_test = data.get_test_set()
num_classes = data.num_classes

# run identifier: model name + wall-clock timestamp
modelID = str('DogsCats_CNN_LRsched_PlateauCB')
zeit = str(time.time())

# Save Path 
dir_path = os.path.abspath('/home/phil/MachineLearning/models/')
if not os.path.exists(dir_path):
    os.mkdir(dir_path)  # fixed: os.makdir does not exist
model_path = os.path.join(dir_path, str(modelID) + str(zeit) + '.h5')

# Log Dir 
log_dir = os.path.abspath('/home/phil/MachineLearning/logs/')
if not os.path.exists(log_dir):
    os.mkdir(log_dir)
model_log_dir = os.path.join(log_dir, str(modelID) + str(zeit))

# Define the DNN
# NOTE(review): this definition is truncated in this excerpt -- only the
# input layer is visible; confirm the full body before editing.
def model_fn(optimizer, learning_rate, filter_block1, kernel_size_block1, filter_block2, 
             kernel_size_block2, filter_block3, kernel_size_block3, dense_layer_size,
             kernel_initializer, bias_initializer, activation_str, dropout_rate, use_bn):
    # Input layer shaped after the training tensors (batch dim stripped).
    # Input
    input_img = Input(shape=x_train.shape[1:])
    # Conv Block 1
Exemplo n.º 11
0
    # Fragment of an async-RED reconstruction loop (enclosing def not shown).
    # - To denoise with block-wise denoiser, set pad to some scalar (5 by default).
    # We set the step-size to be 1/(L+2*tau)
    alg_args['step'] = 1 / (2 + 2 * tau)
    alg_args['num_processes'] = pnum
    time_start = time.time()
    asyncRED_recon, asyncRED_out, path = asyncRED_solver(
        dObj, rObj, **alg_args)
    time_end = time.time() - time_start
    print(f"Total time used: {time_end:.{4}}")
    asyncRED_out['recon'] = asyncRED_recon
    asyncRED_out['tau'] = tau

    # save out info
    asyncRED_output['img_{}'.format(i)] = asyncRED_out
    if not os.path.exists('results'):
        os.mkdir('results')  # fixed: os.makdir does not exist
    sio.savemat(
        'results/AsyncRED_{}-{}_proc={}.mat'.format(startIndex, endIndex,
                                                    pnum), asyncRED_output)

    ####################################################
    ####              PlOT CONVERGENCE               ###
    ####################################################

    # asyncRED_dist = asyncRED_out['dist']
    asyncRED_snr = asyncRED_out['snr']

    # compute the averaged distance to fixed points
    avgSnrAsyncRED = np.squeeze(asyncRED_snr)
    xRange = np.linspace(0, alg_args['num_iter'], alg_args['num_iter'])
    fig, (ax1, ax2) = plt.subplots(1, 2)
Exemplo n.º 12
0
Python 3.6.4 (v3.6.4:d48eceb, Dec 19 2017, 06:04:45) [MSC v.1900 32 bit (Intel)] on win32
Type "copyright", "credits" or "license()" for more information.
>>> 
 RESTART: C:\Users\pavan.badveli\Desktop\canarabank docs\PYTHON\04172018\RMS.py 
>>> import os
>>> print (dir (os))
['DirEntry', 'F_OK', 'MutableMapping', 'O_APPEND', 'O_BINARY', 'O_CREAT', 'O_EXCL', 'O_NOINHERIT', 'O_RANDOM', 'O_RDONLY', 'O_RDWR', 'O_SEQUENTIAL', 'O_SHORT_LIVED', 'O_TEMPORARY', 'O_TEXT', 'O_TRUNC', 'O_WRONLY', 'P_DETACH', 'P_NOWAIT', 'P_NOWAITO', 'P_OVERLAY', 'P_WAIT', 'PathLike', 'R_OK', 'SEEK_CUR', 'SEEK_END', 'SEEK_SET', 'TMP_MAX', 'W_OK', 'X_OK', '_Environ', '__all__', '__builtins__', '__cached__', '__doc__', '__file__', '__loader__', '__name__', '__package__', '__spec__', '_execvpe', '_exists', '_exit', '_fspath', '_get_exports_list', '_putenv', '_unsetenv', '_wrap_close', 'abc', 'abort', 'access', 'altsep', 'chdir', 'chmod', 'close', 'closerange', 'cpu_count', 'curdir', 'defpath', 'device_encoding', 'devnull', 'dup', 'dup2', 'environ', 'errno', 'error', 'execl', 'execle', 'execlp', 'execlpe', 'execv', 'execve', 'execvp', 'execvpe', 'extsep', 'fdopen', 'fsdecode', 'fsencode', 'fspath', 'fstat', 'fsync', 'ftruncate', 'get_exec_path', 'get_handle_inheritable', 'get_inheritable', 'get_terminal_size', 'getcwd', 'getcwdb', 'getenv', 'getlogin', 'getpid', 'getppid', 'isatty', 'kill', 'linesep', 'link', 'listdir', 'lseek', 'lstat', 'makedirs', 'mkdir', 'name', 'open', 'pardir', 'path', 'pathsep', 'pipe', 'popen', 'putenv', 'read', 'readlink', 'remove', 'removedirs', 'rename', 'renames', 'replace', 'rmdir', 'scandir', 'sep', 'set_handle_inheritable', 'set_inheritable', 'spawnl', 'spawnle', 'spawnv', 'spawnve', 'st', 'startfile', 'stat', 'stat_float_times', 'stat_result', 'statvfs_result', 'strerror', 'supports_bytes_environ', 'supports_dir_fd', 'supports_effective_ids', 'supports_fd', 'supports_follow_symlinks', 'symlink', 'sys', 'system', 'terminal_size', 'times', 'times_result', 'truncate', 'umask', 'uname_result', 'unlink', 'urandom', 'utime', 'waitpid', 'walk', 'write']
>>> print (os.getcwd())
C:\Users\pavan.badveli\Desktop\canarabank docs\PYTHON\04172018
>>> os.chdir()
Traceback (most recent call last):
  File "<pyshell#3>", line 1, in <module>
    os.chdir()
TypeError: Required argument 'path' (pos 1) not found
>>> os.makdir('')
Traceback (most recent call last):
  File "<pyshell#4>", line 1, in <module>
    os.makdir('')
AttributeError: module 'os' has no attribute 'makdir'
>>> os.makedirs('')

Traceback (most recent call last):
  File "<pyshell#5>", line 1, in <module>
    os.makedirs('')
  File "C:\Users\pavan.badveli\AppData\Local\Programs\Python\Python36-32\lib\os.py", line 220, in makedirs
    mkdir(name, mode)
FileNotFoundError: [WinError 3] The system cannot find the path specified: ''
>>> os.removedirs('<folder name in the working directory>')
Exemplo n.º 13
0
    M = Model(args.z_dims)
    M._build_graph(images_batch)
    global_step = tf.get_variable('global_step', [], 
			initializer=tf.constant_initializer(0), trainable=False)
    train_op = tf.train.AdamOptimizer(args.lr).minimize(M.cost, global_step=global_step)
    saver = tf.train.Saver(max_to_keep=20)

    config = get_session_config(0.3, multiprocessing.cpu_count()/2)
    sess = tf.Session(config=config)
    init_op = tf.global_variables_initializer()
    sess.run(init_op)
    if args.task != 'train':
    	saver.restore(sess, tf.train.latest_checkpoint('./checkpoints'))
    else:
	if not os.path.exists(bin_filepath):
            os.makdir('./logs')
    	summary_writer = tf.summary.FileWriter('./logs')
    	summary_op = tf.summary.merge_all()
    # creates threads to start all queue runners collected in the graph
    # [remember] always call init_op before start the runner
    tf.train.start_queue_runners(sess=sess)
    if args.task == 'train':
    	step = 0
    	while True:
  	    _, summary_str, loss= sess.run([train_op, summary_op, M.cost])
	    summary_writer.add_summary(summary_str, step)
	    if step%1000 == 0:
	    	if not os.path.exists('./checkpoints'):
		    os.mkdir('./checkpoints')
	    	saver.save(sess, os.path.join('./checkpoints', 'mnist'), global_step=global_step)
	    	print "==================================="