Example #1
 def extract(self, filename, path):
     if not filename.endswith('/'):
         f = os.path.join(path, filename)
         dir = os.path.dirname(f)
         if not os.path.exists(dir):
             os.makedirs(dir)
         open(f, 'wb').write(self.zfile.read(filename))
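For comparison, a minimal sketch of the same extraction step written for Python 3, where os.makedirs(..., exist_ok=True) replaces the manual existence check and a context manager closes the output file; self.zfile and the method signature are carried over from the example above:

def extract(self, filename, path):
    # Directory entries in a zip end with '/'; only regular files are written.
    if filename.endswith('/'):
        return
    target = os.path.join(path, filename)
    # exist_ok=True avoids the race between an exists() check and makedirs().
    os.makedirs(os.path.dirname(target), exist_ok=True)
    with open(target, 'wb') as fh:
        fh.write(self.zfile.read(filename))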
Example #2
 def backup(self):
     logging.info('Backing up {host}'.format( host=self.host ))
     if self.kind == 'mongodump':
         logging.info('Performing mongodump...')
         cmd = ["mongodump",
             "-o", backup_dir,
             "-h", str(self.host.hostname),
             "--port", str(self.host.port),
         ]
         subprocess.Popen(cmd,
             stdout=subprocess.PIPE,
             stdin=subprocess.PIPE,
             stderr=subprocess.STDOUT
         )
     elif self.kind == 'raw': # and check for offline status
         logging.info('Performing raw backup...')
         self.ssh_connect()
         # ssh -n remotehost "tar jcvf - SOURCEDIR" > DESTFILE.tar.gz
         #cmd = ["tar", "zcf", "-", source_dir]
         #stdin, stdout, sterr = self.ssh.exec_command(" ".join(cmd))
         #print stdout.read()
         if not os.path.exists(backup_dir):
             os.makedirs(backup_dir)
         tar_cmd = 'tar jcf - {source_dir}'.format(source_dir=source_dir)
         # A ">" inside an argument list is not shell redirection, so capture
         # the remote tar stream by pointing stdout at a local file instead.
         with open(os.path.join(backup_dir, 'backup.tar.gz'), 'wb') as out:
             subprocess.call(["ssh", "-n", "-p5555", self.host.hostname, tar_cmd],
                             stdout=out)
         # Make a tar archive from ssh pipe
         #tar = tarfile.open(backup_dir + 'backup.tar.gz', 'w:gz')
         #tar.add(stdout)
         #tar.close()
         
         # tar zcvf - /wwwdata | ssh [email protected] "cat > /backup/wwwdata.tar.gz"
     else:
         # DB was not locked
         logging.info('Database needs to be locked for raw backup')
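The commented-out lines above hint at streaming the remote tar output instead of shelling out; a minimal sketch of that variant, assuming self.ssh is a connected paramiko SSHClient as suggested by the exec_command comment (source_dir and backup_dir are taken as given, and this fragment would sit inside the raw-backup branch):

# Stream "tar jcf -" from the remote host straight into a local archive,
# reading in chunks so large backups never have to fit in memory.
stdin, stdout, stderr = self.ssh.exec_command('tar jcf - {0}'.format(source_dir))
with open(os.path.join(backup_dir, 'backup.tar.bz2'), 'wb') as out:
    for chunk in iter(lambda: stdout.read(65536), b''):
        out.write(chunk)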
Example #3
def check_lm_bin(dataset, lm_path):
    if not os.path.isdir(os.path.join(dataset, 'lm_bin')):
        os.makedirs(os.path.join(dataset, 'lm_bin'))
        for i in range(len(lm_path)):
            lm = np.loadtxt(lm_path[i])
            lm = np.reshape(lm, [-1])
            lm.tofile(
                os.path.join(dataset, 'lm_bin',
                             lm_path[i].split('/')[-1].replace('txt', 'bin')))
Example #4
def filterAndSave(fname, path, savepath, filterSize):
	vol = importStack(path, fname)

	try:
		os.makedirs(savepath)
	except OSError:
		print(savepath + ' already exists')
	res = np.memmap(savepath + 'filtered_Size_' + str(filterSize) + fname, dtype='float64', mode='w+', shape=vol.shape)
	streaming3Dfilter(vol, res, filterSize)
Example #5
def fillPlayers():
	path = "./metadata/"
	
	if not os.path.exists(path):
		os.makedirs(path)

	with open((os.path.join(path, 'players.txt'))) as playerNames:
		playerNameList = playerNames.readlines()
	for playerName in playerNameList:
		newPlayer = player(playerName)
		playerList.append(newPlayer)
Example #6
    def dump(manager, **kwargs):
        data_path = get_data_path(**kwargs)

        # If the parent directory does not exist, create it.
        data_dir = os.path.dirname(data_path)
        try:
            os.makedirs(data_dir)
        except FileExistsError:
            pass

        with open(data_path, 'w') as fh:
            json.dump(manager.to_dict(), fh)
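On Python 3.2 and later, the try/except FileExistsError pair above can be collapsed into a single call; a one-line equivalent, with data_dir as in the example:

os.makedirs(data_dir, exist_ok=True)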
Example #7
    def __init__(self,
                 root,
                 target_type="category",
                 train=True,
                 transform=None,
                 target_transform=None,
                 download=False):
        super(Caltech101, self).__init__(os.path.join(root, 'caltech101'))
        self.train = train
        self.dir_name = '101_ObjectCategories_split/train' if self.train else '101_ObjectCategories_split/test'

        os.makedirs(self.root, exist_ok=True)
        if isinstance(target_type, list):
            self.target_type = target_type
        else:
            self.target_type = [target_type]
        self.transform = transform
        self.target_transform = target_transform

        if download:
            self.download()

        if not self._check_integrity():
            raise RuntimeError('Dataset not found or corrupted.' +
                               ' You can use download=True to download it')

        self.categories = sorted(
            os.listdir(os.path.join(self.root, "101_ObjectCategories")))
        self.categories.remove("BACKGROUND_Google")  # this is not a real class

        # For some reason, the category names in "101_ObjectCategories" and
        # "Annotations" do not always match. This is a manual map between the
        # two. Defaults to using same name, since most names are fine.
        name_map = {
            "Faces": "Faces_2",
            "Faces_easy": "Faces_3",
            "Motorbikes": "Motorbikes_16",
            "airplanes": "Airplanes_Side_2"
        }
        self.annotation_categories = list(
            map(lambda x: name_map[x]
                if x in name_map else x, self.categories))

        self.index = []
        self.y = []
        for (i, c) in enumerate(self.categories):
            file_names = os.listdir(os.path.join(self.root, self.dir_name, c))
            n = len(file_names)
            self.index.extend(file_names)
            self.y.extend(n * [i])

        print(self.train, len(self.index))
Example #8
def save_sidebar(content):
    filepath = current_app.config.get("SITE_SIDEBAR")
    if not filepath:
        flash("Config your site with SITE_SIDEBAR", "warn")
        return

    dirname = os.path.dirname(filepath)
    if not os.path.exists(dirname):
        os.makedirs(dirname)

    with open(filepath, "wb") as f:
        content = content.encode("utf-8")
        f.write(content)
Example #9
def save_sidebar(content):
    filepath = current_app.config.get('SITE_SIDEBAR')
    if not filepath:
        flash('Config your site with SITE_SIDEBAR', 'warn')
        return

    dirname = os.path.dirname(filepath)
    if not os.path.exists(dirname):
        os.makedirs(dirname)

    with open(filepath, 'wb') as f:
        content = content.encode('utf-8')
        f.write(content)
Example #10
def save_sidebar(content):
    filepath = current_app.config.get('SITE_SIDEBAR')
    if not filepath:
        flash('Config your site with SITE_SIDEBAR', 'warn')
        return

    dirname = os.path.dirname(filepath)
    if not os.path.exists(dirname):
        os.makedirs(dirname)

    with open(filepath, 'wb') as f:
        content = content.encode('utf-8')
        f.write(content)
Example #11
def filterAndSave(fname, path, savepath, filterSize):
	vol = importStack(path, fname)
	#volsmall = vol[100:150, 100:150, 100:101]
	#volsmall = vol

	try:
		os.makedirs(savepath)
	except OSError:
		print(savepath + ' already exists')
	res = np.memmap(savepath + 'filtered_Size_' + str(filterSize) + fname, dtype='float64', mode='w+', shape=vol.shape)
	#res = ndimage.generic_filter(vol, nanmeanFilter, size=filterSize)
	streaming3Dfilter(vol, res, filterSize)
Example #12
def organize_by_mtime(argv):
	assert len(argv) == 2
	src, dest = argv
	for root, dirs, files in os.walk(src):
		for basename in files:
			src_file = os.path.join(root, basename)
			t = os.path.getmtime(src_file)
			year, month =  datetime.datetime.fromtimestamp(t).strftime('%Y %m').split()
			dest_dir = os.path.join(dest, year, month)
			dest_file = os.path.join(dest_dir, basename)
			if not os.path.exists(dest_dir):
				os.makedirs(dest_dir)

			print('Moving %s to %s' % (src_file, dest_file))
			try:
				shutil.move(src_file, dest_file)
			except Exception as e:
				print(e)
				sys.exit()
Example #13
def moveFiles(listOfFile, destinationDir):
    """
        Move list of the files into different directory
        If the same name file exists, it add random prefix into filename

        :param listOfFile : <string list> list of file path
        :type listOfFile:str
        :param destinationDir :Destination directory name
        :type destinationDir:str
        :return void
    """

    for filename in listOfFile:
        path, name = os.path.split(filename)
        prefix_num = random.randrange(1, 99999999)

        if not os.path.exists(destinationDir):
            os.makedirs(destinationDir)
        if filename == destinationDir:
            continue
            #os.getcwd() + os.sep +
        destinationFilename = destinationDir + os.sep  #+ str(prefix_num) + "_"
        os.rename(filename, destinationFilename + name)
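The docstring above mentions adding a random prefix when a file with the same name already exists, but that part stays commented out in the listed code; a minimal, hypothetical sketch of that variant with the same signature (shutil.move is used so moves also work across filesystems):

import os
import random
import shutil


def moveFiles(listOfFile, destinationDir):
    os.makedirs(destinationDir, exist_ok=True)
    for filename in listOfFile:
        name = os.path.basename(filename)
        target = os.path.join(destinationDir, name)
        # Only add the random prefix when the destination name is already taken.
        if os.path.exists(target):
            target = os.path.join(destinationDir,
                                  '%d_%s' % (random.randrange(1, 99999999), name))
        shutil.move(filename, target)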
Example #14
#!/usr/bin/env python

import os

os.chdir('/tmp/tmp')
for dirname in '1 10 11 12 2 20 21 22 3 30 31 32'.split():
    try:
        os.makedirs(dirname)
    except OSError: pass

for root, dirs, files in os.walk('.'):
    for dirname in sorted(dirs):
#   for dirname in sorted(dirs, key=int):
        print(dirname)
Example #15
                        metavar='')
    parser.add_argument('-l',
                        '--log',
                        action='store_true',
                        help='Write log to the console')
    args = parser.parse_args()
    if not args.interface:
        print("Error: capture interface not given, try --help")
        sys.exit(-1)

    # Setup thread for sniffer #
    sniffer = packetsniff(args.interface)

    # Making folder for files #
    if not os.path.exists(os.path.dirname(__file__) + '/data'):
        os.makedirs(os.path.dirname(__file__) + '/data')
    folder = os.path.dirname(__file__) + '/data/' + args.output[:-4]
    # Check if folder exists #
    if os.path.exists(folder):
        print('Please choose another file name'
              '\nOne with this name already exists')
        # Check if tmux is running
        if os.system('pidof tmux') != 256:
            time.sleep(10)
            os.system("pkill tmux")
        sys.exit(-1)
    os.makedirs(folder)

    # Setting up csv writer and file for starting coordinates #
    csvFile = open(folder + '/' + args.output, 'w')
    wr = csv.writer(csvFile)
Example #16
        avg_cost = 0

        # Training step
        for i in range(batchSize):
            feed_dict = {X: trainX, Y: trainY}
            s, _ = sess.run([summary, optimizer], feed_dict=feed_dict)
            writer.add_summary(s, global_step=global_step)
            global_step += 1

            avg_cost += sess.run(loss, feed_dict=feed_dict)  #/ total_batch

        print("Epoch= {:>5}, loss= {:>12}".format(epoch + 1, avg_cost))
        sess.run(last_epoch.assign(epoch + 1))
        if not os.path.exists(CHECK_POINT_DIR):
            os.makedirs(CHECK_POINT_DIR)
        saver.save(sess, CHECK_POINT_DIR + "/model", global_step=epoch + 1)

        # Test step
        test_predict = sess.run(Y_pred, feed_dict={X: testX})
        rmse_val = sess.run(rmse,
                            feed_dict={
                                targets: testY,
                                predictions: test_predict
                            })
        print("RMSE: {}".format(rmse_val))

    # Test step
    test_predict = sess.run(Y_pred, feed_dict={X: testX})
    rmse_val = sess.run(rmse,
                        feed_dict={
Example #17
def createDir(path_str):
    if os.path.exists(path_str):
        return
    os.makedirs(path_str)
cfg = get_cfg()
cfg.merge_from_file(
    model_zoo.get_config_file(
        "COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_3x.yaml"))
cfg.DATASETS.TRAIN = ("balloon_train", )
cfg.DATASETS.TEST = ()
cfg.MODEL.DEVICE = 'cuda'  # detectron2 expects 'cuda' or 'cpu'
cfg.DATALOADER.NUM_WORKERS = 2
cfg.MODEL.WEIGHTS = model_zoo.get_checkpoint_url(
    "COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_3x.yaml")
cfg.SOLVER.IMS_PER_BATCH = 2
cfg.SOLVER.BASE_LR = 0.00025
cfg.SOLVER.MAX_ITER = 300
cfg.MODEL.ROI_HEADS.BATCH_SIZE_PER_IMAGE = 128
cfg.MODEL.ROI_HEADS.NUM_CLASSES = 1  # only has one class to be detected
os.makedirs(cfg.OUTPUT_DIR, exist_ok=True)
trainer = DefaultTrainer(cfg)
trainer.resume_or_load(resume=False)
trainer.train()

# eval the model
cfg.MODEL.WEIGHTS = os.path.join(cfg.OUTPUT_DIR, "model_final.pth")
cfg.MODEL.ROI_HEADS.SCORE_THRESH_TEST = 0.7
predictor = DefaultPredictor(cfg)
dataset_dicts = get_balloon_dicts('balloon/val')
for d in random.sample(dataset_dicts, 3):
    im = cv2.imread(d["file_name"])
    outputs = predictor(im)
    v = Visualizer(im[:, :, ::-1],
                   metadata=balloon_metadata,
                   scale=0.5,
#!/usr/bin/env python

import shutil, glob, os

sourceModelPath = 'libs/FaceTracker/model/'
examples = glob.glob('example*')
for example in examples:
	targetModelPath = example + '/bin/data/'
	try: os.makedirs(targetModelPath)
	except OSError: pass
	try:
		shutil.copytree(sourceModelPath, targetModelPath + 'model/')
		print('Copied model data into ' + example)
	except:
		print('Did not copy model data into ' + example)
Example #20
def main(cfg):
    # Checking data directory
    input_directory = cfg['dataset']['input_directory']
    if input_directory is None:
        data_dir = parentdir.replace("\\hybcast", "").replace("/hybcast", "")

        input_directory = data_dir + "\\hybcast\\data\\external"
        if not os.path.isdir(input_directory):
            print("Creating data directory " + input_directory)
            os.makedirs(input_directory)
    else:
        if not os.path.isdir(input_directory):
            print("Creating data directory " + input_directory)
            os.makedirs(input_directory)

    # Checking cuda availability
    if not torch.cuda.is_available():
        print("CUDA is not available, using cpu instead")
        cfg['device'] = 'cpu'
    else:
        print("Using CUDA")

    print('Preparing M4 data - ' + cfg['dataset']['name'])
    X_train_df, y_train_df, X_test_df, y_test_df = prepare_m4_data(
        dataset_name=cfg['dataset']['name'],
        directory=input_directory,
        num_obs=cfg["dataset"]["num_obs"])

    print("Successfully get the M4 data.\nBegin ESTransformer")
    #Instantiate the model
    model = DeployedESTransformer(  # Device and dataset
        device=cfg['device'],
        root_dir=input_directory,
        dataset_name=cfg['dataset']['name'],

        # Train parameters
        max_epochs=500,
        batch_size=cfg['train_parameters']['batch_size'],
        freq_of_test=cfg['train_parameters']['freq_of_test'],
        learning_rate=float(cfg['train_parameters']['learning_rate']),
        per_series_lr_multip=cfg['train_parameters']['per_series_lr_multip'],
        lr_scheduler_step_size=cfg['train_parameters']
        ['lr_scheduler_step_size'],
        lr_decay=cfg['train_parameters']['lr_decay'],
        level_variability_penalty=cfg['train_parameters']
        ['level_variability_penalty'],
        testing_percentile=cfg['train_parameters']['testing_percentile'],
        training_percentile=cfg['train_parameters']['training_percentile'],
        ensemble=cfg['train_parameters']['ensemble'],

        # ES parameters
        seasonality=cfg['ES_parameters']['seasonality'],
        random_seed=cfg['ES_parameters']['random_seed'],
        input_size=cfg['ES_parameters']['input_size'],
        d_input=cfg['ES_parameters']
        ['d_input'],  # input_size + 6 (which is exo features)
        output_size=cfg['ES_parameters']
        ['output_size'],  # must be the same as d_output
        d_output=cfg['ES_parameters']
        ['d_output'],  # input_size + output_size >= 13

        # Transformer parameter
        transformer_weight_decay=0.0,
        d_model=cfg['Transformer_parameter']['d_model'],
        q=cfg['Transformer_parameter']['q'],
        v=cfg['Transformer_parameter']['v'],
        h=cfg['Transformer_parameter']['h'],
        N=cfg['Transformer_parameter']['N'],
        attention_size=cfg['Transformer_parameter']['attention_size'],
        dropout=cfg['Transformer_parameter']['dropout'],
        chunk_mode=cfg['Transformer_parameter']['chunk_mode'],
        pe=cfg['Transformer_parameter']['pe'],
        pe_period=cfg['Transformer_parameter']['pe_period'])
    # Fit model
    # If y_test_df is provided the model # will evaluate predictions on this set every freq_test epochs
    model.fit(X_train_df, y_train_df, X_test_df, y_test_df)

    # Predict on test set
    y_hat_df = model.predict(X_test_df)

    # Predict on train set
    y_train_hat_df = model.predict(X_train_df)

    # Evaluate predictions
    final_owa, final_mase, final_smape = evaluate_prediction_owa(
        y_hat_df, y_train_df, X_test_df, y_test_df, naive2_seasonality=1)
Example #21
def check_folder():
    if not os.path.exists('data/enigmata'):
        print('Creating data/enigmata')
        os.makedirs('data/enigmata')
Example #22
                bead_dict[h] = beads
        print('Finished', pos)
        pickle.dump(bead_dict, open(os.path.join(bead_path, pos), 'wb'))
    else:
        bead_dict = {}
        for h in fnames_dict.keys():
            beads = find_beads_3D(fnames_dict[h], ave_bead)
            bead_dict[h] = beads
        print('Finished', pos)
        pickle.dump(bead_dict, open(os.path.join(bead_path, pos), 'wb'))


if __name__ == '__main__':
    #Setting up paths
    if not os.path.exists(args.analysis_path):
        os.makedirs(args.analysis_path)
    if args.md_path == 'None':
        deconvolved_path = os.path.join(args.analysis_path, 'deconvolved')
        if not os.path.exists(deconvolved_path):
            print("analysis path doesn't have your metadata")
            print('add path to metadata after analysis path')
    else:
        deconvolved_path = args.md_path
    bead_path = os.path.join(args.analysis_path, 'beads')
    if not os.path.exists(bead_path):
        os.makedirs(bead_path)
    results_path = os.path.join(args.analysis_path, 'results')
    if not os.path.exists(results_path):
        os.makedirs(results_path)

    #Retrieving file names and position names
Example #23
#!/usr/bin/env python

import shutil, glob, os

sourceModelPath = 'libs/FaceTracker/model/'
examples = glob.glob('example*')
for example in examples:
    targetModelPath = example + '/bin/data/'
    try:
        os.makedirs(targetModelPath)
    except OSError:
        pass
    try:
        shutil.copytree(sourceModelPath, targetModelPath + 'model/')
        print('Copied model data into ' + example)
    except:
        print('Did not copy model data into ' + example)
Example #24
    def __init__(self, net_type):
        self.net_type = net_type
        self.anchor_per_scale = cfg.YOLO.ANCHOR_PER_SCALE
        self.classes = utils.read_class_names(cfg.YOLO.CLASSES)
        self.num_classes = len(self.classes)
        
        self.learn_rate_init = cfg.TRAIN.LEARN_RATE_INIT
        self.learn_rate_end = cfg.TRAIN.LEARN_RATE_END
        self.first_stage_epochs = cfg.TRAIN.FISRT_STAGE_EPOCHS
        self.second_stage_epochs = cfg.TRAIN.SECOND_STAGE_EPOCHS
        self.warmup_periods = cfg.TRAIN.WARMUP_EPOCHS
        self.initial_weight = cfg.TRAIN.INITIAL_WEIGHT
        
        self.ckpt_path = cfg.TRAIN.CKPT_PATH        
        if not os.path.exists(self.ckpt_path):
            os.makedirs(self.ckpt_path)
        
        self.time = time.strftime('%Y-%m-%d-%H-%M-%S', time.localtime(time.time()))
        self.moving_ave_decay = cfg.YOLO.MOVING_AVE_DECAY
        self.max_bbox_per_scale = 150

        self.train_logdir = ("./%s/log/train" % net_type)
        if not os.path.exists(self.train_logdir):
            os.makedirs(self.train_logdir)

        self.trainset = Dataset('train', self.net_type)
        self.testset = Dataset('test', self.net_type)
        self.steps_per_period = len(self.trainset)
        self.sess = tf.Session(config=tf.ConfigProto(allow_soft_placement=True))

        with tf.name_scope('input'):
            if net_type == 'tiny':
                self.input_data = tf.placeholder(dtype=tf.float32, name='input_data')
                self.label_mbbox = tf.placeholder(dtype=tf.float32, name='label_mbbox')
                self.label_lbbox = tf.placeholder(dtype=tf.float32, name='label_lbbox')

                self.true_mbboxes = tf.placeholder(dtype=tf.float32, name='mbboxes')
                self.true_lbboxes = tf.placeholder(dtype=tf.float32, name='lbboxes')
                self.trainable = tf.placeholder(dtype=tf.bool, name='training')

            else:                
                self.input_data = tf.placeholder(dtype=tf.float32, name='input_data')
                self.label_sbbox = tf.placeholder(dtype=tf.float32, name='label_sbbox')
                self.label_mbbox = tf.placeholder(dtype=tf.float32, name='label_mbbox')
                self.label_lbbox = tf.placeholder(dtype=tf.float32, name='label_lbbox')

                self.true_sbboxes = tf.placeholder(dtype=tf.float32, name='sbboxes')
                self.true_mbboxes = tf.placeholder(dtype=tf.float32, name='mbboxes')
                self.true_lbboxes = tf.placeholder(dtype=tf.float32, name='lbboxes')
                self.trainable = tf.placeholder(dtype=tf.bool, name='training')

        with tf.name_scope("define_loss"):
            if self.net_type == 'tiny':
                self.model = YOLOV3Tiny(self.input_data, self.trainable)
                self.net_var = tf.global_variables()
                self.iou_loss, self.conf_loss, self.prob_loss = self.model.compute_loss(self.label_mbbox, self.label_lbbox,
                                                                                        self.true_mbboxes, self.true_lbboxes)
                self.loss = self.iou_loss + self.conf_loss + self.prob_loss

            elif self.net_type == 'yolov3':
                self.model = YOLOV3(self.input_data, self.trainable)
                self.net_var = tf.global_variables()
                self.iou_loss, self.conf_loss, self.prob_loss = self.model.compute_loss(self.label_sbbox, self.label_mbbox, self.label_lbbox,
                                                                                        self.true_sbboxes, self.true_mbboxes, self.true_lbboxes)
                self.loss = self.iou_loss + self.conf_loss + self.prob_loss
            
            elif self.net_type == 'yolov4' or self.net_type == 'yolov5' :
                iou_use = 1  # (0, 1, 2) ==> (giou_loss, diou_loss, ciou_loss)
                focal_use = False  # (False, True) ==> (normal, focal_loss)
                label_smoothing = 0

                if self.net_type == 'yolov4':
                    self.model = YOLOV4(self.input_data, self.trainable)
                else:
                    self.model = YOLOV5(self.input_data, self.trainable)

                self.net_var = tf.global_variables()
                self.iou_loss, self.conf_loss, self.prob_loss = self.model.compute_loss(self.label_sbbox, self.label_mbbox, self.label_lbbox,
                                                                                        self.true_sbboxes, self.true_mbboxes, self.true_lbboxes,
                                                                                        iou_use, focal_use, label_smoothing)
                self.loss = self.iou_loss + self.conf_loss + self.prob_loss
                # self.loss = tf.Print(self.loss, [self.iou_loss, self.conf_loss, self.prob_loss], message='loss: ')
            else:
                print('self.net_type=%s error' % self.net_type)

        with tf.name_scope('learn_rate'):
            self.global_step = tf.Variable(1.0, dtype=tf.float64, trainable=False, name='global_step')
            warmup_steps = tf.constant(self.warmup_periods * self.steps_per_period, dtype=tf.float64, name='warmup_steps')
            train_steps = tf.constant((self.first_stage_epochs + self.second_stage_epochs)* self.steps_per_period,
                                       dtype=tf.float64, name='train_steps')
            
            self.learn_rate = tf.cond(pred=self.global_step < warmup_steps, true_fn=lambda: self.global_step / warmup_steps * self.learn_rate_init,
                                      false_fn=lambda: self.learn_rate_end + 0.5 * (self.learn_rate_init - self.learn_rate_end) * \
                                              (1 + tf.cos((self.global_step - warmup_steps) / (train_steps - warmup_steps) * np.pi)))
            global_step_update = tf.assign_add(self.global_step, 1.0)

        with tf.name_scope("define_weight_decay"):
            moving_ave = tf.train.ExponentialMovingAverage(self.moving_ave_decay).apply(tf.trainable_variables())

        with tf.name_scope("define_first_stage_train"):
            self.first_stage_trainable_var_list = []
            for var in tf.trainable_variables():
                var_name = var.op.name
                var_name_mess = str(var_name).split('/')
                
                if net_type == 'tiny':
                    bboxes = ['conv_sbbox', 'conv_mbbox', 'conv_lbbox']
                else:
                    bboxes = ['conv_mbbox', 'conv_lbbox']
                
                if var_name_mess[0] in bboxes:
                    self.first_stage_trainable_var_list.append(var)

            first_stage_optimizer = tf.train.AdamOptimizer(self.learn_rate).minimize(self.loss, var_list=self.first_stage_trainable_var_list)
            with tf.control_dependencies(tf.get_collection(tf.GraphKeys.UPDATE_OPS)):
                with tf.control_dependencies([first_stage_optimizer, global_step_update]):
                    with tf.control_dependencies([moving_ave]):
                        self.train_op_with_frozen_variables = tf.no_op()

        with tf.name_scope("define_second_stage_train"):
            second_stage_trainable_var_list = tf.trainable_variables()
            second_stage_optimizer = tf.train.AdamOptimizer(self.learn_rate).minimize(self.loss, var_list=second_stage_trainable_var_list)

            with tf.control_dependencies(tf.get_collection(tf.GraphKeys.UPDATE_OPS)):
                with tf.control_dependencies([second_stage_optimizer, global_step_update]):
                    with tf.control_dependencies([moving_ave]):
                        self.train_op_with_all_variables = tf.no_op()

        with tf.name_scope('loader_and_saver'):
            self.loader = tf.train.Saver(self.net_var)
            self.saver  = tf.train.Saver(tf.global_variables(), max_to_keep=1000)

        with tf.name_scope('summary'):
            tf.summary.scalar("learn_rate", self.learn_rate)
            tf.summary.scalar("iou_loss", self.iou_loss)
            tf.summary.scalar("conf_loss", self.conf_loss)
            tf.summary.scalar("prob_loss", self.prob_loss)
            tf.summary.scalar("total_loss", self.loss)

            logdir = ("./%s/log/" % self.net_type)
            if os.path.exists(logdir): 
                shutil.rmtree(logdir)
            os.makedirs(logdir)
            
            self.write_op = tf.summary.merge_all()
            self.summary_writer = tf.summary.FileWriter(logdir, graph=self.sess.graph)
Example #25
 def save(self, global_step=None):
     print('Saving checkpoint')
     if not os.path.exists(self.ckpt_dir):
         os.makedirs(self.ckpt_dir)
     self.saver.save(self.sess, self.ckpt_dir, global_step=global_step)
Example #26
example to pull out the last part of the file path:
os.path.basename('c:\\folder1\\folder\\spam.png')
os.path.basename('c:\\folder1\\folder')
""")

print(os.path.isabs('..\\..\\spam.png'))
print(os.path.isabs('c:\\folder\\folder'))

print(os.path.relpath('c:\\folder1\\folder2\\spam.png', 'c:\\folder'))
print(os.path.dirname('c:\\folder\\folder2\\spam.png'))
print(os.path.basename('c:\\folder1\\folder\\spam.png'))
print(os.path.basename('c:\\folder1\\folder'))

print(
    """To check if a filepath exists, is a file, is a directory, and the file size:
os.path.exists('c:\\folder1\\folder2\\spam.png')
os.path.exists('c:\\')
os.path.isfile('c:\\folder1\\folder2\\spam.png')
os.path.isdir('c:\\')
os.path.getsize('c:\\windows\\system32\\calc.exe')
""")

print(os.path.exists('c:\\folder1\\folder2\\spam.png'))
print(os.path.exists('c:\\'))
print(os.path.isfile('c:\\folder1\\folder2\\spam.png'))
print(os.path.isdir('c:\\'))
print(os.path.getsize('c:\\windows\\system32\\calc.exe'))

print("""To create new folders:
os.makedirs('c:\\delicious\\walnut\\coffee')""")
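A runnable sketch of that last point, using a temporary directory instead of a hard-coded C:\ path so it works on any platform (the nested folder names are only placeholders):

import os
import tempfile

base = tempfile.mkdtemp()
nested = os.path.join(base, 'delicious', 'walnut', 'coffee')
# makedirs creates every missing intermediate directory in one call.
os.makedirs(nested)
print(os.path.isdir(nested))  # True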
Example #27
from network3 import *
from node2 import *
from message2 import *
from constants import *
import os

#number of Taus

#initialize and fill network with nodes
net = Network()
net.fill_network(V, maxX, maxY)

if not os.path.exists(path_to_folder):
    os.makedirs(path_to_folder)

#initialize output file
output_file = open(path_to_folder + delivery_file_name, "w")
output_file.write("ID\ts\td\tts\tte\tm\tLLC\tELC\n")
output_file.write("----------------------------------------------------\n")
output_file.close()

output_file2 = open(path_to_folder + consumedEnergyFile, 'w')
output_file2.write("Time\tEnergy\n")
output_file2.close()

day1_link_exists = link_exists_folder.split("/")

specBW = pickle.load(open(link_exists_folder + "specBW.pkl", "rb"))
LINK_EXISTS = pickle.load(open(link_exists_folder + "/LINK_EXISTS.pkl", "rb"))

day1_folder_path = path_to_folder.split("/")
Example #28
SAMPLERATE = 8000
DATAPATH = 'data/test'

# In[100]:

# Download raw data from source
# and uncompress to DATAPATH

os.makedirs(DATAPATH, exist_ok=True)

url = 'http://download.tensorflow.org/data/'
file_name = 'speech_commands_v0.01.tar.gz'

if not os.path.exists(DATAPATH):
    os.makedirs(DATAPATH)

if not os.path.exists(os.path.join(DATAPATH, file_name)):

    print('Downloading', file_name)
    rtnVal = subprocess.call(
        ['wget', os.path.join(url, file_name), '-P', DATAPATH])
    assert rtnVal == 0, 'download failed!'
    print(file_name, 'downloaded successfully')

    print('Uncompressing', os.path.basename(file_name))
    rtnVal = subprocess.call(
        ['tar', '-C', DATAPATH, '-zxvf',
         os.path.join(DATAPATH, file_name)])
    assert rtnVal == 0, 'file failed to uncompress!'
    print(file_name, 'uncompressed successfully')