    def _install_from(self, fromPath, fromLoc, toLocation=None, ignore=None):
        """Copy file or directory from a location to the droplet

        Copies a file or directory from a location to the application
        droplet. Directories are copied recursively, but specific files
        in those directories can be ignored by specifing the ignore parameter.

            fromPath   -> file to copy, relative build pack
            fromLoc    -> root of the from path.  Full path to file or
                          directory to be copied is fromLoc + fromPath
            toLocation -> optional location where to copy the file
                          relative to app droplet.  If not specified
                          uses fromPath.
            ignore     -> an optional callable that is passed to
                          the ignore argument of shutil.copytree.
        """
        self._log.debug("Install file [%s] from [%s]", fromPath, fromLoc)
        fullPathFrom = os.path.join(fromLoc, fromPath)
        if os.path.exists(fullPathFrom):
            fullPathTo = os.path.join(
                self._ctx['BUILD_DIR'],
                fromPath if toLocation is None else toLocation)
            safe_makedirs(os.path.dirname(fullPathTo))
            self._log.debug("Copying [%s] to [%s]", fullPathFrom, fullPathTo)
            if os.path.isfile(fullPathFrom):
                shutil.copy(fullPathFrom, fullPathTo)
            else:
                utils.copytree(fullPathFrom, fullPathTo, ignore=ignore)
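These examples all revolve around small safe_* filesystem helpers, most often safe_makedirs. Its exact definition varies by project, but a minimal sketch looks like this (an assumption based on the call sites, not any one project's code):

import errno
import os

def safe_makedirs(path):
    """Create path and any missing parents; do nothing if it already exists."""
    try:
        os.makedirs(path)
    except OSError as e:
        # Only swallow "already exists"; re-raise permission errors and the like.
        if e.errno != errno.EEXIST:
            raise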
Example #3
    def __init__(self,
                 labels=None,
                 download_dir='temp',
                 fps=None,
                 start=None,
                 n=None):
        self.fps = fps
        self.download_dir = download_dir
        safe_makedirs(self.download_dir)

        objects = list(range(len(self.object_labels())))
        if labels is not None:
            objects = [self.object_labels().index(label) for label in labels]

        annotations = np.load('data/hand_object.npy')
        indices = [np.where(annotations[:, obj] == 1)[0] for obj in objects]
        indices = np.unique(np.hstack(indices))

        with open('data/youtube_links.txt', 'r') as links:
            self.entries = [link for i, link in enumerate(links)
                            if i in indices]

        self.downloader = YoutubeDownloader('mp4')

        # Keep only entries in the inclusive range [start, start + n]
        self.start = 0 if not start else start
        self.stop = len(self.entries) - 1 if not n else min(
            self.start + n, len(self.entries) - 1)
        self.entries = self.entries[self.start:self.stop + 1]
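This appears to be the initializer of the VLOGDataset class that Example #15 instantiates; a hypothetical call restricted to a few object labels might look like this (the label names are invented):

# Label names are illustrative; valid values come from object_labels().
dataset = VLOGDataset(labels=['cup', 'plate'], fps=1, start=0, n=100)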
Example #4
def save_buckets(buckets, bucket_dir):
    '''
    Saves the bucket lists to a directory.
    '''
    utils.safe_makedirs(bucket_dir)

    for i in buckets.keys():
        fname = os.path.join(bucket_dir, str(i))
        with open(fname, 'w') as f:
            for utt in buckets[i]:
                f.write(utt + '\n')
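A quick usage sketch, derived directly from the function body (the utterance IDs are made up):

# Writes files buckets/0 and buckets/1, one utterance ID per line.
save_buckets({0: ['utt-001', 'utt-002'], 1: ['utt-003']}, 'buckets')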
Example #6
def init_logging(ctx):
    logFmt = '%(asctime)s [%(levelname)s] %(name)s - %(message)s'
    if ctx.get('BP_DEBUG', False):
        logging.basicConfig(level=logging.DEBUG, format=logFmt)
    else:
        logLevelStr = ctx.get('BP_LOG_LEVEL', 'INFO')
        logLevel = getattr(logging, logLevelStr, logging.INFO)
        logDir = os.path.join(ctx['BUILD_DIR'], '.bp', 'logs')
        safe_makedirs(logDir)
        logging.basicConfig(level=logLevel, format=logFmt,
                            filename=os.path.join(logDir, 'bp.log'))
Example #7
    def __init__(self, config):
        SafeConfigParser.__init__(self)
        # imported here to avoid an import cycle
        from utils import safe_makedirs
        safe_makedirs(os.path.dirname(config))
        # we always want this section, even on fresh installs
        self.add_section("general")
        # read the config
        self.configfile = config
        try:
            self.read(self.configfile)
        except Exception as e:
            # don't crash on a corrupted config file
            LOG.warn("Could not read the config file '%s': %s",
                     self.configfile, e)
Example #8
def main(args):
    '''
    Training and evaluation script for character-based CTC ASR on the WSJ
    dataset, pre-processed by the ESPnet toolkit.
    '''
    jsons = {
        'train': 'dump/train_si284/deltafalse/data.json',
        'dev': 'dump/test_dev93/deltafalse/data.json',
        'test': 'dump/test_eval92/deltafalse/data.json'
    }

    # if the temporary directory contains a json, we'll assume it's correct
    if not os.path.exists(os.path.join(args.temp_root, jsons['train'])):
        # copy the data for faster reading than NFS
        utils.safe_copytree(args.data_root, args.temp_root)

    # if model_dir is specified, and it doesn't contain the log file, make it
    if args.model_dir is not None:
        log_file = os.path.join(args.model_dir, args.log_file)
        if not os.path.exists(log_file):
            utils.safe_makedirs(args.model_dir)
        logging.basicConfig(filename=log_file, filemode='a',
                            level=logging.INFO)

    torch.manual_seed(args.seed)
    torch.backends.cudnn.deterministic = not args.nondeterm
    np.random.seed(args.seed)

    if not args.eval_only:
        utils.safe_json_dump(vars(args),
                             os.path.join(args.model_dir, 'args.json'))
        epoch_stats = train(args, jsons)
        utils.safe_json_dump(epoch_stats,
                             os.path.join(args.model_dir, 'epoch_stats.json'))
    else:
        # Restore the training-time args, but keep this run's overrides.
        data_root, temp_root = args.data_root, args.temp_root
        test, cpu, seed, cleanup = args.test, args.cpu, args.seed, args.cleanup
        with open(os.path.join(args.model_dir, 'args.json'), 'r') as f:
            json_dict = json.load(f)
        args = argparse.Namespace(**json_dict)
        args.data_root, args.temp_root = data_root, temp_root
        args.test, args.cpu, args.seed, args.cleanup = test, cpu, seed, cleanup

    evaluate(args, jsons)

    if args.cleanup:
        utils.safe_rmtree(args.temp_root)
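safe_json_dump itself is not shown in any of these snippets; by analogy with safe_makedirs, a plausible definition might be the following (an assumption, not the project's actual code):

import json
import os

def safe_json_dump(obj, path):
    # Assumed behavior: create the parent directory, then write obj as JSON.
    parent = os.path.dirname(path)
    if parent:
        os.makedirs(parent, exist_ok=True)
    with open(path, 'w') as f:
        json.dump(obj, f, indent=2)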
Example #9
    def __init__(self, config):
        super(SoftwareCenterConfig, self).__init__()
        # imported here to avoid cycle
        from utils import safe_makedirs
        safe_makedirs(os.path.dirname(config))
        # we always want these sections, even on fresh installs
        for section in self.SECTIONS:
            self.add_section(section)

        # read the config
        self.configfile = config
        try:
            self.read(self.configfile)
        except Exception as e:
            # don't crash on a corrupted config file
            LOG.warn("Could not read the config file '%s': %s",
                     self.configfile, e)
Example #10
    def __init__(self, config):
        super(SoftwareCenterConfig, self).__init__()
        # imported here to avoid cycle
        from utils import safe_makedirs

        safe_makedirs(os.path.dirname(config))
        # we always want these sections, even on fresh installs
        for section in self.SECTIONS:
            self.add_section(section)

        # read the config
        self.configfile = config
        try:
            self.read(self.configfile)
        except Exception as e:
            # don't crash on a corrupted config file
            LOG.warn("Could not read the config file '%s': %s", self.configfile, e)
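For orientation, constructing the parser might look like this (the config path is purely illustrative):

import os

# Illustrative path only; the real location depends on the application.
config = SoftwareCenterConfig(
    os.path.expanduser('~/.config/software-center/config'))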
Example #11
def surface_normals(image):
    h, w, _ = image.shape
    image = cv2.resize(image, (256, 256))

    temp_dir = os.path.join(os.getcwd(), 'normals_temp')
    safe_makedirs(temp_dir)
    image_path = os.path.join(temp_dir, 'temp_image.png')
    normals_path = os.path.join(temp_dir, 'temp_normals.png')

    # Run Taskonomy command on temporarily saved image
    imsave(image_path, image)
    cmd = ('python taskonomy/taskbank/tools/run_img_task.py --task rgb2sfnorm '
           '--img "{0}" --store "{1}"'.format(image_path, normals_path))
    call(cmd, shell=True, stdout=DEVNULL, stderr=DEVNULL)

    # Load results from file and clean up temporary images and folders
    sf = cv2.imread(normals_path)
    sf = cv2.cvtColor(sf, cv2.COLOR_BGR2RGB)
    sf = cv2.resize(sf, (w, h))
    safe_deldirs(temp_dir)
    return sf
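safe_deldirs is the cleanup counterpart used here and again in Example #15; a minimal sketch, assuming it simply removes one or more directory trees and tolerates missing ones:

import shutil

def safe_deldirs(paths):
    # Assumed behavior: accept a single path or a list; ignore missing dirs.
    if isinstance(paths, str):
        paths = [paths]
    for path in paths:
        shutil.rmtree(path, ignore_errors=True)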
Example #12
        logfilter.add(filter_str)
    # attach our filter
    handler.addFilter(logfilter)


# setup global software-center logging
root = logging.getLogger()
fmt = logging.Formatter(
    "%(asctime)s - %(name)s - %(levelname)s - %(message)s", None)
handler = logging.StreamHandler()
handler.setFormatter(fmt)
root.addHandler(handler)
handler.addFilter(NullFilterThatWarnsAboutRootLoggerUsage())

# create log file
safe_makedirs(SOFTWARE_CENTER_CACHE_DIR)
logfile_path = os.path.join(SOFTWARE_CENTER_CACHE_DIR, "software-center.log")

# try to fix inaccessible s-c directory (#688682)
if not os.access(SOFTWARE_CENTER_CACHE_DIR, os.W_OK):
    logging.warn("found not writable '%s' dir, trying to fix" %
                 SOFTWARE_CENTER_CACHE_DIR)
    # if we have to do more renames, something else is wrong and it's
    # ok to crash later to learn about the problem
    for i in range(10):
        target = "%s.%s" % (SOFTWARE_CENTER_CACHE_DIR, i)
        if not os.path.exists(target):
            os.rename(SOFTWARE_CENTER_CACHE_DIR, target)
            break
    safe_makedirs(SOFTWARE_CENTER_CACHE_DIR)
Example #13
    def submit(self,
               src,
               dst,
               tag=None,
               is_data=False,
               commands={},
               no_submit=False):
        """Submit postprocessing jobs to HTCondor's DAGMan.

        DAGMan jobs can be retried automatically and should jobs fail, users can
        take advantage of the automatically generated rescue DAG for resubmitting
        only failed jobs.

        For more information, see the DAGMan documentation at
        http://research.cs.wisc.edu/htcondor/manual/latest/2_10DAGMan_Applications.html

        Parameters
        ----------
        src : url
            The XRootD url of the directory containing the ntuples to postprocess.
            Any .root files are automatically located by recursing through subdirectories.
        dst : url
            The XRootD url of the output directory for the postprocessed ntuples.
            The directory will only be created if jobs are submitted.
        tag : str, optional
            The name of the parent directory for the generated job submission
            files. The default is the current timestamp.
        is_data : bool, optional
            Whether the ntuples are data or Monte-Carlo. This determines the
            postprocessing modules in the postprocessing template script.
            The default is False for Monte-Carlo.
        commands : dict, optional
            HTCondor commands to include in the submit description file, in addition to the
            following which are handled automatically:
                * arguments
                * error
                * executable
                * getenv
                * log
                * output
                * queue
                * should_transfer_files
                * transfer_input_files
                * transfer_output_files
                * universe
            The default is no additional commands.
        no_submit : bool, optional
            If True, the job submission files are generated but not submitted
            to the HTCondor scheduler. The default is False.
        """
        # Create the directory tree for the job submission files.
        if not tag:
            tag = time.strftime('%Y%m%d_%H%M%S')
        dagdir = os.path.join(os.getcwd(), 'PostProcessDAGs', tag)
        dag_path = os.path.join(dagdir, 'dag')
        dag_exists = os.path.isfile(dag_path)
        if not dag_exists:
            logdir = os.path.join(dagdir, 'logs')
            utils.safe_makedirs(logdir)
            # Generate the job submission files.
            context = {
                'timestamp': time.strftime('%a %b %d %H:%M:%S %Z %Y'),
                'environ': os.environ,
                'urls': utils.xrdfs_locate_root_files(src),
                'destination': dst,
                'is_data': is_data,
                'commands': commands,
            }
            self._generate_from_template('dag_input_file', dag_path, context)
            self._generate_from_template('node_submit_description',
                                         os.path.join(dagdir, 'node'), context)
            self._generate_from_template('worker.sh',
                                         os.path.join(dagdir, 'worker.sh'),
                                         context)
            self._generate_from_template(
                'postprocess.py', os.path.join(dagdir, 'postprocess.py'),
                context)
            shutil.copy(os.path.join(TEMPLATE_DIR, 'keep_and_drop.txt'),
                        dagdir)
        # Unless otherwise directed, submit the DAG input file to DAGMan.
        if no_submit:
            if dag_exists:
                print('HTCondor DAG input file exists but not submitted: '
                      '{0}'.format(dag_path))
            else:
                print('HTCondor DAG input file generated but not submitted: '
                      '{0}'.format(dag_path))
        else:
            utils.xrdfs_makedirs(dst)
            subprocess.check_call([
                'condor_submit_dag', '-usedagdir', '-maxjobs', '250', dag_path
            ])
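A hypothetical call, for illustration only (the instance name, URLs, tag, and extra command are invented):

# submitter: an instance of the class that defines submit().
submitter.submit(
    src='root://example.site//store/user/someone/ntuples',
    dst='root://example.site//store/user/someone/postprocessed',
    tag='test_run',
    is_data=False,
    commands={'request_memory': '2 GB'},
    no_submit=True)  # generate the DAG files without submitting them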
Example #14
        RESULT_DIR = os.environ[args.result_dir[1:]]
    else:
        RESULT_DIR = args.result_dir

    model_path = os.path.join(RESULT_DIR, 'model')
    if (args.data_dir[0] == '$'):
        DATA_DIR = os.environ[args.data_dir[1:]]
    else:
        DATA_DIR = args.data_dir

    # Make directories for this run
    time_string = "cgan"  #time.strftime("%Y-%m-%d-%H-%M-%S")
    #model_path = os.path.join(config.model_path, time_string)
    model_path = os.path.join(RESULT_DIR, time_string, 'model')
    results_path = os.path.join(RESULT_DIR, time_string, 'results')
    utils.safe_makedirs(model_path)
    utils.safe_makedirs(results_path)

    # Initialise logging
    log_path = os.path.join(RESULT_DIR, time_string, 'logs')
    summary_writer = tf.summary.create_file_writer(
        log_path, flush_millis=10000
    )  #tf.contrib.summary.create_file_writer(log_path, flush_millis=10000)
    global_step = tf.compat.v1.train.get_or_create_global_step()

    # Load the dataset
    #(train_images, _), (test_images, _) = tf.keras.datasets.mnist.load_data()
    (train_images, _), (test_images, _) = prepare_data(DATA_DIR)

    # Add noise for condition input
    #train_inputs = artefacts.add_gaussian_noise(train_images, stdev=0.2, data_range=(0, 255)).astype('float32')
Example #15
def main(args):
    w, h = args.w, args.h

    frames_subdir = os.path.join(args.output_dir, 'frames')
    inpainted_subdir = os.path.join(args.output_dir, 'inpainted')
    masks_subdir = os.path.join(args.output_dir, 'masks')
    sf_subdir = os.path.join(args.output_dir, 'surface_normals')

    safe_makedirs([
        args.output_dir, frames_subdir, inpainted_subdir, masks_subdir,
        sf_subdir
    ])

    download_dir = 'temp'
    if not args.input_dir:
        dataset = VLOGDataset(fps=args.fps,
                              download_dir=download_dir,
                              start=args.start_video_idx,
                              n=args.n)
    else:
        dataset = DirectoryDataset(args.input_dir, fps=args.fps)

    detector = MaskRCNN(args.confidence_threshold,
                        args.area_threshold,
                        classes=args.classes)
    current = args.start_output_id

    # Initialize loggers
    info = logging.getLogger('info')
    info.setLevel(logging.DEBUG)
    fh = logging.FileHandler('info.log')
    fh.setLevel(logging.DEBUG)
    info.addHandler(fh)

    progress = logging.getLogger('progress')
    progress.setLevel(logging.DEBUG)
    fh = logging.FileHandler('progress.log')
    fh.setLevel(logging.DEBUG)
    progress.addHandler(fh)

    # TODO: log input arguments

    for video_id, frames in dataset:
        if frames:
            progress.info('Processing video ' + str(video_id) + '...')
            with tqdm(total=len(frames)) as t:
                for frame in frames:
                    scores, masks = detector.detect(frame)
                    for score, mask in zip(scores, masks):
                        inpainted, dilated = generative_inpaint(
                            frame, mask, args.inpaint_model_dir, dilate=True)
                        sf = surface_normals(inpainted)
                        imsave(
                            os.path.join(inpainted_subdir,
                                         str(current) + '.png'),
                            resize_pad(inpainted, (h, w)))
                        imsave(
                            os.path.join(masks_subdir,
                                         str(current) + '.png'),
                            resize_pad(mask, (h, w)))
                        imsave(
                            os.path.join(frames_subdir,
                                         str(current) + '.png'),
                            resize_pad(frame, (h, w)))
                        imsave(os.path.join(sf_subdir,
                                            str(current) + '.png'),
                               resize_pad(sf, (h, w)))
                        # imsave(os.path.join(masks_subdir, str(current) + '_dilated.png'), resize_pad(dilated, (h, w)))

                        progress.info("\tSaved item " + str(current) +
                                      " with score " + str(score))
                        current += 1
                    t.update()
    safe_deldirs(download_dir)
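Note that this example passes a list of paths to safe_makedirs, so this project's variant evidently accepts either a single path or a list; a sketch under that assumption:

import os

def safe_makedirs(paths):
    # Assumption: accept a single path or a list of paths.
    if isinstance(paths, str):
        paths = [paths]
    for path in paths:
        os.makedirs(path, exist_ok=True)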