Example #1
    def as_tmp_file(self, url, mode):
        """
        Return temporary representation of a file.

        Args:
            url (str): apyfal.storage URL of the file.
            mode (str): Access mode. 'r' or 'w'.

        Returns:
            str or file-like object: temporary object.
        """
        # Generates randomized temporary filename
        local_path = _os_path.join(self._tmp_dir, str(_uuid()))

        # Gets input file
        if 'r' in mode:
            _srg.copy(url, local_path)

        # Yields local temporary path
        yield local_path

        # Sends output file
        if 'w' in mode:
            _srg.copy(local_path, url)

        # Clears temporary file
        _remove(local_path)
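
The method above is a generator (it uses yield), so in the full class it is presumably wrapped with contextlib.contextmanager, which is not shown in the snippet. The sketch below reproduces the same stage/yield/publish/cleanup pattern using only the standard library (shutil in place of apyfal.storage); the function signature, the tmp_dir default, and the added try/finally are illustrative assumptions, not taken from the source.

import os
import shutil
import uuid
import tempfile
from contextlib import contextmanager

@contextmanager
def as_tmp_file(path, mode, tmp_dir=tempfile.gettempdir()):
    # Randomized temporary file name
    local_path = os.path.join(tmp_dir, str(uuid.uuid4()))
    if 'r' in mode:
        # Stage the input locally before the caller reads it
        shutil.copy(path, local_path)
    try:
        # The caller works on the local copy
        yield local_path
        if 'w' in mode:
            # Publish the result back to the target path
            shutil.copy(local_path, path)
    finally:
        # Clean up the temporary file; the try/finally also covers the case
        # where the caller raises, which the original snippet does not handle
        if os.path.exists(local_path):
            os.remove(local_path)

# Usage (illustrative path):
# with as_tmp_file('/data/output.bin', 'w') as tmp:
#     open(tmp, 'wb').write(b'result')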
Example #2
def _pyload_arrays(names):
    arrays = []
    for n in names:
        try:
            arrays.append(_aload(n))
        finally:
            # Delete each temporary file, even if loading it failed
            _remove(n)
    # The files share one temporary directory; remove it once it is empty
    _rmdir(_path.dirname(names[0]))
    return arrays
Example #3
 def delete(self, recursive=False):
     if self.is_file():
         if isinstance(self._os_level, int):
             _close(self._os_level)
         _remove(str(self))
     elif self.is_dir():
         if recursive:
             _rmtree(str(self))
         else:
             self.rmdir()
Example #4
def _jload_datasets(names):
    datasets = []
    ldr = _get_gateway().jvm.org.eclipse.triquetrum.scisoft.analysis.io.NumPyFileLoader
    for n in names:
        try:
            datasets.append(ldr(n).loadFile().getDataset(0))
        finally:
            # Delete each temporary file, even if loading it failed
            _remove(n)
    # The files share one temporary directory; remove it once it is empty
    _rmdir(_path.dirname(names[0]))
    return datasets
Example #5
def _pyload_arrays(names):
    arrays = []
    for n in names:
        try:
            arrays.append(_aload(n))
        finally:
            _remove(n)
    if len(names) > 0:
        _rmdir(_path.dirname(names[0]))
    return arrays
Example #6
def clean_results(*args):
    """
    Remove the file paths passed as arguments.

    :param \*args: File paths to remove.
    """
    for fp in args:
        if _path.isfile(fp) and _path.exists(fp):
            _remove(fp)
Example #7
def get_cli_cache(name, recursive=False):
    """
    Get an object from disk cache.

    Args:
        name (str): Cache name.
        recursive (bool): If True, recursively search for cached values,
            trying progressively shorter "name" prefixes.

    Returns:
        dict or list or None: object, None if object is not cached.
    """
    if not is_cli():
        return None

    # List cached values candidates
    timestamp = _time()
    candidates = {}
    for filename in _listdir(CACHE_DIR):
        path = _join(CACHE_DIR, filename)
        cached_name, expiry = filename.rsplit('_', 1)

        # Remove expired cached files
        if int(expiry) < timestamp:
            try:
                _remove(path)
                continue
            except OSError:  # pragma: no cover
                # Should not normally happen; the file may already have been
                # removed by another accelpy instance
                continue

        # Memorize candidates cached files
        candidates[cached_name] = path

    if not candidates:
        return

    # Get cached value, or return None
    if recursive:
        names = []
        while name and not name.endswith('|'):
            names.append(name)
            name = name[:-1]
        names.append(name)

    else:
        names = (name,)

    for hashed_name in (hash_cli_name(name) for name in names):
        try:
            return json_read(candidates[hashed_name])
        except KeyError:
            continue
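
In the recursive branch above, the while loop tries the full cache name first and then progressively shorter prefixes, stopping at the last prefix that ends with the '|' character; each candidate is then hashed with hash_cli_name and looked up. A minimal standalone illustration of the prefix generation (the sample name is arbitrary):

name = 'abc|xyz'
names = []
while name and not name.endswith('|'):
    names.append(name)
    name = name[:-1]
names.append(name)
print(names)  # ['abc|xyz', 'abc|xy', 'abc|x', 'abc|']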
Example #8
 def run(self):
     try:
         print("removing tosdb/_tosdb.py ...")
         _remove(_OUTPUT_PATH)
     except:
         pass
     try:
         print("removing ./build ...")
         _rmtree(_path_join(_OUR_PATH, 'build'))
     except:
         pass
     super().run()
Example #9
 def run(self):                  
     try:
         print("removing tosdb/_tosdb.py ...")
         _remove(_OUTPUT_PATH)
     except:
         pass        
     try:
         print("removing ./build ...")
         _rmtree(_path_join(_OUR_PATH, 'build'))
     except:
         pass              
     super().run()  
Example #10
def change_location(src, tgt, move=False, verbose=True):
    '''
    Copies/moves/deletes locations

    :param src:
        Source location where to copy from
    :param tgt:
        Target location where to copy to

        * To backup `src`, set `tgt` explicitly to ``True``. \
        `tgt` will be set to `src` + '_backup_' + \
        :func:`util.system.get_timestamp` then

    :param move:
        Deletes original location after copy (a.k.a. move)

        * To delete `src` , set `tgt` explicitly to ``False`` \
        and `move` to ``True`` (be careful!!1!)

    :param verbose:
        Show warnings
    '''

    from photon.util.system import shell_notify

    if _path.exists(src):
        if tgt:
            if _path.isfile(src):
                _copy2(src, search_location(
                    tgt, create_in=_path.dirname(tgt), verbose=verbose)
                )
            else:
                for l in _listdir(src):
                    change_location(
                        _path.abspath(_path.join(src, l)),
                        _path.abspath(_path.join(tgt, l))
                    )
        if move:
            if _path.isdir(src) and not _path.islink(src):
                _rmtree(src)
            else:
                _remove(src)
        if verbose:
            shell_notify(
                '%s location' % (
                    'deleted'
                    if not tgt and move else
                    'moved'
                    if move else
                    'copied'
                ),
                more=dict(src=src, tgt=tgt)
            )
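
A hypothetical usage sketch for change_location; the import path and the file names are assumptions, not taken from the source:

# Hypothetical usage (import path assumed; adjust to the actual photon package layout)
from photon.util.locations import change_location

# Copy a directory tree to a new location
change_location('/tmp/project', '/tmp/project_copy')

# Back up a single file: tgt=True derives a '<src>_backup_<timestamp>' target
change_location('/tmp/project/config.yaml', True)

# Delete a location outright: no copy target, but move=True removes the source
change_location('/tmp/old_data', False, move=True)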
Example #11
def _jload_datasets(names):
    num = len(names)
    gw = get_gateway()
    datasets = gw.new_array(gw.jvm.org.eclipse.january.dataset.Dataset, num)
    ldr = gw.jvm.uk.ac.diamond.scisoft.analysis.io.NumPyFileLoader
    for i in range(num):
        n = names[i]
        try:
            datasets[i] = ldr(n).loadFile().getDataset(0)
        finally:
            _remove(n)
    if num > 0:
        _rmdir(_path.dirname(names[0]))
    return datasets
Example #12
def _jload_datasets(names):
    num = len(names)
    gw = get_gateway()
    datasets = gw.new_array(gw.jvm.org.eclipse.january.dataset.Dataset, num)
    ldr = gw.jvm.uk.ac.diamond.scisoft.analysis.io.NumPyFileLoader
    for i in range(num):
        n = names[i]
        try:
            datasets[i] = ldr(n).loadFile().getDataset(0)
        finally:
            _remove(n)
    if num > 0:
        _rmdir(_path.dirname(names[0]))
    return datasets
Example #13
 def delete(self):
     if self.path.exists():
         _close(self._os_level)
         _remove(str(self.path))
Example #14
 def os_remove(path):
     return os.remove(longpathify(uni(path)))
Example #15
 def os_remove(path):
     return os.remove(longpathify(uni(path)))
Example #16
def rm(path):
    if exist(path):
        _remove(path)
Example #17
    def _run_executable(self,
                        mode,
                        input_file=None,
                        output_file=None,
                        input_json=None,
                        output_json=None,
                        parameters=None,
                        extra_args=None):
        """
        Run accelerator executable.

        Args:
            mode (str): Accelerator mode ("0": start, "1": process, "2": stop)
            input_file (str): Input data file path.
            output_file (str): Output data file path.
            input_json (str): Input JSON file path.
            output_json (str): Output JSON file path.
            parameters (dict): Parameters dict.
            extra_args (list of str): Extra accelerator arguments.

        Returns:
            dict or None: Content of output_json if any.
        """
        # Command base
        command = ['sudo', _cfg.ACCELERATOR_EXECUTABLE, '-m', mode]

        # Adds extra command line arguments
        if extra_args:
            command.extend(extra_args)

        # Input file
        if input_file:
            command += ['-i', input_file]

        # Output file
        if output_file:
            command += ['-o', output_file]

        # Input JSON file
        if input_json and parameters:

            # Convert "reset" to int
            parameters['app']['reset'] = int(parameters['app'].get(
                'reset', False))

            # Write file
            input_json = _join(self._tmp_dir, input_json)
            with open(input_json, 'wt') as json_input_file:
                _json.dump(parameters, json_input_file)
            command += ['-j', input_json]

        # Output JSON file
        if output_json:
            output_json = _join(self._tmp_dir, output_json)
            command += ['-p', output_json]

        # Runs command
        with self._accelerator_lock:
            _call(command, check_file=output_json)

        # Cleanup input JSON file
        if input_json:
            _remove(input_json)

        # Gets result from output JSON file
        if output_json:
            with open(output_json, 'rt') as json_output_file:
                response = _json.load(json_output_file)

            # Cleanup output JSON file
            _remove(output_json)
            return response
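
A hypothetical invocation of _run_executable, to show where each argument ends up on the command line; the object name, paths, and parameter values are illustrative only:

# Hypothetical call (names and paths are not from the source)
response = accelerator._run_executable(
    mode='1',                              # "1": process
    input_file='/dev/shm/input.bin',       # passed as '-i'
    output_file='/dev/shm/output.bin',     # passed as '-o'
    input_json='input.json',               # written to self._tmp_dir, passed as '-j', then removed
    output_json='output.json',             # passed as '-p', read back as the return value, then removed
    parameters={'app': {'reset': False}},  # dumped into the input JSON file
)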