Example #1
 def mount_fs(self,
              dst_path: str,
              fs_url: str = None,
              owner_uid: Optional[int] = 0,
              group_gid: Optional[int] = 0,
              perms: Optional[Union[Permissions, int]] = 0o755) -> subfs.SubFS:
     """
     To be called to mount individual filesystems.
     :param fs_url: Location/URL for the file system that is to be mounted.
     :param dst_path: Place in the Conpot's file system where the files would be placed. This should be relative to
     FS root.
     :param owner_uid: The owner `user` **UID** of the directory and the sub directory. Default is root/
     :param group_gid: The group 'group` to which the directory beings. Defaults to root.
     :param perms: Permission UMASK
     """
     path = self.norm_path(dst_path)
     if self.exists(path) and self.isdir(path):
         if not fs_url:
             new_dir = self.create_jail(path)
         else:
             temp_fs = open_fs(fs_url=fs_url)
             with temp_fs.lock():
                 new_dir = self.opendir(self.norm_path(dst_path), factory=SubAbstractFS)
                 mirror.mirror(src_fs=temp_fs, dst_fs=new_dir)
                 self._cache.update({path: info for path, info in self.walk.info(
                     namespaces=['basic', 'access', 'details', 'stat'])})
             del temp_fs  # delete the instance since no longer required
         new_dir.default_uid, new_dir.default_gid = owner_uid, group_gid
         new_dir.chown('/', uid=owner_uid, gid=group_gid, recursive=True)
         new_dir.chmod('/', mode=perms, recursive=True)
         return new_dir
     else:
         raise fs.errors.DirectoryExpected('{} path does not exist'.format(path))
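Stripped of the ownership and cache bookkeeping, mount_fs reduces to the open_fs + fs.mirror pattern. A minimal self-contained sketch of that core, with in-memory filesystems standing in for Conpot's classes:

from fs import open_fs
from fs import mirror

# Build a throwaway source tree in memory.
src = open_fs('mem://')
src.makedirs('/etc')
src.writetext('/etc/motd', 'welcome\n')

# "Mount" it by mirroring into a subdirectory of a destination filesystem.
dst = open_fs('mem://')
dst.makedir('/ftp')
with src.lock():
    mirror.mirror(src_fs=src, dst_fs=dst.opendir('/ftp'))

print(dst.readtext('/ftp/etc/motd'))  # -> welcome
src.close()
dst.close()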
Example #2
 def setUpClass(cls):
     try:
         cls.ebifs = HTTPDownloader(
             fs.open_fs(
                 "ftp://ftp.ebi.ac.uk/pub/databases/metabolights/studies/public/"
             )
         )
     except fs.errors.CreateFailed:
         raise unittest.SkipTest("cannot connect to the EBI FTP")
Example #3
 def __init__(self, data_fs_path=None):
     self._conpot_vfs = dict()  # maps mount paths to the protocol VFS instances, for easy access to individually mounted protocols
     if data_fs_path is None:
         try:
             self.data_fs = open_fs(os.path.join('/'.join(conpot.__file__.split('/')[:-1]), 'tests', 'data',
                                                 'data_temp_fs'))
         except fs.errors.FSError:
             logger.exception('Unable to create persistent storage for Conpot. Exiting')
             sys.exit(3)
     else:
         try:
             assert data_fs_path and isinstance(data_fs_path, str)
             self.data_fs = open_fs(data_fs_path)  # Specify the place where you would place the uploads
         except AssertionError:
             logger.exception('Incorrect FS url specified. Please check documentation for more details.')
             sys.exit(3)
         except fs.errors.CreateFailed:
             logger.exception('Unexpected error occurred while creating Conpot FS.')
             sys.exit(3)
     self.protocol_fs = None
Example #4
File: api.py Project: edx/edx-val
def create_transcript_objects(xml, edx_video_id, resource_fs, static_dir, external_transcripts):
    """
    Create VideoTranscript objects.

    Arguments:
        xml (Element): lxml Element object.
        edx_video_id (str): Video id of the video.
        resource_fs (OSFS): Import file system.
        static_dir (str): The Directory to retrieve transcript file.
        external_transcripts (dict): A dict containing the list of names of the external transcripts.
            Example:
            {
                'en': ['The_Flash.srt', 'Harry_Potter.srt'],
                'es': ['Green_Arrow.srt']
            }
    """
    # The file system should not be rooted at the /drafts directory.
    with open_fs(resource_fs.root_path.split('/drafts')[0]) as file_system:
        # First import VAL transcripts.
        for transcript in xml.findall('.//transcripts/transcript'):
            try:
                file_format = transcript.attrib['file_format']
                language_code = transcript.attrib['language_code']
                transcript_file_name = u'{edx_video_id}-{language_code}.{file_format}'.format(
                    edx_video_id=edx_video_id,
                    language_code=language_code,
                    file_format=file_format
                )

                import_transcript_from_fs(
                    edx_video_id=edx_video_id,
                    language_code=transcript.attrib['language_code'],
                    file_name=transcript_file_name,
                    provider=transcript.attrib['provider'],
                    resource_fs=file_system,
                    static_dir=static_dir
                )
            except KeyError:
                logger.warn("VAL: Required attributes are missing from xml, xml=[%s]", etree.tostring(transcript).strip())

        # This won't overwrite transcript for a language which is already present for the video.
        for language_code, transcript_file_names in six.iteritems(external_transcripts):
            for transcript_file_name in transcript_file_names:
                import_transcript_from_fs(
                    edx_video_id=edx_video_id,
                    language_code=language_code,
                    file_name=transcript_file_name,
                    provider=TranscriptProviderType.CUSTOM,
                    resource_fs=file_system,
                    static_dir=static_dir
                )
Example #5
 def __init__(self,
              src_path: str,
              create_mode: int = 0o777,      # Default file system permissions.
              temp_dir: Union[str, None] = None,
              identifier: Optional[str] = '__conpot__',
              auto_clean: Optional[bool] = True,
              ignore_clean_errors: Optional[bool] = True) -> None:
     self._cwd = self.getcwd()  # keep track of the current working directory
     self._cache = {}   # Storing all cache of the file system
     self.identifier = identifier.replace('/', '-')
     self._auto_clean = auto_clean
     self._ignore_clean_errors = ignore_clean_errors
     self.temp_dir = temp_dir
     self._cleaned = False
     self.built_cache = False
     # Create our file system
     self._temp_dir = tempfile.mkdtemp(
         prefix=(self.identifier or "ConpotTempFS"),
         dir=self.temp_dir
     )
     # open various filesystems that would be used by Conpot
     try:
         self.vfs = open_fs(self._temp_dir)
         super(AbstractFS, self).__init__(self.vfs)
     except fs.errors.FSError as fs_err:
         logger.exception('File System exception occurred! {}'.format(fs_err))
     # Copy all files from src_path into our file system
     logger.info('Initializing Virtual File System at {}. Source specified: {}. Please wait while the '
                 'system copies all specified files'.format(self._temp_dir, src_path))
     self.utime = self.settimes  # utime maps to settimes
     # keep records related to users and groups
     self.default_uid = 0
     self.default_gid = 0
     self.default_perms = create_mode
     self._users = {
         0: {'user': 'root'}
     }
     self._grps = {
         0: {'group': 'root'}
     }
     # simple dictionary linking users to groups ->
     self._user_grps = {
         0: {0}      # --> gid: set(uids)
     }
     self._initialize_fs(src_path=src_path)
     # fixme: kind of hack-ish. Find the correct way of doing this.
     self._wrap_fs._meta['supports_rename'] = False
Example #6
def file_from_zip(zip_file_path, filename, mode='r'):
    # eg rootpath =
    # FastQC.out/15-02380-CE11-T13-L1_AACCAG_L001_R1_001_fastqc.zip
    # ie   fastq_filename + '_fastqc.zip'

    # fs.open_fs magic detects a filesystem type if we give it
    # a URI-like string, eg zip://foo/bla/file.zip
    # (eg https://commons.apache.org/proper/commons-vfs/filesystems.html)
    # So we munge the path to a zip file to become a compatible URI
    # (an http, ftp, webdav url could also be used)
    if splitext(zip_file_path)[1] == '.zip':
        zip_file_path = 'zip://' + zip_file_path

    # NB: the filesystem must outlive the returned handle, so it is not
    # closed here when a match is found; the caller owns the open file.
    vfs = open_fs(zip_file_path)
    for fn in vfs.walk.files():
        if os.path.basename(fn) == filename:
            return vfs.open(fn, mode)
    vfs.close()
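A possible invocation of the helper above, assuming it is in scope; the archive is built on the fly so the snippet is self-contained, and the file names are illustrative:

import zipfile

with zipfile.ZipFile('sample_fastqc.zip', 'w') as zf:
    zf.writestr('sample_fastqc/fastqc_data.txt', 'Total Sequences\t1000\n')

fh = file_from_zip('sample_fastqc.zip', 'fastqc_data.txt')
print(fh.read())
fh.close()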
Example #7
 def _initialize_fs(self, src_path: str) -> None:
         """
         Copies all data into Conpot's created fs folder and builds up the cache.
         :param src_path: FS URLS
         """
         # copy all contents from the source path the filesystem.
         src_fs = open_fs(src_path)
         logger.debug('Building up file system: copying contents from the source path {}'.format(src_path))
         with src_fs.lock():
             mirror.mirror(src_fs=src_fs, dst_fs=self.vfs)
             self._cache.update({path: info for path, info in self.walk.info(namespaces=['basic', 'access',
                                                                                         'details', 'stat'])})
             self._cache['/'] = self._wrap_fs.getinfo('/', namespaces=['basic', 'access', 'details', 'stat', 'link'])
             self.chown('/', self.default_uid, self.default_gid, recursive=True)
             self.chmod('/', self.default_perms, recursive=True)
             self.built_cache = True   # FS has been built. Now all info must be accessed from cache.
             src_fs.close()
         del src_fs
Example #8
 def test_empty_mirror(self):
     m1 = open_fs("mem://")
     m2 = open_fs("mem://")
     mirror(m1, m2, workers=self.WORKERS, preserve_time=True)
     self.assert_compare_fs(m1, m2)
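The test above only exercises the empty case; a sketch of what mirror does when the trees differ, assuming only the fs package:

from fs import open_fs
from fs.mirror import mirror

src, dst = open_fs('mem://'), open_fs('mem://')
src.writetext('/keep.txt', 'data')
dst.writetext('/stale.txt', 'old')

mirror(src, dst)
print(dst.exists('/keep.txt'))   # True
print(dst.exists('/stale.txt'))  # False: extras on the destination are removed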
Example #9
 def setUp(self):
     self.destfs = fs.open_fs("temp://")
     self.tmpdir = self.destfs.getsyspath("/")
Example #10
#!/usr/bin/python
import fs  # pyfilesystem library used for the file operations
import sys  # libraries used to read the command-line options
import getopt

home_fs = fs.open_fs('~/')  # default directory

HELP = ('usage: python <command> <file name>\n'
        'options:\n'
        '         -l --listar   list the current directory\n'
        '         -c --crear    create a file\n'
        '         -s --suprime  delete the file\n'
        '         -m --muestra  show the contents of the file\n'
        '         -e --escribe  write to the file\n'
        '         -a --agrega   append to the end of the file\n'
        '         -h --help     help\n')


def get_command_line_options():
    """Funcion para elegir las opciones del programa"""

    argv = sys.argv[1:]
    try:
        opts, args = getopt.getopt(argv, 'lhcsmea:', [
            'listar', 'help', 'crear', 'suprime', 'muestra', 'escribe',
            'agrega'
        ])
    except getopt.GetoptError:
        print(HELP)
        sys.exit(2)
    for opt, arg in opts:
Example #11
def main(argv=None, stream=None):
    """Run from the command line interface.

    Arguments:
        argv (list): The positional arguments to read. Defaults to
            `sys.argv` to use CLI arguments.
        stream (~io.IOBase): A file where to write error messages.
            Leave to `None` to use the `~coloredlogs.StandardErrorHandler`
            for logs, and `sys.stderr` for error messages.

    Returns:
        int: An error code, or 0 if the program executed successfully.
    """

    _print = functools.partial(print, file=stream or sys.stderr)

    # Parse command line arguments
    try:
        args = docopt.docopt(HELP,
                             argv,
                             version='instalooter {}'.format(__version__))
    except docopt.DocoptExit as de:
        _print(de)
        return 1

    # Print usage and exit if required (docopt does not do this !)
    if args['--usage']:
        _print(USAGE)
        return 0

    # Set the logger up with the requested logging level
    level = "ERROR" if args['--quiet'] else args.get("--loglevel", "INFO")
    coloredlogs.install(level=int(level) if level.isdigit() else level,
                        stream=stream,
                        logger=logger)

    # Check the requested logging level
    if args['-W'] not in WARNING_ACTIONS:
        _print("Unknown warning action:", args['-W'])
        _print("    available actions:", ', '.join(WARNING_ACTIONS))
        return 1

    with warnings.catch_warnings():
        warnings.simplefilter(args['-W'])

        try:
            # Run in batch mode
            if args['batch']:
                with open(args['<batch_file>']) as batch_file:
                    batch_runner = BatchRunner(batch_file, args)
                batch_runner.run_all()
                return 0

            # Login if requested
            if args['login']:
                try:
                    if not args['--username']:
                        args['--username'] = six.moves.input('Username: ')
                    login(args)
                    logger.log(logutils.SUCCESS, 'Logged in.')
                except ValueError as ve:
                    logger.error(ve)
                return 0

            # Logout if requested
            if args['logout']:
                if InstaLooter._cachefs.exists(InstaLooter._COOKIE_FILE):
                    InstaLooter._logout()
                    logger.log(logutils.SUCCESS, 'Logged out.')
                else:
                    warnings.warn('Cookie file not found.')
                return 0

            # Normal download mode:
            if args['user']:
                looter_cls = ProfileLooter
                target = args['<profile>']
            elif args['hashtag']:
                looter_cls = HashtagLooter
                target = args['<hashtag>']
            elif args['post']:
                looter_cls = PostLooter
                target = args['<post_token>']
            else:
                raise NotImplementedError("TODO")

            # Instantiate the looter
            looter = looter_cls(
                target,
                add_metadata=args['--add-metadata'],
                get_videos=args['--get-videos'],
                videos_only=args['--videos-only'],
                jobs=int(args['--jobs']) if args['--jobs'] is not None else 16,
                template=args['--template'],
                dump_json=args['--dump-json'],
                dump_only=args['--dump-only'],
                extended_dump=args['--extended-dump'])

            # Attempt to login and extract the timeframe
            try:
                if args['--username']:
                    login(args)
                if args['--time']:
                    args['--time'] = get_times_from_cli(args['--time'])
                if args['--num-to-dl']:
                    args['--num-to-dl'] = int(args['--num-to-dl'])
            except ValueError as ve:
                _print("invalid format for --time parameter:", args["--time"])
                _print("    (format is [D]:[D] where D is an ISO 8601 date)")
                return 1

            logger.log(logutils.DEBUG, "Opening destination filesystem")
            dest_url = args.get('<directory>') or os.getcwd()
            dest_fs = fs.open_fs(dest_url, create=True)

            logger.log(logutils.NOTICE,
                       "Starting download of `{}`".format(target))
            n = looter.download(
                destination=dest_fs,
                media_count=args['--num-to-dl'],
                timeframe=args['--time'],
                new_only=args['--new'],
                pgpbar_cls=None if args['--quiet'] else TqdmProgressBar,
                dlpbar_cls=None if args['--quiet'] else TqdmProgressBar)
            if n > 1:
                logger.log(logutils.SUCCESS, "Downloaded {} posts.".format(n))
            elif n == 1:
                logger.log(logutils.SUCCESS, "Downloaded {} post.".format(n))

        except (Exception, KeyboardInterrupt) as e:
            from .threadutils import threads_force_join, threads_count
            # Show error traceback if any
            if not isinstance(e, KeyboardInterrupt):
                logger.log(logutils.CRITICAL, e)
                if args["--traceback"]:
                    traceback.print_exc()
            else:
                logger.log(logutils.CRITICAL, "Interrupted")
            # Close remaining threads spawned by InstaLooter.download
            count = threads_count()
            if count:
                logger.log(logutils.NOTICE,
                           "Terminating {} remaining workers...".format(count))
                threads_force_join()
            # Return the error number if any
            errno = e.errno if hasattr(e, "errno") else None
            return errno if errno is not None else 1

        else:
            return 0

        finally:
            logger.log(logutils.DEBUG, "Closing destination filesystem")
            try:
                dest_fs.close()
            except Exception:
                pass
Example #12
 def teardown(self):
     if fs.open_fs(MSUI_CONFIG_PATH).exists("msui_settings.json"):
         fs.open_fs(MSUI_CONFIG_PATH).remove("msui_settings.json")
     config_file = os.path.join(self.sample_path,
                                'empty_msui_settings.json.sample')
     read_config_file(config_file)
Example #13
import argparse

from fs import open_fs

parser = argparse.ArgumentParser()
# nargs allows this positional argument to be optional
parser.add_argument('dir', type=str, nargs='?', default='~/Downloads/')
args = parser.parse_args()

folder = open_fs(args.dir)
with folder:
    for path in folder.walk.files():
        print(path)
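walk.files also accepts glob filters and a depth limit, which avoids printing every file; a variant of the listing above (patterns and depth are illustrative):

from fs import open_fs

with open_fs('~/Downloads/') as folder:
    for path in folder.walk.files(filter=['*.pdf', '*.zip'], max_depth=2):
        print(path)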
Example #14
def svg_to_pdf(fname_or_drawing=None,
               text=None,
               data=None,
               file=None,
               fit_drawing=False,
               latex_width=None,
               out_name=None,
               only_final=True,
               config=DocumentConfig('standalone'),
               **pdf_args):
    '''Requires the inkscape command line tool.'''
    if ((fname_or_drawing is not None) + (text is not None) +
        (data is not None) + (file is not None)) != 1:
        raise TypeError(
            'Specify exactly one of fname_or_drawing, text, data, or file.')

    fname = None
    if fname_or_drawing:
        if isinstance(fname_or_drawing, (str, bytes, Path)):
            fname = fname_or_drawing
        else:
            text = fname_or_drawing.asSvg()

    proj = LatexProject()
    proj.add_file('image.svg', text=text, data=data, file=file, fname=fname)

    if latex_width is None:
        width_str = ''
    else:
        width_str = r'\def\svgwidth' + f'{{{latex_width}}}\n'
    content = BasicContent(width_str + r'\input{image_svg-tex.pdf_tex}',
                           svg_packages, svg_commands)
    doc = content.as_document('main.tex', config=config)
    proj.add_file(doc)

    with tempfile.TemporaryDirectory() as tmp_dir:
        tmp_fs = fs.open_fs(tmp_dir, writeable=False)
        fs.copy.copy_file(proj.proj_fs, 'image.svg', tmp_fs, 'image.svg')
        if fit_drawing:
            options = ('-z', '-D', '--export-latex', '--export-type=pdf')
        else:
            options = ('-z', '--export-latex', '--export-type=pdf')
        _run_inkscape(proj, 'image.svg', tmp_dir, options=options)
        tmp_fs.remove('image.svg')
        proj.proj_fs.remove('image.svg')

        r = proj.compile_pdf(
            options=[
                '-shell-escape', '-halt-on-error', '-file-line-error',
                '-interaction', 'nonstopmode'
            ],
            tmp_dir=tmp_dir,
            #inkscape_list=['image.svg'],
            **pdf_args)

        if out_name is not None:
            if out_name.endswith('.svg') or out_name.endswith('.pdf'):
                out_name = out_name[:-4]

            r.save(out_name + '.pdf')

            def save_intermediate(fname, ext):
                out_fname = out_name + ext
                if tmp_fs.exists(fname):
                    fs.copy.copy_file(tmp_fs, fname, '.', out_fname)

            if not only_final:
                save_intermediate('image_svg-tex.pdf_tex', '_svg-tex.pdf_tex')
                save_intermediate('image_svg-tex.pdf', '_svg-tex.pdf')
    return r
Example #15
 def _cachefs(cls):
     """~fs.base.FS: the cache filesystem.
     """
     url = "usercache://{}:{}:{}".format(__appname__, __author__, __version__)
     return fs.open_fs(url, create=True)
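The usercache:// opener is shorthand for fs.appfs.UserCacheFS; an equivalent construction, with placeholder identifiers rather than the application's real ones:

from fs.appfs import UserCacheFS

cache = UserCacheFS('myapp', author='me', version='1.0')
print(cache.getsyspath('/'))  # the per-user cache directory backing the FS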
Example #16
                                        -----------------------------
                                            Program by xxxxx
                                            Core by tensorflow  '''),
                                     formatter_class=argparse.RawDescriptionHelpFormatter,
                                     epilog="copyright: (c) 2017-2018 svaxm")

    parser.add_argument('image', type=str, help='Assign the image path.', default="./lp.png")
    parser.add_argument("-m", '--mode', type=str, help='change mode: enum values[PRO, ALG]', default="PRO", dest="mode")
    parser.add_argument('-n', "--network", dest="url_from_network", action='append', default=[],
                        help='Add image url to a list')
    args = parser.parse_args()

    # alg_core = TEAlg(pb_path_1="model/frozen_model.pb")
    alg_core = TEAlg(pb_path_1="../model/frozen_model.pb")

    root_fs = open_fs(path_root)
    # for image_path in root_fs.walk.files(filter=["*.jpg"]):
    #     # print(image_path)
    #
    #     image = path_root + image_path
        # print(image)

    result = None

    if args.url_from_network:
        import re
        try:
            import urllib.request as urllib2
        except ImportError:
            import urllib2
        from io import BytesIO
Example #17
def OpenFsFromPaths(paths: typing.List[str]):
    return [fs.open_fs(path) for path in paths]
Example #18
def create_mss_settings_file(content):
    with fs.open_fs(MSS_CONFIG_PATH) as file_dir:
        file_dir.writetext("mss_settings.json", content)
Example #19
def create_user_workspace(user_workspace_path):
    """Create user workspace directory."""
    reana_fs = fs.open_fs(app.config["SHARED_VOLUME_PATH"])
    if not reana_fs.exists(user_workspace_path):
        reana_fs.makedirs(user_workspace_path)
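The exists()/makedirs() pair leaves a small race window; makedirs(recreate=True) folds the check into one call. A variant sketch, where shared_volume_path stands in for app.config["SHARED_VOLUME_PATH"]:

import fs

def create_user_workspace_v2(user_workspace_path, shared_volume_path='/tmp/shared'):
    # recreate=True makes the call a no-op if the directory already exists.
    with fs.open_fs(shared_volume_path, create=True) as reana_fs:
        reana_fs.makedirs(user_workspace_path, recreate=True)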
Example #20
    def test_readonly(self):
        mem_fs = open_fs('mem://')
        fs = wrap.read_only(mem_fs)

        with self.assertRaises(errors.ResourceReadOnly):
            fs.open('foo', 'w')

        with self.assertRaises(errors.ResourceReadOnly):
            fs.appendtext('foo', 'bar')

        with self.assertRaises(errors.ResourceReadOnly):
            fs.appendbytes('foo', b'bar')

        with self.assertRaises(errors.ResourceReadOnly):
            fs.makedir('foo')

        with self.assertRaises(errors.ResourceReadOnly):
            fs.move('foo', 'bar')

        with self.assertRaises(errors.ResourceReadOnly):
            fs.openbin('foo', 'w')

        with self.assertRaises(errors.ResourceReadOnly):
            fs.remove('foo')

        with self.assertRaises(errors.ResourceReadOnly):
            fs.removedir('foo')

        with self.assertRaises(errors.ResourceReadOnly):
            fs.setinfo('foo', {})

        with self.assertRaises(errors.ResourceReadOnly):
            fs.settimes('foo', {})

        with self.assertRaises(errors.ResourceReadOnly):
            fs.copy('foo', 'bar')

        with self.assertRaises(errors.ResourceReadOnly):
            fs.create('foo')

        with self.assertRaises(errors.ResourceReadOnly):
            fs.settext('foo', 'bar')

        with self.assertRaises(errors.ResourceReadOnly):
            fs.setbytes('foo', b'bar')

        with self.assertRaises(errors.ResourceReadOnly):
            fs.makedirs('foo/bar')

        with self.assertRaises(errors.ResourceReadOnly):
            fs.touch('foo')

        with self.assertRaises(errors.ResourceReadOnly):
            fs.setbinfile('foo', None)

        with self.assertRaises(errors.ResourceReadOnly):
            fs.setfile('foo', None)

        self.assertTrue(mem_fs.isempty('/'))
        mem_fs.setbytes('file', b'read me')
        with fs.openbin('file') as read_file:
            self.assertEqual(read_file.read(), b'read me')

        with fs.open('file', 'rb') as read_file:
            self.assertEqual(read_file.read(), b'read me')
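Outside the test suite, the same wrapper protects a filesystem handed to untrusted code while leaving reads untouched; a compact sketch:

import fs
from fs import errors, wrap

mem = fs.open_fs('mem://')
mem.writetext('/config.ini', '[core]\n')

ro = wrap.read_only(mem)
print(ro.readtext('/config.ini'))  # reads pass through to the wrapped FS
try:
    ro.remove('/config.ini')
except errors.ResourceReadOnly:
    print('writes are rejected')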
Example #21
 def setUp(self):
     self.fs_tmp = fs.open_fs("temp://")
Example #22
 def setUp(self):
     self.tmpfs = fs.open_fs("temp://")
Example #23
            new_path = path + '/' + obj
            buildDB(root, path=new_path, orig_path=orig_path)

        else:
            print('file found:', obj)

            full_path = orig_path + path + '/' + obj
            db.append(full_path)


class MainHandler(tornado.web.RequestHandler):
    async def get(self):
        self.render('index.html')


def makeApp():
    return tornado.web.Application([(r'/', MainHandler)])


if __name__ == '__main__':
    if len(sys.argv) > 2:
        if sys.argv[1] == '-b':  # Build
            root = open_fs(sys.argv[2])

            buildDB(root, orig_path=sys.argv[2])
            pickle.dump(db, open('db', 'wb'))

    # app = makeApp()
    # app.listen(3000)
    # tornado.ioloop.IOLoop.current().start()
Example #24
 def test_open_wrongid(self):
     with self.assertRaises(ValueError):
         y = fs.open_fs('youtube://12345')
Example #25
def mock():
    return open_fs(openers.MEMORY)
Example #26
#!/usr/bin/env python
# -*- coding: utf-8 -*-

from __future__ import (division, absolute_import, print_function,
                        unicode_literals)

import os
import sys

from fs import open_fs

file_path = sys.argv[1]
fs_url = sys.argv[2]

filename = os.path.basename(file_path)

with open_fs(fs_url) as fs:
    if fs.exists(filename):
        print("destination exists! aborting.")
    else:
        with open(file_path, "rb") as bin_file:
            fs.upload(filename, bin_file)
print("upload successful!")
Example #27
def fs(opener):
    return open_fs(opener)
Example #28
 def open_fs(cls, *args, **kwargs):
     return cls(open_fs(*args, **kwargs))
Example #29
def os(path='.'):
    # We will use the current dir in the operating system fs as default value
    return open_fs(openers.OPERATING_SYSTEM + path)
Example #30
  def poll(self):
    """Check for new or updated visualizations.
    Since the main use case involves remote files mounted with SSHFS/NFS, polling is
    the only viable mechanism to detect changes. This is further argued here:
    https://github.com/samuelcolvin/watchgod#why-no-inotify--kqueue--fsevent--winapi-support"""

    if not self.base_folder: return

    if not self.fs:  # create a file system object for the visualizations folder
      try:
        self.fs = open_fs(self.base_folder + '/visualizations')
      except FSError:
        # directory doesn't exist yet, try again later
        logger.debug(f"Vis loader thread: no visualizations directory: {self.base_folder}")
        self.timer.start(self.poll_time * 1000)
        return

    # find files in the visualizations directory
    if self.files_iterator is None:
      self.files_iterator = self.fs.filterdir('.', files=['*.pth'], namespaces=['details'])

    if self.retry_file:  # try the same file again if required
      logger.debug(f"Vis loader thread: Retrying file {self.retry_file}")
      entry = self.retry_file
      self.retry_file = None
    else:
      # get next file
      try:
        entry = next(self.files_iterator)
      except StopIteration:
        entry = None

    # get pytorch pickle files
    if entry:
      name = entry.name[:-4]  # remove extension
      new_size = entry.size

      if new_size != self.known_file_sizes.get(name):
        logger.debug(f"Vis loader thread: File changed, reloading: {name}")

        # new file or file size changed
        self.known_file_sizes[name] = new_size

        # if the source code hasn't been loaded yet, read it
        if name not in self.source_code:
          try:
            self.source_code[name] = self.fs.readtext(name + '.py')
          except FSError:  # not found, must be a built-in (like tshow)
            self.source_code[name] = None

        # load the file (asynchronously with the main thread)
        try:
          with self.fs.open(name + '.pth', mode='rb') as file:
            data = load(file)

          if not isinstance(data, dict) or 'func' not in data:
            raise OSError("Attempted to load a visualization saved with a different protocol version (saving with PyTorch and loading without it is not supported, and vice-versa).")

          # send a signal with the results to the main thread
          self.visualization_ready.emit(name, data, self.source_code[name], self.base_folder)

        except Exception as err:
          # ignore errors about incomplete data, since file may
          # still be written to; otherwise log the error.
          if isinstance(err, RuntimeError) and 'storage has wrong size' in str(err):
            self.retry_file = entry  # try this file again later
          else:
            logger.exception(f"Error loading visualization data from {self.base_folder}/{name}.pth")
      else:
        logger.debug(f"Vis loader thread: File did not change: {name}")
    
    # wait a bit before checking next file, or a longer time if finished all files.
    # if the experiment is done, don't check again at the end.
    if entry:
      self.timer.start(100)
    elif not self.exp_done:
      self.files_iterator = None  # check directory contents from scratch next time
      self.timer.start(self.poll_time * 1000)
    else:
      logger.debug(f"Vis loader thread: Experiment done, not reloading any visualizations.")
Example #31
from collections import defaultdict
import hashlib
import sys

from fs import open_fs


def get_hash(getbin_file):
    """Get the md5 hash of a file."""
    getfile_hash = hashlib.md5()
    while True:
        chunk = getbin_file.read(1024 * 1024)
        if not chunk:
            break
        getfile_hash.update(chunk)
    return getfile_hash.hexdigest()


hashes = defaultdict(list)
with open_fs(sys.argv[1]) as fs:
    for path in fs.walk.files():
        with fs.open(path, "rb") as bin_file:
            file_hash = get_hash(bin_file)
        hashes[file_hash].append(path)

for paths in hashes.values():
    if len(paths) > 1:
        for path in paths:
            print(f" {path}")
        print()
Example #32
 def setUp(self):
     self.destfs = fs.open_fs("temp://")
     self.tmpdir = self.destfs.getsyspath("/")
     warnings._showwarning = warnings.showwarning
Example #33
 def test_open_plist_by_id(self):
     x = fs.open_fs('youtube://PLYlZ5VtcfgitfPyMGkZsYkhLm-eOZeQpY')
     x.close()
Example #34
def get_sample_project_mapping(basepath,
                               samplesheet=None,
                               suffix='.fastq.gz',
                               absolute_paths=False,
                               catch_undetermined=True):
    """
    Given a path containing fastq.gz files, possibly nested in Project/Sample
    directories, return a data structure mapping fastq-samples to projects.

    TODO: The SampleSheet.csv may be used as a hint but is not required.

    :param basepath: Path to directory tree of fastq.gz files - eg, bcl2fastq
                     output directory
    :type basepath: str
    :param catch_undetermined: Map any FASTQ files with 'Undetermined' in the
                               name to a project 'Undetermined_indices'
    :type catch_undetermined: bool
    :return: Dictionary lists, {project_id : [relative fastq.gz paths]}
    :rtype: OrderedDict
    """

    if not os.path.exists(basepath):
        raise IOError('Path %s does not exist!' % basepath)

    fq_files = []
    with open_fs(basepath) as vfs:
        for fn in vfs.walk.files():
            if fn.endswith(suffix):
                fq_files.append(fn.lstrip('/').lstrip('\\'))

    fq_files = sorted(fq_files)

    project_mapping = OrderedDict()
    for fqpath in fq_files:
        project = ''
        fqfile = fqpath
        parts = Path(fqpath).parts
        if len(parts) == 3:
            project, sample_id, fqfile = map(str, parts)
        if len(parts) == 2:
            project, fqfile = map(str, parts)
        if len(parts) == 1:
            fqfile = str(parts[0])

        if catch_undetermined and 'Undetermined_' in fqfile:
            project = u'Undetermined_indices'

        # TODO: we currently don't deal with Project_ prefixes, really
        #       the project ID doesn't include Project_. If we strip
        #       this here, maybe we need to include the project directory
        #       in the fastq paths so we can know the path and project id
        #       - will require fixes to downstream code that
        #       does join(bcl2fastq_output_dir, project_id, fastq_file)

        # TODO: also incorporate sample_id in this datastructure
        if project not in project_mapping:
            project_mapping[project] = []
        if absolute_paths:
            fqpath = join(basepath, fqpath)
        project_mapping[project].append(fqpath)

    # TODO: Use the SampleSheet.csv to validate or hint
    # TODO: Also assign sample_id, sample_name, lane, read, etc
    #       we could use parse_sample_info_from_filename for this,
    #       and/or use the FASTQ header(s)

    return project_mapping
Example #35
class BakeryConfig(AppConfig):
    name = 'bakery'
    verbose_name = "Bakery"
    filesystem_name = getattr(settings, 'BAKERY_FILESYSTEM', "osfs:///")
    filesystem = fs.open_fs(filesystem_name)
Example #36
 def setUpClass(cls):
     cls.fs_project = fs.open_fs(os.path.join(__file__, pardir, pardir))
     cls.fs_examples = cls.fs_project.opendir("examples")
     cls.dir_config = cls.fs_project.getsyspath("static/isa-config")
Example #37
 def __init__(self):
     yt_fs = fs.open_fs('dlna:///')
     dlna_fs = fs.dlna.DLNAFS(timeout=10)
Example #38
import functools
import os

from flask import Flask
from fs import open_fs
from PIL import Image

from . import database, mojang
from .validate import regex, noneof

app = Flask(__name__)

db_path = os.getenv('DATABASE_URL', 'sqlite://hdskins.sqlite')
textures_fs = os.getenv('TEXTURES_FS', 'file://.')
root_url = os.getenv('ROOT_URL', 'http://127.0.0.1')
offline_mode = bool(os.getenv('OFFLINE', False))

blacklist = ["cape"]

upload_fs = open_fs(textures_fs, cwd='textures', writeable=True)

# Customize the FS upload args if they exist. Mostly for S3
if hasattr(upload_fs, 'upload_args'):
    upload_fs.upload_args = {
        'ContentType': 'image/png',
        'ACL': 'public-read'  # S3: Make public
    }


def open_database():
    return database.Database(db_path)


def authorize(func):
    @functools.wraps(func)
Example #39
 def test_open_not_exist(self):
     with self.assertRaises(IOError):
         y = fs.open_fs(
             'youtube://https://www.youtube.com/watch?v=cpPG1bKHYKc')
Example #40
 def test_open_file_by_id(self):
     y = fs.open_fs('youtube://cpPG0bKHYKc')
     y.close()
Example #41
import os
from fs import open_fs
import gnupg
import logging
from . import KeyManager, Encrypt, Decrypt

home_fs = open_fs('.')

logger = logging.getLogger('init')
logger.setLevel(logging.INFO)

ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)

formatter = logging.Formatter(
    '%(asctime)s - %(name)s - %(levelname)s - %(message)s')

ch.setFormatter(formatter)

logger.addHandler(ch)

if not os.path.exists('signatures/'):
    home_fs.makedir(u'signatures')
    logger.info('Created signatures directory.')
else:
    logger.info('signatures directory already exists, skipping...')

if not os.path.exists('keys'):
    home_fs.touch(u'keys')
    logger.info('Created id-key storage file.')
Example #42
 def test_open_plist_by_id(self):
     x = fs.open_fs(
         'youtube://https://www.youtube.com/playlist?list=PLYlZ5VtcfgitfPyMGkZsYkhLm-eOZeQpY'
     )
     x.close()
Example #43
 def test_open_file_by_url(self):
     y = fs.open_fs('youtube://https://www.youtube.com/watch?v=cpPG0bKHYKc')
     y.close()
Example #44
    def index(self, conf):
        fs = open_fs(conf['url'])
        walker = Walker()

        for path in walker.files(fs):
            yield Pyfsfile(fs, path)
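A Walker holds the traversal settings, so one instance can be reused across filesystems; a self-contained sketch with illustrative patterns (Pyfsfile is specific to the project above and is not used here):

from fs import open_fs
from fs.walk import Walker

walker = Walker(filter=['*.txt'], exclude_dirs=['.git'])
with open_fs('mem://') as demo:
    demo.writetext('/a.txt', 'x')
    demo.makedir('/.git')
    print(list(walker.files(demo)))  # ['/a.txt']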
Example #45
def convert(
    in_path,
    out_path,
    study_identifier,
    usermeta=None,
    split=True,
    merge=False,
    jobs=1,
    template_directory=None,
    verbose=True,
):
    """ Parses a study from given *in_path* and then creates an ISA file.

    A new folder is created in the out directory bearing the name of
    the study identifier.

    Arguments:
        in_path (str): path to the directory or archive containing mzml files
        out_path (str): path to the output directory
        study_identifier (str): study identifier (e.g. MTBLSxxx)

    Keyword Arguments:
        usermeta (str, optional): the path to a json file, an xlsx file or
            directly a json formatted string containing user-defined
            metadata [default: None]
        split (bool): split assay files based on the polarity of the scans
            [default: True]
        merge (bool): for imzML studies, try to merge centroid and profile
            scans in a single sample row [default: False]
        jobs (int): the number of jobs to use for parsing mzML or imzML files
            [default: 1]
        template_directory (str, optional): the path to a directory
            containing custom templates to use when importing ISA tab
            [default: None]
        verbose (bool): display more output [default: True]
    """

    PARSERS = {"mzML": MzMLFile, "imzML": ImzMLFile}

    # open user metadata file if any
    meta_loader = UserMetaLoader(usermeta)

    # open the filesystem containing the files
    with fs.open_fs(in_path) as filesystem:

        # get all mzML files
        mzml_files = list(
            filesystem.filterdir("/", files=["*mzML"], exclude_dirs=["*"])
        )

        if mzml_files:
            # store the first mzml_files extension
            extension = mzml_files[0].name.rsplit(os.path.extsep)[-1]
            parser = PARSERS[extension]

            # prepare the parser arguments
            files_iter = [
                (filesystem, mzml_file.name, parser)
                for mzml_file in sorted(mzml_files, key=lambda f: f.name)
            ]

            # wrap in a progress bar if needed
            if not verbose and tqdm is not None:
                files_iter = tqdm.tqdm(files_iter)

            # parse using threads if needed
            if jobs > 1:
                with contextlib.closing(multiprocessing.pool.ThreadPool(jobs)) as pool:
                    metalist = pool.map(_parse_file, files_iter)
            else:
                metalist = list(map(_parse_file, files_iter))

            # merge spectra if needed
            if merge and extension == "imzML":
                if verbose:
                    print("Attempting to merge profile and centroid scans")
                metalist = merge_spectra(metalist)

            # write isa-tab file
            if metalist:
                if verbose:
                    print("Parsing mzML meta information into ISA-Tab structure")
                    print(out_path, template_directory)
                isa_tab = ISA_Tab(
                    out_path,
                    study_identifier,
                    usermeta=meta_loader.usermeta,
                    template_directory=template_directory,
                )
                isa_tab.write(metalist, extension, split=split)

        else:
            warnings.warn("No files were found in {}.".format(in_path), UserWarning)